Compare commits

..

1 commit

Author | SHA1 | Message | Date
86fc148806
WIP adding runtime-evaluated generic parameters
All checks were successful: test (push) succeeded in 14m43s
2024-08-06 02:53:45 -07:00
139 changed files with 15078 additions and 67528 deletions

View file

@ -1,77 +0,0 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on:
workflow_call:
outputs:
cache-primary-key:
value: ${{ jobs.deps.outputs.cache-primary-key }}
jobs:
deps:
runs-on: debian-12
outputs:
cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }}
steps:
- uses: https://git.libre-chip.org/mirrors/checkout@v3
with:
fetch-depth: 0
- uses: https://git.libre-chip.org/mirrors/cache/restore@v3
id: restore-deps
with:
path: deps
key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }}
lookup-only: true
- name: Install Apt packages
if: steps.restore-deps.outputs.cache-hit != 'true'
run: |
apt-get update -qq
apt-get install -qq \
bison \
build-essential \
ccache \
clang \
cvc5 \
flex \
gawk \
g++ \
git \
libboost-filesystem-dev \
libboost-python-dev \
libboost-system-dev \
libffi-dev \
libreadline-dev \
lld \
pkg-config \
python3 \
python3-click \
tcl-dev \
zlib1g-dev
- name: Install Firtool
if: steps.restore-deps.outputs.cache-hit != 'true'
run: |
mkdir -p deps
wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz
sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz'
tar -C deps -xvaf deps/firrtl.tar.gz
rm -rf deps/firtool
mv deps/firtool-1.86.0 deps/firtool
- name: Get SymbiYosys
if: steps.restore-deps.outputs.cache-hit != 'true'
run: |
git clone --depth=1 --branch=yosys-0.45 https://git.libre-chip.org/mirrors/sby deps/sby
- name: Build Z3
if: steps.restore-deps.outputs.cache-hit != 'true'
run: |
git clone --depth=1 --recursive --branch=z3-4.13.3 https://git.libre-chip.org/mirrors/z3 deps/z3
(cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local)
make -C deps/z3/build -j"$(nproc)"
- name: Build Yosys
if: steps.restore-deps.outputs.cache-hit != 'true'
run: |
git clone --depth=1 --recursive --branch=0.45 https://git.libre-chip.org/mirrors/yosys deps/yosys
make -C deps/yosys -j"$(nproc)"
- uses: https://git.libre-chip.org/mirrors/cache/save@v3
if: steps.restore-deps.outputs.cache-hit != 'true'
with:
path: deps
key: ${{ steps.restore-deps.outputs.cache-primary-key }}

View file

@ -1,62 +1,19 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on: [push, pull_request]
jobs:
deps:
uses: ./.forgejo/workflows/deps.yml
test:
runs-on: debian-12
needs: deps
steps:
- uses: https://git.libre-chip.org/mirrors/checkout@v3
- uses: https://code.forgejo.org/actions/checkout@v3
with:
fetch-depth: 0
- run: |
scripts/check-copyright.sh
- run: |
apt-get update -qq
apt-get install -qq \
bison \
build-essential \
ccache \
clang \
cvc5 \
flex \
gawk \
git \
libboost-filesystem-dev \
libboost-python-dev \
libboost-system-dev \
libffi-dev \
libreadline-dev \
lld \
pkg-config \
python3 \
python3-click \
tcl-dev \
z3 \
zlib1g-dev
- run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.82.0
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.79.0
source "$HOME/.cargo/env"
echo "$PATH" >> "$GITHUB_PATH"
- uses: https://git.libre-chip.org/mirrors/cache/restore@v3
with:
path: deps
key: ${{ needs.deps.outputs.cache-primary-key }}
fail-on-cache-miss: true
- run: |
make -C deps/z3/build install
make -C deps/sby install
make -C deps/yosys install
export PATH="$(realpath deps/firtool/bin):$PATH"
echo "$PATH" >> "$GITHUB_PATH"
- uses: https://git.libre-chip.org/mirrors/rust-cache@v2
- uses: https://github.com/Swatinem/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/master' }}
- run: cargo test
- run: cargo build --tests --features=unstable-doc
- run: cargo test --doc --features=unstable-doc
- run: cargo test --features=unstable-doc
- run: cargo doc --features=unstable-doc
- run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher

4
.gitignore vendored
View file

@ -1,4 +1,2 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
/target
.vscode
.vscode

276
Cargo.lock generated
View file

@ -2,6 +2,18 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "ahash"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "allocator-api2"
version = "0.2.16"
@ -44,7 +56,7 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391"
dependencies = [
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@ -54,21 +66,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19"
dependencies = [
"anstyle",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
name = "arrayref"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
[[package]]
name = "arrayvec"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "autocfg"
version = "1.1.0"
@ -109,20 +109,6 @@ dependencies = [
"wyz",
]
[[package]]
name = "blake3"
version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if",
"constant_time_eq",
"serde",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@ -132,15 +118,6 @@ dependencies = [
"generic-array",
]
[[package]]
name = "cc"
version = "1.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -193,12 +170,6 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422"
[[package]]
name = "constant_time_eq"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
[[package]]
name = "cpufeatures"
version = "0.2.12"
@ -218,27 +189,6 @@ dependencies = [
"typenum",
]
[[package]]
name = "ctor"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "derive_destructure2"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64b697ac90ff296f0fc031ee5a61c7ac31fb9fff50e3fb32873b09223613fc0c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "digest"
version = "0.10.7"
@ -268,7 +218,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@ -289,39 +239,32 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fayalite"
version = "0.3.0"
version = "0.2.0"
dependencies = [
"bitvec",
"blake3",
"clap",
"ctor",
"eyre",
"fayalite-proc-macros",
"fayalite-visit-gen",
"hashbrown",
"jobslot",
"num-bigint",
"num-traits",
"os_pipe",
"petgraph",
"serde",
"serde_json",
"tempfile",
"trybuild",
"vec_map",
"which",
]
[[package]]
name = "fayalite-proc-macros"
version = "0.3.0"
version = "0.2.0"
dependencies = [
"fayalite-proc-macros-impl",
]
[[package]]
name = "fayalite-proc-macros-impl"
version = "0.3.0"
version = "0.2.0"
dependencies = [
"base16ct",
"num-bigint",
@ -335,7 +278,7 @@ dependencies = [
[[package]]
name = "fayalite-visit-gen"
version = "0.3.0"
version = "0.2.0"
dependencies = [
"indexmap",
"prettyplease",
@ -347,18 +290,6 @@ dependencies = [
"thiserror",
]
[[package]]
name = "fixedbitset"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "funty"
version = "2.0.0"
@ -375,17 +306,6 @@ dependencies = [
"version_check",
]
[[package]]
name = "getrandom"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
dependencies = [
"cfg-if",
"libc",
"wasi",
]
[[package]]
name = "glob"
version = "0.3.1"
@ -394,13 +314,12 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "hashbrown"
version = "0.15.2"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
dependencies = [
"ahash",
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
@ -415,7 +334,7 @@ version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
dependencies = [
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@ -426,9 +345,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "2.9.0"
version = "2.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
dependencies = [
"equivalent",
"hashbrown",
@ -447,20 +366,6 @@ version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "jobslot"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d"
dependencies = [
"cfg-if",
"derive_destructure2",
"getrandom",
"libc",
"scopeguard",
"windows-sys 0.59.0",
]
[[package]]
name = "libc"
version = "0.2.153"
@ -475,10 +380,11 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "num-bigint"
version = "0.4.6"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
@ -507,28 +413,6 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "os_pipe"
version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982"
dependencies = [
"libc",
"windows-sys 0.59.0",
]
[[package]]
name = "petgraph"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06"
dependencies = [
"fixedbitset",
"hashbrown",
"indexmap",
"serde",
]
[[package]]
name = "prettyplease"
version = "0.2.20"
@ -541,9 +425,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.92"
version = "1.0.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
dependencies = [
"unicode-ident",
]
@ -573,7 +457,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@ -582,12 +466,6 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "serde"
version = "1.0.202"
@ -631,12 +509,6 @@ dependencies = [
"digest",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "strsim"
version = "0.11.1"
@ -645,9 +517,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.93"
version = "2.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058"
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
dependencies = [
"proc-macro2",
"quote",
@ -669,7 +541,7 @@ dependencies = [
"cfg-if",
"fastrand",
"rustix",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@ -734,24 +606,12 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "which"
version = "6.0.1"
@ -804,25 +664,15 @@ dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
@ -831,51 +681,45 @@ dependencies = [
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
version = "0.52.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
[[package]]
name = "winsafe"
@ -891,3 +735,23 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = [
"tap",
]
[[package]]
name = "zerocopy"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

View file

@ -5,40 +5,31 @@ resolver = "2"
members = ["crates/*"]
[workspace.package]
version = "0.3.0"
version = "0.2.0"
license = "LGPL-3.0-or-later"
edition = "2021"
repository = "https://git.libre-chip.org/libre-chip/fayalite"
keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
categories = ["simulation", "development-tools", "compilers"]
rust-version = "1.82.0"
rust-version = "1.79"
[workspace.dependencies]
fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
fayalite-proc-macros = { version = "=0.2.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.2.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.2.0", path = "crates/fayalite-visit-gen" }
base16ct = "0.2.0"
bitvec = { version = "1.0.1", features = ["serde"] }
blake3 = { version = "1.5.4", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
ctor = "0.2.8"
eyre = "0.6.12"
hashbrown = "0.15.2"
indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.19"
num-bigint = "0.4.6"
hashbrown = "0.14.3"
indexmap = { version = "2.2.6", features = ["serde"] }
num-bigint = "0.4.4"
num-traits = "0.2.16"
os_pipe = "1.2.1"
petgraph = "0.8.1"
prettyplease = "0.2.20"
proc-macro2 = "1.0.83"
quote = "1.0.36"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = { version = "1.0.117", features = ["preserve_order"] }
sha2 = "0.10.8"
syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] }
syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] }
tempfile = "3.10.1"
thiserror = "1.0.61"
trybuild = "1.0"
vec_map = "0.8.2"
which = "6.0.1"

View file

@ -1,7 +1,3 @@
<!--
SPDX-License-Identifier: LGPL-3.0-or-later
See Notices.txt for copyright information
-->
# Fayalite
Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).
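
For orientation, here is a minimal sketch of the kind of code the proc macros touched later in this diff are meant to support. The builder name `m`, the `m.input()`/`m.output()` calls, `UInt<8>`, `connect`, and the `prelude` import are assumptions inferred from the macro internals below (the `hdl_module` keyword, the `input`/`output` keywords, and the `ModuleBuilder` references), not statements from the README, so treat it as illustrative only:

```rust
use fayalite::prelude::*; // assumed prelude re-exports

// Hypothetical module: one 8-bit input forwarded to one 8-bit output.
#[hdl_module]
fn passthrough() {
    #[hdl]
    let data_in: UInt<8> = m.input();
    #[hdl]
    let data_out: UInt<8> = m.output();
    connect(data_out, data_in);
}
```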

View file

@ -13,11 +13,11 @@ rust-version.workspace = true
version.workspace = true
[dependencies]
base16ct.workspace = true
num-bigint.workspace = true
prettyplease.workspace = true
proc-macro2.workspace = true
quote.workspace = true
sha2.workspace = true
syn.workspace = true
tempfile.workspace = true
base16ct = { workspace = true }
num-bigint = { workspace = true }
prettyplease = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
sha2 = { workspace = true }
syn = { workspace = true }
tempfile = { workspace = true }

View file

@ -220,36 +220,29 @@ forward_fold!(syn::ExprArray => fold_expr_array);
forward_fold!(syn::ExprCall => fold_expr_call);
forward_fold!(syn::ExprIf => fold_expr_if);
forward_fold!(syn::ExprMatch => fold_expr_match);
forward_fold!(syn::ExprMethodCall => fold_expr_method_call);
forward_fold!(syn::ExprPath => fold_expr_path);
forward_fold!(syn::ExprRepeat => fold_expr_repeat);
forward_fold!(syn::ExprStruct => fold_expr_struct);
forward_fold!(syn::ExprTuple => fold_expr_tuple);
forward_fold!(syn::FieldPat => fold_field_pat);
forward_fold!(syn::Ident => fold_ident);
forward_fold!(syn::Member => fold_member);
forward_fold!(syn::Path => fold_path);
forward_fold!(syn::Type => fold_type);
forward_fold!(syn::TypePath => fold_type_path);
forward_fold!(syn::WherePredicate => fold_where_predicate);
no_op_fold!(proc_macro2::Span);
no_op_fold!(syn::parse::Nothing);
no_op_fold!(syn::token::Brace);
no_op_fold!(syn::token::Bracket);
no_op_fold!(syn::token::Group);
no_op_fold!(syn::token::Paren);
no_op_fold!(syn::Token![_]);
no_op_fold!(syn::Token![,]);
no_op_fold!(syn::Token![;]);
no_op_fold!(syn::Token![:]);
no_op_fold!(syn::Token![::]);
no_op_fold!(syn::Token![..]);
no_op_fold!(syn::Token![.]);
no_op_fold!(syn::Token![#]);
no_op_fold!(syn::Token![<]);
no_op_fold!(syn::Token![=]);
no_op_fold!(syn::Token![=>]);
no_op_fold!(syn::Token![>]);
no_op_fold!(syn::Token![|]);
no_op_fold!(syn::Token![enum]);
no_op_fold!(syn::Token![extern]);
@ -258,4 +251,3 @@ no_op_fold!(syn::Token![mut]);
no_op_fold!(syn::Token![static]);
no_op_fold!(syn::Token![struct]);
no_op_fold!(syn::Token![where]);
no_op_fold!(usize);

File diff suppressed because it is too large.

File diff suppressed because it is too large.

View file

@ -1,138 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
hdl_type_common::{
get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType,
TypesParser,
},
kw, Errors, HdlAttr,
};
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{parse_quote_spanned, Attribute, Generics, Ident, ItemType, Token, Type, Visibility};
#[derive(Clone, Debug)]
pub(crate) struct ParsedTypeAlias {
pub(crate) attrs: Vec<Attribute>,
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
pub(crate) vis: Visibility,
pub(crate) type_token: Token![type],
pub(crate) ident: Ident,
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
pub(crate) eq_token: Token![=],
pub(crate) ty: MaybeParsed<ParsedType, Type>,
pub(crate) semi_token: Token![;],
}
impl ParsedTypeAlias {
fn parse(item: ItemType) -> syn::Result<Self> {
let ItemType {
mut attrs,
vis,
type_token,
ident,
mut generics,
eq_token,
ty,
semi_token,
} = item;
let mut errors = Errors::new();
let mut options = errors
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
&mut attrs,
))
.unwrap_or_default();
errors.ok(options.body.validate());
let ItemOptions {
outline_generated: _,
target: _,
custom_bounds,
no_static,
no_runtime_generics: _,
cmp_eq,
} = options.body;
if let Some((no_static,)) = no_static {
errors.error(no_static, "no_static is not valid on type aliases");
}
if let Some((cmp_eq,)) = cmp_eq {
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
}
let generics = if custom_bounds.is_some() {
MaybeParsed::Unrecognized(generics)
} else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) {
MaybeParsed::Parsed(generics)
} else {
MaybeParsed::Unrecognized(generics)
};
let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors);
errors.finish()?;
Ok(Self {
attrs,
options,
vis,
type_token,
ident,
generics,
eq_token,
ty,
semi_token,
})
}
}
impl ToTokens for ParsedTypeAlias {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
attrs,
options,
vis,
type_token,
ident,
generics,
eq_token,
ty,
semi_token,
} = self;
let ItemOptions {
outline_generated: _,
target,
custom_bounds: _,
no_static: _,
no_runtime_generics,
cmp_eq: _,
} = &options.body;
let target = get_target(target, ident);
let mut type_attrs = attrs.clone();
type_attrs.push(parse_quote_spanned! {ident.span()=>
#[allow(type_alias_bounds)]
});
ItemType {
attrs: type_attrs,
vis: vis.clone(),
type_token: *type_token,
ident: ident.clone(),
generics: generics.into(),
eq_token: *eq_token,
ty: Box::new(ty.clone().into()),
semi_token: *semi_token,
}
.to_tokens(tokens);
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
(generics, ty, no_runtime_generics)
{
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
ty.make_hdl_type_expr(context)
})
}
}
}
pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result<TokenStream> {
let item = ParsedTypeAlias::parse(item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = item.to_token_stream();
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "hdl-type-alias-");
}
Ok(contents)
}
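
As context for the file above, a hypothetical call site for the `#[hdl]` type-alias support it implements; the attribute form and the `no_runtime_generics` option come from the parser above, while the alias body `UInt<8>` is an illustrative assumption rather than something taken from this diff:

```rust
// Hypothetical #[hdl] type alias handled by hdl_type_alias_impl above;
// `UInt<8>` as the aliased type is assumed for illustration only.
#[hdl(no_runtime_generics)]
pub type Byte = UInt<8>;
```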

File diff suppressed because it is too large.

View file

@ -3,46 +3,25 @@
#![cfg_attr(test, recursion_limit = "512")]
use proc_macro2::{Span, TokenStream};
use quote::{quote, ToTokens};
use std::{
collections::{hash_map::Entry, HashMap},
io::{ErrorKind, Write},
};
use std::io::{ErrorKind, Write};
use syn::{
bracketed,
ext::IdentExt,
parenthesized,
bracketed, parenthesized,
parse::{Parse, ParseStream, Parser},
parse_quote,
punctuated::{Pair, Punctuated},
spanned::Spanned,
token::{Bracket, Paren},
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token,
punctuated::Pair,
AttrStyle, Attribute, Error, Item, Token,
};
mod fold;
mod hdl_bundle;
mod hdl_enum;
mod hdl_type_alias;
mod hdl_type_common;
mod module;
mod process_cfg;
pub(crate) trait CustomToken:
Copy
+ Spanned
+ ToTokens
+ std::fmt::Debug
+ Eq
+ std::hash::Hash
+ Default
+ quote::IdentFragment
+ Parse
{
const IDENT_STR: &'static str;
}
mod value_derive_common;
mod value_derive_enum;
mod value_derive_struct;
mod kw {
pub(crate) use syn::token::Extern as extern_;
pub(crate) use syn::token::{
Enum as enum_, Extern as extern_, Static as static_, Struct as struct_, Where as where_,
};
macro_rules! custom_keyword {
($kw:ident) => {
@ -59,26 +38,13 @@ mod kw {
}
crate::fold::no_op_fold!($kw);
impl crate::CustomToken for $kw {
const IDENT_STR: &'static str = stringify!($kw);
}
};
}
custom_keyword!(__evaluated_cfgs);
custom_keyword!(all);
custom_keyword!(any);
custom_keyword!(cfg);
custom_keyword!(cfg_attr);
custom_keyword!(clock_domain);
custom_keyword!(cmp_eq);
custom_keyword!(connect_inexact);
custom_keyword!(custom_bounds);
custom_keyword!(flip);
custom_keyword!(hdl);
custom_keyword!(hdl_module);
custom_keyword!(incomplete_wire);
custom_keyword!(input);
custom_keyword!(instance);
custom_keyword!(m);
@ -86,14 +52,11 @@ mod kw {
custom_keyword!(memory_array);
custom_keyword!(memory_with_init);
custom_keyword!(no_reset);
custom_keyword!(no_runtime_generics);
custom_keyword!(no_static);
custom_keyword!(not);
custom_keyword!(outline_generated);
custom_keyword!(output);
custom_keyword!(reg_builder);
custom_keyword!(reset);
custom_keyword!(sim);
custom_keyword!(reset_default);
custom_keyword!(skip);
custom_keyword!(target);
custom_keyword!(wire);
@ -102,34 +65,34 @@ mod kw {
type Pound = Token![#]; // work around https://github.com/rust-lang/rust/issues/50676
#[derive(Clone, Debug)]
pub(crate) struct HdlAttr<T, KW> {
pub(crate) struct HdlAttr<T> {
pub(crate) pound_token: Pound,
pub(crate) style: AttrStyle,
pub(crate) bracket_token: syn::token::Bracket,
pub(crate) kw: KW,
pub(crate) hdl: kw::hdl,
pub(crate) paren_token: Option<syn::token::Paren>,
pub(crate) body: T,
}
crate::fold::impl_fold! {
struct HdlAttr<T, KW,> {
struct HdlAttr<T,> {
pound_token: Pound,
style: AttrStyle,
bracket_token: syn::token::Bracket,
kw: KW,
hdl: kw::hdl,
paren_token: Option<syn::token::Paren>,
body: T,
}
}
#[allow(dead_code)]
impl<T, KW> HdlAttr<T, KW> {
pub(crate) fn split_body(self) -> (HdlAttr<(), KW>, T) {
impl<T> HdlAttr<T> {
pub(crate) fn split_body(self) -> (HdlAttr<()>, T) {
let Self {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body,
} = self;
@ -138,19 +101,19 @@ impl<T, KW> HdlAttr<T, KW> {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body: (),
},
body,
)
}
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2, KW> {
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2> {
let Self {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body: _,
} = self;
@ -158,20 +121,17 @@ impl<T, KW> HdlAttr<T, KW> {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body,
}
}
pub(crate) fn as_ref(&self) -> HdlAttr<&T, KW>
where
KW: Clone,
{
pub(crate) fn as_ref(&self) -> HdlAttr<&T> {
let Self {
pound_token,
style,
bracket_token,
ref kw,
hdl,
paren_token,
ref body,
} = *self;
@ -179,20 +139,17 @@ impl<T, KW> HdlAttr<T, KW> {
pound_token,
style,
bracket_token,
kw: kw.clone(),
hdl,
paren_token,
body,
}
}
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(
self,
f: F,
) -> Result<HdlAttr<R, KW>, E> {
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(self, f: F) -> Result<HdlAttr<R>, E> {
let Self {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body,
} = self;
@ -200,17 +157,17 @@ impl<T, KW> HdlAttr<T, KW> {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body: f(body)?,
})
}
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R, KW> {
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R> {
let Self {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body,
} = self;
@ -218,7 +175,7 @@ impl<T, KW> HdlAttr<T, KW> {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body: f(body),
}
@ -226,32 +183,31 @@ impl<T, KW> HdlAttr<T, KW> {
fn to_attr(&self) -> Attribute
where
T: ToTokens,
KW: ToTokens,
{
parse_quote! { #self }
}
}
impl<T: Default, KW: Default> Default for HdlAttr<T, KW> {
impl<T: Default> Default for HdlAttr<T> {
fn default() -> Self {
T::default().into()
}
}
impl<T, KW: Default> From<T> for HdlAttr<T, KW> {
impl<T> From<T> for HdlAttr<T> {
fn from(body: T) -> Self {
HdlAttr {
pound_token: Default::default(),
style: AttrStyle::Outer,
bracket_token: Default::default(),
kw: Default::default(),
hdl: Default::default(),
paren_token: Default::default(),
body,
}
}
}
impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
impl<T: ToTokens> ToTokens for HdlAttr<T> {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pound_token.to_tokens(tokens);
match self.style {
@ -259,7 +215,7 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
AttrStyle::Outer => {}
};
self.bracket_token.surround(tokens, |tokens| {
self.kw.to_tokens(tokens);
self.hdl.to_tokens(tokens);
match self.paren_token {
Some(paren_token) => {
paren_token.surround(tokens, |tokens| self.body.to_tokens(tokens))
@ -267,7 +223,7 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
None => {
let body = self.body.to_token_stream();
if !body.is_empty() {
syn::token::Paren(self.kw.span())
syn::token::Paren(self.hdl.span)
.surround(tokens, |tokens| tokens.extend([body]));
}
}
@ -276,24 +232,18 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
}
}
fn is_hdl_attr<KW: CustomToken>(attr: &Attribute) -> bool {
attr.path().is_ident(KW::IDENT_STR)
fn is_hdl_attr(attr: &Attribute) -> bool {
attr.path().is_ident("hdl")
}
impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>>
where
KW: ToTokens,
{
impl<T: Parse> HdlAttr<T> {
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>> {
let mut retval = None;
let mut errors = Errors::new();
attrs.retain(|attr| {
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
if is_hdl_attr(attr) {
if retval.is_some() {
errors.push(Error::new_spanned(
attr,
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
));
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
}
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
false
@ -304,19 +254,13 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
errors.finish()?;
Ok(retval)
}
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>>
where
KW: ToTokens,
{
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>> {
let mut retval = None;
let mut errors = Errors::new();
for attr in attrs {
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
if is_hdl_attr(attr) {
if retval.is_some() {
errors.push(Error::new_spanned(
attr,
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
));
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
}
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
}
@ -337,7 +281,7 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
) -> syn::Result<Self> {
let bracket_content;
let bracket_token = bracketed!(bracket_content in input);
let kw = bracket_content.parse()?;
let hdl = bracket_content.parse()?;
let paren_content;
let body;
let paren_token;
@ -358,7 +302,7 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body,
})
@ -575,26 +519,6 @@ macro_rules! impl_extra_traits_for_options {
) => {
impl Copy for $option_enum_name {}
impl PartialEq for $option_enum_name {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for $option_enum_name {}
impl PartialOrd for $option_enum_name {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for $option_enum_name {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.variant().cmp(&other.variant())
}
}
impl quote::IdentFragment for $option_enum_name {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let _ = f;
@ -630,66 +554,6 @@ pub(crate) use impl_extra_traits_for_options;
macro_rules! options {
(
#[options = $options_name:ident]
$($tt:tt)*
) => {
crate::options! {
#[options = $options_name, punct = syn::Token![,], allow_duplicates = false]
$($tt)*
}
};
(
#[options = $options_name:ident, punct = $Punct:ty, allow_duplicates = true]
$(#[$($enum_meta:tt)*])*
$enum_vis:vis enum $option_enum_name:ident {
$($Variant:ident($key:ident $(, $value:ty)?),)*
}
) => {
crate::options! {
#[options = $options_name, punct = $Punct, allow_duplicates = (true)]
$(#[$($enum_meta)*])*
$enum_vis enum $option_enum_name {
$($Variant($key $(, $value)?),)*
}
}
impl Extend<$option_enum_name> for $options_name {
fn extend<T: IntoIterator<Item = $option_enum_name>>(&mut self, iter: T) {
iter.into_iter().for_each(|v| match v {
$($option_enum_name::$Variant(v) => {
self.$key = Some(v);
})*
});
}
}
impl FromIterator<$option_enum_name> for $options_name {
fn from_iter<T: IntoIterator<Item = $option_enum_name>>(iter: T) -> Self {
let mut retval = Self::default();
retval.extend(iter);
retval
}
}
impl Extend<$options_name> for $options_name {
fn extend<T: IntoIterator<Item = $options_name>>(&mut self, iter: T) {
iter.into_iter().for_each(|v| {
$(if let Some(v) = v.$key {
self.$key = Some(v);
})*
});
}
}
impl FromIterator<$options_name> for $options_name {
fn from_iter<T: IntoIterator<Item = $options_name>>(iter: T) -> Self {
let mut retval = Self::default();
retval.extend(iter);
retval
}
}
};
(
#[options = $options_name:ident, punct = $Punct:ty, allow_duplicates = $allow_duplicates:expr]
$(#[$($enum_meta:tt)*])*
$enum_vis:vis enum $option_enum_name:ident {
$($Variant:ident($key:ident $(, $value:ty)?),)*
@ -703,11 +567,8 @@ macro_rules! options {
}
#[derive(Clone, Debug, Default)]
#[allow(non_snake_case)]
$enum_vis struct $options_name {
$(
$enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,
)*
$($enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,)*
}
crate::fold::impl_fold! {
@ -716,43 +577,6 @@ macro_rules! options {
}
}
const _: () = {
#[derive(Clone, Debug)]
$enum_vis struct Iter($enum_vis $options_name);
impl IntoIterator for $options_name {
type Item = $option_enum_name;
type IntoIter = Iter;
fn into_iter(self) -> Self::IntoIter {
Iter(self)
}
}
impl Iterator for Iter {
type Item = $option_enum_name;
fn next(&mut self) -> Option<Self::Item> {
$(
if let Some(value) = self.0.$key.take() {
return Some($option_enum_name::$Variant(value));
}
)*
None
}
#[allow(unused_mut, unused_variables)]
fn fold<B, F: FnMut(B, Self::Item) -> B>(mut self, mut init: B, mut f: F) -> B {
$(
if let Some(value) = self.0.$key.take() {
init = f(init, $option_enum_name::$Variant(value));
}
)*
init
}
}
};
impl syn::parse::Parse for $options_name {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
#![allow(unused_mut, unused_variables, unreachable_code)]
@ -761,7 +585,7 @@ macro_rules! options {
let old_input = input.fork();
match input.parse::<$option_enum_name>()? {
$($option_enum_name::$Variant(v) => {
if retval.$key.replace(v).is_some() && !$allow_duplicates {
if retval.$key.replace(v).is_some() {
return Err(old_input.error(concat!("duplicate ", stringify!($key), " option")));
}
})*
@ -769,7 +593,7 @@ macro_rules! options {
if input.is_empty() {
break;
}
input.parse::<$Punct>()?;
input.parse::<syn::Token![,]>()?;
}
Ok(retval)
}
@ -778,7 +602,7 @@ macro_rules! options {
impl quote::ToTokens for $options_name {
#[allow(unused_mut, unused_variables, unused_assignments)]
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let mut separator: Option<$Punct> = None;
let mut separator: Option<syn::Token![,]> = None;
$(if let Some(v) = &self.$key {
separator.to_tokens(tokens);
separator = Some(Default::default());
@ -849,24 +673,9 @@ macro_rules! options {
}
}
}
impl $option_enum_name {
#[allow(dead_code)]
fn variant(&self) -> usize {
#[repr(usize)]
enum Variant {
$($Variant,)*
__Last, // so it doesn't complain about zero-variant enums
}
match *self {
$(Self::$Variant(..) => Variant::$Variant as usize,)*
}
}
}
};
}
use crate::hdl_type_alias::hdl_type_alias_impl;
pub(crate) use options;
pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStream {
@ -877,15 +686,6 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr
.suffix(".tmp.rs")
.tempfile_in(out_dir)
.unwrap();
struct PrintOnPanic<'a>(&'a TokenStream);
impl Drop for PrintOnPanic<'_> {
fn drop(&mut self) {
if std::thread::panicking() {
println!("{}", self.0);
}
}
}
let _print_on_panic = PrintOnPanic(&contents);
let contents = prettyplease::unparse(&parse_quote! { #contents });
let hash = <sha2::Sha256 as sha2::Digest>::digest(&contents);
let hash = base16ct::HexDisplay(&hash[..5]);
@ -906,372 +706,25 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr
}
}
fn hdl_module_impl(item: ItemFn) -> syn::Result<TokenStream> {
let func = module::ModuleFn::parse_from_fn(item)?;
let options = func.config_options();
pub fn module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
let options = syn::parse2::<module::ConfigOptions>(attr)?;
let options = HdlAttr::from(options);
let func = syn::parse2::<module::ModuleFn>(quote! { #options #item })?;
let mut contents = func.generate();
if options.outline_generated.is_some() {
if options.body.outline_generated.is_some() {
contents = outline_generated(contents, "module-");
}
Ok(contents)
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) enum CfgExpr {
Option {
ident: Ident,
value: Option<(Token![=], LitStr)>,
},
All {
all: kw::all,
paren: Paren,
exprs: Punctuated<CfgExpr, Token![,]>,
},
Any {
any: kw::any,
paren: Paren,
exprs: Punctuated<CfgExpr, Token![,]>,
},
Not {
not: kw::not,
paren: Paren,
expr: Box<CfgExpr>,
trailing_comma: Option<Token![,]>,
},
}
impl Parse for CfgExpr {
fn parse(input: ParseStream) -> syn::Result<Self> {
match input.cursor().ident() {
Some((_, cursor)) if cursor.eof() => {
return Ok(CfgExpr::Option {
ident: input.call(Ident::parse_any)?,
value: None,
});
}
_ => {}
}
if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
return Ok(CfgExpr::Option {
ident: input.call(Ident::parse_any)?,
value: Some((input.parse()?, input.parse()?)),
});
}
let contents;
if input.peek(kw::all) {
Ok(CfgExpr::All {
all: input.parse()?,
paren: parenthesized!(contents in input),
exprs: contents.call(Punctuated::parse_terminated)?,
})
} else if input.peek(kw::any) {
Ok(CfgExpr::Any {
any: input.parse()?,
paren: parenthesized!(contents in input),
exprs: contents.call(Punctuated::parse_terminated)?,
})
} else if input.peek(kw::not) {
Ok(CfgExpr::Not {
not: input.parse()?,
paren: parenthesized!(contents in input),
expr: contents.parse()?,
trailing_comma: contents.parse()?,
})
} else {
Err(input.error("expected cfg-pattern"))
}
}
}
impl ToTokens for CfgExpr {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
CfgExpr::Option { ident, value } => {
ident.to_tokens(tokens);
if let Some((eq, value)) = value {
eq.to_tokens(tokens);
value.to_tokens(tokens);
}
}
CfgExpr::All { all, paren, exprs } => {
all.to_tokens(tokens);
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
}
CfgExpr::Any { any, paren, exprs } => {
any.to_tokens(tokens);
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
}
CfgExpr::Not {
not,
paren,
expr,
trailing_comma,
} => {
not.to_tokens(tokens);
paren.surround(tokens, |tokens| {
expr.to_tokens(tokens);
trailing_comma.to_tokens(tokens);
});
}
}
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct Cfg {
cfg: kw::cfg,
paren: Paren,
expr: CfgExpr,
trailing_comma: Option<Token![,]>,
}
impl Cfg {
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
syn::parse2(meta.to_token_stream())
}
}
impl ToTokens for Cfg {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
cfg,
paren,
expr,
trailing_comma,
} = self;
cfg.to_tokens(tokens);
paren.surround(tokens, |tokens| {
expr.to_tokens(tokens);
trailing_comma.to_tokens(tokens);
});
}
}
impl Parse for Cfg {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
Ok(Self {
cfg: input.parse()?,
paren: parenthesized!(contents in input),
expr: contents.parse()?,
trailing_comma: contents.parse()?,
})
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct CfgAttr {
cfg_attr: kw::cfg_attr,
paren: Paren,
expr: CfgExpr,
comma: Token![,],
attrs: Punctuated<Meta, Token![,]>,
}
impl CfgAttr {
pub(crate) fn to_cfg(&self) -> Cfg {
Cfg {
cfg: kw::cfg(self.cfg_attr.span),
paren: self.paren,
expr: self.expr.clone(),
trailing_comma: None,
}
}
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
syn::parse2(meta.to_token_stream())
}
}
impl Parse for CfgAttr {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
Ok(Self {
cfg_attr: input.parse()?,
paren: parenthesized!(contents in input),
expr: contents.parse()?,
comma: contents.parse()?,
attrs: contents.call(Punctuated::parse_terminated)?,
})
}
}
pub(crate) struct CfgAndValue {
cfg: Cfg,
eq_token: Token![=],
value: LitBool,
}
impl Parse for CfgAndValue {
fn parse(input: ParseStream) -> syn::Result<Self> {
Ok(Self {
cfg: input.parse()?,
eq_token: input.parse()?,
value: input.parse()?,
})
}
}
pub(crate) struct Cfgs<T> {
pub(crate) bracket: Bracket,
pub(crate) cfgs_map: HashMap<Cfg, T>,
pub(crate) cfgs_list: Vec<Cfg>,
}
impl<T> Default for Cfgs<T> {
fn default() -> Self {
Self {
bracket: Default::default(),
cfgs_map: Default::default(),
cfgs_list: Default::default(),
}
}
}
impl<T> Cfgs<T> {
fn insert_cfg(&mut self, cfg: Cfg, value: T) {
match self.cfgs_map.entry(cfg) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
self.cfgs_list.push(entry.key().clone());
entry.insert(value);
}
}
}
}
impl Parse for Cfgs<bool> {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
let bracket = bracketed!(contents in input);
let mut cfgs_map = HashMap::new();
let mut cfgs_list = Vec::new();
for CfgAndValue {
cfg,
eq_token,
value,
} in contents.call(Punctuated::<CfgAndValue, Token![,]>::parse_terminated)?
{
let _ = eq_token;
match cfgs_map.entry(cfg) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
cfgs_list.push(entry.key().clone());
entry.insert(value.value);
}
}
}
Ok(Self {
bracket,
cfgs_map,
cfgs_list,
})
}
}
impl Parse for Cfgs<()> {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
let bracket = bracketed!(contents in input);
let mut cfgs_map = HashMap::new();
let mut cfgs_list = Vec::new();
for cfg in contents.call(Punctuated::<Cfg, Token![,]>::parse_terminated)? {
match cfgs_map.entry(cfg) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
cfgs_list.push(entry.key().clone());
entry.insert(());
}
}
}
Ok(Self {
bracket,
cfgs_map,
cfgs_list,
})
}
}
impl ToTokens for Cfgs<()> {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
bracket,
cfgs_map: _,
cfgs_list,
} = self;
bracket.surround(tokens, |tokens| {
for cfg in cfgs_list {
cfg.to_tokens(tokens);
<Token![,]>::default().to_tokens(tokens);
}
});
}
}
fn hdl_main(
kw: impl CustomToken,
attr: TokenStream,
item: TokenStream,
) -> syn::Result<TokenStream> {
fn parse_evaluated_cfgs_attr<R>(
input: ParseStream,
parse_inner: impl FnOnce(ParseStream) -> syn::Result<R>,
) -> syn::Result<R> {
let _: Token![#] = input.parse()?;
let bracket_content;
bracketed!(bracket_content in input);
let _: kw::__evaluated_cfgs = bracket_content.parse()?;
let paren_content;
parenthesized!(paren_content in bracket_content);
parse_inner(&paren_content)
}
let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2(
|input: ParseStream| {
let peek = input.fork();
if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() {
let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::<bool>::parse)?;
Ok((Some(evaluated_cfgs), input.parse()?))
} else {
Ok((None, input.parse()?))
}
},
item,
)?;
let cfgs = if let Some(cfgs) = evaluated_cfgs {
cfgs
} else {
let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?;
if cfgs.cfgs_list.is_empty() {
Cfgs::default()
} else {
return Ok(quote! {
::fayalite::__cfg_expansion_helper! {
[]
#cfgs
{#[::fayalite::#kw(#attr)]} { #item }
}
});
}
};
let item = syn::parse2(quote! { #[#kw(#attr)] #item })?;
let Some(item) = process_cfg::process_cfgs(item, cfgs)? else {
return Ok(TokenStream::new());
};
pub fn value_derive(item: TokenStream) -> syn::Result<TokenStream> {
let item = syn::parse2::<Item>(item)?;
match item {
Item::Enum(item) => hdl_enum::hdl_enum(item),
Item::Struct(item) => hdl_bundle::hdl_bundle(item),
Item::Fn(item) => hdl_module_impl(item),
Item::Type(item) => hdl_type_alias_impl(item),
Item::Enum(item) => value_derive_enum::value_derive_enum(item),
Item::Struct(item) => value_derive_struct::value_derive_struct(item),
_ => Err(syn::Error::new(
Span::call_site(),
"top-level #[hdl] can only be used on structs, enums, type aliases, or functions",
"derive(Value) can only be used on structs or enums",
)),
}
}
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
hdl_main(kw::hdl_module::default(), attr, item)
}
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
hdl_main(kw::hdl::default(), attr, item)
}

View file

@ -1,8 +1,7 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
hdl_type_common::{ParsedGenerics, SplitForImpl},
kw,
is_hdl_attr,
module::transform_body::{HdlLet, HdlLetKindIO},
options, Errors, HdlAttr, PairsIterExt,
};
@ -10,6 +9,7 @@ use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned, ToTokens};
use std::collections::HashSet;
use syn::{
parse::{Parse, ParseStream},
parse_quote,
visit::{visit_pat, Visit},
Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct,
@ -57,39 +57,26 @@ impl Visit<'_> for CheckNameConflictsWithModuleBuilderVisitor<'_> {
}
}
fn retain_struct_attrs<F: FnMut(&Attribute) -> bool>(item: &mut ItemStruct, mut f: F) {
item.attrs.retain(&mut f);
for field in item.fields.iter_mut() {
field.attrs.retain(&mut f);
}
}
pub(crate) type ModuleIO = HdlLet<HdlLetKindIO>;
struct ModuleFnModule {
pub(crate) struct ModuleFn {
attrs: Vec<Attribute>,
config_options: HdlAttr<ConfigOptions, kw::hdl_module>,
config_options: HdlAttr<ConfigOptions>,
module_kind: ModuleKind,
vis: Visibility,
sig: Signature,
block: Box<Block>,
struct_generics: ParsedGenerics,
the_struct: TokenStream,
io: Vec<ModuleIO>,
struct_generics: Generics,
}
enum ModuleFnImpl {
Module(ModuleFnModule),
Fn {
attrs: Vec<Attribute>,
config_options: HdlAttr<ConfigOptions, kw::hdl>,
vis: Visibility,
sig: Signature,
block: Box<Block>,
},
}
options! {
pub(crate) enum HdlOrHdlModule {
Hdl(hdl),
HdlModule(hdl_module),
}
}
pub(crate) struct ModuleFn(ModuleFnImpl);
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub(crate) enum ModuleKind {
Extern,
@ -109,25 +96,14 @@ impl Visit<'_> for ContainsSkippedIdent<'_> {
}
}
impl ModuleFn {
pub(crate) fn config_options(&self) -> ConfigOptions {
let (ModuleFnImpl::Module(ModuleFnModule {
config_options: HdlAttr { body, .. },
..
})
| ModuleFnImpl::Fn {
config_options: HdlAttr { body, .. },
..
}) = &self.0;
body.clone()
}
pub(crate) fn parse_from_fn(item: ItemFn) -> syn::Result<Self> {
impl Parse for ModuleFn {
fn parse(input: ParseStream) -> syn::Result<Self> {
let ItemFn {
mut attrs,
vis,
mut sig,
block,
} = item;
} = input.parse()?;
let Signature {
ref constness,
ref asyncness,
@ -142,60 +118,43 @@ impl ModuleFn {
ref output,
} = sig;
let mut errors = Errors::new();
let Some(mut config_options) =
errors.unwrap_or_default(
HdlAttr::<ConfigOptions, HdlOrHdlModule>::parse_and_take_attr(&mut attrs),
)
else {
errors.error(sig.ident, "missing #[hdl] or #[hdl_module] attribute");
errors.finish()?;
unreachable!();
};
let config_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
.unwrap_or_default();
let ConfigOptions {
outline_generated: _,
extern_,
} = config_options.body;
let module_kind = match (config_options.kw, extern_) {
(HdlOrHdlModule::Hdl(_), None) => None,
(HdlOrHdlModule::Hdl(_), Some(extern2)) => {
config_options.body.extern_ = None;
errors.error(
extern2.0,
"extern can only be used as #[hdl_module(extern)]",
);
None
}
(HdlOrHdlModule::HdlModule(_), None) => Some(ModuleKind::Normal),
(HdlOrHdlModule::HdlModule(_), Some(_)) => Some(ModuleKind::Extern),
let module_kind = match extern_ {
Some(_) => ModuleKind::Extern,
None => ModuleKind::Normal,
};
if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
for fn_arg in inputs {
match fn_arg {
FnArg::Receiver(_) => {
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
}
FnArg::Typed(fn_arg) => {
visit_pat(
&mut CheckNameConflictsWithModuleBuilderVisitor {
errors: &mut errors,
},
&fn_arg.pat,
);
}
for fn_arg in inputs {
match fn_arg {
FnArg::Receiver(_) => {
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
}
FnArg::Typed(fn_arg) => {
visit_pat(
&mut CheckNameConflictsWithModuleBuilderVisitor {
errors: &mut errors,
},
&fn_arg.pat,
);
}
}
if let Some(constness) = constness {
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
}
if let Some(asyncness) = asyncness {
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
}
if let Some(unsafety) = unsafety {
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
}
if let Some(abi) = abi {
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
}
}
if let Some(constness) = constness {
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
}
if let Some(asyncness) = asyncness {
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
}
if let Some(unsafety) = unsafety {
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
}
if let Some(abi) = abi {
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
}
let mut skipped_idents = HashSet::new();
let struct_generic_params = generics
@ -203,17 +162,14 @@ impl ModuleFn {
.pairs_mut()
.filter_map_pair_value_mut(|v| match v {
GenericParam::Lifetime(LifetimeParam { attrs, .. }) => {
errors.unwrap_or_default(
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
);
errors
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs));
None
}
GenericParam::Type(TypeParam { attrs, ident, .. })
| GenericParam::Const(ConstParam { attrs, ident, .. }) => {
if errors
.unwrap_or_default(
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
)
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs))
.is_some()
{
skipped_idents.insert(ident.clone());
@ -227,7 +183,6 @@ impl ModuleFn {
let struct_where_clause = generics
.where_clause
.as_mut()
.filter(|_| matches!(config_options.kw, HdlOrHdlModule::HdlModule(_)))
.map(|where_clause| WhereClause {
where_token: where_clause.where_token,
predicates: where_clause
@ -250,158 +205,49 @@ impl ModuleFn {
})
.collect(),
});
let struct_generics = if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
let mut struct_generics = Generics {
lt_token: generics.lt_token,
params: struct_generic_params,
gt_token: generics.gt_token,
where_clause: struct_where_clause,
};
if let Some(variadic) = variadic {
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
}
if !matches!(output, ReturnType::Default) {
errors.push(syn::Error::new_spanned(
output,
"return type not allowed here",
));
}
errors.ok(ParsedGenerics::parse(&mut struct_generics))
} else {
Some(ParsedGenerics::default())
let struct_generics = Generics {
lt_token: generics.lt_token,
params: struct_generic_params,
gt_token: generics.gt_token,
where_clause: struct_where_clause,
};
let body_results = struct_generics.as_ref().and_then(|struct_generics| {
errors.ok(transform_body::transform_body(
module_kind,
block,
struct_generics,
))
});
errors.finish()?;
let struct_generics = struct_generics.unwrap();
let (block, io) = body_results.unwrap();
let config_options = match config_options {
HdlAttr {
pound_token,
style,
bracket_token,
kw: HdlOrHdlModule::Hdl((kw,)),
paren_token,
body,
} => {
debug_assert!(io.is_empty());
return Ok(Self(ModuleFnImpl::Fn {
attrs,
config_options: HdlAttr {
pound_token,
style,
bracket_token,
kw,
paren_token,
body,
},
vis,
sig,
block,
}));
}
HdlAttr {
pound_token,
style,
bracket_token,
kw: HdlOrHdlModule::HdlModule((kw,)),
paren_token,
body,
} => HdlAttr {
pound_token,
style,
bracket_token,
kw,
paren_token,
body,
},
};
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
struct_generics.split_for_impl();
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
if let Some(struct_where_clause) = &struct_where_clause {
sig.generics
.where_clause
.get_or_insert_with(|| WhereClause {
where_token: struct_where_clause.where_token,
predicates: Default::default(),
})
.predicates
.extend(struct_where_clause.predicates.clone());
if let Some(variadic) = variadic {
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
}
let fn_name = &sig.ident;
let io_flips = io
.iter()
.map(|io| match io.kind.kind {
ModuleIOKind::Input((input,)) => quote_spanned! {input.span=>
#[hdl(flip)]
},
ModuleIOKind::Output(_) => quote! {},
})
.collect::<Vec<_>>();
let io_types = io.iter().map(|io| &io.kind.ty).collect::<Vec<_>>();
let io_names = io.iter().map(|io| &io.name).collect::<Vec<_>>();
let the_struct: ItemStruct = parse_quote! {
#[allow(non_camel_case_types)]
#[hdl(no_runtime_generics, no_static)]
#vis struct #fn_name #struct_generics #struct_where_clause {
#(
#io_flips
#vis #io_names: #io_types,)*
}
};
let the_struct = crate::hdl_bundle::hdl_bundle(the_struct)?;
Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
attrs,
config_options,
module_kind: module_kind.unwrap(),
vis,
sig,
block,
struct_generics,
the_struct,
})))
}
}
impl ModuleFn {
pub(crate) fn generate(self) -> TokenStream {
let ModuleFnModule {
if !matches!(output, ReturnType::Default) {
errors.push(syn::Error::new_spanned(
output,
"return type not allowed here",
));
}
let body_results = errors.ok(transform_body::transform_body(module_kind, block));
errors.finish()?;
let (block, io) = body_results.unwrap();
Ok(Self {
attrs,
config_options,
module_kind,
vis,
sig,
mut block,
block,
io,
struct_generics,
the_struct,
} = match self.0 {
ModuleFnImpl::Module(v) => v,
ModuleFnImpl::Fn {
attrs,
config_options,
vis,
sig,
block,
} => {
let ConfigOptions {
outline_generated: _,
extern_: _,
} = config_options.body;
return ItemFn {
attrs,
vis,
sig,
block,
}
.into_token_stream();
}
};
})
}
}
impl ModuleFn {
pub(crate) fn generate(self) -> TokenStream {
let Self {
attrs,
config_options,
module_kind,
vis,
sig,
block,
io,
struct_generics,
} = self;
let ConfigOptions {
outline_generated: _,
extern_: _,
@@ -427,23 +273,18 @@ impl ModuleFn {
});
name
}));
let module_kind_value = match module_kind {
ModuleKind::Extern => quote! { ::fayalite::module::ModuleKind::Extern },
ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal },
let module_kind_ty = match module_kind {
ModuleKind::Extern => quote! { ::fayalite::module::ExternModule },
ModuleKind::Normal => quote! { ::fayalite::module::NormalModule },
};
let fn_name = &outer_sig.ident;
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
let (_struct_impl_generics, struct_type_generics, struct_where_clause) =
struct_generics.split_for_impl();
let struct_ty = quote! {#fn_name #struct_type_generics};
body_sig.ident = parse_quote! {__body};
body_sig
.inputs
.insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder });
block.stmts.insert(
body_sig.inputs.insert(
0,
parse_quote! {
let _ = m;
},
parse_quote! {m: &mut ::fayalite::module::ModuleBuilder<#struct_ty, #module_kind_ty>},
);
let body_fn = ItemFn {
attrs: vec![],
@@ -453,26 +294,50 @@ impl ModuleFn {
};
outer_sig.output =
parse_quote! {-> ::fayalite::intern::Interned<::fayalite::module::Module<#struct_ty>>};
let io_flips = io
.iter()
.map(|io| match io.kind.kind {
ModuleIOKind::Input((input,)) => quote_spanned! {input.span=>
#[hdl(flip)]
},
ModuleIOKind::Output(_) => quote! {},
})
.collect::<Vec<_>>();
let io_types = io.iter().map(|io| &io.kind.ty).collect::<Vec<_>>();
let io_names = io.iter().map(|io| &io.name).collect::<Vec<_>>();
let fn_name_str = fn_name.to_string();
let (_, body_type_generics, _) = body_fn.sig.generics.split_for_impl();
let body_turbofish_type_generics = body_type_generics.as_turbofish();
let body_lambda = if param_names.is_empty() {
quote! {
__body #body_turbofish_type_generics
}
} else {
quote! {
|m| __body #body_turbofish_type_generics(m, #(#param_names,)*)
}
};
let block = parse_quote! {{
#body_fn
::fayalite::module::ModuleBuilder::run(
#fn_name_str,
#module_kind_value,
#body_lambda,
)
::fayalite::module::ModuleBuilder::run(#fn_name_str, |m| __body #body_turbofish_type_generics(m, #(#param_names,)*))
}};
let static_type = io.iter().all(|io| io.kind.ty_expr.is_none());
let struct_options = if static_type {
quote! { #[hdl(static)] }
} else {
quote! {}
};
let the_struct: ItemStruct = parse_quote! {
#[derive(::fayalite::__std::clone::Clone,
::fayalite::__std::hash::Hash,
::fayalite::__std::cmp::PartialEq,
::fayalite::__std::cmp::Eq,
::fayalite::__std::fmt::Debug)]
#[allow(non_camel_case_types)]
#struct_options
#vis struct #fn_name #struct_generics #struct_where_clause {
#(
#io_flips
#vis #io_names: #io_types,)*
}
};
let mut struct_without_hdl_attrs = the_struct.clone();
let mut struct_without_derives = the_struct;
retain_struct_attrs(&mut struct_without_hdl_attrs, |attr| !is_hdl_attr(attr));
retain_struct_attrs(&mut struct_without_derives, |attr| {
!attr.path().is_ident("derive")
});
let outer_fn = ItemFn {
attrs,
vis,
@@ -480,7 +345,10 @@ impl ModuleFn {
block,
};
let mut retval = outer_fn.into_token_stream();
retval.extend(the_struct);
struct_without_hdl_attrs.to_tokens(&mut retval);
retval.extend(
crate::value_derive_struct::value_derive_struct(struct_without_derives).unwrap(),
);
retval
}
}

@@ -1,274 +1,540 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
kw,
module::transform_body::{
expand_match::{parse_enum_path, EnumPath},
ExprOptions, Visitor,
},
HdlAttr,
};
use quote::{format_ident, quote_spanned};
use std::mem;
use crate::{module::transform_body::Visitor, options, Errors, HdlAttr, PairsIterExt};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
use syn::{
parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, token::Paren, Expr, ExprArray,
ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat, ExprStruct, ExprTuple,
FieldValue, Token, TypePath,
parse::Nothing,
parse_quote, parse_quote_spanned,
punctuated::{Pair, Punctuated},
spanned::Spanned,
token::{Brace, Paren},
Attribute, Expr, ExprArray, ExprCall, ExprGroup, ExprPath, ExprRepeat, ExprStruct, ExprTuple,
FieldValue, Ident, Index, Member, Path, PathArguments, PathSegment, Token, TypePath,
};
impl Visitor<'_> {
pub(crate) fn process_hdl_array(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_array: ExprArray,
) -> Expr {
let ExprOptions { sim } = hdl_attr.body;
let span = hdl_attr.kw.span;
if sim.is_some() {
for elem in &mut expr_array.elems {
*elem = parse_quote_spanned! {elem.span()=>
::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem))
};
}
options! {
#[options = AggregateLiteralOptions]
#[no_ident_fragment]
pub(crate) enum AggregateLiteralOption {
Struct(struct_),
Enum(enum_),
}
}
#[derive(Clone, Debug)]
pub(crate) struct StructOrEnumPath {
pub(crate) ty: TypePath,
pub(crate) variant: Option<(TypePath, Ident)>,
}
#[derive(Debug, Copy, Clone)]
pub(crate) struct SingleSegmentVariant {
pub(crate) name: &'static str,
pub(crate) make_type_path: fn(Span, &PathArguments) -> Path,
}
impl StructOrEnumPath {
pub(crate) const SINGLE_SEGMENT_VARIANTS: &'static [SingleSegmentVariant] = {
fn make_option_type_path(span: Span, arguments: &PathArguments) -> Path {
let arguments = if arguments.is_none() {
quote_spanned! {span=>
<_>
}
} else {
arguments.to_token_stream()
};
parse_quote_spanned! {span=>
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array)
}
} else {
for elem in &mut expr_array.elems {
*elem = parse_quote_spanned! {elem.span()=>
::fayalite::expr::ToExpr::to_expr(&(#elem))
};
}
parse_quote_spanned! {span=>
::fayalite::expr::ToExpr::to_expr(&#expr_array)
::fayalite::__std::option::Option #arguments
}
}
fn make_result_type_path(span: Span, arguments: &PathArguments) -> Path {
let arguments = if arguments.is_none() {
quote_spanned! {span=>
<_, _>
}
} else {
arguments.to_token_stream()
};
parse_quote_spanned! {span=>
::fayalite::__std::result::Result #arguments
}
}
&[
SingleSegmentVariant {
name: "Some",
make_type_path: make_option_type_path,
},
SingleSegmentVariant {
name: "None",
make_type_path: make_option_type_path,
},
SingleSegmentVariant {
name: "Ok",
make_type_path: make_result_type_path,
},
SingleSegmentVariant {
name: "Err",
make_type_path: make_result_type_path,
},
]
};
pub(crate) fn new(
errors: &mut Errors,
path: TypePath,
options: &AggregateLiteralOptions,
) -> Result<Self, ()> {
let Path {
leading_colon,
segments,
} = &path.path;
let qself_position = path.qself.as_ref().map(|qself| qself.position).unwrap_or(0);
let variant_name = if qself_position < segments.len() {
Some(segments.last().unwrap().ident.clone())
} else {
None
};
let enum_type = 'guess_enum_type: {
if options.enum_.is_some() {
if let Some((struct_,)) = options.struct_ {
errors.error(
struct_,
"can't specify both #[hdl(enum)] and #[hdl(struct)]",
);
}
break 'guess_enum_type Some(None);
}
if options.struct_.is_some() {
break 'guess_enum_type None;
}
if path.qself.is_none() && leading_colon.is_none() && segments.len() == 1 {
let PathSegment { ident, arguments } = &segments[0];
for &SingleSegmentVariant {
name,
make_type_path,
} in Self::SINGLE_SEGMENT_VARIANTS
{
if ident == name {
break 'guess_enum_type Some(Some(TypePath {
qself: None,
path: make_type_path(ident.span(), arguments),
}));
}
}
}
if segments.len() == qself_position + 2
&& segments[qself_position + 1].arguments.is_none()
&& (path.qself.is_some()
|| segments[qself_position].ident.to_string().as_bytes()[0]
.is_ascii_uppercase())
{
let mut ty = path.clone();
ty.path.segments.pop();
ty.path.segments.pop_punct();
break 'guess_enum_type Some(Some(ty));
}
None
};
if let Some(enum_type) = enum_type {
let ty = if let Some(enum_type) = enum_type {
enum_type
} else {
if qself_position >= segments.len() {
errors.error(path, "#[hdl]: can't figure out enum's type");
return Err(());
}
let mut ty = path.clone();
ty.path.segments.pop();
ty.path.segments.pop_punct();
ty
};
let Some(variant_name) = variant_name else {
errors.error(path, "#[hdl]: can't figure out enum's variant name");
return Err(());
};
Ok(Self {
ty,
variant: Some((path, variant_name)),
})
} else {
Ok(Self {
ty: path,
variant: None,
})
}
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum BraceOrParen {
Brace(Brace),
Paren(Paren),
}
impl BraceOrParen {
pub(crate) fn surround(self, tokens: &mut TokenStream, f: impl FnOnce(&mut TokenStream)) {
match self {
BraceOrParen::Brace(v) => v.surround(tokens, f),
BraceOrParen::Paren(v) => v.surround(tokens, f),
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct StructOrEnumLiteralField {
pub(crate) attrs: Vec<Attribute>,
pub(crate) member: Member,
pub(crate) colon_token: Option<Token![:]>,
pub(crate) expr: Expr,
}
#[derive(Debug, Clone)]
pub(crate) struct StructOrEnumLiteral {
pub(crate) attrs: Vec<Attribute>,
pub(crate) path: TypePath,
pub(crate) brace_or_paren: BraceOrParen,
pub(crate) fields: Punctuated<StructOrEnumLiteralField, Token![,]>,
pub(crate) dot2_token: Option<Token![..]>,
pub(crate) rest: Option<Box<Expr>>,
}
impl StructOrEnumLiteral {
pub(crate) fn map_field_exprs(self, mut f: impl FnMut(Expr) -> Expr) -> Self {
self.map_fields(|mut field| {
field.expr = f(field.expr);
field
})
}
pub(crate) fn map_fields(
self,
f: impl FnMut(StructOrEnumLiteralField) -> StructOrEnumLiteralField,
) -> Self {
let Self {
attrs,
path,
brace_or_paren,
fields,
dot2_token,
rest,
} = self;
let fields = fields.into_pairs().map_pair_value(f).collect();
Self {
attrs,
path,
brace_or_paren,
fields,
dot2_token,
rest,
}
}
}
impl From<ExprStruct> for StructOrEnumLiteral {
fn from(value: ExprStruct) -> Self {
let ExprStruct {
attrs,
qself,
path,
brace_token,
fields,
dot2_token,
rest,
} = value;
Self {
attrs,
path: TypePath { qself, path },
brace_or_paren: BraceOrParen::Brace(brace_token),
fields: fields
.into_pairs()
.map_pair_value(
|FieldValue {
attrs,
member,
colon_token,
expr,
}| StructOrEnumLiteralField {
attrs,
member,
colon_token,
expr,
},
)
.collect(),
dot2_token,
rest,
}
}
}
fn expr_to_member(expr: &Expr) -> Option<Member> {
syn::parse2(expr.to_token_stream()).ok()
}
impl ToTokens for StructOrEnumLiteral {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
attrs,
path,
brace_or_paren,
fields,
dot2_token,
rest,
} = self;
tokens.append_all(attrs);
path.to_tokens(tokens);
brace_or_paren.surround(tokens, |tokens| {
match brace_or_paren {
BraceOrParen::Brace(_) => {
for (
StructOrEnumLiteralField {
attrs,
member,
mut colon_token,
expr,
},
comma,
) in fields.pairs().map(|v| v.into_tuple())
{
tokens.append_all(attrs);
if Some(member) != expr_to_member(expr).as_ref() {
colon_token = Some(<Token![:]>::default());
}
member.to_tokens(tokens);
colon_token.to_tokens(tokens);
expr.to_tokens(tokens);
comma.to_tokens(tokens);
}
}
BraceOrParen::Paren(_) => {
for (
StructOrEnumLiteralField {
attrs,
member: _,
colon_token: _,
expr,
},
comma,
) in fields.pairs().map(|v| v.into_tuple())
{
tokens.append_all(attrs);
expr.to_tokens(tokens);
comma.to_tokens(tokens);
}
}
}
if let Some(rest) = rest {
dot2_token.unwrap_or_default().to_tokens(tokens);
rest.to_tokens(tokens);
}
});
}
}
impl Visitor {
pub(crate) fn process_hdl_array(
&mut self,
hdl_attr: HdlAttr<Nothing>,
mut expr_array: ExprArray,
) -> Expr {
self.require_normal_module(hdl_attr);
for elem in &mut expr_array.elems {
*elem = parse_quote_spanned! {elem.span()=>
::fayalite::expr::ToExpr::to_expr(&(#elem))
};
}
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)}
}
pub(crate) fn process_hdl_repeat(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
hdl_attr: HdlAttr<Nothing>,
mut expr_repeat: ExprRepeat,
) -> Expr {
self.require_normal_module(hdl_attr);
let repeated_value = &expr_repeat.expr;
let ExprOptions { sim } = hdl_attr.body;
let span = hdl_attr.kw.span;
if sim.is_some() {
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value))
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
};
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_repeat)}
}
pub(crate) fn process_struct_enum(
&mut self,
hdl_attr: HdlAttr<AggregateLiteralOptions>,
mut literal: StructOrEnumLiteral,
) -> Expr {
let span = hdl_attr.hdl.span;
if let Some(rest) = literal.rest.take() {
self.errors
.error(rest, "#[hdl] struct functional update syntax not supported");
}
let mut next_var = 0usize;
let mut new_var = || -> Ident {
let retval = format_ident!("__v{}", next_var, span = span);
next_var += 1;
retval
};
let infallible_var = new_var();
let retval_var = new_var();
let mut lets = vec![];
let mut build_steps = vec![];
let literal = literal.map_field_exprs(|expr| {
let field_var = new_var();
lets.push(quote_spanned! {span=>
let #field_var = ::fayalite::expr::ToExpr::to_expr(&#expr);
});
parse_quote! { #field_var }
});
let Ok(StructOrEnumPath { ty, variant }) =
StructOrEnumPath::new(&mut self.errors, literal.path.clone(), &hdl_attr.body)
else {
return parse_quote_spanned! {span=>
{}
};
};
for StructOrEnumLiteralField {
attrs: _,
member,
colon_token: _,
expr,
} in literal.fields.iter()
{
let field_fn = format_ident!("field_{}", member);
build_steps.push(quote_spanned! {span=>
let #retval_var = #retval_var.#field_fn(#expr);
});
}
let check_literal = literal.map_field_exprs(|expr| {
parse_quote_spanned! {span=>
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat)
::fayalite::expr::value_from_expr_type(#expr, #infallible_var)
}
});
let make_expr_fn = if let Some((_variant_path, variant_ident)) = &variant {
let variant_fn = format_ident!("variant_{}", variant_ident);
build_steps.push(quote_spanned! {span=>
let #retval_var = #retval_var.#variant_fn();
});
quote_spanned! {span=>
::fayalite::expr::make_enum_expr
}
} else {
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
};
parse_quote_spanned! {span=>
::fayalite::expr::ToExpr::to_expr(&#expr_repeat)
build_steps.push(quote_spanned! {span=>
let #retval_var = #retval_var.build();
});
quote_spanned! {span=>
::fayalite::expr::make_bundle_expr
}
};
let variant_or_type =
variant.map_or_else(|| ty.clone(), |(variant_path, _variant_ident)| variant_path);
parse_quote_spanned! {span=>
{
#(#lets)*
#make_expr_fn::<#ty>(|#infallible_var| {
let #retval_var = #check_literal;
#[allow(unreachable_code)]
match #retval_var {
#variant_or_type { .. } => #retval_var,
#[allow(unreachable_patterns)]
_ => match #infallible_var {},
}
}, |#retval_var| {
#(#build_steps)*
#retval_var
})
}
}
}
pub(crate) fn process_hdl_struct(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_struct: ExprStruct,
hdl_attr: HdlAttr<AggregateLiteralOptions>,
expr_struct: ExprStruct,
) -> Expr {
let name_span = expr_struct.path.segments.last().unwrap().ident.span();
let ExprOptions { sim } = hdl_attr.body;
if sim.is_some() {
let ty_path = TypePath {
qself: expr_struct.qself.take(),
path: expr_struct.path,
};
expr_struct.path = parse_quote_spanned! {name_span=>
__SimValue::<#ty_path>
};
for field in &mut expr_struct.fields {
let expr = &field.expr;
field.expr = parse_quote_spanned! {field.member.span()=>
::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr))
};
}
return parse_quote_spanned! {name_span=>
{
type __SimValue<T> = <T as ::fayalite::ty::Type>::SimValue;
let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct);
value
}
};
}
let builder_ident = format_ident!("__builder", span = name_span);
let empty_builder = if expr_struct.qself.is_some()
|| expr_struct
.path
.segments
.iter()
.any(|seg| !seg.arguments.is_none())
{
let ty = TypePath {
qself: expr_struct.qself,
path: expr_struct.path,
};
let builder_ty = quote_spanned! {name_span=>
<#ty as ::fayalite::bundle::BundleType>::Builder
};
quote_spanned! {name_span=>
<#builder_ty as ::fayalite::__std::default::Default>::default()
}
} else {
let path = ExprPath {
attrs: vec![],
qself: expr_struct.qself,
path: expr_struct.path,
};
quote_spanned! {name_span=>
#path::__bundle_builder()
}
};
let field_calls = Vec::from_iter(expr_struct.fields.iter().map(
|FieldValue {
attrs: _,
member,
colon_token: _,
expr,
}| {
let field_fn = format_ident!("field_{}", member);
quote_spanned! {member.span()=>
let #builder_ident = #builder_ident.#field_fn(#expr);
}
},
));
parse_quote_spanned! {name_span=>
{
let #builder_ident = #empty_builder;
#(#field_calls)*
::fayalite::expr::ToExpr::to_expr(&#builder_ident)
}
}
self.require_normal_module(&hdl_attr);
self.process_struct_enum(hdl_attr, expr_struct.into())
}
pub(crate) fn process_hdl_tuple(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_tuple: ExprTuple,
hdl_attr: HdlAttr<Nothing>,
expr_tuple: ExprTuple,
) -> Expr {
let ExprOptions { sim } = hdl_attr.body;
if sim.is_some() {
for element in &mut expr_tuple.elems {
*element = parse_quote_spanned! {element.span()=>
&(#element)
};
}
parse_quote_spanned! {expr_tuple.span()=>
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple)
}
} else {
parse_quote_spanned! {expr_tuple.span()=>
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
}
self.require_normal_module(hdl_attr);
parse_quote_spanned! {expr_tuple.span()=>
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
}
}
pub(crate) fn process_hdl_path(
&mut self,
hdl_attr: HdlAttr<Nothing>,
expr_path: ExprPath,
) -> Expr {
self.require_normal_module(hdl_attr);
parse_quote_spanned! {expr_path.span()=>
::fayalite::expr::ToExpr::to_expr(&#expr_path)
}
}
pub(crate) fn process_hdl_call(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_call: ExprCall,
hdl_attr: HdlAttr<AggregateLiteralOptions>,
expr_call: ExprCall,
) -> Expr {
let span = hdl_attr.kw.span;
let mut func = &mut *expr_call.func;
let EnumPath {
variant_path: _,
enum_path,
variant_name,
} = loop {
match func {
Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. }) => {
func = &mut **expr;
self.require_normal_module(&hdl_attr);
let ExprCall {
attrs: mut literal_attrs,
func,
paren_token,
args,
} = expr_call;
let mut path_expr = *func;
let path = loop {
break match path_expr {
Expr::Group(ExprGroup {
attrs,
group_token: _,
expr,
}) => {
literal_attrs.extend(attrs);
path_expr = *expr;
continue;
}
Expr::Path(_) => {
let Expr::Path(ExprPath { attrs, qself, path }) =
mem::replace(func, Expr::PLACEHOLDER)
else {
unreachable!();
};
match parse_enum_path(TypePath { qself, path }) {
Ok(path) => break path,
Err(path) => {
self.errors.error(&path, "unsupported enum variant path");
let TypePath { qself, path } = path;
*func = ExprPath { attrs, qself, path }.into();
return expr_call.into();
}
}
Expr::Path(ExprPath { attrs, qself, path }) => {
literal_attrs.extend(attrs);
TypePath { qself, path }
}
_ => {
self.errors.error(
&expr_call.func,
"#[hdl] function call -- function must be a possibly-parenthesized path",
);
return expr_call.into();
self.errors.error(&path_expr, "missing tuple struct's name");
return parse_quote_spanned! {path_expr.span()=>
{}
};
}
}
};
};
self.process_hdl_method_call(
let fields = args
.into_pairs()
.enumerate()
.map(|(index, p)| {
let (expr, comma) = p.into_tuple();
let mut index = Index::from(index);
index.span = hdl_attr.hdl.span;
Pair::new(
StructOrEnumLiteralField {
attrs: vec![],
member: Member::Unnamed(index),
colon_token: None,
expr,
},
comma,
)
})
.collect();
self.process_struct_enum(
hdl_attr,
ExprMethodCall {
attrs: expr_call.attrs,
receiver: parse_quote_spanned! {span=>
<#enum_path as ::fayalite::ty::StaticType>::TYPE
},
dot_token: Token![.](span),
method: variant_name,
turbofish: None,
paren_token: expr_call.paren_token,
args: expr_call.args,
StructOrEnumLiteral {
attrs: literal_attrs,
path,
brace_or_paren: BraceOrParen::Paren(paren_token),
fields,
dot2_token: None,
rest: None,
},
)
}
pub(crate) fn process_hdl_method_call(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_method_call: ExprMethodCall,
) -> Expr {
let ExprOptions { sim } = hdl_attr.body;
let span = hdl_attr.kw.span;
// remove any number of groups and up to one paren
let mut receiver = &mut *expr_method_call.receiver;
let mut has_group = false;
let receiver = loop {
match receiver {
Expr::Group(ExprGroup { expr, .. }) => {
has_group = true;
receiver = expr;
}
Expr::Paren(ExprParen { expr, .. }) => break &mut **expr,
receiver @ Expr::Path(_) => break receiver,
_ => {
if !has_group {
self.errors.error(
&expr_method_call.receiver,
"#[hdl] on a method call needs parenthesized receiver",
);
}
break &mut *expr_method_call.receiver;
}
}
};
let func = if sim.is_some() {
parse_quote_spanned! {span=>
::fayalite::enum_::enum_type_to_sim_builder
}
} else {
parse_quote_spanned! {span=>
::fayalite::enum_::assert_is_enum_type
}
};
*expr_method_call.receiver = ExprCall {
attrs: vec![],
func,
paren_token: Paren(span),
args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]),
}
.into();
expr_method_call.into()
}
}

@@ -0,0 +1,761 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{fold::impl_fold, kw, Errors, HdlAttr};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote, quote_spanned, ToTokens};
use std::collections::{BTreeMap, HashMap, HashSet};
use syn::{
fold::{fold_generics, Fold},
parse::{Parse, ParseStream},
parse_quote, parse_quote_spanned,
punctuated::Punctuated,
spanned::Spanned,
token::{Brace, Paren, Where},
Block, ConstParam, Expr, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics,
Ident, Index, ItemImpl, Lifetime, LifetimeParam, Member, Path, Token, Type, TypeParam,
TypePath, Visibility, WhereClause, WherePredicate,
};
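/// A comma-separated list of `WherePredicate`s; parsed from attribute input and
/// convertible from an optional `WhereClause`.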
#[derive(Clone, Debug)]
pub(crate) struct Bounds(pub(crate) Punctuated<WherePredicate, Token![,]>);
impl_fold! {
struct Bounds<>(Punctuated<WherePredicate, Token![,]>);
}
impl Parse for Bounds {
fn parse(input: ParseStream) -> syn::Result<Self> {
Ok(Bounds(Punctuated::parse_terminated(input)?))
}
}
impl From<Option<WhereClause>> for Bounds {
fn from(value: Option<WhereClause>) -> Self {
Self(value.map_or_else(Punctuated::new, |v| v.predicates))
}
}
impl ToTokens for Bounds {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.0.to_tokens(tokens)
}
}
#[derive(Debug, Clone)]
pub(crate) struct ParsedField<O> {
pub(crate) options: HdlAttr<O>,
pub(crate) vis: Visibility,
pub(crate) name: Member,
pub(crate) ty: Type,
}
impl<O> ParsedField<O> {
pub(crate) fn var_name(&self) -> Ident {
format_ident!("__v_{}", self.name)
}
}
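/// Builds the `Member` for a field: the identifier for named fields, otherwise an
/// unnamed `Index` spanned at the field's type.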
pub(crate) fn get_field_name(
index: usize,
name: Option<Ident>,
ty_span: impl FnOnce() -> Span,
) -> Member {
match name {
Some(name) => Member::Named(name),
None => Member::Unnamed(Index {
index: index as _,
span: ty_span(),
}),
}
}
pub(crate) fn get_field_names(fields: &Fields) -> impl Iterator<Item = Member> + '_ {
fields
.iter()
.enumerate()
.map(|(index, field)| get_field_name(index, field.ident.clone(), || field.ty.span()))
}
impl<O: Parse + Default> ParsedField<O> {
pub(crate) fn parse_fields(
errors: &mut Errors,
fields: &mut Fields,
in_enum: bool,
) -> (FieldsKind, Vec<ParsedField<O>>) {
let mut unit_fields = Punctuated::new();
let (fields_kind, fields) = match fields {
Fields::Named(fields) => (FieldsKind::Named(fields.brace_token), &mut fields.named),
Fields::Unnamed(fields) => {
(FieldsKind::Unnamed(fields.paren_token), &mut fields.unnamed)
}
Fields::Unit => (FieldsKind::Unit, &mut unit_fields),
};
let fields = fields
.iter_mut()
.enumerate()
.map(|(index, field)| {
let options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut field.attrs))
.unwrap_or_default();
let name = get_field_name(index, field.ident.clone(), || field.ty.span());
if in_enum && !matches!(field.vis, Visibility::Inherited) {
errors.error(&field.vis, "field visibility not allowed in enums");
}
ParsedField {
options,
vis: field.vis.clone(),
name,
ty: field.ty.clone(),
}
})
.collect();
(fields_kind, fields)
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum FieldsKind {
Unit,
Named(Brace),
Unnamed(Paren),
}
impl FieldsKind {
pub(crate) fn into_fields_named(
brace_token: Brace,
fields: impl IntoIterator<Item = syn::Field>,
) -> Fields {
Fields::Named(FieldsNamed {
brace_token,
named: Punctuated::from_iter(fields),
})
}
pub(crate) fn into_fields_unnamed(
paren_token: Paren,
fields: impl IntoIterator<Item = syn::Field>,
) -> Fields {
Fields::Unnamed(FieldsUnnamed {
paren_token,
unnamed: Punctuated::from_iter(fields),
})
}
pub(crate) fn into_fields(self, fields: impl IntoIterator<Item = syn::Field>) -> Fields {
match self {
FieldsKind::Unit => {
let mut fields = fields.into_iter().peekable();
let Some(first_field) = fields.peek() else {
return Fields::Unit;
};
if first_field.ident.is_some() {
Self::into_fields_named(Default::default(), fields)
} else {
Self::into_fields_unnamed(Default::default(), fields)
}
}
FieldsKind::Named(brace_token) => Self::into_fields_named(brace_token, fields),
FieldsKind::Unnamed(paren_token) => Self::into_fields_unnamed(paren_token, fields),
}
}
}
pub(crate) fn get_target(target: &Option<(kw::target, Paren, Path)>, item_ident: &Ident) -> Path {
match target {
Some((_, _, target)) => target.clone(),
None => item_ident.clone().into(),
}
}
pub(crate) struct ValueDeriveGenerics {
pub(crate) generics: Generics,
pub(crate) static_type_generics: Generics,
}
impl ValueDeriveGenerics {
pub(crate) fn get(mut generics: Generics, where_: &Option<(Where, Paren, Bounds)>) -> Self {
let mut static_type_generics = generics.clone();
if let Some((_, _, bounds)) = where_ {
generics
.make_where_clause()
.predicates
.extend(bounds.0.iter().cloned());
static_type_generics
.where_clause
.clone_from(&generics.where_clause);
} else {
let type_params = Vec::from_iter(generics.type_params().map(|v| v.ident.clone()));
let predicates = &mut generics.make_where_clause().predicates;
let static_type_predicates = &mut static_type_generics.make_where_clause().predicates;
for type_param in type_params {
predicates.push(parse_quote! {#type_param: ::fayalite::ty::Value<Type: ::fayalite::ty::Type<Value = #type_param>>});
static_type_predicates
.push(parse_quote! {#type_param: ::fayalite::ty::StaticValue});
}
}
Self {
generics,
static_type_generics,
}
}
}
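/// Emits hand-written `Clone`, `Hash`, `Eq`, and `PartialEq` impls for the given struct,
/// operating field-by-field over `field_names`.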
pub(crate) fn derive_clone_hash_eq_partialeq_for_struct<Name: ToTokens>(
the_struct_ident: &Ident,
generics: &Generics,
field_names: &[Name],
) -> TokenStream {
let (impl_generics, type_generics, where_clause) = generics.split_for_impl();
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::__std::clone::Clone for #the_struct_ident #type_generics
#where_clause
{
fn clone(&self) -> Self {
Self {
#(#field_names: ::fayalite::__std::clone::Clone::clone(&self.#field_names),)*
}
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::__std::hash::Hash for #the_struct_ident #type_generics
#where_clause
{
#[allow(unused_variables)]
fn hash<__H: ::fayalite::__std::hash::Hasher>(&self, hasher: &mut __H) {
#(::fayalite::__std::hash::Hash::hash(&self.#field_names, hasher);)*
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::__std::cmp::Eq for #the_struct_ident #type_generics
#where_clause
{
}
#[automatically_derived]
impl #impl_generics ::fayalite::__std::cmp::PartialEq for #the_struct_ident #type_generics
#where_clause
{
#[allow(unused_variables)]
#[allow(clippy::nonminimal_bool)]
fn eq(&self, other: &Self) -> ::fayalite::__std::primitive::bool {
true #(&& ::fayalite::__std::cmp::PartialEq::eq(
&self.#field_names,
&other.#field_names,
))*
}
}
}
}
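/// Appends `field` to `fields` (turning unit fields into named fields when necessary)
/// and returns the `Member` that refers to the newly added field.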
pub(crate) fn append_field(fields: &mut Fields, mut field: Field) -> Member {
let ident = field.ident.clone().expect("ident is supplied");
match fields {
Fields::Named(FieldsNamed { named, .. }) => {
named.push(field);
Member::Named(ident)
}
Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {
field.ident = None;
field.colon_token = None;
let index = unnamed.len();
unnamed.push(field);
Member::Unnamed(index.into())
}
Fields::Unit => {
*fields = Fields::Named(FieldsNamed {
brace_token: Default::default(),
named: Punctuated::from_iter([field]),
});
Member::Named(ident)
}
}
}
#[derive(Clone, Debug)]
pub(crate) struct BuilderField {
pub(crate) names: HashSet<Member>,
pub(crate) mapped_value: Expr,
pub(crate) mapped_type: Type,
pub(crate) where_clause: Option<WhereClause>,
pub(crate) builder_field_name: Ident,
pub(crate) type_param: Ident,
}
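/// Collects builder fields keyed by member name; `finish_filling_in_fields` turns the
/// map into the ordered `BuilderWithFields` used to emit the generated builder struct.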
#[derive(Debug)]
pub(crate) struct Builder {
struct_name: Ident,
vis: Visibility,
fields: BTreeMap<String, BuilderField>,
}
#[derive(Debug)]
pub(crate) struct BuilderWithFields {
struct_name: Ident,
vis: Visibility,
phantom_type_param: Ident,
phantom_type_field: Ident,
fields: Vec<(String, BuilderField)>,
}
impl Builder {
pub(crate) fn new(struct_name: Ident, vis: Visibility) -> Self {
Self {
struct_name,
vis,
fields: BTreeMap::new(),
}
}
pub(crate) fn insert_field(
&mut self,
name: Member,
map_value: impl FnOnce(&Ident) -> Expr,
map_type: impl FnOnce(&Ident) -> Type,
where_clause: impl FnOnce(&Ident) -> Option<WhereClause>,
) {
self.fields
.entry(name.to_token_stream().to_string())
.or_insert_with_key(|name| {
let builder_field_name =
format_ident!("field_{}", name, span = self.struct_name.span());
let type_param = format_ident!("__T_{}", name, span = self.struct_name.span());
BuilderField {
names: HashSet::new(),
mapped_value: map_value(&builder_field_name),
mapped_type: map_type(&type_param),
where_clause: where_clause(&type_param),
builder_field_name,
type_param,
}
})
.names
.insert(name);
}
pub(crate) fn finish_filling_in_fields(self) -> BuilderWithFields {
let Self {
struct_name,
vis,
fields,
} = self;
let fields = Vec::from_iter(fields);
BuilderWithFields {
phantom_type_param: Ident::new("__Phantom", struct_name.span()),
phantom_type_field: Ident::new("__phantom", struct_name.span()),
struct_name,
vis,
fields,
}
}
}
impl BuilderWithFields {
pub(crate) fn get_field(&self, name: &Member) -> Option<(usize, &BuilderField)> {
let index = self
.fields
.binary_search_by_key(&&*name.to_token_stream().to_string(), |v| &*v.0)
.ok()?;
Some((index, &self.fields[index].1))
}
pub(crate) fn ty(
&self,
specified_fields: impl IntoIterator<Item = (Member, Type)>,
phantom_type: Option<&Type>,
other_fields_are_any_type: bool,
) -> TypePath {
let Self {
struct_name,
vis: _,
phantom_type_param,
phantom_type_field: _,
fields,
} = self;
let span = struct_name.span();
let mut arguments =
Vec::from_iter(fields.iter().map(|(_, BuilderField { type_param, .. })| {
if other_fields_are_any_type {
parse_quote_spanned! {span=>
#type_param
}
} else {
parse_quote_spanned! {span=>
()
}
}
}));
for (name, ty) in specified_fields {
let Some((index, _)) = self.get_field(&name) else {
panic!("field not found: {}", name.to_token_stream());
};
arguments[index] = ty;
}
let phantom_type_param = phantom_type.is_none().then_some(phantom_type_param);
parse_quote_spanned! {span=>
#struct_name::<#phantom_type_param #phantom_type #(, #arguments)*>
}
}
pub(crate) fn append_generics(
&self,
specified_fields: impl IntoIterator<Item = Member>,
has_phantom_type_param: bool,
other_fields_are_any_type: bool,
generics: &mut Generics,
) {
let Self {
struct_name: _,
vis: _,
phantom_type_param,
phantom_type_field: _,
fields,
} = self;
if has_phantom_type_param {
generics.params.push(GenericParam::from(TypeParam::from(
phantom_type_param.clone(),
)));
}
if !other_fields_are_any_type {
return;
}
let mut type_params = Vec::from_iter(
fields
.iter()
.map(|(_, BuilderField { type_param, .. })| Some(type_param)),
);
for name in specified_fields {
let Some((index, _)) = self.get_field(&name) else {
panic!("field not found: {}", name.to_token_stream());
};
type_params[index] = None;
}
generics.params.extend(
type_params
.into_iter()
.filter_map(|v| Some(GenericParam::from(TypeParam::from(v?.clone())))),
);
}
pub(crate) fn make_build_method(
&self,
build_fn_name: &Ident,
specified_fields: impl IntoIterator<Item = (Member, Type)>,
generics: &Generics,
phantom_type: &Type,
return_ty: &Type,
mut body: Block,
) -> ItemImpl {
let Self {
struct_name,
vis,
phantom_type_param: _,
phantom_type_field,
fields,
} = self;
let span = struct_name.span();
let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name));
let (impl_generics, _type_generics, where_clause) = generics.split_for_impl();
let empty_arg = parse_quote_spanned! {span=>
()
};
let mut ty_arguments = vec![empty_arg; fields.len()];
let empty_field_pat = quote_spanned! {span=>
: _
};
let mut field_pats = vec![Some(empty_field_pat); fields.len()];
for (name, ty) in specified_fields {
let Some((index, _)) = self.get_field(&name) else {
panic!("field not found: {}", name.to_token_stream());
};
ty_arguments[index] = ty;
field_pats[index] = None;
}
body.stmts.insert(
0,
parse_quote_spanned! {span=>
let Self {
#(#field_names #field_pats,)*
#phantom_type_field: _,
} = self;
},
);
parse_quote_spanned! {span=>
#[automatically_derived]
impl #impl_generics #struct_name<#phantom_type #(, #ty_arguments)*>
#where_clause
{
#[allow(non_snake_case, dead_code)]
#vis fn #build_fn_name(self) -> #return_ty
#body
}
}
}
}
impl ToTokens for BuilderWithFields {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
struct_name,
vis,
phantom_type_param,
phantom_type_field,
fields,
} = self;
let span = struct_name.span();
let mut any_generics = Generics::default();
self.append_generics([], true, true, &mut any_generics);
let empty_ty = self.ty([], None, false);
let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name));
let field_type_params = Vec::from_iter(fields.iter().map(|v| &v.1.type_param));
quote_spanned! {span=>
#[allow(non_camel_case_types)]
#[non_exhaustive]
#vis struct #struct_name #any_generics {
#(#field_names: #field_type_params,)*
#phantom_type_field: ::fayalite::__std::marker::PhantomData<#phantom_type_param>,
}
#[automatically_derived]
impl<#phantom_type_param> #empty_ty {
fn new() -> Self {
Self {
#(#field_names: (),)*
#phantom_type_field: ::fayalite::__std::marker::PhantomData,
}
}
}
}
.to_tokens(tokens);
for (field_index, (_, field)) in self.fields.iter().enumerate() {
let initial_fields = &fields[..field_index];
let final_fields = &fields[field_index..][1..];
let initial_type_params =
Vec::from_iter(initial_fields.iter().map(|v| &v.1.type_param));
let final_type_params = Vec::from_iter(final_fields.iter().map(|v| &v.1.type_param));
let initial_field_names =
Vec::from_iter(initial_fields.iter().map(|v| &v.1.builder_field_name));
let final_field_names =
Vec::from_iter(final_fields.iter().map(|v| &v.1.builder_field_name));
let BuilderField {
names: _,
mapped_value,
mapped_type,
where_clause,
builder_field_name,
type_param,
} = field;
quote_spanned! {span=>
#[automatically_derived]
#[allow(non_camel_case_types, dead_code)]
impl<#phantom_type_param #(, #initial_type_params)* #(, #final_type_params)*>
#struct_name<
#phantom_type_param,
#(#initial_type_params,)*
(), #(#final_type_params,)*
>
{
#vis fn #builder_field_name<#type_param>(
self,
#builder_field_name: #type_param,
) -> #struct_name<
#phantom_type_param,
#(#initial_type_params,)*
#mapped_type,
#(#final_type_params,)*
>
#where_clause
{
let Self {
#(#initial_field_names,)*
#builder_field_name: (),
#(#final_field_names,)*
#phantom_type_field: _,
} = self;
let #builder_field_name = #mapped_value;
#struct_name {
#(#field_names,)*
#phantom_type_field: ::fayalite::__std::marker::PhantomData,
}
}
}
}
.to_tokens(tokens);
}
}
}
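/// An ident-renaming `Fold`; `DupGenerics` uses it to duplicate a set of generic
/// parameters under fresh `__{ident}_{index}` names.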
pub(crate) struct MapIdents {
pub(crate) map: HashMap<Ident, Ident>,
}
impl Fold for &MapIdents {
fn fold_ident(&mut self, i: Ident) -> Ident {
self.map.get(&i).cloned().unwrap_or(i)
}
}
pub(crate) struct DupGenerics<M> {
pub(crate) combined: Generics,
pub(crate) maps: M,
}
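/// Appends `source` onto `target`, inserting a separator from `make_punct` when
/// `target` is non-empty and has no trailing punctuation.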
pub(crate) fn merge_punctuated<T, P: Default>(
target: &mut Punctuated<T, P>,
source: Punctuated<T, P>,
make_punct: impl FnOnce() -> P,
) {
if source.is_empty() {
return;
}
if target.is_empty() {
*target = source;
return;
}
if !target.trailing_punct() {
target.push_punct(make_punct());
}
target.extend(source.into_pairs());
}
pub(crate) fn merge_generics(target: &mut Generics, source: Generics) {
let Generics {
lt_token,
params,
gt_token,
where_clause,
} = source;
let span = lt_token.map(|v| v.span).unwrap_or_else(|| params.span());
target.lt_token = target.lt_token.or(lt_token);
merge_punctuated(&mut target.params, params, || Token![,](span));
target.gt_token = target.gt_token.or(gt_token);
if let Some(where_clause) = where_clause {
if let Some(target_where_clause) = &mut target.where_clause {
let WhereClause {
where_token,
predicates,
} = where_clause;
let span = where_token.span;
target_where_clause.where_token = where_token;
merge_punctuated(&mut target_where_clause.predicates, predicates, || {
Token![,](span)
});
} else {
target.where_clause = Some(where_clause);
}
}
}
impl DupGenerics<Vec<MapIdents>> {
pub(crate) fn new_dyn(generics: &Generics, count: usize) -> Self {
let mut maps = Vec::from_iter((0..count).map(|_| MapIdents {
map: HashMap::new(),
}));
for param in &generics.params {
let (GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime { ident, .. },
..
})
| GenericParam::Type(TypeParam { ident, .. })
| GenericParam::Const(ConstParam { ident, .. })) = param;
for (i, map_idents) in maps.iter_mut().enumerate() {
map_idents
.map
.insert(ident.clone(), format_ident!("__{}_{}", ident, i));
}
}
let mut combined = Generics::default();
for map_idents in maps.iter() {
merge_generics(
&mut combined,
fold_generics(&mut { map_idents }, generics.clone()),
);
}
Self { combined, maps }
}
}
impl<const COUNT: usize> DupGenerics<[MapIdents; COUNT]> {
pub(crate) fn new(generics: &Generics) -> Self {
let DupGenerics { combined, maps } = DupGenerics::new_dyn(generics, COUNT);
Self {
combined,
maps: maps.try_into().ok().unwrap(),
}
}
}
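/// Adds `where_predicate` to `target`'s where-clause, creating the clause if it does not
/// exist and inserting a separating comma when needed.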
pub(crate) fn add_where_predicate(
target: &mut Generics,
span: Span,
where_predicate: WherePredicate,
) {
let WhereClause {
where_token: _,
predicates,
} = target.where_clause.get_or_insert_with(|| WhereClause {
where_token: Token![where](span),
predicates: Punctuated::new(),
});
if !predicates.empty_or_trailing() {
predicates.push_punct(Token![,](span));
}
predicates.push_value(where_predicate);
}
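/// Emits a `::fayalite::ty::Connect` impl for `ty_ident`. With `connect_inexact`, the
/// generics are duplicated into separate lhs/rhs parameter sets so differently
/// parameterized instances can still be connected; otherwise a single set is used.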
pub(crate) fn make_connect_impl(
connect_inexact: Option<(crate::kw::connect_inexact,)>,
generics: &Generics,
ty_ident: &Ident,
field_types: impl IntoIterator<Item = Type>,
) -> TokenStream {
let span = ty_ident.span();
let impl_generics;
let combined_generics;
let where_clause;
let lhs_generics;
let lhs_type_generics;
let rhs_generics;
let rhs_type_generics;
if connect_inexact.is_some() {
let DupGenerics {
mut combined,
maps: [lhs_map, rhs_map],
} = DupGenerics::new(generics);
for field_type in field_types {
let lhs_type = (&lhs_map).fold_type(field_type.clone());
let rhs_type = (&rhs_map).fold_type(field_type);
add_where_predicate(
&mut combined,
span,
parse_quote_spanned! {span=>
#lhs_type: ::fayalite::ty::Connect<#rhs_type>
},
);
}
combined_generics = combined;
(impl_generics, _, where_clause) = combined_generics.split_for_impl();
lhs_generics = (&lhs_map).fold_generics(generics.clone());
(_, lhs_type_generics, _) = lhs_generics.split_for_impl();
rhs_generics = (&rhs_map).fold_generics(generics.clone());
(_, rhs_type_generics, _) = rhs_generics.split_for_impl();
} else {
let mut generics = generics.clone();
for field_type in field_types {
add_where_predicate(
&mut generics,
span,
parse_quote_spanned! {span=>
#field_type: ::fayalite::ty::Connect<#field_type>
},
);
}
combined_generics = generics;
(impl_generics, lhs_type_generics, where_clause) = combined_generics.split_for_impl();
rhs_type_generics = lhs_type_generics.clone();
}
quote_spanned! {span=>
#[automatically_derived]
#[allow(non_camel_case_types)]
impl #impl_generics ::fayalite::ty::Connect<#ty_ident #rhs_type_generics>
for #ty_ident #lhs_type_generics
#where_clause
{
}
}
}

@@ -0,0 +1,969 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
value_derive_common::{
append_field, derive_clone_hash_eq_partialeq_for_struct, get_field_names, get_target,
make_connect_impl, Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics,
},
value_derive_struct::{self, ParsedStruct, ParsedStructNames, StructOptions},
Errors, HdlAttr,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned, ToTokens};
use syn::{
parse_quote, parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, token::Brace,
Field, FieldMutability, Fields, FieldsNamed, Generics, Ident, Index, ItemEnum, ItemStruct,
Member, Path, Token, Type, Variant, Visibility,
};
crate::options! {
#[options = EnumOptions]
enum EnumOption {
OutlineGenerated(outline_generated),
ConnectInexact(connect_inexact),
Bounds(where_, Bounds),
Target(target, Path),
}
}
crate::options! {
#[options = VariantOptions]
enum VariantOption {}
}
crate::options! {
#[options = FieldOptions]
enum FieldOption {}
}
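/// How a variant carries data: no payload, a single unnamed field used directly, or a
/// generated struct that wraps multiple (or named) fields.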
enum VariantValue {
None,
Direct {
value_type: Type,
},
Struct {
value_struct: ItemStruct,
parsed_struct: ParsedStruct,
},
}
impl VariantValue {
fn is_none(&self) -> bool {
matches!(self, Self::None)
}
fn value_ty(&self) -> Option<Type> {
match self {
VariantValue::None => None,
VariantValue::Direct { value_type } => Some(value_type.clone()),
VariantValue::Struct { value_struct, .. } => {
let (_, type_generics, _) = value_struct.generics.split_for_impl();
let ident = &value_struct.ident;
Some(parse_quote! { #ident #type_generics })
}
}
}
}
struct ParsedVariant {
options: HdlAttr<VariantOptions>,
ident: Ident,
fields_kind: FieldsKind,
fields: Vec<ParsedField<FieldOptions>>,
value: VariantValue,
}
impl ParsedVariant {
fn parse(
errors: &mut Errors,
variant: Variant,
enum_options: &EnumOptions,
enum_vis: &Visibility,
enum_ident: &Ident,
enum_generics: &Generics,
) -> Self {
let target = get_target(&enum_options.target, enum_ident);
let Variant {
mut attrs,
ident,
fields,
discriminant,
} = variant;
if let Some((eq, _)) = discriminant {
errors.error(eq, "#[derive(Value)]: discriminants not allowed");
}
let variant_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
.unwrap_or_default();
let (fields_kind, parsed_fields) =
ParsedField::parse_fields(errors, &mut fields.clone(), true);
let value = match (&fields_kind, &*parsed_fields) {
(FieldsKind::Unit, _) => VariantValue::None,
(
FieldsKind::Unnamed(_),
[ParsedField {
options,
vis: _,
name: Member::Unnamed(Index { index: 0, span: _ }),
ty,
}],
) => {
let FieldOptions {} = options.body;
VariantValue::Direct {
value_type: ty.clone(),
}
}
_ => {
let variant_value_struct_ident =
format_ident!("__{}__{}", enum_ident, ident, span = ident.span());
let variant_type_struct_ident =
format_ident!("__{}__{}__Type", enum_ident, ident, span = ident.span());
let mut value_struct_fields = fields.clone();
let (_, type_generics, _) = enum_generics.split_for_impl();
append_field(
&mut value_struct_fields,
Field {
attrs: vec![HdlAttr::from(value_derive_struct::FieldOptions {
flip: None,
skip: Some(Default::default()),
})
.to_attr()],
vis: enum_vis.clone(),
mutability: FieldMutability::None,
ident: Some(Ident::new("__phantom", ident.span())),
colon_token: None,
ty: parse_quote_spanned! {ident.span()=>
::fayalite::__std::marker::PhantomData<#target #type_generics>
},
},
);
let (value_struct_fields_kind, value_struct_parsed_fields) =
ParsedField::parse_fields(errors, &mut value_struct_fields, false);
let value_struct = ItemStruct {
attrs: vec![parse_quote! { #[allow(non_camel_case_types)] }],
vis: enum_vis.clone(),
struct_token: Token![struct](ident.span()),
ident: variant_value_struct_ident.clone(),
generics: enum_generics.clone(),
fields: value_struct_fields,
semi_token: None,
};
VariantValue::Struct {
value_struct,
parsed_struct: ParsedStruct {
options: StructOptions {
outline_generated: None,
static_: Some(Default::default()),
where_: Some((
Default::default(),
Default::default(),
ValueDeriveGenerics::get(
enum_generics.clone(),
&enum_options.where_,
)
.static_type_generics
.where_clause
.into(),
)),
target: None,
connect_inexact: enum_options.connect_inexact,
}
.into(),
vis: enum_vis.clone(),
struct_token: Default::default(),
generics: enum_generics.clone(),
fields_kind: value_struct_fields_kind,
fields: value_struct_parsed_fields,
semi_token: None, // it will fill in the semicolon if needed
skip_check_fields: true,
names: ParsedStructNames {
ident: variant_value_struct_ident.clone(),
type_struct_debug_ident: Some(format!("{enum_ident}::{ident}::Type")),
type_struct_ident: variant_type_struct_ident,
match_variant_ident: None,
builder_struct_ident: None,
mask_match_variant_ident: None,
mask_type_ident: None,
mask_type_debug_ident: Some(format!(
"AsMask<{enum_ident}::{ident}>::Type"
)),
mask_value_ident: None,
mask_value_debug_ident: Some(format!("AsMask<{enum_ident}::{ident}>")),
mask_builder_struct_ident: None,
},
},
}
}
};
ParsedVariant {
options: variant_options,
ident,
fields_kind,
fields: parsed_fields,
value,
}
}
}
struct ParsedEnum {
options: HdlAttr<EnumOptions>,
vis: Visibility,
enum_token: Token![enum],
ident: Ident,
generics: Generics,
brace_token: Brace,
variants: Vec<ParsedVariant>,
}
impl ParsedEnum {
fn parse(item: ItemEnum) -> syn::Result<Self> {
let ItemEnum {
mut attrs,
vis,
enum_token,
ident,
generics,
brace_token,
variants,
} = item;
let mut errors = Errors::new();
let enum_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
.unwrap_or_default();
let variants = variants
.into_iter()
.map(|variant| {
ParsedVariant::parse(
&mut errors,
variant,
&enum_options.body,
&vis,
&ident,
&generics,
)
})
.collect();
errors.finish()?;
Ok(ParsedEnum {
options: enum_options,
vis,
enum_token,
ident,
generics,
brace_token,
variants,
})
}
}
impl ToTokens for ParsedEnum {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
options,
vis,
enum_token,
ident: enum_ident,
generics: enum_generics,
brace_token,
variants,
} = self;
let EnumOptions {
outline_generated: _,
connect_inexact,
where_,
target,
} = &options.body;
let target = get_target(target, enum_ident);
let ValueDeriveGenerics {
generics: _,
static_type_generics,
} = ValueDeriveGenerics::get(enum_generics.clone(), where_);
let (static_type_impl_generics, static_type_type_generics, static_type_where_clause) =
static_type_generics.split_for_impl();
let type_struct_ident = format_ident!("__{}__Type", enum_ident);
let mut field_checks = vec![];
let mut make_type_struct_variant_type = |variant: &ParsedVariant| {
let VariantOptions {} = variant.options.body;
let (value_struct, parsed_struct) = match &variant.value {
VariantValue::None => {
return None;
}
VariantValue::Direct { value_type } => {
field_checks.push(quote_spanned! {value_type.span()=>
__check_field::<#value_type>();
});
return Some(parse_quote! { <#value_type as ::fayalite::expr::ToExpr>::Type });
}
VariantValue::Struct {
value_struct,
parsed_struct,
} => (value_struct, parsed_struct),
};
value_struct.to_tokens(tokens);
parsed_struct.to_tokens(tokens);
let mut field_names = Vec::from_iter(get_field_names(&value_struct.fields));
derive_clone_hash_eq_partialeq_for_struct(
&value_struct.ident,
&static_type_generics,
&field_names,
)
.to_tokens(tokens);
field_names = Vec::from_iter(
field_names
.into_iter()
.zip(parsed_struct.fields.iter())
.filter_map(|(member, field)| {
field.options.body.skip.is_none().then_some(member)
}),
);
let field_name_strs =
Vec::from_iter(field_names.iter().map(|v| v.to_token_stream().to_string()));
let debug_ident = format!("{enum_ident}::{}", variant.ident);
let debug_body = match variant.fields_kind {
FieldsKind::Unit => quote! {
f.debug_struct(#debug_ident).finish()
},
FieldsKind::Named(_) => quote! {
f.debug_struct(#debug_ident)
#(.field(#field_name_strs, &self.#field_names))*
.finish()
},
FieldsKind::Unnamed(_) => quote! {
f.debug_tuple(#debug_ident)#(.field(&self.#field_names))*.finish()
},
};
let value_struct_ident = &value_struct.ident;
quote! {
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::__std::fmt::Debug
for #value_struct_ident #static_type_type_generics
#static_type_where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_body
}
}
}
.to_tokens(tokens);
Some(parse_quote! {
<
#value_struct_ident #static_type_type_generics
as ::fayalite::expr::ToExpr
>::Type
})
};
let type_struct_variants = Punctuated::from_iter(variants.iter().filter_map(|variant| {
let VariantOptions {} = variant.options.body;
Some(Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: Some(variant.ident.clone()),
colon_token: None, // it will fill in the colon if needed
ty: make_type_struct_variant_type(variant)?,
})
}));
let type_struct = ItemStruct {
attrs: vec![
parse_quote! {#[allow(non_camel_case_types)]},
parse_quote! {#[allow(non_snake_case)]},
],
vis: vis.clone(),
struct_token: Token![struct](enum_token.span),
ident: type_struct_ident,
generics: static_type_generics.clone(),
fields: Fields::Named(FieldsNamed {
brace_token: *brace_token,
named: type_struct_variants,
}),
semi_token: None,
};
let type_struct_ident = &type_struct.ident;
let type_struct_debug_ident = format!("{enum_ident}::Type");
type_struct.to_tokens(tokens);
let non_empty_variant_names = Vec::from_iter(
variants
.iter()
.filter(|v| !v.value.is_none())
.map(|v| v.ident.clone()),
);
let non_empty_variant_name_strs =
Vec::from_iter(non_empty_variant_names.iter().map(|v| v.to_string()));
let debug_type_body = quote! {
f.debug_struct(#type_struct_debug_ident)
#(.field(#non_empty_variant_name_strs, &self.#non_empty_variant_names))*
.finish()
};
derive_clone_hash_eq_partialeq_for_struct(
type_struct_ident,
&static_type_generics,
&non_empty_variant_names,
)
.to_tokens(tokens);
let variant_names = Vec::from_iter(variants.iter().map(|v| &v.ident));
let variant_name_strs = Vec::from_iter(variant_names.iter().map(|v| v.to_string()));
let (variant_field_pats, variant_to_canonical_values): (Vec<_>, Vec<_>) = variants
.iter()
.map(|v| {
let field_names: Vec<_> = v.fields.iter().map(|field| &field.name).collect();
let var_names: Vec<_> = v.fields.iter().map(|field| field.var_name()).collect();
let field_pats = quote! {
#(#field_names: #var_names,)*
};
let to_canonical_value = match &v.value {
VariantValue::None => quote! { ::fayalite::__std::option::Option::None },
VariantValue::Direct { .. } => {
debug_assert_eq!(var_names.len(), 1);
quote! {
::fayalite::__std::option::Option::Some(
::fayalite::ty::DynValueTrait::to_canonical_dyn(#(#var_names)*),
)
}
}
VariantValue::Struct {
value_struct,
parsed_struct,
} => {
let value_struct_ident = &value_struct.ident;
let phantom_field_name = &parsed_struct
.fields
.last()
.expect("missing phantom field")
.name;
let type_generics = static_type_type_generics.as_turbofish();
quote! {
::fayalite::__std::option::Option::Some(
::fayalite::ty::DynValueTrait::to_canonical_dyn(
&#value_struct_ident #type_generics {
#(#field_names:
::fayalite::__std::clone::Clone::clone(#var_names),)*
#phantom_field_name: ::fayalite::__std::marker::PhantomData,
},
),
)
}
}
};
(field_pats, to_canonical_value)
})
.unzip();
let mut match_enum_variants = Punctuated::new();
let mut match_enum_debug_arms = vec![];
let mut match_enum_arms = vec![];
let mut variant_vars = vec![];
let mut from_canonical_type_variant_lets = vec![];
let mut non_empty_variant_vars = vec![];
let mut enum_type_variants = vec![];
let mut enum_type_variants_hint = vec![];
let match_enum_ident = format_ident!("__{}__MatchEnum", enum_ident);
let mut builder = Builder::new(format_ident!("__{}__Builder", enum_ident), vis.clone());
for variant in variants.iter() {
for field in variant.fields.iter() {
builder.insert_field(
field.name.clone(),
|v| {
parse_quote_spanned! {v.span()=>
::fayalite::expr::ToExpr::to_expr(&#v)
}
},
|t| {
parse_quote_spanned! {t.span()=>
::fayalite::expr::Expr<<
<#t as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::Value>
}
},
|t| {
parse_quote_spanned! {t.span()=>
where
#t: ::fayalite::expr::ToExpr,
}
},
);
}
}
let builder = builder.finish_filling_in_fields();
builder.to_tokens(tokens);
for (variant_index, variant) in variants.iter().enumerate() {
let variant_var = format_ident!("__v_{}", variant.ident);
let variant_name = &variant.ident;
let variant_name_str = variant.ident.to_string();
match_enum_variants.push(Variant {
attrs: vec![],
ident: variant.ident.clone(),
fields: variant.fields_kind.into_fields(variant.fields.iter().map(
|ParsedField {
options,
vis,
name,
ty,
}| {
let FieldOptions {} = options.body;
Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: if let Member::Named(name) = name {
Some(name.clone())
} else {
None
},
colon_token: None,
ty: parse_quote! { ::fayalite::expr::Expr<#ty> },
}
},
)),
discriminant: None,
});
let match_enum_field_names = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty: _,
}| {
let FieldOptions {} = options.body;
name
},
));
let match_enum_field_name_strs = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty: _,
}| {
let FieldOptions {} = options.body;
name.to_token_stream().to_string()
},
));
let match_enum_debug_vars = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty: _,
}| {
let FieldOptions {} = options.body;
format_ident!("__v_{}", name)
},
));
match_enum_debug_arms.push(match variant.fields_kind {
FieldsKind::Unit | FieldsKind::Named(_) => quote! {
Self::#variant_name {
#(#match_enum_field_names: ref #match_enum_debug_vars,)*
} => f.debug_struct(#variant_name_str)
#(.field(#match_enum_field_name_strs, #match_enum_debug_vars))*
.finish(),
},
FieldsKind::Unnamed(_) => quote! {
Self::#variant_name(
#(ref #match_enum_debug_vars,)*
) => f.debug_tuple(#variant_name_str)
#(.field(#match_enum_debug_vars))*
.finish(),
},
});
if let Some(value_ty) = variant.value.value_ty() {
from_canonical_type_variant_lets.push(quote! {
let #variant_var =
#variant_var.from_canonical_type_helper_has_value(#variant_name_str);
});
non_empty_variant_vars.push(variant_var.clone());
enum_type_variants.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::Some(
::fayalite::ty::DynType::canonical_dyn(&self.#variant_name),
),
}
});
enum_type_variants_hint.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::Some(
::fayalite::bundle::TypeHint::<
<#value_ty as ::fayalite::expr::ToExpr>::Type,
>::intern_dyn(),
),
}
});
} else {
from_canonical_type_variant_lets.push(quote! {
#variant_var.from_canonical_type_helper_no_value(#variant_name_str);
});
enum_type_variants.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::None,
}
});
enum_type_variants_hint.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::None,
}
});
}
variant_vars.push(variant_var);
match_enum_arms.push(match &variant.value {
VariantValue::None => quote! {
#variant_index => #match_enum_ident::#variant_name,
},
VariantValue::Direct { value_type } => quote! {
#variant_index => #match_enum_ident::#variant_name {
#(#match_enum_field_names)*: ::fayalite::expr::ToExpr::to_expr(
&__variant_access.downcast_unchecked::<
<#value_type as ::fayalite::expr::ToExpr>::Type>(),
),
},
},
VariantValue::Struct {
value_struct: ItemStruct { ident, .. },
..
} => quote! {
#variant_index => {
let __variant_access = ::fayalite::expr::ToExpr::to_expr(
&__variant_access.downcast_unchecked::<<
#ident #static_type_type_generics
as ::fayalite::expr::ToExpr
>::Type>(),
);
#match_enum_ident::#variant_name {
#(#match_enum_field_names:
(*__variant_access).#match_enum_field_names,)*
}
},
},
});
let builder_field_and_types = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty,
}| {
let FieldOptions {} = options.body;
(name, ty)
},
));
let builder_field_vars = Vec::from_iter(
builder_field_and_types
.iter()
.map(|(name, _)| &builder.get_field(name).unwrap().1.builder_field_name),
);
let build_body = match &variant.value {
VariantValue::None => parse_quote! {
{
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::EnumLiteral::<
#type_struct_ident #static_type_type_generics
>::new_unchecked(
::fayalite::__std::option::Option::None,
#variant_index,
::fayalite::ty::StaticType::static_type(),
),
)
}
},
VariantValue::Direct { value_type: _ } => parse_quote! {
{
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::EnumLiteral::<
#type_struct_ident #static_type_type_generics
>::new_unchecked(
::fayalite::__std::option::Option::Some(
#(#builder_field_vars)*.to_canonical_dyn(),
),
#variant_index,
::fayalite::ty::StaticType::static_type(),
),
)
}
},
VariantValue::Struct {
parsed_struct:
ParsedStruct {
names:
ParsedStructNames {
type_struct_ident: field_type_struct_ident,
..
},
..
},
..
} => parse_quote! {
{
let __builder = <
#field_type_struct_ident #static_type_type_generics
as ::fayalite::bundle::BundleType
>::builder();
#(let __builder = __builder.#builder_field_vars(#builder_field_vars);)*
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::EnumLiteral::<
#type_struct_ident #static_type_type_generics
>::new_unchecked(
::fayalite::__std::option::Option::Some(
__builder.build().to_canonical_dyn(),
),
#variant_index,
::fayalite::ty::StaticType::static_type(),
),
)
}
},
};
builder
.make_build_method(
&format_ident!("variant_{}", variant_name),
variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty,
}| {
let FieldOptions {} = options.body;
(name.clone(), parse_quote! { ::fayalite::expr::Expr<#ty> })
},
),
&static_type_generics,
&parse_quote! {#type_struct_ident #static_type_type_generics},
&parse_quote! { ::fayalite::expr::Expr<#target #static_type_type_generics> },
build_body,
)
.to_tokens(tokens);
}
let match_enum = ItemEnum {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
enum_token: *enum_token,
ident: match_enum_ident,
generics: static_type_generics.clone(),
brace_token: *brace_token,
variants: match_enum_variants,
};
let match_enum_ident = &match_enum.ident;
match_enum.to_tokens(tokens);
make_connect_impl(
*connect_inexact,
&static_type_generics,
type_struct_ident,
variants.iter().flat_map(|variant| {
variant.fields.iter().map(|field| {
let ty = &field.ty;
parse_quote_spanned! {field.name.span()=>
<#ty as ::fayalite::expr::ToExpr>::Type
}
})
}),
)
.to_tokens(tokens);
let variant_count = variants.len();
let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false);
quote! {
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::__std::fmt::Debug
for #match_enum_ident #static_type_type_generics
#static_type_where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
match *self {
#(#match_enum_debug_arms)*
}
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::StaticType
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
fn static_type() -> Self {
Self {
#(#non_empty_variant_names: ::fayalite::ty::StaticType::static_type(),)*
}
}
}
fn __check_field<T: ::fayalite::ty::Value>()
where
<T as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = T>,
{}
fn __check_fields #static_type_impl_generics(_: #target #static_type_type_generics)
#static_type_where_clause
{
#(#field_checks)*
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::__std::fmt::Debug
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_type_body
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::Type
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
type CanonicalType = ::fayalite::enum_::DynEnumType;
type Value = #target #static_type_type_generics;
type CanonicalValue = ::fayalite::enum_::DynEnum;
type MaskType = ::fayalite::int::UIntType<1>;
type MaskValue = ::fayalite::int::UInt<1>;
type MatchVariant = #match_enum_ident #static_type_type_generics;
type MatchActiveScope = ::fayalite::module::Scope;
type MatchVariantAndInactiveScope =
::fayalite::enum_::EnumMatchVariantAndInactiveScope<Self>;
type MatchVariantsIter = ::fayalite::enum_::EnumMatchVariantsIter<Self>;
fn match_variants<IO: ::fayalite::bundle::BundleValue>(
this: ::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>,
module_builder: &mut ::fayalite::module::ModuleBuilder<
IO,
::fayalite::module::NormalModule,
>,
source_location: ::fayalite::source_location::SourceLocation,
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter
where
<IO as ::fayalite::expr::ToExpr>::Type:
::fayalite::bundle::BundleType<Value = IO>,
{
module_builder.enum_match_variants_helper(this, source_location)
}
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
::fayalite::int::UIntType::new()
}
fn canonical(&self) -> <Self as ::fayalite::ty::Type>::CanonicalType {
let variants = ::fayalite::enum_::EnumType::variants(self);
::fayalite::enum_::DynEnumType::new(variants)
}
fn source_location(&self) -> ::fayalite::source_location::SourceLocation {
::fayalite::source_location::SourceLocation::caller()
}
fn type_enum(&self) -> ::fayalite::ty::TypeEnum {
::fayalite::ty::TypeEnum::EnumType(::fayalite::ty::Type::canonical(self))
}
#[allow(non_snake_case)]
fn from_canonical_type(t: <Self as ::fayalite::ty::Type>::CanonicalType) -> Self {
let [#(#variant_vars),*] = *::fayalite::enum_::EnumType::variants(&t) else {
::fayalite::__std::panic!("wrong number of variants");
};
#(#from_canonical_type_variant_lets)*
Self {
#(#non_empty_variant_names: #non_empty_variant_vars,)*
}
}
}
#[automatically_derived]
#[allow(clippy::init_numbered_fields)]
impl #static_type_impl_generics ::fayalite::enum_::EnumType
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
type Builder = #empty_builder_ty;
fn match_activate_scope(
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
) -> (
<Self as ::fayalite::ty::Type>::MatchVariant,
<Self as ::fayalite::ty::Type>::MatchActiveScope,
) {
let (__variant_access, __scope) = v.activate();
(
match ::fayalite::expr::ops::VariantAccess::variant_index(
&*__variant_access,
) {
#(#match_enum_arms)*
#variant_count.. => ::fayalite::__std::panic!("invalid variant index"),
},
__scope,
)
}
fn builder() -> <Self as ::fayalite::enum_::EnumType>::Builder {
#empty_builder_ty::new()
}
fn variants(&self) -> ::fayalite::intern::Interned<[::fayalite::enum_::VariantType<
::fayalite::intern::Interned<dyn ::fayalite::ty::DynCanonicalType>,
>]> {
::fayalite::intern::Intern::intern(&[#(#enum_type_variants,)*][..])
}
fn variants_hint() -> ::fayalite::enum_::VariantsHint {
::fayalite::enum_::VariantsHint::new([#(#enum_type_variants_hint,)*], false)
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::expr::ToExpr
for #target #static_type_type_generics
#static_type_where_clause
{
type Type = #type_struct_ident #static_type_type_generics;
fn ty(&self) -> <Self as ::fayalite::expr::ToExpr>::Type {
::fayalite::ty::StaticType::static_type()
}
fn to_expr(&self) -> ::fayalite::expr::Expr<Self> {
::fayalite::expr::Expr::from_value(self)
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::Value
for #target #static_type_type_generics
#static_type_where_clause
{
fn to_canonical(&self) -> <
<Self as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::CanonicalValue
{
let __ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self));
match self {
#(Self::#variant_names { #variant_field_pats } => {
::fayalite::enum_::DynEnum::new_by_name(
__ty,
::fayalite::intern::Intern::intern(#variant_name_strs),
#variant_to_canonical_values,
)
})*
}
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::enum_::EnumValue
for #target #static_type_type_generics
#static_type_where_clause
{
}
}
.to_tokens(tokens);
}
}
pub(crate) fn value_derive_enum(item: ItemEnum) -> syn::Result<TokenStream> {
let item = ParsedEnum::parse(item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = quote! {
const _: () = {
#item
};
};
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "value-enum-");
}
Ok(contents)
}
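For orientation, `value_derive_enum` above is the entry point that runs when user code derives `Value` on an enum. A minimal caller-side sketch, mirroring the `MyEnum` example in the doc comments later in this diff (the extra derives shown are the ones those docs use, not requirements imposed by this file):

```
use fayalite::{int::UInt, ty::Value};

// One unit variant and one struct-style variant with a single hardware-typed
// field -- the two variant shapes the handling above deals with.
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub enum MyEnum {
    A,
    B { v: UInt<32> },
}
```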

View file

@ -0,0 +1,765 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
value_derive_common::{
append_field, derive_clone_hash_eq_partialeq_for_struct, get_target, make_connect_impl,
Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics,
},
Errors, HdlAttr,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned, ToTokens};
use syn::{
parse_quote, parse_quote_spanned, spanned::Spanned, FieldMutability, Generics, Ident,
ItemStruct, Member, Path, Token, Visibility,
};
crate::options! {
#[options = StructOptions]
pub(crate) enum StructOption {
OutlineGenerated(outline_generated),
Static(static_),
ConnectInexact(connect_inexact),
Bounds(where_, Bounds),
Target(target, Path),
}
}
crate::options! {
#[options = FieldOptions]
pub(crate) enum FieldOption {
Flip(flip),
Skip(skip),
}
}
pub(crate) struct ParsedStructNames<I, S> {
pub(crate) ident: Ident,
pub(crate) type_struct_debug_ident: S,
pub(crate) type_struct_ident: Ident,
pub(crate) match_variant_ident: I,
pub(crate) builder_struct_ident: I,
pub(crate) mask_match_variant_ident: I,
pub(crate) mask_type_ident: I,
pub(crate) mask_type_debug_ident: S,
pub(crate) mask_value_ident: I,
pub(crate) mask_value_debug_ident: S,
pub(crate) mask_builder_struct_ident: I,
}
pub(crate) struct ParsedStruct {
pub(crate) options: HdlAttr<StructOptions>,
pub(crate) vis: Visibility,
pub(crate) struct_token: Token![struct],
pub(crate) generics: Generics,
pub(crate) fields_kind: FieldsKind,
pub(crate) fields: Vec<ParsedField<FieldOptions>>,
pub(crate) semi_token: Option<Token![;]>,
pub(crate) skip_check_fields: bool,
pub(crate) names: ParsedStructNames<Option<Ident>, Option<String>>,
}
impl ParsedStruct {
pub(crate) fn parse(item: &mut ItemStruct) -> syn::Result<Self> {
let ItemStruct {
attrs,
vis,
struct_token,
ident,
generics,
fields,
semi_token,
} = item;
let mut errors = Errors::new();
let struct_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs))
.unwrap_or_default();
let (fields_kind, fields) = ParsedField::parse_fields(&mut errors, fields, false);
errors.finish()?;
Ok(ParsedStruct {
options: struct_options,
vis: vis.clone(),
struct_token: *struct_token,
generics: generics.clone(),
fields_kind,
fields,
semi_token: *semi_token,
skip_check_fields: false,
names: ParsedStructNames {
ident: ident.clone(),
type_struct_debug_ident: None,
type_struct_ident: format_ident!("__{}__Type", ident),
match_variant_ident: None,
builder_struct_ident: None,
mask_match_variant_ident: None,
mask_type_ident: None,
mask_type_debug_ident: None,
mask_value_ident: None,
mask_value_debug_ident: None,
mask_builder_struct_ident: None,
},
})
}
pub(crate) fn write_body(
&self,
target: Path,
names: ParsedStructNames<&Ident, &String>,
is_for_mask: bool,
tokens: &mut TokenStream,
) {
let Self {
options,
vis,
struct_token,
generics,
fields_kind,
fields,
semi_token,
skip_check_fields,
names: _,
} = self;
let skip_check_fields = *skip_check_fields || is_for_mask;
let ParsedStructNames {
ident: struct_ident,
type_struct_debug_ident,
type_struct_ident,
match_variant_ident,
builder_struct_ident,
mask_match_variant_ident: _,
mask_type_ident,
mask_type_debug_ident: _,
mask_value_ident,
mask_value_debug_ident,
mask_builder_struct_ident: _,
} = names;
let StructOptions {
outline_generated: _,
where_,
target: _,
static_,
connect_inexact,
} = &options.body;
let ValueDeriveGenerics {
generics,
static_type_generics,
} = ValueDeriveGenerics::get(generics.clone(), where_);
let (impl_generics, type_generics, where_clause) = generics.split_for_impl();
let unskipped_fields = fields
.iter()
.filter(|field| field.options.body.skip.is_none());
let _field_names = Vec::from_iter(fields.iter().map(|field| field.name.clone()));
let unskipped_field_names =
Vec::from_iter(unskipped_fields.clone().map(|field| field.name.clone()));
let unskipped_field_name_strs = Vec::from_iter(
unskipped_field_names
.iter()
.map(|field_name| field_name.to_token_stream().to_string()),
);
let unskipped_field_vars = Vec::from_iter(
unskipped_field_names
.iter()
.map(|field_name| format_ident!("__v_{}", field_name)),
);
let unskipped_field_flips = Vec::from_iter(
unskipped_fields
.clone()
.map(|field| field.options.body.flip.is_some()),
);
let mut any_fields_skipped = false;
let type_fields = Vec::from_iter(fields.iter().filter_map(|field| {
let ParsedField {
options,
vis,
name,
ty,
} = field;
let FieldOptions { flip: _, skip } = &options.body;
if skip.is_some() {
any_fields_skipped = true;
return None;
}
let ty = if is_for_mask {
parse_quote! { ::fayalite::ty::AsMask<#ty> }
} else {
ty.to_token_stream()
};
Some(syn::Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: match name.clone() {
Member::Named(name) => Some(name),
Member::Unnamed(_) => None,
},
colon_token: None,
ty: parse_quote! { <#ty as ::fayalite::expr::ToExpr>::Type },
})
}));
let field_types = Vec::from_iter(type_fields.iter().map(|field| field.ty.clone()));
let match_variant_fields = Vec::from_iter(fields.iter().zip(&type_fields).map(
|(parsed_field, type_field)| {
let field_ty = &parsed_field.ty;
syn::Field {
ty: parse_quote! { ::fayalite::expr::Expr<#field_ty> },
..type_field.clone()
}
},
));
let mask_value_fields = Vec::from_iter(fields.iter().zip(&type_fields).map(
|(parsed_field, type_field)| {
let field_ty = &parsed_field.ty;
syn::Field {
ty: parse_quote! { ::fayalite::ty::AsMask<#field_ty> },
..type_field.clone()
}
},
));
let mut type_struct_fields = fields_kind.into_fields(type_fields);
let mut match_variant_fields = fields_kind.into_fields(match_variant_fields);
let mut mask_value_fields = fields_kind.into_fields(mask_value_fields);
let phantom_data_field_name = any_fields_skipped.then(|| {
let phantom_data_field_name = Ident::new("__phantom_data", type_struct_ident.span());
let member = append_field(
&mut type_struct_fields,
syn::Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: Some(phantom_data_field_name.clone()),
colon_token: None,
ty: parse_quote_spanned! {type_struct_ident.span()=>
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
},
},
);
append_field(
&mut match_variant_fields,
syn::Field {
attrs: vec![],
vis: Visibility::Inherited,
mutability: FieldMutability::None,
ident: Some(phantom_data_field_name.clone()),
colon_token: None,
ty: parse_quote_spanned! {type_struct_ident.span()=>
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
},
},
);
append_field(
&mut mask_value_fields,
syn::Field {
attrs: vec![],
vis: Visibility::Inherited,
mutability: FieldMutability::None,
ident: Some(phantom_data_field_name),
colon_token: None,
ty: parse_quote_spanned! {type_struct_ident.span()=>
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
},
},
);
member
});
let phantom_data_field_name_slice = phantom_data_field_name.as_slice();
let type_struct = ItemStruct {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
struct_token: *struct_token,
ident: type_struct_ident.clone(),
generics: generics.clone(),
fields: type_struct_fields,
semi_token: *semi_token,
};
type_struct.to_tokens(tokens);
let match_variant_struct = ItemStruct {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
struct_token: *struct_token,
ident: match_variant_ident.clone(),
generics: generics.clone(),
fields: match_variant_fields,
semi_token: *semi_token,
};
match_variant_struct.to_tokens(tokens);
let mask_type_body = if is_for_mask {
quote! {
::fayalite::__std::clone::Clone::clone(self)
}
} else {
let mask_value_struct = ItemStruct {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
struct_token: *struct_token,
ident: mask_value_ident.clone(),
generics: generics.clone(),
fields: mask_value_fields,
semi_token: *semi_token,
};
mask_value_struct.to_tokens(tokens);
let debug_mask_value_body = match fields_kind {
FieldsKind::Unit => quote! {
f.debug_struct(#mask_value_debug_ident).finish()
},
FieldsKind::Named(_) => quote! {
f.debug_struct(#mask_value_debug_ident)
#(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))*
.finish()
},
FieldsKind::Unnamed(_) => quote! {
f.debug_tuple(#mask_value_debug_ident)
#(.field(&self.#unskipped_field_names))*
.finish()
},
};
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::__std::fmt::Debug
for #mask_value_ident #type_generics
#where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_mask_value_body
}
}
}
.to_tokens(tokens);
quote! {
#mask_type_ident {
#(#unskipped_field_names:
::fayalite::ty::Type::mask_type(&self.#unskipped_field_names),)*
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
}
}
};
let debug_type_body = match fields_kind {
FieldsKind::Unit => quote! {
f.debug_struct(#type_struct_debug_ident).finish()
},
FieldsKind::Named(_) => quote! {
f.debug_struct(#type_struct_debug_ident)
#(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))*
.finish()
},
FieldsKind::Unnamed(_) => quote! {
f.debug_tuple(#type_struct_debug_ident)
#(.field(&self.#unskipped_field_names))*
.finish()
},
};
for the_struct_ident in [&type_struct_ident, match_variant_ident]
.into_iter()
.chain(is_for_mask.then_some(mask_value_ident))
{
derive_clone_hash_eq_partialeq_for_struct(
the_struct_ident,
&generics,
&Vec::from_iter(
unskipped_field_names
.iter()
.cloned()
.chain(phantom_data_field_name.clone()),
),
)
.to_tokens(tokens);
}
let check_v = format_ident!("__v");
let field_checks = Vec::from_iter(fields.iter().map(|ParsedField { ty, name, .. }| {
quote_spanned! {ty.span()=>
__check_field(#check_v.#name);
}
}));
if static_.is_some() {
let (impl_generics, type_generics, where_clause) =
static_type_generics.split_for_impl();
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::ty::StaticType for #type_struct_ident #type_generics
#where_clause
{
fn static_type() -> Self {
Self {
#(#unskipped_field_names: ::fayalite::ty::StaticType::static_type(),)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
}
}
}
}
.to_tokens(tokens);
}
if !skip_check_fields {
quote! {
fn __check_field<T: ::fayalite::ty::Value>(_v: T)
where
<T as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = T>,
{}
fn __check_fields #impl_generics(#check_v: #target #type_generics)
#where_clause
{
#(#field_checks)*
}
}
.to_tokens(tokens);
}
let mut builder = Builder::new(builder_struct_ident.clone(), vis.clone());
for field in unskipped_fields.clone() {
builder.insert_field(
field.name.clone(),
|v| {
parse_quote_spanned! {v.span()=>
::fayalite::expr::ToExpr::to_expr(&#v)
}
},
|t| {
parse_quote_spanned! {t.span()=>
::fayalite::expr::Expr<<
<#t as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::Value>
}
},
|t| {
parse_quote_spanned! {t.span()=>
where
#t: ::fayalite::expr::ToExpr,
}
},
);
}
let builder = builder.finish_filling_in_fields();
builder.to_tokens(tokens);
let build_type_fields =
Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| {
let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name;
quote_spanned! {struct_ident.span()=>
#name: ::fayalite::expr::ToExpr::ty(&#builder_field_name)
}
}));
let build_expr_fields =
Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| {
let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name;
quote_spanned! {struct_ident.span()=>
#builder_field_name.to_canonical_dyn()
}
}));
let build_specified_fields = unskipped_fields.clone().map(
|ParsedField {
options: _,
vis: _,
name,
ty,
}| {
let ty = if is_for_mask {
parse_quote_spanned! {name.span()=>
::fayalite::expr::Expr<::fayalite::ty::AsMask<#ty>>
}
} else {
parse_quote_spanned! {name.span()=>
::fayalite::expr::Expr<#ty>
}
};
(name.clone(), ty)
},
);
let build_body = parse_quote_spanned! {struct_ident.span()=>
{
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::BundleLiteral::new_unchecked(
::fayalite::intern::Intern::intern(&[#(
#build_expr_fields,
)*][..]),
#type_struct_ident {
#(#build_type_fields,)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
},
),
)
}
};
builder
.make_build_method(
&Ident::new("build", struct_ident.span()),
build_specified_fields,
&generics,
&parse_quote_spanned! {struct_ident.span()=>
#type_struct_ident #type_generics
},
&parse_quote_spanned! {struct_ident.span()=>
::fayalite::expr::Expr<#target #type_generics>
},
build_body,
)
.to_tokens(tokens);
make_connect_impl(
*connect_inexact,
&generics,
&type_struct_ident,
unskipped_fields.clone().map(|field| {
let ty = &field.ty;
parse_quote_spanned! {field.name.span()=>
<#ty as ::fayalite::expr::ToExpr>::Type
}
}),
)
.to_tokens(tokens);
let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false);
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::__std::fmt::Debug for #type_struct_ident #type_generics
#where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_type_body
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::Type for #type_struct_ident #type_generics
#where_clause
{
type CanonicalType = ::fayalite::bundle::DynBundleType;
type Value = #target #type_generics;
type CanonicalValue = ::fayalite::bundle::DynBundle;
type MaskType = #mask_type_ident #type_generics;
type MaskValue = #mask_value_ident #type_generics;
type MatchVariant = #match_variant_ident #type_generics;
type MatchActiveScope = ();
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
#match_variant_ident #type_generics,
>;
type MatchVariantsIter = ::fayalite::__std::iter::Once<
<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
>;
#[allow(unused_variables)]
fn match_variants<IO: ::fayalite::bundle::BundleValue>(
this: ::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>,
module_builder: &mut ::fayalite::module::ModuleBuilder<
IO,
::fayalite::module::NormalModule,
>,
source_location: ::fayalite::source_location::SourceLocation,
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter
where
<IO as ::fayalite::expr::ToExpr>::Type:
::fayalite::bundle::BundleType<Value = IO>,
{
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(
#match_variant_ident {
#(#unskipped_field_names: this.field(#unskipped_field_name_strs),)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
},
))
}
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
#mask_type_body
}
fn canonical(&self) -> <Self as ::fayalite::ty::Type>::CanonicalType {
let fields = ::fayalite::bundle::BundleType::fields(self);
::fayalite::bundle::DynBundleType::new(fields)
}
fn source_location(&self) -> ::fayalite::source_location::SourceLocation {
::fayalite::source_location::SourceLocation::caller()
}
fn type_enum(&self) -> ::fayalite::ty::TypeEnum {
::fayalite::ty::TypeEnum::BundleType(::fayalite::ty::Type::canonical(self))
}
fn from_canonical_type(t: <Self as ::fayalite::ty::Type>::CanonicalType) -> Self {
let [#(#unskipped_field_vars),*] = *::fayalite::bundle::BundleType::fields(&t)
else {
::fayalite::__std::panic!("wrong number of fields");
};
Self {
#(#unskipped_field_names: #unskipped_field_vars.from_canonical_type_helper(
#unskipped_field_name_strs,
#unskipped_field_flips,
),)*
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
}
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::TypeWithDeref for #type_struct_ident #type_generics
#where_clause
{
#[allow(unused_variables)]
fn expr_deref(this: &::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>)
-> &<Self as ::fayalite::ty::Type>::MatchVariant
{
::fayalite::intern::Interned::<_>::into_inner(
::fayalite::intern::Intern::intern_sized(#match_variant_ident {
#(#unskipped_field_names: this.field(#unskipped_field_name_strs),)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
}),
)
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::bundle::BundleType for #type_struct_ident #type_generics
#where_clause
{
type Builder = #empty_builder_ty;
fn builder() -> <Self as ::fayalite::bundle::BundleType>::Builder {
#empty_builder_ty::new()
}
fn fields(&self) -> ::fayalite::intern::Interned<
[::fayalite::bundle::FieldType<::fayalite::intern::Interned<
dyn ::fayalite::ty::DynCanonicalType,
>>],
>
{
::fayalite::intern::Intern::intern(&[#(
::fayalite::bundle::FieldType {
name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs),
flipped: #unskipped_field_flips,
ty: ::fayalite::ty::DynType::canonical_dyn(
&self.#unskipped_field_names,
),
},
)*][..])
}
fn fields_hint() -> ::fayalite::bundle::FieldsHint {
::fayalite::bundle::FieldsHint::new([#(
::fayalite::bundle::FieldType {
name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs),
flipped: #unskipped_field_flips,
ty: ::fayalite::bundle::TypeHint::<#field_types>::intern_dyn(),
},
)*], false)
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::expr::ToExpr for #target #type_generics
#where_clause
{
type Type = #type_struct_ident #type_generics;
fn ty(&self) -> <Self as ::fayalite::expr::ToExpr>::Type {
#type_struct_ident {
#(#unskipped_field_names: ::fayalite::expr::ToExpr::ty(
&self.#unskipped_field_names,
),)*
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
}
}
fn to_expr(&self) -> ::fayalite::expr::Expr<Self> {
::fayalite::expr::Expr::from_value(self)
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::Value for #target #type_generics
#where_clause
{
fn to_canonical(&self) -> <
<Self as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::CanonicalValue
{
let ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self));
::fayalite::bundle::DynBundle::new(ty, ::fayalite::__std::sync::Arc::new([
#(::fayalite::ty::DynValueTrait::to_canonical_dyn(
&self.#unskipped_field_names,
),)*
]))
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::bundle::BundleValue for #target #type_generics
#where_clause
{
}
}
.to_tokens(tokens);
}
}
impl ToTokens for ParsedStruct {
fn to_tokens(&self, tokens: &mut TokenStream) {
let ParsedStructNames {
ident: struct_ident,
type_struct_debug_ident,
type_struct_ident,
match_variant_ident,
builder_struct_ident,
mask_match_variant_ident,
mask_type_ident,
mask_type_debug_ident,
mask_value_ident,
mask_value_debug_ident,
mask_builder_struct_ident,
} = &self.names;
macro_rules! unwrap_or_set {
($(let $var:ident =? $fallback_value:expr;)*) => {
$(let $var = $var.clone().unwrap_or_else(|| $fallback_value);)*
};
}
unwrap_or_set! {
let type_struct_debug_ident =? format!("{struct_ident}::Type");
let match_variant_ident =? format_ident!("__{}__MatchVariant", struct_ident);
let builder_struct_ident =? format_ident!("__{}__Builder", struct_ident);
let mask_match_variant_ident =? format_ident!("__AsMask__{}__MatchVariant", struct_ident);
let mask_type_ident =? format_ident!("__AsMask__{}__Type", struct_ident);
let mask_type_debug_ident =? format!("AsMask<{struct_ident}>::Type");
let mask_value_ident =? format_ident!("__AsMask__{}", struct_ident);
let mask_value_debug_ident =? format!("AsMask<{struct_ident}>");
let mask_builder_struct_ident =? format_ident!("__AsMask__{}__Builder", struct_ident);
}
let target = get_target(&self.options.body.target, struct_ident);
let names = ParsedStructNames {
ident: struct_ident.clone(),
type_struct_debug_ident: &type_struct_debug_ident,
type_struct_ident: type_struct_ident.clone(),
match_variant_ident: &match_variant_ident,
builder_struct_ident: &builder_struct_ident,
mask_match_variant_ident: &mask_match_variant_ident,
mask_type_ident: &mask_type_ident,
mask_type_debug_ident: &mask_type_debug_ident,
mask_value_ident: &mask_value_ident,
mask_value_debug_ident: &mask_value_debug_ident,
mask_builder_struct_ident: &mask_builder_struct_ident,
};
self.write_body(target, names, false, tokens);
let mask_names = ParsedStructNames {
ident: mask_value_ident.clone(),
type_struct_debug_ident: &mask_type_debug_ident,
type_struct_ident: mask_type_ident.clone(),
match_variant_ident: &mask_match_variant_ident,
builder_struct_ident: &mask_builder_struct_ident,
mask_match_variant_ident: &mask_match_variant_ident,
mask_type_ident: &mask_type_ident,
mask_type_debug_ident: &mask_type_debug_ident,
mask_value_ident: &mask_value_ident,
mask_value_debug_ident: &mask_value_debug_ident,
mask_builder_struct_ident: &mask_builder_struct_ident,
};
self.write_body(mask_value_ident.clone().into(), mask_names, true, tokens);
}
}
pub(crate) fn value_derive_struct(mut item: ItemStruct) -> syn::Result<TokenStream> {
let item = ParsedStruct::parse(&mut item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = quote! {
const _: () = {
#item
};
};
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "value-struct-");
}
Ok(contents)
}
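Similarly, `value_derive_struct` is driven by `#[derive(Value)]` on a struct, with the `StructOption`/`FieldOption` enums at the top of this file parsing the `#[hdl(...)]` attributes. A minimal sketch, assuming the field options are spelled the same way as their names (`flip`, `skip`); the struct and field names here are made up for illustration:

```
use fayalite::{int::UInt, ty::Value};

#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
#[hdl(static)] // same option as the `MyStruct` doc example later in this diff
pub struct HandshakeIo {
    pub data: UInt<8>,
    #[hdl(flip)] // assumed spelling of the `Flip(flip)` field option parsed above
    pub ready: UInt<1>,
}
```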

View file

@ -16,4 +16,4 @@ version.workspace = true
proc-macro = true
[dependencies]
fayalite-proc-macros-impl.workspace = true
fayalite-proc-macros-impl = { workspace = true }

View file

@ -2,7 +2,7 @@
// See Notices.txt for copyright information
//! proc macros for `fayalite`
//!
//! see `fayalite::hdl_module` and `fayalite::hdl` for docs
//! see `fayalite::hdl_module` and `fayalite::ty::Value` for docs
// intentionally not documented here, see `fayalite::hdl_module` for docs
#[proc_macro_attribute]
@ -10,19 +10,16 @@ pub fn hdl_module(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
match fayalite_proc_macros_impl::hdl_module(attr.into(), item.into()) {
match fayalite_proc_macros_impl::module(attr.into(), item.into()) {
Ok(retval) => retval.into(),
Err(err) => err.into_compile_error().into(),
}
}
// intentionally not documented here, see `fayalite::hdl` for docs
#[proc_macro_attribute]
pub fn hdl(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
match fayalite_proc_macros_impl::hdl_attr(attr.into(), item.into()) {
// intentionally not documented here, see `fayalite::ty::Value` for docs
#[proc_macro_derive(Value, attributes(hdl))]
pub fn value_derive(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
match fayalite_proc_macros_impl::value_derive(item.into()) {
Ok(retval) => retval.into(),
Err(err) => err.into_compile_error().into(),
}

View file

@ -13,11 +13,11 @@ rust-version.workspace = true
version.workspace = true
[dependencies]
indexmap.workspace = true
prettyplease.workspace = true
proc-macro2.workspace = true
quote.workspace = true
serde.workspace = true
serde_json.workspace = true
syn.workspace = true
thiserror.workspace = true
indexmap = { workspace = true }
prettyplease = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
syn = { workspace = true }
thiserror = { workspace = true }

View file

@ -14,33 +14,25 @@ rust-version.workspace = true
version.workspace = true
[dependencies]
bitvec.workspace = true
blake3.workspace = true
clap.workspace = true
ctor.workspace = true
eyre.workspace = true
fayalite-proc-macros.workspace = true
hashbrown.workspace = true
jobslot.workspace = true
num-bigint.workspace = true
num-traits.workspace = true
os_pipe.workspace = true
petgraph.workspace = true
serde_json.workspace = true
serde.workspace = true
tempfile.workspace = true
vec_map.workspace = true
which.workspace = true
bitvec = { workspace = true }
hashbrown = { workspace = true }
num-bigint = { workspace = true }
num-traits = { workspace = true }
fayalite-proc-macros = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
clap = { version = "4.5.9", features = ["derive", "env"] }
eyre = "0.6.12"
which = "6.0.1"
[dev-dependencies]
trybuild.workspace = true
trybuild = { workspace = true }
[build-dependencies]
fayalite-visit-gen.workspace = true
fayalite-visit-gen = { workspace = true }
[features]
unstable-doc = []
unstable-test-hasher = []
[package.metadata.docs.rs]
features = ["unstable-doc"]

View file

@ -4,10 +4,6 @@ use fayalite_visit_gen::parse_and_generate;
use std::{env, fs, path::Path};
fn main() {
println!("cargo::rustc-check-cfg=cfg(todo)");
println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)");
println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)");
println!("cargo::rustc-cfg=cfg_true_for_tests");
let path = "visit_types.json";
println!("cargo::rerun-if-changed={path}");
println!("cargo::rerun-if-changed=build.rs");

View file

@ -1,7 +1,11 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use clap::Parser;
use fayalite::{cli, prelude::*};
use fayalite::{
cli,
clock::{Clock, ClockDomain},
hdl_module,
int::{DynUInt, DynUIntType, IntCmp, IntTypeTrait, UInt},
reset::{SyncReset, ToReset},
};
#[hdl_module]
fn blinky(clock_frequency: u64) {
@ -15,21 +19,21 @@ fn blinky(clock_frequency: u64) {
rst: rst.to_reset(),
};
let max_value = clock_frequency / 2 - 1;
let int_ty = UInt::range_inclusive(0..=max_value);
let int_ty = DynUIntType::range_inclusive(0..=max_value);
#[hdl]
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
let counter: DynUInt = m.reg_builder().clock_domain(cd).reset(int_ty.literal(0));
#[hdl]
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
let output_reg: UInt<1> = m.reg_builder().clock_domain(cd).reset_default();
#[hdl]
if counter_reg.cmp_eq(max_value) {
connect_any(counter_reg, 0u8);
connect(output_reg, !output_reg);
if counter.cmp_eq(max_value) {
m.connect_any(counter, 0u8);
m.connect(output_reg, !output_reg);
} else {
connect_any(counter_reg, counter_reg + 1_hdl_u1);
m.connect_any(counter, counter + 1_hdl_u1);
}
#[hdl]
let led: Bool = m.output();
connect(led, output_reg);
let led: UInt<1> = m.output();
m.connect(led, output_reg);
}
#[derive(Parser)]

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
#![doc = include_str!("../README.md")]
//!
@ -12,7 +10,7 @@
//! function to add inputs/outputs and other components to that module.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt};
//! #
//! #[hdl_module]
//! pub fn example_module() {
@ -20,7 +18,7 @@
//! let an_input: UInt<10> = m.input(); // create an input that is a 10-bit unsigned integer
//! #[hdl]
//! let some_output: UInt<10> = m.output();
//! connect(some_output, an_input); // assigns the value of `an_input` to `some_output`
//! m.connect(some_output, an_input); // assigns the value of `an_input` to `some_output`
//! }
//! ```

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Fayalite Modules
//!
//! The [`#[hdl_module]`][`crate::hdl_module`] attribute is applied to a Rust

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! These are for when you want to use modules written in
//! some other language, such as Verilog.
//!
@ -13,6 +11,8 @@
//! * [`parameter_raw_verilog()`][`ModuleBuilder::parameter_raw_verilog`]
//! * [`parameter()`][`ModuleBuilder::parameter`]
//!
//! These use the [`ExternModule`][`crate::module::ExternModule`] tag type.
//!
//! [inputs/outputs]: crate::_docs::modules::module_bodies::hdl_let_statements::inputs_outputs
#[allow(unused)]

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Module Function Bodies
//!
//! The `#[hdl_module]` attribute lets you have statements/expressions with `#[hdl]` annotations

View file

@ -1,26 +1,24 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl]` Array Expressions
//!
//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][type@Array] expression:
//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][Array] expression:
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, array::Array};
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let v: UInt<8> = m.input();
//! #[hdl]
//! let w: Array<UInt<8>, 4> = wire();
//! connect(
//! let w: Array<[UInt<8>; 4]> = m.wire();
//! m.connect(
//! w,
//! #[hdl]
//! [4_hdl_u8, v, 3_hdl_u8, (v + 7_hdl_u8).cast_to_static()] // you can make an array like this
//! [4_hdl_u8, v, 3_hdl_u8, (v + 7_hdl_u8).cast()] // you can make an array like this
//! );
//! connect(
//! m.connect(
//! w,
//! #[hdl]
//! [(v + 1_hdl_u8).cast_to_static(); 4] // or you can make an array repeat like this
//! [(v + 1_hdl_u8).cast(); 4] // or you can make an array repeat like this
//! );
//! # }
//! ```

View file

@ -1,11 +1,10 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl] if` Statements
//!
//! `#[hdl] if` statements behave similarly to Rust `if` statements, except they end up as muxes
//! and stuff in the final hardware instead of being run when the fayalite module is being created.
//!
//! The condition of an `#[hdl] if` statement must have type [`Expr<Bool>`][Bool].
//! The condition of an `#[hdl] if` statement must have type [`UInt<1>`] or [`DynUInt`] with
//! `width() == 1` or be an [expression][Expr] of one of those types.
//!
//! `#[hdl] if` statements' bodies must evaluate to type `()` for now.
//!
@ -15,4 +14,7 @@
//! [match]: super::hdl_match_statements
#[allow(unused)]
use crate::int::Bool;
use crate::{
expr::Expr,
int::{DynUInt, UInt},
};
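A minimal sketch of an `#[hdl] if` with a 1-bit condition, using the post-change `m.connect` API and modeled on the wires and blinky examples elsewhere in this diff:

```
use fayalite::{hdl_module, int::UInt};

#[hdl_module]
fn conditional_example() {
    #[hdl]
    let cond: UInt<1> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    // the 1-bit condition selects which connection wins, mux-style
    #[hdl]
    if cond {
        m.connect(out, 0x45_hdl_u8);
    } else {
        m.connect(out, 0x23_hdl_u8);
    }
}
```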

View file

@ -1,8 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ## `#[hdl] let` statements
pub mod destructuring;
pub mod inputs_outputs;
pub mod instances;
pub mod memories;

View file

@ -1,33 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Destructuring Let
//!
//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns.
//!
//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now,
//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`.
//!
//! ```
//! # use fayalite::prelude::*;
//! #[hdl]
//! struct MyStruct {
//! a: UInt<8>,
//! b: Bool,
//! }
//!
//! #[hdl_module]
//! fn my_module() {
//! #[hdl]
//! let my_input: MyStruct = m.input();
//! #[hdl]
//! let my_output: UInt<8> = m.input();
//! #[hdl]
//! let MyStruct { a, b } = my_input;
//! #[hdl]
//! if b {
//! connect(my_output, a);
//! } else {
//! connect(my_output, 0_hdl_u8);
//! }
//! }
//! ```

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Inputs/Outputs
//!
//! Inputs/Outputs create a Rust variable with type [`Expr<T>`] where `T` is the type of the input/output.
@ -8,14 +6,14 @@
//! so you should read it.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, expr::Expr, array::Array};
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let my_input: UInt<10> = m.input();
//! let _: Expr<UInt<10>> = my_input; // my_input has type Expr<UInt<10>>
//! #[hdl]
//! let my_output: Array<UInt<10>, 3> = m.output();
//! let my_output: Array<[UInt<10>; 3]> = m.output();
//! # }
//! ```
//!

View file

@ -1,20 +1,18 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Module Instances
//!
//! module instances are kinda like the hardware equivalent of calling a function,
//! you can create them like so:
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, array::Array};
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let my_instance = instance(some_module());
//! let my_instance = m.instance(some_module());
//! // now you can use `my_instance`'s inputs/outputs like so:
//! #[hdl]
//! let v: UInt<3> = m.input();
//! connect(my_instance.a, v);
//! m.connect(my_instance.a, v);
//! #[hdl_module]
//! fn some_module() {
//! #[hdl]

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Memories
//!
//! Memories are optimized for storing large amounts of data.
@ -9,12 +7,12 @@
//!
//! There are several different ways to create a memory:
//!
//! ## using [`memory()`]
//! ## using [`ModuleBuilder::memory()`]
//!
//! This way you have to set the [`depth`][`MemBuilder::depth`] separately.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, clock::ClockDomain};
//! # #[hdl_module]
//! # fn module() {
//! // first, we need some IO
@ -27,45 +25,45 @@
//!
//! // now create the memory
//! #[hdl]
//! let mut my_memory = memory();
//! let mut my_memory = m.memory();
//! my_memory.depth(256); // the memory has 256 elements
//!
//! let read_port = my_memory.new_read_port();
//!
//! // connect up the read port
//! connect_any(read_port.addr, read_addr);
//! connect(read_port.en, true);
//! connect(read_port.clk, cd.clk);
//! connect(read_data, read_port.data);
//! m.connect_any(read_port.addr, read_addr);
//! m.connect(read_port.en, 1_hdl_u1);
//! m.connect(read_port.clk, cd.clk);
//! m.connect(read_data, read_port.data);
//!
//! // we need more IO for the write port
//! #[hdl]
//! let write_addr: UInt<8> = m.input();
//! #[hdl]
//! let do_write: Bool = m.input();
//! let do_write: UInt<1> = m.input();
//! #[hdl]
//! let write_data: UInt<8> = m.input();
//!
//! let write_port = my_memory.new_write_port();
//!
//! connect_any(write_port.addr, write_addr);
//! connect(write_port.en, do_write);
//! connect(write_port.clk, cd.clk);
//! connect(write_port.data, write_data);
//! connect(write_port.mask, true);
//! m.connect_any(write_port.addr, write_addr);
//! m.connect(write_port.en, do_write);
//! m.connect(write_port.clk, cd.clk);
//! m.connect(write_port.data, write_data);
//! m.connect(write_port.mask, 1_hdl_u1);
//! # }
//! ```
//!
//! ## using [`memory_array()`]
//! ## using [`ModuleBuilder::memory_array()`]
//!
//! this allows you to specify the memory's underlying array type directly.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, memory::MemBuilder};
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let mut my_memory: MemBuilder<UInt<8>, ConstUsize<256>> = memory_array();
//! let mut my_memory: MemBuilder<[UInt<8>; 256]> = m.memory_array();
//!
//! let read_port = my_memory.new_read_port();
//! // ...
@ -74,22 +72,25 @@
//! # }
//! ```
//!
//! ## using [`memory_with_init()`]
//! ## using [`ModuleBuilder::memory_with_init()`]
//!
//! This allows you to deduce the memory's array type from the data used to initialize the memory.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt};
//! # #[hdl_module]
//! # fn module() {
//! # #[hdl]
//! # let read_addr: UInt<2> = m.input();
//! #[hdl]
//! let mut my_memory = memory_with_init([0x12_hdl_u8, 0x34_hdl_u8, 0x56_hdl_u8, 0x78_hdl_u8]);
//! let mut my_memory = m.memory_with_init(
//! #[hdl]
//! [0x12_hdl_u8, 0x34_hdl_u8, 0x56_hdl_u8, 0x78_hdl_u8],
//! );
//!
//! let read_port = my_memory.new_read_port();
//! // note that `read_addr` is `UInt<2>` since the memory only has 4 elements
//! connect_any(read_port.addr, read_addr);
//! m.connect_any(read_port.addr, read_addr);
//! // ...
//! let write_port = my_memory.new_write_port();
//! // ...
@ -97,4 +98,4 @@
//! ```
#[allow(unused)]
use crate::prelude::*;
use crate::{memory::MemBuilder, module::ModuleBuilder};

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Registers
//!
//! Registers are memory devices that will change their state only on a clock
@ -9,23 +7,20 @@
//!
//! Registers follow [connection semantics], which are unlike assignments in software, so you should read it.
//!
//! By convention, register names end in `_reg` -- this helps you tell which values are written
//! immediately or on the next clock edge when connecting to them.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, array::Array, clock::ClockDomain};
//! # #[hdl_module]
//! # fn module() {
//! # #[hdl]
//! # let v: Bool = m.input();
//! # let v: UInt<1> = m.input();
//! #[hdl]
//! let cd: ClockDomain = m.input();
//! #[hdl]
//! let my_reg: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8);
//! let my_register: UInt<8> = m.reg_builder().clock_domain(cd).reset(8_hdl_u8);
//! #[hdl]
//! if v {
//! // my_reg is only changed when both `v` is set and `cd`'s clock edge occurs.
//! connect(my_reg, 0x45_hdl_u8);
//! // my_register is only changed when both `v` is set and `cd`'s clock edge occurs.
//! m.connect(my_register, 0x45_hdl_u8);
//! }
//! # }
//! ```

View file

@ -1,29 +1,27 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Wires
//!
//! Wires are kinda like variables, but unlike registers,
//! they have no memory (they're combinatorial).
//! You must [connect] to all wires, so they have a defined value.
//! You must [connect][`ModuleBuilder::connect`] to all wires, so they have a defined value.
//!
//! Wires create a Rust variable with type [`Expr<T>`] where `T` is the type of the wire.
//!
//! Wires follow [connection semantics], which are unlike assignments in software, so you should read it.
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt, array::Array, clock::ClockDomain};
//! # #[hdl_module]
//! # fn module() {
//! # #[hdl]
//! # let v: Bool = m.input();
//! # let v: UInt<1> = m.input();
//! #[hdl]
//! let my_wire: UInt<8> = wire();
//! let my_wire: UInt<8> = m.wire();
//! #[hdl]
//! if v {
//! connect(my_wire, 0x45_hdl_u8);
//! m.connect(my_wire, 0x45_hdl_u8);
//! } else {
//! // wires must be connected to under all conditions
//! connect(my_wire, 0x23_hdl_u8);
//! m.connect(my_wire, 0x23_hdl_u8);
//! }
//! # }
//! ```
@ -31,4 +29,4 @@
//! [connection semantics]: crate::_docs::semantics::connection_semantics
#[allow(unused)]
use crate::prelude::*;
use crate::{expr::Expr, module::ModuleBuilder};

View file

@ -1,10 +1,9 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `_hdl`-suffixed literals
//!
//! You can have integer literals with an arbitrary number of bits like so:
//!
//! `_hdl`-suffixed literals have type [`Expr<UInt<N>>`] or [`Expr<SInt<N>>`].
//! `_hdl`-suffixed literals have type [`Expr<UInt<N>>`] or [`Expr<SInt<N>>`]
//! ... which are basically just [`UInt<N>`] or [`SInt<N>`] converted to an expression.
//!
//! ```
//! # #[fayalite::hdl_module]

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl] match` Statements
//!
//! `#[hdl] match` statements behave similarly to Rust `match` statements, except they end up as muxes
@ -7,5 +5,5 @@
//!
//! `#[hdl] match` statements' bodies must evaluate to type `()` for now.
//!
//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now,
//! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`.
//! `#[hdl] match` statements can only match one level of struct/enum pattern for now,
//! e.g. you can match with the pattern `Some(v)`, but not `Some(Some(_))`.
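A minimal sketch of the one-level matching described above, reusing the two-variant `MyEnum` from the struct/variant expressions page later in this diff; plain Rust pattern syntax is assumed here, as the `Some(v)` wording suggests:

```
use fayalite::{hdl_module, int::UInt};

#[hdl_module]
fn match_example() {
    // `MyEnum` (variants `A` and `B { v: UInt<32> }`) is assumed to be in scope.
    #[hdl]
    let my_enum: MyEnum = m.input();
    #[hdl]
    let out: UInt<32> = m.output();
    #[hdl]
    match my_enum {
        // only one level of enum pattern is supported for now
        MyEnum::A => m.connect(out, 0_hdl_u32),
        MyEnum::B { v } => m.connect(out, v),
    }
}
```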

View file

@ -1,27 +1,28 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl]` Struct/Variant Expressions
//!
//! Note: Structs are also known as [Bundles] when used in Fayalite; the Bundle name comes from [FIRRTL].
//!
//! [Bundles]: crate::bundle::BundleType
//! [Bundles]: crate::bundle::BundleValue
//! [FIRRTL]: https://github.com/chipsalliance/firrtl-spec
//!
//! `#[hdl]` can be used on Struct Expressions to construct a value of that
//! struct's type. They can also be used on tuples.
//! `#[hdl]` can be used on Struct/Variant Expressions to construct a value of that
//! struct/variant's type. They can also be used on tuples.
//!
//! ```
//! # use fayalite::prelude::*;
//! #[hdl]
//! # use fayalite::{hdl_module, int::UInt, array::Array, ty::Value};
//! #[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
//! #[hdl(static)]
//! pub struct MyStruct {
//! pub a: UInt<8>,
//! pub b: UInt<16>,
//! }
//!
//! #[hdl]
//! #[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
//! pub enum MyEnum {
//! A,
//! B(UInt<32>),
//! B {
//! v: UInt<32>,
//! },
//! }
//!
//! # #[hdl_module]
@ -29,8 +30,8 @@
//! #[hdl]
//! let v: UInt<8> = m.input();
//! #[hdl]
//! let my_struct: MyStruct = wire();
//! connect(
//! let my_struct: MyStruct = m.wire();
//! m.connect(
//! my_struct,
//! #[hdl]
//! MyStruct {
@ -39,14 +40,15 @@
//! },
//! );
//! #[hdl]
//! let my_enum: MyEnum = wire();
//! connect(
//! let my_enum: MyEnum = m.wire();
//! m.connect(
//! my_enum,
//! MyEnum.B(12345678_hdl_u32),
//! #[hdl]
//! MyEnum::B { v: 12345678_hdl_u32 },
//! );
//! #[hdl]
//! let some_tuple: (UInt<4>, UInt<12>) = wire();
//! connect(
//! let some_tuple: (UInt<4>, UInt<12>) = m.wire();
//! m.connect(
//! some_tuple,
//! #[hdl]
//! (12_hdl_u4, 3421_hdl_u12),
@ -55,4 +57,4 @@
//! ```
#[allow(unused)]
use crate::prelude::*;
use crate::array::Array;

View file

@ -1,6 +1,6 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Normal Modules
//!
//! These use the [`NormalModule`][`crate::module::NormalModule`] tag type.
//!
//! See also: [Extern Modules][`super::extern_module`]
//! See also: [Module Bodies][`super::module_bodies`]

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Fayalite Semantics
//!
//! Fayalite's semantics are based on [FIRRTL]. Due to their significance, some of the semantics are also documented here.

View file

@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Connection Semantics
//!
//! Fayalite's connection semantics are unlike assignments in software, so be careful!
@ -22,60 +20,62 @@
//! Connection Semantics Example:
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt};
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let a: UInt<8> = wire();
//! let a: UInt<8> = m.wire();
//! #[hdl]
//! let b: UInt<8> = m.output();
//!
//! // doesn't actually affect anything, since `a` is completely overwritten later
//! connect(a, 5_hdl_u8);
//! m.connect(a, 5_hdl_u8);
//!
//! // here `a` has value `7` since the last connection assigns
//! // `7` to `a`, so `b` has value `7` too.
//! connect(b, a);
//! m.connect(b, a);
//!
//! // this is the last `connect` to `a`, so this `connect` determines `a`'s value
//! connect(a, 7_hdl_u8);
//! m.connect(a, 7_hdl_u8);
//! # }
//! ```
//!
//! # Conditional Connection Semantics
//!
//! ```
//! # use fayalite::prelude::*;
//! # use fayalite::{hdl_module, int::UInt};
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let cond: Bool = m.input();
//! let cond: UInt<1> = m.input();
//! #[hdl]
//! let a: UInt<8> = wire();
//! let a: UInt<8> = m.wire();
//! #[hdl]
//! let b: UInt<8> = m.output();
//!
//! // this is the last `connect` to `a` when `cond` is `0`
//! connect(a, 5_hdl_u8);
//! m.connect(a, 5_hdl_u8);
//!
//! // here `a` has value `7` if `cond` is `1` since the last connection assigns
//! // `7` to `a`, so `b` has value `7` too, otherwise `a` (and therefore `b`)
//! // have value `5` since then the connection assigning `7` is in a
//! // conditional block where the condition doesn't hold.
//! connect(b, a);
//! m.connect(b, a);
//!
//! #[hdl]
//! if cond {
//! // this is the last `connect` to `a` when `cond` is `1`
//! connect(a, 7_hdl_u8);
//! m.connect(a, 7_hdl_u8);
//! }
//! # }
//! ```
//!
//! [conditional block]: self#conditional-connection-semantics
//! [`connect()`]: ModuleBuilder::connect
//! [`connect_any()`]: ModuleBuilder::connect_any
//! [wire]: crate::_docs::modules::module_bodies::hdl_let_statements::wires
//! [if]: crate::_docs::modules::module_bodies::hdl_if_statements
//! [FIRRTL]: https://github.com/chipsalliance/firrtl-spec
#[allow(unused)]
use crate::prelude::*;
use crate::module::ModuleBuilder;

View file

@ -1,18 +1,17 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
expr::target::Target,
expr::Target,
intern::{Intern, Interned},
};
use serde::{Deserialize, Serialize};
use std::{
fmt,
hash::{Hash, Hasher},
iter::FusedIterator,
ops::Deref,
};
#[derive(Clone, Debug)]
#[derive(Clone)]
struct CustomFirrtlAnnotationFieldsImpl {
value: serde_json::Map<String, serde_json::Value>,
serialized: Interned<str>,
@ -119,87 +118,11 @@ pub struct CustomFirrtlAnnotation {
pub additional_fields: CustomFirrtlAnnotationFields,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct DontTouchAnnotation;
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct SVAttributeAnnotation {
pub text: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct BlackBoxInlineAnnotation {
pub path: Interned<str>,
pub text: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct BlackBoxPathAnnotation {
pub path: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct DocStringAnnotation {
pub text: Interned<str>,
}
macro_rules! make_annotation_enum {
(
$(#[$meta:meta])*
$vis:vis enum $Annotation:ident {
$($Variant:ident($T:ident),)*
}
) => {
$(#[$meta])*
$vis enum $Annotation {
$($Variant($T),)*
}
$(impl IntoAnnotations for $T {
type IntoAnnotations = [$Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(self)]
}
}
impl IntoAnnotations for &'_ $T {
type IntoAnnotations = [$Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(*self)]
}
}
impl IntoAnnotations for &'_ mut $T {
type IntoAnnotations = [$Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(*self)]
}
}
impl IntoAnnotations for Box<$T> {
type IntoAnnotations = [$Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(*self)]
}
})*
};
}
make_annotation_enum! {
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum Annotation {
DontTouch(DontTouchAnnotation),
SVAttribute(SVAttributeAnnotation),
BlackBoxInline(BlackBoxInlineAnnotation),
BlackBoxPath(BlackBoxPathAnnotation),
DocString(DocStringAnnotation),
CustomFirrtl(CustomFirrtlAnnotation),
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum Annotation {
DontTouch,
CustomFirrtl(CustomFirrtlAnnotation),
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
@ -264,70 +187,10 @@ impl IntoAnnotations for &'_ mut Annotation {
}
}
pub struct IterIntoAnnotations<T: Iterator<Item: IntoAnnotations>> {
outer: T,
inner: Option<<<T::Item as IntoAnnotations>::IntoAnnotations as IntoIterator>::IntoIter>,
}
impl<T: Iterator<Item: IntoAnnotations>> Iterator for IterIntoAnnotations<T> {
type Item = Annotation;
fn next(&mut self) -> Option<Self::Item> {
loop {
if let Some(inner) = &mut self.inner {
let Some(retval) = inner.next() else {
self.inner = None;
continue;
};
return Some(retval);
} else {
self.inner = Some(self.outer.next()?.into_annotations().into_iter());
}
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
if let (0, Some(0)) = self.outer.size_hint() {
self.inner
.as_ref()
.map(|v| v.size_hint())
.unwrap_or((0, Some(0)))
} else {
(
self.inner.as_ref().map(|v| v.size_hint().0).unwrap_or(0),
None,
)
}
}
fn fold<B, F>(self, init: B, f: F) -> B
where
Self: Sized,
F: FnMut(B, Self::Item) -> B,
{
self.inner
.into_iter()
.chain(self.outer.map(|v| v.into_annotations().into_iter()))
.flatten()
.fold(init, f)
}
}
impl<
T: FusedIterator<
Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>,
>,
> FusedIterator for IterIntoAnnotations<T>
{
}
impl<T: IntoIterator<Item: IntoAnnotations>> IntoAnnotations for T {
type IntoAnnotations = IterIntoAnnotations<T::IntoIter>;
impl<T: IntoIterator<Item = Annotation>> IntoAnnotations for T {
type IntoAnnotations = Self;
fn into_annotations(self) -> Self::IntoAnnotations {
IterIntoAnnotations {
outer: self.into_iter(),
inner: None,
}
self
}
}
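Editor's note: the `IterIntoAnnotations` iterator above lazily flattens an iterator of items that each expand into one or more annotations. Below is a minimal standalone sketch of that pattern, not taken from the diff; the simplified `Annotation` enum, the `Iter` associated type, and `main` are illustrative stand-ins for fayalite's interned types.

// Editor's sketch (not part of the diff): lazy flattening of an outer iterator
// whose items each expand into annotations, as `IterIntoAnnotations` does above.
#[derive(Debug, Clone, PartialEq, Eq)]
enum Annotation {
    DontTouch,
    DocString(&'static str),
}

trait IntoAnnotations {
    type Iter: Iterator<Item = Annotation>;
    fn into_annotations(self) -> Self::Iter;
}

impl IntoAnnotations for Annotation {
    type Iter = std::iter::Once<Annotation>;
    fn into_annotations(self) -> Self::Iter {
        std::iter::once(self)
    }
}

struct IterIntoAnnotations<T: Iterator>
where
    T::Item: IntoAnnotations,
{
    outer: T,
    inner: Option<<T::Item as IntoAnnotations>::Iter>,
}

impl<T: Iterator> Iterator for IterIntoAnnotations<T>
where
    T::Item: IntoAnnotations,
{
    type Item = Annotation;
    fn next(&mut self) -> Option<Annotation> {
        loop {
            if let Some(inner) = &mut self.inner {
                if let Some(annotation) = inner.next() {
                    return Some(annotation);
                }
                // Current inner iterator is exhausted; move on to the next item.
                self.inner = None;
            } else {
                // Pull the next outer item and expand it into annotations.
                self.inner = Some(self.outer.next()?.into_annotations());
            }
        }
    }
}

fn main() {
    let annotations = IterIntoAnnotations {
        outer: vec![Annotation::DontTouch, Annotation::DocString("top module")].into_iter(),
        inner: None,
    };
    assert_eq!(annotations.count(), 2);
}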

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,27 +1,15 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
bundle::{Bundle, BundleType},
firrtl::{self, ExportOptions},
bundle::{BundleType, BundleValue, DynBundle},
firrtl,
intern::Interned,
module::Module,
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
};
use clap::{
builder::{OsStringValueParser, TypedValueParser},
Parser, Subcommand, ValueEnum, ValueHint,
Args, Parser, Subcommand, ValueEnum, ValueHint,
};
use eyre::{eyre, Report};
use serde::{Deserialize, Serialize};
use std::{
error,
ffi::OsString,
fmt::{self, Write},
fs, io, mem,
path::{Path, PathBuf},
process,
};
use tempfile::TempDir;
use std::{error, ffi::OsString, fmt, io, path::PathBuf, process};
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
@ -49,157 +37,80 @@ impl From<io::Error> for CliError {
pub trait RunPhase<Arg> {
type Output;
fn run(&self, arg: Arg) -> Result<Self::Output> {
self.run_with_job(arg, &mut AcquiredJob::acquire())
}
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
fn run(&self, arg: Arg) -> Result<Self::Output>;
}
#[derive(Parser, Debug, Clone)]
#[derive(Args, Debug)]
#[non_exhaustive]
pub struct BaseArgs {
/// the directory to put the generated main output file and associated files in
#[arg(short, long, value_hint = ValueHint::DirPath, required = true)]
pub output: Option<PathBuf>,
#[arg(short, long, value_hint = ValueHint::DirPath)]
pub output: PathBuf,
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
#[arg(long)]
pub file_stem: Option<String>,
#[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")]
pub keep_temp_dir: bool,
#[arg(skip = false)]
pub redirect_output_for_rust_test: bool,
}
impl BaseArgs {
fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option<TempDir>)> {
let (dir_path, temp_dir) = match &self.output {
Some(output) => (output.clone(), None),
None => {
let temp_dir = TempDir::new()?;
if self.keep_temp_dir {
let temp_dir = temp_dir.into_path();
println!("created temporary directory: {}", temp_dir.display());
(temp_dir, None)
} else {
(temp_dir.path().to_path_buf(), Some(temp_dir))
}
}
};
Ok((
firrtl::FileBackend {
dir_path,
top_fir_file_stem: self.file_stem.clone(),
circuit_name: None,
},
temp_dir,
))
}
/// handles possibly redirecting the command's output for Rust tests
pub fn run_external_command(
&self,
_acquired_job: &mut AcquiredJob,
mut command: process::Command,
mut captured_output: Option<&mut String>,
) -> io::Result<process::ExitStatus> {
if self.redirect_output_for_rust_test || captured_output.is_some() {
let (reader, writer) = os_pipe::pipe()?;
let mut reader = io::BufReader::new(reader);
command.stderr(writer.try_clone()?);
command.stdout(writer); // must not leave writer around after spawning child
command.stdin(process::Stdio::null());
let mut child = command.spawn()?;
drop(command); // close writers
Ok(loop {
let status = child.try_wait()?;
streaming_read_utf8(&mut reader, |s| {
if let Some(captured_output) = captured_output.as_deref_mut() {
captured_output.push_str(s);
}
// use print! so output goes to Rust test output capture
print!("{s}");
io::Result::Ok(())
})?;
if let Some(status) = status {
break status;
}
})
} else {
command.status()
pub fn to_firrtl_file_backend(&self) -> firrtl::FileBackend {
firrtl::FileBackend {
dir_path: self.output.clone(),
top_fir_file_stem: self.file_stem.clone(),
}
}
}
#[derive(Parser, Debug, Clone)]
#[derive(Args, Debug)]
#[non_exhaustive]
pub struct FirrtlArgs {
#[command(flatten)]
pub base: BaseArgs,
#[command(flatten)]
pub export_options: ExportOptions,
}
#[derive(Debug)]
#[non_exhaustive]
pub struct FirrtlOutput {
pub file_stem: String,
pub top_module: String,
pub output_dir: PathBuf,
pub temp_dir: Option<TempDir>,
}
impl FirrtlOutput {
pub fn file_with_ext(&self, ext: &str) -> PathBuf {
let mut retval = self.output_dir.join(&self.file_stem);
retval.set_extension(ext);
pub fn firrtl_file(&self, args: &FirrtlArgs) -> PathBuf {
let mut retval = args.base.output.join(&self.file_stem);
retval.set_extension("fir");
retval
}
pub fn firrtl_file(&self) -> PathBuf {
self.file_with_ext("fir")
}
}
impl FirrtlArgs {
fn run_impl(
&self,
top_module: Module<Bundle>,
_acquired_job: &mut AcquiredJob,
) -> Result<FirrtlOutput> {
let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?;
fn run_impl(&self, top_module: Module<DynBundle>) -> Result<FirrtlOutput> {
let firrtl::FileBackend {
top_fir_file_stem,
circuit_name,
dir_path,
} = firrtl::export(file_backend, &top_module, self.export_options)?;
top_fir_file_stem, ..
} = firrtl::export(self.base.to_firrtl_file_backend(), &top_module)?;
Ok(FirrtlOutput {
file_stem: top_fir_file_stem.expect(
"export is known to set the file stem from the circuit name if not provided",
),
top_module: circuit_name.expect("export is known to set the circuit name"),
output_dir: dir_path,
temp_dir,
})
}
}
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
impl<T: BundleValue> RunPhase<Module<T>> for FirrtlArgs
where
T::Type: BundleType<Value = T>,
{
type Output = FirrtlOutput;
fn run_with_job(
&self,
top_module: Module<T>,
acquired_job: &mut AcquiredJob,
) -> Result<Self::Output> {
self.run_impl(top_module.canonical(), acquired_job)
fn run(&self, top_module: Module<T>) -> Result<Self::Output> {
self.run_impl(top_module.canonical())
}
}
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
impl<T: BundleValue> RunPhase<Interned<Module<T>>> for FirrtlArgs
where
T::Type: BundleType<Value = T>,
{
type Output = FirrtlOutput;
fn run_with_job(
&self,
top_module: Interned<Module<T>>,
acquired_job: &mut AcquiredJob,
) -> Result<Self::Output> {
self.run_with_job(*top_module, acquired_job)
fn run(&self, top_module: Interned<Module<T>>) -> Result<Self::Output> {
self.run(*top_module)
}
}
@ -215,22 +126,7 @@ pub enum VerilogDialect {
Yosys,
}
impl fmt::Display for VerilogDialect {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.as_str())
}
}
impl VerilogDialect {
pub fn as_str(self) -> &'static str {
match self {
VerilogDialect::Questa => "questa",
VerilogDialect::Spyglass => "spyglass",
VerilogDialect::Verilator => "verilator",
VerilogDialect::Vivado => "vivado",
VerilogDialect::Yosys => "yosys",
}
}
pub fn firtool_extra_args(self) -> &'static [&'static str] {
match self {
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
@ -248,7 +144,7 @@ impl VerilogDialect {
}
}
#[derive(Parser, Debug, Clone)]
#[derive(Args, Debug)]
#[non_exhaustive]
pub struct VerilogArgs {
#[command(flatten)]
@ -258,7 +154,7 @@ pub struct VerilogArgs {
default_value = "firtool",
env = "FIRTOOL",
value_hint = ValueHint::CommandName,
value_parser = OsStringValueParser::new().try_map(which)
value_parser = OsStringValueParser::new().try_map(which::which)
)]
pub firtool: PathBuf,
#[arg(long)]
@ -266,94 +162,39 @@ pub struct VerilogArgs {
/// adapt the generated Verilog for a particular toolchain
#[arg(long)]
pub verilog_dialect: Option<VerilogDialect>,
#[arg(long, short = 'g')]
pub debug: bool,
}
#[derive(Debug)]
#[non_exhaustive]
pub struct VerilogOutput {
pub firrtl: FirrtlOutput,
pub verilog_files: Vec<PathBuf>,
pub contents_hash: Option<blake3::Hash>,
}
impl VerilogOutput {
pub fn main_verilog_file(&self) -> PathBuf {
self.firrtl.file_with_ext("v")
}
fn unadjusted_verilog_file(&self) -> PathBuf {
self.firrtl.file_with_ext("unadjusted.v")
pub fn verilog_file(&self, args: &VerilogArgs) -> PathBuf {
let mut retval = args.firrtl.base.output.join(&self.firrtl.file_stem);
retval.set_extension("v");
retval
}
}
impl VerilogArgs {
fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result<VerilogOutput> {
let input = fs::read_to_string(output.unadjusted_verilog_file())?;
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
let file_separator_suffix = "\" ----- 8< -----\n\n";
let mut input = &*input;
output.contents_hash = Some(blake3::hash(input.as_bytes()));
let main_verilog_file = output.main_verilog_file();
let mut file_name: Option<&Path> = Some(&main_verilog_file);
loop {
let (chunk, next_file_name) = if let Some((chunk, rest)) =
input.split_once(file_separator_prefix)
{
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
return Err(CliError(eyre!("parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}")));
};
input = rest;
(chunk, Some(next_file_name.as_ref()))
} else {
(mem::take(&mut input), None)
};
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
break;
};
let file_name = output.firrtl.output_dir.join(file_name);
fs::write(&file_name, chunk)?;
if let Some(extension) = file_name.extension() {
if extension == "v" || extension == "sv" {
output.verilog_files.push(file_name);
}
}
}
Ok(output)
}
fn run_impl(
&self,
firrtl_output: FirrtlOutput,
acquired_job: &mut AcquiredJob,
) -> Result<VerilogOutput> {
let Self {
firrtl,
firtool,
firtool_extra_args,
verilog_dialect,
debug,
} = self;
fn run_impl(&self, firrtl_output: FirrtlOutput) -> Result<VerilogOutput> {
let output = VerilogOutput {
firrtl: firrtl_output,
verilog_files: vec![],
contents_hash: None,
};
let mut cmd = process::Command::new(firtool);
cmd.arg(output.firrtl.firrtl_file());
let mut cmd = process::Command::new(&self.firtool);
cmd.arg(output.firrtl.firrtl_file(&self.firrtl));
cmd.arg("-o");
cmd.arg(output.unadjusted_verilog_file());
if *debug {
cmd.arg("-g");
cmd.arg("--preserve-values=all");
}
if let Some(dialect) = verilog_dialect {
cmd.arg(output.verilog_file(self));
if let Some(dialect) = self.verilog_dialect {
cmd.args(dialect.firtool_extra_args());
}
cmd.args(firtool_extra_args);
cmd.current_dir(&output.firrtl.output_dir);
let status = firrtl.base.run_external_command(acquired_job, cmd, None)?;
cmd.args(&self.firtool_extra_args);
cmd.current_dir(&self.firrtl.base.output);
let status = cmd.status()?;
if status.success() {
self.process_unadjusted_verilog_file(output)
Ok(output)
} else {
Err(CliError(eyre!(
"running {} failed: {status}",
@ -368,323 +209,9 @@ where
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
{
type Output = VerilogOutput;
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?;
self.run_impl(firrtl_output, acquired_job)
}
}
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)]
#[non_exhaustive]
pub enum FormalMode {
#[default]
BMC,
Prove,
Live,
Cover,
}
impl FormalMode {
pub fn as_str(self) -> &'static str {
match self {
FormalMode::BMC => "bmc",
FormalMode::Prove => "prove",
FormalMode::Live => "live",
FormalMode::Cover => "cover",
}
}
}
impl fmt::Display for FormalMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.as_str())
}
}
#[derive(Clone)]
struct FormalAdjustArgs;
impl clap::FromArgMatches for FormalAdjustArgs {
fn from_arg_matches(_matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
Ok(Self)
}
fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> {
Ok(())
}
}
impl clap::Args for FormalAdjustArgs {
fn augment_args(cmd: clap::Command) -> clap::Command {
cmd.mut_arg("output", |arg| arg.required(false))
.mut_arg("verilog_dialect", |arg| {
arg.default_value(VerilogDialect::Yosys.to_string())
.hide(true)
})
}
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
Self::augment_args(cmd)
}
}
fn which(v: std::ffi::OsString) -> which::Result<PathBuf> {
#[cfg(not(miri))]
return which::which(v);
#[cfg(miri)]
return Ok(Path::new("/").join(v));
}
#[derive(Parser, Clone)]
#[non_exhaustive]
pub struct FormalArgs {
#[command(flatten)]
pub verilog: VerilogArgs,
#[arg(
long,
default_value = "sby",
env = "SBY",
value_hint = ValueHint::CommandName,
value_parser = OsStringValueParser::new().try_map(which)
)]
pub sby: PathBuf,
#[arg(long)]
pub sby_extra_args: Vec<String>,
#[arg(long, default_value_t)]
pub mode: FormalMode,
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
pub depth: u64,
#[arg(long, default_value = "z3")]
pub solver: String,
#[arg(long)]
pub smtbmc_extra_args: Vec<String>,
#[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")]
pub cache_results: bool,
#[command(flatten)]
_formal_adjust_args: FormalAdjustArgs,
}
impl fmt::Debug for FormalArgs {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
verilog,
sby,
sby_extra_args,
mode,
depth,
solver,
smtbmc_extra_args,
cache_results,
_formal_adjust_args: _,
} = self;
f.debug_struct("FormalArgs")
.field("verilog", verilog)
.field("sby", sby)
.field("sby_extra_args", sby_extra_args)
.field("mode", mode)
.field("depth", depth)
.field("solver", solver)
.field("smtbmc_extra_args", smtbmc_extra_args)
.field("cache_results", cache_results)
.finish_non_exhaustive()
}
}
impl FormalArgs {
pub const DEFAULT_DEPTH: u64 = 20;
}
#[derive(Debug)]
#[non_exhaustive]
pub struct FormalOutput {
pub verilog: VerilogOutput,
}
impl FormalOutput {
pub fn sby_file(&self) -> PathBuf {
self.verilog.firrtl.file_with_ext("sby")
}
pub fn cache_file(&self) -> PathBuf {
self.verilog.firrtl.file_with_ext("cache.json")
}
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub struct FormalCacheOutput {}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum FormalCacheVersion {
V1,
}
impl FormalCacheVersion {
pub const CURRENT: Self = Self::V1;
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub struct FormalCache {
pub version: FormalCacheVersion,
pub contents_hash: blake3::Hash,
pub stdout_stderr: String,
pub result: Result<FormalCacheOutput, String>,
}
impl FormalCache {
pub fn new(
version: FormalCacheVersion,
contents_hash: blake3::Hash,
stdout_stderr: String,
result: Result<FormalCacheOutput, String>,
) -> Self {
Self {
version,
contents_hash,
stdout_stderr,
result,
}
}
}
impl FormalArgs {
fn sby_contents(&self, output: &FormalOutput) -> Result<String> {
let Self {
verilog: _,
sby: _,
sby_extra_args: _,
mode,
depth,
smtbmc_extra_args,
solver,
cache_results: _,
_formal_adjust_args: _,
} = self;
let smtbmc_options = smtbmc_extra_args.join(" ");
let top_module = &output.verilog.firrtl.top_module;
let mut retval = format!(
"[options]\n\
mode {mode}\n\
depth {depth}\n\
wait on\n\
\n\
[engines]\n\
smtbmc {solver} -- -- {smtbmc_options}\n\
\n\
[script]\n"
);
for verilog_file in &output.verilog.verilog_files {
let verilog_file = verilog_file
.to_str()
.ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?;
if verilog_file.contains(|ch: char| {
(ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
}) {
return Err(CliError(eyre!(
"verilog file path contains characters that aren't permitted"
)));
}
writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap();
}
// workaround for wires disappearing -- set `keep` on all wires
writeln!(retval, "hierarchy -top {top_module}").unwrap();
writeln!(retval, "proc").unwrap();
writeln!(retval, "setattr -set keep 1 w:\\*").unwrap();
writeln!(retval, "prep").unwrap();
Ok(retval)
}
fn run_impl(
&self,
verilog_output: VerilogOutput,
acquired_job: &mut AcquiredJob,
) -> Result<FormalOutput> {
let output = FormalOutput {
verilog: verilog_output,
};
let sby_file = output.sby_file();
let sby_contents = self.sby_contents(&output)?;
let contents_hash = output.verilog.contents_hash.map(|verilog_hash| {
let mut hasher = blake3::Hasher::new();
hasher.update(verilog_hash.as_bytes());
hasher.update(sby_contents.as_bytes());
hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes());
for sby_extra_arg in self.sby_extra_args.iter() {
hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes());
hasher.update(sby_extra_arg.as_bytes());
}
hasher.finalize()
});
std::fs::write(&sby_file, sby_contents)?;
let mut cmd = process::Command::new(&self.sby);
cmd.arg("-j1"); // sby seems not to respect job count in parallel mode
cmd.arg("-f");
cmd.arg(sby_file.file_name().unwrap());
cmd.args(&self.sby_extra_args);
cmd.current_dir(&output.verilog.firrtl.output_dir);
let mut captured_output = String::new();
let cache_file = output.cache_file();
let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) {
if let Some(FormalCache {
version: FormalCacheVersion::CURRENT,
contents_hash: cache_contents_hash,
stdout_stderr,
result,
}) = fs::read(&cache_file)
.ok()
.and_then(|v| serde_json::from_slice(&v).ok())
{
if cache_contents_hash == contents_hash {
println!("Using cached formal result:\n{stdout_stderr}");
return match result {
Ok(FormalCacheOutput {}) => Ok(output),
Err(error) => Err(CliError(eyre::Report::msg(error))),
};
}
}
true
} else {
false
};
let _ = fs::remove_file(&cache_file);
let status = self.verilog.firrtl.base.run_external_command(
acquired_job,
cmd,
do_cache.then_some(&mut captured_output),
)?;
let result = if status.success() {
Ok(output)
} else {
Err(CliError(eyre!(
"running {} failed: {status}",
self.sby.display()
)))
};
fs::write(
cache_file,
serde_json::to_string_pretty(&FormalCache {
version: FormalCacheVersion::CURRENT,
contents_hash: contents_hash.unwrap(),
stdout_stderr: captured_output,
result: match &result {
Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}),
Err(error) => Err(error.to_string()),
},
})
.expect("serialization shouldn't ever fail"),
)?;
result
}
}
impl<Arg> RunPhase<Arg> for FormalArgs
where
VerilogArgs: RunPhase<Arg, Output = VerilogOutput>,
{
type Output = FormalOutput;
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
let verilog_output = self.verilog.run_with_job(arg, acquired_job)?;
self.run_impl(verilog_output, acquired_job)
fn run(&self, arg: Arg) -> Result<Self::Output> {
let firrtl_output = self.firrtl.run(arg)?;
self.run_impl(firrtl_output)
}
}
@ -694,8 +221,6 @@ enum CliCommand {
Firrtl(FirrtlArgs),
/// Generate Verilog
Verilog(VerilogArgs),
/// Run a formal proof
Formal(FormalArgs),
}
/// a simple CLI
@ -703,7 +228,7 @@ enum CliCommand {
/// Use like:
///
/// ```no_run
/// # use fayalite::prelude::*;
/// # use fayalite::hdl_module;
/// # #[hdl_module]
/// # fn my_module() {}
/// use fayalite::cli;
@ -716,7 +241,7 @@ enum CliCommand {
/// You can also use it with a larger [`clap`]-based CLI like so:
///
/// ```no_run
/// # use fayalite::prelude::*;
/// # use fayalite::hdl_module;
/// # #[hdl_module]
/// # fn my_module() {}
/// use clap::{Subcommand, Parser};
@ -773,16 +298,13 @@ where
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
{
type Output = ();
fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
fn run(&self, arg: T) -> Result<Self::Output> {
match &self.subcommand {
CliCommand::Firrtl(c) => {
c.run_with_job(arg, acquired_job)?;
c.run(arg)?;
}
CliCommand::Verilog(c) => {
c.run_with_job(arg, acquired_job)?;
}
CliCommand::Formal(c) => {
c.run_with_job(arg, acquired_job)?;
c.run(arg)?;
}
}
Ok(())
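Editor's note: both the old and new versions of `cli.rs` above chain build phases through the `RunPhase` trait, where a later phase (Verilog) delegates to the phase it wraps (FIRRTL) and then post-processes its output. The sketch below shows that chaining pattern in isolation; `FirrtlArgs`, `VerilogArgs`, the string-based outputs, and the `String` error type are simplified stand-ins, not fayalite's real types.

// Editor's sketch (not part of the diff): phase chaining via a RunPhase trait.
type Result<T> = std::result::Result<T, String>;

trait RunPhase<Arg> {
    type Output;
    fn run(&self, arg: Arg) -> Result<Self::Output>;
}

struct FirrtlArgs;
struct VerilogArgs {
    firrtl: FirrtlArgs,
}

struct FirrtlOutput(String);
struct VerilogOutput(FirrtlOutput);

impl RunPhase<&str> for FirrtlArgs {
    type Output = FirrtlOutput;
    fn run(&self, top_module: &str) -> Result<FirrtlOutput> {
        // Stand-in for exporting FIRRTL for the given top module.
        Ok(FirrtlOutput(format!("{top_module}.fir")))
    }
}

// The Verilog phase runs for any argument its inner FIRRTL phase accepts,
// mirroring the blanket `impl<Arg> RunPhase<Arg> for VerilogArgs` above.
impl<Arg> RunPhase<Arg> for VerilogArgs
where
    FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
{
    type Output = VerilogOutput;
    fn run(&self, arg: Arg) -> Result<VerilogOutput> {
        let firrtl_output = self.firrtl.run(arg)?;
        // Stand-in for invoking firtool on the exported FIRRTL file.
        Ok(VerilogOutput(firrtl_output))
    }
}

fn main() -> Result<()> {
    let verilog = VerilogArgs { firrtl: FirrtlArgs };
    let VerilogOutput(FirrtlOutput(fir_file)) = verilog.run("my_module")?;
    assert_eq!(fir_file, "my_module.fir");
    Ok(())
}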

View file

@ -2,78 +2,111 @@
// See Notices.txt for copyright information
use crate::{
expr::{Expr, ToExpr},
hdl,
int::Bool,
reset::{Reset, ResetType},
int::{UInt, UIntType},
intern::Interned,
reset::Reset,
source_location::SourceLocation,
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
ty::{
impl_match_values_as_self, CanonicalType, CanonicalTypeKind, CanonicalValue, Connect,
DynCanonicalType, StaticType, Type, TypeEnum, Value, ValueEnum,
},
util::interned_bit,
};
use bitvec::slice::BitSlice;
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct Clock;
pub struct ClockType;
impl Type for Clock {
type BaseType = Clock;
type MaskType = Bool;
type SimValue = bool;
impl ClockType {
pub const fn new() -> Self {
Self
}
}
impl_match_variant_as_self!();
impl Type for ClockType {
type Value = Clock;
type CanonicalType = ClockType;
type CanonicalValue = Clock;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
Bool
UIntType::new()
}
fn canonical(&self) -> CanonicalType {
CanonicalType::Clock(*self)
fn type_enum(&self) -> TypeEnum {
TypeEnum::Clock(*self)
}
fn source_location() -> SourceLocation {
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let CanonicalType::Clock(retval) = canonical_type else {
panic!("expected Clock");
};
retval
}
fn sim_value_from_bits(&self, bits: &BitSlice) -> Self::SimValue {
assert_eq!(bits.len(), 1);
bits[0]
}
fn sim_value_clone_from_bits(&self, value: &mut Self::SimValue, bits: &BitSlice) {
assert_eq!(bits.len(), 1);
*value = bits[0];
}
fn sim_value_to_bits(&self, value: &Self::SimValue, bits: &mut BitSlice) {
assert_eq!(bits.len(), 1);
bits.set(0, *value);
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
}
impl Clock {
pub fn type_properties(self) -> TypeProperties {
Self::TYPE_PROPERTIES
}
pub fn can_connect(self, _rhs: Self) -> bool {
true
impl Connect<Self> for ClockType {}
impl CanonicalType for ClockType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::Clock;
}
impl StaticType for ClockType {
fn static_type() -> Self {
Self
}
}
impl StaticType for Clock {
const TYPE: Self = Self;
const MASK_TYPE: Self::MaskType = Bool;
const TYPE_PROPERTIES: TypeProperties = TypeProperties {
is_passive: true,
is_storable: false,
is_castable_from_bits: true,
bit_width: 1,
};
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct Clock(pub bool);
impl ToExpr for Clock {
type Type = ClockType;
fn ty(&self) -> Self::Type {
ClockType
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
impl Value for Clock {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
interned_bit(this.0)
}
}
impl CanonicalValue for Clock {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::Clock(*this)
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
interned_bit(this.0)
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Value)]
#[hdl(static, outline_generated)]
pub struct ClockDomain {
pub clk: Clock,
pub rst: Reset,
}
pub trait ToClock {
@ -104,10 +137,10 @@ impl ToClock for Expr<Clock> {
}
}
#[hdl]
pub struct ClockDomain<R: ResetType = Reset> {
pub clk: Clock,
pub rst: R,
impl ToClock for Clock {
fn to_clock(&self) -> Expr<Clock> {
self.to_expr()
}
}
impl ToClock for bool {
@ -115,3 +148,9 @@ impl ToClock for bool {
self.to_expr().to_clock()
}
}
impl ToClock for UInt<1> {
fn to_clock(&self) -> Expr<Clock> {
self.to_expr().to_clock()
}
}
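Editor's note: the `clock.rs` diff above funnels several source types (`Clock`, `bool`, `UInt<1>`) through a single `ToClock` conversion trait and pairs a clock with a reset in `ClockDomain`. A minimal sketch of that conversion-trait shape follows, using plain structs instead of fayalite's `Expr<Clock>`; all names here are illustrative stand-ins.

// Editor's sketch (not part of the diff): one conversion trait with each
// source type funneling through the canonical Clock conversion.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Clock(bool);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Reset(bool);

// Plain stand-in for the `ClockDomain` bundle in the diff, pairing a clock
// with a reset.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ClockDomain {
    clk: Clock,
    rst: Reset,
}

trait ToClock {
    fn to_clock(&self) -> Clock;
}

impl ToClock for Clock {
    fn to_clock(&self) -> Clock {
        *self
    }
}

// `bool` delegates to the `Clock` conversion, mirroring how `bool` and
// `UInt<1>` delegate to `Expr<Clock>` conversions in the diff above.
impl ToClock for bool {
    fn to_clock(&self) -> Clock {
        Clock(*self).to_clock()
    }
}

fn main() {
    let cd = ClockDomain {
        clk: true.to_clock(),
        rst: Reset(false),
    };
    assert_eq!((cd.clk, cd.rst), (Clock(true), Reset(false)));
}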

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,489 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
array::Array,
bundle::{Bundle, BundleField},
expr::{Expr, Flow, ToExpr},
intern::{Intern, Interned},
memory::{DynPortType, MemPort},
module::{Instance, ModuleIO, TargetName},
reg::Reg,
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
source_location::SourceLocation,
ty::{CanonicalType, Type},
wire::Wire,
};
use std::fmt;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathBundleField {
pub name: Interned<str>,
}
impl fmt::Display for TargetPathBundleField {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, ".{}", self.name)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathArrayElement {
pub index: usize,
}
impl fmt::Display for TargetPathArrayElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[{}]", self.index)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathDynArrayElement {}
impl fmt::Display for TargetPathDynArrayElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[<dyn>]")
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TargetPathElement {
BundleField(TargetPathBundleField),
ArrayElement(TargetPathArrayElement),
DynArrayElement(TargetPathDynArrayElement),
}
impl From<TargetPathBundleField> for TargetPathElement {
fn from(value: TargetPathBundleField) -> Self {
Self::BundleField(value)
}
}
impl From<TargetPathArrayElement> for TargetPathElement {
fn from(value: TargetPathArrayElement) -> Self {
Self::ArrayElement(value)
}
}
impl From<TargetPathDynArrayElement> for TargetPathElement {
fn from(value: TargetPathDynArrayElement) -> Self {
Self::DynArrayElement(value)
}
}
impl fmt::Display for TargetPathElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::BundleField(v) => v.fmt(f),
Self::ArrayElement(v) => v.fmt(f),
Self::DynArrayElement(v) => v.fmt(f),
}
}
}
impl TargetPathElement {
pub fn canonical_ty(&self, parent: Interned<Target>) -> CanonicalType {
match self {
&Self::BundleField(TargetPathBundleField { name }) => {
let parent_ty = Bundle::from_canonical(parent.canonical_ty());
let field = parent_ty
.field_by_name(name)
.expect("field name is known to be a valid field of parent type");
field.ty
}
&Self::ArrayElement(TargetPathArrayElement { index }) => {
let parent_ty = Array::<CanonicalType>::from_canonical(parent.canonical_ty());
assert!(index < parent_ty.len());
parent_ty.element()
}
Self::DynArrayElement(_) => {
let parent_ty = Array::<CanonicalType>::from_canonical(parent.canonical_ty());
parent_ty.element()
}
}
}
pub fn flow(&self, parent: Interned<Target>) -> Flow {
match self {
Self::BundleField(v) => {
let parent_ty = Bundle::from_canonical(parent.canonical_ty());
let field = parent_ty
.field_by_name(v.name)
.expect("field name is known to be a valid field of parent type");
parent.flow().flip_if(field.flipped)
}
Self::ArrayElement(_) => parent.flow(),
Self::DynArrayElement(_) => parent.flow(),
}
}
pub fn is_static(&self) -> bool {
match self {
Self::BundleField(_) | Self::ArrayElement(_) => true,
Self::DynArrayElement(_) => false,
}
}
}
macro_rules! impl_target_base {
(
$(#[$enum_meta:meta])*
$enum_vis:vis enum $TargetBase:ident {
$(
$(#[from = $from:ident])?
#[is = $is_fn:ident]
#[to = $to_fn:ident]
$(#[$variant_meta:meta])*
$Variant:ident($VariantTy:ty),
)*
}
) => {
$(#[$enum_meta])*
$enum_vis enum $TargetBase {
$(
$(#[$variant_meta])*
$Variant($VariantTy),
)*
}
impl fmt::Debug for $TargetBase {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
$(Self::$Variant(v) => v.fmt(f),)*
}
}
}
$($(
impl From<$VariantTy> for $TargetBase {
fn $from(value: $VariantTy) -> Self {
Self::$Variant(value)
}
}
impl From<$VariantTy> for Target {
fn $from(value: $VariantTy) -> Self {
$TargetBase::$Variant(value).into()
}
}
)*)?
impl $TargetBase {
$(
$enum_vis fn $is_fn(&self) -> bool {
self.$to_fn().is_some()
}
$enum_vis fn $to_fn(&self) -> Option<&$VariantTy> {
if let Self::$Variant(retval) = self {
Some(retval)
} else {
None
}
}
)*
$enum_vis fn must_connect_to(&self) -> bool {
match self {
$(Self::$Variant(v) => v.must_connect_to(),)*
}
}
$enum_vis fn flow(&self) -> Flow {
match self {
$(Self::$Variant(v) => v.flow(),)*
}
}
$enum_vis fn source_location(&self) -> SourceLocation {
match self {
$(Self::$Variant(v) => v.source_location(),)*
}
}
}
impl ToExpr for $TargetBase {
type Type = CanonicalType;
fn to_expr(&self) -> Expr<Self::Type> {
match self {
$(Self::$Variant(v) => Expr::canonical(v.to_expr()),)*
}
}
}
};
}
impl_target_base! {
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum TargetBase {
#[from = from]
#[is = is_module_io]
#[to = module_io]
ModuleIO(ModuleIO<CanonicalType>),
#[from = from]
#[is = is_mem_port]
#[to = mem_port]
MemPort(MemPort<DynPortType>),
#[is = is_reg]
#[to = reg]
Reg(Reg<CanonicalType, Reset>),
#[is = is_reg_sync]
#[to = reg_sync]
RegSync(Reg<CanonicalType, SyncReset>),
#[is = is_reg_async]
#[to = reg_async]
RegAsync(Reg<CanonicalType, AsyncReset>),
#[from = from]
#[is = is_wire]
#[to = wire]
Wire(Wire<CanonicalType>),
#[from = from]
#[is = is_instance]
#[to = instance]
Instance(Instance<Bundle>),
}
}
impl<R: ResetType> From<Reg<CanonicalType, R>> for TargetBase {
fn from(value: Reg<CanonicalType, R>) -> Self {
struct Dispatch;
impl ResetTypeDispatch for Dispatch {
type Input<T: ResetType> = Reg<CanonicalType, T>;
type Output<T: ResetType> = TargetBase;
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
TargetBase::Reg(input)
}
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
TargetBase::RegSync(input)
}
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
TargetBase::RegAsync(input)
}
}
R::dispatch(value, Dispatch)
}
}
impl<R: ResetType> From<Reg<CanonicalType, R>> for Target {
fn from(value: Reg<CanonicalType, R>) -> Self {
TargetBase::from(value).into()
}
}
impl fmt::Display for TargetBase {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self.target_name())
}
}
impl TargetBase {
pub fn target_name(&self) -> TargetName {
match self {
TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None),
TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())),
TargetBase::Reg(v) => TargetName(v.scoped_name(), None),
TargetBase::RegSync(v) => TargetName(v.scoped_name(), None),
TargetBase::RegAsync(v) => TargetName(v.scoped_name(), None),
TargetBase::Wire(v) => TargetName(v.scoped_name(), None),
TargetBase::Instance(v) => TargetName(v.scoped_name(), None),
}
}
pub fn canonical_ty(&self) -> CanonicalType {
match self {
TargetBase::ModuleIO(v) => v.ty(),
TargetBase::MemPort(v) => v.ty().canonical(),
TargetBase::Reg(v) => v.ty(),
TargetBase::RegSync(v) => v.ty(),
TargetBase::RegAsync(v) => v.ty(),
TargetBase::Wire(v) => v.ty(),
TargetBase::Instance(v) => v.ty().canonical(),
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetChild {
parent: Interned<Target>,
path_element: Interned<TargetPathElement>,
canonical_ty: CanonicalType,
flow: Flow,
}
impl fmt::Debug for TargetChild {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
parent,
path_element,
canonical_ty: _,
flow: _,
} = self;
parent.fmt(f)?;
fmt::Display::fmt(path_element, f)
}
}
impl fmt::Display for TargetChild {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
parent,
path_element,
canonical_ty: _,
flow: _,
} = self;
parent.fmt(f)?;
path_element.fmt(f)
}
}
impl TargetChild {
pub fn new(parent: Interned<Target>, path_element: Interned<TargetPathElement>) -> Self {
Self {
parent,
path_element,
canonical_ty: path_element.canonical_ty(parent),
flow: path_element.flow(parent),
}
}
pub fn parent(self) -> Interned<Target> {
self.parent
}
pub fn path_element(self) -> Interned<TargetPathElement> {
self.path_element
}
pub fn canonical_ty(self) -> CanonicalType {
self.canonical_ty
}
pub fn flow(self) -> Flow {
self.flow
}
pub fn bundle_field(self) -> Option<BundleField> {
if let TargetPathElement::BundleField(TargetPathBundleField { name }) = *self.path_element {
let parent_ty = Bundle::from_canonical(self.parent.canonical_ty());
Some(
parent_ty
.field_by_name(name)
.expect("field name known to be a valid field of parent"),
)
} else {
None
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum Target {
Base(Interned<TargetBase>),
Child(TargetChild),
}
impl From<TargetBase> for Target {
fn from(value: TargetBase) -> Self {
Self::Base(Intern::intern_sized(value))
}
}
impl From<TargetChild> for Target {
fn from(value: TargetChild) -> Self {
Self::Child(value)
}
}
impl From<Interned<TargetBase>> for Target {
fn from(value: Interned<TargetBase>) -> Self {
Self::Base(value)
}
}
impl Target {
pub fn base(&self) -> Interned<TargetBase> {
let mut target = self;
loop {
match target {
Self::Base(v) => break *v,
Self::Child(v) => target = &v.parent,
}
}
}
pub fn child(&self) -> Option<TargetChild> {
match *self {
Target::Base(_) => None,
Target::Child(v) => Some(v),
}
}
pub fn is_static(&self) -> bool {
let mut target = self;
loop {
match target {
Self::Base(_) => return true,
Self::Child(v) if !v.path_element().is_static() => return false,
Self::Child(v) => target = &v.parent,
}
}
}
#[must_use]
pub fn join(&self, path_element: Interned<TargetPathElement>) -> Self {
TargetChild::new(self.intern(), path_element).into()
}
pub fn flow(&self) -> Flow {
match self {
Self::Base(v) => v.flow(),
Self::Child(v) => v.flow(),
}
}
pub fn canonical_ty(&self) -> CanonicalType {
match self {
Target::Base(v) => v.canonical_ty(),
Target::Child(v) => v.canonical_ty(),
}
}
}
impl fmt::Display for Target {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Base(v) => v.fmt(f),
Self::Child(v) => v.fmt(f),
}
}
}
impl fmt::Debug for Target {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Base(v) => v.fmt(f),
Self::Child(v) => v.fmt(f),
}
}
}
pub trait GetTarget {
fn target(&self) -> Option<Interned<Target>>;
}
impl GetTarget for bool {
fn target(&self) -> Option<Interned<Target>> {
None
}
}
impl<T: ?Sized + GetTarget + Send + Sync + 'static> GetTarget for Interned<T> {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
impl<T: ?Sized + GetTarget> GetTarget for &'_ T {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
impl<T: ?Sized + GetTarget> GetTarget for &'_ mut T {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
impl<T: ?Sized + GetTarget> GetTarget for Box<T> {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
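Editor's note: `Target` above is either a base (`ModuleIO`, `MemPort`, `Reg`, `Wire`, `Instance`) or a `TargetChild` that points at its parent plus one path element, so finding the base is a loop over parent links. The sketch below shows that parent-linked structure with `Rc` standing in for fayalite's `Interned` and a plain field-name string standing in for `TargetPathElement`; it is not code from the diff.

// Editor's sketch (not part of the diff): a target is a base or a child that
// links back to its parent, like `Target`/`TargetChild` above.
use std::rc::Rc;

#[derive(Debug)]
enum Target {
    Base(&'static str),
    Child { parent: Rc<Target>, field: &'static str },
}

impl Target {
    // Walk parent links until the base target is reached, like `Target::base`.
    fn base(&self) -> &'static str {
        let mut target = self;
        loop {
            match target {
                Target::Base(name) => return *name,
                Target::Child { parent, .. } => target = parent.as_ref(),
            }
        }
    }

    // Render the full path, e.g. "module_io.cd.clk".
    fn path(&self) -> String {
        match self {
            Target::Base(name) => name.to_string(),
            Target::Child { parent, field } => format!("{}.{}", parent.path(), field),
        }
    }
}

// Like `Target::join`: extend a target with one more path element.
fn join(parent: &Rc<Target>, field: &'static str) -> Rc<Target> {
    Rc::new(Target::Child {
        parent: Rc::clone(parent),
        field,
    })
}

fn main() {
    let io = Rc::new(Target::Base("module_io"));
    let clk = join(&join(&io, "cd"), "clk");
    assert_eq!(clk.base(), "module_io");
    assert_eq!(clk.path(), "module_io.cd.clk");
}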

File diff suppressed because it is too large

View file

@ -1,247 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
int::BoolOrIntType,
intern::{Intern, Interned, Memoize},
prelude::*,
};
use std::sync::OnceLock;
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormalKind {
Assert,
Assume,
Cover,
}
impl FormalKind {
pub fn as_str(self) -> &'static str {
match self {
Self::Assert => "assert",
Self::Assume => "assume",
Self::Cover => "cover",
}
}
}
#[track_caller]
pub fn formal_stmt_with_enable_and_loc(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
crate::module::add_stmt_formal(crate::module::StmtFormal {
kind,
clk,
pred,
en: en & !formal_reset().cast_to_static::<Bool>(),
text: text.intern(),
source_location,
});
}
#[track_caller]
pub fn formal_stmt_with_enable(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
) {
formal_stmt_with_enable_and_loc(kind, clk, pred, en, text, SourceLocation::caller());
}
#[track_caller]
pub fn formal_stmt_with_loc(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_enable_and_loc(kind, clk, pred, true.to_expr(), text, source_location);
}
#[track_caller]
pub fn formal_stmt(kind: FormalKind, clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
formal_stmt_with_loc(kind, clk, pred, text, SourceLocation::caller());
}
macro_rules! make_formal {
($kind:ident, $formal_stmt_with_enable_and_loc:ident, $formal_stmt_with_enable:ident, $formal_stmt_with_loc:ident, $formal_stmt:ident) => {
#[track_caller]
pub fn $formal_stmt_with_enable_and_loc(
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_enable_and_loc(
FormalKind::$kind,
clk,
pred,
en,
text,
source_location,
);
}
#[track_caller]
pub fn $formal_stmt_with_enable(
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
) {
formal_stmt_with_enable(FormalKind::$kind, clk, pred, en, text);
}
#[track_caller]
pub fn $formal_stmt_with_loc(
clk: Expr<Clock>,
pred: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_loc(FormalKind::$kind, clk, pred, text, source_location);
}
#[track_caller]
pub fn $formal_stmt(clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
formal_stmt(FormalKind::$kind, clk, pred, text);
}
};
}
make_formal!(
Assert,
hdl_assert_with_enable_and_loc,
hdl_assert_with_enable,
hdl_assert_with_loc,
hdl_assert
);
make_formal!(
Assume,
hdl_assume_with_enable_and_loc,
hdl_assume_with_enable,
hdl_assume_with_loc,
hdl_assume
);
make_formal!(
Cover,
hdl_cover_with_enable_and_loc,
hdl_cover_with_enable,
hdl_cover_with_loc,
hdl_cover
);
pub trait MakeFormalExpr: Type {}
impl<T: Type> MakeFormalExpr for T {}
#[hdl]
pub fn formal_global_clock() -> Expr<Clock> {
#[hdl_module(extern)]
fn formal_global_clock() {
#[hdl]
let clk: Clock = m.output();
m.annotate_module(BlackBoxInlineAnnotation {
path: "fayalite_formal_global_clock.v".intern(),
text: r"module __fayalite_formal_global_clock(output clk);
(* gclk *)
reg clk;
endmodule
"
.intern(),
});
m.verilog_name("__fayalite_formal_global_clock");
}
#[hdl]
let formal_global_clock = instance(formal_global_clock());
formal_global_clock.clk
}
#[hdl]
pub fn formal_reset() -> Expr<SyncReset> {
#[hdl_module(extern)]
fn formal_reset() {
#[hdl]
let rst: SyncReset = m.output();
m.annotate_module(BlackBoxInlineAnnotation {
path: "fayalite_formal_reset.v".intern(),
text: r"module __fayalite_formal_reset(output rst);
assign rst = $initstate;
endmodule
"
.intern(),
});
m.verilog_name("__fayalite_formal_reset");
}
static MOD: OnceLock<Interned<Module<formal_reset>>> = OnceLock::new();
#[hdl]
let formal_reset = instance(*MOD.get_or_init(formal_reset));
formal_reset.rst
}
macro_rules! make_any_const_fn {
($ident:ident, $verilog_attribute:literal) => {
#[hdl]
pub fn $ident<T: BoolOrIntType>(ty: T) -> Expr<T> {
#[hdl_module(extern)]
pub(super) fn $ident<T: BoolOrIntType>(ty: T) {
#[hdl]
let out: T = m.output(ty);
let width = ty.width();
let verilog_bitslice = if width == 1 {
String::new()
} else {
format!(" [{}:0]", width - 1)
};
m.annotate_module(BlackBoxInlineAnnotation {
path: Intern::intern_owned(format!(
"fayalite_{}_{width}.v",
stringify!($ident),
)),
text: Intern::intern_owned(format!(
r"module __fayalite_{}_{width}(output{verilog_bitslice} out);
(* {} *)
reg{verilog_bitslice} out;
endmodule
",
stringify!($ident),
$verilog_attribute,
)),
});
m.verilog_name(format!("__fayalite_{}_{width}", stringify!($ident)));
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct TheMemoize<T>(T);
impl<T: BoolOrIntType> Memoize for TheMemoize<T> {
type Input = ();
type InputOwned = ();
type Output = Option<Interned<Module<$ident<T>>>>;
fn inner(self, _input: &Self::Input) -> Self::Output {
if self.0.width() == 0 {
None
} else {
Some($ident(self.0))
}
}
}
let Some(module) = TheMemoize(ty).get_owned(()) else {
return 0_hdl_u0.cast_bits_to(ty);
};
#[hdl]
let $ident = instance(module);
$ident.out
}
};
}
make_any_const_fn!(any_const, "anyconst");
make_any_const_fn!(any_seq, "anyseq");
make_any_const_fn!(all_const, "allconst");
make_any_const_fn!(all_seq, "allseq");
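Editor's note: `formal.rs` above defines one generic `formal_stmt*` family and then uses `make_formal!` to stamp out per-kind wrappers (`hdl_assert*`, `hdl_assume*`, `hdl_cover*`). A reduced sketch of that wrapper-generating macro pattern follows; the printing body is a stand-in for emitting an HDL formal statement and the function names are simplified.

// Editor's sketch (not part of the diff): one generic statement function
// wrapped in per-kind helpers generated by a macro, like `make_formal!` above.
#[derive(Debug, Clone, Copy)]
enum FormalKind {
    Assert,
    Assume,
    Cover,
}

// Stand-in for `formal_stmt_with_enable_and_loc`: here it just prints.
fn formal_stmt(kind: FormalKind, pred: bool, text: &str) {
    println!("{kind:?}: {text} (pred = {pred})");
}

macro_rules! make_formal {
    ($kind:ident, $fn_name:ident) => {
        fn $fn_name(pred: bool, text: &str) {
            formal_stmt(FormalKind::$kind, pred, text);
        }
    };
}

make_formal!(Assert, hdl_assert);
make_formal!(Assume, hdl_assume);
make_formal!(Cover, hdl_cover);

fn main() {
    hdl_assert(true, "counter never overflows");
    hdl_assume(true, "input is valid");
    hdl_cover(true, "fifo becomes full");
}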

View file

@ -0,0 +1,368 @@
use crate::{
intern::{Intern, Interned},
source_location::SourceLocation,
ty::{DynCanonicalType, Type, Value},
util::DebugAsDisplay,
};
use std::{fmt, hash::Hash};
mod sealed {
pub trait GenericArgsSealed {}
pub trait ToGenericArgSealed {}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct GenericParamName {
pub name: Interned<str>,
pub source_location: SourceLocation,
}
impl GenericParamName {
#[track_caller]
pub fn new(name: &str) -> Self {
Self::new_with_loc(name, SourceLocation::caller())
}
pub fn new_with_loc(name: &str, source_location: SourceLocation) -> Self {
let name = name.intern();
Self {
name,
source_location,
}
}
}
impl fmt::Debug for GenericParamName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.name.fmt(f)
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct TypeParam {
pub name: GenericParamName,
pub default: Option<fn(earlier_args: &[GenericArg]) -> TypeArg>,
}
impl fmt::Debug for TypeParam {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("TypeParam")
.field("name", &self.name)
.field("default", &self.default.map(|_| DebugAsDisplay("_")))
.finish()
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct ConstParam {
pub name: GenericParamName,
pub default: Option<fn(earlier_args: &[GenericArg]) -> ConstArg>,
}
impl fmt::Debug for ConstParam {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("ConstParam")
.field("name", &self.name)
.field("default", &self.default.map(|_| DebugAsDisplay("_")))
.finish()
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum GenericParam {
Type(TypeParam),
Const(ConstParam),
}
impl fmt::Debug for GenericParam {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Type(v) => v.fmt(f),
Self::Const(v) => v.fmt(f),
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct TypeArg<T: Type = Interned<dyn DynCanonicalType>> {
pub ty: T,
}
impl<T: Type> TypeArg<T> {
pub fn canonical(&self) -> TypeArg {
TypeArg {
ty: self.ty.canonical_dyn(),
}
}
}
impl<T: Type> sealed::ToGenericArgSealed for T {}
impl<T: Type> ToGenericArg for T {
fn to_generic_arg(this: &Self) -> GenericArg {
GenericArg::Type(TypeArg {
ty: this.canonical_dyn(),
})
}
}
impl<T: Type> sealed::ToGenericArgSealed for TypeArg<T> {}
impl<T: Type> ToGenericArg for TypeArg<T> {
fn to_generic_arg(this: &Self) -> GenericArg {
GenericArg::Type(this.canonical())
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ConstArg {
pub value: usize,
}
impl sealed::ToGenericArgSealed for usize {}
impl ToGenericArg for usize {
fn to_generic_arg(this: &Self) -> GenericArg {
GenericArg::Const(ConstArg { value: *this })
}
}
impl sealed::ToGenericArgSealed for ConstArg {}
impl ToGenericArg for ConstArg {
fn to_generic_arg(this: &Self) -> GenericArg {
GenericArg::Const(*this)
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct DefaultArg;
impl sealed::ToGenericArgSealed for DefaultArg {}
impl ToGenericArg for DefaultArg {
fn to_generic_arg(this: &Self) -> GenericArg {
GenericArg::Default(*this)
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum GenericArg<T: Type = Interned<dyn DynCanonicalType>> {
Type(TypeArg<T>),
Const(ConstArg),
Default(DefaultArg),
}
#[derive(Copy, Clone, Debug)]
pub struct NotATypeArg;
impl fmt::Display for NotATypeArg {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("not a type argument")
}
}
impl std::error::Error for NotATypeArg {}
#[derive(Copy, Clone, Debug)]
pub struct NotAConstArg;
impl fmt::Display for NotAConstArg {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("not a const argument")
}
}
impl std::error::Error for NotAConstArg {}
impl<T: Type> GenericArg<T> {
pub fn canonical(&self) -> GenericArg {
match self {
GenericArg::Type(v) => GenericArg::Type(v.canonical()),
GenericArg::Const(v) => GenericArg::Const(*v),
GenericArg::Default(v) => GenericArg::Default(*v),
}
}
pub fn ty(self) -> Result<T, NotATypeArg> {
match self {
GenericArg::Type(TypeArg { ty }) => Ok(ty),
GenericArg::Const(_) | GenericArg::Default(_) => Err(NotATypeArg),
}
}
pub fn const_(self) -> Result<usize, NotAConstArg> {
match self {
GenericArg::Const(ConstArg { value }) => Ok(value),
GenericArg::Type(_) | GenericArg::Default(_) => Err(NotAConstArg),
}
}
}
impl<T: Type> fmt::Debug for GenericArg<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Type(v) => v.fmt(f),
Self::Const(v) => v.fmt(f),
Self::Default(v) => v.fmt(f),
}
}
}
impl<T: Type> sealed::ToGenericArgSealed for GenericArg<T> {}
impl<T: Type> ToGenericArg for GenericArg<T> {
fn to_generic_arg(this: &Self) -> GenericArg {
this.canonical()
}
}
pub trait ToGenericArg: sealed::ToGenericArgSealed {
fn to_generic_arg(this: &Self) -> GenericArg;
}
pub trait GenericArgs: sealed::GenericArgsSealed {
fn generic_args(this: &Self) -> Vec<GenericArg>;
}
impl<T: GenericArgs + ?Sized + Send + Sync + 'static> sealed::GenericArgsSealed for Interned<T> {}
impl<T: GenericArgs + ?Sized + Send + Sync + 'static> GenericArgs for Interned<T> {
fn generic_args(this: &Self) -> Vec<GenericArg> {
T::generic_args(this)
}
}
impl<T: GenericArgs + ?Sized> sealed::GenericArgsSealed for Box<T> {}
impl<T: GenericArgs + ?Sized> GenericArgs for Box<T> {
fn generic_args(this: &Self) -> Vec<GenericArg> {
T::generic_args(this)
}
}
impl<T: GenericArgs + ?Sized> sealed::GenericArgsSealed for &'_ T {}
impl<T: GenericArgs + ?Sized> GenericArgs for &'_ T {
fn generic_args(this: &Self) -> Vec<GenericArg> {
T::generic_args(this)
}
}
impl<T: GenericArgs + ?Sized> sealed::GenericArgsSealed for &'_ mut T {}
impl<T: GenericArgs + ?Sized> GenericArgs for &'_ mut T {
fn generic_args(this: &Self) -> Vec<GenericArg> {
T::generic_args(this)
}
}
impl<T: ToGenericArg> sealed::GenericArgsSealed for [T] {}
impl<T: ToGenericArg> GenericArgs for [T] {
fn generic_args(this: &Self) -> Vec<GenericArg> {
Vec::from_iter(this.iter().map(ToGenericArg::to_generic_arg))
}
}
impl<T: ToGenericArg> sealed::GenericArgsSealed for Vec<T> {}
impl<T: ToGenericArg> GenericArgs for Vec<T> {
fn generic_args(this: &Self) -> Vec<GenericArg> {
Vec::from_iter(this.iter().map(ToGenericArg::to_generic_arg))
}
}
impl<T: ToGenericArg, const N: usize> sealed::GenericArgsSealed for [T; N] {}
impl<T: ToGenericArg, const N: usize> GenericArgs for [T; N] {
fn generic_args(this: &Self) -> Vec<GenericArg> {
Vec::from_iter(this.iter().map(ToGenericArg::to_generic_arg))
}
}
macro_rules! impl_generic_args_for_tuples {
($($v:ident: $T:ident),*) => {
impl<$($T: ToGenericArg,)*> sealed::GenericArgsSealed for ($($T,)*) {}
impl<$($T: ToGenericArg,)*> GenericArgs for ($($T,)*) {
fn generic_args(this: &Self) -> Vec<GenericArg> {
let ($($v,)*) = this;
vec![$($T::to_generic_arg($v),)*]
}
}
};
}
impl_generic_args_for_tuples!();
impl_generic_args_for_tuples!(a: A);
impl_generic_args_for_tuples!(a: A, b: B);
impl_generic_args_for_tuples!(a: A, b: B, c: C);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F, g: G);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K);
impl_generic_args_for_tuples!(a: A, b: B, c: C, d: D, e: E, f: F, g: G, h: H, i: I, j: J, k: K, l: L);
#[track_caller]
pub fn canonical_generic_args(
mut generic_args: Vec<GenericArg>,
generic_params: &[GenericParam],
) -> Interned<[GenericArg]> {
assert!(generic_params.len() >= generic_args.len());
for (i, generic_param) in generic_params.iter().enumerate() {
match generic_args.get(i) {
Some(GenericArg::Default(DefaultArg)) | None => {}
Some(GenericArg::Type(_)) | Some(GenericArg::Const(_)) => continue,
}
let generic_arg = match generic_param {
GenericParam::Type(TypeParam {
name: _,
default: Some(default),
}) => GenericArg::Type(default(&generic_args[..i])),
GenericParam::Type(TypeParam {
name:
GenericParamName {
name,
source_location,
},
default: None,
}) => panic!("Missing type argument for {name} declared here: {source_location}"),
GenericParam::Const(ConstParam {
name: _,
default: Some(default),
}) => GenericArg::Const(default(&generic_args[..i])),
GenericParam::Const(ConstParam {
name:
GenericParamName {
name,
source_location,
},
default: None,
}) => panic!("Missing const argument for {name} declared here: {source_location}"),
};
if i == generic_args.len() {
generic_args.push(generic_arg)
} else {
generic_args[i] = generic_arg;
}
}
debug_assert_eq!(generic_params.len(), generic_args.len());
Intern::intern_owned(generic_args)
}
pub trait GenericType {
type GenericArgs: GenericArgs;
type Type: Type<Value = Self::Value>;
type Value: Value<Type = Self::Type>;
fn generic_params() -> Interned<[GenericParam]>;
#[track_caller]
fn canonical_generic_args(generic_args: &Self::GenericArgs) -> Interned<[GenericArg]> {
canonical_generic_args(
GenericArgs::generic_args(&generic_args),
&Self::generic_params(),
)
}
fn ty(generic_args: Interned<[GenericArg]>) -> Self::Type;
}
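Editor's note: the new generics module above fills in missing or `Default` generic arguments by calling each parameter's `default` function with the arguments that come before it (`canonical_generic_args`). The sketch below reproduces that default-filling step for const parameters only; the two-parameter example where `END` defaults to `START + 1` is hypothetical and not taken from fayalite.

// Editor's sketch (not part of the diff): fill omitted/default const arguments
// from per-parameter default functions that may read earlier arguments.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum GenericArg {
    Const(usize),
    Default,
}

struct ConstParam {
    name: &'static str,
    default: Option<fn(earlier_args: &[GenericArg]) -> usize>,
}

fn canonical_generic_args(mut args: Vec<GenericArg>, params: &[ConstParam]) -> Vec<GenericArg> {
    assert!(params.len() >= args.len());
    for (i, param) in params.iter().enumerate() {
        match args.get(i) {
            // Explicit argument supplied; keep it.
            Some(GenericArg::Const(_)) => continue,
            // Omitted or `Default`: compute it from the earlier arguments.
            Some(GenericArg::Default) | None => {}
        }
        let default = param
            .default
            .unwrap_or_else(|| panic!("missing argument for {}", param.name));
        let arg = GenericArg::Const(default(&args[..i]));
        if i == args.len() {
            args.push(arg);
        } else {
            args[i] = arg;
        }
    }
    args
}

// Hypothetical default: END = START + 1.
fn end_default(earlier_args: &[GenericArg]) -> usize {
    match earlier_args[0] {
        GenericArg::Const(start) => start + 1,
        GenericArg::Default => unreachable!("START has no default"),
    }
}

fn main() {
    let params = [
        ConstParam { name: "START", default: None },
        ConstParam { name: "END", default: Some(end_default) },
    ];
    let args = canonical_generic_args(vec![GenericArg::Const(3)], &params);
    assert_eq!(args, vec![GenericArg::Const(3), GenericArg::Const(4)]);
}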

File diff suppressed because it is too large

View file

@ -1,614 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
bundle::{Bundle, BundleField, BundleType, BundleTypePropertiesBuilder, NoBuilder},
expr::{
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd,
},
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
intern::{Intern, Interned},
phantom_const::PhantomConst,
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
source_location::SourceLocation,
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
};
use bitvec::{order::Lsb0, slice::BitSlice, view::BitView};
use serde::{
de::{value::UsizeDeserializer, Error, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{fmt, marker::PhantomData, ops::Index};
const UINT_IN_RANGE_TYPE_FIELD_NAMES: [&'static str; 2] = ["value", "range"];
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct UIntInRangeMaskType {
value: Bool,
range: PhantomConstRangeMaskType,
}
impl Type for UIntInRangeMaskType {
type BaseType = Bundle;
type MaskType = Self;
type SimValue = bool;
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
*self
}
fn canonical(&self) -> CanonicalType {
CanonicalType::Bundle(Bundle::new(self.fields()))
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let fields = Bundle::from_canonical(canonical_type).fields();
let [BundleField {
name: value_name,
flipped: false,
ty: value,
}, BundleField {
name: range_name,
flipped: false,
ty: range,
}] = *fields
else {
panic!("expected UIntInRangeMaskType");
};
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
let value = Bool::from_canonical(value);
let range = PhantomConstRangeMaskType::from_canonical(range);
Self { value, range }
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn sim_value_from_bits(&self, bits: &BitSlice) -> Self::SimValue {
Bool.sim_value_from_bits(bits)
}
fn sim_value_clone_from_bits(&self, value: &mut Self::SimValue, bits: &BitSlice) {
Bool.sim_value_clone_from_bits(value, bits);
}
fn sim_value_to_bits(&self, value: &Self::SimValue, bits: &mut BitSlice) {
Bool.sim_value_to_bits(value, bits);
}
}
impl BundleType for UIntInRangeMaskType {
type Builder = NoBuilder;
type FilledBuilder = Expr<UIntInRangeMaskType>;
fn fields(&self) -> Interned<[BundleField]> {
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
let Self { value, range } = self;
[
BundleField {
name: value_name.intern(),
flipped: false,
ty: value.canonical(),
},
BundleField {
name: range_name.intern(),
flipped: false,
ty: range.canonical(),
},
][..]
.intern()
}
}
impl StaticType for UIntInRangeMaskType {
const TYPE: Self = Self {
value: Bool,
range: PhantomConstRangeMaskType::TYPE,
};
const MASK_TYPE: Self::MaskType = Self::TYPE;
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
.field(false, Bool::TYPE_PROPERTIES)
.field(false, PhantomConstRangeMaskType::TYPE_PROPERTIES)
.finish();
const MASK_TYPE_PROPERTIES: TypeProperties = Self::TYPE_PROPERTIES;
}
impl ToSimValueWithType<UIntInRangeMaskType> for bool {
fn to_sim_value_with_type(&self, ty: UIntInRangeMaskType) -> SimValue<UIntInRangeMaskType> {
SimValue::from_value(ty, *self)
}
}
impl ExprCastTo<Bool> for UIntInRangeMaskType {
fn cast_to(src: Expr<Self>, to_type: Bool) -> Expr<Bool> {
src.cast_to_bits().cast_to(to_type)
}
}
impl ExprCastTo<UIntInRangeMaskType> for Bool {
fn cast_to(src: Expr<Self>, to_type: UIntInRangeMaskType) -> Expr<UIntInRangeMaskType> {
src.cast_to_static::<UInt<1>>().cast_bits_to(to_type)
}
}
impl ExprPartialEq<Self> for UIntInRangeMaskType {
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
}
}
impl SimValuePartialEq<Self> for UIntInRangeMaskType {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
**this == **other
}
}
type PhantomConstRangeMaskType = <PhantomConst<SerdeRange<DynSize, DynSize>> as Type>::MaskType;
#[derive(Default, Copy, Clone, Debug)]
struct RangeParseError;
macro_rules! define_uint_in_range_type {
(
$UIntInRange:ident,
$UIntInRangeType:ident,
$UIntInRangeTypeWithoutGenerics:ident,
$UIntInRangeTypeWithStart:ident,
$SerdeRange:ident,
$range_operator_str:literal,
|$uint_range_usize_start:ident, $uint_range_usize_end:ident| $uint_range_usize:expr,
) => {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct $SerdeRange<Start: Size, End: Size> {
start: Start::SizeType,
end: End::SizeType,
}
impl<Start: KnownSize, End: KnownSize> Default for $SerdeRange<Start, End> {
fn default() -> Self {
Self {
start: Start::SIZE,
end: End::SIZE,
}
}
}
impl std::str::FromStr for $SerdeRange<DynSize, DynSize> {
type Err = RangeParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let Some((start, end)) = s.split_once($range_operator_str) else {
return Err(RangeParseError);
};
if start.is_empty()
|| start.bytes().any(|b| !b.is_ascii_digit())
|| end.is_empty()
|| end.bytes().any(|b| !b.is_ascii_digit())
{
return Err(RangeParseError);
}
let start = start.parse().map_err(|_| RangeParseError)?;
let end = end.parse().map_err(|_| RangeParseError)?;
let retval = Self { start, end };
if retval.is_empty() {
Err(RangeParseError)
} else {
Ok(retval)
}
}
}
impl<Start: Size, End: Size> fmt::Display for $SerdeRange<Start, End> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self { start, end } = *self;
write!(
f,
"{}{}{}",
Start::as_usize(start),
$range_operator_str,
End::as_usize(end),
)
}
}
impl<Start: Size, End: Size> Serialize for $SerdeRange<Start, End> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.collect_str(self)
}
}
impl<'de, Start: Size, End: Size> Deserialize<'de> for $SerdeRange<Start, End> {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
struct SerdeRangeVisitor<Start: Size, End: Size>(PhantomData<(Start, End)>);
impl<'de, Start: Size, End: Size> Visitor<'de> for SerdeRangeVisitor<Start, End> {
type Value = $SerdeRange<Start, End>;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("a string with format \"")?;
if let Some(start) = Start::KNOWN_VALUE {
write!(f, "{start}")?;
} else {
f.write_str("<int>")?;
};
f.write_str($range_operator_str)?;
if let Some(end) = End::KNOWN_VALUE {
write!(f, "{end}")?;
} else {
f.write_str("<int>")?;
};
f.write_str("\" that is a non-empty range")
}
fn visit_str<E: Error>(self, v: &str) -> Result<Self::Value, E> {
let $SerdeRange::<DynSize, DynSize> { start, end } =
v.parse().map_err(|_| {
Error::invalid_value(serde::de::Unexpected::Str(v), &self)
})?;
let start =
Start::SizeType::deserialize(UsizeDeserializer::<E>::new(start))?;
let end = End::SizeType::deserialize(UsizeDeserializer::<E>::new(end))?;
Ok($SerdeRange { start, end })
}
fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
match std::str::from_utf8(v) {
Ok(v) => self.visit_str(v),
Err(_) => {
Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self))
}
}
}
}
deserializer.deserialize_str(SerdeRangeVisitor(PhantomData))
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct $UIntInRangeType<Start: Size, End: Size> {
value: UInt,
range: PhantomConst<$SerdeRange<Start, End>>,
}
impl<Start: Size, End: Size> $UIntInRangeType<Start, End> {
fn from_phantom_const_range(range: PhantomConst<$SerdeRange<Start, End>>) -> Self {
let $SerdeRange { start, end } = *range.get();
let $uint_range_usize_start = Start::as_usize(start);
let $uint_range_usize_end = End::as_usize(end);
Self {
value: $uint_range_usize,
range,
}
}
pub fn new(start: Start::SizeType, end: End::SizeType) -> Self {
Self::from_phantom_const_range(PhantomConst::new(
$SerdeRange { start, end }.intern_sized(),
))
}
}
impl<Start: Size, End: Size> fmt::Debug for $UIntInRangeType<Start, End> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self { value, range } = self;
let $SerdeRange { start, end } = *range.get();
f.debug_struct(&format!(
"{}<{}, {}>",
stringify!($UIntInRange),
Start::as_usize(start),
End::as_usize(end),
))
.field("value", value)
.finish_non_exhaustive()
}
}
impl<Start: Size, End: Size> Type for $UIntInRangeType<Start, End> {
type BaseType = Bundle;
type MaskType = UIntInRangeMaskType;
type SimValue = usize;
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntInRangeMaskType::TYPE
}
fn canonical(&self) -> CanonicalType {
CanonicalType::Bundle(Bundle::new(self.fields()))
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let fields = Bundle::from_canonical(canonical_type).fields();
let [BundleField {
name: value_name,
flipped: false,
ty: value,
}, BundleField {
name: range_name,
flipped: false,
ty: range,
}] = *fields
else {
panic!("expected {}", stringify!($UIntInRange));
};
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
let value = UInt::from_canonical(value);
let range = PhantomConst::<$SerdeRange<Start, End>>::from_canonical(range);
let retval = Self::from_phantom_const_range(range);
assert_eq!(retval, Self { value, range });
retval
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn sim_value_from_bits(&self, bits: &BitSlice) -> Self::SimValue {
let mut retval = 0usize;
retval.view_bits_mut::<Lsb0>()[..bits.len()].clone_from_bitslice(bits);
retval
}
fn sim_value_clone_from_bits(&self, value: &mut Self::SimValue, bits: &BitSlice) {
*value = self.sim_value_from_bits(bits);
}
fn sim_value_to_bits(&self, value: &Self::SimValue, bits: &mut BitSlice) {
bits.clone_from_bitslice(&value.view_bits::<Lsb0>()[..bits.len()]);
}
}
impl<Start: Size, End: Size> BundleType for $UIntInRangeType<Start, End> {
type Builder = NoBuilder;
type FilledBuilder = Expr<Self>;
fn fields(&self) -> Interned<[BundleField]> {
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
let Self { value, range } = self;
[
BundleField {
name: value_name.intern(),
flipped: false,
ty: value.canonical(),
},
BundleField {
name: range_name.intern(),
flipped: false,
ty: range.canonical(),
},
][..]
.intern()
}
}
impl<Start: KnownSize, End: KnownSize> Default for $UIntInRangeType<Start, End> {
fn default() -> Self {
Self::TYPE
}
}
impl<Start: KnownSize, End: KnownSize> StaticType for $UIntInRangeType<Start, End> {
const TYPE: Self = {
let $uint_range_usize_start = Start::VALUE;
let $uint_range_usize_end = End::VALUE;
Self {
value: $uint_range_usize,
range: PhantomConst::<$SerdeRange<Start, End>>::TYPE,
}
};
const MASK_TYPE: Self::MaskType = UIntInRangeMaskType::TYPE;
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
.field(false, Self::TYPE.value.type_properties_dyn())
.field(
false,
PhantomConst::<$SerdeRange<Start, End>>::TYPE_PROPERTIES,
)
.finish();
const MASK_TYPE_PROPERTIES: TypeProperties = UIntInRangeMaskType::TYPE_PROPERTIES;
}
impl<Start: Size, End: Size> ToSimValueWithType<$UIntInRangeType<Start, End>> for usize {
fn to_sim_value_with_type(
&self,
ty: $UIntInRangeType<Start, End>,
) -> SimValue<$UIntInRangeType<Start, End>> {
SimValue::from_value(ty, *self)
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct $UIntInRangeTypeWithoutGenerics;
#[allow(non_upper_case_globals)]
pub const $UIntInRangeType: $UIntInRangeTypeWithoutGenerics =
$UIntInRangeTypeWithoutGenerics;
impl<StartSize: SizeType> Index<StartSize> for $UIntInRangeTypeWithoutGenerics {
type Output = $UIntInRangeTypeWithStart<StartSize::Size>;
fn index(&self, start: StartSize) -> &Self::Output {
Interned::into_inner($UIntInRangeTypeWithStart(start).intern_sized())
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct $UIntInRangeTypeWithStart<Start: Size>(Start::SizeType);
impl<Start: Size, EndSize: SizeType<Size = End>, End: Size<SizeType = EndSize>>
Index<EndSize> for $UIntInRangeTypeWithStart<Start>
{
type Output = $UIntInRangeType<Start, End>;
fn index(&self, end: EndSize) -> &Self::Output {
Interned::into_inner($UIntInRangeType::new(self.0, end).intern_sized())
}
}
impl<Start: Size, End: Size> ExprCastTo<UInt> for $UIntInRangeType<Start, End> {
fn cast_to(src: Expr<Self>, to_type: UInt) -> Expr<UInt> {
src.cast_to_bits().cast_to(to_type)
}
}
impl<Start: Size, End: Size> ExprCastTo<$UIntInRangeType<Start, End>> for UInt {
fn cast_to(
src: Expr<Self>,
to_type: $UIntInRangeType<Start, End>,
) -> Expr<$UIntInRangeType<Start, End>> {
src.cast_bits_to(to_type)
}
}
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
ExprPartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
for $UIntInRangeType<LhsStart, LhsEnd>
{
fn cmp_eq(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
}
fn cmp_ne(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
}
}
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
ExprPartialOrd<$UIntInRangeType<RhsStart, RhsEnd>>
for $UIntInRangeType<LhsStart, LhsEnd>
{
fn cmp_lt(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_lt(rhs.cast_to_bits())
}
fn cmp_le(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_le(rhs.cast_to_bits())
}
fn cmp_gt(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_gt(rhs.cast_to_bits())
}
fn cmp_ge(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ge(rhs.cast_to_bits())
}
}
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
SimValuePartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
for $UIntInRangeType<LhsStart, LhsEnd>
{
fn sim_value_eq(
this: &SimValue<Self>,
other: &SimValue<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> bool {
**this == **other
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<UIntType<Width>>
for $UIntInRangeType<Start, End>
{
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_eq(rhs)
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ne(rhs)
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<$UIntInRangeType<Start, End>>
for UIntType<Width>
{
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_eq(rhs.cast_to_bits())
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_ne(rhs.cast_to_bits())
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<UIntType<Width>>
for $UIntInRangeType<Start, End>
{
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_lt(rhs)
}
fn cmp_le(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_le(rhs)
}
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_gt(rhs)
}
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ge(rhs)
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<$UIntInRangeType<Start, End>>
for UIntType<Width>
{
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_lt(rhs.cast_to_bits())
}
fn cmp_le(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_le(rhs.cast_to_bits())
}
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_gt(rhs.cast_to_bits())
}
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_ge(rhs.cast_to_bits())
}
}
};
}
define_uint_in_range_type! {
UIntInRange,
UIntInRangeType,
UIntInRangeTypeWithoutGenerics,
UIntInRangeTypeWithStart,
SerdeRange,
"..",
|start, end| UInt::range_usize(start..end),
}
define_uint_in_range_type! {
UIntInRangeInclusive,
UIntInRangeInclusiveType,
UIntInRangeInclusiveTypeWithoutGenerics,
UIntInRangeInclusiveTypeWithStart,
SerdeRangeInclusive,
"..=",
|start, end| UInt::range_inclusive_usize(start..=end),
}
impl SerdeRange<DynSize, DynSize> {
fn is_empty(self) -> bool {
self.start >= self.end
}
}
impl SerdeRangeInclusive<DynSize, DynSize> {
fn is_empty(self) -> bool {
self.start > self.end
}
}
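A minimal usage sketch of the two generated families above (not part of this diff). It assumes plain `usize` values implement `SizeType` for `DynSize`, which the `as_usize` calls suggest but which is not shown here:

let a = UIntInRangeType::<DynSize, DynSize>::new(3, 17); // runtime-evaluated range 3..17
let b = UIntInRangeType[3][17]; // same type via the Index sugar defined above
assert_eq!(a, b);
let c = UIntInRangeInclusiveType::<DynSize, DynSize>::new(0, 15); // inclusive flavor, 0..=15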

File diff suppressed because it is too large

View file

@ -1,8 +1,10 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use hashbrown::HashMap;
use std::{
any::{Any, TypeId},
hash::{BuildHasher, Hasher},
ptr::NonNull,
sync::RwLock,
};
@ -73,36 +75,59 @@ impl BuildHasher for TypeIdBuildHasher {
}
}
pub(crate) struct TypeIdMap(
RwLock<hashbrown::HashMap<TypeId, &'static (dyn Any + Send + Sync), TypeIdBuildHasher>>,
);
struct Value(NonNull<dyn Any + Send + Sync>);
impl Value {
unsafe fn get_transmute_lifetime<'b>(&self) -> &'b (dyn Any + Send + Sync) {
unsafe { &*self.0.as_ptr() }
}
fn new(v: Box<dyn Any + Send + Sync>) -> Self {
unsafe { Self(NonNull::new_unchecked(Box::into_raw(v))) }
}
}
unsafe impl Send for Value {}
unsafe impl Sync for Value {}
impl Drop for Value {
fn drop(&mut self) {
unsafe { std::ptr::drop_in_place(self.0.as_ptr()) }
}
}
pub struct TypeIdMap(RwLock<HashMap<TypeId, Value, TypeIdBuildHasher>>);
impl TypeIdMap {
pub(crate) const fn new() -> Self {
Self(RwLock::new(hashbrown::HashMap::with_hasher(
TypeIdBuildHasher,
)))
pub const fn new() -> Self {
Self(RwLock::new(HashMap::with_hasher(TypeIdBuildHasher)))
}
#[cold]
fn insert_slow(
unsafe fn insert_slow(
&self,
type_id: TypeId,
make: fn() -> Box<dyn Any + Sync + Send>,
) -> &'static (dyn Any + Sync + Send) {
let value = Box::leak(make());
) -> &(dyn Any + Sync + Send) {
let value = Value::new(make());
let mut write_guard = self.0.write().unwrap();
*write_guard.entry(type_id).or_insert(value)
unsafe {
write_guard
.entry(type_id)
.or_insert(value)
.get_transmute_lifetime()
}
}
pub(crate) fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
pub fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
let type_id = TypeId::of::<T>();
let read_guard = self.0.read().unwrap();
let retval = read_guard.get(&type_id).map(|v| *v);
let retval = read_guard
.get(&type_id)
.map(|v| unsafe { Value::get_transmute_lifetime(v) });
drop(read_guard);
let retval = match retval {
Some(retval) => retval,
None => self.insert_slow(type_id, move || Box::new(T::default())),
None => unsafe { self.insert_slow(type_id, move || Box::new(T::default())) },
};
retval.downcast_ref().expect("known to have correct TypeId")
unsafe { &*(retval as *const dyn Any as *const T) }
}
}
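A sketch of intended usage (assumed, not from this diff; `Counters` is a hypothetical caller-defined type): `get_or_insert_default` hands out one lazily-created value per type, shared for as long as the map lives.

#[derive(Default)]
struct Counters {
    hits: std::sync::atomic::AtomicU64, // hypothetical per-process state
}

static REGISTRY: TypeIdMap = TypeIdMap::new();

fn counters() -> &'static Counters {
    // the first call for `Counters` builds the value via Default; later calls return the same entry
    REGISTRY.get_or_insert_default::<Counters>()
}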

View file

@ -11,60 +11,8 @@ extern crate self as fayalite;
#[doc(hidden)]
pub use std as __std;
#[doc(hidden)]
#[macro_export]
macro_rules! __cfg_expansion_helper {
(
[
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
]
[
$cfg:ident($($expr:tt)*),
$($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)*
]
// pass as tt so we get right span for attribute
$after_evaluation_attr:tt $after_evaluation_body:tt
) => {
#[$cfg($($expr)*)]
$crate::__cfg_expansion_helper! {
[
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
$cfg($($expr)*) = true,
]
[
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
]
$after_evaluation_attr $after_evaluation_body
}
#[$cfg(not($($expr)*))]
$crate::__cfg_expansion_helper! {
[
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
$cfg($($expr)*) = false,
]
[
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
]
$after_evaluation_attr $after_evaluation_body
}
};
(
[
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
]
[]
// don't use #[...] so we get right span for `#` and `[]` of attribute
{$($after_evaluation_attr:tt)*} {$($after_evaluation_body:tt)*}
) => {
$($after_evaluation_attr)*
#[__evaluated_cfgs([
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
])]
$($after_evaluation_body)*
};
}
#[doc(inline)]
#[doc(alias = "hdl")]
/// The `#[hdl_module]` attribute is applied to a Rust function so that that function creates
/// a [`Module`][`::fayalite::module::Module`] when called.
/// In the function body it will implicitly create a
@ -73,15 +21,6 @@ macro_rules! __cfg_expansion_helper {
/// See [Fayalite Modules][crate::_docs::modules]
pub use fayalite_proc_macros::hdl_module;
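A hypothetical sketch (not taken from this diff) of the attribute in use; the in-body API for declaring ports and logic is elided here because it is not shown in this hunk.

#[hdl_module]
fn counter() {
    // port / wire / register declarations would go here
}
// calling `counter()` then returns the built `Module`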
#[doc(inline)]
pub use fayalite_proc_macros::hdl;
pub use bitvec;
/// struct used as a placeholder when applying defaults
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct __;
#[cfg(feature = "unstable-doc")]
pub mod _docs;
@ -93,18 +32,15 @@ pub mod clock;
pub mod enum_;
pub mod expr;
pub mod firrtl;
pub mod formal;
pub mod generics;
pub mod int;
pub mod intern;
pub mod memory;
pub mod module;
pub mod phantom_const;
pub mod prelude;
pub mod reg;
pub mod reset;
pub mod sim;
pub mod source_location;
pub mod testing;
pub mod ty;
pub mod util;
pub mod valueless;
pub mod wire;

View file

@ -4,16 +4,15 @@
#![allow(clippy::multiple_bound_locations)]
use crate::{
annotations::{Annotation, IntoAnnotations, TargetedAnnotation},
array::{Array, ArrayType},
bundle::{Bundle, BundleType},
clock::Clock,
expr::{ops::BundleLiteral, repeat, Expr, Flow, ToExpr, ToLiteralBits},
hdl,
int::{Bool, DynSize, Size, UInt, UIntType},
array::{Array, ArrayType, ArrayTypeTrait, ValueArrayOrSlice},
bundle::{BundleType, BundleValue, DynBundle, DynBundleType},
clock::{Clock, ClockType},
expr::{Expr, ExprEnum, ExprTrait, Flow, ToExpr},
int::{DynUInt, DynUIntType, UInt, UIntType},
intern::{Intern, Interned},
module::ScopedNameId,
source_location::SourceLocation,
ty::{AsMask, CanonicalType, Type},
ty::{AsMask, DynCanonicalType, DynCanonicalValue, DynType, Type, Value},
util::DebugAsDisplay,
};
use bitvec::slice::BitSlice;
@ -21,37 +20,37 @@ use std::{
cell::RefCell,
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
num::NonZeroUsize,
num::NonZeroU32,
rc::Rc,
};
#[hdl]
pub struct ReadStruct<Element, AddrWidth: Size> {
pub addr: UIntType<AddrWidth>,
pub en: Bool,
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ReadStruct<Element> {
pub addr: DynUInt,
pub en: UInt<1>,
pub clk: Clock,
#[hdl(flip)]
pub data: Element,
}
#[hdl]
pub struct WriteStruct<Element, AddrWidth: Size> {
pub addr: UIntType<AddrWidth>,
pub en: Bool,
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub struct WriteStruct<Element: Value> {
pub addr: DynUInt,
pub en: UInt<1>,
pub clk: Clock,
pub data: Element,
pub mask: AsMask<Element>,
}
#[hdl]
pub struct ReadWriteStruct<Element, AddrWidth: Size> {
pub addr: UIntType<AddrWidth>,
pub en: Bool,
#[allow(clippy::multiple_bound_locations)]
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ReadWriteStruct<Element: Value> {
pub addr: DynUInt,
pub en: UInt<1>,
pub clk: Clock,
#[hdl(flip)]
pub rdata: Element,
pub wmode: Bool,
pub wmode: UInt<1>,
pub wdata: Element,
pub wmask: AsMask<Element>,
}
@ -64,10 +63,11 @@ pub trait PortType:
sealed::Sealed + Clone + Eq + Hash + fmt::Debug + Send + Sync + 'static
{
type PortKindTy: Copy + Eq + Hash + fmt::Debug + Send + Sync + 'static;
type Port: BundleType;
type PortType: BundleType<Value = Self::PortValue>;
type PortValue: BundleValue<Type = Self::PortType>;
fn port_kind(port_kind: Self::PortKindTy) -> PortKind;
fn from_port_kind(port_kind: PortKind) -> Self::PortKindTy;
fn port_ty(port: &MemPort<Self>) -> Self::Port;
fn port_ty(port: &MemPort<Self>) -> Self::PortType;
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@ -79,7 +79,8 @@ impl sealed::Sealed for DynPortType {}
impl PortType for DynPortType {
type PortKindTy = PortKind;
type Port = Bundle;
type PortType = DynBundleType;
type PortValue = DynBundle;
fn port_kind(port_kind: Self::PortKindTy) -> PortKind {
port_kind
@ -89,38 +90,41 @@ impl PortType for DynPortType {
port_kind
}
fn port_ty(port: &MemPort<Self>) -> Self::Port {
Bundle::new(match port.port_kind {
PortKind::ReadOnly => {
MemPort::<ReadStruct<CanonicalType, DynSize>>::from_canonical(*port)
.ty()
.fields()
}
PortKind::WriteOnly => {
MemPort::<WriteStruct<CanonicalType, DynSize>>::from_canonical(*port)
.ty()
.fields()
}
fn port_ty(port: &MemPort<Self>) -> Self::PortType {
match port.port_kind {
PortKind::ReadOnly => MemPort::<ReadStruct<DynCanonicalValue>>::from_canonical(*port)
.ty()
.canonical(),
PortKind::WriteOnly => MemPort::<WriteStruct<DynCanonicalValue>>::from_canonical(*port)
.ty()
.canonical(),
PortKind::ReadWrite => {
MemPort::<ReadWriteStruct<CanonicalType, DynSize>>::from_canonical(*port)
MemPort::<ReadWriteStruct<DynCanonicalValue>>::from_canonical(*port)
.ty()
.fields()
.canonical()
}
})
}
}
}
pub trait PortStruct: BundleType + sealed::Sealed + PortType<PortKindTy = (), Port = Self> {
type Element: Type;
pub trait PortStruct:
BundleValue
+ sealed::Sealed
+ PortType<PortKindTy = (), PortType = <Self as ToExpr>::Type, PortValue = Self>
where
Self::Type: BundleType<Value = Self>,
{
type Element: Value<Type = Self::ElementType>;
type ElementType: Type<Value = Self::Element>;
const PORT_KIND: PortKind;
fn addr(this: Expr<Self>) -> Expr<UInt>;
fn en(this: Expr<Self>) -> Expr<Bool>;
fn addr(this: Expr<Self>) -> Expr<DynUInt>;
fn en(this: Expr<Self>) -> Expr<UInt<1>>;
fn clk(this: Expr<Self>) -> Expr<Clock>;
fn rdata(this: Expr<Self>) -> Option<Expr<Self::Element>>;
fn wdata(this: Expr<Self>) -> Option<Expr<Self::Element>>;
fn wmask(this: Expr<Self>) -> Option<Expr<AsMask<Self::Element>>>;
fn wmode(this: Expr<Self>) -> Expr<Bool>;
fn wmode(this: Expr<Self>) -> Expr<UInt<1>>;
}
macro_rules! impl_port_struct {
@ -133,11 +137,19 @@ macro_rules! impl_port_struct {
$($body:tt)*
}
) => {
impl<$Element: Type> sealed::Sealed for $Struct<$Element, DynSize> {}
impl<$Element: Value> sealed::Sealed for $Struct<$Element>
where
$Element::Type: Type<Value = $Element>,
{
}
impl<$Element: Type> PortType for $Struct<$Element, DynSize> {
impl<$Element: Value> PortType for $Struct<$Element>
where
$Element::Type: Type<Value = $Element>,
{
type PortKindTy = ();
type Port = Self;
type PortType = <Self as ToExpr>::Type;
type PortValue = Self;
fn port_kind(_port_kind: Self::PortKindTy) -> PortKind {
Self::PORT_KIND
@ -152,14 +164,18 @@ macro_rules! impl_port_struct {
}
}
impl<$Element: Type> PortStruct for $Struct<$Element, DynSize> {
impl<$Element: Value> PortStruct for $Struct<$Element>
where
$Element::Type: Type<Value = $Element>,
{
type Element = $Element;
type ElementType = $Element::Type;
fn addr(this: Expr<Self>) -> Expr<UInt> {
fn addr(this: Expr<Self>) -> Expr<DynUInt> {
this.addr
}
fn en(this: Expr<Self>) -> Expr<Bool> {
fn en(this: Expr<Self>) -> Expr<UInt<1>> {
this.en
}
@ -174,9 +190,14 @@ macro_rules! impl_port_struct {
impl_port_struct! {
impl<Element> _ for ReadStruct {
fn port_ty(port: &MemPort<Self>) -> Self {
let element = Element::from_canonical(port.mem_element_type);
ReadStruct[element][port.addr_type.width()]
fn port_ty(port: &MemPort<Self>) -> <MemPort<Self> as ToExpr>::Type {
type T<V> = <V as ToExpr>::Type;
T::<Self> {
addr: port.addr_type,
en: UIntType::new(),
clk: ClockType,
data: Element::Type::from_dyn_canonical_type(port.mem_element_type),
}
}
const PORT_KIND: PortKind = PortKind::ReadOnly;
@ -193,7 +214,7 @@ impl_port_struct! {
None
}
fn wmode(_this: Expr<Self>) -> Expr<Bool> {
fn wmode(_this: Expr<Self>) -> Expr<UInt<1>> {
false.to_expr()
}
}
@ -201,9 +222,16 @@ impl_port_struct! {
impl_port_struct! {
impl<Element> _ for WriteStruct {
fn port_ty(port: &MemPort<Self>) -> Self {
let element = Element::from_canonical(port.mem_element_type);
WriteStruct[element][port.addr_type.width()]
fn port_ty(port: &MemPort<Self>) -> <MemPort<Self> as ToExpr>::Type {
type T<V> = <V as ToExpr>::Type;
let element_ty = Element::Type::from_dyn_canonical_type(port.mem_element_type);
T::<Self> {
addr: port.addr_type,
en: UIntType::new(),
clk: ClockType,
mask: element_ty.mask_type(),
data: element_ty,
}
}
const PORT_KIND: PortKind = PortKind::WriteOnly;
@ -220,7 +248,7 @@ impl_port_struct! {
Some(this.mask)
}
fn wmode(_this: Expr<Self>) -> Expr<Bool> {
fn wmode(_this: Expr<Self>) -> Expr<UInt<1>> {
true.to_expr()
}
}
@ -228,9 +256,18 @@ impl_port_struct! {
impl_port_struct! {
impl<Element> _ for ReadWriteStruct {
fn port_ty(port: &MemPort<Self>) -> Self {
let element = Element::from_canonical(port.mem_element_type);
ReadWriteStruct[element][port.addr_type.width()]
fn port_ty(port: &MemPort<Self>) -> <MemPort<Self> as ToExpr>::Type {
type T<V> = <V as ToExpr>::Type;
let element_ty = Element::Type::from_dyn_canonical_type(port.mem_element_type);
T::<Self> {
addr: port.addr_type,
en: UIntType::new(),
clk: ClockType,
rdata: element_ty.clone(),
wmode: UIntType::new(),
wmask: element_ty.mask_type(),
wdata: element_ty,
}
}
const PORT_KIND: PortKind = PortKind::ReadWrite;
@ -247,7 +284,7 @@ impl_port_struct! {
Some(this.wmask)
}
fn wmode(this: Expr<Self>) -> Expr<Bool> {
fn wmode(this: Expr<Self>) -> Expr<UInt<1>> {
this.wmode
}
}
@ -345,31 +382,42 @@ pub struct MemPort<T: PortType> {
source_location: SourceLocation,
port_kind: T::PortKindTy,
port_index: usize,
addr_type: UInt,
mem_element_type: CanonicalType,
addr_type: DynUIntType,
mem_element_type: Interned<dyn DynCanonicalType>,
}
impl<T: PortType> fmt::Debug for MemPort<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("MemPort(")?;
self.mem_name.fmt(f)?;
f.write_str(".")?;
self.port_name().fmt(f)?;
f.write_str(": ")?;
match self.port_kind() {
PortKind::ReadOnly => f.write_str("ReadStruct<")?,
PortKind::WriteOnly => f.write_str("WriteStruct<")?,
PortKind::ReadWrite => f.write_str("ReadWriteStruct<")?,
}
self.mem_element_type.fmt(f)?;
f.write_str(">)")
let Self {
mem_name,
source_location: _,
port_kind: _,
port_index: _,
addr_type,
mem_element_type,
} = self;
f.debug_struct("MemPort")
.field("mem_name", mem_name)
.field("port_name", &self.port_name())
.field("addr_type", addr_type)
.field("mem_element_type", mem_element_type)
.finish_non_exhaustive()
}
}
impl<T: PortType> ToExpr for MemPort<T> {
type Type = T::PortType;
fn ty(&self) -> Self::Type {
T::port_ty(self)
}
fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
Expr::new_unchecked(self.expr_enum())
}
}
impl<T: PortType> MemPort<T> {
pub fn ty(&self) -> T::Port {
T::port_ty(self)
}
pub fn source_location(&self) -> SourceLocation {
self.source_location
}
@ -388,10 +436,10 @@ impl<T: PortType> MemPort<T> {
index: self.port_index,
}
}
pub fn mem_element_type(&self) -> CanonicalType {
pub fn mem_element_type(&self) -> Interned<dyn DynCanonicalType> {
self.mem_element_type
}
pub fn addr_type(&self) -> UInt {
pub fn addr_type(&self) -> DynUIntType {
self.addr_type
}
pub fn canonical(&self) -> MemPort<DynPortType> {
@ -415,6 +463,7 @@ impl<T: PortType> MemPort<T> {
pub fn from_canonical(port: MemPort<DynPortType>) -> Self
where
T: PortStruct,
T::Type: BundleType<Value = T>,
{
let MemPort {
mem_name,
@ -439,8 +488,8 @@ impl<T: PortType> MemPort<T> {
mem_name: ScopedNameId,
source_location: SourceLocation,
port_name: PortName,
addr_type: UInt,
mem_element_type: CanonicalType,
addr_type: DynUIntType,
mem_element_type: Interned<dyn DynCanonicalType>,
) -> Self {
assert!(
mem_element_type.is_storable(),
@ -470,25 +519,25 @@ pub enum ReadUnderWrite {
Undefined,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MemImpl<Element: Type, Len: Size, P> {
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct MemImpl<T: ArrayTypeTrait, P> {
scoped_name: ScopedNameId,
source_location: SourceLocation,
array_type: ArrayType<Element, Len>,
array_type: T,
initial_value: Option<Interned<BitSlice>>,
ports: P,
read_latency: usize,
write_latency: NonZeroUsize,
write_latency: NonZeroU32,
read_under_write: ReadUnderWrite,
port_annotations: Interned<[TargetedAnnotation]>,
mem_annotations: Interned<[Annotation]>,
}
pub struct Mem<Element: Type = CanonicalType, Len: Size = DynSize>(
Interned<MemImpl<Element, Len, Interned<[MemPort<DynPortType>]>>>,
pub struct Mem<VA: ValueArrayOrSlice + ?Sized>(
Interned<MemImpl<ArrayType<VA>, Interned<[Interned<MemPort<DynPortType>>]>>>,
);
struct PortsDebug<'a>(&'a [MemPort<DynPortType>]);
struct PortsDebug<'a>(&'a [Interned<MemPort<DynPortType>>]);
impl fmt::Debug for PortsDebug<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@ -502,7 +551,7 @@ impl fmt::Debug for PortsDebug<'_> {
}
}
impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> fmt::Debug for Mem<VA> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let MemImpl {
scoped_name,
@ -519,12 +568,7 @@ impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
f.debug_struct("Mem")
.field("name", scoped_name)
.field("array_type", array_type)
.field(
"initial_value",
&initial_value.as_ref().map(|initial_value| {
DebugMemoryData::from_bit_slice(*array_type, initial_value)
}),
)
.field("initial_value", initial_value)
.field("read_latency", read_latency)
.field("write_latency", write_latency)
.field("read_under_write", read_under_write)
@ -535,52 +579,52 @@ impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
}
}
impl<Element: Type, Len: Size> Hash for Mem<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> Hash for Mem<VA> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
impl<Element: Type, Len: Size> Eq for Mem<Element, Len> {}
impl<VA: ValueArrayOrSlice + ?Sized> Eq for Mem<VA> {}
impl<Element: Type, Len: Size> PartialEq for Mem<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> PartialEq for Mem<VA> {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
impl<Element: Type, Len: Size> Copy for Mem<Element, Len> {}
impl<VA: ValueArrayOrSlice + ?Sized> Copy for Mem<VA> {}
impl<Element: Type, Len: Size> Clone for Mem<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> Clone for Mem<VA> {
fn clone(&self) -> Self {
*self
}
}
impl<Element: Type, Len: Size> Mem<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
#[allow(clippy::too_many_arguments)]
#[track_caller]
pub fn new_unchecked(
scoped_name: ScopedNameId,
source_location: SourceLocation,
array_type: ArrayType<Element, Len>,
array_type: ArrayType<VA>,
initial_value: Option<Interned<BitSlice>>,
ports: Interned<[MemPort<DynPortType>]>,
ports: Interned<[Interned<MemPort<DynPortType>>]>,
read_latency: usize,
write_latency: NonZeroUsize,
write_latency: NonZeroU32,
read_under_write: ReadUnderWrite,
port_annotations: Interned<[TargetedAnnotation]>,
mem_annotations: Interned<[Annotation]>,
) -> Self {
if let Some(initial_value) = initial_value {
MemBuilder::<Element, Len>::check_initial_value_bit_slice(
MemBuilder::<VA>::check_initial_value_bit_slice(
array_type.element(),
Some(array_type.len()),
initial_value,
);
}
let addr_width = memory_addr_width(array_type.len());
let expected_mem_element_type = array_type.element().canonical();
let expected_mem_element_type = array_type.element().canonical_dyn();
assert!(
expected_mem_element_type.is_storable(),
"memory element type must be a storable type"
@ -593,7 +637,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
port_index,
addr_type,
mem_element_type,
} = *port;
} = **port;
assert_eq!(mem_name, scoped_name, "memory name must match with ports");
assert_eq!(
port_index, index,
@ -615,7 +659,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
};
assert_eq!(
Some(port),
ports.get(port.port_index),
ports.get(port.port_index).map(|v| &**v),
"port on memory must match annotation's target base"
);
}
@ -638,19 +682,19 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
pub fn source_location(self) -> SourceLocation {
self.0.source_location
}
pub fn array_type(self) -> ArrayType<Element, Len> {
self.0.array_type
pub fn array_type(self) -> ArrayType<VA> {
self.0.array_type.clone()
}
pub fn initial_value(self) -> Option<Interned<BitSlice>> {
self.0.initial_value
}
pub fn ports(self) -> Interned<[MemPort<DynPortType>]> {
pub fn ports(self) -> Interned<[Interned<MemPort<DynPortType>>]> {
self.0.ports
}
pub fn read_latency(self) -> usize {
self.0.read_latency
}
pub fn write_latency(self) -> NonZeroUsize {
pub fn write_latency(self) -> NonZeroU32 {
self.0.write_latency
}
pub fn read_under_write(self) -> ReadUnderWrite {
@ -662,7 +706,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
pub fn mem_annotations(self) -> Interned<[Annotation]> {
self.0.mem_annotations
}
pub fn canonical(self) -> Mem {
pub fn canonical(self) -> Mem<[DynCanonicalValue]> {
let MemImpl {
scoped_name,
source_location,
@ -675,7 +719,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
port_annotations,
mem_annotations,
} = *self.0;
let array_type = array_type.as_dyn_array();
let array_type = array_type.canonical();
Mem(Intern::intern_sized(MemImpl {
scoped_name,
source_location,
@ -707,23 +751,23 @@ impl<T: fmt::Debug> fmt::Debug for MaybeSpecified<T> {
pub(crate) struct MemBuilderTarget {
pub(crate) scoped_name: ScopedNameId,
pub(crate) source_location: SourceLocation,
pub(crate) mem_element_type: CanonicalType,
pub(crate) mem_element_type: Interned<dyn DynCanonicalType>,
pub(crate) depth: Option<usize>,
pub(crate) initial_value: Option<Interned<BitSlice>>,
pub(crate) ports: Vec<MemPort<DynPortType>>,
pub(crate) ports: Vec<Interned<MemPort<DynPortType>>>,
pub(crate) read_latency: usize,
pub(crate) write_latency: NonZeroUsize,
pub(crate) write_latency: NonZeroU32,
pub(crate) read_under_write: ReadUnderWrite,
pub(crate) port_annotations: Vec<TargetedAnnotation>,
pub(crate) mem_annotations: Vec<Annotation>,
}
impl MemBuilderTarget {
pub(crate) fn make_memory(&self) -> Option<Mem> {
pub(crate) fn make_memory(&self) -> Option<Mem<[DynCanonicalValue]>> {
Some(Mem::new_unchecked(
self.scoped_name,
self.source_location,
ArrayType::new_dyn(self.mem_element_type, self.depth?),
ArrayType::new_slice(self.mem_element_type, self.depth?),
self.initial_value,
Intern::intern(&self.ports),
self.read_latency,
@ -773,18 +817,16 @@ impl fmt::Debug for MemBuilderTarget {
}
}
pub struct MemBuilder<Element: Type, Len: Size = DynSize> {
mem_element_type: Element,
pub struct MemBuilder<VA: ValueArrayOrSlice + ?Sized> {
mem_element_type: VA::ElementType,
target: Rc<RefCell<MemBuilderTarget>>,
_phantom: PhantomData<Len>,
}
impl<Element: Type, Len: Size> fmt::Debug for MemBuilder<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> fmt::Debug for MemBuilder<VA> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
mem_element_type,
target,
_phantom: _,
} = &self;
target.borrow().debug_fmt("MemBuilder", mem_element_type, f)
}
@ -796,16 +838,15 @@ pub fn memory_addr_width(depth: usize) -> usize {
.map_or(usize::BITS, usize::ilog2) as usize
}
impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
#[track_caller]
fn check_initial_value_bit_slice(
mem_element_type: Element,
mem_element_type: &VA::ElementType,
depth: Option<usize>,
initial_value: Interned<BitSlice>,
) -> Interned<BitSlice> {
let element_bit_width = mem_element_type.canonical().bit_width();
if let Some(depth) = depth {
let expected_len = depth.checked_mul(element_bit_width).expect(
let expected_len = depth.checked_mul(mem_element_type.bit_width()).expect(
"memory must be small enough that its initializer bit length fits in usize",
);
assert_eq!(
@ -817,7 +858,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
assert!(
initial_value
.len()
.checked_rem(element_bit_width)
.checked_rem(mem_element_type.bit_width())
.unwrap_or(initial_value.len())
== 0,
"Mem's initializer bit length must be a multiple of the element type's bit width",
@ -826,14 +867,14 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
}
#[track_caller]
fn check_initial_value_expr(
mem_element_type: &Element,
mem_element_type: &VA::ElementType,
depth: Option<usize>,
initial_value: Expr<Array>,
initial_value: Expr<Array<[DynCanonicalValue]>>,
) -> Interned<BitSlice> {
let initial_value_ty = Expr::ty(initial_value);
let initial_value_ty = initial_value.canonical_type();
assert_eq!(
*mem_element_type,
Element::from_canonical(initial_value_ty.element()),
<VA::ElementType as DynType>::from_dyn_canonical_type(*initial_value_ty.element()),
"Mem's element type must match initializer's element type",
);
if let Some(depth) = depth {
@ -848,7 +889,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
};
debug_assert_eq!(
retval.len(),
initial_value_ty.type_properties().bit_width,
initial_value_ty.bit_width(),
"initial value produced wrong literal bits length"
);
retval
@ -857,9 +898,9 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
pub(crate) fn new(
scoped_name: ScopedNameId,
source_location: SourceLocation,
mem_element_type: Element,
mem_element_type: VA::ElementType,
) -> (Self, Rc<RefCell<MemBuilderTarget>>) {
let canonical_mem_element_type = mem_element_type.canonical();
let canonical_mem_element_type = mem_element_type.canonical_dyn();
assert!(
canonical_mem_element_type.is_storable(),
"memory element type must be a storable type"
@ -868,11 +909,11 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
scoped_name,
source_location,
mem_element_type: canonical_mem_element_type,
depth: Len::KNOWN_VALUE,
depth: VA::FIXED_LEN_TYPE.map(VA::len_from_len_type),
initial_value: None,
ports: vec![],
read_latency: 0,
write_latency: NonZeroUsize::new(1).unwrap(),
write_latency: NonZeroU32::new(1).unwrap(),
read_under_write: ReadUnderWrite::Old,
port_annotations: vec![],
mem_annotations: vec![],
@ -881,7 +922,6 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
Self {
mem_element_type,
target: Rc::clone(&target),
_phantom: PhantomData,
},
target,
)
@ -891,19 +931,19 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
&mut self,
source_location: SourceLocation,
port_kind: PortKind,
) -> MemPort<DynPortType> {
) -> Interned<MemPort<DynPortType>> {
let mut target = self.target.borrow_mut();
let Some(depth) = target.depth else {
panic!("MemBuilder::depth must be called before adding ports");
};
let port = MemPort {
let port = Intern::intern_sized(MemPort {
mem_name: target.scoped_name,
source_location,
port_kind,
port_index: target.ports.len(),
addr_type: UInt::new(memory_addr_width(depth)),
addr_type: DynUIntType::new(memory_addr_width(depth)),
mem_element_type: target.mem_element_type,
};
});
target.ports.push(port);
port
}
@ -912,53 +952,50 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
&mut self,
source_location: SourceLocation,
kind: PortKind,
) -> Expr<Bundle> {
self.new_port_impl(source_location, kind).to_expr()
) -> Expr<DynBundle> {
Expr::new_unchecked(ExprEnum::MemPort(self.new_port_impl(source_location, kind)))
}
#[track_caller]
pub fn new_port(&mut self, kind: PortKind) -> Expr<Bundle> {
pub fn new_port(&mut self, kind: PortKind) -> Expr<DynBundle> {
self.new_port_with_loc(SourceLocation::caller(), kind)
}
#[track_caller]
pub fn new_read_port_with_loc(
&mut self,
source_location: SourceLocation,
) -> Expr<ReadStruct<Element, DynSize>> {
Expr::from_bundle(
self.new_port_impl(source_location, PortKind::ReadOnly)
.to_expr(),
)
) -> Expr<ReadStruct<VA::Element>> {
Expr::new_unchecked(ExprEnum::MemPort(
self.new_port_impl(source_location, PortKind::ReadOnly),
))
}
#[track_caller]
pub fn new_read_port(&mut self) -> Expr<ReadStruct<Element, DynSize>> {
pub fn new_read_port(&mut self) -> Expr<ReadStruct<VA::Element>> {
self.new_read_port_with_loc(SourceLocation::caller())
}
#[track_caller]
pub fn new_write_port_with_loc(
&mut self,
source_location: SourceLocation,
) -> Expr<WriteStruct<Element, DynSize>> {
Expr::from_bundle(
self.new_port_impl(source_location, PortKind::WriteOnly)
.to_expr(),
)
) -> Expr<WriteStruct<VA::Element>> {
Expr::new_unchecked(ExprEnum::MemPort(
self.new_port_impl(source_location, PortKind::WriteOnly),
))
}
#[track_caller]
pub fn new_write_port(&mut self) -> Expr<WriteStruct<Element, DynSize>> {
pub fn new_write_port(&mut self) -> Expr<WriteStruct<VA::Element>> {
self.new_write_port_with_loc(SourceLocation::caller())
}
#[track_caller]
pub fn new_rw_port_with_loc(
&mut self,
source_location: SourceLocation,
) -> Expr<ReadWriteStruct<Element, DynSize>> {
Expr::from_bundle(
self.new_port_impl(source_location, PortKind::ReadWrite)
.to_expr(),
)
) -> Expr<ReadWriteStruct<VA::Element>> {
Expr::new_unchecked(ExprEnum::MemPort(
self.new_port_impl(source_location, PortKind::ReadWrite),
))
}
#[track_caller]
pub fn new_rw_port(&mut self) -> Expr<ReadWriteStruct<Element, DynSize>> {
pub fn new_rw_port(&mut self) -> Expr<ReadWriteStruct<VA::Element>> {
self.new_rw_port_with_loc(SourceLocation::caller())
}
pub fn scoped_name(&self) -> ScopedNameId {
@ -967,7 +1004,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
pub fn source_location(&self) -> SourceLocation {
self.target.borrow().source_location
}
pub fn get_mem_element_type(&self) -> &Element {
pub fn get_mem_element_type(&self) -> &VA::ElementType {
&self.mem_element_type
}
#[allow(clippy::result_unit_err)]
@ -990,28 +1027,28 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
target.depth = Some(depth);
}
#[allow(clippy::result_unit_err)]
pub fn get_array_type(&self) -> Result<ArrayType<Element, Len>, ()> {
Ok(ArrayType::new(
self.mem_element_type,
Len::from_usize(self.get_depth()?),
pub fn get_array_type(&self) -> Result<ArrayType<VA>, ()> {
Ok(ArrayType::new_with_len(
self.mem_element_type.clone(),
self.get_depth()?,
))
}
pub fn get_initial_value(&self) -> Option<Interned<BitSlice>> {
self.target.borrow().initial_value
}
#[track_caller]
pub fn initial_value(&mut self, initial_value: impl ToExpr<Type = ArrayType<Element, Len>>) {
pub fn initial_value(&mut self, initial_value: impl ToExpr<Type = ArrayType<VA>>) {
let mut target = self.target.borrow_mut();
if target.initial_value.is_some() {
panic!("can't set Mem's initial value more than once");
}
let initial_value = Expr::as_dyn_array(initial_value.to_expr());
let initial_value = initial_value.to_expr().canonical();
target.initial_value = Some(Self::check_initial_value_expr(
&self.mem_element_type,
target.depth,
initial_value,
));
target.depth = Some(Expr::ty(initial_value).len());
target.depth = Some(initial_value.ty().len());
}
#[track_caller]
pub fn initial_value_bit_slice(&mut self, initial_value: Interned<BitSlice>) {
@ -1020,11 +1057,11 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
panic!("can't set Mem's initial value more than once");
}
target.initial_value = Some(Self::check_initial_value_bit_slice(
self.mem_element_type,
&self.mem_element_type,
target.depth,
initial_value,
));
let element_bit_width = self.mem_element_type.canonical().bit_width();
let element_bit_width = self.mem_element_type.bit_width();
if element_bit_width != 0 {
target.depth = Some(initial_value.len() / element_bit_width);
}
@ -1035,10 +1072,10 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
pub fn read_latency(&mut self, read_latency: usize) {
self.target.borrow_mut().read_latency = read_latency;
}
pub fn get_write_latency(&self) -> NonZeroUsize {
pub fn get_write_latency(&self) -> NonZeroU32 {
self.target.borrow().write_latency
}
pub fn write_latency(&mut self, write_latency: NonZeroUsize) {
pub fn write_latency(&mut self, write_latency: NonZeroU32) {
self.target.borrow_mut().write_latency = write_latency;
}
pub fn get_read_under_write(&self) -> ReadUnderWrite {
@ -1055,91 +1092,3 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
.extend(annotations.into_annotations());
}
}
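A sketch of the port-builder surface shown above (assumption: `mem` is a `MemBuilder` obtained elsewhere, e.g. inside a module body; that entry point is not part of this diff).

let rd = mem.new_read_port();  // read-port bundle expression
let wr = mem.new_write_port(); // write-port bundle expression
mem.read_latency(0);           // combinational read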
pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
let canonical_ty = ty.canonical();
match canonical_ty {
CanonicalType::UInt(_)
| CanonicalType::SInt(_)
| CanonicalType::Bool(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_)
| CanonicalType::Clock(_)
| CanonicalType::Enum(_) => Expr::from_canonical(Expr::canonical(value)),
CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat(
splat_mask(array.element(), value),
array.len(),
))),
CanonicalType::Bundle(bundle) => Expr::from_canonical(Expr::canonical(
BundleLiteral::new(
bundle.mask_type(),
bundle
.fields()
.iter()
.map(|field| splat_mask(field.ty, value))
.collect(),
)
.to_expr(),
)),
CanonicalType::PhantomConst(_) => Expr::from_canonical(Expr::canonical(().to_expr())),
}
}
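A sketch of assumed usage (`my_ty` and `MyType` are hypothetical): `splat_mask` replicates a single `Bool` across every leaf of a type's mask, e.g. to enable or disable an entire write at once.

let all_on: Expr<AsMask<MyType>> = splat_mask(my_ty, true.to_expr());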
pub trait DebugMemoryDataGetElement {
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice;
}
impl<'a, F: ?Sized + Fn(usize, Array) -> &'a BitSlice> DebugMemoryDataGetElement for &'a F {
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
self(element_index, array_type)
}
}
#[derive(Clone)]
pub struct DebugMemoryData<GetElement: DebugMemoryDataGetElement> {
pub array_type: Array,
pub get_element: GetElement,
}
impl DebugMemoryDataGetElement for &'_ BitSlice {
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
assert!(element_index < array_type.len());
let stride = array_type.element().bit_width();
let start = element_index
.checked_mul(stride)
.expect("memory is too big");
let end = start.checked_add(stride).expect("memory is too big");
&self[start..end]
}
}
impl<'a> DebugMemoryData<&'a BitSlice> {
pub fn from_bit_slice<T: Type, Depth: Size>(
array_type: ArrayType<T, Depth>,
bit_slice: &'a BitSlice,
) -> Self {
let array_type = array_type.as_dyn_array();
assert_eq!(bit_slice.len(), array_type.type_properties().bit_width);
Self {
array_type,
get_element: bit_slice,
}
}
}
impl<GetElement: DebugMemoryDataGetElement> fmt::Debug for DebugMemoryData<GetElement> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.array_type.len() == 0 {
return f.write_str("[]");
}
writeln!(f, "[\n // len = {:#x}", self.array_type.len())?;
for element_index in 0..self.array_type.len() {
let element = crate::util::BitSliceWriteWithBase(
self.get_element.get_element(element_index, self.array_type),
);
writeln!(f, " [{element_index:#x}]: {element:#x},")?;
}
f.write_str("]")
}
}
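A sketch with hypothetical `array_type` and `bits` (the slice length must equal the array type's bit width, as the assert above requires), viewing raw memory contents the way `Mem`'s Debug impl formats its initial value.

let view = DebugMemoryData::from_bit_slice(array_type, &bits);
println!("{view:?}"); // one line per element, index and data printed in hex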

File diff suppressed because it is too large

View file

@ -1,6 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub mod deduce_resets;
pub mod simplify_enums;
pub mod simplify_memories;
pub mod visit;

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -2,22 +2,23 @@
// See Notices.txt for copyright information
use crate::{
annotations::TargetedAnnotation,
array::Array,
bundle::{Bundle, BundleType},
expr::{CastBitsTo, CastToBits, Expr, ExprEnum, ToExpr},
int::{Bool, SInt, Size, UInt},
array::{Array, ArrayType, ValueArrayOrSlice},
bundle::{BundleType, BundleValue, DynBundle},
expr::{Expr, ExprEnum, ToExpr},
int::{DynSInt, DynSIntType, DynUInt, DynUIntType},
intern::{Intern, Interned},
memory::{Mem, MemPort, PortType},
module::{
transform::visit::{Fold, Folder},
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
Block, Module, NameId, NameIdGen, ScopedNameId, Stmt, StmtConnect, StmtWire,
},
source_location::SourceLocation,
ty::{CanonicalType, Type},
util::{HashMap, MakeMutSlice},
ty::{DynCanonicalValue, DynType, Type, TypeEnum},
util::MakeMutSlice,
wire::Wire,
};
use bitvec::{slice::BitSlice, vec::BitVec};
use hashbrown::HashMap;
use std::{
convert::Infallible,
fmt::Write,
@ -27,31 +28,26 @@ use std::{
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum SingleType {
UInt(UInt),
SInt(SInt),
Bool(Bool),
UIntArray(Array<UInt>),
SIntArray(Array<SInt>),
BoolArray(Array<Bool>),
UInt(DynUIntType),
SInt(DynSIntType),
UIntArray(ArrayType<[DynUInt]>),
SIntArray(ArrayType<[DynSInt]>),
}
impl SingleType {
fn is_array_type(self, array_type: Array) -> bool {
fn is_array_type(self, array_type: ArrayType<[DynCanonicalValue]>) -> bool {
match self {
SingleType::UInt(_) | SingleType::SInt(_) | SingleType::Bool(_) => false,
SingleType::UIntArray(ty) => ty.as_dyn_array() == array_type,
SingleType::SIntArray(ty) => ty.as_dyn_array() == array_type,
SingleType::BoolArray(ty) => ty.as_dyn_array() == array_type,
SingleType::UInt(_) | SingleType::SInt(_) => false,
SingleType::UIntArray(ty) => ty.canonical() == array_type,
SingleType::SIntArray(ty) => ty.canonical() == array_type,
}
}
fn array_len(self) -> usize {
match self {
SingleType::UInt(_ty) => 1,
SingleType::SInt(_ty) => 1,
SingleType::Bool(_ty) => 1,
SingleType::UIntArray(ty) => ty.len(),
SingleType::SIntArray(ty) => ty.len(),
SingleType::BoolArray(ty) => ty.len(),
}
}
}
@ -61,9 +57,8 @@ enum MemSplit {
Bundle {
fields: Rc<[MemSplit]>,
},
PhantomConst,
Single {
output_mem: Option<Mem>,
output_mem: Option<Mem<[DynCanonicalValue]>>,
element_type: SingleType,
unchanged_element_type: bool,
},
@ -76,7 +71,6 @@ impl MemSplit {
fn mark_changed_element_type(self) -> Self {
match self {
MemSplit::Bundle { fields: _ } => self,
MemSplit::PhantomConst => self,
MemSplit::Single {
output_mem,
element_type,
@ -89,18 +83,17 @@ impl MemSplit {
MemSplit::Array { elements: _ } => self,
}
}
fn new(element_type: CanonicalType) -> Self {
fn new(element_type: TypeEnum) -> Self {
match element_type {
CanonicalType::Bundle(bundle_ty) => MemSplit::Bundle {
TypeEnum::BundleType(bundle_ty) => MemSplit::Bundle {
fields: bundle_ty
.fields()
.into_iter()
.map(|field| Self::new(field.ty).mark_changed_element_type())
.map(|field| Self::new(field.ty.type_enum()).mark_changed_element_type())
.collect(),
},
CanonicalType::PhantomConst(_) => MemSplit::PhantomConst,
CanonicalType::Array(ty) => {
let element = MemSplit::new(ty.element());
TypeEnum::ArrayType(ty) => {
let element = MemSplit::new(ty.element().type_enum());
if let Self::Single {
output_mem: _,
element_type,
@ -110,7 +103,7 @@ impl MemSplit {
match element_type {
SingleType::UInt(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::UIntArray(Array::new_dyn(
element_type: SingleType::UIntArray(ArrayType::new_slice(
element_type,
ty.len(),
)),
@ -118,15 +111,7 @@ impl MemSplit {
},
SingleType::SInt(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::SIntArray(Array::new_dyn(
element_type,
ty.len(),
)),
unchanged_element_type,
},
SingleType::Bool(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::BoolArray(Array::new_dyn(
element_type: SingleType::SIntArray(ArrayType::new_slice(
element_type,
ty.len(),
)),
@ -134,8 +119,8 @@ impl MemSplit {
},
SingleType::UIntArray(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::UIntArray(Array::new_dyn(
element_type.element(),
element_type: SingleType::UIntArray(ArrayType::new_slice(
*element_type.element(),
ty.len()
.checked_mul(element_type.len())
.expect("memory element type can't be too big"),
@ -144,18 +129,8 @@ impl MemSplit {
},
SingleType::SIntArray(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::SIntArray(Array::new_dyn(
element_type.element(),
ty.len()
.checked_mul(element_type.len())
.expect("memory element type can't be too big"),
)),
unchanged_element_type: false,
},
SingleType::BoolArray(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::BoolArray(Array::new_dyn(
element_type.element(),
element_type: SingleType::SIntArray(ArrayType::new_slice(
*element_type.element(),
ty.len()
.checked_mul(element_type.len())
.expect("memory element type can't be too big"),
@ -170,30 +145,25 @@ impl MemSplit {
}
}
}
CanonicalType::UInt(ty) => Self::Single {
TypeEnum::UInt(ty) => Self::Single {
output_mem: None,
element_type: SingleType::UInt(ty),
unchanged_element_type: true,
},
CanonicalType::SInt(ty) => Self::Single {
TypeEnum::SInt(ty) => Self::Single {
output_mem: None,
element_type: SingleType::SInt(ty),
unchanged_element_type: true,
},
CanonicalType::Bool(ty) => Self::Single {
TypeEnum::EnumType(ty) => Self::Single {
output_mem: None,
element_type: SingleType::Bool(ty),
unchanged_element_type: true,
},
CanonicalType::Enum(ty) => Self::Single {
output_mem: None,
element_type: SingleType::UInt(UInt::new_dyn(ty.type_properties().bit_width)),
element_type: SingleType::UInt(DynUIntType::new(ty.bit_width())),
unchanged_element_type: false,
},
CanonicalType::Clock(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
TypeEnum::Clock(_)
| TypeEnum::AsyncReset(_)
| TypeEnum::SyncReset(_)
| TypeEnum::Reset(_) => unreachable!("memory element type is a storable type"),
}
}
}
@ -203,9 +173,9 @@ struct MemState {
}
struct SplitState<'a> {
wire_rdata: Box<[Option<Expr<CanonicalType>>]>,
wire_wdata: Box<[Option<Expr<CanonicalType>>]>,
wire_wmask: Box<[Option<Expr<CanonicalType>>]>,
wire_rdata: Box<[Option<Expr<DynCanonicalValue>>]>,
wire_wdata: Box<[Option<Expr<DynCanonicalValue>>]>,
wire_wmask: Box<[Option<Expr<DynCanonicalValue>>]>,
initial_value: Option<Box<[&'a BitSlice]>>,
}
@ -253,7 +223,7 @@ impl<'a> SplitStateStack<'a> {
}
fn push_map(
&mut self,
mut wire_map: impl FnMut(Expr<CanonicalType>) -> Expr<CanonicalType>,
mut wire_map: impl FnMut(Expr<DynCanonicalValue>) -> Expr<DynCanonicalValue>,
mut initial_value_element_map: impl FnMut(&BitSlice) -> &BitSlice,
) {
let top_index = self.top_index + 1;
@ -290,10 +260,10 @@ impl<'a> SplitStateStack<'a> {
struct SplitMemState<'a, 'b> {
module_state: &'a mut ModuleState,
input_mem: Mem,
output_mems: &'a mut Vec<Mem>,
input_mem: Mem<[DynCanonicalValue]>,
output_mems: &'a mut Vec<Mem<[DynCanonicalValue]>>,
output_stmts: &'a mut Vec<Stmt>,
element_type: CanonicalType,
element_type: TypeEnum,
split: &'a mut MemSplit,
mem_name_path: &'a mut String,
split_state_stack: &'a mut SplitStateStack<'b>,
@ -305,7 +275,7 @@ impl SplitMemState<'_, '_> {
let outer_mem_name_path_len = self.mem_name_path.len();
match self.split {
MemSplit::Bundle { fields } => {
let CanonicalType::Bundle(bundle_type) = self.element_type else {
let TypeEnum::BundleType(bundle_type) = self.element_type else {
unreachable!();
};
for ((field, field_offset), split) in bundle_type
@ -319,9 +289,7 @@ impl SplitMemState<'_, '_> {
self.mem_name_path.push_str(&field.name);
let field_ty_bit_width = field.ty.bit_width();
self.split_state_stack.push_map(
|e: Expr<CanonicalType>| {
Expr::field(Expr::<Bundle>::from_canonical(e), &field.name)
},
|e: Expr<DynCanonicalValue>| e.with_type::<DynBundle>().field(&field.name),
|initial_value_element| {
&initial_value_element[field_offset..][..field_ty_bit_width]
},
@ -331,7 +299,7 @@ impl SplitMemState<'_, '_> {
input_mem: self.input_mem,
output_mems: self.output_mems,
output_stmts: self.output_stmts,
element_type: field.ty,
element_type: field.ty.type_enum(),
split,
mem_name_path: self.mem_name_path,
split_state_stack: self.split_state_stack,
@ -341,7 +309,6 @@ impl SplitMemState<'_, '_> {
self.split_state_stack.pop();
}
}
MemSplit::PhantomConst => {}
MemSplit::Single {
output_mem,
element_type: single_type,
@ -361,7 +328,7 @@ impl SplitMemState<'_, '_> {
.zip(self.mem_state.replacement_ports.iter())
{
let port_expr = port.to_expr();
let wire_expr = Expr::<Bundle>::from_canonical(wire.to_expr());
let wire_expr = Expr::<DynBundle>::new_unchecked(*wire);
for name in [
Some("addr"),
Some("clk"),
@ -373,8 +340,8 @@ impl SplitMemState<'_, '_> {
};
self.output_stmts.push(
StmtConnect {
lhs: Expr::field(port_expr, name),
rhs: Expr::field(wire_expr, name),
lhs: port_expr.field(name),
rhs: wire_expr.field(name),
source_location: port.source_location(),
}
.into(),
@ -385,16 +352,18 @@ impl SplitMemState<'_, '_> {
self.output_mems.push(new_mem);
}
MemSplit::Array { elements } => {
let CanonicalType::Array(array_type) = self.element_type else {
let TypeEnum::ArrayType(array_type) = self.element_type else {
unreachable!();
};
let element_type = array_type.element();
let element_bit_width = element_type.bit_width();
let element_type = array_type.element().type_enum();
let element_bit_width = array_type.element().bit_width();
for (index, split) in elements.make_mut_slice().iter_mut().enumerate() {
self.mem_name_path.truncate(outer_mem_name_path_len);
write!(self.mem_name_path, "_{index}").unwrap();
self.split_state_stack.push_map(
|e| Expr::<Array>::from_canonical(e)[index],
|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()[index]
},
|initial_value_element| {
&initial_value_element[index * element_bit_width..][..element_bit_width]
},
@ -419,7 +388,8 @@ impl SplitMemState<'_, '_> {
}
struct ModuleState {
output_module: Option<Interned<Module<Bundle>>>,
output_module: Option<Interned<Module<DynBundle>>>,
name_id_gen: NameIdGen,
memories: HashMap<ScopedNameId, MemState>,
}
@ -427,18 +397,18 @@ impl ModuleState {
#[allow(clippy::too_many_arguments)]
fn connect_split_mem_port_arrays(
output_stmts: &mut Vec<Stmt>,
input_array_types: &[Array],
input_array_types: &[ArrayType<[DynCanonicalValue]>],
memory_element_array_range_start: usize,
memory_element_array_range_len: usize,
wire_rdata: Option<Expr<CanonicalType>>,
wire_wdata: Option<Expr<CanonicalType>>,
wire_wmask: Option<Expr<CanonicalType>>,
port_rdata: Option<Expr<Array>>,
port_wdata: Option<Expr<Array>>,
port_wmask: Option<Expr<Array>>,
connect_rdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<CanonicalType>, Expr<CanonicalType>),
connect_wdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<CanonicalType>, Expr<CanonicalType>),
connect_wmask: impl Copy + Fn(&mut Vec<Stmt>, Expr<CanonicalType>, Expr<CanonicalType>),
wire_rdata: Option<Expr<DynCanonicalValue>>,
wire_wdata: Option<Expr<DynCanonicalValue>>,
wire_wmask: Option<Expr<DynCanonicalValue>>,
port_rdata: Option<Expr<Array<[DynCanonicalValue]>>>,
port_wdata: Option<Expr<Array<[DynCanonicalValue]>>>,
port_wmask: Option<Expr<Array<[DynCanonicalValue]>>>,
connect_rdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<DynCanonicalValue>, Expr<DynCanonicalValue>),
connect_wdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<DynCanonicalValue>, Expr<DynCanonicalValue>),
connect_wmask: impl Copy + Fn(&mut Vec<Stmt>, Expr<DynCanonicalValue>, Expr<DynCanonicalValue>),
) {
let Some((input_array_type, input_array_types_rest)) = input_array_types.split_first()
else {
@ -460,7 +430,8 @@ impl ModuleState {
assert_eq!(memory_element_array_range_len % input_array_type.len(), 0);
let chunk_size = memory_element_array_range_len / input_array_type.len();
for index in 0..input_array_type.len() {
let map = |e| Expr::<Array>::from_canonical(e)[index];
let map =
|e: Expr<DynCanonicalValue>| e.with_type::<Array<[DynCanonicalValue]>>()[index];
let wire_rdata = wire_rdata.map(map);
let wire_wdata = wire_wdata.map(map);
let wire_wmask = wire_wmask.map(map);
@ -485,20 +456,20 @@ impl ModuleState {
fn connect_split_mem_port(
&mut self,
output_stmts: &mut Vec<Stmt>,
mut input_element_type: CanonicalType,
mut input_element_type: TypeEnum,
single_type: SingleType,
source_location: SourceLocation,
wire_rdata: Option<Expr<CanonicalType>>,
wire_wdata: Option<Expr<CanonicalType>>,
wire_wmask: Option<Expr<CanonicalType>>,
port_rdata: Option<Expr<CanonicalType>>,
port_wdata: Option<Expr<CanonicalType>>,
port_wmask: Option<Expr<CanonicalType>>,
wire_rdata: Option<Expr<DynCanonicalValue>>,
wire_wdata: Option<Expr<DynCanonicalValue>>,
wire_wmask: Option<Expr<DynCanonicalValue>>,
port_rdata: Option<Expr<DynCanonicalValue>>,
port_wdata: Option<Expr<DynCanonicalValue>>,
port_wmask: Option<Expr<DynCanonicalValue>>,
) {
let mut input_array_types = vec![];
let connect_read = |output_stmts: &mut Vec<Stmt>,
wire_read: Expr<CanonicalType>,
port_read: Expr<CanonicalType>| {
wire_read: Expr<DynCanonicalValue>,
port_read: Expr<DynCanonicalValue>| {
output_stmts.push(
StmtConnect {
lhs: wire_read,
@ -509,8 +480,8 @@ impl ModuleState {
);
};
let connect_write = |output_stmts: &mut Vec<Stmt>,
wire_write: Expr<CanonicalType>,
port_write: Expr<CanonicalType>| {
wire_write: Expr<DynCanonicalValue>,
port_write: Expr<DynCanonicalValue>| {
output_stmts.push(
StmtConnect {
lhs: port_write,
@ -520,34 +491,32 @@ impl ModuleState {
.into(),
);
};
let connect_read_enum = |output_stmts: &mut Vec<Stmt>,
wire_read: Expr<CanonicalType>,
port_read: Expr<CanonicalType>| {
connect_read(
output_stmts,
wire_read,
Expr::<UInt>::from_canonical(port_read).cast_bits_to(Expr::ty(wire_read)),
);
};
let connect_read_enum =
|output_stmts: &mut Vec<Stmt>,
wire_read: Expr<DynCanonicalValue>,
port_read: Expr<DynCanonicalValue>| {
connect_read(
output_stmts,
wire_read,
port_read
.with_type::<DynUInt>()
.cast_bits_to(wire_read.ty()),
);
};
let connect_write_enum =
|output_stmts: &mut Vec<Stmt>,
wire_write: Expr<CanonicalType>,
port_write: Expr<CanonicalType>| {
wire_write: Expr<DynCanonicalValue>,
port_write: Expr<DynCanonicalValue>| {
connect_write(
output_stmts,
Expr::canonical(wire_write.cast_to_bits()),
wire_write.cast_to_bits().to_canonical_dyn(),
port_write,
);
};
loop {
match input_element_type {
CanonicalType::Bundle(_) => {
unreachable!("bundle types are always split")
}
CanonicalType::PhantomConst(_) => {
unreachable!("PhantomConst are always removed")
}
CanonicalType::Enum(_)
TypeEnum::BundleType(_) => unreachable!("bundle types are always split"),
TypeEnum::EnumType(_)
if input_array_types
.first()
.map(|&v| single_type.is_array_type(v))
@ -563,7 +532,7 @@ impl ModuleState {
connect_write(output_stmts, wire_wmask, port_wmask);
}
}
CanonicalType::Enum(_) => Self::connect_split_mem_port_arrays(
TypeEnum::EnumType(_) => Self::connect_split_mem_port_arrays(
output_stmts,
&input_array_types,
0,
@ -571,19 +540,25 @@ impl ModuleState {
wire_rdata,
wire_wdata,
wire_wmask,
port_rdata.map(Expr::from_canonical),
port_wdata.map(Expr::from_canonical),
port_wmask.map(Expr::from_canonical),
port_rdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wmask.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
connect_read_enum,
connect_write_enum,
connect_write,
connect_write_enum,
),
CanonicalType::Array(array_type) => {
TypeEnum::ArrayType(array_type) => {
input_array_types.push(array_type);
input_element_type = array_type.element();
input_element_type = array_type.element().type_enum();
continue;
}
CanonicalType::UInt(_) | CanonicalType::SInt(_) | CanonicalType::Bool(_)
TypeEnum::UInt(_) | TypeEnum::SInt(_)
if input_array_types
.first()
.map(|&v| single_type.is_array_type(v))
@ -599,56 +574,59 @@ impl ModuleState {
connect_write(output_stmts, wire_wmask, port_wmask);
}
}
CanonicalType::UInt(_) | CanonicalType::SInt(_) | CanonicalType::Bool(_) => {
Self::connect_split_mem_port_arrays(
output_stmts,
&input_array_types,
0,
single_type.array_len(),
wire_rdata,
wire_wdata,
wire_wmask,
port_rdata.map(Expr::from_canonical),
port_wdata.map(Expr::from_canonical),
port_wmask.map(Expr::from_canonical),
connect_read,
connect_write,
connect_write,
)
}
CanonicalType::Clock(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
TypeEnum::UInt(_) | TypeEnum::SInt(_) => Self::connect_split_mem_port_arrays(
output_stmts,
&input_array_types,
0,
single_type.array_len(),
wire_rdata,
wire_wdata,
wire_wmask,
port_rdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wmask.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
connect_read,
connect_write,
connect_write,
),
TypeEnum::Clock(_)
| TypeEnum::AsyncReset(_)
| TypeEnum::SyncReset(_)
| TypeEnum::Reset(_) => unreachable!("memory element type is a storable type"),
}
break;
}
}
fn create_split_mem(
&mut self,
input_mem: Mem,
input_mem: Mem<[DynCanonicalValue]>,
output_stmts: &mut Vec<Stmt>,
input_element_type: CanonicalType,
input_element_type: TypeEnum,
single_type: SingleType,
mem_name_path: &str,
split_state: &SplitState<'_>,
) -> Mem {
let mem_name = NameId(
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1 .0)),
Id::new(),
);
) -> Mem<[DynCanonicalValue]> {
let mem_name = self.name_id_gen.gen(Intern::intern_owned(format!(
"{}{mem_name_path}",
input_mem.scoped_name().1 .0
)));
let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name);
let output_element_type = match single_type {
SingleType::UInt(ty) => ty.canonical(),
SingleType::SInt(ty) => ty.canonical(),
SingleType::Bool(ty) => ty.canonical(),
SingleType::UIntArray(ty) => ty.canonical(),
SingleType::SIntArray(ty) => ty.canonical(),
SingleType::BoolArray(ty) => ty.canonical(),
SingleType::UInt(ty) => ty.canonical_dyn(),
SingleType::SInt(ty) => ty.canonical_dyn(),
SingleType::UIntArray(ty) => ty.canonical_dyn(),
SingleType::SIntArray(ty) => ty.canonical_dyn(),
};
let output_array_type = Array::new_dyn(output_element_type, input_mem.array_type().len());
let output_array_type =
ArrayType::new_slice(output_element_type, input_mem.array_type().len());
let initial_value = split_state.initial_value.as_ref().map(|initial_value| {
let mut bits = BitVec::with_capacity(output_array_type.type_properties().bit_width);
let mut bits = BitVec::with_capacity(output_array_type.bit_width());
for element in initial_value.iter() {
bits.extend_from_bitslice(element);
}
@ -665,6 +643,7 @@ impl ModuleState {
port.addr_type(),
output_element_type,
)
.intern_sized()
})
.collect();
let output_mem = Mem::new_unchecked(
@ -697,15 +676,15 @@ impl ModuleState {
let port_rdata = port
.port_kind()
.rdata_name()
.map(|name| Expr::field(port_expr, name));
.map(|name| port_expr.field(name));
let port_wdata = port
.port_kind()
.wdata_name()
.map(|name| Expr::field(port_expr, name));
.map(|name| port_expr.field(name));
let port_wmask = port
.port_kind()
.wmask_name()
.map(|name| Expr::field(port_expr, name));
.map(|name| port_expr.field(name));
self.connect_split_mem_port(
output_stmts,
input_element_type,
@ -723,11 +702,11 @@ impl ModuleState {
}
fn process_mem(
&mut self,
input_mem: Mem,
output_mems: &mut Vec<Mem>,
input_mem: Mem<[DynCanonicalValue]>,
output_mems: &mut Vec<Mem<[DynCanonicalValue]>>,
output_stmts: &mut Vec<Stmt>,
) {
let element_type = input_mem.array_type().element();
let element_type = input_mem.array_type().element().type_enum();
let mut split = MemSplit::new(element_type);
let mem_state = match split {
MemSplit::Single {
@ -751,8 +730,7 @@ impl ModuleState {
..
}
| MemSplit::Bundle { .. }
| MemSplit::Array { .. }
| MemSplit::PhantomConst => {
| MemSplit::Array { .. } => {
let mut replacement_ports = Vec::with_capacity(input_mem.ports().len());
let mut wire_port_rdata = Vec::with_capacity(input_mem.ports().len());
let mut wire_port_wdata = Vec::with_capacity(input_mem.ports().len());
@ -761,39 +739,38 @@ impl ModuleState {
let port_ty = port.ty();
let NameId(mem_name, _) = input_mem.scoped_name().1;
let port_name = port.port_name();
let wire_name = NameId(
Intern::intern_owned(format!("{mem_name}_{port_name}")),
Id::new(),
);
let wire_name = self
.name_id_gen
.gen(Intern::intern_owned(format!("{mem_name}_{port_name}")));
let wire = Wire::new_unchecked(
ScopedNameId(input_mem.scoped_name().0, wire_name),
port.source_location(),
port_ty,
);
let wire_expr = wire.to_expr();
let canonical_wire = wire.canonical();
let canonical_wire = wire.to_dyn_canonical_wire();
output_stmts.push(
StmtWire {
annotations: Default::default(),
wire: canonical_wire,
wire: canonical_wire.clone(),
}
.into(),
);
replacement_ports.push(ExprEnum::Wire(canonical_wire));
replacement_ports.push(ExprEnum::Wire(canonical_wire.intern_sized()));
wire_port_rdata.push(
port.port_kind()
.rdata_name()
.map(|name| Expr::field(wire_expr, name)),
.map(|name| wire_expr.field(name)),
);
wire_port_wdata.push(
port.port_kind()
.wdata_name()
.map(|name| Expr::field(wire_expr, name)),
.map(|name| wire_expr.field(name)),
);
wire_port_wmask.push(
port.port_kind()
.wmask_name()
.map(|name| Expr::field(wire_expr, name)),
.map(|name| wire_expr.field(name)),
);
}
let mem_state = MemState {
@ -838,8 +815,8 @@ impl ModuleState {
#[derive(Default)]
struct State {
modules: HashMap<Interned<Module<Bundle>>, ModuleState>,
current_module: Option<Interned<Module<Bundle>>>,
modules: HashMap<Interned<Module<DynBundle>>, ModuleState>,
current_module: Option<Interned<Module<DynBundle>>>,
}
impl State {
@ -851,11 +828,11 @@ impl State {
struct PushedState<'a> {
state: &'a mut State,
old_module: Option<Interned<Module<Bundle>>>,
old_module: Option<Interned<Module<DynBundle>>>,
}
impl<'a> PushedState<'a> {
fn push_module(state: &'a mut State, module: Interned<Module<Bundle>>) -> Self {
fn push_module(state: &'a mut State, module: Interned<Module<DynBundle>>) -> Self {
let old_module = state.current_module.replace(module);
Self { state, old_module }
}
@ -883,7 +860,10 @@ impl DerefMut for PushedState<'_> {
impl Folder for State {
type Error = Infallible;
fn fold_module<T: BundleType>(&mut self, v: Module<T>) -> Result<Module<T>, Self::Error> {
fn fold_module<T: BundleValue>(&mut self, v: Module<T>) -> Result<Module<T>, Self::Error>
where
T::Type: BundleType<Value = T>,
{
let module: Interned<_> = v.canonical().intern_sized();
if let Some(module_state) = self.modules.get(&module) {
return Ok(Module::from_canonical(
@ -896,7 +876,8 @@ impl Folder for State {
module,
ModuleState {
output_module: None,
memories: HashMap::default(),
name_id_gen: NameIdGen::for_module(*module),
memories: HashMap::new(),
},
);
let mut this = PushedState::push_module(self, module);
@ -905,10 +886,10 @@ impl Folder for State {
Ok(Module::from_canonical(*module))
}
fn fold_mem<Element: Type, Len: Size>(
fn fold_mem<VA: ValueArrayOrSlice + ?Sized>(
&mut self,
_v: Mem<Element, Len>,
) -> Result<Mem<Element, Len>, Self::Error> {
_v: Mem<VA>,
) -> Result<Mem<VA>, Self::Error> {
unreachable!()
}
@ -952,7 +933,7 @@ impl Folder for State {
}
}
pub fn simplify_memories(module: Interned<Module<Bundle>>) -> Interned<Module<Bundle>> {
pub fn simplify_memories(module: Interned<Module<DynBundle>>) -> Interned<Module<DynBundle>> {
module
.fold(&mut State::default())
.unwrap_or_else(|v| match v {})
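
As context for the pass above: simplify_memories splits memories whose element type is an aggregate into simpler per-leaf memories and reconnects the original ports through wires. A minimal standalone sketch of that array-of-structs to struct-of-arrays idea, in plain Rust with illustrative names (Element, AggregateMem, SplitMem) rather than fayalite's Mem/Expr types:

// Standalone sketch: splitting an aggregate-element memory (array of structs)
// into independent per-field memories (struct of arrays). All names here are
// illustrative; this is not fayalite's Mem/Expr API.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Element {
    tag: u8,
    payload: u32,
}

// One memory holding whole `Element`s.
struct AggregateMem {
    storage: Vec<Element>,
}

// The split form: one memory per scalar field, all with the same depth.
struct SplitMem {
    tag: Vec<u8>,
    payload: Vec<u32>,
}

impl SplitMem {
    fn from_aggregate(mem: &AggregateMem) -> Self {
        Self {
            tag: mem.storage.iter().map(|e| e.tag).collect(),
            payload: mem.storage.iter().map(|e| e.payload).collect(),
        }
    }

    // A read port reassembles the element from the per-field memories, much
    // like the pass reconnects split rdata back onto the original port wire.
    fn read(&self, addr: usize) -> Element {
        Element {
            tag: self.tag[addr],
            payload: self.payload[addr],
        }
    }

    // A masked write updates only the selected fields, mirroring wmask handling.
    fn write_masked(&mut self, addr: usize, data: Element, write_tag: bool, write_payload: bool) {
        if write_tag {
            self.tag[addr] = data.tag;
        }
        if write_payload {
            self.payload[addr] = data.payload;
        }
    }
}

fn main() {
    let aggregate = AggregateMem {
        storage: vec![
            Element { tag: 1, payload: 10 },
            Element { tag: 2, payload: 20 },
        ],
    };
    let mut split = SplitMem::from_aggregate(&aggregate);
    assert_eq!(split.read(1), aggregate.storage[1]);
    split.write_masked(0, Element { tag: 9, payload: 99 }, true, false);
    assert_eq!(split.read(0), Element { tag: 9, payload: 10 });
}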

View file

@ -2,38 +2,32 @@
// See Notices.txt for copyright information
#![allow(clippy::multiple_bound_locations)]
use crate::{
annotations::{
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
},
array::ArrayType,
bundle::{Bundle, BundleField, BundleType},
clock::Clock,
enum_::{Enum, EnumType, EnumVariant},
annotations::{Annotation, CustomFirrtlAnnotation, TargetedAnnotation},
array::{Array, ArrayType, ArrayTypeTrait, ValueArrayOrSlice},
bundle::{BundleType, BundleValue, DynBundle, DynBundleType, FieldType},
clock::{Clock, ClockType},
enum_::{DynEnum, DynEnumType, EnumType, EnumValue, VariantType},
expr::{
ops,
target::{
Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField,
TargetPathDynArrayElement, TargetPathElement,
},
Expr, ExprEnum,
ops, Expr, ExprEnum, Literal, Target, TargetBase, TargetChild, TargetPathArrayElement,
TargetPathBundleField, TargetPathDynArrayElement, TargetPathElement, ToExpr,
},
int::{
DynInt, DynIntType, DynSInt, DynSIntType, DynUInt, DynUIntType, StaticOrDynIntType, IntType,
IntTypeTrait,
},
formal::FormalKind,
int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue},
intern::{Intern, Interned},
memory::{Mem, MemPort, PortKind, PortName, PortType, ReadUnderWrite},
module::{
AnnotatedModuleIO, Block, BlockId, ExternModuleBody, ExternModuleParameter,
ExternModuleParameterValue, Instance, Module, ModuleBody, ModuleIO, NameId,
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf,
StmtInstance, StmtMatch, StmtReg, StmtWire,
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtIf, StmtInstance,
StmtMatch, StmtReg, StmtWire,
},
phantom_const::PhantomConst,
reg::Reg,
reset::{AsyncReset, Reset, ResetType, SyncReset},
sim::ExternModuleSimulation,
reset::{AsyncReset, AsyncResetType, Reset, ResetType, SyncReset, SyncResetType},
source_location::SourceLocation,
ty::{CanonicalType, Type},
ty::{DynCanonicalType, DynCanonicalValue, DynType, Type, TypeEnum, Value, ValueEnum},
util::{ConstBool, GenericConstBool},
wire::Wire,
};
use num_bigint::{BigInt, BigUint};
@ -479,4 +473,7 @@ impl<T: ?Sized + Visit<State>, State: ?Sized + Visitor> Visit<State> for &'_ mut
}
}
type InternedDynType = Interned<dyn DynType>;
type InternedDynCanonicalType = Interned<dyn DynCanonicalType>;
include!(concat!(env!("OUT_DIR"), "/visit.rs"));

View file

@ -1,410 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
expr::{
ops::{ExprPartialEq, ExprPartialOrd},
Expr, ToExpr,
},
int::Bool,
intern::{Intern, Interned, InternedCompare, LazyInterned, LazyInternedTrait, Memoize},
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
source_location::SourceLocation,
ty::{
impl_match_variant_as_self,
serde_impls::{SerdeCanonicalType, SerdePhantomConst},
CanonicalType, StaticType, Type, TypeProperties,
},
};
use bitvec::slice::BitSlice;
use serde::{
de::{DeserializeOwned, Error},
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{
any::Any,
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
ops::Index,
};
#[derive(Clone)]
pub struct PhantomConstCanonicalValue {
parsed: serde_json::Value,
serialized: Interned<str>,
}
impl PhantomConstCanonicalValue {
pub fn from_json_value(parsed: serde_json::Value) -> Self {
let serialized = Intern::intern_owned(
serde_json::to_string(&parsed)
.expect("conversion from json value to text shouldn't fail"),
);
Self { parsed, serialized }
}
pub fn as_json_value(&self) -> &serde_json::Value {
&self.parsed
}
pub fn as_str(&self) -> Interned<str> {
self.serialized
}
}
impl fmt::Debug for PhantomConstCanonicalValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.serialized)
}
}
impl fmt::Display for PhantomConstCanonicalValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.serialized)
}
}
impl PartialEq for PhantomConstCanonicalValue {
fn eq(&self, other: &Self) -> bool {
self.serialized == other.serialized
}
}
impl Eq for PhantomConstCanonicalValue {}
impl Hash for PhantomConstCanonicalValue {
fn hash<H: Hasher>(&self, state: &mut H) {
self.serialized.hash(state);
}
}
impl Serialize for PhantomConstCanonicalValue {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
self.parsed.serialize(serializer)
}
}
impl<'de> Deserialize<'de> for PhantomConstCanonicalValue {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
Ok(Self::from_json_value(serde_json::Value::deserialize(
deserializer,
)?))
}
}
pub trait PhantomConstValue: Intern + InternedCompare + Serialize + fmt::Debug {
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
where
D: serde::Deserializer<'de>;
}
impl<T> PhantomConstValue for T
where
T: ?Sized + Intern + InternedCompare + Serialize + fmt::Debug,
Interned<T>: DeserializeOwned,
{
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
where
D: serde::Deserializer<'de>,
{
<Interned<T> as Deserialize<'de>>::deserialize(deserializer)
}
}
/// Wrapper type that allows any Rust value to be smuggled as a HDL [`Type`].
/// This only works for values that can be [serialized][Serialize] to and [deserialized][Deserialize] from [JSON][serde_json].
pub struct PhantomConst<T: ?Sized + PhantomConstValue = PhantomConstCanonicalValue> {
value: LazyInterned<T>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct PhantomConstWithoutGenerics;
#[allow(non_upper_case_globals)]
pub const PhantomConst: PhantomConstWithoutGenerics = PhantomConstWithoutGenerics;
impl<T: Type + PhantomConstValue> Index<T> for PhantomConstWithoutGenerics {
type Output = PhantomConst<T>;
fn index(&self, value: T) -> &Self::Output {
Interned::into_inner(PhantomConst::new(value.intern()).intern_sized())
}
}
impl<T: ?Sized + PhantomConstValue> fmt::Debug for PhantomConst<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("PhantomConst").field(&self.get()).finish()
}
}
impl<T: ?Sized + PhantomConstValue> Clone for PhantomConst<T> {
fn clone(&self) -> Self {
*self
}
}
impl<T: ?Sized + PhantomConstValue> Copy for PhantomConst<T> {}
impl<T: ?Sized + PhantomConstValue> PartialEq for PhantomConst<T> {
fn eq(&self, other: &Self) -> bool {
self.get() == other.get()
}
}
impl<T: ?Sized + PhantomConstValue> Eq for PhantomConst<T> {}
impl<T: ?Sized + PhantomConstValue> Hash for PhantomConst<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.get().hash(state);
}
}
struct PhantomConstCanonicalMemoize<T: ?Sized, const IS_FROM_CANONICAL: bool>(PhantomData<T>);
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Copy
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Clone
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
fn clone(&self) -> Self {
*self
}
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Eq
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> PartialEq
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
fn eq(&self, _other: &Self) -> bool {
true
}
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Hash
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
fn hash<H: Hasher>(&self, _state: &mut H) {}
}
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, false> {
type Input = Interned<T>;
type InputOwned = Interned<T>;
type Output = Interned<PhantomConstCanonicalValue>;
fn inner(self, input: &Self::Input) -> Self::Output {
Intern::intern_sized(PhantomConstCanonicalValue::from_json_value(
serde_json::to_value(input)
.expect("serialization failed when constructing a canonical PhantomConst"),
))
}
}
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, true> {
type Input = Interned<PhantomConstCanonicalValue>;
type InputOwned = Interned<PhantomConstCanonicalValue>;
type Output = Interned<T>;
fn inner(self, input: &Self::Input) -> Self::Output {
PhantomConstValue::deserialize_value(input.as_json_value())
.expect("deserialization failed ")
}
}
impl<T: ?Sized + PhantomConstValue> PhantomConst<T> {
pub fn new(value: Interned<T>) -> Self {
Self {
value: LazyInterned::Interned(value),
}
}
pub const fn new_lazy(v: &'static dyn LazyInternedTrait<T>) -> Self {
Self {
value: LazyInterned::new_lazy(v),
}
}
pub fn get(self) -> Interned<T> {
self.value.interned()
}
pub fn type_properties(self) -> TypeProperties {
<()>::TYPE_PROPERTIES
}
pub fn can_connect(self, other: Self) -> bool {
self == other
}
pub fn canonical_phantom_const(self) -> PhantomConst {
if let Some(&retval) = <dyn Any>::downcast_ref::<PhantomConst>(&self) {
return retval;
}
<PhantomConst>::new(
PhantomConstCanonicalMemoize::<T, false>(PhantomData).get_owned(self.get()),
)
}
pub fn from_canonical_phantom_const(canonical_type: PhantomConst) -> Self {
if let Some(&retval) = <dyn Any>::downcast_ref::<Self>(&canonical_type) {
return retval;
}
Self::new(
PhantomConstCanonicalMemoize::<T, true>(PhantomData).get_owned(canonical_type.get()),
)
}
}
impl<T: ?Sized + PhantomConstValue> Type for PhantomConst<T> {
type BaseType = PhantomConst;
type MaskType = ();
type SimValue = PhantomConst<T>;
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
()
}
fn canonical(&self) -> CanonicalType {
CanonicalType::PhantomConst(self.canonical_phantom_const())
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let CanonicalType::PhantomConst(phantom_const) = canonical_type else {
panic!("expected PhantomConst");
};
Self::from_canonical_phantom_const(phantom_const)
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn sim_value_from_bits(&self, bits: &BitSlice) -> Self::SimValue {
assert!(bits.is_empty());
*self
}
fn sim_value_clone_from_bits(&self, value: &mut Self::SimValue, bits: &BitSlice) {
assert!(bits.is_empty());
assert_eq!(*value, *self);
}
fn sim_value_to_bits(&self, value: &Self::SimValue, bits: &mut BitSlice) {
assert!(bits.is_empty());
assert_eq!(*value, *self);
}
}
impl<T: ?Sized + PhantomConstValue> Default for PhantomConst<T>
where
Interned<T>: Default,
{
fn default() -> Self {
Self::TYPE
}
}
impl<T: ?Sized + PhantomConstValue> StaticType for PhantomConst<T>
where
Interned<T>: Default,
{
const TYPE: Self = PhantomConst {
value: LazyInterned::new_lazy(&Interned::<T>::default),
};
const MASK_TYPE: Self::MaskType = ();
const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
}
type SerdeType<T> = SerdeCanonicalType<CanonicalType, SerdePhantomConst<Interned<T>>>;
impl<T: ?Sized + PhantomConstValue> Serialize for PhantomConst<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
SerdeType::<T>::PhantomConst(SerdePhantomConst(self.get())).serialize(serializer)
}
}
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for PhantomConst<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
match SerdeType::<T>::deserialize(deserializer)? {
SerdeCanonicalType::PhantomConst(SerdePhantomConst(value)) => Ok(Self::new(value)),
ty => Err(Error::invalid_value(
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
&"a PhantomConst",
)),
}
}
}
impl<T: ?Sized + PhantomConstValue> ExprPartialEq<Self> for PhantomConst<T> {
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
true.to_expr()
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
false.to_expr()
}
}
impl<T: ?Sized + PhantomConstValue> ExprPartialOrd<Self> for PhantomConst<T> {
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
false.to_expr()
}
fn cmp_le(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
true.to_expr()
}
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
false.to_expr()
}
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
true.to_expr()
}
}
impl<T: ?Sized + PhantomConstValue> SimValuePartialEq<Self> for PhantomConst<T> {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
assert_eq!(SimValue::ty(this), SimValue::ty(other));
true
}
}
impl<T: ?Sized + PhantomConstValue> ToSimValue for PhantomConst<T> {
type Type = PhantomConst<T>;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_value(*self, *self)
}
}
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<PhantomConst<T>> for PhantomConst<T> {
fn to_sim_value_with_type(&self, ty: PhantomConst<T>) -> SimValue<PhantomConst<T>> {
SimValue::from_value(ty, *self)
}
}
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<CanonicalType> for PhantomConst<T> {
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_value(Self::from_canonical(ty), *self))
}
}
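
The canonical form used by PhantomConst above is the value round-tripped through serde_json, with the serialized text driving equality and hashing. A standalone sketch of that canonicalization idea, assuming serde (with derive) and serde_json as dependencies; CanonicalConst and Params are illustrative names, not part of the crate:

// Standalone sketch of the PhantomConst canonicalization idea: reduce any
// serde-serializable value to its JSON text and compare/hash that text.
// Mirrors PhantomConstCanonicalValue minus the interning and memoization.
use serde::Serialize;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct CanonicalConst {
    serialized: String,
}

impl CanonicalConst {
    fn new<T: Serialize>(value: &T) -> Self {
        let parsed = serde_json::to_value(value)
            .expect("serialization to a JSON value shouldn't fail");
        Self {
            serialized: serde_json::to_string(&parsed)
                .expect("conversion from json value to text shouldn't fail"),
        }
    }
}

#[derive(Serialize)]
struct Params {
    lanes: u32,
    has_fpu: bool,
}

fn main() {
    let a = CanonicalConst::new(&Params { lanes: 4, has_fpu: true });
    let b = CanonicalConst::new(&Params { lanes: 4, has_fpu: true });
    let c = CanonicalConst::new(&Params { lanes: 8, has_fpu: true });
    assert_eq!(a, b); // equal parameters -> the same canonical constant
    assert_ne!(a, c); // different parameters -> a different constant
    println!("{}", a.serialized);
}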

View file

@ -1,43 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub use crate::{
annotations::{
BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
},
array::{Array, ArrayType},
bundle::Bundle,
cli::Cli,
clock::{Clock, ClockDomain, ToClock},
enum_::{Enum, HdlNone, HdlOption, HdlSome},
expr::{
repeat, CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
ReduceBits, ToExpr,
},
formal::{
all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset, hdl_assert,
hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
hdl_cover_with_enable, MakeFormalExpr,
},
hdl, hdl_module,
int::{Bool, DynSize, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
memory::{Mem, MemBuilder, ReadUnderWrite},
module::{
annotate, connect, connect_any, incomplete_wire, instance, memory, memory_array,
memory_with_init, reg_builder, wire, Instance, Module, ModuleBuilder,
},
phantom_const::PhantomConst,
reg::Reg,
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
sim::{
time::{SimDuration, SimInstant},
value::{SimValue, ToSimValue, ToSimValueWithType},
ExternModuleSimulationState, Simulation,
},
source_location::SourceLocation,
ty::{AsMask, CanonicalType, Type},
util::{ConstUsize, GenericConstUsize},
wire::Wire,
__,
};
pub use bitvec::{slice::BitSlice, vec::BitVec};

View file

@ -2,25 +2,24 @@
// See Notices.txt for copyright information
use crate::{
clock::ClockDomain,
expr::{Expr, Flow},
expr::{Expr, ExprTrait, Flow, ToExpr},
intern::Interned,
module::{NameId, ScopedNameId},
reset::{Reset, ResetType},
source_location::SourceLocation,
ty::{CanonicalType, Type},
ty::{DynCanonicalType, DynType, Type},
};
use std::fmt;
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Reg<T: Type, R: ResetType = Reset> {
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct Reg<T: Type> {
name: ScopedNameId,
source_location: SourceLocation,
ty: T,
clock_domain: Expr<ClockDomain<R>>,
init: Option<Expr<T>>,
clock_domain: Expr<ClockDomain>,
init: Option<Expr<T::Value>>,
}
impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
name,
@ -38,8 +37,20 @@ impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
}
}
impl<T: Type, R: ResetType> Reg<T, R> {
pub fn canonical(&self) -> Reg<CanonicalType, R> {
impl<T: Type> ToExpr for Reg<T> {
type Type = T;
fn ty(&self) -> Self::Type {
self.ty.clone()
}
fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
Expr::new_unchecked(self.expr_enum())
}
}
impl<T: Type> Reg<T> {
pub fn canonical(&self) -> Reg<T::CanonicalType> {
let Self {
name,
source_location,
@ -55,20 +66,49 @@ impl<T: Type, R: ResetType> Reg<T, R> {
init: init.map(Expr::canonical),
}
}
pub fn to_dyn_reg(&self) -> Reg<Interned<dyn DynType>> {
let Self {
name,
source_location,
ref ty,
clock_domain,
init,
} = *self;
Reg {
name,
source_location,
ty: ty.to_dyn(),
clock_domain,
init: init.map(Expr::to_dyn),
}
}
pub fn to_dyn_canonical_reg(&self) -> Reg<Interned<dyn DynCanonicalType>> {
let Self {
name,
source_location,
ref ty,
clock_domain,
init,
} = *self;
Reg {
name,
source_location,
ty: ty.canonical_dyn(),
clock_domain,
init: init.map(Expr::to_canonical_dyn),
}
}
#[track_caller]
pub fn new_unchecked(
scoped_name: ScopedNameId,
source_location: SourceLocation,
ty: T,
clock_domain: Expr<ClockDomain<R>>,
init: Option<Expr<T>>,
clock_domain: Expr<ClockDomain>,
init: Option<Expr<T::Value>>,
) -> Self {
assert!(
ty.canonical().is_storable(),
"register type must be a storable type"
);
assert!(ty.is_storable(), "register type must be a storable type");
if let Some(init) = init {
assert_eq!(ty, Expr::ty(init), "register's type must match init type");
assert_eq!(ty, init.ty(), "register's type must match init type");
}
Self {
name: scoped_name,
@ -78,9 +118,6 @@ impl<T: Type, R: ResetType> Reg<T, R> {
init,
}
}
pub fn ty(&self) -> T {
self.ty
}
pub fn source_location(&self) -> SourceLocation {
self.source_location
}
@ -99,10 +136,10 @@ impl<T: Type, R: ResetType> Reg<T, R> {
pub fn scoped_name(&self) -> ScopedNameId {
self.name
}
pub fn clock_domain(&self) -> Expr<ClockDomain<R>> {
pub fn clock_domain(&self) -> Expr<ClockDomain> {
self.clock_domain
}
pub fn init(&self) -> Option<Expr<T>> {
pub fn init(&self) -> Option<Expr<T::Value>> {
self.init
}
pub fn flow(&self) -> Flow {

View file

@ -1,169 +1,359 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
clock::Clock,
expr::{ops, Expr, ToExpr},
int::{Bool, SInt, UInt},
expr::{Expr, ToExpr},
int::{UInt, UIntType},
intern::Interned,
source_location::SourceLocation,
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
ty::{
impl_match_values_as_self, CanonicalType, CanonicalTypeKind, CanonicalValue, Connect,
DynCanonicalType, StaticType, Type, TypeEnum, Value, ValueEnum,
},
util::interned_bit,
};
use bitvec::slice::BitSlice;
mod sealed {
pub trait ResetTypeSealed {}
pub trait ResetTypeTrait: CanonicalType + StaticType<MaskType = UIntType<1>> {}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct AsyncResetType;
impl AsyncResetType {
pub const fn new() -> Self {
Self
}
}
pub trait ResetType:
StaticType<MaskType = Bool>
+ sealed::ResetTypeSealed
+ ops::ExprCastTo<Bool>
+ ops::ExprCastTo<Reset>
+ ops::ExprCastTo<SyncReset>
+ ops::ExprCastTo<AsyncReset>
+ ops::ExprCastTo<Clock>
+ ops::ExprCastTo<UInt<1>>
+ ops::ExprCastTo<SInt<1>>
+ ops::ExprCastTo<UInt>
+ ops::ExprCastTo<SInt>
{
fn dispatch<D: ResetTypeDispatch>(input: D::Input<Self>, dispatch: D) -> D::Output<Self>;
impl Type for AsyncResetType {
type Value = AsyncReset;
type CanonicalType = AsyncResetType;
type CanonicalValue = AsyncReset;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::AsyncReset(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
}
pub trait ResetTypeDispatch: Sized {
type Input<T: ResetType>;
type Output<T: ResetType>;
impl Connect<Self> for AsyncResetType {}
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset>;
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset>;
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset>;
impl CanonicalType for AsyncResetType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::AsyncReset;
}
macro_rules! reset_type {
($name:ident, $(#[$impl_trait:ident])? $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal, $dispatch_fn:ident) => {
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct $name;
impl StaticType for AsyncResetType {
fn static_type() -> Self {
Self
}
}
impl Type for $name {
type BaseType = $name;
type MaskType = Bool;
type SimValue = bool;
impl ResetTypeTrait for AsyncResetType {}
impl_match_variant_as_self!();
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct AsyncReset(pub bool);
fn mask_type(&self) -> Self::MaskType {
Bool
}
impl ToExpr for AsyncReset {
type Type = AsyncResetType;
fn canonical(&self) -> CanonicalType {
CanonicalType::$name(*self)
}
fn ty(&self) -> Self::Type {
AsyncResetType
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let CanonicalType::$name(retval) = canonical_type else {
panic!("expected {}", stringify!($name));
};
retval
}
impl Value for AsyncReset {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
interned_bit(this.0)
}
}
fn sim_value_from_bits(&self, bits: &BitSlice) -> Self::SimValue {
assert_eq!(bits.len(), 1);
bits[0]
}
impl CanonicalValue for AsyncReset {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::AsyncReset(*this)
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
interned_bit(this.0)
}
}
fn sim_value_clone_from_bits(&self, value: &mut Self::SimValue, bits: &BitSlice) {
assert_eq!(bits.len(), 1);
*value = bits[0];
}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct SyncResetType;
fn sim_value_to_bits(&self, value: &Self::SimValue, bits: &mut BitSlice) {
assert_eq!(bits.len(), 1);
bits.set(0, *value);
}
impl SyncResetType {
pub const fn new() -> Self {
Self
}
}
impl Type for SyncResetType {
type CanonicalType = SyncResetType;
type Value = SyncReset;
type CanonicalValue = SyncReset;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::SyncReset(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
}
impl Connect<Self> for SyncResetType {}
impl CanonicalType for SyncResetType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::SyncReset;
}
impl StaticType for SyncResetType {
fn static_type() -> Self {
Self
}
}
impl ResetTypeTrait for SyncResetType {}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct SyncReset(pub bool);
impl ToExpr for SyncReset {
type Type = SyncResetType;
fn ty(&self) -> Self::Type {
SyncResetType
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
impl Value for SyncReset {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
interned_bit(this.0)
}
}
impl CanonicalValue for SyncReset {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::SyncReset(*this)
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
interned_bit(this.0)
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
pub struct ResetType;
impl ResetType {
pub const fn new() -> Self {
Self
}
}
impl Type for ResetType {
type Value = Reset;
type CanonicalType = ResetType;
type CanonicalValue = Reset;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::Reset(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
}
impl Connect<Self> for ResetType {}
impl CanonicalType for ResetType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::Reset;
}
impl StaticType for ResetType {
fn static_type() -> Self {
Self
}
}
impl ResetTypeTrait for ResetType {}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum Reset {}
impl ToExpr for Reset {
type Type = ResetType;
fn ty(&self) -> Self::Type {
match *self {}
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
impl Value for Reset {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
match *this {}
}
}
impl CanonicalValue for Reset {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::Reset(*this)
}
fn to_bits_impl(this: &Self) -> Interned<BitSlice> {
match *this {}
}
}
macro_rules! make_to_reset {
(
$(#[from_value($from_value_ty:ty)])*
$vis:vis trait $Trait:ident {
fn $fn:ident(&self) -> Expr<$T:ty>;
}
impl $name {
pub fn type_properties(self) -> TypeProperties {
Self::TYPE_PROPERTIES
}
pub fn can_connect(self, _rhs: Self) -> bool {
true
}
}
impl StaticType for $name {
const TYPE: Self = Self;
const MASK_TYPE: Self::MaskType = Bool;
const TYPE_PROPERTIES: TypeProperties = TypeProperties {
is_passive: true,
is_storable: false,
is_castable_from_bits: $is_castable_from_bits,
bit_width: 1,
};
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
}
impl sealed::ResetTypeSealed for $name {}
impl ResetType for $name {
fn dispatch<D: ResetTypeDispatch>(
input: D::Input<Self>,
dispatch: D,
) -> D::Output<Self> {
dispatch.$dispatch_fn(input)
}
}
pub trait $Trait {
fn $trait_fn(&self) -> Expr<$name>;
) => {
$vis trait $Trait {
fn $fn(&self) -> Expr<$T>;
}
impl<T: ?Sized + $Trait> $Trait for &'_ T {
fn $trait_fn(&self) -> Expr<$name> {
(**self).$trait_fn()
fn $fn(&self) -> Expr<$T> {
(**self).$fn()
}
}
impl<T: ?Sized + $Trait> $Trait for &'_ mut T {
fn $trait_fn(&self) -> Expr<$name> {
(**self).$trait_fn()
fn $fn(&self) -> Expr<$T> {
(**self).$fn()
}
}
impl<T: ?Sized + $Trait> $Trait for Box<T> {
fn $trait_fn(&self) -> Expr<$name> {
(**self).$trait_fn()
fn $fn(&self) -> Expr<$T> {
(**self).$fn()
}
}
$($impl_trait $Trait for Expr<$name> {
fn $trait_fn(&self) -> Expr<$name> {
impl $Trait for Expr<$T> {
fn $fn(&self) -> Expr<$T> {
*self
}
})?
}
impl $Trait for $T {
fn $fn(&self) -> Expr<$T> {
self.to_expr()
}
}
$(impl $Trait for $from_value_ty {
fn $fn(&self) -> Expr<$T> {
self.to_expr().$fn()
}
})*
};
}
reset_type!(AsyncReset, #[impl] ToAsyncReset::to_async_reset, true, async_reset);
reset_type!(SyncReset, #[impl] ToSyncReset::to_sync_reset, true, sync_reset);
reset_type!(
Reset,
ToReset::to_reset,
false, // Reset is not castable from bits because we don't know if it's async or sync
reset
);
impl ToSyncReset for bool {
fn to_sync_reset(&self) -> Expr<SyncReset> {
self.to_expr().to_sync_reset()
make_to_reset! {
#[from_value(SyncReset)]
#[from_value(AsyncReset)]
pub trait ToReset {
fn to_reset(&self) -> Expr<Reset>;
}
}
impl ToAsyncReset for bool {
fn to_async_reset(&self) -> Expr<AsyncReset> {
self.to_expr().to_async_reset()
make_to_reset! {
#[from_value(bool)]
#[from_value(UInt<1>)]
pub trait ToAsyncReset {
fn to_async_reset(&self) -> Expr<AsyncReset>;
}
}
make_to_reset! {
#[from_value(bool)]
#[from_value(UInt<1>)]
pub trait ToSyncReset {
fn to_sync_reset(&self) -> Expr<SyncReset>;
}
}
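
The make_to_reset!/reset_type! macros above stamp out small conversion traits together with forwarding impls for &T, &mut T, and Box<T>, so callers can pass owned values, references, or boxes interchangeably. A hand-expanded sketch of that pattern for a single made-up trait (ToFlag and Flag are illustrative, not fayalite types):

// Hand-expanded sketch of the blanket-impl pattern the reset macros generate:
// one conversion trait, forwarded through common pointer types.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
struct Flag(bool);

trait ToFlag {
    fn to_flag(&self) -> Flag;
}

impl ToFlag for bool {
    fn to_flag(&self) -> Flag {
        Flag(*self)
    }
}

// Forwarding impls, mirroring the `&'_ T`, `&'_ mut T`, and `Box<T>` impls
// produced by the macros.
impl<T: ?Sized + ToFlag> ToFlag for &'_ T {
    fn to_flag(&self) -> Flag {
        (**self).to_flag()
    }
}

impl<T: ?Sized + ToFlag> ToFlag for &'_ mut T {
    fn to_flag(&self) -> Flag {
        (**self).to_flag()
    }
}

impl<T: ?Sized + ToFlag> ToFlag for Box<T> {
    fn to_flag(&self) -> Flag {
        (**self).to_flag()
    }
}

fn main() {
    let value = true;
    assert_eq!(value.to_flag(), Flag(true));
    assert_eq!((&value).to_flag(), Flag(true));
    assert_eq!(Box::new(false).to_flag(), Flag(false));
}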

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -1,397 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use std::{
fmt,
ops::{Add, AddAssign, Sub, SubAssign},
time::Duration,
};
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct SimInstant {
time_since_start: SimDuration,
}
impl SimInstant {
pub const fn checked_add(self, duration: SimDuration) -> Option<Self> {
let Some(time_since_start) = self.time_since_start.checked_add(duration) else {
return None;
};
Some(SimInstant { time_since_start })
}
pub const fn checked_duration_since(self, earlier: Self) -> Option<SimDuration> {
self.time_since_start.checked_sub(earlier.time_since_start)
}
pub const fn checked_sub(self, duration: SimDuration) -> Option<Self> {
let Some(time_since_start) = self.time_since_start.checked_sub(duration) else {
return None;
};
Some(SimInstant { time_since_start })
}
#[track_caller]
pub const fn duration_since(self, earlier: Self) -> SimDuration {
let Some(retval) = self.checked_duration_since(earlier) else {
panic!(
"tried to compute the duration since a later time -- durations can't be negative"
);
};
retval
}
pub const fn saturating_duration_since(self, earlier: Self) -> SimDuration {
let Some(retval) = self.checked_duration_since(earlier) else {
return SimDuration::ZERO;
};
retval
}
}
impl Add<SimDuration> for SimInstant {
type Output = SimInstant;
#[track_caller]
fn add(mut self, rhs: SimDuration) -> Self::Output {
self += rhs;
self
}
}
impl AddAssign<SimDuration> for SimInstant {
#[track_caller]
fn add_assign(&mut self, rhs: SimDuration) {
self.time_since_start += rhs;
}
}
impl Add<SimInstant> for SimDuration {
type Output = SimInstant;
#[track_caller]
fn add(self, rhs: SimInstant) -> Self::Output {
rhs.add(self)
}
}
impl Sub for SimInstant {
type Output = SimDuration;
#[track_caller]
fn sub(self, rhs: SimInstant) -> Self::Output {
self.duration_since(rhs)
}
}
impl Sub<SimDuration> for SimInstant {
type Output = SimInstant;
#[track_caller]
fn sub(self, rhs: SimDuration) -> Self::Output {
let Some(retval) = self.checked_sub(rhs) else {
panic!("SimInstant underflow");
};
retval
}
}
impl SubAssign<SimDuration> for SimInstant {
#[track_caller]
fn sub_assign(&mut self, rhs: SimDuration) {
*self = *self - rhs;
}
}
impl SimInstant {
pub const START: SimInstant = SimInstant {
time_since_start: SimDuration::ZERO,
};
}
impl fmt::Debug for SimInstant {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.time_since_start.fmt(f)
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct SimDuration {
attos: u128,
}
impl AddAssign for SimDuration {
#[track_caller]
fn add_assign(&mut self, rhs: SimDuration) {
*self = *self + rhs;
}
}
impl Add for SimDuration {
type Output = SimDuration;
#[track_caller]
fn add(self, rhs: SimDuration) -> Self::Output {
SimDuration {
attos: self
.attos
.checked_add(rhs.attos)
.expect("overflow adding durations"),
}
}
}
impl Sub for SimDuration {
type Output = Self;
#[track_caller]
fn sub(self, rhs: Self) -> Self::Output {
SimDuration {
attos: self
.attos
                .checked_sub(rhs.attos)
.expect("underflow subtracting durations -- durations can't be negative"),
}
}
}
impl SubAssign for SimDuration {
#[track_caller]
fn sub_assign(&mut self, rhs: Self) {
*self = *self - rhs;
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
pub struct SimDurationParts {
pub attos: u16,
pub femtos: u16,
pub picos: u16,
pub nanos: u16,
pub micros: u16,
pub millis: u16,
pub secs: u128,
}
macro_rules! impl_duration_units {
(
$(
#[unit_const = $UNIT:ident, from_units = $from_units:ident, as_units = $as_units:ident, units = $units:ident, suffix = $suffix:literal]
const $log10_units_per_sec:ident: u32 = $log10_units_per_sec_value:expr;
)*
) => {
impl SimDuration {
$(
const $log10_units_per_sec: u32 = $log10_units_per_sec_value;
pub const fn $from_units($units: u128) -> Self {
Self::from_units_helper::<{ Self::$log10_units_per_sec }>($units)
}
pub const fn $as_units(self) -> u128 {
self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }
}
)*
pub const fn to_parts(mut self) -> SimDurationParts {
$(
let $units = self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
self.attos %= const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
)*
SimDurationParts {
$($units: $units as _,)*
}
}
pub const fn from_parts_checked(parts: SimDurationParts) -> Option<Self> {
let attos = 0u128;
$(
let Some(product) = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }.checked_mul(parts.$units as u128) else {
return None;
};
let Some(attos) = attos.checked_add(product) else {
return None;
};
)*
Some(Self {
attos,
})
}
}
impl fmt::Debug for SimDuration {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let ilog10_attos = match self.attos.checked_ilog10() {
Some(v) => v,
None => Self::LOG10_ATTOS_PER_SEC,
};
let (suffix, int, fraction, fraction_digits) =
match Self::LOG10_ATTOS_PER_SEC.saturating_sub(ilog10_attos) {
$(
..=Self::$log10_units_per_sec => {
let divisor = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
(
$suffix,
self.attos / divisor,
self.attos % divisor,
(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) as usize,
)
},
)*
_ => unreachable!(),
};
write!(f, "{int}")?;
if fraction != 0 {
write!(f, ".{fraction:0fraction_digits$}")?;
}
write!(f, " {suffix}")
}
}
#[cfg(test)]
#[test]
fn test_duration_debug() {
$(
assert_eq!(
format!("{:?}", SimDuration::$from_units(123)),
concat!("123 ", $suffix)
);
assert_eq!(
format!("{:?}", SimDuration::$from_units(1)),
concat!("1 ", $suffix),
);
let mut v = SimDuration::$from_units(1);
if v.attos < 1 << 53 {
v.attos += 1;
assert_eq!(
format!("{v:?}"),
format!("{} {}", v.attos as f64 / 10.0f64.powf((SimDuration::LOG10_ATTOS_PER_SEC - SimDuration::$log10_units_per_sec) as f64), $suffix),
"1 {} + 1 as == {} as", $suffix, v.attos,
);
}
)*
}
};
}
impl_duration_units! {
#[unit_const = SECOND, from_units = from_secs, as_units = as_secs, units = secs, suffix = "s"]
const LOG10_SECS_PER_SEC: u32 = 0;
#[unit_const = MILLISECOND, from_units = from_millis, as_units = as_millis, units = millis, suffix = "ms"]
const LOG10_MILLIS_PER_SEC: u32 = 3;
#[unit_const = MICROSECOND, from_units = from_micros, as_units = as_micros, units = micros, suffix = "μs"]
const LOG10_MICROS_PER_SEC: u32 = 6;
#[unit_const = NANOSECOND, from_units = from_nanos, as_units = as_nanos, units = nanos, suffix = "ns"]
const LOG10_NANOS_PER_SEC: u32 = 9;
#[unit_const = PICOSECOND, from_units = from_picos, as_units = as_picos, units = picos, suffix = "ps"]
const LOG10_PICOS_PER_SEC: u32 = 12;
#[unit_const = FEMTOSECOND, from_units = from_femtos, as_units = as_femtos, units = femtos, suffix = "fs"]
const LOG10_FEMTOS_PER_SEC: u32 = 15;
#[unit_const = ATTOSECOND, from_units = from_attos, as_units = as_attos, units = attos, suffix = "as"]
const LOG10_ATTOS_PER_SEC: u32 = 18;
}
impl SimDuration {
const fn from_units_helper<const UNITS_PER_SEC: u32>(units: u128) -> Self {
let Some(attos) =
units.checked_mul(const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - UNITS_PER_SEC) })
else {
panic!("duration too big");
};
Self { attos }
}
pub const ZERO: SimDuration = SimDuration::from_secs(0);
pub const fn from_parts(parts: SimDurationParts) -> Self {
match Self::from_parts_checked(parts) {
Some(v) => v,
None => panic!("duration too big"),
}
}
pub const fn abs_diff(self, other: Self) -> Self {
Self {
attos: self.attos.abs_diff(other.attos),
}
}
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
let Some(attos) = self.attos.checked_add(rhs.attos) else {
return None;
};
Some(Self { attos })
}
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
let Some(attos) = self.attos.checked_sub(rhs.attos) else {
return None;
};
Some(Self { attos })
}
pub const fn is_zero(self) -> bool {
self.attos == 0
}
pub const fn saturating_add(self, rhs: Self) -> Self {
Self {
attos: self.attos.saturating_add(rhs.attos),
}
}
pub const fn saturating_sub(self, rhs: Self) -> Self {
Self {
attos: self.attos.saturating_sub(rhs.attos),
}
}
pub const fn checked_ilog10(self) -> Option<i32> {
let Some(ilog10_attos) = self.attos.checked_ilog10() else {
return None;
};
Some(ilog10_attos as i32 - Self::LOG10_ATTOS_PER_SEC as i32)
}
#[track_caller]
pub const fn ilog10(self) -> i32 {
let Some(retval) = self.checked_ilog10() else {
panic!("tried to take the ilog10 of 0");
};
retval
}
pub const fn checked_pow10(log10: i32, underflow_is_zero: bool) -> Option<Self> {
let Some(log10) = Self::LOG10_ATTOS_PER_SEC.checked_add_signed(log10) else {
return if log10 < 0 && underflow_is_zero {
Some(Self::ZERO)
} else {
None
};
};
let Some(attos) = 10u128.checked_pow(log10) else {
return None;
};
Some(Self { attos })
}
#[track_caller]
pub const fn pow10(log10: i32) -> Self {
let Some(retval) = Self::checked_pow10(log10, true) else {
panic!("pow10 overflowed");
};
retval
}
pub const fn is_power_of_ten(self) -> bool {
const TEN: u128 = 10;
const NUMBER_OF_POWERS_OF_TEN: usize = {
let mut n = 0;
while let Some(_) = TEN.checked_pow(n as u32) {
n += 1;
}
n
};
const POWERS_OF_TEN: [u128; NUMBER_OF_POWERS_OF_TEN] = {
let mut retval = [0; NUMBER_OF_POWERS_OF_TEN];
let mut i = 0;
while i < NUMBER_OF_POWERS_OF_TEN {
retval[i] = TEN.pow(i as u32);
i += 1;
}
retval
};
let mut i = 0;
while i < NUMBER_OF_POWERS_OF_TEN {
if self.attos == POWERS_OF_TEN[i] {
return true;
}
i += 1;
}
false
}
}
impl From<Duration> for SimDuration {
fn from(duration: Duration) -> Self {
Self::from_nanos(duration.as_nanos())
}
}
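
SimDuration above keeps time as a single u128 count of attoseconds, so every unit constructor and accessor is just a multiplication or division by a power of ten. A standalone sketch of that fixed-point scheme reduced to two units (TinyDuration is an illustrative name, not the crate's type):

// Standalone sketch of the attosecond fixed-point scheme used by SimDuration:
// one u128 counter at 10^-18 s resolution, with unit constructors/accessors
// implemented as multiplications and divisions by powers of ten.
// Only nanoseconds and seconds are shown; the real type covers every SI step.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Default)]
struct TinyDuration {
    attos: u128,
}

impl TinyDuration {
    const ATTOS_PER_NANO: u128 = 10u128.pow(9); // 10^18 attos/s divided by 10^9 ns/s
    const ATTOS_PER_SEC: u128 = 10u128.pow(18);

    const fn from_nanos(nanos: u128) -> Self {
        Self {
            attos: nanos * Self::ATTOS_PER_NANO,
        }
    }

    const fn as_nanos(self) -> u128 {
        self.attos / Self::ATTOS_PER_NANO
    }

    const fn from_secs(secs: u128) -> Self {
        Self {
            attos: secs * Self::ATTOS_PER_SEC,
        }
    }

    const fn checked_add(self, rhs: Self) -> Option<Self> {
        match self.attos.checked_add(rhs.attos) {
            Some(attos) => Some(Self { attos }),
            None => None,
        }
    }
}

fn main() {
    let t = TinyDuration::from_secs(1)
        .checked_add(TinyDuration::from_nanos(500))
        .expect("no overflow");
    assert_eq!(t.as_nanos(), 1_000_000_500);
    println!("{} ns", t.as_nanos());
}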

View file

@ -1,913 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
array::{Array, ArrayType},
bundle::{Bundle, BundleType},
clock::Clock,
enum_::{Enum, EnumType},
expr::{CastBitsTo, Expr, ToExpr},
int::{Bool, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
reset::{AsyncReset, Reset, SyncReset},
ty::{CanonicalType, StaticType, Type},
util::{
alternating_cell::{AlternatingCell, AlternatingCellMethods},
ConstUsize,
},
};
use bitvec::{slice::BitSlice, vec::BitVec};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::{
fmt,
ops::{Deref, DerefMut},
sync::Arc,
};
#[derive(Copy, Clone, Eq, PartialEq)]
enum ValidFlags {
BothValid = 0,
OnlyValueValid = 1,
OnlyBitsValid = 2,
}
#[derive(Clone)]
struct SimValueInner<T: Type> {
value: T::SimValue,
bits: UIntValue,
valid_flags: ValidFlags,
ty: T,
}
impl<T: Type> SimValueInner<T> {
fn fill_bits(&mut self) {
match self.valid_flags {
ValidFlags::BothValid | ValidFlags::OnlyBitsValid => {}
ValidFlags::OnlyValueValid => {
self.ty.sim_value_to_bits(&self.value, self.bits.bits_mut());
self.valid_flags = ValidFlags::BothValid;
}
}
}
fn into_bits(mut self) -> UIntValue {
self.fill_bits();
self.bits
}
fn bits_mut(&mut self) -> &mut UIntValue {
self.fill_bits();
self.valid_flags = ValidFlags::OnlyBitsValid;
&mut self.bits
}
fn fill_value(&mut self) {
match self.valid_flags {
ValidFlags::BothValid | ValidFlags::OnlyValueValid => {}
ValidFlags::OnlyBitsValid => {
self.ty
.sim_value_clone_from_bits(&mut self.value, self.bits.bits());
self.valid_flags = ValidFlags::BothValid;
}
}
}
fn into_value(mut self) -> T::SimValue {
self.fill_value();
self.value
}
fn value_mut(&mut self) -> &mut T::SimValue {
self.fill_value();
self.valid_flags = ValidFlags::OnlyValueValid;
&mut self.value
}
}
impl<T: Type> AlternatingCellMethods for SimValueInner<T> {
fn unique_to_shared(&mut self) {
match self.valid_flags {
ValidFlags::BothValid => return,
ValidFlags::OnlyValueValid => {
self.ty.sim_value_to_bits(&self.value, self.bits.bits_mut())
}
ValidFlags::OnlyBitsValid => self
.ty
.sim_value_clone_from_bits(&mut self.value, self.bits.bits()),
}
self.valid_flags = ValidFlags::BothValid;
}
fn shared_to_unique(&mut self) {}
}
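
SimValueInner above keeps both the typed value and its raw bit representation, and uses valid_flags to convert lazily only when the stale side is actually requested. A standalone sketch of that dual-representation idea, with u8 standing in for T::SimValue and a [bool; 8] for the bit storage (DualRepr is illustrative, not the crate's type):

// Standalone sketch of the SimValueInner idea: keep a typed value and its bit
// representation side by side, track which side is current, and only convert
// when the stale side is needed.
#[derive(Copy, Clone, PartialEq, Eq)]
enum Valid {
    Both,
    OnlyValue,
    OnlyBits,
}

struct DualRepr {
    value: u8,
    bits: [bool; 8],
    valid: Valid,
}

impl DualRepr {
    fn from_value(value: u8) -> Self {
        Self { value, bits: [false; 8], valid: Valid::OnlyValue }
    }

    fn fill_bits(&mut self) {
        if let Valid::OnlyValue = self.valid {
            for (i, bit) in self.bits.iter_mut().enumerate() {
                *bit = (self.value >> i) & 1 != 0;
            }
            self.valid = Valid::Both;
        }
    }

    fn fill_value(&mut self) {
        if let Valid::OnlyBits = self.valid {
            self.value = self
                .bits
                .iter()
                .enumerate()
                .map(|(i, &b)| (b as u8) << i)
                .sum();
            self.valid = Valid::Both;
        }
    }

    // Mutable access to the bits invalidates the typed value, like bits_mut().
    fn bits_mut(&mut self) -> &mut [bool; 8] {
        self.fill_bits();
        self.valid = Valid::OnlyBits;
        &mut self.bits
    }

    // Mutable access to the value invalidates the bits, like value_mut().
    fn value_mut(&mut self) -> &mut u8 {
        self.fill_value();
        self.valid = Valid::OnlyValue;
        &mut self.value
    }
}

fn main() {
    let mut d = DualRepr::from_value(0b1010_0001);
    d.bits_mut()[1] = true; // edit through the bit view
    assert_eq!(*d.value_mut(), 0b1010_0011); // typed view is rebuilt lazily
}
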
#[derive(Serialize, Deserialize)]
#[serde(rename = "SimValue")]
#[serde(bound(
serialize = "T: Type<SimValue: Serialize> + Serialize",
deserialize = "T: Type<SimValue: Deserialize<'de>> + Deserialize<'de>"
))]
struct SerdeSimValue<'a, T: Type> {
ty: T,
value: std::borrow::Cow<'a, T::SimValue>,
}
impl<T: Type<SimValue: Serialize> + Serialize> Serialize for SimValue<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
SerdeSimValue {
ty: SimValue::ty(self),
value: std::borrow::Cow::Borrowed(&*self),
}
.serialize(serializer)
}
}
impl<'de, T: Type<SimValue: Deserialize<'de>> + Deserialize<'de>> Deserialize<'de> for SimValue<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let SerdeSimValue { ty, value } = SerdeSimValue::<T>::deserialize(deserializer)?;
Ok(SimValue::from_value(ty, value.into_owned()))
}
}
pub struct SimValue<T: Type> {
inner: AlternatingCell<SimValueInner<T>>,
}
impl<T: Type + Clone> Clone for SimValue<T> {
fn clone(&self) -> Self {
Self {
inner: AlternatingCell::new_unique(self.inner.share().clone()),
}
}
}
impl<T: Type> SimValue<T> {
#[track_caller]
pub fn from_bits(ty: T, bits: UIntValue) -> Self {
assert_eq!(ty.canonical().bit_width(), bits.width());
let inner = SimValueInner {
value: ty.sim_value_from_bits(bits.bits()),
bits,
valid_flags: ValidFlags::BothValid,
ty,
};
Self {
inner: AlternatingCell::new_shared(inner),
}
}
#[track_caller]
pub fn from_bitslice(ty: T, bits: &BitSlice) -> Self {
Self::from_bits(ty, UIntValue::new(Arc::new(bits.to_bitvec())))
}
pub fn from_value(ty: T, value: T::SimValue) -> Self {
let inner = SimValueInner {
bits: UIntValue::new_dyn(Arc::new(BitVec::repeat(false, ty.canonical().bit_width()))),
value,
valid_flags: ValidFlags::OnlyValueValid,
ty,
};
Self {
inner: AlternatingCell::new_unique(inner),
}
}
pub fn ty(this: &Self) -> T {
this.inner.share().ty
}
pub fn into_bits(this: Self) -> UIntValue {
this.inner.into_inner().into_bits()
}
pub fn into_ty_and_bits(this: Self) -> (T, UIntValue) {
let inner = this.inner.into_inner();
(inner.ty, inner.into_bits())
}
pub fn bits(this: &Self) -> &UIntValue {
&this.inner.share().bits
}
pub fn bits_mut(this: &mut Self) -> &mut UIntValue {
this.inner.unique().bits_mut()
}
pub fn into_value(this: Self) -> T::SimValue {
this.inner.into_inner().into_value()
}
pub fn value(this: &Self) -> &T::SimValue {
&this.inner.share().value
}
pub fn value_mut(this: &mut Self) -> &mut T::SimValue {
this.inner.unique().value_mut()
}
#[track_caller]
pub fn from_canonical(v: SimValue<CanonicalType>) -> Self {
let (ty, bits) = SimValue::into_ty_and_bits(v);
Self::from_bits(T::from_canonical(ty), bits)
}
pub fn into_canonical(this: Self) -> SimValue<CanonicalType> {
let (ty, bits) = Self::into_ty_and_bits(this);
SimValue::from_bits(ty.canonical(), bits)
}
pub fn canonical(this: &Self) -> SimValue<CanonicalType> {
SimValue::from_bits(Self::ty(this).canonical(), Self::bits(this).clone())
}
#[track_caller]
pub fn from_dyn_int(v: SimValue<T::Dyn>) -> Self
where
T: IntType,
{
let (ty, bits) = SimValue::into_ty_and_bits(v);
SimValue::from_bits(T::from_dyn_int(ty), bits)
}
pub fn into_dyn_int(this: Self) -> SimValue<T::Dyn>
where
T: IntType,
{
let (ty, bits) = Self::into_ty_and_bits(this);
SimValue::from_bits(ty.as_dyn_int(), bits)
}
pub fn to_dyn_int(this: &Self) -> SimValue<T::Dyn>
where
T: IntType,
{
SimValue::from_bits(Self::ty(this).as_dyn_int(), Self::bits(&this).clone())
}
#[track_caller]
pub fn from_bundle(v: SimValue<Bundle>) -> Self
where
T: BundleType,
{
let (ty, bits) = SimValue::into_ty_and_bits(v);
SimValue::from_bits(T::from_canonical(CanonicalType::Bundle(ty)), bits)
}
pub fn into_bundle(this: Self) -> SimValue<Bundle>
where
T: BundleType,
{
let (ty, bits) = Self::into_ty_and_bits(this);
SimValue::from_bits(Bundle::from_canonical(ty.canonical()), bits)
}
pub fn to_bundle(this: &Self) -> SimValue<Bundle>
where
T: BundleType,
{
SimValue::from_bits(
Bundle::from_canonical(Self::ty(this).canonical()),
Self::bits(&this).clone(),
)
}
#[track_caller]
pub fn from_enum(v: SimValue<Enum>) -> Self
where
T: EnumType,
{
let (ty, bits) = SimValue::into_ty_and_bits(v);
SimValue::from_bits(T::from_canonical(CanonicalType::Enum(ty)), bits)
}
pub fn into_enum(this: Self) -> SimValue<Enum>
where
T: EnumType,
{
let (ty, bits) = Self::into_ty_and_bits(this);
SimValue::from_bits(Enum::from_canonical(ty.canonical()), bits)
}
pub fn to_enum(this: &Self) -> SimValue<Enum>
where
T: EnumType,
{
SimValue::from_bits(
Enum::from_canonical(Self::ty(this).canonical()),
Self::bits(&this).clone(),
)
}
}
impl<T: Type> Deref for SimValue<T> {
type Target = T::SimValue;
fn deref(&self) -> &Self::Target {
Self::value(self)
}
}
impl<T: Type> DerefMut for SimValue<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
Self::value_mut(self)
}
}
impl<T: Type> fmt::Debug for SimValue<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let inner = self.inner.share();
f.debug_struct("SimValue")
.field("ty", &inner.ty)
.field("value", &inner.value)
.finish()
}
}
impl<T: Type> ToExpr for SimValue<T> {
type Type = T;
#[track_caller]
fn to_expr(&self) -> Expr<Self::Type> {
let inner = self.inner.share();
inner.bits.cast_bits_to(inner.ty)
}
}
pub trait SimValuePartialEq<T: Type = Self>: Type {
#[track_caller]
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<T>) -> bool;
}
impl<T: SimValuePartialEq<U>, U: Type> PartialEq<SimValue<U>> for SimValue<T> {
#[track_caller]
fn eq(&self, other: &SimValue<U>) -> bool {
T::sim_value_eq(self, other)
}
}
impl<Width: Size> SimValuePartialEq<Self> for UIntType<Width> {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
**this == **other
}
}
impl<Width: Size> SimValuePartialEq<Self> for SIntType<Width> {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
**this == **other
}
}
impl SimValuePartialEq<Bool> for Bool {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Bool>) -> bool {
**this == **other
}
}
pub trait ToSimValue: ToSimValueWithType<<Self as ToSimValue>::Type> {
type Type: Type;
#[track_caller]
fn to_sim_value(&self) -> SimValue<Self::Type>;
#[track_caller]
fn into_sim_value(self) -> SimValue<Self::Type>
where
Self: Sized,
{
self.to_sim_value()
}
#[track_caller]
fn arc_into_sim_value(self: Arc<Self>) -> SimValue<Self::Type> {
self.to_sim_value()
}
#[track_caller]
fn arc_to_sim_value(self: &Arc<Self>) -> SimValue<Self::Type> {
self.to_sim_value()
}
}
pub trait ToSimValueWithType<T: Type> {
#[track_caller]
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T>;
#[track_caller]
fn into_sim_value_with_type(self, ty: T) -> SimValue<T>
where
Self: Sized,
{
self.to_sim_value_with_type(ty)
}
#[track_caller]
fn arc_into_sim_value_with_type(self: Arc<Self>, ty: T) -> SimValue<T> {
self.to_sim_value_with_type(ty)
}
#[track_caller]
fn arc_to_sim_value_with_type(self: &Arc<Self>, ty: T) -> SimValue<T> {
self.to_sim_value_with_type(ty)
}
}
macro_rules! forward_to_sim_value_with_type {
([$($generics:tt)*] $ty:ty) => {
impl<$($generics)*> ToSimValueWithType<<Self as ToSimValue>::Type> for $ty {
fn to_sim_value_with_type(&self, ty: <Self as ToSimValue>::Type) -> SimValue<<Self as ToSimValue>::Type> {
let retval = Self::to_sim_value(self);
assert_eq!(SimValue::ty(&retval), ty);
retval
}
#[track_caller]
fn into_sim_value_with_type(self, ty: <Self as ToSimValue>::Type) -> SimValue<<Self as ToSimValue>::Type>
where
Self: Sized,
{
let retval = Self::into_sim_value(self);
assert_eq!(SimValue::ty(&retval), ty);
retval
}
#[track_caller]
fn arc_into_sim_value_with_type(self: Arc<Self>, ty: <Self as ToSimValue>::Type) -> SimValue<<Self as ToSimValue>::Type> {
let retval = Self::arc_into_sim_value(self);
assert_eq!(SimValue::ty(&retval), ty);
retval
}
#[track_caller]
fn arc_to_sim_value_with_type(self: &Arc<Self>, ty: <Self as ToSimValue>::Type) -> SimValue<<Self as ToSimValue>::Type> {
let retval = Self::arc_to_sim_value(self);
assert_eq!(SimValue::ty(&retval), ty);
retval
}
}
};
}
impl<T: Type> ToSimValue for SimValue<T> {
type Type = T;
fn to_sim_value(&self) -> SimValue<Self::Type> {
self.clone()
}
fn into_sim_value(self) -> SimValue<Self::Type> {
self
}
}
forward_to_sim_value_with_type!([T: Type] SimValue<T>);
impl<T: Type> ToSimValueWithType<T> for BitVec {
#[track_caller]
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
self.clone().into_sim_value_with_type(ty)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
Arc::new(self).arc_into_sim_value_with_type(ty)
}
#[track_caller]
fn arc_into_sim_value_with_type(self: Arc<Self>, ty: T) -> SimValue<T> {
SimValue::from_bits(ty, UIntValue::new_dyn(self))
}
#[track_caller]
fn arc_to_sim_value_with_type(self: &Arc<Self>, ty: T) -> SimValue<T> {
SimValue::from_bits(ty, UIntValue::new_dyn(self.clone()))
}
}
impl<T: Type> ToSimValueWithType<T> for bitvec::boxed::BitBox {
#[track_caller]
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
self.clone().into_sim_value_with_type(ty)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
self.into_bitvec().into_sim_value_with_type(ty)
}
}
impl<T: Type> ToSimValueWithType<T> for BitSlice {
#[track_caller]
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
self.to_bitvec().into_sim_value_with_type(ty)
}
}
impl<This: ?Sized + ToSimValue> ToSimValue for &'_ This {
type Type = This::Type;
fn to_sim_value(&self) -> SimValue<Self::Type> {
This::to_sim_value(self)
}
}
impl<This: ?Sized + ToSimValueWithType<T>, T: Type> ToSimValueWithType<T> for &'_ This {
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
This::to_sim_value_with_type(self, ty)
}
}
impl<This: ?Sized + ToSimValue> ToSimValue for &'_ mut This {
type Type = This::Type;
fn to_sim_value(&self) -> SimValue<Self::Type> {
This::to_sim_value(self)
}
}
impl<This: ?Sized + ToSimValueWithType<T>, T: Type> ToSimValueWithType<T> for &'_ mut This {
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
This::to_sim_value_with_type(self, ty)
}
}
impl<This: ?Sized + ToSimValue> ToSimValue for Arc<This> {
type Type = This::Type;
fn to_sim_value(&self) -> SimValue<Self::Type> {
This::arc_to_sim_value(self)
}
fn into_sim_value(self) -> SimValue<Self::Type> {
This::arc_into_sim_value(self)
}
}
impl<This: ?Sized + ToSimValueWithType<T>, T: Type> ToSimValueWithType<T> for Arc<This> {
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
This::arc_to_sim_value_with_type(self, ty)
}
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
This::arc_into_sim_value_with_type(self, ty)
}
}
impl<This: ?Sized + ToSimValue + Send + Sync + 'static> ToSimValue
for crate::intern::Interned<This>
{
type Type = This::Type;
fn to_sim_value(&self) -> SimValue<Self::Type> {
This::to_sim_value(self)
}
}
impl<This: ?Sized + ToSimValueWithType<T> + Send + Sync + 'static, T: Type> ToSimValueWithType<T>
for crate::intern::Interned<This>
{
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
This::to_sim_value_with_type(self, ty)
}
}
impl<This: ToSimValue> ToSimValue for Box<This> {
type Type = This::Type;
fn to_sim_value(&self) -> SimValue<Self::Type> {
This::to_sim_value(self)
}
fn into_sim_value(self) -> SimValue<Self::Type> {
This::into_sim_value(*self)
}
}
impl<This: ToSimValueWithType<T>, T: Type> ToSimValueWithType<T> for Box<This> {
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
This::to_sim_value_with_type(self, ty)
}
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
This::into_sim_value_with_type(*self, ty)
}
}
impl<T: Type, Len: Size> SimValue<ArrayType<T, Len>> {
#[track_caller]
pub fn from_array_elements<I: IntoIterator<Item: ToSimValueWithType<T>>>(
ty: ArrayType<T, Len>,
elements: I,
) -> Self {
let element_ty = ty.element();
let elements = Vec::from_iter(
elements
.into_iter()
.map(|element| element.into_sim_value_with_type(element_ty)),
);
assert_eq!(elements.len(), ty.len());
SimValue::from_value(ty, elements.try_into().ok().expect("already checked len"))
}
}
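// Illustrative sketch, not part of the diff above: `from_array_elements` accepts any
// iterator whose items implement `ToSimValueWithType` for the element type; the
// primitive-integer impls further below make plain `u8` literals usable. It panics if the
// element count does not match `ty.len()`, so the `3` here is load-bearing.
fn example_array_sim_value(ty: Array<UInt>) -> SimValue<Array<UInt>> {
    SimValue::from_array_elements(ty, [1u8, 2u8, 3u8])
}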
impl<Element: ToSimValueWithType<T>, T: Type> ToSimValueWithType<Array<T>> for [Element] {
#[track_caller]
fn to_sim_value_with_type(&self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
}
impl<Element: ToSimValue<Type: StaticType>> ToSimValue for [Element] {
type Type = Array<Element::Type>;
#[track_caller]
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_array_elements(ArrayType::new_dyn(StaticType::TYPE, self.len()), self)
}
}
impl<Element: ToSimValueWithType<CanonicalType>> ToSimValueWithType<CanonicalType> for [Element] {
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
}
impl<Element: ToSimValueWithType<T>, T: Type, const N: usize> ToSimValueWithType<Array<T, N>>
for [Element; N]
where
ConstUsize<N>: KnownSize,
{
#[track_caller]
fn to_sim_value_with_type(&self, ty: Array<T, N>) -> SimValue<Array<T, N>> {
SimValue::from_array_elements(ty, self)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: Array<T, N>) -> SimValue<Array<T, N>> {
SimValue::from_array_elements(ty, self)
}
}
impl<Element: ToSimValue<Type: StaticType>, const N: usize> ToSimValue for [Element; N]
where
ConstUsize<N>: KnownSize,
{
type Type = Array<Element::Type, N>;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_array_elements(StaticType::TYPE, self)
}
fn into_sim_value(self) -> SimValue<Self::Type> {
SimValue::from_array_elements(StaticType::TYPE, self)
}
}
impl<Element: ToSimValueWithType<T>, T: Type, const N: usize> ToSimValueWithType<Array<T>>
for [Element; N]
{
#[track_caller]
fn to_sim_value_with_type(&self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
}
impl<Element: ToSimValueWithType<CanonicalType>, const N: usize> ToSimValueWithType<CanonicalType>
for [Element; N]
{
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
#[track_caller]
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
}
impl<Element: ToSimValueWithType<T>, T: Type> ToSimValueWithType<Array<T>> for Vec<Element> {
#[track_caller]
fn to_sim_value_with_type(&self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
}
impl<Element: ToSimValue<Type: StaticType>> ToSimValue for Vec<Element> {
type Type = Array<Element::Type>;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_array_elements(ArrayType::new_dyn(StaticType::TYPE, self.len()), self)
}
fn into_sim_value(self) -> SimValue<Self::Type> {
SimValue::from_array_elements(ArrayType::new_dyn(StaticType::TYPE, self.len()), self)
}
}
impl<Element: ToSimValueWithType<CanonicalType>> ToSimValueWithType<CanonicalType>
for Vec<Element>
{
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
#[track_caller]
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
}
impl<Element: ToSimValueWithType<T>, T: Type> ToSimValueWithType<Array<T>> for Box<[Element]> {
#[track_caller]
fn to_sim_value_with_type(&self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: Array<T>) -> SimValue<Array<T>> {
SimValue::from_array_elements(ty, self)
}
}
impl<Element: ToSimValue<Type: StaticType>> ToSimValue for Box<[Element]> {
type Type = Array<Element::Type>;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_array_elements(ArrayType::new_dyn(StaticType::TYPE, self.len()), self)
}
fn into_sim_value(self) -> SimValue<Self::Type> {
SimValue::from_array_elements(ArrayType::new_dyn(StaticType::TYPE, self.len()), self)
}
}
impl<Element: ToSimValueWithType<CanonicalType>> ToSimValueWithType<CanonicalType>
for Box<[Element]>
{
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
#[track_caller]
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_array_elements(
<Array>::from_canonical(ty),
self,
))
}
}
impl<T: Type> ToSimValue for Expr<T> {
type Type = T;
#[track_caller]
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_bitslice(
Expr::ty(*self),
&crate::expr::ToLiteralBits::to_literal_bits(self)
.expect("must be a literal expression"),
)
}
}
forward_to_sim_value_with_type!([T: Type] Expr<T>);
macro_rules! impl_to_sim_value_for_bool_like {
($ty:ident) => {
impl ToSimValueWithType<$ty> for bool {
fn to_sim_value_with_type(&self, ty: $ty) -> SimValue<$ty> {
SimValue::from_value(ty, *self)
}
}
};
}
impl ToSimValue for bool {
type Type = Bool;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_value(Bool, *self)
}
}
impl_to_sim_value_for_bool_like!(Bool);
impl_to_sim_value_for_bool_like!(AsyncReset);
impl_to_sim_value_for_bool_like!(SyncReset);
impl_to_sim_value_for_bool_like!(Reset);
impl_to_sim_value_for_bool_like!(Clock);
impl ToSimValueWithType<CanonicalType> for bool {
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
match ty {
CanonicalType::UInt(_)
| CanonicalType::SInt(_)
| CanonicalType::Array(_)
| CanonicalType::Enum(_)
| CanonicalType::Bundle(_)
| CanonicalType::PhantomConst(_) => {
panic!("can't create SimValue from bool: expected value of type: {ty:?}");
}
CanonicalType::Bool(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_)
| CanonicalType::Clock(_) => {
SimValue::from_bits(ty, UIntValue::new(Arc::new(BitVec::repeat(*self, 1))))
}
}
}
}
macro_rules! impl_to_sim_value_for_primitive_int {
($prim:ident) => {
impl ToSimValue for $prim {
type Type = <$prim as ToExpr>::Type;
#[track_caller]
fn to_sim_value(
&self,
) -> SimValue<Self::Type> {
SimValue::from_value(StaticType::TYPE, (*self).into())
}
}
forward_to_sim_value_with_type!([] $prim);
impl ToSimValueWithType<<<$prim as ToExpr>::Type as IntType>::Dyn> for $prim {
#[track_caller]
fn to_sim_value_with_type(
&self,
ty: <<$prim as ToExpr>::Type as IntType>::Dyn,
) -> SimValue<<<$prim as ToExpr>::Type as IntType>::Dyn> {
SimValue::from_value(
ty,
<<$prim as ToExpr>::Type as Type>::SimValue::from(*self).as_dyn_int(),
)
}
}
impl ToSimValueWithType<CanonicalType> for $prim {
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
let ty: <<$prim as ToExpr>::Type as IntType>::Dyn = Type::from_canonical(ty);
SimValue::into_canonical(self.to_sim_value_with_type(ty))
}
}
};
}
impl_to_sim_value_for_primitive_int!(u8);
impl_to_sim_value_for_primitive_int!(u16);
impl_to_sim_value_for_primitive_int!(u32);
impl_to_sim_value_for_primitive_int!(u64);
impl_to_sim_value_for_primitive_int!(u128);
impl_to_sim_value_for_primitive_int!(usize);
impl_to_sim_value_for_primitive_int!(i8);
impl_to_sim_value_for_primitive_int!(i16);
impl_to_sim_value_for_primitive_int!(i32);
impl_to_sim_value_for_primitive_int!(i64);
impl_to_sim_value_for_primitive_int!(i128);
impl_to_sim_value_for_primitive_int!(isize);
macro_rules! impl_to_sim_value_for_int_value {
($IntValue:ident, $Int:ident, $IntType:ident) => {
impl<Width: Size> ToSimValue for $IntValue<Width> {
type Type = $IntType<Width>;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_value(self.ty(), self.clone())
}
fn into_sim_value(self) -> SimValue<Self::Type> {
SimValue::from_value(self.ty(), self)
}
}
impl<Width: Size> ToSimValueWithType<$IntType<Width>> for $IntValue<Width> {
fn to_sim_value_with_type(&self, ty: $IntType<Width>) -> SimValue<$IntType<Width>> {
SimValue::from_value(ty, self.clone())
}
fn into_sim_value_with_type(self, ty: $IntType<Width>) -> SimValue<$IntType<Width>> {
SimValue::from_value(ty, self)
}
}
impl<Width: KnownSize> ToSimValueWithType<$Int> for $IntValue<Width> {
fn to_sim_value_with_type(&self, ty: $Int) -> SimValue<$Int> {
self.bits().to_sim_value_with_type(ty)
}
fn into_sim_value_with_type(self, ty: $Int) -> SimValue<$Int> {
self.into_bits().into_sim_value_with_type(ty)
}
}
impl<Width: Size> ToSimValueWithType<CanonicalType> for $IntValue<Width> {
#[track_caller]
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(
self.to_sim_value_with_type($IntType::<Width>::from_canonical(ty)),
)
}
#[track_caller]
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(
self.into_sim_value_with_type($IntType::<Width>::from_canonical(ty)),
)
}
}
};
}
impl_to_sim_value_for_int_value!(UIntValue, UInt, UIntType);
impl_to_sim_value_for_int_value!(SIntValue, SInt, SIntType);
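// Illustrative sketch, not part of the diff above: the difference between the two traits
// is where the fayalite type comes from — `to_sim_value` derives it from the Rust value,
// while `to_sim_value_with_type` pairs the value with a caller-supplied type.
fn example_primitive_conversions() {
    let _static_width = 0x12u8.to_sim_value(); // width fixed by the Rust type
    let dynamic_width = 0x12u8.to_sim_value_with_type(UInt::new(8)); // explicit dynamic type
    assert_eq!(SimValue::ty(&dynamic_width).width(), 8);
}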

File diff suppressed because it is too large

View file

@ -2,8 +2,9 @@
// See Notices.txt for copyright information
use crate::{
intern::{Intern, Interned},
util::{DebugAsDisplay, HashMap},
util::DebugAsDisplay,
};
use hashbrown::HashMap;
use std::{cell::RefCell, fmt, num::NonZeroUsize, panic, path::Path};
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -96,7 +97,7 @@ impl NormalizeFilesForTestsState {
fn new() -> Self {
Self {
test_position: panic::Location::caller(),
file_pattern_matches: HashMap::default(),
file_pattern_matches: HashMap::new(),
}
}
}
@ -142,7 +143,7 @@ impl From<&'_ panic::Location<'_>> for SourceLocation {
map.entry_ref(file)
.or_insert_with(|| NormalizedFileForTestState {
file_name_id: NonZeroUsize::new(len + 1).unwrap(),
positions_map: HashMap::default(),
positions_map: HashMap::new(),
});
file_str = m.generate_file_name(file_state.file_name_id);
file = &file_str;

View file

@ -1,122 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
cli::{FormalArgs, FormalMode, FormalOutput, RunPhase},
firrtl::ExportOptions,
util::HashMap,
};
use clap::Parser;
use serde::Deserialize;
use std::{
fmt::Write,
path::{Path, PathBuf},
process::Command,
sync::{Mutex, OnceLock},
};
fn assert_formal_helper() -> FormalArgs {
static FORMAL_ARGS: OnceLock<FormalArgs> = OnceLock::new();
// ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads
FORMAL_ARGS
.get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"]))
.clone()
}
#[derive(Deserialize)]
struct CargoMetadata {
target_directory: String,
}
fn get_cargo_target_dir() -> &'static Path {
static RETVAL: OnceLock<PathBuf> = OnceLock::new();
RETVAL.get_or_init(|| {
let output = Command::new(
std::env::var_os("CARGO")
.as_deref()
.unwrap_or("cargo".as_ref()),
)
.arg("metadata")
.output()
.expect("can't run `cargo metadata`");
if !output.status.success() {
panic!(
"can't run `cargo metadata`:\n{}\nexited with status: {}",
String::from_utf8_lossy(&output.stderr),
output.status
);
}
let CargoMetadata { target_directory } =
serde_json::from_slice(&output.stdout).expect("can't parse output of `cargo metadata`");
PathBuf::from(target_directory)
})
}
#[track_caller]
fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
static DIRS: Mutex<Option<HashMap<String, u64>>> = Mutex::new(None);
let test_name = test_name.to_string();
// don't use line/column numbers since those constantly change as you edit tests
let file = std::panic::Location::caller().file();
// simple reproducible hash
let simple_hash = file.bytes().chain(test_name.bytes()).fold(
((file.len() as u32) << 16).wrapping_add(test_name.len() as u32),
|mut h, b| {
h = h.wrapping_mul(0xaa0d184b);
h ^= h.rotate_right(5);
h ^= h.rotate_right(13);
h.wrapping_add(b as u32)
},
);
let mut dir = String::with_capacity(64);
for ch in Path::new(file)
.file_stem()
.unwrap_or_default()
.to_str()
.unwrap()
.chars()
.chain(['-'])
.chain(test_name.chars())
{
dir.push(match ch {
ch if ch.is_alphanumeric() => ch,
'_' | '-' | '+' | '.' | ',' | ' ' => ch,
_ => '_',
});
}
write!(dir, "-{simple_hash:08x}").unwrap();
let index = *DIRS
.lock()
.unwrap()
.get_or_insert_with(HashMap::default)
.entry_ref(&dir)
.and_modify(|v| *v += 1)
.or_insert(0);
write!(dir, "-{index}").unwrap();
get_cargo_target_dir()
.join("fayalite_assert_formal")
.join(dir)
}
#[track_caller]
pub fn assert_formal<M>(
test_name: impl std::fmt::Display,
module: M,
mode: FormalMode,
depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) where
FormalArgs: RunPhase<M, Output = FormalOutput>,
{
let mut args = assert_formal_helper();
args.verilog.firrtl.base.redirect_output_for_rust_test = true;
args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name));
args.verilog.firrtl.export_options = export_options;
args.verilog.debug = true;
args.mode = mode;
args.depth = depth;
if let Some(solver) = solver {
args.solver = solver.into();
}
args.run(module).expect("testing::assert_formal() failed");
}
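// Illustrative sketch, not part of the diff above: how a test helper would drive
// `assert_formal`. The test name and depth are arbitrary, and `ExportOptions` is assumed
// to implement `Default`.
fn formal_check_example<M>(module: M, mode: FormalMode)
where
    FormalArgs: RunPhase<M, Output = FormalOutput>,
{
    assert_formal(
        "formal_check_example", // folded into the per-test output directory name
        module,
        mode,
        14,   // proof depth
        None, // keep assert_formal's default solver
        ExportOptions::default(),
    );
}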

File diff suppressed because it is too large

View file

@ -1,130 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
array::Array,
bundle::{Bundle, BundleType},
clock::Clock,
enum_::{Enum, EnumType},
int::{Bool, SInt, UInt},
intern::Interned,
phantom_const::{PhantomConstCanonicalValue, PhantomConstValue},
prelude::PhantomConst,
reset::{AsyncReset, Reset, SyncReset},
ty::{BaseType, CanonicalType},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub(crate) struct SerdePhantomConst<T>(pub T);
impl<T: ?Sized + PhantomConstValue> Serialize for SerdePhantomConst<Interned<T>> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.0.serialize(serializer)
}
}
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for SerdePhantomConst<Interned<T>> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
T::deserialize_value(deserializer).map(Self)
}
}
#[derive(Serialize, Deserialize)]
#[serde(rename = "CanonicalType")]
pub(crate) enum SerdeCanonicalType<
ArrayElement = CanonicalType,
ThePhantomConst = SerdePhantomConst<Interned<PhantomConstCanonicalValue>>,
> {
UInt {
width: usize,
},
SInt {
width: usize,
},
Bool,
Array {
element: ArrayElement,
len: usize,
},
Enum {
variants: Interned<[crate::enum_::EnumVariant]>,
},
Bundle {
fields: Interned<[crate::bundle::BundleField]>,
},
AsyncReset,
SyncReset,
Reset,
Clock,
PhantomConst(ThePhantomConst),
}
impl<ArrayElement, PhantomConstInner> SerdeCanonicalType<ArrayElement, PhantomConstInner> {
pub(crate) fn as_serde_unexpected_str(&self) -> &'static str {
match self {
Self::UInt { .. } => "a UInt",
Self::SInt { .. } => "a SInt",
Self::Bool => "a Bool",
Self::Array { .. } => "an Array",
Self::Enum { .. } => "an Enum",
Self::Bundle { .. } => "a Bundle",
Self::AsyncReset => "an AsyncReset",
Self::SyncReset => "a SyncReset",
Self::Reset => "a Reset",
Self::Clock => "a Clock",
Self::PhantomConst(_) => "a PhantomConst",
}
}
}
impl<T: BaseType> From<T> for SerdeCanonicalType {
fn from(ty: T) -> Self {
let ty: CanonicalType = ty.into();
match ty {
CanonicalType::UInt(ty) => Self::UInt { width: ty.width() },
CanonicalType::SInt(ty) => Self::SInt { width: ty.width() },
CanonicalType::Bool(Bool {}) => Self::Bool,
CanonicalType::Array(ty) => Self::Array {
element: ty.element(),
len: ty.len(),
},
CanonicalType::Enum(ty) => Self::Enum {
variants: ty.variants(),
},
CanonicalType::Bundle(ty) => Self::Bundle {
fields: ty.fields(),
},
CanonicalType::AsyncReset(AsyncReset {}) => Self::AsyncReset,
CanonicalType::SyncReset(SyncReset {}) => Self::SyncReset,
CanonicalType::Reset(Reset {}) => Self::Reset,
CanonicalType::Clock(Clock {}) => Self::Clock,
CanonicalType::PhantomConst(ty) => Self::PhantomConst(SerdePhantomConst(ty.get())),
}
}
}
impl From<SerdeCanonicalType> for CanonicalType {
fn from(ty: SerdeCanonicalType) -> Self {
match ty {
SerdeCanonicalType::UInt { width } => Self::UInt(UInt::new(width)),
SerdeCanonicalType::SInt { width } => Self::SInt(SInt::new(width)),
SerdeCanonicalType::Bool => Self::Bool(Bool),
SerdeCanonicalType::Array { element, len } => Self::Array(Array::new(element, len)),
SerdeCanonicalType::Enum { variants } => Self::Enum(Enum::new(variants)),
SerdeCanonicalType::Bundle { fields } => Self::Bundle(Bundle::new(fields)),
SerdeCanonicalType::AsyncReset => Self::AsyncReset(AsyncReset),
SerdeCanonicalType::SyncReset => Self::SyncReset(SyncReset),
SerdeCanonicalType::Reset => Self::Reset(Reset),
SerdeCanonicalType::Clock => Self::Clock(Clock),
SerdeCanonicalType::PhantomConst(value) => {
Self::PhantomConst(PhantomConst::new(value.0))
}
}
}
}
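// Illustrative sketch, not part of the diff above: the adapter mirrors `CanonicalType`
// variant-for-variant, so converting a base type in and back out is lossless. `Bool` is
// used only because it is the simplest case; any `BaseType` works the same way.
fn round_trip_bool() -> CanonicalType {
    let adapter: SerdeCanonicalType = Bool.into();
    CanonicalType::from(adapter)
}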

View file

@ -1,28 +1,12 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub(crate) mod alternating_cell;
mod const_bool;
mod const_cmp;
mod const_usize;
mod misc;
mod scoped_ref;
pub(crate) mod streaming_read_utf8;
mod test_hasher;
// allow easily switching the hasher crate-wide for testing
#[cfg(feature = "unstable-test-hasher")]
pub type DefaultBuildHasher = test_hasher::DefaultBuildHasher;
#[cfg(not(feature = "unstable-test-hasher"))]
pub(crate) type DefaultBuildHasher = hashbrown::DefaultHashBuilder;
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, DefaultBuildHasher>;
pub(crate) type HashSet<T> = hashbrown::HashSet<T, DefaultBuildHasher>;
#[doc(inline)]
pub use const_bool::{ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool};
#[doc(inline)]
pub use const_usize::{ConstUsize, GenericConstUsize};
#[doc(inline)]
pub use const_cmp::{
@ -30,15 +14,7 @@ pub use const_cmp::{
const_usize_cmp,
};
#[doc(inline)]
pub use scoped_ref::ScopedRef;
#[doc(inline)]
pub use misc::{
get_many_mut, interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay,
DebugAsRawString, MakeMutSlice, RcWriter,
interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice,
};
pub mod job_server;
pub mod prefix_sum;
pub mod ready_valid;

View file

@ -1,122 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::util::DebugAsDisplay;
use std::{
cell::{Cell, UnsafeCell},
fmt,
};
pub(crate) trait AlternatingCellMethods {
fn unique_to_shared(&mut self);
fn shared_to_unique(&mut self);
}
#[derive(Copy, Clone, Debug)]
enum State {
Unique,
Shared,
Locked,
}
pub(crate) struct AlternatingCell<T: ?Sized> {
state: Cell<State>,
value: UnsafeCell<T>,
}
impl<T: ?Sized + fmt::Debug + AlternatingCellMethods> fmt::Debug for AlternatingCell<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("AlternatingCell")
.field(
self.try_share()
.as_ref()
.map(|v| -> &dyn fmt::Debug { v })
.unwrap_or(&DebugAsDisplay("<...>")),
)
.finish()
}
}
impl<T: ?Sized> AlternatingCell<T> {
pub(crate) const fn new_shared(value: T) -> Self
where
T: Sized,
{
Self {
state: Cell::new(State::Shared),
value: UnsafeCell::new(value),
}
}
pub(crate) const fn new_unique(value: T) -> Self
where
T: Sized,
{
Self {
state: Cell::new(State::Unique),
value: UnsafeCell::new(value),
}
}
pub(crate) fn is_unique(&self) -> bool {
matches!(self.state.get(), State::Unique)
}
pub(crate) fn is_shared(&self) -> bool {
matches!(self.state.get(), State::Shared)
}
pub(crate) fn into_inner(self) -> T
where
T: Sized,
{
self.value.into_inner()
}
pub(crate) fn try_share(&self) -> Option<&T>
where
T: AlternatingCellMethods,
{
match self.state.get() {
State::Shared => {}
State::Unique => {
struct Locked<'a>(&'a Cell<State>);
impl Drop for Locked<'_> {
fn drop(&mut self) {
self.0.set(State::Shared);
}
}
self.state.set(State::Locked);
let lock = Locked(&self.state);
// Safety: state is Locked, so nothing else will
// access value while calling unique_to_shared.
unsafe { &mut *self.value.get() }.unique_to_shared();
drop(lock);
}
State::Locked => return None,
}
// Safety: state is Shared so nothing will create any mutable
// references until the returned reference's lifetime expires.
Some(unsafe { &*self.value.get() })
}
#[track_caller]
pub(crate) fn share(&self) -> &T
where
T: AlternatingCellMethods,
{
let Some(retval) = self.try_share() else {
panic!("`share` called recursively");
};
retval
}
pub(crate) fn unique(&mut self) -> &mut T
where
T: AlternatingCellMethods,
{
match self.state.get() {
State::Shared => {
self.state.set(State::Unique);
self.value.get_mut().shared_to_unique();
}
State::Unique => {}
State::Locked => unreachable!(),
}
self.value.get_mut()
}
}
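// Illustrative sketch, not part of the diff above: a type that keeps a cheap pending count
// while unique and folds it into the total whenever it is handed out shared; the
// `Counter` type is hypothetical.
struct Counter {
    pending: usize,
    total: usize,
}
impl AlternatingCellMethods for Counter {
    fn unique_to_shared(&mut self) {
        self.total += std::mem::take(&mut self.pending);
    }
    fn shared_to_unique(&mut self) {}
}
fn example_alternation() {
    let mut cell = AlternatingCell::new_unique(Counter { pending: 3, total: 0 });
    assert_eq!(cell.share().total, 3); // unique_to_shared ran on this transition
    cell.unique().pending += 1; // transitions back to the unique state
    assert_eq!(cell.share().total, 4);
}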

View file

@ -1,9 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use serde::{
de::{DeserializeOwned, Error, Unexpected},
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{fmt::Debug, hash::Hash, mem::ManuallyDrop, ptr};
mod sealed {
@ -13,17 +9,7 @@ mod sealed {
/// # Safety
/// the only implementation is `ConstBool<Self::VALUE>`
pub unsafe trait GenericConstBool:
sealed::Sealed
+ Copy
+ Ord
+ Hash
+ Default
+ Debug
+ 'static
+ Send
+ Sync
+ Serialize
+ DeserializeOwned
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
{
const VALUE: bool;
}
@ -44,32 +30,6 @@ unsafe impl<const VALUE: bool> GenericConstBool for ConstBool<VALUE> {
const VALUE: bool = VALUE;
}
impl<const VALUE: bool> Serialize for ConstBool<VALUE> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
VALUE.serialize(serializer)
}
}
impl<'de, const VALUE: bool> Deserialize<'de> for ConstBool<VALUE> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = bool::deserialize(deserializer)?;
if value == VALUE {
Ok(ConstBool)
} else {
Err(D::Error::invalid_value(
Unexpected::Bool(value),
&if VALUE { "true" } else { "false" },
))
}
}
}
pub trait ConstBoolDispatchTag {
type Type<Select: GenericConstBool>;
}

View file

@ -1,69 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use serde::{
de::{DeserializeOwned, Error, Unexpected},
Deserialize, Deserializer, Serialize, Serializer,
};
use std::{fmt::Debug, hash::Hash};
mod sealed {
pub trait Sealed {}
}
/// the only implementation is `ConstUsize<Self::VALUE>`
pub trait GenericConstUsize:
sealed::Sealed
+ Copy
+ Ord
+ Hash
+ Default
+ Debug
+ 'static
+ Send
+ Sync
+ Serialize
+ DeserializeOwned
{
const VALUE: usize;
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct ConstUsize<const VALUE: usize>;
impl<const VALUE: usize> Debug for ConstUsize<VALUE> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("ConstUsize").field(&Self::VALUE).finish()
}
}
impl<const VALUE: usize> sealed::Sealed for ConstUsize<VALUE> {}
impl<const VALUE: usize> GenericConstUsize for ConstUsize<VALUE> {
const VALUE: usize = VALUE;
}
impl<const VALUE: usize> Serialize for ConstUsize<VALUE> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
VALUE.serialize(serializer)
}
}
impl<'de, const VALUE: usize> Deserialize<'de> for ConstUsize<VALUE> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = usize::deserialize(deserializer)?;
if value == VALUE {
Ok(ConstUsize)
} else {
Err(D::Error::invalid_value(
Unexpected::Unsigned(value as u64),
&&*VALUE.to_string(),
))
}
}
}
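// Illustrative sketch, not part of the diff above: deserialization only accepts the exact
// `VALUE`, so a mismatched constant in serialized data is rejected rather than silently
// coerced. `serde_json` is assumed here purely for demonstration.
fn example_const_usize_serde() {
    let three: ConstUsize<3> = serde_json::from_str("3").unwrap();
    assert!(serde_json::from_str::<ConstUsize<3>>("4").is_err());
    assert_eq!(serde_json::to_string(&three).unwrap(), "3");
}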

View file

@ -1,193 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use ctor::ctor;
use jobslot::{Acquired, Client};
use std::{
ffi::OsString,
mem,
num::NonZeroUsize,
sync::{Condvar, Mutex, Once, OnceLock},
thread::spawn,
};
fn get_or_make_client() -> &'static Client {
#[ctor]
static CLIENT: OnceLock<Client> = unsafe {
match Client::from_env() {
Some(client) => OnceLock::from(client),
None => OnceLock::new(),
}
};
CLIENT.get_or_init(|| {
let mut available_parallelism = None;
let mut args = std::env::args_os().skip(1);
while let Some(arg) = args.next() {
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
Some(b'=') => {
let mut arg = arg.into_encoded_bytes();
arg.drain(..=TEST_THREADS_OPTION.len());
available_parallelism = Some(arg);
break;
}
None => {
available_parallelism = args.next().map(OsString::into_encoded_bytes);
break;
}
_ => {}
}
}
}
let available_parallelism = if let Some(available_parallelism) = available_parallelism
.as_deref()
.and_then(|v| std::str::from_utf8(v).ok())
.and_then(|v| v.parse().ok())
{
available_parallelism
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
available_parallelism
} else {
NonZeroUsize::new(1).unwrap()
};
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
})
}
struct State {
waiting_count: usize,
available: Vec<Acquired>,
implicit_available: bool,
}
impl State {
fn total_available(&self) -> usize {
self.available.len() + self.implicit_available as usize
}
fn additional_waiting(&self) -> usize {
self.waiting_count.saturating_sub(self.total_available())
}
}
static STATE: Mutex<State> = Mutex::new(State {
waiting_count: 0,
available: Vec::new(),
implicit_available: true,
});
static COND_VAR: Condvar = Condvar::new();
#[derive(Debug)]
enum AcquiredJobInner {
FromJobServer(Acquired),
ImplicitJob,
}
#[derive(Debug)]
pub struct AcquiredJob {
job: AcquiredJobInner,
}
impl AcquiredJob {
fn start_acquire_thread() {
static STARTED_THREAD: Once = Once::new();
STARTED_THREAD.call_once(|| {
spawn(|| {
let mut acquired = None;
let client = get_or_make_client();
let mut state = STATE.lock().unwrap();
loop {
state = if state.additional_waiting() == 0 {
if acquired.is_some() {
drop(state);
drop(acquired.take()); // drop Acquired outside of lock
STATE.lock().unwrap()
} else {
COND_VAR.wait(state).unwrap()
}
} else if acquired.is_some() {
// allocate space before moving Acquired to ensure we
// drop Acquired outside of the lock on panic
state.available.reserve(1);
state.available.push(acquired.take().unwrap());
COND_VAR.notify_all();
state
} else {
drop(state);
acquired = Some(
client
.acquire()
.expect("can't acquire token from job server"),
);
STATE.lock().unwrap()
};
}
});
});
}
fn acquire_inner(block: bool) -> Option<Self> {
Self::start_acquire_thread();
let mut state = STATE.lock().unwrap();
loop {
if let Some(acquired) = state.available.pop() {
return Some(Self {
job: AcquiredJobInner::FromJobServer(acquired),
});
}
if state.implicit_available {
state.implicit_available = false;
return Some(Self {
job: AcquiredJobInner::ImplicitJob,
});
}
if !block {
return None;
}
state.waiting_count += 1;
state = COND_VAR.wait(state).unwrap();
state.waiting_count -= 1;
}
}
pub fn try_acquire() -> Option<Self> {
Self::acquire_inner(false)
}
pub fn acquire() -> Self {
Self::acquire_inner(true).expect("failed to acquire token")
}
pub fn run_command<R>(
&mut self,
cmd: std::process::Command,
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
) -> std::io::Result<R> {
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
}
}
impl Drop for AcquiredJob {
fn drop(&mut self) {
let mut state = STATE.lock().unwrap();
match &self.job {
AcquiredJobInner::FromJobServer(_) => {
if state.waiting_count > state.available.len() + state.implicit_available as usize {
// allocate space before moving Acquired to ensure we
// drop Acquired outside of the lock on panic
state.available.reserve(1);
let AcquiredJobInner::FromJobServer(acquired) =
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
else {
unreachable!()
};
state.available.push(acquired);
COND_VAR.notify_all();
}
}
AcquiredJobInner::ImplicitJob => {
state.implicit_available = true;
if state.waiting_count > state.available.len() {
COND_VAR.notify_all();
}
}
}
}
}
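// Illustrative sketch, not part of the diff above: hold a job-server token for the
// duration of an external build step so parallel tests don't oversubscribe the machine.
// The `make` invocation is arbitrary.
fn run_external_step() -> std::io::Result<std::process::ExitStatus> {
    let mut job = AcquiredJob::acquire();
    job.run_command(std::process::Command::new("make"), |cmd| {
        cmd.arg("-C").arg("build").status()
    })
}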

View file

@ -3,7 +3,6 @@
use crate::intern::{Intern, Interned};
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
use std::{
cell::Cell,
fmt::{self, Debug, Write},
rc::Rc,
sync::{Arc, OnceLock},
@ -95,15 +94,9 @@ pub fn interned_bit(v: bool) -> Interned<BitSlice> {
RETVAL.get_or_init(|| [bits![0; 1].intern(), bits![1; 1].intern()])[v as usize]
}
#[derive(Copy, Clone)]
#[derive(Copy, Clone, Debug)]
pub struct BitSliceWriteWithBase<'a>(pub &'a BitSlice);
impl<'a> Debug for BitSliceWriteWithBase<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{self:#x}")
}
}
impl BitSliceWriteWithBase<'_> {
fn fmt_with_base<const BITS_PER_DIGIT: usize, const UPPER_CASE: bool>(
self,
@ -162,66 +155,3 @@ impl fmt::UpperHex for BitSliceWriteWithBase<'_> {
self.fmt_with_base::<4, true>(f)
}
}
#[inline]
#[track_caller]
pub fn get_many_mut<T, const N: usize>(slice: &mut [T], indexes: [usize; N]) -> [&mut T; N] {
for i in 0..N {
for j in 0..i {
assert!(indexes[i] != indexes[j], "duplicate index");
}
assert!(indexes[i] < slice.len(), "index out of bounds");
}
// Safety: checked that no indexes are duplicates and no indexes are out of bounds
unsafe {
let base = slice.as_mut_ptr(); // convert to a raw pointer before the loop to avoid aliasing with &mut [T]
std::array::from_fn(|i| &mut *base.add(indexes[i]))
}
}
#[derive(Clone, Default)]
pub struct RcWriter(Rc<Cell<Vec<u8>>>);
impl Debug for RcWriter {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.borrow_impl(|buf| {
f.debug_tuple("RcWriter")
.field(&DebugAsDisplay(format_args!("b\"{}\"", buf.escape_ascii())))
.finish()
})
}
}
impl RcWriter {
fn borrow_impl<R>(&self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
let buf = Cell::take(&self.0);
struct PutBackOnDrop<'a> {
buf: Vec<u8>,
this: &'a RcWriter,
}
impl Drop for PutBackOnDrop<'_> {
fn drop(&mut self) {
self.this.0.set(std::mem::take(&mut self.buf));
}
}
let mut buf = PutBackOnDrop { buf, this: self };
f(&mut buf.buf)
}
pub fn borrow<R>(&mut self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
self.borrow_impl(f)
}
pub fn take(&mut self) -> Vec<u8> {
Cell::take(&self.0)
}
}
impl std::io::Write for RcWriter {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
self.borrow(|v| v.extend_from_slice(buf));
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
Ok(())
}
}

View file

@ -1,839 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
// code derived from:
// https://web.archive.org/web/20250303054010/https://git.libre-soc.org/?p=nmutil.git;a=blob;f=src/nmutil/prefix_sum.py;hb=effeb28e5848392adddcdad1f6e7a098f2a44c9c
use crate::intern::{Intern, Interned, Memoize};
use std::{borrow::Cow, num::NonZeroUsize};
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct PrefixSumOp {
pub lhs_index: usize,
pub rhs_and_dest_index: NonZeroUsize,
pub row: u32,
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub struct DiagramConfig {
pub space: Cow<'static, str>,
pub vertical_bar: Cow<'static, str>,
pub plus: Cow<'static, str>,
pub slant: Cow<'static, str>,
pub connect: Cow<'static, str>,
pub no_connect: Cow<'static, str>,
pub padding: usize,
}
impl DiagramConfig {
pub const fn new() -> Self {
Self {
space: Cow::Borrowed(" "),
vertical_bar: Cow::Borrowed("|"),
plus: Cow::Borrowed("\u{2295}"), // ⊕
slant: Cow::Borrowed(r"\"),
connect: Cow::Borrowed("\u{25CF}"), // ●
no_connect: Cow::Borrowed("X"),
padding: 1,
}
}
pub fn draw(self, ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize) -> String {
#[derive(Copy, Clone, Debug)]
struct DiagramCell {
slant: bool,
plus: bool,
tee: bool,
}
let mut ops_by_row: Vec<Vec<PrefixSumOp>> = Vec::new();
let mut last_row = 0;
ops.into_iter().for_each(|op| {
assert!(
op.lhs_index < op.rhs_and_dest_index.get(),
"invalid PrefixSumOp! lhs_index must be less \
than rhs_and_dest_index: {op:?}",
);
assert!(
op.row >= last_row,
"invalid PrefixSumOp! row must \
not decrease (last row was: {last_row}): {op:?}",
);
let ops = if op.row > last_row || ops_by_row.is_empty() {
ops_by_row.push(vec![]);
ops_by_row.last_mut().expect("just pushed")
} else {
ops_by_row
.last_mut()
.expect("just checked if ops_by_row is empty")
};
if let Some(last) = ops.last() {
assert!(
op.rhs_and_dest_index < last.rhs_and_dest_index,
"invalid PrefixSumOp! rhs_and_dest_index must strictly \
decrease in a row:\nthis op: {op:?}\nlast op: {last:?}",
);
}
ops.push(op);
last_row = op.row;
});
let blank_row = || {
vec![
DiagramCell {
slant: false,
plus: false,
tee: false
};
item_count
]
};
let mut cells = vec![blank_row()];
for ops in ops_by_row {
let max_distance = ops
.iter()
.map(
|&PrefixSumOp {
lhs_index,
rhs_and_dest_index,
..
}| { rhs_and_dest_index.get() - lhs_index },
)
.max()
.expect("ops is known to be non-empty");
cells.extend((0..max_distance).map(|_| blank_row()));
for op in ops {
let mut y = cells.len() - 1;
assert!(
op.rhs_and_dest_index.get() < item_count,
"invalid PrefixSumOp! rhs_and_dest_index must be \
less than item_count ({item_count}): {op:?}",
);
let mut x = op.rhs_and_dest_index.get();
cells[y][x].plus = true;
x -= 1;
y -= 1;
while op.lhs_index < x {
cells[y][x].slant = true;
x -= 1;
y -= 1;
}
cells[y][x].tee = true;
}
}
let mut retval = String::new();
let mut row_text = vec![String::new(); 2 * self.padding + 1];
for cells_row in cells {
for cell in cells_row {
// top padding
for y in 0..self.padding {
// top left padding
for x in 0..self.padding {
row_text[y] += if x == y && (cell.plus || cell.slant) {
&self.slant
} else {
&self.space
};
}
// top vertical bar
row_text[y] += &self.vertical_bar;
// top right padding
for _ in 0..self.padding {
row_text[y] += &self.space;
}
}
// center left padding
for _ in 0..self.padding {
row_text[self.padding] += &self.space;
}
// center
row_text[self.padding] += if cell.plus {
&self.plus
} else if cell.tee {
&self.connect
} else if cell.slant {
&self.no_connect
} else {
&self.vertical_bar
};
// center right padding
for _ in 0..self.padding {
row_text[self.padding] += &self.space;
}
let bottom_padding_start = self.padding + 1;
let bottom_padding_last = self.padding * 2;
// bottom padding
for y in bottom_padding_start..=bottom_padding_last {
// bottom left padding
for _ in 0..self.padding {
row_text[y] += &self.space;
}
// bottom vertical bar
row_text[y] += &self.vertical_bar;
// bottom right padding
for x in bottom_padding_start..=bottom_padding_last {
row_text[y] += if x == y && (cell.tee || cell.slant) {
&self.slant
} else {
&self.space
};
}
}
}
for line in &mut row_text {
retval += line.trim_end();
retval += "\n";
line.clear();
}
}
retval
}
}
impl Default for DiagramConfig {
fn default() -> Self {
Self::new()
}
}
impl PrefixSumOp {
pub fn diagram(ops: impl IntoIterator<Item = Self>, item_count: usize) -> String {
Self::diagram_with_config(ops, item_count, DiagramConfig::new())
}
pub fn diagram_with_config(
ops: impl IntoIterator<Item = Self>,
item_count: usize,
config: DiagramConfig,
) -> String {
config.draw(ops, item_count)
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum PrefixSumAlgorithm {
/// Uses the algorithm from:
/// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_1:_Shorter_span,_more_parallel>
LowLatency,
/// Uses the algorithm from:
/// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_2:_Work-efficient>
WorkEfficient,
}
impl PrefixSumAlgorithm {
fn ops_impl(self, item_count: usize) -> Vec<PrefixSumOp> {
let mut retval = Vec::new();
let mut distance = 1;
let mut row = 0;
while distance < item_count {
let double_distance = distance
.checked_mul(2)
.expect("prefix-sum item_count is too big");
let (start, step) = match self {
Self::LowLatency => (distance, 1),
Self::WorkEfficient => (double_distance - 1, double_distance),
};
for rhs_and_dest_index in (start..item_count).step_by(step).rev() {
let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
unreachable!();
};
let lhs_index = rhs_and_dest_index.get() - distance;
retval.push(PrefixSumOp {
lhs_index,
rhs_and_dest_index,
row,
});
}
distance = double_distance;
row += 1;
}
match self {
Self::LowLatency => {}
Self::WorkEfficient => {
distance /= 2;
while distance >= 1 {
let start = distance
.checked_mul(3)
.expect("prefix-sum item_count is too big")
- 1;
for rhs_and_dest_index in (start..item_count).step_by(distance * 2).rev() {
let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
unreachable!();
};
let lhs_index = rhs_and_dest_index.get() - distance;
retval.push(PrefixSumOp {
lhs_index,
rhs_and_dest_index,
row,
});
}
row += 1;
distance /= 2;
}
}
}
retval
}
pub fn ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MyMemoize(PrefixSumAlgorithm);
impl Memoize for MyMemoize {
type Input = usize;
type InputOwned = usize;
type Output = Interned<[PrefixSumOp]>;
fn inner(self, item_count: &Self::Input) -> Self::Output {
Intern::intern_owned(self.0.ops_impl(*item_count))
}
}
MyMemoize(self).get_owned(item_count)
}
pub fn run<T>(self, items: impl IntoIterator<Item = T>, f: impl FnMut(&T, &T) -> T) -> Vec<T> {
let mut items = Vec::from_iter(items);
self.run_on_slice(&mut items, f);
items
}
pub fn run_on_slice<T>(self, items: &mut [T], mut f: impl FnMut(&T, &T) -> T) -> &mut [T] {
self.ops(items.len()).into_iter().for_each(
|PrefixSumOp {
lhs_index,
rhs_and_dest_index,
row: _,
}| {
items[rhs_and_dest_index.get()] =
f(&items[lhs_index], &items[rhs_and_dest_index.get()]);
},
);
items
}
pub fn filtered_ops(
self,
item_live_out_flags: impl IntoIterator<Item = bool>,
) -> Vec<PrefixSumOp> {
let mut item_live_out_flags = Vec::from_iter(item_live_out_flags);
let prefix_sum_ops = self.ops(item_live_out_flags.len());
let mut ops_live_flags = vec![false; prefix_sum_ops.len()];
for (
op_index,
&PrefixSumOp {
lhs_index,
rhs_and_dest_index,
row: _,
},
) in prefix_sum_ops.iter().enumerate().rev()
{
let live = item_live_out_flags[rhs_and_dest_index.get()];
item_live_out_flags[lhs_index] |= live;
ops_live_flags[op_index] = live;
}
prefix_sum_ops
.into_iter()
.zip(ops_live_flags)
.filter_map(|(op, live)| live.then_some(op))
.collect()
}
pub fn reduce_ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MyMemoize(PrefixSumAlgorithm);
impl Memoize for MyMemoize {
type Input = usize;
type InputOwned = usize;
type Output = Interned<[PrefixSumOp]>;
fn inner(self, item_count: &Self::Input) -> Self::Output {
let mut item_live_out_flags = vec![false; *item_count];
let Some(last_item_live_out_flag) = item_live_out_flags.last_mut() else {
return Interned::default();
};
*last_item_live_out_flag = true;
Intern::intern_owned(self.0.filtered_ops(item_live_out_flags))
}
}
MyMemoize(self).get_owned(item_count)
}
}
pub fn reduce_ops(item_count: usize) -> Interned<[PrefixSumOp]> {
PrefixSumAlgorithm::LowLatency.reduce_ops(item_count)
}
pub fn reduce<T>(items: impl IntoIterator<Item = T>, mut f: impl FnMut(T, T) -> T) -> Option<T> {
let mut items: Vec<_> = items.into_iter().map(Some).collect();
for op in reduce_ops(items.len()) {
let (Some(lhs), Some(rhs)) = (
items[op.lhs_index].take(),
items[op.rhs_and_dest_index.get()].take(),
) else {
unreachable!();
};
items[op.rhs_and_dest_index.get()] = Some(f(lhs, rhs));
}
items.last_mut().and_then(Option::take)
}
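// Illustrative sketch, not part of the diff above: an inclusive running sum over plain
// integers; both algorithms yield the same result, differing only in operation count and
// circuit depth.
fn example_running_sum() {
    let sums = PrefixSumAlgorithm::LowLatency.run([1u32, 2, 3, 4], |l, r| l + r);
    assert_eq!(sums, vec![1, 3, 6, 10]);
}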
#[cfg(test)]
mod tests {
use super::*;
fn input_strings() -> [String; 9] {
std::array::from_fn(|i| String::from_utf8(vec![b'a' + i as u8]).unwrap())
}
#[test]
fn test_prefix_sum_strings() {
let input = input_strings();
let expected: Vec<String> = input
.iter()
.scan(String::new(), |l, r| {
*l += r;
Some(l.clone())
})
.collect();
println!("expected: {expected:?}");
assert_eq!(
*PrefixSumAlgorithm::WorkEfficient
.run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
*expected
);
assert_eq!(
*PrefixSumAlgorithm::LowLatency
.run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
*expected
);
}
#[test]
fn test_reduce_string() {
let input = input_strings();
let expected = input.clone().into_iter().reduce(|l, r| l + &r);
assert_eq!(reduce(input, |l, r| l + &r), expected);
}
fn op(lhs_index: usize, rhs_and_dest_index: usize, row: u32) -> PrefixSumOp {
PrefixSumOp {
lhs_index,
rhs_and_dest_index: NonZeroUsize::new(rhs_and_dest_index).expect("should be non-zero"),
row,
}
}
#[test]
fn test_reduce_ops_9() {
let expected = vec![
op(7, 8, 0),
op(5, 6, 0),
op(3, 4, 0),
op(1, 2, 0),
op(6, 8, 1),
op(2, 4, 1),
op(4, 8, 2),
op(0, 8, 3),
];
println!("expected: {expected:#?}");
let ops = reduce_ops(9);
println!("ops: {ops:#?}");
assert_eq!(*ops, *expected);
}
#[test]
fn test_reduce_ops_8() {
let expected = vec![
op(6, 7, 0),
op(4, 5, 0),
op(2, 3, 0),
op(0, 1, 0),
op(5, 7, 1),
op(1, 3, 1),
op(3, 7, 2),
];
println!("expected: {expected:#?}");
let ops = reduce_ops(8);
println!("ops: {ops:#?}");
assert_eq!(*ops, *expected);
}
#[test]
fn test_count_ones() {
for width in 0..=10u32 {
for v in 0..1u32 << width {
let expected = v.count_ones();
assert_eq!(
reduce((0..width).map(|i| (v >> i) & 1), |l, r| l + r).unwrap_or(0),
expected,
"v={v:#X}"
);
}
}
}
#[track_caller]
fn test_diagram(ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize, expected: &str) {
let text = PrefixSumOp::diagram_with_config(
ops,
item_count,
DiagramConfig {
plus: Cow::Borrowed("@"),
..Default::default()
},
);
println!("text:\n{text}\n");
assert_eq!(text, expected);
}
#[test]
fn test_work_efficient_diagram_16() {
let item_count = 16;
test_diagram(
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
item_count,
&r"
| | | | | | | | | | | | | | | |
| | | | | | | |
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
| \| | \| | \| | \| | \| | \| | \| | \|
| @ | @ | @ | @ | @ | @ | @ | @
| |\ | | | |\ | | | |\ | | | |\ | |
| | \| | | | \| | | | \| | | | \| |
| | X | | | X | | | X | | | X |
| | |\ | | | |\ | | | |\ | | | |\ |
| | | \| | | | \| | | | \| | | | \|
| | | @ | | | @ | | | @ | | | @
| | | |\ | | | | | | | |\ | | | |
| | | | \| | | | | | | | \| | | |
| | | | X | | | | | | | X | | |
| | | | |\ | | | | | | | |\ | | |
| | | | | \| | | | | | | | \| | |
| | | | | X | | | | | | | X | |
| | | | | |\ | | | | | | | |\ | |
| | | | | | \| | | | | | | | \| |
| | | | | | X | | | | | | | X |
| | | | | | |\ | | | | | | | |\ |
| | | | | | | \| | | | | | | | \|
| | | | | | | @ | | | | | | | @
| | | | | | | |\ | | | | | | | |
| | | | | | | | \| | | | | | | |
| | | | | | | | X | | | | | | |
| | | | | | | | |\ | | | | | | |
| | | | | | | | | \| | | | | | |
| | | | | | | | | X | | | | | |
| | | | | | | | | |\ | | | | | |
| | | | | | | | | | \| | | | | |
| | | | | | | | | | X | | | | |
| | | | | | | | | | |\ | | | | |
| | | | | | | | | | | \| | | | |
| | | | | | | | | | | X | | | |
| | | | | | | | | | | |\ | | | |
| | | | | | | | | | | | \| | | |
| | | | | | | | | | | | X | | |
| | | | | | | | | | | | |\ | | |
| | | | | | | | | | | | | \| | |
| | | | | | | | | | | | | X | |
| | | | | | | | | | | | | |\ | |
| | | | | | | | | | | | | | \| |
| | | | | | | | | | | | | | X |
| | | | | | | | | | | | | | |\ |
| | | | | | | | | | | | | | | \|
| | | | | | | | | | | | | | @
| | | | | | | |\ | | | | | | | |
| | | | | | | | \| | | | | | | |
| | | | | | | | X | | | | | | |
| | | | | | | | |\ | | | | | | |
| | | | | | | | | \| | | | | | |
| | | | | | | | | X | | | | | |
| | | | | | | | | |\ | | | | | |
| | | | | | | | | | \| | | | | |
| | | | | | | | | | X | | | | |
| | | | | | | | | | |\ | | | | |
| | | | | | | | | | | \| | | | |
| | | | | | | | | @ | | | |
| | | |\ | | | |\ | | | |\ | | | |
| | | | \| | | | \| | | | \| | | |
| | | | X | | | X | | | X | | |
| | | | |\ | | | |\ | | | |\ | | |
| | | | | \| | | | \| | | | \| | |
| | | @ | | @ | | @ | |
| |\ | |\ | |\ | |\ | |\ | |\ | |\ | |
| | \| | \| | \| | \| | \| | \| | \| |
| | @ | @ | @ | @ | @ | @ | @ |
| | | | | | | | | | | | | | | |
"[1..], // trim newline at start
);
}
#[test]
fn test_low_latency_diagram_16() {
let item_count = 16;
test_diagram(
PrefixSumAlgorithm::LowLatency.ops(item_count),
item_count,
&r"
| | | | | | | | | | | | | | | |
|
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| \|
@ @ @ @ @ @ @ @ @ @ @ @ @ @ @
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| |
| X X X X X X X X X X X X X X |
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
| | \| \| \| \| \| \| \| \| \| \| \| \| \| \|
@ @ @ @ @ @ @ @ @ @ @ @ @ @
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | | |
| \| \| \| \| \| \| \| \| \| \| \| \| | | |
| X X X X X X X X X X X X | | |
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | |
| | \| \| \| \| \| \| \| \| \| \| \| \| | |
| | X X X X X X X X X X X X | |
| | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
| | | \| \| \| \| \| \| \| \| \| \| \| \| |
| | | X X X X X X X X X X X X |
| | | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
| | | | \| \| \| \| \| \| \| \| \| \| \| \|
@ @ @ @ @ @ @ @ @ @ @ @
|\ |\ |\ |\ |\ |\ |\ |\ | | | | | | | |
| \| \| \| \| \| \| \| \| | | | | | | |
| X X X X X X X X | | | | | | |
| |\ |\ |\ |\ |\ |\ |\ |\ | | | | | | |
| | \| \| \| \| \| \| \| \| | | | | | |
| | X X X X X X X X | | | | | |
| | |\ |\ |\ |\ |\ |\ |\ |\ | | | | | |
| | | \| \| \| \| \| \| \| \| | | | | |
| | | X X X X X X X X | | | | |
| | | |\ |\ |\ |\ |\ |\ |\ |\ | | | | |
| | | | \| \| \| \| \| \| \| \| | | | |
| | | | X X X X X X X X | | | |
| | | | |\ |\ |\ |\ |\ |\ |\ |\ | | | |
| | | | | \| \| \| \| \| \| \| \| | | |
| | | | | X X X X X X X X | | |
| | | | | |\ |\ |\ |\ |\ |\ |\ |\ | | |
| | | | | | \| \| \| \| \| \| \| \| | |
| | | | | | X X X X X X X X | |
| | | | | | |\ |\ |\ |\ |\ |\ |\ |\ | |
| | | | | | | \| \| \| \| \| \| \| \| |
| | | | | | | X X X X X X X X |
| | | | | | | |\ |\ |\ |\ |\ |\ |\ |\ |
| | | | | | | | \| \| \| \| \| \| \| \|
| | | | | | | | @ @ @ @ @ @ @ @
| | | | | | | | | | | | | | | |
"[1..], // trim newline at start
);
}
#[test]
fn test_work_efficient_diagram_9() {
let item_count = 9;
test_diagram(
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
item_count,
&r"
| | | | | | | | |
| | | | |
|\ | |\ | |\ | |\ | |
| \| | \| | \| | \| |
| @ | @ | @ | @ |
| |\ | | | |\ | | |
| | \| | | | \| | |
| | X | | | X | |
| | |\ | | | |\ | |
| | | \| | | | \| |
| | | @ | | | @ |
| | | |\ | | | | |
| | | | \| | | | |
| | | | X | | | |
| | | | |\ | | | |
| | | | | \| | | |
| | | | | X | | |
| | | | | |\ | | |
| | | | | | \| | |
| | | | | | X | |
| | | | | | |\ | |
| | | | | | | \| |
| | | | | | @ |
| | | |\ | | | | |
| | | | \| | | | |
| | | | X | | | |
| | | | |\ | | | |
| | | | | \| | | |
| | | @ | |
| |\ | |\ | |\ | |\ |
| | \| | \| | \| | \|
| | @ | @ | @ | @
| | | | | | | | |
"[1..], // trim newline at start
);
}
#[test]
fn test_low_latency_diagram_9() {
let item_count = 9;
test_diagram(
PrefixSumAlgorithm::LowLatency.ops(item_count),
item_count,
&r"
| | | | | | | | |
|
|\ |\ |\ |\ |\ |\ |\ |\ |
| \| \| \| \| \| \| \| \|
@ @ @ @ @ @ @ @
|\ |\ |\ |\ |\ |\ |\ | |
| \| \| \| \| \| \| \| |
| X X X X X X X |
| |\ |\ |\ |\ |\ |\ |\ |
| | \| \| \| \| \| \| \|
@ @ @ @ @ @ @
|\ |\ |\ |\ |\ | | | |
| \| \| \| \| \| | | |
| X X X X X | | |
| |\ |\ |\ |\ |\ | | |
| | \| \| \| \| \| | |
| | X X X X X | |
| | |\ |\ |\ |\ |\ | |
| | | \| \| \| \| \| |
| | | X X X X X |
| | | |\ |\ |\ |\ |\ |
| | | | \| \| \| \| \|
| | | @ @ @ @ @
|\ | | | | | | | |
| \| | | | | | | |
| X | | | | | | |
| |\ | | | | | | |
| | \| | | | | | |
| | X | | | | | |
| | |\ | | | | | |
| | | \| | | | | |
| | | X | | | | |
| | | |\ | | | | |
| | | | \| | | | |
| | | | X | | | |
| | | | |\ | | | |
| | | | | \| | | |
| | | | | X | | |
| | | | | |\ | | |
| | | | | | \| | |
| | | | | | X | |
| | | | | | |\ | |
| | | | | | | \| |
| | | | | | | X |
| | | | | | | |\ |
| | | | | | | | \|
| | | | | | | | @
| | | | | | | | |
"[1..], // trim newline at start
);
}
#[test]
fn test_reduce_diagram_16() {
let item_count = 16;
test_diagram(
reduce_ops(item_count),
item_count,
&r"
| | | | | | | | | | | | | | | |
| | | | | | | |
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
| \| | \| | \| | \| | \| | \| | \| | \|
| @ | @ | @ | @ | @ | @ | @ | @
| |\ | | | |\ | | | |\ | | | |\ | |
| | \| | | | \| | | | \| | | | \| |
| | X | | | X | | | X | | | X |
| | |\ | | | |\ | | | |\ | | | |\ |
| | | \| | | | \| | | | \| | | | \|
| | | @ | | | @ | | | @ | | | @
| | | |\ | | | | | | | |\ | | | |
| | | | \| | | | | | | | \| | | |
| | | | X | | | | | | | X | | |
| | | | |\ | | | | | | | |\ | | |
| | | | | \| | | | | | | | \| | |
| | | | | X | | | | | | | X | |
| | | | | |\ | | | | | | | |\ | |
| | | | | | \| | | | | | | | \| |
| | | | | | X | | | | | | | X |
| | | | | | |\ | | | | | | | |\ |
| | | | | | | \| | | | | | | | \|
| | | | | | | @ | | | | | | | @
| | | | | | | |\ | | | | | | | |
| | | | | | | | \| | | | | | | |
| | | | | | | | X | | | | | | |
| | | | | | | | |\ | | | | | | |
| | | | | | | | | \| | | | | | |
| | | | | | | | | X | | | | | |
| | | | | | | | | |\ | | | | | |
| | | | | | | | | | \| | | | | |
| | | | | | | | | | X | | | | |
| | | | | | | | | | |\ | | | | |
| | | | | | | | | | | \| | | | |
| | | | | | | | | | | X | | | |
| | | | | | | | | | | |\ | | | |
| | | | | | | | | | | | \| | | |
| | | | | | | | | | | | X | | |
| | | | | | | | | | | | |\ | | |
| | | | | | | | | | | | | \| | |
| | | | | | | | | | | | | X | |
| | | | | | | | | | | | | |\ | |
| | | | | | | | | | | | | | \| |
| | | | | | | | | | | | | | X |
| | | | | | | | | | | | | | |\ |
| | | | | | | | | | | | | | | \|
| | | | | | | | | | | | | | | @
| | | | | | | | | | | | | | | |
"[1..], // trim newline at start
);
}
#[test]
fn test_reduce_diagram_9() {
let item_count = 9;
test_diagram(
reduce_ops(item_count),
item_count,
&r"
| | | | | | | | |
| | | | |
| |\ | |\ | |\ | |\ |
| | \| | \| | \| | \|
| | @ | @ | @ | @
| | |\ | | | |\ | |
| | | \| | | | \| |
| | | X | | | X |
| | | |\ | | | |\ |
| | | | \| | | | \|
| | | | @ | | | @
| | | | |\ | | | |
| | | | | \| | | |
| | | | | X | | |
| | | | | |\ | | |
| | | | | | \| | |
| | | | | | X | |
| | | | | | |\ | |
| | | | | | | \| |
| | | | | | | X |
| | | | | | | |\ |
| | | | | | | | \|
| | | | | | | @
|\ | | | | | | | |
| \| | | | | | | |
| X | | | | | | |
| |\ | | | | | | |
| | \| | | | | | |
| | X | | | | | |
| | |\ | | | | | |
| | | \| | | | | |
| | | X | | | | |
| | | |\ | | | | |
| | | | \| | | | |
| | | | X | | | |
| | | | |\ | | | |
| | | | | \| | | |
| | | | | X | | |
| | | | | |\ | | |
| | | | | | \| | |
| | | | | | X | |
| | | | | | |\ | |
| | | | | | | \| |
| | | | | | | X |
| | | | | | | |\ |
| | | | | | | | \|
| | | | | | | | @
| | | | | | | | |
"[1..], // trim newline at start
);
}
}

View file

@ -1,566 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{memory::splat_mask, prelude::*};
use std::num::NonZeroUsize;
#[hdl]
pub struct ReadyValid<T> {
pub data: HdlOption<T>,
#[hdl(flip)]
pub ready: Bool,
}
impl<T: Type> ReadyValid<T> {
#[hdl]
pub fn firing(expr: Expr<Self>) -> Expr<Bool> {
#[hdl]
let firing: Bool = wire();
#[hdl]
match expr.data {
HdlNone => connect(firing, false),
HdlSome(_) => connect(firing, expr.ready),
}
firing
}
#[hdl]
pub fn firing_data(expr: impl ToExpr<Type = Self>) -> Expr<HdlOption<T>> {
let expr = expr.to_expr();
let option_ty = Expr::ty(expr).data;
#[hdl]
let firing_data = wire(option_ty);
connect(firing_data, option_ty.HdlNone());
#[hdl]
if expr.ready {
connect(firing_data, expr.data);
}
firing_data
}
#[hdl]
pub fn map<R: Type>(
expr: Expr<Self>,
f: impl FnOnce(Expr<T>) -> Expr<R>,
) -> Expr<ReadyValid<R>> {
let data = HdlOption::map(expr.data, f);
#[hdl]
let mapped = wire(ReadyValid[Expr::ty(data).HdlSome]);
connect(mapped.data, data);
connect(expr.ready, mapped.ready);
mapped
}
}
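// Illustrative sketch, not part of the diff above: a module that always accepts its input
// and reports whether a transfer fired this cycle, built from the helpers above. It
// assumes `ReadyValid<Bool>` is a static type so `m.input()` can infer it without an
// explicit type argument.
#[hdl_module]
pub fn always_ready_probe() {
    #[hdl]
    let inp: ReadyValid<Bool> = m.input();
    #[hdl]
    let fired: Bool = m.output();
    connect(inp.ready, true);
    connect(fired, ReadyValid::firing(inp));
}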
/// This debug port is only meant to assist the formal proof of the queue.
#[cfg(test)]
#[doc(hidden)]
#[hdl]
pub struct QueueDebugPort<Element, Index> {
#[hdl(flip)]
index_to_check: Index,
stored: Element,
inp_index: Index,
out_index: Index,
}
#[hdl_module]
pub fn queue<T: Type>(
ty: T,
capacity: NonZeroUsize,
inp_ready_is_comb: bool,
out_valid_is_comb: bool,
) {
let count_ty = UInt::range_inclusive(0..=capacity.get());
let index_ty = UInt::range(0..capacity.get());
#[hdl]
let cd: ClockDomain = m.input();
#[hdl]
let inp: ReadyValid<T> = m.input(ReadyValid[ty]);
#[hdl]
let out: ReadyValid<T> = m.output(ReadyValid[ty]);
#[hdl]
let count: UInt = m.output(count_ty);
#[hdl]
let inp_index_reg = reg_builder().clock_domain(cd).reset(0.cast_to(index_ty));
#[hdl]
let out_index_reg = reg_builder().clock_domain(cd).reset(0.cast_to(index_ty));
#[hdl]
let maybe_full_reg = reg_builder().clock_domain(cd).reset(false);
#[hdl]
let mut mem = memory(ty);
mem.depth(capacity.get());
let read_port = mem.new_read_port();
let write_port = mem.new_write_port();
#[hdl]
let inp_firing: Bool = wire();
connect(inp_firing, ReadyValid::firing(inp));
#[hdl]
let out_firing: Bool = wire();
connect(out_firing, ReadyValid::firing(out));
#[hdl]
let indexes_equal: Bool = wire();
connect(indexes_equal, inp_index_reg.cmp_eq(out_index_reg));
#[hdl]
let empty: Bool = wire();
connect(empty, indexes_equal & !maybe_full_reg);
#[hdl]
let full: Bool = wire();
connect(full, indexes_equal & maybe_full_reg);
connect(read_port.addr, out_index_reg);
connect(read_port.en, true);
connect(read_port.clk, cd.clk);
connect(write_port.addr, inp_index_reg);
connect(write_port.en, inp_firing);
connect(write_port.clk, cd.clk);
connect(write_port.data, HdlOption::unwrap_or(inp.data, ty.uninit()));
connect(write_port.mask, splat_mask(ty, true.to_expr()));
connect(inp.ready, !full);
if inp_ready_is_comb {
#[hdl]
if out.ready {
connect(inp.ready, true);
}
}
#[hdl]
if !empty {
connect(out.data, HdlSome(read_port.data));
} else {
if out_valid_is_comb {
connect(out.data, inp.data);
} else {
connect(out.data, HdlOption[ty].HdlNone());
}
}
#[hdl]
if inp_firing.cmp_ne(out_firing) {
connect(maybe_full_reg, inp_firing);
}
#[hdl]
if inp_firing {
#[hdl]
if inp_index_reg.cmp_eq(capacity.get() - 1) {
connect_any(inp_index_reg, 0_hdl_u0);
} else {
connect_any(inp_index_reg, inp_index_reg + 1_hdl_u1);
}
}
#[hdl]
if out_firing {
#[hdl]
if out_index_reg.cmp_eq(capacity.get() - 1) {
connect_any(out_index_reg, 0_hdl_u0);
} else {
connect_any(out_index_reg, out_index_reg + 1_hdl_u1);
}
}
#[hdl]
if indexes_equal {
#[hdl]
if maybe_full_reg {
connect_any(count, capacity);
} else {
connect_any(count, 0_hdl_u0);
}
} else {
if capacity.is_power_of_two() {
debug_assert_eq!(count_ty.width(), index_ty.width() + 1);
#[hdl]
let count_lower = wire(index_ty);
connect(
count_lower,
(inp_index_reg - out_index_reg).cast_to(index_ty),
); // wrap
connect(count, count_lower.cast_to(count_ty));
} else {
debug_assert_eq!(count_ty.width(), index_ty.width());
#[hdl]
if inp_index_reg.cmp_lt(out_index_reg) {
connect(count, inp_index_reg + capacity - out_index_reg);
} else {
connect(count, inp_index_reg - out_index_reg);
}
}
}
// These debug ports expose some internal state during the Induction phase
// of Formal Verification. They are not present in normal use.
#[cfg(test)]
{
#[hdl]
let dbg: QueueDebugPort<T, UInt> = m.output(QueueDebugPort[ty][index_ty]);
// read the memory word currently stored at some fixed index
let debug_port = mem.new_read_port();
connect(debug_port.addr, dbg.index_to_check);
connect(debug_port.en, true);
connect(debug_port.clk, cd.clk);
connect(dbg.stored, debug_port.data);
// also expose the current read and write indices
connect(dbg.inp_index, inp_index_reg);
connect(dbg.out_index, out_index_reg);
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
cli::FormalMode, firrtl::ExportOptions,
module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
ty::StaticType,
};
use std::num::NonZero;
#[track_caller]
fn test_queue(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
assert_formal(
format_args!("test_queue_{capacity}_{inp_ready_is_comb}_{out_valid_is_comb}"),
queue_test(capacity, inp_ready_is_comb, out_valid_is_comb),
FormalMode::Prove,
2,
None,
ExportOptions {
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
..ExportOptions::default()
},
);
/// Formal verification of the FIFO queue
///
/// The strategy derives from the observation that, if we filter its
/// input and output streams to consider just one in every N reads and
/// writes (where N is the FIFO capacity), then the FIFO effectively
/// behaves as a one-entry FIFO.
///
/// In particular, any counterexample of the full FIFO behaving badly
/// will also be caught by one of the filtered versions (one which
/// happens to be in phase with the offending input or output).
#[hdl_module]
fn queue_test(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
#[hdl]
let clk: Clock = m.input();
#[hdl]
let cd = wire();
connect(
cd,
#[hdl]
ClockDomain {
clk,
rst: formal_reset().to_reset(),
},
);
// random input data
#[hdl]
let inp_data: HdlOption<UInt<8>> = wire();
#[hdl]
if any_seq(Bool) {
connect(inp_data, HdlSome(any_seq(UInt::<8>::TYPE)));
} else {
connect(inp_data, HdlNone());
}
// assert output ready at random
#[hdl]
let out_ready: Bool = wire();
connect(out_ready, any_seq(Bool));
// The current number of elements in the FIFO ranges from zero to
// maximum capacity, inclusive.
let count_ty = UInt::range_inclusive(0..=capacity.get());
// type for counters that wrap around at the FIFO capacity
let index_ty = UInt::range(0..capacity.get());
// among all entries of the FIFO internal circular memory, choose
// one at random to check
#[hdl]
let index_to_check = wire(index_ty);
connect(index_to_check, any_const(index_ty));
hdl_assume(clk, index_to_check.cmp_lt(capacity.get()), "");
// instantiate and connect the queue
#[hdl]
let dut = instance(queue(
UInt[ConstUsize::<8>],
capacity,
inp_ready_is_comb,
out_valid_is_comb,
));
connect(dut.cd, cd);
connect(dut.inp.data, inp_data);
connect(dut.out.ready, out_ready);
// Keep an independent count of words in the FIFO. Ensure that
// it's always correct, and never overflows.
#[hdl]
let expected_count_reg = reg_builder().clock_domain(cd).reset(count_ty.zero());
#[hdl]
if ReadyValid::firing(dut.inp) & !ReadyValid::firing(dut.out) {
hdl_assert(clk, expected_count_reg.cmp_ne(capacity.get()), "");
connect_any(expected_count_reg, expected_count_reg + 1u8);
} else if !ReadyValid::firing(dut.inp) & ReadyValid::firing(dut.out) {
hdl_assert(clk, expected_count_reg.cmp_ne(count_ty.zero()), "");
connect_any(expected_count_reg, expected_count_reg - 1u8);
}
hdl_assert(clk, expected_count_reg.cmp_eq(dut.count), "");
// keep an independent write index into the FIFO's circular buffer
#[hdl]
let inp_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
#[hdl]
if ReadyValid::firing(dut.inp) {
#[hdl]
if inp_index_reg.cmp_ne(capacity.get() - 1) {
connect_any(inp_index_reg, inp_index_reg + 1u8);
} else {
connect_any(inp_index_reg, 0_hdl_u0);
}
}
// keep an independent read index into the FIFO's circular buffer
#[hdl]
let out_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
#[hdl]
if ReadyValid::firing(dut.out) {
#[hdl]
if out_index_reg.cmp_ne(capacity.get() - 1) {
connect_any(out_index_reg, out_index_reg + 1u8);
} else {
connect_any(out_index_reg, 0_hdl_u0);
}
}
// filter the input data stream, predicated by the read index
// matching the chosen position in the FIFO's circular buffer
#[hdl]
let inp_index_matches = wire();
connect(inp_index_matches, inp_index_reg.cmp_eq(index_to_check));
#[hdl]
let inp_firing_data = wire();
connect(inp_firing_data, HdlNone());
#[hdl]
if inp_index_matches {
connect(inp_firing_data, ReadyValid::firing_data(dut.inp));
}
// filter the output data stream, predicated by the write index
// matching the chosen position in the FIFO's circular buffer
#[hdl]
let out_index_matches = wire();
connect(out_index_matches, out_index_reg.cmp_eq(index_to_check));
#[hdl]
let out_firing_data = wire();
connect(out_firing_data, HdlNone());
#[hdl]
if out_index_matches {
connect(out_firing_data, ReadyValid::firing_data(dut.out));
}
// Implement a one-entry FIFO and ensure its equivalence to the
// filtered FIFO.
//
// the holding register for our one-entry FIFO
#[hdl]
let stored_reg = reg_builder().clock_domain(cd).reset(HdlNone());
#[hdl]
match stored_reg {
// If the holding register is empty...
HdlNone => {
#[hdl]
match inp_firing_data {
// ... and we are not receiving data, then we must not
// transmit any data.
HdlNone => hdl_assert(clk, HdlOption::is_none(out_firing_data), ""),
// If we are indeed receiving some data...
HdlSome(data_in) => {
#[hdl]
match out_firing_data {
// ... and transmitting at the same time, we
// must be transmitting the input data itself,
// since the holding register is empty.
HdlSome(data_out) => hdl_assert(clk, data_out.cmp_eq(data_in), ""),
// If we are receiving, but not transmitting,
// store the received data in the holding
// register.
HdlNone => connect(stored_reg, HdlSome(data_in)),
}
}
}
}
// If there is some value stored in the holding register...
HdlSome(stored) => {
#[hdl]
match out_firing_data {
// ... and we are not transmitting it, we cannot
// receive any more data.
HdlNone => hdl_assert(clk, HdlOption::is_none(inp_firing_data), ""),
// If we are transmitting a previously stored value...
HdlSome(data_out) => {
// ... it must be the same data we stored earlier.
hdl_assert(clk, data_out.cmp_eq(stored), "");
// Also, accept new data, if any. Otherwise,
// let the holding register become empty.
connect(stored_reg, inp_firing_data);
}
}
}
}
// from now on, some extra assertions in order to pass induction
// sync the holding register, when it's occupied, to the
// corresponding entry in the FIFO's circular buffer
connect(dut.dbg.index_to_check, index_to_check);
#[hdl]
if let HdlSome(stored) = stored_reg {
hdl_assert(clk, stored.cmp_eq(dut.dbg.stored), "");
}
// sync the read and write indices
hdl_assert(clk, inp_index_reg.cmp_eq(dut.dbg.inp_index), "");
hdl_assert(clk, out_index_reg.cmp_eq(dut.dbg.out_index), "");
// the indices should never go past the capacity, but induction
// doesn't know that...
hdl_assert(clk, inp_index_reg.cmp_lt(capacity.get()), "");
hdl_assert(clk, out_index_reg.cmp_lt(capacity.get()), "");
// strongly constrain the state of the holding register
//
// The holding register is full if and only if the corresponding
// FIFO entry was written to and not yet read. In other words, if
// the number of pending reads until the chosen entry is read out
// is greater than the current FIFO count, then the entry couldn't
// be in the FIFO in the first place.
#[hdl]
let pending_reads: UInt = wire(index_ty);
// take care of wrap-around when subtracting indices, add the
// capacity amount to keep the result positive if necessary
#[hdl]
if index_to_check.cmp_ge(out_index_reg) {
connect(pending_reads, index_to_check - out_index_reg);
} else {
connect(
pending_reads,
index_to_check + capacity.get() - out_index_reg,
);
}
// check whether the chosen entry is in the FIFO
#[hdl]
let expected_stored: Bool = wire();
connect(expected_stored, pending_reads.cmp_lt(dut.count));
// sync with the state of the holding register
hdl_assert(
clk,
expected_stored.cmp_eq(HdlOption::is_some(stored_reg)),
"",
);
}
}
#[test]
fn test_1_false_false() {
test_queue(NonZero::new(1).unwrap(), false, false);
}
#[test]
fn test_1_false_true() {
test_queue(NonZero::new(1).unwrap(), false, true);
}
#[test]
fn test_1_true_false() {
test_queue(NonZero::new(1).unwrap(), true, false);
}
#[test]
fn test_1_true_true() {
test_queue(NonZero::new(1).unwrap(), true, true);
}
#[test]
fn test_2_false_false() {
test_queue(NonZero::new(2).unwrap(), false, false);
}
#[test]
fn test_2_false_true() {
test_queue(NonZero::new(2).unwrap(), false, true);
}
#[test]
fn test_2_true_false() {
test_queue(NonZero::new(2).unwrap(), true, false);
}
#[test]
fn test_2_true_true() {
test_queue(NonZero::new(2).unwrap(), true, true);
}
#[test]
fn test_3_false_false() {
test_queue(NonZero::new(3).unwrap(), false, false);
}
#[test]
fn test_3_false_true() {
test_queue(NonZero::new(3).unwrap(), false, true);
}
#[test]
fn test_3_true_false() {
test_queue(NonZero::new(3).unwrap(), true, false);
}
#[test]
fn test_3_true_true() {
test_queue(NonZero::new(3).unwrap(), true, true);
}
#[test]
fn test_4_false_false() {
test_queue(NonZero::new(4).unwrap(), false, false);
}
#[test]
fn test_4_false_true() {
test_queue(NonZero::new(4).unwrap(), false, true);
}
#[test]
fn test_4_true_false() {
test_queue(NonZero::new(4).unwrap(), true, false);
}
#[test]
fn test_4_true_true() {
test_queue(NonZero::new(4).unwrap(), true, true);
}
#[test]
fn test_many_false_false() {
test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, false);
}
#[test]
fn test_many_false_true() {
test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, true);
}
#[test]
fn test_many_true_false() {
test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, false);
}
#[test]
fn test_many_true_true() {
test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, true);
}
}
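
// Illustrative sketch only (plain Rust, not Fayalite, not part of the crate): the
// filtering argument from the proof comment above, as a software model. Watch one
// fixed but arbitrary slot `k` of a capacity-`N` circular FIFO and check that the
// sub-stream of writes and reads hitting that slot behaves like a one-entry FIFO.
use std::collections::VecDeque;

fn main() {
    let capacity = 4usize;
    let k = 2usize; // fixed but arbitrary slot to watch
    let mut fifo: VecDeque<u8> = VecDeque::new();
    let mut wr_idx = 0usize; // circular-buffer slot the next write lands in
    let mut rd_idx = 0usize; // circular-buffer slot the next read comes from
    let mut held: Option<u8> = None; // the one-entry FIFO for the filtered stream

    // a deterministic schedule of pushes and pops
    for step in 0u32..10_000 {
        let push = step % 3 != 0;
        let pop = step % 2 == 0;
        if push && fifo.len() < capacity {
            let data = (step % 251) as u8;
            fifo.push_back(data);
            if wr_idx == k {
                // the slot must have been read out before it is written again
                assert!(held.is_none(), "slot {k} overwritten before being read");
                held = Some(data);
            }
            wr_idx = (wr_idx + 1) % capacity;
        }
        if pop {
            if let Some(data) = fifo.pop_front() {
                if rd_idx == k {
                    // the filtered output must be exactly the filtered input
                    assert_eq!(held.take(), Some(data), "slot {k} returned wrong data");
                }
                rd_idx = (rd_idx + 1) % capacity;
            }
        }
    }
}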

View file

@ -1,114 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
mod safety_boundary {
use std::{cell::Cell, ptr::NonNull};
pub(super) struct Impl<T: ?Sized>(Cell<Option<NonNull<T>>>);
impl<T: ?Sized> Impl<T> {
#[inline]
pub(super) const fn new() -> Self {
Self(Cell::new(None))
}
#[inline]
pub(super) const fn opt_static_ref(v: Option<&'static T>) -> Self {
Self(Cell::new(if let Some(v) = v {
// SAFETY: v is a valid reference for lifetime 'static
unsafe { Some(NonNull::new_unchecked(v as *const T as *mut T)) }
} else {
None
}))
}
/// set `self` to `value` for the duration of the `f()` call.
#[inline]
pub(super) fn set_opt<F: FnOnce() -> R, R>(&self, value: Option<&T>, f: F) -> R {
struct ResetOnDrop<'a, T: ?Sized> {
this: &'a Impl<T>,
old: Option<NonNull<T>>,
}
impl<T: ?Sized> Drop for ResetOnDrop<'_, T> {
fn drop(&mut self) {
self.this.0.set(self.old);
}
}
// reset to the old value before exiting this function, ensuring `self`
// is not left set to `value` after `value`'s lifetime has expired
let _reset_on_drop = ResetOnDrop {
this: self,
old: self.0.replace(value.map(NonNull::from)),
};
f()
}
#[inline]
pub(super) fn get_ptr(&self) -> Option<NonNull<T>> {
self.0.get()
}
/// get the reference in `self` for the duration of the `f(...)` call.
#[inline]
pub(super) fn with_opt<F: for<'a> FnOnce(Option<&'a T>) -> R, R>(&self, f: F) -> R {
// SAFETY:
// `self.0` is only `Some` when inside some `set_opt` call or when set
// to some `&'static T`, which ensures that the pointer is live and valid.
//
// the reference we give away has its lifetime scoped to this
// function call which ensures that it won't escape
unsafe { f(self.0.get().map(|v| &*v.as_ptr())) }
}
}
}
/// holds a `Cell<Option<&'scoped T>>` where `'scoped` is erased. This is useful for holding references in TLS.
pub struct ScopedRef<T: ?Sized>(safety_boundary::Impl<T>);
impl<T: ?Sized> ScopedRef<T> {
/// create a new empty [`ScopedRef`]
#[inline]
pub const fn new() -> Self {
Self(safety_boundary::Impl::new())
}
#[inline]
pub const fn opt_static_ref(v: Option<&'static T>) -> Self {
Self(safety_boundary::Impl::opt_static_ref(v))
}
#[inline]
pub const fn static_ref(v: &'static T) -> Self {
Self::opt_static_ref(Some(v))
}
/// set `self` to `value` for the duration of the `f()` call.
#[inline]
pub fn set_opt<F: FnOnce() -> R, R>(&self, value: Option<&T>, f: F) -> R {
self.0.set_opt(value, f)
}
/// set `self` to `value` for the duration of the `f()` call.
#[inline]
pub fn set<F: FnOnce() -> R, R>(&self, value: &T, f: F) -> R {
self.0.set_opt(Some(value), f)
}
#[inline]
pub fn is_some(&self) -> bool {
self.0.get_ptr().is_some()
}
#[inline]
pub fn is_none(&self) -> bool {
self.0.get_ptr().is_none()
}
/// get the reference in `self` for the duration of the `f(...)` call. panics if no reference is set.
#[inline]
pub fn with<F: FnOnce(&T) -> R, R>(&self, f: F) -> R {
self.0.with_opt(
#[inline]
|v| f(v.expect("called ScopedRef::with on an empty ScopedRef")),
)
}
/// get the reference in `self` for the duration of the `f(...)` call.
#[inline]
pub fn with_opt<F: FnOnce(Option<&T>) -> R, R>(&self, f: F) -> R {
self.0.with_opt(f)
}
}
impl<T: ?Sized> Default for ScopedRef<T> {
fn default() -> Self {
Self::new()
}
}
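
// Hypothetical usage sketch (not taken from the crate's docs): stash a borrowed
// value in a thread-local `ScopedRef` for the duration of a call and read it back
// inside. Assumes the `ScopedRef` type defined above is in scope.
thread_local! {
    // empty until someone calls `set`
    static CURRENT_NAME: ScopedRef<str> = ScopedRef::new();
}

fn greet() -> String {
    // read the reference back; panics if nothing is currently set
    CURRENT_NAME.with(|slot| slot.with(|name| format!("hello, {name}")))
}

fn main() {
    let owned = String::from("fayalite");
    // `owned` is only borrowed for the duration of the `greet()` call
    let msg = CURRENT_NAME.with(|slot| slot.set(&owned, greet));
    assert_eq!(msg, "hello, fayalite");
}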

View file

@ -1,31 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use std::{
io::{self, BufRead},
str,
};
pub(crate) fn streaming_read_utf8<R: BufRead, E: From<io::Error>>(
reader: R,
mut callback: impl FnMut(&str) -> Result<(), E>,
) -> Result<(), E> {
let mut buf = [0; 4];
let mut buf_len = 0;
for byte in reader.bytes() {
buf[buf_len] = byte?;
buf_len += 1;
match str::from_utf8(&buf[..buf_len]) {
Ok(buf) => {
callback(buf)?;
buf_len = 0;
}
Err(e) => {
if e.error_len().is_some() {
callback("\u{FFFD}")?; // replacement character
buf_len = 0;
}
}
}
}
Ok(())
}
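
// Illustrative call of the helper above (it is `pub(crate)`, so this sketch only
// shows the intended calling convention): feed it a byte stream containing one
// invalid byte and collect the decoded chunks, which are always valid UTF-8.
use std::io::{self, Cursor};

fn main() -> io::Result<()> {
    // "héllo" in UTF-8, with one invalid byte (0xFF) spliced in
    let bytes: &[u8] = b"h\xC3\xA9llo \xFF world";
    let mut out = String::new();
    streaming_read_utf8(Cursor::new(bytes), |chunk| -> io::Result<()> {
        out.push_str(chunk);
        Ok(())
    })?;
    // the invalid byte was replaced with U+FFFD
    assert_eq!(out, "héllo \u{FFFD} world");
    Ok(())
}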

View file

@ -1,240 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
#![cfg(feature = "unstable-test-hasher")]
use std::{
fmt::Write as _,
hash::{BuildHasher, Hash, Hasher},
io::Write as _,
marker::PhantomData,
sync::LazyLock,
};
type BoxDynHasher = Box<dyn Hasher + Send + Sync>;
type BoxDynBuildHasher = Box<dyn DynBuildHasherTrait + Send + Sync>;
type BoxDynMakeBuildHasher = Box<dyn Fn() -> BoxDynBuildHasher + Send + Sync>;
trait TryGetDynBuildHasher: Copy {
type Type;
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher>;
}
impl<T> TryGetDynBuildHasher for PhantomData<T> {
type Type = T;
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher> {
None
}
}
impl<T: Default + BuildHasher<Hasher: Send + Sync + 'static> + Send + Sync + 'static + Clone>
TryGetDynBuildHasher for &'_ PhantomData<T>
{
type Type = T;
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher> {
Some(Box::new(|| Box::<DynBuildHasher<T>>::default()))
}
}
#[derive(Default, Clone)]
struct DynBuildHasher<T>(T);
trait DynBuildHasherTrait: BuildHasher<Hasher = BoxDynHasher> {
fn clone_dyn_build_hasher(&self) -> BoxDynBuildHasher;
}
impl<BH: BuildHasher<Hasher: Send + Sync + 'static>> BuildHasher for DynBuildHasher<BH> {
type Hasher = BoxDynHasher;
fn build_hasher(&self) -> Self::Hasher {
Box::new(self.0.build_hasher())
}
fn hash_one<T: Hash>(&self, x: T) -> u64 {
self.0.hash_one(x)
}
}
impl<BH> DynBuildHasherTrait for DynBuildHasher<BH>
where
Self: Clone + BuildHasher<Hasher = BoxDynHasher> + Send + Sync + 'static,
{
fn clone_dyn_build_hasher(&self) -> BoxDynBuildHasher {
Box::new(self.clone())
}
}
pub struct DefaultBuildHasher(BoxDynBuildHasher);
impl Clone for DefaultBuildHasher {
fn clone(&self) -> Self {
DefaultBuildHasher(self.0.clone_dyn_build_hasher())
}
}
const ENV_VAR_NAME: &'static str = "FAYALITE_TEST_HASHER";
struct EnvVarValue {
key: &'static str,
try_get_make_build_hasher: fn() -> Option<BoxDynMakeBuildHasher>,
description: &'static str,
}
macro_rules! env_var_value {
(
key: $key:literal,
build_hasher: $build_hasher:ty,
description: $description:literal,
) => {
EnvVarValue {
key: $key,
try_get_make_build_hasher: || {
// use rust method resolution to detect if $build_hasher is usable
// (e.g. hashbrown's hasher won't be usable without the right feature enabled)
(&PhantomData::<DynBuildHasher<$build_hasher>>).try_get_make_build_hasher()
},
description: $description,
}
};
}
#[derive(Default)]
struct AlwaysZeroHasher;
impl Hasher for AlwaysZeroHasher {
fn write(&mut self, _bytes: &[u8]) {}
fn finish(&self) -> u64 {
0
}
}
const ENV_VAR_VALUES: &'static [EnvVarValue] = &[
env_var_value! {
key: "std",
build_hasher: std::hash::RandomState,
description: "use std::hash::RandomState",
},
env_var_value! {
key: "hashbrown",
build_hasher: hashbrown::DefaultHashBuilder,
description: "use hashbrown's DefaultHashBuilder",
},
env_var_value! {
key: "always_zero",
build_hasher: std::hash::BuildHasherDefault<AlwaysZeroHasher>,
description: "use a hasher that always returns 0 for all hashes,\n \
this is useful for checking that PartialEq impls are correct",
},
];
fn report_bad_env_var(msg: impl std::fmt::Display) -> ! {
let mut msg = format!("{ENV_VAR_NAME}: {msg}\n");
for &EnvVarValue {
key,
try_get_make_build_hasher,
description,
} in ENV_VAR_VALUES
{
let availability = match try_get_make_build_hasher() {
Some(_) => "available",
None => "unavailable",
};
writeln!(msg, "{key}: ({availability})\n {description}").expect("can't fail");
}
std::io::stderr()
.write_all(msg.as_bytes())
.expect("should be able to write to stderr");
std::process::abort();
}
impl Default for DefaultBuildHasher {
fn default() -> Self {
static DEFAULT_FN: LazyLock<BoxDynMakeBuildHasher> = LazyLock::new(|| {
let var = std::env::var_os(ENV_VAR_NAME);
let var = var.as_deref().unwrap_or("std".as_ref());
for &EnvVarValue {
key,
try_get_make_build_hasher,
description: _,
} in ENV_VAR_VALUES
{
if var.as_encoded_bytes().eq_ignore_ascii_case(key.as_bytes()) {
return try_get_make_build_hasher().unwrap_or_else(|| {
report_bad_env_var(format_args!(
"unavailable hasher: {key} (is the appropriate feature enabled?)"
));
});
}
}
report_bad_env_var(format_args!("unrecognized hasher: {var:?}"));
});
Self(DEFAULT_FN())
}
}
pub struct DefaultHasher(BoxDynHasher);
impl BuildHasher for DefaultBuildHasher {
type Hasher = DefaultHasher;
fn build_hasher(&self) -> Self::Hasher {
DefaultHasher(self.0.build_hasher())
}
}
impl Hasher for DefaultHasher {
fn finish(&self) -> u64 {
self.0.finish()
}
fn write(&mut self, bytes: &[u8]) {
self.0.write(bytes)
}
fn write_u8(&mut self, i: u8) {
self.0.write_u8(i)
}
fn write_u16(&mut self, i: u16) {
self.0.write_u16(i)
}
fn write_u32(&mut self, i: u32) {
self.0.write_u32(i)
}
fn write_u64(&mut self, i: u64) {
self.0.write_u64(i)
}
fn write_u128(&mut self, i: u128) {
self.0.write_u128(i)
}
fn write_usize(&mut self, i: usize) {
self.0.write_usize(i)
}
fn write_i8(&mut self, i: i8) {
self.0.write_i8(i)
}
fn write_i16(&mut self, i: i16) {
self.0.write_i16(i)
}
fn write_i32(&mut self, i: i32) {
self.0.write_i32(i)
}
fn write_i64(&mut self, i: i64) {
self.0.write_i64(i)
}
fn write_i128(&mut self, i: i128) {
self.0.write_i128(i)
}
fn write_isize(&mut self, i: isize) {
self.0.write_isize(i)
}
}
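
// A minimal sketch of why the `always_zero` option exists (plain std types, not the
// crate's `DefaultBuildHasher`): when every key hashes to the same value, lookups can
// only succeed through `Eq`/`PartialEq`, so a broken `PartialEq` impl shows up
// immediately. With the code above, the same effect is selected at test time via
// `FAYALITE_TEST_HASHER=always_zero`.
use std::collections::HashMap;
use std::hash::{BuildHasherDefault, Hasher};

#[derive(Default)]
struct ZeroHasher; // mirrors AlwaysZeroHasher above

impl Hasher for ZeroHasher {
    fn write(&mut self, _bytes: &[u8]) {}
    fn finish(&self) -> u64 {
        0
    }
}

fn main() {
    let mut map: HashMap<String, u32, BuildHasherDefault<ZeroHasher>> = HashMap::default();
    map.insert("a".into(), 1);
    map.insert("b".into(), 2);
    // every key collides, so this lookup relies entirely on equality checks
    assert_eq!(map.get("b"), Some(&2));
}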

View file

@ -0,0 +1,88 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
int::{DynIntType, DynSIntType, DynUIntType, IntTypeTrait, SIntType},
ty::{Type, Value},
};
use std::ops::RangeBounds;
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Default)]
pub struct Valueless<T> {
pub ty: T,
}
impl<T: Type> Valueless<T> {
pub fn to_canonical(&self) -> Valueless<T::CanonicalType> {
Valueless {
ty: self.ty.canonical(),
}
}
pub fn from_canonical(v: Valueless<T::CanonicalType>) -> Self {
Valueless {
ty: T::from_canonical_type(v.ty),
}
}
}
mod sealed {
pub trait Sealed {}
}
pub trait ValuelessTr: sealed::Sealed {
type Type: Type<Value = Self::Value>;
type Value: Value<Type = Self::Type>;
}
impl<T> sealed::Sealed for Valueless<T> {}
impl<T: Type> ValuelessTr for Valueless<T> {
type Type = T;
type Value = T::Value;
}
impl<T: IntTypeTrait> Valueless<T> {
pub fn signum(&self) -> Valueless<SIntType<2>> {
Valueless::default()
}
pub fn as_same_width_uint(self) -> Valueless<T::SameWidthUInt> {
Valueless {
ty: self.ty.as_same_width_uint(),
}
}
pub fn as_same_width_sint(self) -> Valueless<T::SameWidthSInt> {
Valueless {
ty: self.ty.as_same_width_sint(),
}
}
pub fn as_same_value_uint(self) -> Valueless<DynUIntType> {
Valueless {
ty: self.ty.as_same_value_uint(),
}
}
pub fn as_same_value_sint(self) -> Valueless<DynSIntType> {
Valueless {
ty: self.ty.as_same_value_sint(),
}
}
pub fn concat<HighType: IntTypeTrait>(
&self,
high_part: Valueless<HighType>,
) -> Valueless<DynIntType<HighType::Signed>> {
let ty = DynIntType::new(
self.ty
.width()
.checked_add(high_part.ty.width())
.expect("result has too many bits"),
);
Valueless { ty }
}
pub fn repeat(&self, count: usize) -> Valueless<DynIntType<T::Signed>> {
let width = self.ty.width();
let ty = DynIntType::new(width.checked_mul(count).expect("result has too many bits"));
Valueless { ty }
}
pub fn slice<I: RangeBounds<usize>>(&self, index: I) -> Valueless<DynUIntType> {
let ty = self.ty.slice(index);
Valueless { ty }
}
}

View file

@ -1,15 +1,15 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
expr::{Expr, Flow, ToExpr},
expr::{Expr, ExprTrait, Flow, ToExpr},
intern::Interned,
module::{IncompleteDeclaration, NameId, ScopedNameId, StmtDeclaration, StmtWire},
module::{NameId, ScopedNameId},
source_location::SourceLocation,
ty::{CanonicalType, Type},
ty::{DynCanonicalType, DynType, Type},
};
use std::{cell::RefCell, fmt, rc::Rc};
use std::fmt;
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct Wire<T: Type> {
name: ScopedNameId,
source_location: SourceLocation,
@ -18,14 +18,27 @@ pub struct Wire<T: Type> {
impl<T: Type + fmt::Debug> fmt::Debug for Wire<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Wire({:?}: ", self.name)?;
self.ty.fmt(f)?;
f.write_str(")")
f.debug_struct("Wire")
.field("name", &self.name)
.field("ty", &self.ty)
.finish_non_exhaustive()
}
}
impl<T: Type> ToExpr for Wire<T> {
type Type = T;
fn ty(&self) -> Self::Type {
self.ty.clone()
}
fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
Expr::new_unchecked(self.expr_enum())
}
}
impl<T: Type> Wire<T> {
pub fn canonical(&self) -> Wire<CanonicalType> {
pub fn canonical(&self) -> Wire<T::CanonicalType> {
let Self {
name,
source_location,
@ -37,20 +50,29 @@ impl<T: Type> Wire<T> {
ty: ty.canonical(),
}
}
pub fn from_canonical(v: Wire<CanonicalType>) -> Self {
let Wire {
pub fn to_dyn_wire(&self) -> Wire<Interned<dyn DynType>> {
let Self {
name,
source_location,
ty,
} = v;
Self {
ref ty,
} = *self;
Wire {
name,
source_location,
ty: T::from_canonical(ty),
ty: ty.to_dyn(),
}
}
pub fn ty(&self) -> T {
self.ty
pub fn to_dyn_canonical_wire(&self) -> Wire<Interned<dyn DynCanonicalType>> {
let Self {
name,
source_location,
ref ty,
} = *self;
Wire {
name,
source_location,
ty: ty.canonical_dyn(),
}
}
pub fn new_unchecked(
scoped_name: ScopedNameId,
@ -88,57 +110,3 @@ impl<T: Type> Wire<T> {
true
}
}
#[derive(Clone)]
pub struct IncompleteWire {
pub(crate) declaration: Rc<RefCell<IncompleteDeclaration>>,
}
impl IncompleteWire {
#[track_caller]
pub fn complete<T: Type>(&mut self, ty: T) -> Expr<T> {
let canonical_type = ty.canonical();
let mut declaration = self.declaration.borrow_mut();
if let IncompleteDeclaration::Incomplete {
name,
source_location,
} = *declaration
{
*declaration = IncompleteDeclaration::Complete(
StmtWire {
annotations: (),
wire: Wire {
name,
source_location,
ty: canonical_type,
},
}
.into(),
);
}
match *declaration {
IncompleteDeclaration::Complete(StmtDeclaration::Wire(StmtWire {
wire:
Wire {
name,
source_location,
ty: wire_ty,
},
..
})) => {
drop(declaration);
assert_eq!(wire_ty, canonical_type, "type mismatch");
Wire {
name,
source_location,
ty,
}
.to_expr()
}
IncompleteDeclaration::Taken => panic!("can't use wire outside of containing module"),
IncompleteDeclaration::Complete(_) | IncompleteDeclaration::Incomplete { .. } => {
unreachable!()
}
}
}
}

View file

@ -1,295 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! Formal tests in Fayalite
use fayalite::{
cli::FormalMode,
clock::{Clock, ClockDomain},
expr::{CastTo, HdlPartialEq},
firrtl::ExportOptions,
formal::{any_const, any_seq, formal_reset, hdl_assert, hdl_assume},
hdl, hdl_module,
int::{Bool, DynSize, Size, UInt, UIntType},
module::{connect, connect_any, instance, memory, reg_builder, wire},
reset::ToReset,
testing::assert_formal,
ty::StaticType,
};
/// Test hidden state
///
/// Hidden state can cause problems for induction, since the formal engine
/// can assign invalid values to the state registers, making it traverse
/// valid but unreachable states.
///
/// One solution is to make the induction depth long enough that the engine
/// is forced to eventually pass through a reachable state. This can be
/// hampered by loops in the state space, in which case assumptions may be
/// added to break them.
///
/// Another solution is to "open the black box" and add additional
/// assertions involving the hidden state, so that the unreachable states
/// become invalid as well.
///
/// Both approaches are taken here.
///
/// See [Claire Wolf's presentation] and [Zipcpu blog article].
///
/// [Claire Wolf's presentation]: https://web.archive.org/web/20200115081517fw_/http://www.clifford.at/papers/2017/smtbmc-sby/
/// [Zipcpu blog article]: https://zipcpu.com/blog/2018/03/10/induction-exercise.html
mod hidden_state {
use super::*;
/// Test hidden state by shift registers
///
/// The code implements the ideas from an article in the [Zipcpu blog]. Two
/// shift registers are fed from the same input, so they should always have
/// the same value. However, the only observable is a comparison of their
/// last bits; all the other bits are hidden. To complicate matters, an enable
/// signal causes a loop in the state space.
///
/// [Zipcpu blog]: https://zipcpu.com/blog/2018/03/10/induction-exercise.html
#[test]
fn shift_register() {
enum ConstraintMode {
WithExtraAssertions,
WithExtraAssumptions,
}
use ConstraintMode::*;
#[hdl_module]
fn test_module(constraint_mode: ConstraintMode) {
#[hdl]
let clk: Clock = m.input();
#[hdl]
let cd = wire();
connect(
cd,
#[hdl]
ClockDomain {
clk,
rst: formal_reset().to_reset(),
},
);
// input signal for the shift registers
#[hdl]
let i: Bool = wire();
connect(i, any_seq(Bool));
// shift enable signal
#[hdl]
let en: Bool = wire();
connect(en, any_seq(Bool));
// comparison output
#[hdl]
let o: Bool = wire();
// shift registers, with enable
#[hdl]
let r1 = reg_builder().clock_domain(cd).reset(0u8);
#[hdl]
let r2 = reg_builder().clock_domain(cd).reset(0u8);
#[hdl]
if en {
connect_any(r1, (r1 << 1) | i.cast_to(UInt[1]));
connect_any(r2, (r2 << 1) | i.cast_to(UInt[1]));
}
// compare last bits of both shift registers
connect(o, r1[7].cmp_eq(r2[7]));
// what we want to prove: last bits are always equal
hdl_assert(clk, o, "");
// additional terms below are only needed to assist with the induction proof
match constraint_mode {
WithExtraAssertions => {
// "Open the box": add assertions about hidden state.
// In this case, the hidden bits are also always equal.
hdl_assert(clk, r1.cmp_eq(r2), "");
}
WithExtraAssumptions => {
// Break the loop, do not allow "en" to remain low forever
#[hdl]
let past_en_reg = reg_builder().clock_domain(cd).reset(false);
connect(past_en_reg, en);
hdl_assume(clk, past_en_reg | en, "");
}
}
}
// we need a minimum of 16 steps so we can constrain all eight shift register bits,
// given that we are allowed to disable the shift once every two cycles.
assert_formal(
"shift_register_with_assumptions",
test_module(WithExtraAssumptions),
FormalMode::Prove,
16,
None,
ExportOptions::default(),
);
// here a couple of cycles is enough
assert_formal(
"shift_register_with_assertions",
test_module(WithExtraAssertions),
FormalMode::Prove,
2,
None,
ExportOptions::default(),
);
}
}
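
// The effect described in the module above can be reproduced with a tiny plain-Rust
// model (illustrative only, not Fayalite): a brute-force check over all register
// states shows that the observable property alone is not inductive, while the
// strengthened property over the hidden bits is.
fn step(r1: u8, r2: u8, i: bool, en: bool) -> (u8, u8) {
    if en {
        ((r1 << 1) | i as u8, (r2 << 1) | i as u8)
    } else {
        (r1, r2)
    }
}

// the property we actually care about: the observable (last) bits agree
fn weak(r1: u8, r2: u8) -> bool {
    (r1 >> 7) == (r2 >> 7)
}

// the strengthened property over the hidden state: all bits agree
fn strong(r1: u8, r2: u8) -> bool {
    r1 == r2
}

// brute-force induction check: does p(state) imply p(step(state)) for all states?
fn is_inductive(p: fn(u8, u8) -> bool) -> bool {
    for r1 in 0..=255u8 {
        for r2 in 0..=255u8 {
            if !p(r1, r2) {
                continue;
            }
            for (i, en) in [(false, false), (false, true), (true, false), (true, true)] {
                let (n1, n2) = step(r1, r2, i, en);
                if !p(n1, n2) {
                    return false;
                }
            }
        }
    }
    true
}

fn main() {
    assert!(!is_inductive(weak)); // e.g. r1 = 0x40, r2 = 0x00 breaks it
    assert!(is_inductive(strong)); // "opening the box" makes induction go through
}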
/// Formal verification of designs containing memories
///
/// There is a trick for memories, described in the [Zipcpu blog].
/// First, select a fixed but arbitrary memory address, monitoring all reads
/// and writes made to it. Then, assert that anything read from that location
/// matches the last stored value.
///
/// A difficulty for induction is that the memory represents [hidden_state]. A
/// solution is to include an additional read port to the memory and assert
/// that the memory location effectively contains the last stored value.
/// This additional debug port is present only to assist the proof and is
/// unused (optimized out) in actual use.
///
/// [Zipcpu blog]: <https://zipcpu.com/zipcpu/2018/07/13/memories.html>
mod memory {
use super::*;
/// Test a simple 8-bit SRAM model
#[test]
fn test_sram() {
#[hdl]
struct WritePort<AddrWidth: Size> {
addr: UIntType<AddrWidth>,
data: UInt<8>,
en: Bool,
}
#[hdl]
struct ReadPort<AddrWidth: Size> {
addr: UIntType<AddrWidth>,
#[hdl(flip)]
data: UInt<8>,
}
/// This debug port is only meant to assist the proof.
/// For normal use in a design, a wrapper could be provided,
/// omitting this port.
/// The implementation is forbidden to use any information
/// provided on this port in its internal workings.
#[hdl]
struct DebugPort<AddrWidth: Size> {
selected: UIntType<AddrWidth>,
stored: UInt<8>,
wrote: Bool,
}
/// simple 1R1W SRAM model (one asynchronous read port and one
/// independent write port) with `n`-bit address width
#[hdl_module]
fn example_sram(n: usize) {
#[hdl]
let wr: WritePort<DynSize> = m.input(WritePort[n]);
#[hdl]
let rd: ReadPort<DynSize> = m.input(ReadPort[n]);
#[hdl]
let cd: ClockDomain = m.input();
// declare and connect the backing memory
#[hdl]
let mut mem = memory();
mem.depth(1 << n);
let read_port = mem.new_read_port();
let write_port = mem.new_write_port();
connect(write_port.clk, cd.clk);
connect(write_port.addr, wr.addr);
connect(write_port.en, wr.en);
connect(write_port.data, wr.data);
connect(write_port.mask, true);
connect(read_port.clk, cd.clk);
connect(read_port.addr, rd.addr);
connect(read_port.en, true);
connect(rd.data, read_port.data);
// To assist with induction, ensure that the chosen memory location
// really contains, always, the last value written to it.
#[hdl]
let dbg: DebugPort<DynSize> = m.input(DebugPort[n]);
let debug_port = mem.new_read_port();
connect(debug_port.en, true);
connect(debug_port.clk, cd.clk);
connect(debug_port.addr, dbg.selected);
#[hdl]
if dbg.wrote {
hdl_assert(cd.clk, debug_port.data.cmp_eq(dbg.stored), "");
// Try commenting out the assert above: induction will fail.
// Opening the trace shows that the memory contents and the
// stored value don't match, which is an unreachable state.
// Asserting the above makes that state invalid as well, so
// induction will skip this kind of situation.
}
}
/// formal verification of the SRAM module, parametrized by the
/// address bit-width
#[hdl_module]
fn test_module(n: usize) {
#[hdl]
let clk: Clock = m.input();
let cd = #[hdl]
ClockDomain {
clk,
rst: formal_reset().to_reset(),
};
// instantiate the SRAM model, connecting its inputs to
// a random sequence
#[hdl]
let rd: ReadPort<DynSize> = wire(ReadPort[n]);
connect(rd.addr, any_seq(UInt[n]));
#[hdl]
let wr: WritePort<DynSize> = wire(WritePort[n]);
connect(wr.addr, any_seq(UInt[n]));
connect(wr.data, any_seq(UInt::<8>::TYPE));
connect(wr.en, any_seq(Bool));
#[hdl]
let dut = instance(example_sram(n));
connect(dut.cd, cd);
connect(dut.rd, rd);
connect(dut.wr, wr);
// select a fixed but arbitrary test address
#[hdl]
let selected = wire(UInt[n]);
connect(selected, any_const(UInt[n]));
// store the last value written to that address
#[hdl]
let stored: UInt<8> = reg_builder().clock_domain(cd).reset(0u8);
// since memories are not initialized, track whether we wrote to the
// memory at least once
#[hdl]
let wrote: Bool = reg_builder().clock_domain(cd).reset(false);
// on a write, capture the last written value
#[hdl]
if wr.en & wr.addr.cmp_eq(selected) {
connect(stored, wr.data);
connect(wrote, true);
}
// on a read, assert that the read value is the same which was stored
#[hdl]
if rd.addr.cmp_eq(selected) & wrote {
hdl_assert(clk, rd.data.cmp_eq(stored), "");
}
// to assist induction, pass our state to the underlying instance
let dbg = #[hdl]
DebugPort {
selected,
stored,
wrote,
};
connect(dut.dbg, dbg);
}
assert_formal(
"sram",
test_module(8),
FormalMode::Prove,
2,
None,
ExportOptions::default(),
);
}
}
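
// The address-watching trick described in the module comment above can also be
// stated as a few lines of plain Rust (an illustrative shadow model, not Fayalite):
// pick one arbitrary address, shadow the last value written to it, and compare
// every read of that address against the shadow.
fn main() {
    let mut mem = [0u8; 256];
    let watched: usize = 0x42; // fixed but arbitrary
    let mut shadow: Option<u8> = None; // None until the first write to `watched`

    // (is_write, addr, data) operations; reads carry data = 0
    let ops = [(true, 0x42, 7u8), (true, 0x10, 9), (false, 0x10, 0), (false, 0x42, 0)];
    for (is_write, addr, data) in ops {
        if is_write {
            mem[addr] = data;
            if addr == watched {
                shadow = Some(data);
            }
        } else if addr == watched {
            // every read of the watched address must match the last write to it
            assert_eq!(Some(mem[addr]), shadow);
        }
    }
}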

View file

@ -1,199 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use fayalite::{
bundle::BundleType,
enum_::EnumType,
int::{BoolOrIntType, IntType},
phantom_const::PhantomConst,
prelude::*,
ty::StaticType,
};
use std::marker::PhantomData;
#[hdl(outline_generated)]
pub struct MyConstSize<V: Size> {
pub size: PhantomConst<UIntType<V>>,
}
#[hdl(outline_generated)]
pub struct S<T, Len: Size, T2> {
pub a: T,
b: UInt<3>,
pub(crate) c: ArrayType<UInt<1>, Len>,
pub d: T2,
pub _phantom: PhantomData<(T, Len)>,
}
#[hdl(outline_generated)]
pub struct S3<const LEN: usize, T> {
pub a: T,
b: UInt<3>,
pub(crate) c: Array<UInt<1>, LEN>,
pub d: S<T, ConstUsize<LEN>, ()>,
}
#[hdl(outline_generated)]
pub enum E<T> {
A,
B(UInt<3>),
C(T),
D(TyAlias2),
E(TyAlias<Bool, ConstUsize<1>, { 1 + 2 }>),
}
#[hdl(outline_generated)]
pub struct S2<T = ()> {
pub v: E<T>,
}
#[hdl(outline_generated)]
pub type TyAlias<T, Sz: Size, const C: usize, D = ()> = Array<S<T, Sz, D>, C>;
#[hdl(outline_generated)]
pub type TyAlias2 = TyAlias<UInt<8>, ConstUsize<24>, 5>;
// check that #[hdl] properly handles hygiene
macro_rules! types_in_macros {
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident, $D:ident, $E:ident, $F:ident) => {
#[hdl]
struct $F {}
#[hdl]
struct $A<$B, $C: Size, const $D: usize, $E = $F> {
$a: $B,
$b: UIntType<$C>,
$c: SInt<$D>,
$d: HdlOption<$E>,
$e: $E,
$f: $F,
}
#[allow(non_camel_case_types)]
#[hdl]
enum $B<$C: Size, const $D: usize, $E = $F> {
$a($A<(), $C, $D, $E>),
$b(UIntType<$C>),
$c(SInt<$D>),
$d,
$e($E),
$f($F),
}
};
// ensure every identifier has different hygiene
() => {
types_in_macros!(a);
};
($a:ident) => {
types_in_macros!($a, b);
};
($a:ident, $b:ident) => {
types_in_macros!($a, $b, c);
};
($a:ident, $b:ident, $c:ident) => {
types_in_macros!($a, $b, $c, d);
};
($a:ident, $b:ident, $c:ident, $d:ident) => {
types_in_macros!($a, $b, $c, $d, e);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident) => {
types_in_macros!($a, $b, $c, $d, $e, f);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident) => {
types_in_macros!($a, $b, $c, $d, $e, $f, A);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident) => {
types_in_macros!($a, $b, $c, $d, $e, $f, $A, B);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident) => {
types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, C);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident) => {
types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, $C, D);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident, $D:ident) => {
types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, $C, $D, E);
};
($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident, $D:ident, $E:ident) => {
types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, $C, $D, $E, F);
};
}
types_in_macros!();
mod bound_kind {
use fayalite::prelude::*;
#[hdl]
pub struct Type<T> {
v: T,
}
#[hdl]
pub struct Size<T: ::fayalite::int::Size> {
v: UIntType<T>,
}
}
macro_rules! check_bounds {
($name:ident<$(#[$field:ident, $kind:ident] $var:ident: $($bound:ident +)*),*>) => {
#[hdl(outline_generated)]
struct $name<$($var: $($bound +)*,)*> {
$($field: bound_kind::$kind<$var>,)*
}
};
}
check_bounds!(CheckBoundsS0<#[a, Size] A: Size +>);
check_bounds!(CheckBoundsS1<#[a, Size] A: KnownSize +>);
check_bounds!(CheckBoundsT0<#[a, Type] A: Type +>);
check_bounds!(CheckBoundsT1<#[a, Type] A: BoolOrIntType +>);
check_bounds!(CheckBoundsT2<#[a, Type] A: BundleType +>);
check_bounds!(CheckBoundsT3<#[a, Type] A: EnumType +>);
check_bounds!(CheckBoundsT4<#[a, Type] A: IntType +>);
check_bounds!(CheckBoundsT5<#[a, Type] A: StaticType +>);
check_bounds!(CheckBoundsSS0<#[a, Size] A: Size +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsSS1<#[a, Size] A: KnownSize +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsST0<#[a, Size] A: Size +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsST1<#[a, Size] A: KnownSize +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTS0<#[a, Type] A: Type +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS1<#[a, Type] A: BoolOrIntType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS2<#[a, Type] A: BundleType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS3<#[a, Type] A: EnumType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS4<#[a, Type] A: IntType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS5<#[a, Type] A: StaticType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTT0<#[a, Type] A: Type +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT1<#[a, Type] A: BoolOrIntType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT2<#[a, Type] A: BundleType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT3<#[a, Type] A: EnumType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT4<#[a, Type] A: IntType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT5<#[a, Type] A: StaticType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsSSS0<#[a, Size] A: Size +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSSS1<#[a, Size] A: KnownSize +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSST0<#[a, Size] A: Size +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsSST1<#[a, Size] A: KnownSize +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsSTS0<#[a, Size] A: Size +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSTS1<#[a, Size] A: KnownSize +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSTT0<#[a, Size] A: Size +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsSTT1<#[a, Size] A: KnownSize +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTSS0<#[a, Type] A: Type +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS1<#[a, Type] A: BoolOrIntType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS2<#[a, Type] A: BundleType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS3<#[a, Type] A: EnumType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS4<#[a, Type] A: IntType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS5<#[a, Type] A: StaticType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTST0<#[a, Type] A: Type +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST1<#[a, Type] A: BoolOrIntType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST2<#[a, Type] A: BundleType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST3<#[a, Type] A: EnumType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST4<#[a, Type] A: IntType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST5<#[a, Type] A: StaticType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTS0<#[a, Type] A: Type +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS1<#[a, Type] A: BoolOrIntType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS2<#[a, Type] A: BundleType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS3<#[a, Type] A: EnumType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS4<#[a, Type] A: IntType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS5<#[a, Type] A: StaticType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTT0<#[a, Type] A: Type +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT1<#[a, Type] A: BoolOrIntType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT2<#[a, Type] A: BundleType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT3<#[a, Type] A: EnumType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT4<#[a, Type] A: IntType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT5<#[a, Type] A: StaticType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);

File diff suppressed because it is too large

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff