Compare commits
No commits in common. "master" and "square-brackets-generics-experiment" have entirely different histories.
.forgejo/workflows/deps.yml (present on master only; absent from square-brackets-generics-experiment)
@@ -1,77 +0,0 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on:
  workflow_call:
    outputs:
      cache-primary-key:
        value: ${{ jobs.deps.outputs.cache-primary-key }}

jobs:
  deps:
    runs-on: debian-12
    outputs:
      cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }}
    steps:
      - uses: https://code.forgejo.org/actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: https://code.forgejo.org/actions/cache/restore@v3
        id: restore-deps
        with:
          path: deps
          key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }}
          lookup-only: true
      - name: Install Apt packages
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          apt-get update -qq
          apt-get install -qq \
            bison \
            build-essential \
            ccache \
            clang \
            cvc5 \
            flex \
            gawk \
            g++ \
            git \
            libboost-filesystem-dev \
            libboost-python-dev \
            libboost-system-dev \
            libffi-dev \
            libreadline-dev \
            lld \
            pkg-config \
            python3 \
            python3-click \
            tcl-dev \
            zlib1g-dev
      - name: Install Firtool
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          mkdir -p deps
          wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz
          sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz'
          tar -C deps -xvaf deps/firrtl.tar.gz
          rm -rf deps/firtool
          mv deps/firtool-1.86.0 deps/firtool
      - name: Get SymbiYosys
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --branch=yosys-0.45 https://github.com/YosysHQ/sby.git deps/sby
      - name: Build Z3
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --recursive --branch=z3-4.13.3 https://github.com/Z3Prover/z3.git deps/z3
          (cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local)
          make -C deps/z3/build -j"$(nproc)"
      - name: Build Yosys
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --recursive --branch=0.45 https://github.com/YosysHQ/yosys.git deps/yosys
          make -C deps/yosys -j"$(nproc)"
      - uses: https://code.forgejo.org/actions/cache/save@v3
        if: steps.restore-deps.outputs.cache-hit != 'true'
        with:
          path: deps
          key: ${{ steps.restore-deps.outputs.cache-primary-key }}
@@ -1,61 +1,19 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on: [push, pull_request]

jobs:
  deps:
    uses: ./.forgejo/workflows/deps.yml
  test:
    runs-on: debian-12
    needs: deps
    steps:
      - uses: https://code.forgejo.org/actions/checkout@v3
        with:
          fetch-depth: 0
      - run: |
          scripts/check-copyright.sh
      - run: |
          apt-get update -qq
          apt-get install -qq \
            bison \
            build-essential \
            ccache \
            clang \
            cvc5 \
            flex \
            gawk \
            git \
            libboost-filesystem-dev \
            libboost-python-dev \
            libboost-system-dev \
            libffi-dev \
            libreadline-dev \
            lld \
            pkg-config \
            python3 \
            python3-click \
            tcl-dev \
            z3 \
            zlib1g-dev
      - run: |
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.82.0
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.80.1
          source "$HOME/.cargo/env"
          echo "$PATH" >> "$GITHUB_PATH"
      - uses: https://code.forgejo.org/actions/cache/restore@v3
        with:
          path: deps
          key: ${{ needs.deps.outputs.cache-primary-key }}
          fail-on-cache-miss: true
      - run: |
          make -C deps/z3/build install
          make -C deps/sby install
          make -C deps/yosys install
          export PATH="$(realpath deps/firtool/bin):$PATH"
          echo "$PATH" >> "$GITHUB_PATH"
      - uses: https://github.com/Swatinem/rust-cache@v2
        with:
          save-if: ${{ github.ref == 'refs/heads/master' }}
      - run: cargo test
      - run: cargo build --tests --features=unstable-doc
      - run: cargo test --doc --features=unstable-doc
      - run: cargo test --features=unstable-doc
      - run: cargo doc --features=unstable-doc
.gitignore (vendored): 2 changed lines
@@ -1,4 +1,2 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
/target
.vscode
Cargo.lock (generated): 228 changed lines
@ -56,7 +56,7 @@ version = "1.1.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -66,21 +66,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayref"
|
||||
version = "0.3.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
||||
|
||||
[[package]]
|
||||
name = "autocfg"
|
||||
version = "1.1.0"
|
||||
|
@ -121,20 +109,6 @@ dependencies = [
|
|||
"wyz",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "blake3"
|
||||
version = "1.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7"
|
||||
dependencies = [
|
||||
"arrayref",
|
||||
"arrayvec",
|
||||
"cc",
|
||||
"cfg-if",
|
||||
"constant_time_eq",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block-buffer"
|
||||
version = "0.10.4"
|
||||
|
@ -144,15 +118,6 @@ dependencies = [
|
|||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.1.28"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1"
|
||||
dependencies = [
|
||||
"shlex",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.0"
|
||||
|
@ -205,12 +170,6 @@ version = "1.0.1"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422"
|
||||
|
||||
[[package]]
|
||||
name = "constant_time_eq"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
|
||||
|
||||
[[package]]
|
||||
name = "cpufeatures"
|
||||
version = "0.2.12"
|
||||
|
@ -230,27 +189,6 @@ dependencies = [
|
|||
"typenum",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ctor"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "derive_destructure2"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64b697ac90ff296f0fc031ee5a61c7ac31fb9fff50e3fb32873b09223613fc0c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "digest"
|
||||
version = "0.10.7"
|
||||
|
@ -280,7 +218,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -301,39 +239,32 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
|
|||
|
||||
[[package]]
|
||||
name = "fayalite"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"bitvec",
|
||||
"blake3",
|
||||
"clap",
|
||||
"ctor",
|
||||
"eyre",
|
||||
"fayalite-proc-macros",
|
||||
"fayalite-visit-gen",
|
||||
"hashbrown",
|
||||
"jobslot",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
"os_pipe",
|
||||
"petgraph",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"trybuild",
|
||||
"vec_map",
|
||||
"which",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-proc-macros"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"fayalite-proc-macros-impl",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fayalite-proc-macros-impl"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"base16ct",
|
||||
"num-bigint",
|
||||
|
@ -347,7 +278,7 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "fayalite-visit-gen"
|
||||
version = "0.3.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"indexmap",
|
||||
"prettyplease",
|
||||
|
@ -359,12 +290,6 @@ dependencies = [
|
|||
"thiserror",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fixedbitset"
|
||||
version = "0.5.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
|
@ -381,17 +306,6 @@ dependencies = [
|
|||
"version_check",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.2.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.3.1"
|
||||
|
@ -420,7 +334,7 @@ version = "0.5.9"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
|
||||
dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -431,9 +345,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
|
|||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.5.0"
|
||||
version = "2.2.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
|
||||
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
|
@ -452,20 +366,6 @@ version = "1.0.10"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
|
||||
|
||||
[[package]]
|
||||
name = "jobslot"
|
||||
version = "0.2.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"derive_destructure2",
|
||||
"getrandom",
|
||||
"libc",
|
||||
"scopeguard",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.153"
|
||||
|
@ -480,10 +380,11 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
|
|||
|
||||
[[package]]
|
||||
name = "num-bigint"
|
||||
version = "0.4.6"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
|
||||
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
|
||||
dependencies = [
|
||||
"autocfg",
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
]
|
||||
|
@ -512,25 +413,6 @@ version = "1.19.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||
|
||||
[[package]]
|
||||
name = "os_pipe"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "petgraph"
|
||||
version = "0.6.5"
|
||||
source = "git+https://github.com/programmerjake/petgraph.git?rev=258ea8071209a924b73fe96f9f87a3b7b45cbc9f#258ea8071209a924b73fe96f9f87a3b7b45cbc9f"
|
||||
dependencies = [
|
||||
"fixedbitset",
|
||||
"indexmap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prettyplease"
|
||||
version = "0.2.20"
|
||||
|
@ -543,9 +425,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.92"
|
||||
version = "1.0.83"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
|
||||
checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
@ -575,7 +457,7 @@ dependencies = [
|
|||
"errno",
|
||||
"libc",
|
||||
"linux-raw-sys",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -584,12 +466,6 @@ version = "1.0.17"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.202"
|
||||
|
@ -633,12 +509,6 @@ dependencies = [
|
|||
"digest",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
|
@ -647,9 +517,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
|||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.93"
|
||||
version = "2.0.66"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058"
|
||||
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
|
@ -671,7 +541,7 @@ dependencies = [
|
|||
"cfg-if",
|
||||
"fastrand",
|
||||
"rustix",
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -736,24 +606,12 @@ version = "0.2.2"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
||||
|
||||
[[package]]
|
||||
name = "vec_map"
|
||||
version = "0.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
|
||||
|
||||
[[package]]
|
||||
name = "version_check"
|
||||
version = "0.9.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.11.0+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
version = "6.0.1"
|
||||
|
@ -806,25 +664,15 @@ dependencies = [
|
|||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm",
|
||||
"windows_aarch64_msvc",
|
||||
"windows_i686_gnu",
|
||||
"windows_i686_gnullvm",
|
||||
"windows_i686_msvc",
|
||||
"windows_x86_64_gnu",
|
||||
"windows_x86_64_gnullvm",
|
||||
|
@ -833,51 +681,45 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
version = "0.52.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
|
||||
|
||||
[[package]]
|
||||
name = "winsafe"
|
||||
|
|
Cargo.toml: 26 changed lines
@@ -5,41 +5,31 @@ resolver = "2"
members = ["crates/*"]

[workspace.package]
version = "0.3.0"
version = "0.2.0"
license = "LGPL-3.0-or-later"
edition = "2021"
repository = "https://git.libre-chip.org/libre-chip/fayalite"
keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
categories = ["simulation", "development-tools", "compilers"]
rust-version = "1.82.0"
rust-version = "1.80.1"

[workspace.dependencies]
fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
fayalite-proc-macros = { version = "=0.2.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.2.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.2.0", path = "crates/fayalite-visit-gen" }
base16ct = "0.2.0"
bitvec = { version = "1.0.1", features = ["serde"] }
blake3 = { version = "1.5.4", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
ctor = "0.2.8"
eyre = "0.6.12"
hashbrown = "0.14.3"
indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.19"
num-bigint = "0.4.6"
indexmap = { version = "2.2.6", features = ["serde"] }
num-bigint = "0.4.4"
num-traits = "0.2.16"
os_pipe = "1.2.1"
# TODO: switch back to crates.io once petgraph accepts PR #684 and releases a new version
petgraph = { git = "https://github.com/programmerjake/petgraph.git", rev = "258ea8071209a924b73fe96f9f87a3b7b45cbc9f" }
prettyplease = "0.2.20"
proc-macro2 = "1.0.83"
quote = "1.0.36"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = { version = "1.0.117", features = ["preserve_order"] }
sha2 = "0.10.8"
syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] }
syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] }
tempfile = "3.10.1"
thiserror = "1.0.61"
trybuild = "1.0"
vec_map = "0.8.2"
which = "6.0.1"
README.md
@@ -1,7 +1,3 @@
<!--
SPDX-License-Identifier: LGPL-3.0-or-later
See Notices.txt for copyright information
-->
# Fayalite

Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).
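The proc-macro changes further down in this diff (hdl_bundle.rs, hdl_enum.rs) parse the `#[hdl]` attribute that this description implies. As a rough, hedged illustration only -- the struct name, field names, and the `UInt<8>` width syntax are assumptions, not taken from this repository -- a bundle declaration using the options visible in this diff (`flip` on a field, `cmp_eq` on the item) could look roughly like:

```rust
// Hypothetical sketch, not from the repository. Assumes the #[hdl] attribute macro
// (provided via the fayalite-proc-macros crate; exact import path assumed), the `flip`
// field option and `cmp_eq` item option parsed by hdl_bundle.rs, and the
// fayalite::int::Bool / UInt types referenced elsewhere in this diff.
use fayalite::int::{Bool, UInt};

#[hdl(cmp_eq)] // ItemOptions::cmp_eq: also derive fayalite::expr::ops::ExprPartialEq
pub struct HandshakeIo {
    pub data: UInt<8>, // assumed width syntax
    #[hdl(flip)] // ParsedBundle::field_flips: this field's direction is reversed
    pub ready: Bool,
}
```

The macro output sketched in hdl_bundle.rs then supplies a mask type, a match-variant struct, a builder, and the `Type`/`BundleType` impls for such a struct.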
@@ -13,11 +13,11 @@ rust-version.workspace = true
version.workspace = true

[dependencies]
base16ct.workspace = true
num-bigint.workspace = true
prettyplease.workspace = true
proc-macro2.workspace = true
quote.workspace = true
sha2.workspace = true
syn.workspace = true
tempfile.workspace = true
base16ct = { workspace = true }
num-bigint = { workspace = true }
prettyplease = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
sha2 = { workspace = true }
syn = { workspace = true }
tempfile = { workspace = true }
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
hdl_type_common::{
|
||||
common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField,
|
||||
|
@ -21,13 +19,13 @@ use syn::{
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct ParsedBundle {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<ItemOptions>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) struct_token: Token![struct],
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
pub(crate) fields: MaybeParsed<ParsedFieldsNamed, FieldsNamed>,
|
||||
pub(crate) field_flips: Vec<Option<HdlAttr<kw::flip, kw::hdl>>>,
|
||||
pub(crate) field_flips: Vec<Option<HdlAttr<kw::flip>>>,
|
||||
pub(crate) mask_type_ident: Ident,
|
||||
pub(crate) mask_type_match_variant_ident: Ident,
|
||||
pub(crate) match_variant_ident: Ident,
|
||||
|
@ -40,7 +38,7 @@ impl ParsedBundle {
|
|||
errors: &mut Errors,
|
||||
field: &mut Field,
|
||||
index: usize,
|
||||
) -> Option<HdlAttr<kw::flip, kw::hdl>> {
|
||||
) -> Option<HdlAttr<kw::flip>> {
|
||||
let Field {
|
||||
attrs,
|
||||
vis: _,
|
||||
|
@ -58,7 +56,8 @@ impl ParsedBundle {
|
|||
}
|
||||
*mutability = FieldMutability::None;
|
||||
colon_token.get_or_insert(Token![:](ident.span()));
|
||||
errors.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs))
|
||||
let options = errors.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs));
|
||||
options
|
||||
}
|
||||
fn parse(item: ItemStruct) -> syn::Result<Self> {
|
||||
let ItemStruct {
|
||||
|
@ -72,9 +71,7 @@ impl ParsedBundle {
|
|||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let mut options = errors
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
|
||||
&mut attrs,
|
||||
))
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions>::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
errors.ok(options.body.validate());
|
||||
let ItemOptions {
|
||||
|
@ -83,7 +80,6 @@ impl ParsedBundle {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
} = options.body;
|
||||
let mut fields = match fields {
|
||||
syn::Fields::Named(fields) => fields,
|
||||
|
@ -341,7 +337,7 @@ impl ToTokens for Builder {
|
|||
}));
|
||||
quote_spanned! {self.ident.span()=>
|
||||
#[automatically_derived]
|
||||
#[allow(non_camel_case_types, non_snake_case, dead_code)]
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
impl #impl_generics #unfilled_ty
|
||||
#where_clause
|
||||
{
|
||||
|
@ -431,18 +427,16 @@ impl ToTokens for ParsedBundle {
|
|||
builder_ident,
|
||||
mask_type_builder_ident,
|
||||
} = self;
|
||||
let span = ident.span();
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut item_attrs = attrs.clone();
|
||||
item_attrs.push(common_derives(span));
|
||||
item_attrs.push(common_derives(ident.span()));
|
||||
ItemStruct {
|
||||
attrs: item_attrs,
|
||||
vis: vis.clone(),
|
||||
|
@ -464,19 +458,19 @@ impl ToTokens for ParsedBundle {
|
|||
.map(|ParsedField { ident, ty, .. }| {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
let expr = ty.make_hdl_type_expr(context);
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: #expr,
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
parse_quote_spanned! {span=>
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#target {
|
||||
#(#fields)*
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span());
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let builder = Builder {
|
||||
vis: vis.clone(),
|
||||
|
@ -490,8 +484,9 @@ impl ToTokens for ParsedBundle {
|
|||
let unfilled_builder_ty = builder.builder_struct_ty(|_| BuilderFieldState::Unfilled);
|
||||
let filled_builder_ty = builder.builder_struct_ty(|_| BuilderFieldState::Filled);
|
||||
let mut mask_type_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut mask_type_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
for Field { ident, ty, .. } in &mut mask_type_fields.named {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
*ty = parse_quote_spanned! {ident.span()=>
|
||||
<#ty as ::fayalite::ty::Type>::MaskType
|
||||
};
|
||||
}
|
||||
|
@ -510,8 +505,8 @@ impl ToTokens for ParsedBundle {
|
|||
mask_type_builder.builder_struct_ty(|_| BuilderFieldState::Filled);
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
common_derives(ident.span()),
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
|
@ -524,15 +519,16 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_match_variant_fields = mask_type_fields;
|
||||
for Field { ty, .. } in &mut mask_type_match_variant_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
for Field { ident, ty, .. } in &mut mask_type_match_variant_fields.named {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
*ty = parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
common_derives(ident.span()),
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
|
@ -545,15 +541,16 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
.to_tokens(tokens);
|
||||
let mut match_variant_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut match_variant_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
for Field { ident, ty, .. } in &mut match_variant_fields.named {
|
||||
let ident = ident.as_ref().unwrap();
|
||||
*ty = parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
common_derives(ident.span()),
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
|
@ -565,20 +562,17 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let this_token = Ident::new("__this", span);
|
||||
let fields_token = Ident::new("__fields", span);
|
||||
let self_token = Token![self](span);
|
||||
let match_variant_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ident_str = ident.to_string();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::expr::Expr::field(#this_token, #ident_str),
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: ::fayalite::expr::Expr::field(__this, #ident_str),
|
||||
}
|
||||
}));
|
||||
let mask_type_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::ty::Type::mask_type(&#self_token.#ident),
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: ::fayalite::ty::Type::mask_type(&self.#ident),
|
||||
}
|
||||
}));
|
||||
let from_canonical_body_fields =
|
||||
|
@ -586,16 +580,16 @@ impl ToTokens for ParsedBundle {
|
|||
|((index, field), flip)| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ident_str = ident.to_string();
|
||||
let not_flipped = flip.is_none().then(|| Token![!](span));
|
||||
quote_spanned! {span=>
|
||||
let flipped = flip.is_some();
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: {
|
||||
let ::fayalite::bundle::BundleField {
|
||||
name: __name,
|
||||
flipped: __flipped,
|
||||
ty: __ty,
|
||||
} = #fields_token[#index];
|
||||
} = __fields[#index];
|
||||
::fayalite::__std::assert_eq!(&*__name, #ident_str);
|
||||
::fayalite::__std::assert!(#not_flipped __flipped);
|
||||
::fayalite::__std::assert_eq!(__flipped, #flipped);
|
||||
::fayalite::ty::Type::from_canonical(__ty)
|
||||
},
|
||||
}
|
||||
|
@ -606,17 +600,17 @@ impl ToTokens for ParsedBundle {
|
|||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ident_str = ident.to_string();
|
||||
let flipped = flip.is_some();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::bundle::BundleField {
|
||||
name: ::fayalite::intern::Intern::intern(#ident_str),
|
||||
flipped: #flipped,
|
||||
ty: ::fayalite::ty::Type::canonical(&#self_token.#ident),
|
||||
ty: ::fayalite::ty::Type::canonical(&self.#ident),
|
||||
},
|
||||
}
|
||||
},
|
||||
));
|
||||
let fields_len = fields.named().into_iter().len();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #mask_type_ident #type_generics
|
||||
#where_clause
|
||||
|
@ -632,7 +626,7 @@ impl ToTokens for ParsedBundle {
|
|||
<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
>;
|
||||
fn match_variants(
|
||||
#this_token: ::fayalite::expr::Expr<Self>,
|
||||
__this: ::fayalite::expr::Expr<Self>,
|
||||
__source_location: ::fayalite::source_location::SourceLocation,
|
||||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter {
|
||||
let __retval = #mask_type_match_variant_ident {
|
||||
|
@ -640,19 +634,19 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(__retval))
|
||||
}
|
||||
fn mask_type(&#self_token) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
*#self_token
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
*self
|
||||
}
|
||||
fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(#self_token)))
|
||||
fn canonical(&self) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(self)))
|
||||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(__canonical_type: ::fayalite::ty::CanonicalType) -> Self {
|
||||
let ::fayalite::ty::CanonicalType::Bundle(__bundle) = __canonical_type else {
|
||||
::fayalite::__std::panic!("expected bundle");
|
||||
};
|
||||
let #fields_token = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(#fields_token.len(), #fields_len, "bundle has wrong number of fields");
|
||||
let __fields = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(__fields.len(), #fields_len, "bundle has wrong number of fields");
|
||||
Self {
|
||||
#(#from_canonical_body_fields)*
|
||||
}
|
||||
|
@ -667,7 +661,7 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type Builder = #unfilled_mask_type_builder_ty;
|
||||
type FilledBuilder = #filled_mask_type_builder_ty;
|
||||
fn fields(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
fn fields(&self) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
::fayalite::intern::Intern::intern(&[#(#fields_body_fields)*][..])
|
||||
}
|
||||
}
|
||||
|
@ -682,12 +676,12 @@ impl ToTokens for ParsedBundle {
|
|||
impl #impl_generics ::fayalite::ty::TypeWithDeref for #mask_type_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn expr_deref(#this_token: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let #this_token = *#this_token;
|
||||
fn expr_deref(__this: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let __this = *__this;
|
||||
let __retval = #mask_type_match_variant_ident {
|
||||
#(#match_variant_body_fields)*
|
||||
};
|
||||
::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
::fayalite::intern::Interned::<_>::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
|
@ -705,7 +699,7 @@ impl ToTokens for ParsedBundle {
|
|||
<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
>;
|
||||
fn match_variants(
|
||||
#this_token: ::fayalite::expr::Expr<Self>,
|
||||
__this: ::fayalite::expr::Expr<Self>,
|
||||
__source_location: ::fayalite::source_location::SourceLocation,
|
||||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter {
|
||||
let __retval = #match_variant_ident {
|
||||
|
@ -713,21 +707,21 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(__retval))
|
||||
}
|
||||
fn mask_type(&#self_token) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
#mask_type_ident {
|
||||
#(#mask_type_body_fields)*
|
||||
}
|
||||
}
|
||||
fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(#self_token)))
|
||||
fn canonical(&self) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::Type::canonical(&::fayalite::bundle::Bundle::new(::fayalite::bundle::BundleType::fields(self)))
|
||||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(__canonical_type: ::fayalite::ty::CanonicalType) -> Self {
|
||||
let ::fayalite::ty::CanonicalType::Bundle(__bundle) = __canonical_type else {
|
||||
::fayalite::__std::panic!("expected bundle");
|
||||
};
|
||||
let #fields_token = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(#fields_token.len(), #fields_len, "bundle has wrong number of fields");
|
||||
let __fields = ::fayalite::bundle::BundleType::fields(&__bundle);
|
||||
::fayalite::__std::assert_eq!(__fields.len(), #fields_len, "bundle has wrong number of fields");
|
||||
Self {
|
||||
#(#from_canonical_body_fields)*
|
||||
}
|
||||
|
@ -742,7 +736,7 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type Builder = #unfilled_builder_ty;
|
||||
type FilledBuilder = #filled_builder_ty;
|
||||
fn fields(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
fn fields(&self) -> ::fayalite::intern::Interned<[::fayalite::bundle::BundleField]> {
|
||||
::fayalite::intern::Intern::intern(&[#(#fields_body_fields)*][..])
|
||||
}
|
||||
}
|
||||
|
@ -757,79 +751,16 @@ impl ToTokens for ParsedBundle {
|
|||
impl #impl_generics ::fayalite::ty::TypeWithDeref for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn expr_deref(#this_token: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let #this_token = *#this_token;
|
||||
fn expr_deref(__this: &::fayalite::expr::Expr<Self>) -> &<Self as ::fayalite::ty::Type>::MatchVariant {
|
||||
let __this = *__this;
|
||||
let __retval = #match_variant_ident {
|
||||
#(#match_variant_body_fields)*
|
||||
};
|
||||
::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
::fayalite::intern::Interned::<_>::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
let mut where_clause =
|
||||
Generics::from(generics)
|
||||
.where_clause
|
||||
.unwrap_or_else(|| syn::WhereClause {
|
||||
where_token: Token![where](span),
|
||||
predicates: Punctuated::new(),
|
||||
});
|
||||
let mut fields_cmp_eq = vec![];
|
||||
let mut fields_cmp_ne = vec![];
|
||||
for field in fields.named() {
|
||||
let field_ident = field.ident();
|
||||
let field_ty = field.ty();
|
||||
where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::expr::ops::ExprPartialEq<#field_ty>
|
||||
});
|
||||
fields_cmp_eq.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_ne.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_ne(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
}
|
||||
let cmp_eq_body;
|
||||
let cmp_ne_body;
|
||||
if fields_len == 0 {
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&true)
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&false)
|
||||
};
|
||||
} else {
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_eq)&*
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_ne)|*
|
||||
};
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::expr::ops::ExprPartialEq<Self> for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn cmp_eq(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_eq_body
|
||||
}
|
||||
fn cmp_ne(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_ne_body
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
let static_generics = generics.clone().for_static_type();
|
||||
let (static_impl_generics, static_type_generics, static_where_clause) =
|
||||
|
@ -837,7 +768,7 @@ impl ToTokens for ParsedBundle {
|
|||
let static_type_body_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: <#ty as ::fayalite::ty::StaticType>::TYPE,
|
||||
}
|
||||
}));
|
||||
|
@ -845,26 +776,28 @@ impl ToTokens for ParsedBundle {
|
|||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: <#ty as ::fayalite::ty::StaticType>::MASK_TYPE,
|
||||
}
|
||||
}));
|
||||
let type_properties = format_ident!("__type_properties", span = span);
|
||||
let type_properties = format_ident!("__type_properties", span = ident.span());
|
||||
let type_properties_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map(|(field, field_flip)| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let flipped = field_flip.is_some();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
let #type_properties = #type_properties.field(#flipped, <#ty as ::fayalite::ty::StaticType>::TYPE_PROPERTIES);
|
||||
}
|
||||
}));
|
||||
let type_properties_mask_fields = Vec::from_iter(fields.named().into_iter().zip(field_flips).map(|(field, field_flip)| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
let flipped = field_flip.is_some();
|
||||
let ty = field.ty();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
let #type_properties = #type_properties.field(#flipped, <#ty as ::fayalite::ty::StaticType>::MASK_TYPE_PROPERTIES);
|
||||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
|
|
|
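The `cmp_eq` hunks above make the bundle macro emit an `::fayalite::expr::ops::ExprPartialEq<Self>` impl whose generated `cmp_eq`/`cmp_ne` fold the per-field comparisons with `&`/`|` (and return a constant `true`/`false` expression for a zero-field bundle). A hedged sketch of the caller-side surface this gives -- `HandshakeIo` is the assumed bundle type from the earlier sketch, and how the two `Expr` values are obtained is outside this diff:

```rust
// Hypothetical sketch. Only the trait path and the signature
// fn cmp_eq(Expr<Self>, Expr<Self>) -> Expr<Bool> are taken from the generated code above;
// HandshakeIo is an assumed bundle type, not from this repository.
use fayalite::expr::{ops::ExprPartialEq, Expr};
use fayalite::int::Bool;

fn ios_equal(lhs: Expr<HandshakeIo>, rhs: Expr<HandshakeIo>) -> Expr<Bool> {
    // The derived impl compares field by field and ANDs the results together.
    ExprPartialEq::cmp_eq(lhs, rhs)
}
```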
@ -1,11 +1,9 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
hdl_type_common::{
|
||||
common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics,
|
||||
ParsedType, SplitForImpl, TypesParser, WrappedInConst,
|
||||
},
|
||||
kw, Errors, HdlAttr, PairsIterExt,
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{format_ident, quote_spanned, ToTokens};
|
||||
|
@ -31,7 +29,7 @@ crate::options! {
|
|||
pub(crate) struct ParsedVariantField {
|
||||
pub(crate) paren_token: Paren,
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<FieldOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<FieldOptions>,
|
||||
pub(crate) ty: MaybeParsed<ParsedType, Type>,
|
||||
pub(crate) comma_token: Option<Token![,]>,
|
||||
}
|
||||
|
@ -39,7 +37,7 @@ pub(crate) struct ParsedVariantField {
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct ParsedVariant {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<VariantOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<VariantOptions>,
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) field: Option<ParsedVariantField>,
|
||||
}
|
||||
|
@ -121,7 +119,7 @@ impl ParsedVariant {
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct ParsedEnum {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<ItemOptions>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) enum_token: Token![enum],
|
||||
pub(crate) ident: Ident,
|
||||
|
@ -144,9 +142,7 @@ impl ParsedEnum {
|
|||
} = item;
|
||||
let mut errors = Errors::new();
|
||||
let mut options = errors
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
|
||||
&mut attrs,
|
||||
))
|
||||
.unwrap_or_default(HdlAttr::<ItemOptions>::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
errors.ok(options.body.validate());
|
||||
let ItemOptions {
|
||||
|
@ -155,11 +151,7 @@ impl ParsedEnum {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
} = options.body;
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "#[hdl(cmp_eq)] is not yet implemented for enums");
|
||||
}
|
||||
attrs.retain(|attr| {
|
||||
if attr.path().is_ident("repr") {
|
||||
errors.error(attr, "#[repr] is not supported on #[hdl] enums");
|
||||
|
@ -208,19 +200,17 @@ impl ToTokens for ParsedEnum {
|
|||
variants,
|
||||
match_variant_ident,
|
||||
} = self;
|
||||
let span = ident.span();
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _, // TODO: implement cmp_eq for enums
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
struct_attrs.push(common_derives(span));
|
||||
struct_attrs.push(parse_quote_spanned! {span=>
|
||||
struct_attrs.push(common_derives(ident.span()));
|
||||
struct_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(non_snake_case)]
|
||||
});
|
||||
let struct_fields = Punctuated::from_iter(variants.pairs().map_pair_value_ref(
|
||||
|
@ -244,8 +234,8 @@ impl ToTokens for ParsedEnum {
|
|||
colon_token = Token);
|
||||
ty.clone().into()
|
||||
} else {
|
||||
colon_token = Token;
|
||||
parse_quote_spanned! {span=>
|
||||
colon_token = Token);
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
()
|
||||
}
|
||||
};
|
||||
|
@ -288,30 +278,30 @@ impl ToTokens for ParsedEnum {
|
|||
}) = field
|
||||
{
|
||||
let expr = ty.make_hdl_type_expr(context);
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: #expr,
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: (),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
parse_quote_spanned! {span=>
|
||||
parse_quote_spanned! {ident.span()=>
|
||||
#target {
|
||||
#(#fields)*
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span());
|
||||
let tokens = wrapped_in_const.inner();
|
||||
{
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, ident.span());
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
enum_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(dead_code)]
|
||||
});
|
||||
ItemEnum {
|
||||
|
@ -360,7 +350,7 @@ impl ToTokens for ParsedEnum {
|
|||
.to_tokens(tokens);
|
||||
}
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
enum_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
ItemEnum {
|
||||
|
@ -395,7 +385,7 @@ impl ToTokens for ParsedEnum {
|
|||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
ty: parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
},
|
||||
},
|
||||
|
@ -409,22 +399,21 @@ impl ToTokens for ParsedEnum {
|
|||
)),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let self_token = Token![self](span);
|
||||
for (index, ParsedVariant { ident, field, .. }) in variants.iter().enumerate() {
|
||||
if let Some(ParsedVariantField { ty, .. }) = field {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident<__V: ::fayalite::expr::ToExpr<Type = #ty>>(
|
||||
#self_token,
|
||||
self,
|
||||
v: __V,
|
||||
) -> ::fayalite::expr::Expr<Self> {
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::new_by_index(
|
||||
#self_token,
|
||||
self,
|
||||
#index,
|
||||
::fayalite::__std::option::Option::Some(
|
||||
::fayalite::expr::Expr::canonical(
|
||||
|
@ -437,16 +426,16 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident(#self_token) -> ::fayalite::expr::Expr<Self> {
|
||||
#vis fn #ident(self) -> ::fayalite::expr::Expr<Self> {
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::EnumLiteral::new_by_index(
|
||||
#self_token,
|
||||
self,
|
||||
#index,
|
||||
::fayalite::__std::option::Option::None,
|
||||
),
|
||||
|
@ -457,48 +446,46 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
let variants_token = Ident::new("variants", span);
|
||||
let from_canonical_body_fields = Vec::from_iter(variants.iter().enumerate().map(
|
||||
|(index, ParsedVariant { ident, field, .. })| {
|
||||
let ident_str = ident.to_string();
|
||||
let val = if field.is_some() {
|
||||
let missing_value_msg = format!("expected variant {ident} to have a field");
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::ty::Type::from_canonical(ty.expect(#missing_value_msg))
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::__std::assert!(ty.is_none());
|
||||
}
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#ident: {
|
||||
let ::fayalite::enum_::EnumVariant {
|
||||
name,
|
||||
ty,
|
||||
} = #variants_token[#index];
|
||||
} = variants[#index];
|
||||
::fayalite::__std::assert_eq!(&*name, #ident_str);
|
||||
#val
|
||||
},
|
||||
}
|
||||
},
|
||||
));
|
||||
let variant_access_token = Ident::new("variant_access", span);
|
||||
let match_active_scope_match_arms = Vec::from_iter(variants.iter().enumerate().map(
|
||||
|(index, ParsedVariant { ident, field, .. })| {
|
||||
if field.is_some() {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#index => #match_variant_ident::#ident(
|
||||
::fayalite::expr::ToExpr::to_expr(
|
||||
&::fayalite::expr::ops::VariantAccess::new_by_index(
|
||||
#variant_access_token.base(),
|
||||
#variant_access_token.variant_index(),
|
||||
variant_access.base(),
|
||||
variant_access.variant_index(),
|
||||
),
|
||||
),
|
||||
),
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#index => #match_variant_ident::#ident,
|
||||
}
|
||||
}
|
||||
|
@ -516,16 +503,16 @@ impl ToTokens for ParsedEnum {
|
|||
match field {
|
||||
Some(ParsedVariantField { options, .. }) => {
|
||||
let FieldOptions {} = options.body;
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
::fayalite::enum_::EnumVariant {
|
||||
name: ::fayalite::intern::Intern::intern(#ident_str),
|
||||
ty: ::fayalite::__std::option::Option::Some(
|
||||
::fayalite::ty::Type::canonical(&#self_token.#ident),
|
||||
::fayalite::ty::Type::canonical(&self.#ident),
|
||||
),
|
||||
},
|
||||
}
|
||||
}
|
||||
None => quote_spanned! {span=>
|
||||
None => quote_spanned! {ident.span()=>
|
||||
::fayalite::enum_::EnumVariant {
|
||||
name: ::fayalite::intern::Intern::intern(#ident_str),
|
||||
ty: ::fayalite::__std::option::Option::None,
|
||||
|
@ -535,7 +522,7 @@ impl ToTokens for ParsedEnum {
|
|||
},
|
||||
));
|
||||
let variants_len = variants.len();
|
||||
quote_spanned! {span=>
|
||||
quote_spanned! {ident.span()=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #target #type_generics
|
||||
#where_clause
|
||||
|
@ -553,11 +540,11 @@ impl ToTokens for ParsedEnum {
|
|||
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter {
|
||||
::fayalite::module::enum_match_variants_helper(this, source_location)
|
||||
}
|
||||
fn mask_type(&#self_token) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
|
||||
::fayalite::int::Bool
|
||||
}
|
||||
fn canonical(&#self_token) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::CanonicalType::Enum(::fayalite::enum_::Enum::new(::fayalite::enum_::EnumType::variants(#self_token)))
|
||||
fn canonical(&self) -> ::fayalite::ty::CanonicalType {
|
||||
::fayalite::ty::CanonicalType::Enum(::fayalite::enum_::Enum::new(::fayalite::enum_::EnumType::variants(self)))
|
||||
}
|
||||
#[track_caller]
|
||||
#[allow(non_snake_case)]
|
||||
|
@ -565,8 +552,8 @@ impl ToTokens for ParsedEnum {
|
|||
let ::fayalite::ty::CanonicalType::Enum(enum_) = canonical_type else {
|
||||
::fayalite::__std::panic!("expected enum");
|
||||
};
|
||||
let #variants_token = ::fayalite::enum_::EnumType::variants(&enum_);
|
||||
::fayalite::__std::assert_eq!(#variants_token.len(), #variants_len, "enum has wrong number of variants");
|
||||
let variants = ::fayalite::enum_::EnumType::variants(&enum_);
|
||||
::fayalite::__std::assert_eq!(variants.len(), #variants_len, "enum has wrong number of variants");
|
||||
Self {
|
||||
#(#from_canonical_body_fields)*
|
||||
}
|
||||
|
@ -582,16 +569,16 @@ impl ToTokens for ParsedEnum {
|
|||
fn match_activate_scope(
|
||||
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
) -> (<Self as ::fayalite::ty::Type>::MatchVariant, <Self as ::fayalite::ty::Type>::MatchActiveScope) {
|
||||
let (#variant_access_token, scope) = v.activate();
|
||||
let (variant_access, scope) = v.activate();
|
||||
(
|
||||
match #variant_access_token.variant_index() {
|
||||
match variant_access.variant_index() {
|
||||
#(#match_active_scope_match_arms)*
|
||||
#variants_len.. => ::fayalite::__std::panic!("invalid variant index"),
|
||||
},
|
||||
scope,
|
||||
)
|
||||
}
|
||||
fn variants(&#self_token) -> ::fayalite::intern::Interned<[::fayalite::enum_::EnumVariant]> {
|
||||
fn variants(&self) -> ::fayalite::intern::Interned<[::fayalite::enum_::EnumVariant]> {
|
||||
::fayalite::intern::Intern::intern(&[
|
||||
#(#variants_body_variants)*
|
||||
][..])
|
||||
|
@ -605,35 +592,35 @@ impl ToTokens for ParsedEnum {
static_generics.split_for_impl();
let static_type_body_variants =
Vec::from_iter(variants.iter().map(|ParsedVariant { ident, field, .. }| {
if field.is_some() {
quote_spanned! {span=>
if let Some(_) = field {
quote_spanned! {ident.span()=>
#ident: ::fayalite::ty::StaticType::TYPE,
}
} else {
quote_spanned! {span=>
quote_spanned! {ident.span()=>
#ident: (),
}
}
}));
let type_properties = format_ident!("__type_properties", span = span);
let type_properties = format_ident!("__type_properties", span = ident.span());
let type_properties_variants =
Vec::from_iter(variants.iter().map(|ParsedVariant { field, .. }| {
Vec::from_iter(variants.iter().map(|ParsedVariant { ident, field, .. }| {
let variant = if let Some(ParsedVariantField { ty, .. }) = field {
quote_spanned! {span=>
quote_spanned! {ident.span()=>
::fayalite::__std::option::Option::Some(
<#ty as ::fayalite::ty::StaticType>::TYPE_PROPERTIES,
)
}
} else {
quote_spanned! {span=>
quote_spanned! {ident.span()=>
::fayalite::__std::option::Option::None
}
};
quote_spanned! {span=>
quote_spanned! {ident.span()=>
let #type_properties = #type_properties.variant(#variant);
}
}));
quote_spanned! {span=>
quote_spanned! {ident.span()=>
#[automatically_derived]
impl #static_impl_generics ::fayalite::ty::StaticType
for #target #static_type_generics
@@ -1,138 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
hdl_type_common::{
get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType,
TypesParser,
},
kw, Errors, HdlAttr,
};
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{parse_quote_spanned, Attribute, Generics, Ident, ItemType, Token, Type, Visibility};

#[derive(Clone, Debug)]
pub(crate) struct ParsedTypeAlias {
pub(crate) attrs: Vec<Attribute>,
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
pub(crate) vis: Visibility,
pub(crate) type_token: Token![type],
pub(crate) ident: Ident,
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
pub(crate) eq_token: Token![=],
pub(crate) ty: MaybeParsed<ParsedType, Type>,
pub(crate) semi_token: Token![;],
}

impl ParsedTypeAlias {
fn parse(item: ItemType) -> syn::Result<Self> {
let ItemType {
mut attrs,
vis,
type_token,
ident,
mut generics,
eq_token,
ty,
semi_token,
} = item;
let mut errors = Errors::new();
let mut options = errors
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
&mut attrs,
))
.unwrap_or_default();
errors.ok(options.body.validate());
let ItemOptions {
outline_generated: _,
target: _,
custom_bounds,
no_static,
no_runtime_generics: _,
cmp_eq,
} = options.body;
if let Some((no_static,)) = no_static {
errors.error(no_static, "no_static is not valid on type aliases");
}
if let Some((cmp_eq,)) = cmp_eq {
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
}
let generics = if custom_bounds.is_some() {
MaybeParsed::Unrecognized(generics)
} else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) {
MaybeParsed::Parsed(generics)
} else {
MaybeParsed::Unrecognized(generics)
};
let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors);
errors.finish()?;
Ok(Self {
attrs,
options,
vis,
type_token,
ident,
generics,
eq_token,
ty,
semi_token,
})
}
}

impl ToTokens for ParsedTypeAlias {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
attrs,
options,
vis,
type_token,
ident,
generics,
eq_token,
ty,
semi_token,
} = self;
let ItemOptions {
outline_generated: _,
target,
custom_bounds: _,
no_static: _,
no_runtime_generics,
cmp_eq: _,
} = &options.body;
let target = get_target(target, ident);
let mut type_attrs = attrs.clone();
type_attrs.push(parse_quote_spanned! {ident.span()=>
#[allow(type_alias_bounds)]
});
ItemType {
attrs: type_attrs,
vis: vis.clone(),
type_token: *type_token,
ident: ident.clone(),
generics: generics.into(),
eq_token: *eq_token,
ty: Box::new(ty.clone().into()),
semi_token: *semi_token,
}
.to_tokens(tokens);
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
(generics, ty, no_runtime_generics)
{
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
ty.make_hdl_type_expr(context)
})
}
}
}

pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result<TokenStream> {
let item = ParsedTypeAlias::parse(item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = item.to_token_stream();
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "hdl-type-alias-");
}
Ok(contents)
}
@@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{fold::impl_fold, kw, Errors, HdlAttr, PairsIterExt};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote_spanned, ToTokens};
@@ -26,7 +24,6 @@ crate::options! {
CustomBounds(custom_bounds),
NoStatic(no_static),
NoRuntimeGenerics(no_runtime_generics),
CmpEq(cmp_eq),
}
}
@@ -67,7 +64,6 @@ impl Drop for WrappedInConst<'_> {
fn drop(&mut self) {
let inner = &self.inner;
quote_spanned! {self.span=>
#[allow(clippy::type_complexity)]
const _: () = {
#inner
};
@@ -1274,130 +1270,6 @@ make_parsed_type_or_const! {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ParsedTypePhantomData {
|
||||
pub(crate) phantom_data: known_items::PhantomData,
|
||||
pub(crate) lt_token: Token![<],
|
||||
pub(crate) ty: Box<ParsedType>,
|
||||
pub(crate) gt_token: Token![>],
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
struct ParsedTypePhantomData<> {
|
||||
phantom_data: known_items::PhantomData,
|
||||
lt_token: Token![<],
|
||||
ty: Box<ParsedType>,
|
||||
gt_token: Token![>],
|
||||
}
|
||||
}
|
||||
|
||||
impl ParsedTypePhantomData {
|
||||
pub(crate) fn try_from_named(
|
||||
named: ParsedTypeNamed,
|
||||
parser: &mut TypesParser<'_>,
|
||||
) -> Result<Result<Self, ParsedTypeNamed>, ParseFailed> {
|
||||
let ParsedTypeNamed { path, args } = named;
|
||||
let parsed_path = known_items::PhantomData::parse_path(path);
|
||||
let phantom_data = match parsed_path {
|
||||
Ok(phantom_data) => phantom_data,
|
||||
Err(path) => return Ok(Err(ParsedTypeNamed { path, args })),
|
||||
};
|
||||
let Some(ParsedGenericArguments {
|
||||
colon2_token: _,
|
||||
lt_token,
|
||||
args,
|
||||
gt_token,
|
||||
}) = args
|
||||
else {
|
||||
parser
|
||||
.errors()
|
||||
.error(phantom_data, "PhantomData requires generic arguments");
|
||||
return Err(ParseFailed);
|
||||
};
|
||||
let args_len = args.len();
|
||||
if args_len != 1 {
|
||||
parser.errors().error(
|
||||
phantom_data,
|
||||
format_args!(
|
||||
"wrong number of generic arguments supplied: got {args_len}, expected 1"
|
||||
),
|
||||
);
|
||||
return Err(ParseFailed);
|
||||
}
|
||||
let ty = args.into_iter().next().unwrap();
|
||||
let ParsedGenericArgument::Type(ty) = ty else {
|
||||
parser.errors().error(ty, "expected a type");
|
||||
return Err(ParseFailed);
|
||||
};
|
||||
Ok(Ok(Self {
|
||||
phantom_data,
|
||||
lt_token,
|
||||
ty: Box::new(ty),
|
||||
gt_token,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ParsedTypePhantomData> for Type {
|
||||
fn from(value: ParsedTypePhantomData) -> Type {
|
||||
let ParsedTypePhantomData {
|
||||
phantom_data,
|
||||
lt_token,
|
||||
ty,
|
||||
gt_token,
|
||||
} = value;
|
||||
let path = phantom_data.path;
|
||||
let mut args = Punctuated::new();
|
||||
args.push(GenericArgument::Type(ty.into()));
|
||||
let args = AngleBracketedGenericArguments {
|
||||
colon2_token: Some(Token),
|
||||
lt_token,
|
||||
args,
|
||||
gt_token,
|
||||
};
|
||||
let mut segments = path.segments;
|
||||
segments.last_mut().unwrap().arguments = PathArguments::AngleBracketed(args);
|
||||
Type::Path(TypePath {
|
||||
qself: None,
|
||||
path: Path {
|
||||
leading_colon: path.leading_colon,
|
||||
segments,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl MakeHdlTypeExpr for ParsedTypePhantomData {
|
||||
fn make_hdl_type_expr(&self, _context: &MakeHdlTypeExprContext) -> Expr {
|
||||
let ParsedTypePhantomData {
|
||||
phantom_data,
|
||||
lt_token: _,
|
||||
ty: _,
|
||||
gt_token: _,
|
||||
} = self;
|
||||
Expr::Path(ExprPath {
|
||||
attrs: vec![],
|
||||
qself: None,
|
||||
path: phantom_data.path.clone(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for ParsedTypePhantomData {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
phantom_data,
|
||||
lt_token,
|
||||
ty,
|
||||
gt_token,
|
||||
} = self;
|
||||
phantom_data.to_tokens(tokens);
|
||||
lt_token.to_tokens(tokens);
|
||||
ty.to_tokens(tokens);
|
||||
gt_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) enum ParsedType {
|
||||
Delimited(ParsedTypeDelimited),
|
||||
|
@ -1405,7 +1277,6 @@ pub(crate) enum ParsedType {
|
|||
NamedParam(ParsedTypeNamedParam),
|
||||
Tuple(ParsedTypeTuple),
|
||||
ConstUsize(ParsedTypeConstUsize),
|
||||
PhantomData(ParsedTypePhantomData),
|
||||
Array(ParsedTypeArray),
|
||||
UInt(ParsedTypeUInt),
|
||||
SInt(ParsedTypeSInt),
|
||||
|
@ -1420,7 +1291,6 @@ impl_fold! {
|
|||
NamedParam(ParsedTypeNamedParam),
|
||||
Tuple(ParsedTypeTuple),
|
||||
ConstUsize(ParsedTypeConstUsize),
|
||||
PhantomData(ParsedTypePhantomData),
|
||||
Array(ParsedTypeArray),
|
||||
UInt(ParsedTypeUInt),
|
||||
SInt(ParsedTypeSInt),
|
||||
|
@ -1437,7 +1307,6 @@ impl From<ParsedType> for Type {
|
|||
ParsedType::NamedParam(v) => v.into(),
|
||||
ParsedType::Tuple(v) => v.into(),
|
||||
ParsedType::ConstUsize(v) => v.into(),
|
||||
ParsedType::PhantomData(v) => v.into(),
|
||||
ParsedType::Array(v) => v.into(),
|
||||
ParsedType::UInt(v) => v.into(),
|
||||
ParsedType::SInt(v) => v.into(),
|
||||
|
@ -1488,7 +1357,6 @@ impl ToTokens for ParsedType {
|
|||
ParsedType::Named(ty) => ty.to_tokens(tokens),
|
||||
ParsedType::Tuple(ty) => ty.to_tokens(tokens),
|
||||
ParsedType::ConstUsize(ty) => ty.to_tokens(tokens),
|
||||
ParsedType::PhantomData(ty) => ty.to_tokens(tokens),
|
||||
ParsedType::Array(ty) => ty.to_tokens(tokens),
|
||||
ParsedType::UInt(ty) => ty.to_tokens(tokens),
|
||||
ParsedType::SInt(ty) => ty.to_tokens(tokens),
|
||||
|
@ -1540,7 +1408,7 @@ impl ParseTypes<Path> for ParsedType {
|
|||
let mut args = None;
|
||||
let segments = Punctuated::from_iter(segments.pairs_mut().map_pair_value_mut(|segment| {
|
||||
let PathSegment { ident, arguments } = segment;
|
||||
if args.is_some() {
|
||||
if let Some(_) = args {
|
||||
parser
|
||||
.errors()
|
||||
.error(&ident, "associated types/consts are not yet implemented");
|
||||
|
@ -1596,10 +1464,6 @@ impl ParseTypes<Path> for ParsedType {
|
|||
Ok(v) => return Ok(Self::ConstUsize(v)),
|
||||
Err(named) => named,
|
||||
};
|
||||
let named = match ParsedTypePhantomData::try_from_named(named, parser)? {
|
||||
Ok(v) => return Ok(Self::PhantomData(v)),
|
||||
Err(named) => named,
|
||||
};
|
||||
let named = match ParsedTypeUInt::try_from_named(named, parser)? {
|
||||
Ok(v) => return Ok(Self::UInt(v)),
|
||||
Err(named) => named,
|
||||
|
@ -1728,7 +1592,7 @@ impl ParseTypes<Path> for ParsedConstGenericType {
|
|||
let mut args = None;
|
||||
let segments = Punctuated::from_iter(segments.pairs_mut().map_pair_value_mut(|segment| {
|
||||
let PathSegment { ident, arguments } = segment;
|
||||
if args.is_some() {
|
||||
if let Some(_) = args {
|
||||
parser
|
||||
.errors()
|
||||
.error(&ident, "associated types/consts are not yet implemented");
|
||||
|
@ -1881,7 +1745,7 @@ impl<T: ParseTypes<I>, I, P: Clone> ParseTypes<Punctuated<I, P>> for Punctuated<
|
|||
pub(crate) enum UnparsedGenericParam {
|
||||
Type {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<TypeParamOptions, kw::hdl>,
|
||||
options: HdlAttr<TypeParamOptions>,
|
||||
ident: Ident,
|
||||
colon_token: Token![:],
|
||||
bounds: ParsedBounds,
|
||||
|
@ -1889,7 +1753,7 @@ pub(crate) enum UnparsedGenericParam {
|
|||
},
|
||||
Const {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ConstParamOptions, kw::hdl>,
|
||||
options: HdlAttr<ConstParamOptions>,
|
||||
const_token: Token![const],
|
||||
ident: Ident,
|
||||
colon_token: Token![:],
|
||||
|
@ -1917,7 +1781,7 @@ pub(crate) mod known_items {
|
|||
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
pub(crate) fn $known_item(span: Span) -> $known_item {
|
||||
let segments = $known_item::PATH_SEGMENTS[0].iter()
|
||||
let segments = $known_item::PATH_SEGMENTS.iter()
|
||||
.copied()
|
||||
.map(|seg| PathSegment::from(Ident::new(seg, span)))
|
||||
.collect();
|
||||
|
@ -1943,22 +1807,21 @@ pub(crate) mod known_items {
|
|||
return Ok(Self { span: ident.span(), path });
|
||||
}
|
||||
}
|
||||
for &path_segments in Self::PATH_SEGMENTS.iter() {
|
||||
if path.segments.len() == path_segments.len()
|
||||
if path.segments.len() == Self::PATH_SEGMENTS.len()
|
||||
&& path
|
||||
.segments
|
||||
.iter()
|
||||
.zip(path_segments)
|
||||
.zip(Self::PATH_SEGMENTS)
|
||||
.all(|(seg, expected)| {
|
||||
matches!(seg.arguments, PathArguments::None)
|
||||
&& seg.ident == *expected
|
||||
})
|
||||
{
|
||||
return Ok(Self { span: path.segments.last().unwrap().ident.span(), path });
|
||||
}
|
||||
}
|
||||
Ok(Self { span: path.segments.last().unwrap().ident.span(), path })
|
||||
} else {
|
||||
Err(path)
|
||||
}
|
||||
}
|
||||
#[allow(dead_code)]
|
||||
pub(crate) fn parse_path_with_arguments(mut path: Path) -> Result<(Self, PathArguments), Path> {
|
||||
let Some(last_segment) = path.segments.last_mut() else {
|
||||
|
@@ -2012,31 +1875,25 @@ pub(crate) mod known_items {
|
|||
}
|
||||
|
||||
macro_rules! impl_known_item {
|
||||
($(#[alias = $(::$alias:ident)+])* [$(::$seg:ident)+] ::$known_item:ident) => {
|
||||
($([$(::$head:ident)*])? ::$next:ident $(::$tail:ident)+) => {
|
||||
impl_known_item!([$($(::$head)*)? ::$next] $(::$tail)+);
|
||||
};
|
||||
([$(::$seg:ident)+] ::$known_item:ident) => {
|
||||
impl_known_item_body!($known_item);
|
||||
|
||||
impl $known_item {
|
||||
pub(crate) const PATH_SEGMENTS: &'static [&'static [&'static str]] = &[
|
||||
&[
|
||||
pub(crate) const PATH_SEGMENTS: &'static [&'static str] = &[
|
||||
$(stringify!($seg),)+
|
||||
stringify!($known_item),
|
||||
],
|
||||
$(&[
|
||||
$(stringify!($alias),)+
|
||||
],)*
|
||||
];
|
||||
}
|
||||
};
|
||||
($(#[alias = $(::$alias:ident)+])* $([$(::$head:ident)*])? ::$next:ident $(::$tail:ident)+) => {
|
||||
impl_known_item!($(#[alias = $(::$alias)+])* [$($(::$head)*)? ::$next] $(::$tail)+);
|
||||
};
|
||||
}
|
||||
|
||||
impl_known_item!(::fayalite::array::Array);
|
||||
impl_known_item!(::fayalite::array::ArrayType);
|
||||
impl_known_item!(::fayalite::bundle::BundleType);
|
||||
impl_known_item!(::fayalite::enum_::EnumType);
|
||||
impl_known_item!(::fayalite::int::BoolOrIntType);
|
||||
impl_known_item!(::fayalite::int::DynSize);
|
||||
impl_known_item!(::fayalite::int::IntType);
|
||||
impl_known_item!(::fayalite::int::KnownSize);
|
||||
|
@ -2045,22 +1902,12 @@ pub(crate) mod known_items {
|
|||
impl_known_item!(::fayalite::int::Size);
|
||||
impl_known_item!(::fayalite::int::UInt);
|
||||
impl_known_item!(::fayalite::int::UIntType);
|
||||
impl_known_item!(::fayalite::reset::ResetType);
|
||||
impl_known_item!(::fayalite::ty::CanonicalType);
|
||||
impl_known_item!(::fayalite::ty::StaticType);
|
||||
impl_known_item!(::fayalite::ty::Type);
|
||||
impl_known_item!(::fayalite::ty::Type::MaskType);
|
||||
impl_known_item!(::fayalite::util::ConstUsize);
|
||||
impl_known_item!(
|
||||
#[alias = ::std::primitive::usize]
|
||||
#[alias = ::core::primitive::usize]
|
||||
::fayalite::__std::primitive::usize
|
||||
);
|
||||
impl_known_item!(
|
||||
#[alias = ::std::marker::PhantomData]
|
||||
#[alias = ::core::marker::PhantomData]
|
||||
::fayalite::__std::marker::PhantomData
|
||||
);
|
||||
impl_known_item!(::fayalite::__std::primitive::usize);
|
||||
}
|
||||
|
||||
macro_rules! impl_bounds {
|
||||
|
@ -2070,16 +1917,11 @@ macro_rules! impl_bounds {
|
|||
$(
|
||||
$Variant:ident,
|
||||
)*
|
||||
$(
|
||||
#[unknown]
|
||||
$Unknown:ident,
|
||||
)?
|
||||
}
|
||||
) => {
|
||||
#[derive(Clone, Debug)]
|
||||
$vis enum $enum_type {
|
||||
$($Variant(known_items::$Variant),)*
|
||||
$($Unknown(syn::TypeParamBound),)?
|
||||
}
|
||||
|
||||
$(impl From<known_items::$Variant> for $enum_type {
|
||||
|
@ -2092,54 +1934,28 @@ macro_rules! impl_bounds {
|
|||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
$(Self::$Variant(v) => v.to_tokens(tokens),)*
|
||||
$(Self::$Unknown(v) => v.to_tokens(tokens),)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $enum_type {
|
||||
$vis fn parse_path(path: Path) -> Result<Self, Path> {
|
||||
#![allow(unreachable_code)]
|
||||
$(let path = match known_items::$Variant::parse_path(path) {
|
||||
Ok(v) => return Ok(Self::$Variant(v)),
|
||||
Err(path) => path,
|
||||
};)*
|
||||
$(return Ok(Self::$Unknown(syn::TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}.into()));)?
|
||||
Err(path)
|
||||
}
|
||||
$vis fn parse_type_param_bound(mut type_param_bound: syn::TypeParamBound) -> Result<Self, syn::TypeParamBound> {
|
||||
#![allow(unreachable_code)]
|
||||
if let syn::TypeParamBound::Trait(mut trait_bound) = type_param_bound {
|
||||
if let syn::TraitBound {
|
||||
paren_token: _,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path: _,
|
||||
} = trait_bound {
|
||||
match Self::parse_path(trait_bound.path) {
|
||||
Ok(retval) => return Ok(retval),
|
||||
Err(path) => trait_bound.path = path,
|
||||
}
|
||||
}
|
||||
type_param_bound = trait_bound.into();
|
||||
}
|
||||
$(return Ok(Self::$Unknown(type_param_bound));)?
|
||||
Err(type_param_bound)
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for $enum_type {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Self::parse_type_param_bound(input.parse()?)
|
||||
.map_err(|type_param_bound| syn::Error::new_spanned(
|
||||
type_param_bound,
|
||||
Self::parse_path(Path::parse_mod_style(input)?).map_err(|path| {
|
||||
syn::Error::new_spanned(
|
||||
path,
|
||||
format_args!("expected one of: {}", [$(stringify!($Variant)),*].join(", ")),
|
||||
))
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2147,7 +1963,6 @@ macro_rules! impl_bounds {
|
|||
#[allow(non_snake_case)]
|
||||
$vis struct $struct_type {
|
||||
$($vis $Variant: Option<known_items::$Variant>,)*
|
||||
$($vis $Unknown: Vec<syn::TypeParamBound>,)?
|
||||
}
|
||||
|
||||
impl ToTokens for $struct_type {
|
||||
|
@ -2159,63 +1974,42 @@ macro_rules! impl_bounds {
|
|||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
$(for v in &self.$Unknown {
|
||||
separator.to_tokens(tokens);
|
||||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
}
|
||||
}
|
||||
|
||||
const _: () = {
|
||||
#[derive(Clone, Debug)]
|
||||
#[allow(non_snake_case)]
|
||||
$vis struct Iter {
|
||||
$($Variant: Option<known_items::$Variant>,)*
|
||||
$($Unknown: std::vec::IntoIter<syn::TypeParamBound>,)?
|
||||
}
|
||||
$vis struct Iter($vis $struct_type);
|
||||
|
||||
impl IntoIterator for $struct_type {
|
||||
type Item = $enum_type;
|
||||
type IntoIter = Iter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Iter {
|
||||
$($Variant: self.$Variant,)*
|
||||
$($Unknown: self.$Unknown.into_iter(),)?
|
||||
}
|
||||
Iter(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Iter {
|
||||
type Item = $enum_type;
|
||||
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
$(
|
||||
if let Some(value) = self.$Variant.take() {
|
||||
if let Some(value) = self.0.$Variant.take() {
|
||||
return Some($enum_type::$Variant(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$Unknown.next() {
|
||||
return Some($enum_type::$Unknown(value));
|
||||
}
|
||||
)?
|
||||
None
|
||||
}
|
||||
|
||||
#[allow(unused_mut, unused_variables)]
|
||||
fn fold<B, F: FnMut(B, Self::Item) -> B>(mut self, mut init: B, mut f: F) -> B {
|
||||
$(
|
||||
if let Some(value) = self.$Variant.take() {
|
||||
if let Some(value) = self.0.$Variant.take() {
|
||||
init = f(init, $enum_type::$Variant(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$Unknown.next() {
|
||||
init = f(init, $enum_type::$Unknown(value));
|
||||
}
|
||||
)?
|
||||
init
|
||||
}
|
||||
}
|
||||
|
@ -2227,9 +2021,6 @@ macro_rules! impl_bounds {
|
|||
$($enum_type::$Variant(v) => {
|
||||
self.$Variant = Some(v);
|
||||
})*
|
||||
$($enum_type::$Unknown(v) => {
|
||||
self.$Unknown.push(v);
|
||||
})?
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2248,7 +2039,6 @@ macro_rules! impl_bounds {
|
|||
$(if let Some(v) = v.$Variant {
|
||||
self.$Variant = Some(v);
|
||||
})*
|
||||
$(self.$Unknown.extend(v.$Unknown);)*
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2293,46 +2083,35 @@ macro_rules! impl_bounds {
|
|||
impl_bounds! {
|
||||
#[struct = ParsedBounds]
|
||||
pub(crate) enum ParsedBound {
|
||||
BoolOrIntType,
|
||||
BundleType,
|
||||
EnumType,
|
||||
IntType,
|
||||
KnownSize,
|
||||
ResetType,
|
||||
Size,
|
||||
StaticType,
|
||||
Type,
|
||||
#[unknown]
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
impl_bounds! {
|
||||
#[struct = ParsedTypeBounds]
|
||||
pub(crate) enum ParsedTypeBound {
|
||||
BoolOrIntType,
|
||||
BundleType,
|
||||
EnumType,
|
||||
IntType,
|
||||
ResetType,
|
||||
StaticType,
|
||||
Type,
|
||||
#[unknown]
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
impl From<ParsedTypeBound> for ParsedBound {
|
||||
fn from(value: ParsedTypeBound) -> Self {
|
||||
match value {
|
||||
ParsedTypeBound::BoolOrIntType(v) => ParsedBound::BoolOrIntType(v),
|
||||
ParsedTypeBound::BundleType(v) => ParsedBound::BundleType(v),
|
||||
ParsedTypeBound::EnumType(v) => ParsedBound::EnumType(v),
|
||||
ParsedTypeBound::IntType(v) => ParsedBound::IntType(v),
|
||||
ParsedTypeBound::ResetType(v) => ParsedBound::ResetType(v),
|
||||
ParsedTypeBound::StaticType(v) => ParsedBound::StaticType(v),
|
||||
ParsedTypeBound::Type(v) => ParsedBound::Type(v),
|
||||
ParsedTypeBound::Unknown(v) => ParsedBound::Unknown(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2340,26 +2119,20 @@ impl From<ParsedTypeBound> for ParsedBound {
|
|||
impl From<ParsedTypeBounds> for ParsedBounds {
|
||||
fn from(value: ParsedTypeBounds) -> Self {
|
||||
let ParsedTypeBounds {
|
||||
BoolOrIntType,
|
||||
BundleType,
|
||||
EnumType,
|
||||
IntType,
|
||||
ResetType,
|
||||
StaticType,
|
||||
Type,
|
||||
Unknown,
|
||||
} = value;
|
||||
Self {
|
||||
BoolOrIntType,
|
||||
BundleType,
|
||||
EnumType,
|
||||
IntType,
|
||||
KnownSize: None,
|
||||
ResetType,
|
||||
Size: None,
|
||||
StaticType,
|
||||
Type,
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2368,10 +2141,6 @@ impl ParsedTypeBound {
|
|||
fn implied_bounds(self) -> ParsedTypeBounds {
|
||||
let span = self.span();
|
||||
match self {
|
||||
Self::BoolOrIntType(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::BundleType(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
|
@ -2382,12 +2151,6 @@ impl ParsedTypeBound {
|
|||
]),
|
||||
Self::IntType(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::BoolOrIntType(known_items::BoolOrIntType(span)),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::ResetType(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::StaticType(known_items::StaticType(span)),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::StaticType(v) => ParsedTypeBounds::from_iter([
|
||||
|
@ -2395,7 +2158,6 @@ impl ParsedTypeBound {
|
|||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::Type(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::from(v)]),
|
||||
Self::Unknown(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::Unknown(v)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2421,16 +2183,13 @@ impl From<ParsedSizeTypeBounds> for ParsedBounds {
|
|||
fn from(value: ParsedSizeTypeBounds) -> Self {
|
||||
let ParsedSizeTypeBounds { KnownSize, Size } = value;
|
||||
Self {
|
||||
BoolOrIntType: None,
|
||||
BundleType: None,
|
||||
EnumType: None,
|
||||
IntType: None,
|
||||
KnownSize,
|
||||
ResetType: None,
|
||||
Size,
|
||||
StaticType: None,
|
||||
Type: None,
|
||||
Unknown: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2458,7 +2217,6 @@ impl ParsedBounds {
|
|||
fn categorize(self, errors: &mut Errors, span: Span) -> ParsedBoundsCategory {
|
||||
let mut type_bounds = None;
|
||||
let mut size_type_bounds = None;
|
||||
let mut unknown_bounds = vec![];
|
||||
self.into_iter().for_each(|bound| match bound.categorize() {
|
||||
ParsedBoundCategory::Type(bound) => {
|
||||
type_bounds
|
||||
|
@@ -2470,37 +2228,15 @@ impl ParsedBounds {
.get_or_insert_with(ParsedSizeTypeBounds::default)
.extend([bound]);
}
ParsedBoundCategory::Unknown(bound) => unknown_bounds.push(bound),
});
match (type_bounds, size_type_bounds, unknown_bounds.is_empty()) {
(None, None, true) => ParsedBoundsCategory::Type(ParsedTypeBounds {
match (type_bounds, size_type_bounds) {
(None, None) => ParsedBoundsCategory::Type(ParsedTypeBounds {
Type: Some(known_items::Type(span)),
..Default::default()
}),
(None, None, false) => {
errors.error(
unknown_bounds.remove(0),
"unknown bounds: must use at least one known bound (such as `Type`) with any unknown bounds",
);
ParsedBoundsCategory::Type(ParsedTypeBounds {
Unknown: unknown_bounds,
..Default::default()
})
}
(None, Some(bounds), true) => ParsedBoundsCategory::SizeType(bounds),
(None, Some(bounds), false) => {
// TODO: implement
errors.error(
unknown_bounds.remove(0),
"unknown bounds with `Size` bounds are not implemented",
);
ParsedBoundsCategory::SizeType(bounds)
}
(Some(bounds), None, _) => ParsedBoundsCategory::Type(ParsedTypeBounds {
Unknown: unknown_bounds,
..bounds
}),
(Some(type_bounds), Some(size_type_bounds), _) => {
(None, Some(bounds)) => ParsedBoundsCategory::SizeType(bounds),
(Some(bounds), None) => ParsedBoundsCategory::Type(bounds),
(Some(type_bounds), Some(size_type_bounds)) => {
errors.error(
size_type_bounds
.Size
@@ -2517,29 +2253,24 @@ impl ParsedBounds {
|
|||
pub(crate) enum ParsedBoundCategory {
|
||||
Type(ParsedTypeBound),
|
||||
SizeType(ParsedSizeTypeBound),
|
||||
Unknown(syn::TypeParamBound),
|
||||
}
|
||||
|
||||
impl ParsedBound {
|
||||
fn categorize(self) -> ParsedBoundCategory {
|
||||
match self {
|
||||
Self::BoolOrIntType(v) => ParsedBoundCategory::Type(ParsedTypeBound::BoolOrIntType(v)),
|
||||
Self::BundleType(v) => ParsedBoundCategory::Type(ParsedTypeBound::BundleType(v)),
|
||||
Self::EnumType(v) => ParsedBoundCategory::Type(ParsedTypeBound::EnumType(v)),
|
||||
Self::IntType(v) => ParsedBoundCategory::Type(ParsedTypeBound::IntType(v)),
|
||||
Self::KnownSize(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::KnownSize(v)),
|
||||
Self::ResetType(v) => ParsedBoundCategory::Type(ParsedTypeBound::ResetType(v)),
|
||||
Self::Size(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::Size(v)),
|
||||
Self::StaticType(v) => ParsedBoundCategory::Type(ParsedTypeBound::StaticType(v)),
|
||||
Self::Type(v) => ParsedBoundCategory::Type(ParsedTypeBound::Type(v)),
|
||||
Self::Unknown(v) => ParsedBoundCategory::Unknown(v),
|
||||
}
|
||||
}
|
||||
fn implied_bounds(self) -> ParsedBounds {
|
||||
match self.categorize() {
|
||||
ParsedBoundCategory::Type(v) => v.implied_bounds().into(),
|
||||
ParsedBoundCategory::SizeType(v) => v.implied_bounds().into(),
|
||||
ParsedBoundCategory::Unknown(v) => ParsedBounds::from_iter([ParsedBound::Unknown(v)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2547,7 +2278,7 @@ impl ParsedBound {
|
|||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ParsedTypeParam {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<TypeParamOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<TypeParamOptions>,
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) colon_token: Token![:],
|
||||
pub(crate) bounds: ParsedTypeBounds,
|
||||
|
@ -2581,7 +2312,7 @@ impl ToTokens for ParsedTypeParam {
|
|||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ParsedSizeTypeParam {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<TypeParamOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<TypeParamOptions>,
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) colon_token: Token![:],
|
||||
pub(crate) bounds: ParsedSizeTypeBounds,
|
||||
|
@ -2625,7 +2356,7 @@ pub(crate) struct ParsedConstParamWhereBounds {
|
|||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ParsedConstParam {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<ConstParamOptions, kw::hdl>,
|
||||
pub(crate) options: HdlAttr<ConstParamOptions>,
|
||||
pub(crate) const_token: Token![const],
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) colon_token: Token![:],
|
||||
|
@ -2682,7 +2413,7 @@ impl ParsedGenericParam {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct ParsedGenerics {
|
||||
pub(crate) lt_token: Option<Token![<]>,
|
||||
pub(crate) params: Punctuated<ParsedGenericParam, Token![,]>,
|
||||
|
@ -2842,7 +2573,6 @@ impl ParsedGenerics {
|
|||
}
|
||||
})
|
||||
.collect();
|
||||
let param_token = Ident::new("__param", ident.span());
|
||||
for (param_count, (generics_accumulation_type, next_param)) in generics_accumulation_types
|
||||
.iter()
|
||||
.zip(&self.params)
|
||||
|
@ -2891,7 +2621,7 @@ impl ParsedGenerics {
|
|||
next_generics.split_for_impl();
|
||||
let next_turbofish = next_type_generics.as_turbofish();
|
||||
let mut param: Expr = parse_quote_spanned! {ident.span()=>
|
||||
#param_token
|
||||
__param
|
||||
};
|
||||
let mut generics = next_generics.clone();
|
||||
let mut index_type = param_ident.clone();
|
||||
|
@ -2906,7 +2636,7 @@ impl ParsedGenerics {
|
|||
is_const: false,
|
||||
});
|
||||
param = parse_quote_spanned! {ident.span()=>
|
||||
::fayalite::ty::TypeOrDefault::get(#param_token, || #default_expr)
|
||||
::fayalite::ty::TypeOrDefault::get(__param, || #default_expr)
|
||||
};
|
||||
let context = MakeHdlTypeExprContext {
|
||||
named_param_values: self_members[..param_count]
|
||||
|
@ -2971,8 +2701,8 @@ impl ParsedGenerics {
|
|||
{
|
||||
type Output = #next_target #next_type_generics;
|
||||
|
||||
fn index(&self, #param_token: #index_type) -> &Self::Output {
|
||||
::fayalite::intern::Interned::into_inner(
|
||||
fn index(&self, __param: #index_type) -> &Self::Output {
|
||||
::fayalite::intern::Interned::<_>::into_inner(
|
||||
::fayalite::intern::Intern::intern_sized(#output_expr),
|
||||
)
|
||||
}
|
||||
|
@ -2992,7 +2722,7 @@ impl ParsedGenerics {
|
|||
.iter()
|
||||
.cloned()
|
||||
.chain([parse_quote_spanned! {ident.span()=>
|
||||
#param_token
|
||||
__param
|
||||
}])
|
||||
.collect(),
|
||||
is_const: false,
|
||||
|
@ -3031,8 +2761,8 @@ impl ParsedGenerics {
|
|||
{
|
||||
type Output = #next_target #next_target_args;
|
||||
|
||||
fn index(&self, #param_token: #param_ident) -> &Self::Output {
|
||||
::fayalite::intern::Interned::into_inner(
|
||||
fn index(&self, __param: #param_ident) -> &Self::Output {
|
||||
::fayalite::intern::Interned::<_>::into_inner(
|
||||
::fayalite::intern::Intern::intern_sized(#output_expr),
|
||||
)
|
||||
}
|
||||
|
@ -3059,7 +2789,7 @@ impl ParsedGenerics {
|
|||
.iter()
|
||||
.cloned()
|
||||
.chain([parse_quote_spanned! {ident.span()=>
|
||||
#param_token
|
||||
__param
|
||||
}])
|
||||
.collect(),
|
||||
is_const: false,
|
||||
|
@ -3101,8 +2831,8 @@ impl ParsedGenerics {
|
|||
{
|
||||
type Output = #next_target #next_target_args;
|
||||
|
||||
fn index(&self, #param_token: __Param) -> &Self::Output {
|
||||
::fayalite::intern::Interned::into_inner(
|
||||
fn index(&self, __param: __Param) -> &Self::Output {
|
||||
::fayalite::intern::Interned::<_>::into_inner(
|
||||
::fayalite::intern::Intern::intern_sized(#output_expr),
|
||||
)
|
||||
}
|
||||
|
@ -3133,11 +2863,9 @@ impl ParsedGenerics {
|
|||
let (input_param, punct) = input_param.into_tuple();
|
||||
let (unparsed_param, late_parsed_param) = match input_param {
|
||||
GenericParam::Lifetime(param) => {
|
||||
errors.unwrap_or_default(
|
||||
HdlAttr::<LifetimeParamOptions, kw::hdl>::parse_and_take_attr(
|
||||
errors.unwrap_or_default(HdlAttr::<LifetimeParamOptions>::parse_and_take_attr(
|
||||
&mut param.attrs,
|
||||
),
|
||||
);
|
||||
));
|
||||
errors.error(param, "lifetime generics are not supported by #[hdl]");
|
||||
continue;
|
||||
}
|
||||
|
@ -3151,9 +2879,7 @@ impl ParsedGenerics {
|
|||
}) => {
|
||||
let span = ident.span();
|
||||
let options = errors
|
||||
.unwrap_or_default(
|
||||
HdlAttr::<TypeParamOptions, kw::hdl>::parse_and_take_attr(attrs),
|
||||
)
|
||||
.unwrap_or_default(HdlAttr::<TypeParamOptions>::parse_and_take_attr(attrs))
|
||||
.unwrap_or_default();
|
||||
let colon_token = colon_token.unwrap_or_else(|| Token);
|
||||
if !bounds.is_empty() {
|
||||
|
@ -3191,9 +2917,7 @@ impl ParsedGenerics {
|
|||
default,
|
||||
}) => {
|
||||
let options = errors
|
||||
.unwrap_or_default(
|
||||
HdlAttr::<ConstParamOptions, kw::hdl>::parse_and_take_attr(attrs),
|
||||
)
|
||||
.unwrap_or_default(HdlAttr::<ConstParamOptions>::parse_and_take_attr(attrs))
|
||||
.unwrap_or_default();
|
||||
if let Some(default) = default {
|
||||
let _ = eq_token;
|
||||
|
@@ -3413,29 +3137,17 @@ impl ParsedGenerics {
.Type
.get_or_insert_with(|| known_items::Type(bound.span()));
match bound {
ParsedTypeBound::BoolOrIntType(_)
| ParsedTypeBound::BundleType(_)
ParsedTypeBound::BundleType(_)
| ParsedTypeBound::EnumType(_)
| ParsedTypeBound::IntType(_)
| ParsedTypeBound::ResetType(_) => {
errors.error(bound, "bounds on mask types are not implemented");
| ParsedTypeBound::IntType(_) => {
errors.error(bound, "bound on mask type not implemented");
}
ParsedTypeBound::StaticType(bound) => {
if bounds.StaticType.is_none() {
errors.error(
bound,
"StaticType bound on mask type without corresponding \
StaticType bound on original type is not implemented",
);
}
errors.error(bound, "StaticType bound on mask type without corresponding StaticType bound on original type is not implemented");
}
},
ParsedTypeBound::Type(_) => {}
ParsedTypeBound::Unknown(_) => {
errors.error(
bound,
"unknown bounds on mask types are not implemented",
);
}
}
}
bounds.add_implied_bounds();
@@ -3815,7 +3527,7 @@ impl SplitForImpl for Generics {
Self::TypeGenerics<'_>,
Self::WhereClause<'_>,
) {
Generics::split_for_impl(self)
Generics::split_for_impl(&self)
}
}
@@ -4229,7 +3941,6 @@ impl MakeHdlTypeExpr for ParsedType {
Self::NamedParam(v) => v.make_hdl_type_expr(context),
Self::Tuple(v) => v.make_hdl_type_expr(context),
Self::ConstUsize(v) => v.make_hdl_type_expr(context),
Self::PhantomData(v) => v.make_hdl_type_expr(context),
Self::Array(v) => v.make_hdl_type_expr(context),
Self::UInt(v) => v.make_hdl_type_expr(context),
Self::SInt(v) => v.make_hdl_type_expr(context),
@@ -4276,13 +3987,7 @@ impl MakeHdlTypeExpr for ParsedExpr {
match self {
ParsedExpr::Delimited(expr) => expr.make_hdl_type_expr(context),
ParsedExpr::NamedParamConst(expr) => expr.make_hdl_type_expr(context),
ParsedExpr::Other(expr) => {
let span = expr.span();
let const_usize = known_items::ConstUsize(span);
parse_quote_spanned! {expr.span()=>
#const_usize::<{ #expr }>
}
}
ParsedExpr::Other(expr) => (**expr).clone(),
}
}
}
@@ -3,43 +3,22 @@
#![cfg_attr(test, recursion_limit = "512")]
use proc_macro2::{Span, TokenStream};
use quote::{quote, ToTokens};
use std::{
collections::{hash_map::Entry, HashMap},
io::{ErrorKind, Write},
};
use std::io::{ErrorKind, Write};
use syn::{
bracketed,
ext::IdentExt,
parenthesized,
bracketed, parenthesized,
parse::{Parse, ParseStream, Parser},
parse_quote,
punctuated::{Pair, Punctuated},
spanned::Spanned,
token::{Bracket, Paren},
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token,
punctuated::Pair,
AttrStyle, Attribute, Error, Item, Token,
};

mod fold;
mod hdl_bundle;
mod hdl_enum;
mod hdl_type_alias;
mod hdl_type_common;
mod module;
mod process_cfg;

pub(crate) trait CustomToken:
Copy
+ Spanned
+ ToTokens
+ std::fmt::Debug
+ Eq
+ std::hash::Hash
+ Default
+ quote::IdentFragment
+ Parse
{
const IDENT_STR: &'static str;
}
//mod value_derive_common;
//mod value_derive_struct;

mod kw {
pub(crate) use syn::token::Extern as extern_;
@@ -59,26 +38,14 @@ mod kw {
}

crate::fold::no_op_fold!($kw);

impl crate::CustomToken for $kw {
const IDENT_STR: &'static str = stringify!($kw);
}
};
}

custom_keyword!(__evaluated_cfgs);
custom_keyword!(all);
custom_keyword!(any);
custom_keyword!(cfg);
custom_keyword!(cfg_attr);
custom_keyword!(clock_domain);
custom_keyword!(cmp_eq);
custom_keyword!(connect_inexact);
custom_keyword!(custom_bounds);
custom_keyword!(flip);
custom_keyword!(hdl);
custom_keyword!(hdl_module);
custom_keyword!(incomplete_wire);
custom_keyword!(input);
custom_keyword!(instance);
custom_keyword!(m);
@@ -88,11 +55,11 @@ mod kw {
custom_keyword!(no_reset);
custom_keyword!(no_runtime_generics);
custom_keyword!(no_static);
custom_keyword!(not);
custom_keyword!(outline_generated);
custom_keyword!(output);
custom_keyword!(reg_builder);
custom_keyword!(reset);
custom_keyword!(reset_default);
custom_keyword!(skip);
custom_keyword!(target);
custom_keyword!(wire);
@@ -101,34 +68,34 @@ mod kw {
type Pound = Token![#]; // work around https://github.com/rust-lang/rust/issues/50676

#[derive(Clone, Debug)]
pub(crate) struct HdlAttr<T, KW> {
pub(crate) struct HdlAttr<T> {
pub(crate) pound_token: Pound,
pub(crate) style: AttrStyle,
pub(crate) bracket_token: syn::token::Bracket,
pub(crate) kw: KW,
pub(crate) hdl: kw::hdl,
pub(crate) paren_token: Option<syn::token::Paren>,
pub(crate) body: T,
}

crate::fold::impl_fold! {
struct HdlAttr<T, KW,> {
struct HdlAttr<T,> {
pound_token: Pound,
style: AttrStyle,
bracket_token: syn::token::Bracket,
kw: KW,
hdl: kw::hdl,
paren_token: Option<syn::token::Paren>,
body: T,
}
}

#[allow(dead_code)]
impl<T, KW> HdlAttr<T, KW> {
pub(crate) fn split_body(self) -> (HdlAttr<(), KW>, T) {
impl<T> HdlAttr<T> {
pub(crate) fn split_body(self) -> (HdlAttr<()>, T) {
let Self {
pound_token,
style,
bracket_token,
kw,
hdl,
paren_token,
body,
} = self;
@@ -137,19 +104,19 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: (),
|
||||
},
|
||||
body,
|
||||
)
|
||||
}
|
||||
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2, KW> {
|
||||
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: _,
|
||||
} = self;
|
||||
|
@ -157,20 +124,17 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
}
|
||||
}
|
||||
pub(crate) fn as_ref(&self) -> HdlAttr<&T, KW>
|
||||
where
|
||||
KW: Clone,
|
||||
{
|
||||
pub(crate) fn as_ref(&self) -> HdlAttr<&T> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
ref kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
ref body,
|
||||
} = *self;
|
||||
|
@ -178,20 +142,17 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw: kw.clone(),
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
}
|
||||
}
|
||||
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(
|
||||
self,
|
||||
f: F,
|
||||
) -> Result<HdlAttr<R, KW>, E> {
|
||||
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(self, f: F) -> Result<HdlAttr<R>, E> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
|
@ -199,17 +160,17 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: f(body)?,
|
||||
})
|
||||
}
|
||||
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R, KW> {
|
||||
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R> {
|
||||
let Self {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
} = self;
|
||||
|
@ -217,7 +178,7 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body: f(body),
|
||||
}
|
||||
|
@ -225,32 +186,31 @@ impl<T, KW> HdlAttr<T, KW> {
|
|||
fn to_attr(&self) -> Attribute
|
||||
where
|
||||
T: ToTokens,
|
||||
KW: ToTokens,
|
||||
{
|
||||
parse_quote! { #self }
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Default, KW: Default> Default for HdlAttr<T, KW> {
|
||||
impl<T: Default> Default for HdlAttr<T> {
|
||||
fn default() -> Self {
|
||||
T::default().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, KW: Default> From<T> for HdlAttr<T, KW> {
|
||||
impl<T> From<T> for HdlAttr<T> {
|
||||
fn from(body: T) -> Self {
|
||||
HdlAttr {
|
||||
pound_token: Default::default(),
|
||||
style: AttrStyle::Outer,
|
||||
bracket_token: Default::default(),
|
||||
kw: Default::default(),
|
||||
hdl: Default::default(),
|
||||
paren_token: Default::default(),
|
||||
body,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
||||
impl<T: ToTokens> ToTokens for HdlAttr<T> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
self.pound_token.to_tokens(tokens);
|
||||
match self.style {
|
||||
|
@ -258,7 +218,7 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
|||
AttrStyle::Outer => {}
|
||||
};
|
||||
self.bracket_token.surround(tokens, |tokens| {
|
||||
self.kw.to_tokens(tokens);
|
||||
self.hdl.to_tokens(tokens);
|
||||
match self.paren_token {
|
||||
Some(paren_token) => {
|
||||
paren_token.surround(tokens, |tokens| self.body.to_tokens(tokens))
|
||||
|
@ -266,7 +226,7 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
|||
None => {
|
||||
let body = self.body.to_token_stream();
|
||||
if !body.is_empty() {
|
||||
syn::token::Paren(self.kw.span())
|
||||
syn::token::Paren(self.hdl.span)
|
||||
.surround(tokens, |tokens| tokens.extend([body]));
|
||||
}
|
||||
}
|
||||
|
@ -275,24 +235,18 @@ impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
|
|||
}
|
||||
}
|
||||
|
||||
fn is_hdl_attr<KW: CustomToken>(attr: &Attribute) -> bool {
|
||||
attr.path().is_ident(KW::IDENT_STR)
|
||||
fn is_hdl_attr(attr: &Attribute) -> bool {
|
||||
attr.path().is_ident("hdl")
|
||||
}
|
||||
|
||||
impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
||||
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>>
|
||||
where
|
||||
KW: ToTokens,
|
||||
{
|
||||
impl<T: Parse> HdlAttr<T> {
|
||||
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>> {
|
||||
let mut retval = None;
|
||||
let mut errors = Errors::new();
|
||||
attrs.retain(|attr| {
|
||||
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
|
||||
if is_hdl_attr(attr) {
|
||||
if retval.is_some() {
|
||||
errors.push(Error::new_spanned(
|
||||
attr,
|
||||
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
|
||||
));
|
||||
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
|
||||
}
|
||||
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
|
||||
false
|
||||
|
@ -303,19 +257,13 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
|||
errors.finish()?;
|
||||
Ok(retval)
|
||||
}
|
||||
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>>
|
||||
where
|
||||
KW: ToTokens,
|
||||
{
|
||||
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>> {
|
||||
let mut retval = None;
|
||||
let mut errors = Errors::new();
|
||||
for attr in attrs {
|
||||
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
|
||||
if is_hdl_attr(attr) {
|
||||
if retval.is_some() {
|
||||
errors.push(Error::new_spanned(
|
||||
attr,
|
||||
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
|
||||
));
|
||||
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
|
||||
}
|
||||
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
|
||||
}
|
||||
|
@ -336,7 +284,7 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
|||
) -> syn::Result<Self> {
|
||||
let bracket_content;
|
||||
let bracket_token = bracketed!(bracket_content in input);
|
||||
let kw = bracket_content.parse()?;
|
||||
let hdl = bracket_content.parse()?;
|
||||
let paren_content;
|
||||
let body;
|
||||
let paren_token;
|
||||
|
@ -357,7 +305,7 @@ impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
|
|||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
hdl,
|
||||
paren_token,
|
||||
body,
|
||||
})
|
||||
|
@ -865,7 +813,6 @@ macro_rules! options {
|
|||
};
|
||||
}
|
||||
|
||||
use crate::hdl_type_alias::hdl_type_alias_impl;
|
||||
pub(crate) use options;
|
||||
|
||||
pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStream {
|
||||
|
@@ -905,372 +852,25 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr
}
}

fn hdl_module_impl(item: ItemFn) -> syn::Result<TokenStream> {
let func = module::ModuleFn::parse_from_fn(item)?;
let options = func.config_options();
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
let options = syn::parse2::<module::ConfigOptions>(attr)?;
let options = HdlAttr::from(options);
let func = syn::parse2::<module::ModuleFn>(quote! { #options #item })?;
let mut contents = func.generate();
if options.outline_generated.is_some() {
if options.body.outline_generated.is_some() {
contents = outline_generated(contents, "module-");
}
Ok(contents)
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) enum CfgExpr {
|
||||
Option {
|
||||
ident: Ident,
|
||||
value: Option<(Token![=], LitStr)>,
|
||||
},
|
||||
All {
|
||||
all: kw::all,
|
||||
paren: Paren,
|
||||
exprs: Punctuated<CfgExpr, Token![,]>,
|
||||
},
|
||||
Any {
|
||||
any: kw::any,
|
||||
paren: Paren,
|
||||
exprs: Punctuated<CfgExpr, Token![,]>,
|
||||
},
|
||||
Not {
|
||||
not: kw::not,
|
||||
paren: Paren,
|
||||
expr: Box<CfgExpr>,
|
||||
trailing_comma: Option<Token![,]>,
|
||||
},
|
||||
}
|
||||
|
||||
impl Parse for CfgExpr {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
match input.cursor().ident() {
|
||||
Some((_, cursor)) if cursor.eof() => {
|
||||
return Ok(CfgExpr::Option {
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
value: None,
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
|
||||
return Ok(CfgExpr::Option {
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
value: Some((input.parse()?, input.parse()?)),
|
||||
});
|
||||
}
|
||||
let contents;
|
||||
if input.peek(kw::all) {
|
||||
Ok(CfgExpr::All {
|
||||
all: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
exprs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
} else if input.peek(kw::any) {
|
||||
Ok(CfgExpr::Any {
|
||||
any: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
exprs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
} else if input.peek(kw::not) {
|
||||
Ok(CfgExpr::Not {
|
||||
not: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
trailing_comma: contents.parse()?,
|
||||
})
|
||||
} else {
|
||||
Err(input.error("expected cfg-pattern"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for CfgExpr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
CfgExpr::Option { ident, value } => {
|
||||
ident.to_tokens(tokens);
|
||||
if let Some((eq, value)) = value {
|
||||
eq.to_tokens(tokens);
|
||||
value.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
CfgExpr::All { all, paren, exprs } => {
|
||||
all.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
|
||||
}
|
||||
CfgExpr::Any { any, paren, exprs } => {
|
||||
any.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
|
||||
}
|
||||
CfgExpr::Not {
|
||||
not,
|
||||
paren,
|
||||
expr,
|
||||
trailing_comma,
|
||||
} => {
|
||||
not.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| {
|
||||
expr.to_tokens(tokens);
|
||||
trailing_comma.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) struct Cfg {
|
||||
cfg: kw::cfg,
|
||||
paren: Paren,
|
||||
expr: CfgExpr,
|
||||
trailing_comma: Option<Token![,]>,
|
||||
}
|
||||
|
||||
impl Cfg {
|
||||
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
|
||||
syn::parse2(meta.to_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Cfg {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
cfg,
|
||||
paren,
|
||||
expr,
|
||||
trailing_comma,
|
||||
} = self;
|
||||
cfg.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| {
|
||||
expr.to_tokens(tokens);
|
||||
trailing_comma.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfg {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
Ok(Self {
|
||||
cfg: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
trailing_comma: contents.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) struct CfgAttr {
|
||||
cfg_attr: kw::cfg_attr,
|
||||
paren: Paren,
|
||||
expr: CfgExpr,
|
||||
comma: Token![,],
|
||||
attrs: Punctuated<Meta, Token![,]>,
|
||||
}
|
||||
|
||||
impl CfgAttr {
|
||||
pub(crate) fn to_cfg(&self) -> Cfg {
|
||||
Cfg {
|
||||
cfg: kw::cfg(self.cfg_attr.span),
|
||||
paren: self.paren,
|
||||
expr: self.expr.clone(),
|
||||
trailing_comma: None,
|
||||
}
|
||||
}
|
||||
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
|
||||
syn::parse2(meta.to_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for CfgAttr {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
Ok(Self {
|
||||
cfg_attr: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
comma: contents.parse()?,
|
||||
attrs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct CfgAndValue {
|
||||
cfg: Cfg,
|
||||
eq_token: Token![=],
|
||||
value: LitBool,
|
||||
}
|
||||
|
||||
impl Parse for CfgAndValue {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Ok(Self {
|
||||
cfg: input.parse()?,
|
||||
eq_token: input.parse()?,
|
||||
value: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Cfgs<T> {
|
||||
pub(crate) bracket: Bracket,
|
||||
pub(crate) cfgs_map: HashMap<Cfg, T>,
|
||||
pub(crate) cfgs_list: Vec<Cfg>,
|
||||
}
|
||||
|
||||
impl<T> Default for Cfgs<T> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
bracket: Default::default(),
|
||||
cfgs_map: Default::default(),
|
||||
cfgs_list: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Cfgs<T> {
|
||||
fn insert_cfg(&mut self, cfg: Cfg, value: T) {
|
||||
match self.cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
self.cfgs_list.push(entry.key().clone());
|
||||
entry.insert(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfgs<bool> {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
let bracket = bracketed!(contents in input);
|
||||
let mut cfgs_map = HashMap::new();
|
||||
let mut cfgs_list = Vec::new();
|
||||
for CfgAndValue {
|
||||
cfg,
|
||||
eq_token,
|
||||
value,
|
||||
} in contents.call(Punctuated::<CfgAndValue, Token![,]>::parse_terminated)?
|
||||
{
|
||||
let _ = eq_token;
|
||||
match cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
cfgs_list.push(entry.key().clone());
|
||||
entry.insert(value.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
bracket,
|
||||
cfgs_map,
|
||||
cfgs_list,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfgs<()> {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
let bracket = bracketed!(contents in input);
|
||||
let mut cfgs_map = HashMap::new();
|
||||
let mut cfgs_list = Vec::new();
|
||||
for cfg in contents.call(Punctuated::<Cfg, Token![,]>::parse_terminated)? {
|
||||
match cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
cfgs_list.push(entry.key().clone());
|
||||
entry.insert(());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
bracket,
|
||||
cfgs_map,
|
||||
cfgs_list,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Cfgs<()> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
bracket,
|
||||
cfgs_map: _,
|
||||
cfgs_list,
|
||||
} = self;
|
||||
bracket.surround(tokens, |tokens| {
|
||||
for cfg in cfgs_list {
|
||||
cfg.to_tokens(tokens);
|
||||
<Token![,]>::default().to_tokens(tokens);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn hdl_main(
|
||||
kw: impl CustomToken,
|
||||
attr: TokenStream,
|
||||
item: TokenStream,
|
||||
) -> syn::Result<TokenStream> {
|
||||
fn parse_evaluated_cfgs_attr<R>(
|
||||
input: ParseStream,
|
||||
parse_inner: impl FnOnce(ParseStream) -> syn::Result<R>,
|
||||
) -> syn::Result<R> {
|
||||
let _: Token![#] = input.parse()?;
|
||||
let bracket_content;
|
||||
bracketed!(bracket_content in input);
|
||||
let _: kw::__evaluated_cfgs = bracket_content.parse()?;
|
||||
let paren_content;
|
||||
parenthesized!(paren_content in bracket_content);
|
||||
parse_inner(&paren_content)
|
||||
}
|
||||
let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2(
|
||||
|input: ParseStream| {
|
||||
let peek = input.fork();
|
||||
if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() {
|
||||
let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::<bool>::parse)?;
|
||||
Ok((Some(evaluated_cfgs), input.parse()?))
|
||||
} else {
|
||||
Ok((None, input.parse()?))
|
||||
}
|
||||
},
|
||||
item,
|
||||
)?;
|
||||
let cfgs = if let Some(cfgs) = evaluated_cfgs {
|
||||
cfgs
|
||||
} else {
|
||||
let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?;
|
||||
if cfgs.cfgs_list.is_empty() {
|
||||
Cfgs::default()
|
||||
} else {
|
||||
return Ok(quote! {
|
||||
::fayalite::__cfg_expansion_helper! {
|
||||
[]
|
||||
#cfgs
|
||||
{#[::fayalite::#kw(#attr)]} { #item }
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
let item = syn::parse2(quote! { #[#kw(#attr)] #item })?;
|
||||
let Some(item) = process_cfg::process_cfgs(item, cfgs)? else {
|
||||
return Ok(TokenStream::new());
|
||||
};
|
||||
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let item = syn::parse2::<Item>(quote! { #[hdl(#attr)] #item })?;
|
||||
match item {
|
||||
Item::Enum(item) => hdl_enum::hdl_enum(item),
|
||||
Item::Struct(item) => hdl_bundle::hdl_bundle(item),
|
||||
Item::Fn(item) => hdl_module_impl(item),
|
||||
Item::Type(item) => hdl_type_alias_impl(item),
|
||||
_ => Err(syn::Error::new(
|
||||
Span::call_site(),
|
||||
"top-level #[hdl] can only be used on structs, enums, type aliases, or functions",
|
||||
"top-level #[hdl] can only be used on structs or enums",
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
hdl_main(kw::hdl_module::default(), attr, item)
|
||||
}
|
||||
|
||||
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
hdl_main(kw::hdl::default(), attr, item)
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@
|
|||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
hdl_type_common::{ParsedGenerics, SplitForImpl},
|
||||
kw,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO},
|
||||
options, Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
|
@ -10,6 +9,7 @@ use proc_macro2::TokenStream;
|
|||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use std::collections::HashSet;
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote,
|
||||
visit::{visit_pat, Visit},
|
||||
Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct,
|
||||
|
@ -59,9 +59,9 @@ impl Visit<'_> for CheckNameConflictsWithModuleBuilderVisitor<'_> {
|
|||
|
||||
pub(crate) type ModuleIO = HdlLet<HdlLetKindIO>;
|
||||
|
||||
struct ModuleFnModule {
|
||||
pub(crate) struct ModuleFn {
|
||||
attrs: Vec<Attribute>,
|
||||
config_options: HdlAttr<ConfigOptions, kw::hdl_module>,
|
||||
config_options: HdlAttr<ConfigOptions>,
|
||||
module_kind: ModuleKind,
|
||||
vis: Visibility,
|
||||
sig: Signature,
|
||||
|
@ -70,26 +70,6 @@ struct ModuleFnModule {
|
|||
the_struct: TokenStream,
|
||||
}
|
||||
|
||||
enum ModuleFnImpl {
|
||||
Module(ModuleFnModule),
|
||||
Fn {
|
||||
attrs: Vec<Attribute>,
|
||||
config_options: HdlAttr<ConfigOptions, kw::hdl>,
|
||||
vis: Visibility,
|
||||
sig: Signature,
|
||||
block: Box<Block>,
|
||||
},
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum HdlOrHdlModule {
|
||||
Hdl(hdl),
|
||||
HdlModule(hdl_module),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ModuleFn(ModuleFnImpl);
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
|
||||
pub(crate) enum ModuleKind {
|
||||
Extern,
|
||||
|
@ -109,25 +89,14 @@ impl Visit<'_> for ContainsSkippedIdent<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn config_options(&self) -> ConfigOptions {
|
||||
let (ModuleFnImpl::Module(ModuleFnModule {
|
||||
config_options: HdlAttr { body, .. },
|
||||
..
|
||||
})
|
||||
| ModuleFnImpl::Fn {
|
||||
config_options: HdlAttr { body, .. },
|
||||
..
|
||||
}) = &self.0;
|
||||
body.clone()
|
||||
}
|
||||
pub(crate) fn parse_from_fn(item: ItemFn) -> syn::Result<Self> {
|
||||
impl Parse for ModuleFn {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let ItemFn {
|
||||
mut attrs,
|
||||
vis,
|
||||
mut sig,
|
||||
block,
|
||||
} = item;
|
||||
} = input.parse()?;
|
||||
let Signature {
|
||||
ref constness,
|
||||
ref asyncness,
|
||||
|
@ -142,33 +111,17 @@ impl ModuleFn {
|
|||
ref output,
|
||||
} = sig;
|
||||
let mut errors = Errors::new();
|
||||
let Some(mut config_options) =
|
||||
errors.unwrap_or_default(
|
||||
HdlAttr::<ConfigOptions, HdlOrHdlModule>::parse_and_take_attr(&mut attrs),
|
||||
)
|
||||
else {
|
||||
errors.error(sig.ident, "missing #[hdl] or #[hdl_module] attribute");
|
||||
errors.finish()?;
|
||||
unreachable!();
|
||||
};
|
||||
let config_options = errors
|
||||
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
|
||||
.unwrap_or_default();
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_,
|
||||
} = config_options.body;
|
||||
let module_kind = match (config_options.kw, extern_) {
|
||||
(HdlOrHdlModule::Hdl(_), None) => None,
|
||||
(HdlOrHdlModule::Hdl(_), Some(extern2)) => {
|
||||
config_options.body.extern_ = None;
|
||||
errors.error(
|
||||
extern2.0,
|
||||
"extern can only be used as #[hdl_module(extern)]",
|
||||
);
|
||||
None
|
||||
}
|
||||
(HdlOrHdlModule::HdlModule(_), None) => Some(ModuleKind::Normal),
|
||||
(HdlOrHdlModule::HdlModule(_), Some(_)) => Some(ModuleKind::Extern),
|
||||
let module_kind = match extern_ {
|
||||
Some(_) => ModuleKind::Extern,
|
||||
None => ModuleKind::Normal,
|
||||
};
|
||||
if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
|
||||
for fn_arg in inputs {
|
||||
match fn_arg {
|
||||
FnArg::Receiver(_) => {
|
||||
|
@ -196,24 +149,20 @@ impl ModuleFn {
|
|||
if let Some(abi) = abi {
|
||||
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
|
||||
}
|
||||
}
|
||||
let mut skipped_idents = HashSet::new();
|
||||
let struct_generic_params = generics
|
||||
.params
|
||||
.pairs_mut()
|
||||
.filter_map_pair_value_mut(|v| match v {
|
||||
GenericParam::Lifetime(LifetimeParam { attrs, .. }) => {
|
||||
errors.unwrap_or_default(
|
||||
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
|
||||
);
|
||||
errors
|
||||
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs));
|
||||
None
|
||||
}
|
||||
GenericParam::Type(TypeParam { attrs, ident, .. })
|
||||
| GenericParam::Const(ConstParam { attrs, ident, .. }) => {
|
||||
if errors
|
||||
.unwrap_or_default(
|
||||
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
|
||||
)
|
||||
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs))
|
||||
.is_some()
|
||||
{
|
||||
skipped_idents.insert(ident.clone());
|
||||
|
@ -227,7 +176,6 @@ impl ModuleFn {
|
|||
let struct_where_clause = generics
|
||||
.where_clause
|
||||
.as_mut()
|
||||
.filter(|_| matches!(config_options.kw, HdlOrHdlModule::HdlModule(_)))
|
||||
.map(|where_clause| WhereClause {
|
||||
where_token: where_clause.where_token,
|
||||
predicates: where_clause
|
||||
|
@ -250,8 +198,7 @@ impl ModuleFn {
|
|||
})
|
||||
.collect(),
|
||||
});
|
||||
let struct_generics = if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
|
||||
let mut struct_generics = Generics {
|
||||
let struct_generics = Generics {
|
||||
lt_token: generics.lt_token,
|
||||
params: struct_generic_params,
|
||||
gt_token: generics.gt_token,
|
||||
|
@ -266,10 +213,7 @@ impl ModuleFn {
|
|||
"return type not allowed here",
|
||||
));
|
||||
}
|
||||
errors.ok(ParsedGenerics::parse(&mut struct_generics))
|
||||
} else {
|
||||
Some(ParsedGenerics::default())
|
||||
};
|
||||
let struct_generics = errors.ok(ParsedGenerics::parse(&mut { struct_generics }));
|
||||
let body_results = struct_generics.as_ref().and_then(|struct_generics| {
|
||||
errors.ok(transform_body::transform_body(
|
||||
module_kind,
|
||||
|
@ -280,47 +224,6 @@ impl ModuleFn {
|
|||
errors.finish()?;
|
||||
let struct_generics = struct_generics.unwrap();
|
||||
let (block, io) = body_results.unwrap();
|
||||
let config_options = match config_options {
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw: HdlOrHdlModule::Hdl((kw,)),
|
||||
paren_token,
|
||||
body,
|
||||
} => {
|
||||
debug_assert!(io.is_empty());
|
||||
return Ok(Self(ModuleFnImpl::Fn {
|
||||
attrs,
|
||||
config_options: HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
paren_token,
|
||||
body,
|
||||
},
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
}));
|
||||
}
|
||||
HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw: HdlOrHdlModule::HdlModule((kw,)),
|
||||
paren_token,
|
||||
body,
|
||||
} => HdlAttr {
|
||||
pound_token,
|
||||
style,
|
||||
bracket_token,
|
||||
kw,
|
||||
paren_token,
|
||||
body,
|
||||
},
|
||||
};
|
||||
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
|
||||
|
@ -356,22 +259,7 @@ impl ModuleFn {
|
|||
}
|
||||
};
|
||||
let the_struct = crate::hdl_bundle::hdl_bundle(the_struct)?;
|
||||
Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind: module_kind.unwrap(),
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics,
|
||||
the_struct,
|
||||
})))
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn generate(self) -> TokenStream {
|
||||
let ModuleFnModule {
|
||||
Ok(Self {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind,
|
||||
|
@ -380,28 +268,22 @@ impl ModuleFn {
|
|||
block,
|
||||
struct_generics,
|
||||
the_struct,
|
||||
} = match self.0 {
|
||||
ModuleFnImpl::Module(v) => v,
|
||||
ModuleFnImpl::Fn {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ModuleFn {
|
||||
pub(crate) fn generate(self) -> TokenStream {
|
||||
let Self {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
} => {
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_: _,
|
||||
} = config_options.body;
|
||||
return ItemFn {
|
||||
attrs,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
}
|
||||
.into_token_stream();
|
||||
}
|
||||
};
|
||||
struct_generics,
|
||||
the_struct,
|
||||
} = self;
|
||||
let ConfigOptions {
|
||||
outline_generated: _,
|
||||
extern_: _,
|
||||
|
@ -450,21 +332,12 @@ impl ModuleFn {
|
|||
let fn_name_str = fn_name.to_string();
|
||||
let (_, body_type_generics, _) = body_fn.sig.generics.split_for_impl();
|
||||
let body_turbofish_type_generics = body_type_generics.as_turbofish();
|
||||
let body_lambda = if param_names.is_empty() {
|
||||
quote! {
|
||||
__body #body_turbofish_type_generics
|
||||
}
|
||||
} else {
|
||||
quote! {
|
||||
|m| __body #body_turbofish_type_generics(m, #(#param_names,)*)
|
||||
}
|
||||
};
|
||||
let block = parse_quote! {{
|
||||
#body_fn
|
||||
::fayalite::module::ModuleBuilder::run(
|
||||
#fn_name_str,
|
||||
#module_kind_value,
|
||||
#body_lambda,
|
||||
|m| __body #body_turbofish_type_generics(m, #(#param_names,)*),
|
||||
)
|
||||
}};
|
||||
let outer_fn = ItemFn {
|
||||
|
|
|
@ -34,7 +34,6 @@ options! {
|
|||
Instance(instance),
|
||||
RegBuilder(reg_builder),
|
||||
Wire(wire),
|
||||
IncompleteWire(incomplete_wire),
|
||||
Memory(memory),
|
||||
MemoryArray(memory_array),
|
||||
MemoryWithInit(memory_with_init),
|
||||
|
@ -265,6 +264,11 @@ pub(crate) enum RegBuilderReset {
|
|||
paren: Paren,
|
||||
init_expr: Box<Expr>,
|
||||
},
|
||||
ResetDefault {
|
||||
dot_token: Token![.],
|
||||
reset_default: kw::reset_default,
|
||||
paren: Paren,
|
||||
},
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
|
@ -281,6 +285,11 @@ impl_fold! {
|
|||
paren: Paren,
|
||||
init_expr: Box<Expr>,
|
||||
},
|
||||
ResetDefault {
|
||||
dot_token: Token![.],
|
||||
reset_default: kw::reset_default,
|
||||
paren: Paren,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -302,6 +311,11 @@ impl Parse for RegBuilderReset {
|
|||
paren: parenthesized!(paren_contents in input),
|
||||
init_expr: paren_contents.call(parse_single_fn_arg)?,
|
||||
}),
|
||||
RegBuilderMethod::ResetDefault(reset_default) => Ok(Self::ResetDefault {
|
||||
dot_token,
|
||||
reset_default,
|
||||
paren: parenthesized!(paren_contents in input),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -329,6 +343,15 @@ impl ToTokens for RegBuilderReset {
|
|||
reset.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| init_expr.to_tokens(tokens));
|
||||
}
|
||||
RegBuilderReset::ResetDefault {
|
||||
dot_token,
|
||||
reset_default,
|
||||
paren,
|
||||
} => {
|
||||
dot_token.to_tokens(tokens);
|
||||
reset_default.to_tokens(tokens);
|
||||
paren.surround(tokens, |_| {});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -377,6 +400,8 @@ make_builder_method_enum! {
|
|||
NoReset(no_reset),
|
||||
#[cond = need_reset]
|
||||
Reset(reset),
|
||||
#[cond = need_reset]
|
||||
ResetDefault(reset_default),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -419,13 +444,17 @@ impl HdlLetKindRegBuilder {
|
|||
let mut clock_domain = None;
|
||||
match RegBuilderMethod::parse_dot_prefixed(&input.fork(), true, true)?.1 {
|
||||
RegBuilderMethod::ClockDomain(_) => clock_domain = Some(input.parse()?),
|
||||
RegBuilderMethod::NoReset(_) | RegBuilderMethod::Reset(_) => {}
|
||||
RegBuilderMethod::NoReset(_)
|
||||
| RegBuilderMethod::Reset(_)
|
||||
| RegBuilderMethod::ResetDefault(_) => {}
|
||||
}
|
||||
let reset = input.parse()?;
|
||||
if clock_domain.is_none() {
|
||||
match RegBuilderMethod::parse_dot_prefixed(&input.fork(), true, false)?.1 {
|
||||
RegBuilderMethod::ClockDomain(_) => clock_domain = Some(input.parse()?),
|
||||
RegBuilderMethod::NoReset(_) | RegBuilderMethod::Reset(_) => unreachable!(),
|
||||
RegBuilderMethod::NoReset(_)
|
||||
| RegBuilderMethod::Reset(_)
|
||||
| RegBuilderMethod::ResetDefault(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
|
@ -504,41 +533,6 @@ impl HdlLetKindToTokens for HdlLetKindWire {
|
|||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum LetFnKindIncomplete {
|
||||
IncompleteWire(incomplete_wire),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct HdlLetKindIncomplete {
|
||||
pub(crate) kind: LetFnKindIncomplete,
|
||||
pub(crate) paren: Paren,
|
||||
}
|
||||
|
||||
impl ParseTypes<Self> for HdlLetKindIncomplete {
|
||||
fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result<Self, ParseFailed> {
|
||||
Ok(input.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
struct HdlLetKindIncomplete<> {
|
||||
kind: LetFnKindIncomplete,
|
||||
paren: Paren,
|
||||
}
|
||||
}
|
||||
|
||||
impl HdlLetKindToTokens for HdlLetKindIncomplete {
|
||||
fn ty_to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
|
||||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { kind, paren } = self;
|
||||
kind.to_tokens(tokens);
|
||||
paren.surround(tokens, |_| {});
|
||||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum MemoryFnName {
|
||||
Memory(memory),
|
||||
|
@ -703,7 +697,6 @@ impl HdlLetKindMemory {
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
Wire(HdlLetKindWire),
|
||||
|
@ -713,7 +706,6 @@ pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
|||
impl_fold! {
|
||||
enum HdlLetKind<IOType,> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
Wire(HdlLetKindWire),
|
||||
|
@ -728,9 +720,6 @@ impl<T: ParseTypes<I>, I> ParseTypes<HdlLetKind<I>> for HdlLetKind<T> {
|
|||
) -> Result<Self, ParseFailed> {
|
||||
match input {
|
||||
HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO),
|
||||
HdlLetKind::Incomplete(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete)
|
||||
}
|
||||
HdlLetKind::Instance(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::Instance)
|
||||
}
|
||||
|
@ -882,20 +871,6 @@ impl HdlLetKindParse for HdlLetKind<Type> {
|
|||
ty_expr: paren_contents.call(parse_optional_fn_arg)?,
|
||||
}))
|
||||
}
|
||||
LetFnKind::IncompleteWire(incomplete_wire) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
parsed_ty.1,
|
||||
"type annotation not allowed for incomplete_wire",
|
||||
));
|
||||
}
|
||||
check_empty_m_dot(m_dot, kind)?;
|
||||
let _paren_contents;
|
||||
Ok(Self::Incomplete(HdlLetKindIncomplete {
|
||||
kind: LetFnKindIncomplete::IncompleteWire(incomplete_wire),
|
||||
paren: parenthesized!(_paren_contents in input),
|
||||
}))
|
||||
}
|
||||
LetFnKind::Memory(fn_name) => HdlLetKindMemory::rest_of_parse(
|
||||
input,
|
||||
parsed_ty,
|
||||
|
@ -928,7 +903,6 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn ty_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Wire(v) => v.ty_to_tokens(tokens),
|
||||
|
@ -939,7 +913,6 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Wire(v) => v.expr_to_tokens(tokens),
|
||||
|
@ -952,7 +925,7 @@ with_debug_clone_and_fold! {
|
|||
#[allow(dead_code)]
|
||||
pub(crate) struct HdlLet<Kind = HdlLetKind> {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
pub(crate) hdl_attr: HdlAttr<Nothing>,
|
||||
pub(crate) let_token: Token![let],
|
||||
pub(crate) mut_token: Option<Token![mut]>,
|
||||
pub(crate) name: Ident,
|
||||
|
@ -1109,7 +1082,7 @@ fn parse_quote_let_pat<T, R: ToTokens, C: Borrow<Token![:]>>(
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
parse_quote_spanned! {ty.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
}
|
||||
|
@ -1139,7 +1112,7 @@ impl<T: ToString> ToTokens for ImplicitName<T> {
|
|||
}
|
||||
|
||||
struct Visitor<'a> {
|
||||
module_kind: Option<ModuleKind>,
|
||||
module_kind: ModuleKind,
|
||||
errors: Errors,
|
||||
io: Vec<ModuleIO>,
|
||||
block_depth: usize,
|
||||
|
@ -1147,33 +1120,22 @@ struct Visitor<'a> {
|
|||
}
|
||||
|
||||
impl Visitor<'_> {
|
||||
fn take_hdl_attr<T: Parse>(
|
||||
&mut self,
|
||||
attrs: &mut Vec<Attribute>,
|
||||
) -> Option<HdlAttr<T, kw::hdl>> {
|
||||
fn take_hdl_attr<T: Parse>(&mut self, attrs: &mut Vec<Attribute>) -> Option<HdlAttr<T>> {
|
||||
self.errors.unwrap_or(
|
||||
HdlAttr::parse_and_take_attr(attrs),
|
||||
Some(syn::parse2::<T>(quote! {}).unwrap().into()),
|
||||
)
|
||||
}
|
||||
fn require_normal_module_or_fn(&mut self, spanned: impl ToTokens) {
|
||||
fn require_normal_module(&mut self, spanned: impl ToTokens) {
|
||||
match self.module_kind {
|
||||
Some(ModuleKind::Extern) => {
|
||||
ModuleKind::Extern => {
|
||||
self.errors
|
||||
.error(spanned, "not allowed in #[hdl_module(extern)]");
|
||||
}
|
||||
Some(ModuleKind::Normal) | None => {}
|
||||
ModuleKind::Normal => {}
|
||||
}
|
||||
}
|
||||
fn require_module(&mut self, spanned: impl ToTokens) {
|
||||
match self.module_kind {
|
||||
None => {
|
||||
self.errors.error(spanned, "not allowed in #[hdl] fn");
|
||||
}
|
||||
Some(_) => {}
|
||||
}
|
||||
}
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<Nothing, kw::hdl>, expr_if: ExprIf) -> Expr {
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<Nothing>, expr_if: ExprIf) -> Expr {
|
||||
let ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
|
@ -1181,7 +1143,7 @@ impl Visitor<'_> {
|
|||
then_branch,
|
||||
else_branch,
|
||||
} = expr_if;
|
||||
self.require_normal_module_or_fn(if_token);
|
||||
self.require_normal_module(if_token);
|
||||
let else_expr = else_branch.unzip().1.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => self.process_hdl_if(hdl_attr.clone(), expr_if),
|
||||
expr => expr,
|
||||
|
@ -1246,12 +1208,11 @@ impl Visitor<'_> {
|
|||
.to_tokens(expr);
|
||||
});
|
||||
let mut attrs = hdl_let.attrs.clone();
|
||||
self.require_module(kind);
|
||||
match self.module_kind {
|
||||
Some(ModuleKind::Extern) => attrs.push(parse_quote_spanned! {hdl_let.let_token.span=>
|
||||
ModuleKind::Extern => attrs.push(parse_quote_spanned! {hdl_let.let_token.span=>
|
||||
#[allow(unused_variables)]
|
||||
}),
|
||||
Some(ModuleKind::Normal) | None => {}
|
||||
ModuleKind::Normal => {}
|
||||
}
|
||||
let let_stmt = Local {
|
||||
attrs,
|
||||
|
@ -1288,7 +1249,7 @@ impl Visitor<'_> {
|
|||
},
|
||||
semi_token,
|
||||
} = hdl_let;
|
||||
self.require_normal_module_or_fn(instance);
|
||||
self.require_normal_module(instance);
|
||||
let mut expr = instance.to_token_stream();
|
||||
paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
|
@ -1315,7 +1276,7 @@ impl Visitor<'_> {
|
|||
fn process_hdl_let_reg_builder(&mut self, hdl_let: HdlLet<HdlLetKindRegBuilder>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let reg_builder = hdl_let.kind.reg_builder;
|
||||
self.require_normal_module_or_fn(reg_builder);
|
||||
self.require_normal_module(reg_builder);
|
||||
let mut expr = reg_builder.to_token_stream();
|
||||
hdl_let.kind.reg_builder_paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
|
@ -1340,7 +1301,7 @@ impl Visitor<'_> {
|
|||
no_reset.to_tokens(&mut expr);
|
||||
paren.surround(&mut expr, |expr| ty_expr.to_tokens(expr));
|
||||
}
|
||||
RegBuilderReset::Reset { .. } => {
|
||||
RegBuilderReset::Reset { .. } | RegBuilderReset::ResetDefault { .. } => {
|
||||
hdl_let.kind.reset.to_tokens(&mut expr);
|
||||
}
|
||||
}
|
||||
|
@ -1366,7 +1327,7 @@ impl Visitor<'_> {
|
|||
fn process_hdl_let_wire(&mut self, hdl_let: HdlLet<HdlLetKindWire>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let wire = hdl_let.kind.wire;
|
||||
self.require_normal_module_or_fn(wire);
|
||||
self.require_normal_module(wire);
|
||||
let ty_expr = unwrap_or_static_type(hdl_let.kind.ty_expr.as_ref(), wire.span());
|
||||
let mut expr = wire.to_token_stream();
|
||||
hdl_let.kind.paren.surround(&mut expr, |expr| {
|
||||
|
@ -1396,36 +1357,11 @@ impl Visitor<'_> {
|
|||
semi_token: hdl_let.semi_token,
|
||||
}
|
||||
}
|
||||
fn process_hdl_let_incomplete(&mut self, hdl_let: HdlLet<HdlLetKindIncomplete>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let kind = hdl_let.kind.kind;
|
||||
self.require_normal_module_or_fn(kind);
|
||||
let mut expr = kind.to_token_stream();
|
||||
hdl_let.kind.paren.surround(&mut expr, |expr| {
|
||||
ImplicitName {
|
||||
name,
|
||||
span: name.span(),
|
||||
}
|
||||
.to_tokens(expr);
|
||||
});
|
||||
let mut_token = &hdl_let.mut_token;
|
||||
Local {
|
||||
attrs: hdl_let.attrs.clone(),
|
||||
let_token: hdl_let.let_token,
|
||||
pat: parse_quote! { #mut_token #name },
|
||||
init: Some(LocalInit {
|
||||
eq_token: hdl_let.eq_token,
|
||||
expr: parse_quote! { #expr },
|
||||
diverge: None,
|
||||
}),
|
||||
semi_token: hdl_let.semi_token,
|
||||
}
|
||||
}
|
||||
fn process_hdl_let_memory(&mut self, hdl_let: HdlLet<HdlLetKindMemory>) -> Local {
|
||||
let name = &hdl_let.name;
|
||||
let memory_fn = hdl_let.kind.memory_fn;
|
||||
let memory_fn_name = memory_fn.name();
|
||||
self.require_normal_module_or_fn(memory_fn_name);
|
||||
self.require_normal_module(memory_fn_name);
|
||||
let mut expr = memory_fn_name.to_token_stream();
|
||||
let (paren, arg) = match memory_fn {
|
||||
MemoryFn::Memory {
|
||||
|
@ -1490,7 +1426,6 @@ impl Visitor<'_> {
|
|||
}
|
||||
the_match! {
|
||||
IO => process_hdl_let_io,
|
||||
Incomplete => process_hdl_let_incomplete,
|
||||
Instance => process_hdl_let_instance,
|
||||
RegBuilder => process_hdl_let_reg_builder,
|
||||
Wire => process_hdl_let_wire,
|
||||
|
@ -1586,7 +1521,7 @@ impl Visitor<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) fn empty_let() -> Local {
|
||||
fn empty_let() -> Local {
|
||||
Local {
|
||||
attrs: vec![],
|
||||
let_token: Default::default(),
|
||||
|
@ -1608,7 +1543,7 @@ impl Fold for Visitor<'_> {
|
|||
}
|
||||
|
||||
fn fold_attribute(&mut self, attr: Attribute) -> Attribute {
|
||||
if is_hdl_attr::<kw::hdl>(&attr) {
|
||||
if is_hdl_attr(&attr) {
|
||||
self.errors
|
||||
.error(&attr, "#[hdl] attribute not supported here");
|
||||
}
|
||||
|
@ -1672,35 +1607,15 @@ impl Fold for Visitor<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
fn fold_local(&mut self, let_stmt: Local) -> Local {
|
||||
match self
|
||||
.errors
|
||||
.ok(HdlAttr::<Nothing, kw::hdl>::parse_and_leave_attr(
|
||||
&let_stmt.attrs,
|
||||
)) {
|
||||
.ok(HdlAttr::<Nothing>::parse_and_leave_attr(&let_stmt.attrs))
|
||||
{
|
||||
None => return empty_let(),
|
||||
Some(None) => return fold_local(self, let_stmt),
|
||||
Some(Some(HdlAttr { .. })) => {}
|
||||
};
|
||||
let mut pat = &let_stmt.pat;
|
||||
if let Pat::Type(pat_type) = pat {
|
||||
pat = &pat_type.pat;
|
||||
}
|
||||
let Pat::Ident(syn::PatIdent {
|
||||
attrs: _,
|
||||
by_ref: None,
|
||||
mutability: _,
|
||||
ident: _,
|
||||
subpat: None,
|
||||
}) = pat
|
||||
else {
|
||||
let hdl_attr = HdlAttr::<Nothing, kw::hdl>::parse_and_take_attr(&mut let_stmt.attrs)
|
||||
.ok()
|
||||
.flatten()
|
||||
.expect("already checked above");
|
||||
let let_stmt = fold_local(self, let_stmt);
|
||||
return self.process_hdl_let_pat(hdl_attr, let_stmt);
|
||||
};
|
||||
let hdl_let = syn::parse2::<HdlLet<HdlLetKind<Type>>>(let_stmt.into_token_stream());
|
||||
let Some(hdl_let) = self.errors.ok(hdl_let) else {
|
||||
return empty_let();
|
||||
|
@ -1731,7 +1646,7 @@ impl Fold for Visitor<'_> {
|
|||
}
|
||||
|
||||
pub(crate) fn transform_body(
|
||||
module_kind: Option<ModuleKind>,
|
||||
module_kind: ModuleKind,
|
||||
mut body: Box<Block>,
|
||||
parsed_generics: &ParsedGenerics,
|
||||
) -> syn::Result<(Box<Block>, Vec<ModuleIO>)> {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{kw, module::transform_body::Visitor, HdlAttr};
|
||||
use crate::{module::transform_body::Visitor, HdlAttr};
|
||||
use quote::{format_ident, quote_spanned};
|
||||
use syn::{
|
||||
parse::Nothing, parse_quote, parse_quote_spanned, spanned::Spanned, Expr, ExprArray, ExprPath,
|
||||
|
@ -10,10 +10,10 @@ use syn::{
|
|||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_array(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
mut expr_array: ExprArray,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
self.require_normal_module(hdl_attr);
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
|
@ -23,10 +23,10 @@ impl Visitor<'_> {
|
|||
}
|
||||
pub(crate) fn process_hdl_repeat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
mut expr_repeat: ExprRepeat,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
self.require_normal_module(hdl_attr);
|
||||
let repeated_value = &expr_repeat.expr;
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
|
@ -35,10 +35,10 @@ impl Visitor<'_> {
|
|||
}
|
||||
pub(crate) fn process_hdl_struct(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
expr_struct: ExprStruct,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(&hdl_attr);
|
||||
self.require_normal_module(&hdl_attr);
|
||||
let name_span = expr_struct.path.segments.last().unwrap().ident.span();
|
||||
let builder_ident = format_ident!("__builder", span = name_span);
|
||||
let empty_builder = if expr_struct.qself.is_some()
|
||||
|
@ -91,10 +91,10 @@ impl Visitor<'_> {
|
|||
}
|
||||
pub(crate) fn process_hdl_tuple(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing>,
|
||||
expr_tuple: ExprTuple,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
self.require_normal_module(hdl_attr);
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
|
|
|
@ -2,112 +2,22 @@
|
|||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
fold::{impl_fold, DoFold},
|
||||
kw,
|
||||
module::transform_body::{empty_let, with_debug_clone_and_fold, wrap_ty_with_expr, Visitor},
|
||||
module::transform_body::{with_debug_clone_and_fold, Visitor},
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
|
||||
use std::collections::BTreeSet;
|
||||
use syn::{
|
||||
fold::{fold_arm, fold_expr_match, fold_local, fold_pat, Fold},
|
||||
fold::{fold_arm, fold_expr_match, fold_pat, Fold},
|
||||
parse::Nothing,
|
||||
parse_quote_spanned,
|
||||
punctuated::Punctuated,
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren},
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Local, Member, Pat, PatIdent, PatOr,
|
||||
PatParen, PatPath, PatRest, PatStruct, PatTuple, PatTupleStruct, PatWild, Path, PathSegment,
|
||||
Token, TypePath,
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Member, Pat, PatIdent, PatOr, PatParen,
|
||||
PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, PathSegment, Token, TypePath,
|
||||
};
|
||||
|
||||
macro_rules! visit_trait {
|
||||
(
|
||||
$($vis:vis fn $fn:ident($state:ident: _, $value:ident: &$Value:ty) $block:block)*
|
||||
) => {
|
||||
trait VisitMatchPat<'a> {
|
||||
$(fn $fn(&mut self, $value: &'a $Value) {
|
||||
$fn(self, $value);
|
||||
})*
|
||||
}
|
||||
|
||||
$($vis fn $fn<'a>($state: &mut (impl ?Sized + VisitMatchPat<'a>), $value: &'a $Value) $block)*
|
||||
};
|
||||
}
|
||||
|
||||
visit_trait! {
|
||||
fn visit_match_pat_binding(_state: _, v: &MatchPatBinding) {
|
||||
let MatchPatBinding { ident: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_wild(_state: _, v: &MatchPatWild) {
|
||||
let MatchPatWild { underscore_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_rest(_state: _, v: &MatchPatRest) {
|
||||
let MatchPatRest { dot2_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_paren(state: _, v: &MatchPatParen<MatchPat>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat(pat);
|
||||
}
|
||||
fn visit_match_pat_paren_simple(state: _, v: &MatchPatParen<MatchPatSimple>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_or(state: _, v: &MatchPatOr<MatchPat>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_or_simple(state: _, v: &MatchPatOr<MatchPatSimple>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_struct_field(state: _, v: &MatchPatStructField) {
|
||||
let MatchPatStructField { field_name: _, colon_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_struct(state: _, v: &MatchPatStruct) {
|
||||
let MatchPatStruct { match_span: _, path: _, brace_token: _, fields, rest: _ } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_struct_field(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_tuple(state: _, v: &MatchPatTuple) {
|
||||
let MatchPatTuple { paren_token: _, fields } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_enum_variant(state: _, v: &MatchPatEnumVariant) {
|
||||
let MatchPatEnumVariant {match_span:_, variant_path: _, enum_path: _, variant_name: _, field } = v;
|
||||
if let Some((_, v)) = field {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_simple(state: _, v: &MatchPatSimple) {
|
||||
match v {
|
||||
MatchPatSimple::Paren(v) => state.visit_match_pat_paren_simple(v),
|
||||
MatchPatSimple::Or(v) => state.visit_match_pat_or_simple(v),
|
||||
MatchPatSimple::Binding(v) => state.visit_match_pat_binding(v),
|
||||
MatchPatSimple::Wild(v) => state.visit_match_pat_wild(v),
|
||||
MatchPatSimple::Rest(v) => state.visit_match_pat_rest(v),
|
||||
}
|
||||
}
|
||||
fn visit_match_pat(state: _, v: &MatchPat) {
|
||||
match v {
|
||||
MatchPat::Simple(v) => state.visit_match_pat_simple(v),
|
||||
MatchPat::Or(v) => state.visit_match_pat_or(v),
|
||||
MatchPat::Paren(v) => state.visit_match_pat_paren(v),
|
||||
MatchPat::Struct(v) => state.visit_match_pat_struct(v),
|
||||
MatchPat::Tuple(v) => state.visit_match_pat_tuple(v),
|
||||
MatchPat::EnumVariant(v) => state.visit_match_pat_enum_variant(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatBinding<> {
|
||||
ident: Ident,
|
||||
|
@ -142,15 +52,6 @@ with_debug_clone_and_fold! {
|
|||
}
|
||||
}
|
||||
|
||||
impl<P> MatchPatOr<P> {
|
||||
/// returns the first `|` between two patterns
|
||||
fn first_inner_vert(&self) -> Option<Token![|]> {
|
||||
let mut pairs = self.cases.pairs();
|
||||
pairs.next_back();
|
||||
pairs.next().and_then(|v| v.into_tuple().1.copied())
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ToTokens> ToTokens for MatchPatOr<P> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
|
@ -175,19 +76,6 @@ impl ToTokens for MatchPatWild {
|
|||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatRest<> {
|
||||
dot2_token: Token![..],
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatRest {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { dot2_token } = self;
|
||||
dot2_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatStructField<> {
|
||||
field_name: Ident,
|
||||
|
@ -270,25 +158,6 @@ impl ToTokens for MatchPatStruct {
|
|||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatTuple<> {
|
||||
paren_token: Paren,
|
||||
fields: Punctuated<MatchPatSimple, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
paren_token,
|
||||
fields,
|
||||
} = self;
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
fields.to_tokens(tokens);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatEnumVariant<> {
|
||||
match_span: Span,
|
||||
|
@ -324,7 +193,6 @@ enum MatchPatSimple {
|
|||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
|
@ -333,7 +201,6 @@ impl_fold! {
|
|||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -344,7 +211,6 @@ impl ToTokens for MatchPatSimple {
|
|||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Binding(v) => v.to_tokens(tokens),
|
||||
Self::Wild(v) => v.to_tokens(tokens),
|
||||
Self::Rest(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -411,7 +277,6 @@ trait ParseMatchPat: Sized {
|
|||
fn or(v: MatchPatOr<Self>) -> Self;
|
||||
fn paren(v: MatchPatParen<Self>) -> Self;
|
||||
fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result<Self, ()>;
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()>;
|
||||
fn enum_variant(state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant)
|
||||
-> Result<Self, ()>;
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result<Self, ()> {
|
||||
|
@ -596,34 +461,7 @@ trait ParseMatchPat: Sized {
|
|||
}) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild {
|
||||
underscore_token,
|
||||
}))),
|
||||
Pat::Tuple(PatTuple {
|
||||
attrs: _,
|
||||
paren_token,
|
||||
elems,
|
||||
}) => {
|
||||
let fields = elems
|
||||
.into_pairs()
|
||||
.filter_map_pair_value(|field_pat| {
|
||||
if let Pat::Rest(PatRest {
|
||||
attrs: _,
|
||||
dot2_token,
|
||||
}) = field_pat
|
||||
{
|
||||
Some(MatchPatSimple::Rest(MatchPatRest { dot2_token }))
|
||||
} else {
|
||||
MatchPatSimple::parse(state, field_pat).ok()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
Self::tuple(
|
||||
state,
|
||||
MatchPatTuple {
|
||||
paren_token,
|
||||
fields,
|
||||
},
|
||||
)
|
||||
}
|
||||
Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
state
|
||||
.errors
|
||||
.error(pat, "not yet implemented in #[hdl] patterns");
|
||||
|
@ -658,14 +496,6 @@ impl ParseMatchPat for MatchPatSimple {
|
|||
Err(())
|
||||
}
|
||||
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
state.errors.push(syn::Error::new(
|
||||
v.paren_token.span.open(),
|
||||
"matching tuples is not yet implemented inside structs/enums in #[hdl] patterns",
|
||||
));
|
||||
Err(())
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
|
@ -684,7 +514,6 @@ enum MatchPat {
|
|||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
|
||||
|
@ -694,7 +523,6 @@ impl_fold! {
|
|||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
}
|
||||
|
@ -716,10 +544,6 @@ impl ParseMatchPat for MatchPat {
|
|||
Ok(Self::Struct(v))
|
||||
}
|
||||
|
||||
fn tuple(_state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
Ok(Self::Tuple(v))
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
_state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
|
@ -735,7 +559,6 @@ impl ToTokens for MatchPat {
|
|||
Self::Or(v) => v.to_tokens(tokens),
|
||||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Struct(v) => v.to_tokens(tokens),
|
||||
Self::Tuple(v) => v.to_tokens(tokens),
|
||||
Self::EnumVariant(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
|
@ -798,6 +621,10 @@ struct RewriteAsCheckMatch {
|
|||
}
|
||||
|
||||
impl Fold for RewriteAsCheckMatch {
|
||||
fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat {
|
||||
i.colon_token = Some(Token![:](self.span));
|
||||
i
|
||||
}
|
||||
fn fold_pat(&mut self, pat: Pat) -> Pat {
|
||||
match pat {
|
||||
Pat::Ident(mut pat_ident) => match parse_enum_ident(pat_ident.ident) {
|
||||
|
@ -912,30 +739,6 @@ impl Fold for RewriteAsCheckMatch {
|
|||
// don't recurse into expressions
|
||||
i
|
||||
}
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
if let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: _,
|
||||
diverge,
|
||||
}) = let_stmt.init.take()
|
||||
{
|
||||
let_stmt.init = Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote_spanned! {self.span=>
|
||||
__match_value
|
||||
},
|
||||
diverge: diverge.map(|(else_, _expr)| {
|
||||
(
|
||||
else_,
|
||||
parse_quote_spanned! {self.span=>
|
||||
match __infallible {}
|
||||
},
|
||||
)
|
||||
}),
|
||||
});
|
||||
}
|
||||
fold_local(self, let_stmt)
|
||||
}
|
||||
}
|
||||
|
||||
struct HdlMatchParseState<'a> {
|
||||
|
@ -943,126 +746,10 @@ struct HdlMatchParseState<'a> {
|
|||
errors: &'a mut Errors,
|
||||
}
|
||||
|
||||
struct HdlLetPatVisitState<'a> {
|
||||
errors: &'a mut Errors,
|
||||
bindings: BTreeSet<&'a Ident>,
|
||||
}
|
||||
|
||||
impl<'a> VisitMatchPat<'a> for HdlLetPatVisitState<'a> {
|
||||
fn visit_match_pat_binding(&mut self, v: &'a MatchPatBinding) {
|
||||
self.bindings.insert(&v.ident);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or(&mut self, v: &'a MatchPatOr<MatchPat>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or_simple(&mut self, v: &'a MatchPatOr<MatchPatSimple>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or_simple(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_enum_variant(&mut self, v: &'a MatchPatEnumVariant) {
|
||||
self.errors.error(v, "refutable pattern in let statement");
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_let_pat(
|
||||
&mut self,
|
||||
_hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
mut let_stmt: Local,
|
||||
) -> Local {
|
||||
let span = let_stmt.let_token.span();
|
||||
if let Pat::Type(pat) = &mut let_stmt.pat {
|
||||
*pat.ty = wrap_ty_with_expr((*pat.ty).clone());
|
||||
}
|
||||
let check_let_stmt = RewriteAsCheckMatch { span }.fold_local(let_stmt.clone());
|
||||
let Local {
|
||||
attrs: _,
|
||||
let_token,
|
||||
pat,
|
||||
init,
|
||||
semi_token,
|
||||
} = let_stmt;
|
||||
self.require_normal_module_or_fn(let_token);
|
||||
let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr,
|
||||
diverge,
|
||||
}) = init
|
||||
else {
|
||||
self.errors
|
||||
.error(let_token, "#[hdl] let must be assigned a value");
|
||||
return empty_let();
|
||||
};
|
||||
if let Some((else_, _)) = diverge {
|
||||
// TODO: implement let-else
|
||||
self.errors
|
||||
.error(else_, "#[hdl] let ... else { ... } is not implemented");
|
||||
return empty_let();
|
||||
}
|
||||
let Ok(pat) = MatchPat::parse(
|
||||
&mut HdlMatchParseState {
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
},
|
||||
pat,
|
||||
) else {
|
||||
return empty_let();
|
||||
};
|
||||
let mut state = HdlLetPatVisitState {
|
||||
errors: &mut self.errors,
|
||||
bindings: BTreeSet::new(),
|
||||
};
|
||||
state.visit_match_pat(&pat);
|
||||
let HdlLetPatVisitState {
|
||||
errors: _,
|
||||
bindings,
|
||||
} = state;
|
||||
let retval = parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)* __scope,) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_let_stmt
|
||||
match __infallible {}
|
||||
});
|
||||
let mut __match_iter = ::fayalite::module::match_(__match_expr);
|
||||
let ::fayalite::__std::option::Option::Some(__match_variant) = ::fayalite::__std::iter::Iterator::next(&mut __match_iter) else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with uninhabited type");
|
||||
};
|
||||
let ::fayalite::__std::option::Option::None = ::fayalite::__std::iter::Iterator::next(&mut __match_iter) else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with refutable pattern");
|
||||
};
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#let_token #pat #eq_token __match_variant #semi_token
|
||||
(#(#bindings,)* __scope,)
|
||||
};
|
||||
};
|
||||
match retval {
|
||||
syn::Stmt::Local(retval) => retval,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_match(
|
||||
&mut self,
|
||||
_hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
_hdl_attr: HdlAttr<Nothing>,
|
||||
expr_match: ExprMatch,
|
||||
) -> Expr {
|
||||
let span = expr_match.match_token.span();
|
||||
|
@ -1074,7 +761,7 @@ impl Visitor<'_> {
|
|||
brace_token: _,
|
||||
arms,
|
||||
} = expr_match;
|
||||
self.require_normal_module_or_fn(match_token);
|
||||
self.require_normal_module(match_token);
|
||||
let mut state = HdlMatchParseState {
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
|
|
File diff suppressed because it is too large
|
@ -16,4 +16,4 @@ version.workspace = true
|
|||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
fayalite-proc-macros-impl.workspace = true
|
||||
fayalite-proc-macros-impl = { workspace = true }
|
||||
|
|
|
@ -13,11 +13,11 @@ rust-version.workspace = true
|
|||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
indexmap.workspace = true
|
||||
prettyplease.workspace = true
|
||||
proc-macro2.workspace = true
|
||||
quote.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
syn.workspace = true
|
||||
thiserror.workspace = true
|
||||
indexmap = { workspace = true }
|
||||
prettyplease = { workspace = true }
|
||||
proc-macro2 = { workspace = true }
|
||||
quote = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
syn = { workspace = true }
|
||||
thiserror = { workspace = true }
|
||||
|
|
|
@ -14,29 +14,22 @@ rust-version.workspace = true
|
|||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
bitvec.workspace = true
|
||||
blake3.workspace = true
|
||||
clap.workspace = true
|
||||
ctor.workspace = true
|
||||
eyre.workspace = true
|
||||
fayalite-proc-macros.workspace = true
|
||||
hashbrown.workspace = true
|
||||
jobslot.workspace = true
|
||||
num-bigint.workspace = true
|
||||
num-traits.workspace = true
|
||||
os_pipe.workspace = true
|
||||
petgraph.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde.workspace = true
|
||||
tempfile.workspace = true
|
||||
vec_map.workspace = true
|
||||
which.workspace = true
|
||||
bitvec = { workspace = true }
|
||||
hashbrown = { workspace = true }
|
||||
num-bigint = { workspace = true }
|
||||
num-traits = { workspace = true }
|
||||
fayalite-proc-macros = { workspace = true }
|
||||
serde = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
clap = { version = "4.5.9", features = ["derive", "env"] }
|
||||
eyre = "0.6.12"
|
||||
which = "6.0.1"
|
||||
|
||||
[dev-dependencies]
|
||||
trybuild.workspace = true
|
||||
trybuild = { workspace = true }
|
||||
|
||||
[build-dependencies]
|
||||
fayalite-visit-gen.workspace = true
|
||||
fayalite-visit-gen = { workspace = true }
|
||||
|
||||
[features]
|
||||
unstable-doc = []
|
||||
|
|
|
@ -5,9 +5,6 @@ use std::{env, fs, path::Path};
|
|||
|
||||
fn main() {
|
||||
println!("cargo::rustc-check-cfg=cfg(todo)");
|
||||
println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)");
|
||||
println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)");
|
||||
println!("cargo::rustc-cfg=cfg_true_for_tests");
|
||||
let path = "visit_types.json";
|
||||
println!("cargo::rerun-if-changed={path}");
|
||||
println!("cargo::rerun-if-changed=build.rs");
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use clap::Parser;
|
||||
use fayalite::{cli, prelude::*};
|
||||
|
||||
|
@ -17,15 +15,15 @@ fn blinky(clock_frequency: u64) {
|
|||
let max_value = clock_frequency / 2 - 1;
|
||||
let int_ty = UInt::range_inclusive(0..=max_value);
|
||||
#[hdl]
|
||||
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
|
||||
let counter: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
|
||||
#[hdl]
|
||||
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
|
||||
#[hdl]
|
||||
if counter_reg.cmp_eq(max_value) {
|
||||
connect_any(counter_reg, 0u8);
|
||||
if counter.cmp_eq(max_value) {
|
||||
connect_any(counter, 0u8);
|
||||
connect(output_reg, !output_reg);
|
||||
} else {
|
||||
connect_any(counter_reg, counter_reg + 1_hdl_u1);
|
||||
connect_any(counter, counter + 1_hdl_u1);
|
||||
}
|
||||
#[hdl]
|
||||
let led: Bool = m.output();
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![doc = include_str!("../README.md")]
|
||||
|
||||
//!
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Fayalite Modules
|
||||
//!
|
||||
//! The [`#[hdl_module]`][`crate::hdl_module`] attribute is applied to a Rust
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! These are for when you want to use modules written in
|
||||
//! some other language, such as Verilog.
|
||||
//!
|
||||
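The hunk above cuts off before any example; as a rough sketch only, based on the `#[hdl_module(extern)]` form used elsewhere in this diff, an extern module declares its ports but provides no body logic. The module and port names here are made up, and real extern modules may need extra setup to point at the external Verilog source:

```rust
use fayalite::prelude::*;

#[hdl_module(extern)]
fn my_verilog_adder() {
    #[hdl]
    let a: UInt<8> = m.input();
    #[hdl]
    let b: UInt<8> = m.input();
    #[hdl]
    let sum: UInt<8> = m.output();
    // no logic here: the implementation is supplied by the external (e.g. Verilog) module
}
```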
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Module Function Bodies
|
||||
//!
|
||||
//! The `#[hdl_module]` attribute lets you have statements/expressions with `#[hdl]` annotations
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl]` Array Expressions
|
||||
//!
|
||||
//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][type@Array] expression:
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl] if` Statements
|
||||
//!
|
||||
//! `#[hdl] if` statements behave similarly to Rust `if` statements, except they end up as muxes
|
||||
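As a small sketch in the same style as the destructuring example later in this diff (the port names are made up): each branch just issues `connect` calls, and the macro lowers the whole statement to a mux.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn my_module() {
    #[hdl]
    let en: Bool = m.input();
    #[hdl]
    let data: UInt<8> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    #[hdl]
    if en {
        connect(out, data);
    } else {
        connect(out, 0_hdl_u8);
    }
}
```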
|
|
|
@ -1,8 +1,5 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ## `#[hdl] let` statements
|
||||
|
||||
pub mod destructuring;
|
||||
pub mod inputs_outputs;
|
||||
pub mod instances;
|
||||
pub mod memories;
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Destructuring Let
|
||||
//!
|
||||
//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns.
|
||||
//!
|
||||
//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now,
|
||||
//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`.
|
||||
//!
|
||||
//! ```
|
||||
//! # use fayalite::prelude::*;
|
||||
//! #[hdl]
|
||||
//! struct MyStruct {
|
||||
//! a: UInt<8>,
|
||||
//! b: Bool,
|
||||
//! }
|
||||
//!
|
||||
//! #[hdl_module]
|
||||
//! fn my_module() {
|
||||
//! #[hdl]
|
||||
//! let my_input: MyStruct = m.input();
|
||||
//! #[hdl]
|
||||
//! let my_output: UInt<8> = m.input();
|
||||
//! #[hdl]
|
||||
//! let MyStruct { a, b } = my_input;
|
||||
//! #[hdl]
|
||||
//! if b {
|
||||
//! connect(my_output, a);
|
||||
//! } else {
|
||||
//! connect(my_output, 0_hdl_u8);
|
||||
//! }
|
||||
//! }
|
||||
//! ```
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Inputs/Outputs
|
||||
//!
|
||||
//! Inputs/Outputs create a Rust variable with type [`Expr<T>`] where `T` is the type of the input/output.
|
||||
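A minimal sketch mirroring the other doc examples in this diff (the names are made up): each `m.input()`/`m.output()` call yields an `Expr<T>` bound to an ordinary Rust variable.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn passthrough() {
    #[hdl]
    let data_in: UInt<8> = m.input(); // data_in has type Expr<UInt<8>>
    #[hdl]
    let data_out: UInt<8> = m.output(); // data_out has type Expr<UInt<8>>
    connect(data_out, data_in);
}
```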
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Module Instances
|
||||
//!
|
||||
//! Module instances are kinda like the hardware equivalent of calling a function,
|
||||
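As a rough sketch only: this assumes `instance()` takes the module value returned by calling another `#[hdl_module]` function, and that the instance's IO is reached as fields on the resulting expression. Treat these details as illustrative rather than the crate's documented API.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn top() {
    #[hdl]
    let led: Bool = m.output();
    // `blinky` stands in for another #[hdl_module] fn, like the example earlier in this diff
    #[hdl]
    let blinky_inst = instance(blinky(1_000_000));
    connect(led, blinky_inst.led);
}
```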
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Memories
|
||||
//!
|
||||
//! Memories are optimized for storing large amounts of data.
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Registers
|
||||
//!
|
||||
//! Registers are memory devices that will change their state only on a clock
|
||||
|
@ -9,9 +7,6 @@
|
|||
//!
|
||||
//! Registers follow [connection semantics], which are unlike assignments in software, so be sure to read that section.
|
||||
//!
|
||||
//! By convention, register names end in `_reg` -- this helps you tell which values are written
|
||||
//! immediately or on the next clock edge when connecting to them.
|
||||
//!
|
||||
//! ```
|
||||
//! # use fayalite::prelude::*;
|
||||
//! # #[hdl_module]
|
||||
|
@ -21,11 +16,11 @@
|
|||
//! #[hdl]
|
||||
//! let cd: ClockDomain = m.input();
|
||||
//! #[hdl]
|
||||
//! let my_reg: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8);
|
||||
//! let my_register: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8);
|
||||
//! #[hdl]
|
||||
//! if v {
|
||||
//! // my_reg is only changed when both `v` is set and `cd`'s clock edge occurs.
|
||||
//! connect(my_reg, 0x45_hdl_u8);
|
||||
//! // my_register is only changed when both `v` is set and `cd`'s clock edge occurs.
|
||||
//! connect(my_register, 0x45_hdl_u8);
|
||||
//! }
|
||||
//! # }
|
||||
//! ```
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Wires
|
||||
//!
|
||||
//! Wires are kinda like variables, but unlike registers,
|
||||
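A minimal sketch, assuming the `wire()` form implied by the macro code in this diff (the names are made up): unlike a register, a wire takes its new value immediately, with no clock involved.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn my_module() {
    #[hdl]
    let a: UInt<8> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    #[hdl]
    let w: UInt<8> = wire();
    connect(w, a); // w immediately carries the value of a
    connect(out, w);
}
```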
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `_hdl`-suffixed literals
|
||||
//!
|
||||
//! You can have integer literals with an arbitrary number of bits like so:
|
||||
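The hunk cuts off before the example, but the literal forms used elsewhere in this diff (`1_hdl_u1`, `0_hdl_u8`, `0x45_hdl_u8`) suggest the pattern: the suffix names the width, so `_hdl_u1` is a 1-bit `UInt` and `_hdl_u8` an 8-bit one. A minimal sketch under that reading:

```rust
use fayalite::prelude::*;

#[hdl_module]
fn literals_demo() {
    #[hdl]
    let out: UInt<8> = m.output();
    // 0x45_hdl_u8 is an 8-bit unsigned literal, as used in the registers example above
    connect(out, 0x45_hdl_u8);
}
```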
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl] match` Statements
|
||||
//!
|
||||
//! `#[hdl] match` statements behave similarly to Rust `match` statements, except they end up as muxes
|
||||
|
@ -7,5 +5,5 @@
|
|||
//!
|
||||
//! `#[hdl] match` statements' bodies must evaluate to type `()` for now.
|
||||
//!
|
||||
//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now,
|
||||
//! `#[hdl] match` statements can only match one level of struct/enum pattern for now,
|
||||
//! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`.
|
||||
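A sketch of the shape this describes, assuming an `HdlOption`-style enum with `HdlSome`/`HdlNone` variants (only `HdlSome` is named in the doc, so treat the type and variant names as illustrative):

```rust
use fayalite::prelude::*;

#[hdl_module]
fn my_module() {
    #[hdl]
    let input: HdlOption<UInt<8>> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    #[hdl]
    match input {
        HdlSome(v) => connect(out, v),     // one level of enum pattern
        HdlNone => connect(out, 0_hdl_u8), // each arm's body evaluates to ()
    }
}
```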
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # `#[hdl]` Struct/Variant Expressions
|
||||
//!
|
||||
//! Note: Structs are also known as [Bundles] when used in Fayalite, the Bundle name comes from [FIRRTL].
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Normal Modules
|
||||
//!
|
||||
//! See also: [Extern Modules][`super::extern_module`]
|
||||
|
|
|
@ -1,5 +1,3 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! # Fayalite Semantics
|
||||
//!
|
||||
//! Fayalite's semantics are based on [FIRRTL]. Due to their significance, some of the semantics are also documented here.
|
||||
|
|
|
@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Connection Semantics
//!
//! Fayalite's connection semantics are unlike assignments in software, so be careful!

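The hunk is cut off here, but the pattern this warning is about shows up throughout the `HdlOption` code later in this diff: an unconditional default connection followed by a conditional one, where the textually later connect wins whenever its condition holds (FIRRTL-style last-connect semantics), unlike ordinary software assignment. A minimal sketch with made-up signal names:

```rust
use fayalite::prelude::*;

#[hdl_module]
fn connection_example() {
    #[hdl]
    let sel: Bool = m.input();
    #[hdl]
    let out: UInt<8> = wire();
    // Default connection...
    connect(out, 0_hdl_u8);
    // ...overridden only in cycles where `sel` is set.
    #[hdl]
    if sel {
        connect(out, 0x45_hdl_u8);
    }
}
```
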
@ -8,7 +8,6 @@ use serde::{Deserialize, Serialize};
|
|||
use std::{
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
iter::FusedIterator,
|
||||
ops::Deref,
|
||||
};
|
||||
|
||||
|
@ -119,88 +118,12 @@ pub struct CustomFirrtlAnnotation {
|
|||
pub additional_fields: CustomFirrtlAnnotationFields,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct DontTouchAnnotation;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct SVAttributeAnnotation {
|
||||
pub text: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct BlackBoxInlineAnnotation {
|
||||
pub path: Interned<str>,
|
||||
pub text: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct BlackBoxPathAnnotation {
|
||||
pub path: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
|
||||
pub struct DocStringAnnotation {
|
||||
pub text: Interned<str>,
|
||||
}
|
||||
|
||||
macro_rules! make_annotation_enum {
|
||||
(
|
||||
$(#[$meta:meta])*
|
||||
$vis:vis enum $Annotation:ident {
|
||||
$($Variant:ident($T:ident),)*
|
||||
}
|
||||
) => {
|
||||
$(#[$meta])*
|
||||
$vis enum $Annotation {
|
||||
$($Variant($T),)*
|
||||
}
|
||||
|
||||
$(impl IntoAnnotations for $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for &'_ $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(*self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for &'_ mut $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(*self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for Box<$T> {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(*self)]
|
||||
}
|
||||
})*
|
||||
};
|
||||
}
|
||||
|
||||
make_annotation_enum! {
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum Annotation {
|
||||
DontTouch(DontTouchAnnotation),
|
||||
SVAttribute(SVAttributeAnnotation),
|
||||
BlackBoxInline(BlackBoxInlineAnnotation),
|
||||
BlackBoxPath(BlackBoxPathAnnotation),
|
||||
DocString(DocStringAnnotation),
|
||||
DontTouch,
|
||||
CustomFirrtl(CustomFirrtlAnnotation),
|
||||
}
|
||||
}
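For readability: on the `master` side of this comparison, `make_annotation_enum!` generates `IntoAnnotations` impls (owned, `&`, `&mut`, and `Box`) for every payload type listed in the enum. Instantiating the macro body by hand for `DontTouchAnnotation`, the owned-value impl comes out roughly as the following sketch (not code that exists verbatim in the tree):

```rust
impl IntoAnnotations for DontTouchAnnotation {
    type IntoAnnotations = [Annotation; 1];

    fn into_annotations(self) -> Self::IntoAnnotations {
        [Annotation::DontTouch(self)]
    }
}
```
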
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct TargetedAnnotation {
|
||||
|
@ -264,70 +187,10 @@ impl IntoAnnotations for &'_ mut Annotation {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct IterIntoAnnotations<T: Iterator<Item: IntoAnnotations>> {
|
||||
outer: T,
|
||||
inner: Option<<<T::Item as IntoAnnotations>::IntoAnnotations as IntoIterator>::IntoIter>,
|
||||
}
|
||||
|
||||
impl<T: Iterator<Item: IntoAnnotations>> Iterator for IterIntoAnnotations<T> {
|
||||
type Item = Annotation;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
if let Some(inner) = &mut self.inner {
|
||||
let Some(retval) = inner.next() else {
|
||||
self.inner = None;
|
||||
continue;
|
||||
};
|
||||
return Some(retval);
|
||||
} else {
|
||||
self.inner = Some(self.outer.next()?.into_annotations().into_iter());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
if let (0, Some(0)) = self.outer.size_hint() {
|
||||
self.inner
|
||||
.as_ref()
|
||||
.map(|v| v.size_hint())
|
||||
.unwrap_or((0, Some(0)))
|
||||
} else {
|
||||
(
|
||||
self.inner.as_ref().map(|v| v.size_hint().0).unwrap_or(0),
|
||||
None,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
Self: Sized,
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.inner
|
||||
.into_iter()
|
||||
.chain(self.outer.map(|v| v.into_annotations().into_iter()))
|
||||
.flatten()
|
||||
.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<
|
||||
T: FusedIterator<
|
||||
Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>,
|
||||
>,
|
||||
> FusedIterator for IterIntoAnnotations<T>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: IntoIterator<Item: IntoAnnotations>> IntoAnnotations for T {
|
||||
type IntoAnnotations = IterIntoAnnotations<T::IntoIter>;
|
||||
impl<T: IntoIterator<Item = Annotation>> IntoAnnotations for T {
|
||||
type IntoAnnotations = Self;
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
IterIntoAnnotations {
|
||||
outer: self.into_iter(),
|
||||
inner: None,
|
||||
}
|
||||
self
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,11 +2,8 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
ops::{ArrayIndex, ArrayLiteral, ExprPartialEq},
|
||||
CastToBits, Expr, HdlPartialEq, ReduceBits, ToExpr,
|
||||
},
|
||||
int::{Bool, DynSize, KnownSize, Size, SizeType, DYN_SIZE},
|
||||
expr::{ops::ArrayIndex, Expr, ToExpr},
|
||||
int::{DynSize, KnownSize, Size, SizeType, DYN_SIZE},
|
||||
intern::{Intern, Interned, LazyInterned},
|
||||
module::transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
source_location::SourceLocation,
|
||||
|
@ -88,7 +85,7 @@ impl<T: Type, Len: Size> ArrayType<T, Len> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: KnownSize + Size<SizeType = Len>> ArrayType<T, Len> {
|
||||
impl<T: Type, Len: KnownSize> ArrayType<T, Len> {
|
||||
pub fn new_static(element: T) -> Self {
|
||||
Self::new(element, Len::SizeType::default())
|
||||
}
|
||||
|
@ -190,7 +187,7 @@ impl<T: Type, Len: Size> TypeWithDeref for ArrayType<T, Len> {
|
|||
let base = Expr::as_dyn_array(*this);
|
||||
let base_ty = Expr::ty(base);
|
||||
let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
|
||||
Interned::into_inner(Intern::intern_sized(
|
||||
Interned::<_>::into_inner(Intern::intern_sized(
|
||||
Len::ArrayMatch::<T>::try_from(retval)
|
||||
.ok()
|
||||
.expect("unreachable"),
|
||||
|
@ -205,7 +202,7 @@ impl<T: Type> Index<T> for ArrayWithoutGenerics {
|
|||
type Output = ArrayWithoutLen<T>;
|
||||
|
||||
fn index(&self, element: T) -> &Self::Output {
|
||||
Interned::into_inner(Intern::intern_sized(ArrayWithoutLen { element }))
|
||||
Interned::<_>::into_inner(Intern::intern_sized(ArrayWithoutLen { element }))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -218,39 +215,6 @@ impl<T: Type, L: SizeType> Index<L> for ArrayWithoutLen<T> {
|
|||
type Output = ArrayType<T, L::Size>;
|
||||
|
||||
fn index(&self, len: L) -> &Self::Output {
|
||||
Interned::into_inner(Intern::intern_sized(ArrayType::new(self.element, len)))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: Type, Rhs: Type, Len: Size> ExprPartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
|
||||
where
|
||||
Lhs: ExprPartialEq<Rhs>,
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
|
||||
let lhs_ty = Expr::ty(lhs);
|
||||
let rhs_ty = Expr::ty(rhs);
|
||||
assert_eq!(lhs_ty.len(), rhs_ty.len());
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
(0..lhs_ty.len())
|
||||
.map(|i| Expr::canonical(lhs[i].cmp_eq(rhs[i])))
|
||||
.collect(),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
|
||||
let lhs_ty = Expr::ty(lhs);
|
||||
let rhs_ty = Expr::ty(rhs);
|
||||
assert_eq!(lhs_ty.len(), rhs_ty.len());
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
(0..lhs_ty.len())
|
||||
.map(|i| Expr::canonical(lhs[i].cmp_ne(rhs[i])))
|
||||
.collect(),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
Interned::<_>::into_inner(Intern::intern_sized(ArrayType::new(self.element, len)))
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,20 +2,14 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
|
||||
CastToBits, Expr, ReduceBits, ToExpr,
|
||||
},
|
||||
int::{Bool, DynSize},
|
||||
expr::{ops::BundleLiteral, Expr, ToExpr},
|
||||
intern::{Intern, Interned},
|
||||
sim::{SimValue, ToSimValue},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
impl_match_variant_as_self, CanonicalType, MatchVariantWithoutScope, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref,
|
||||
},
|
||||
};
|
||||
use bitvec::vec::BitVec;
|
||||
use hashbrown::HashMap;
|
||||
use std::{fmt, marker::PhantomData};
|
||||
|
||||
|
@ -150,12 +144,6 @@ impl BundleTypePropertiesBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for BundleTypePropertiesBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Bundle {
|
||||
#[track_caller]
|
||||
pub fn new(fields: Interned<[BundleField]>) -> Self {
|
||||
|
@ -329,19 +317,7 @@ macro_rules! impl_tuple_builder_fields {
|
|||
}
|
||||
|
||||
macro_rules! impl_tuples {
|
||||
(
|
||||
[$({
|
||||
#[
|
||||
num = $num:tt,
|
||||
field = $field:ident,
|
||||
ty = $ty_var:ident: $Ty:ident,
|
||||
lhs = $lhs_var:ident: $Lhs:ident,
|
||||
rhs = $rhs_var:ident: $Rhs:ident
|
||||
]
|
||||
$var:ident: $T:ident
|
||||
})*]
|
||||
[]
|
||||
) => {
|
||||
([$({#[num = $num:literal, field = $field:ident] $var:ident: $T:ident})*] []) => {
|
||||
impl_tuple_builder_fields! {
|
||||
{}
|
||||
[$({
|
||||
|
@ -366,7 +342,6 @@ macro_rules! impl_tuples {
|
|||
std::iter::once(MatchVariantWithoutScope(($(Expr::field(this, stringify!($num)),)*)))
|
||||
}
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
#![allow(clippy::unused_unit)]
|
||||
let ($($var,)*) = self;
|
||||
($($var.mask_type(),)*)
|
||||
}
|
||||
|
@ -375,7 +350,6 @@ macro_rules! impl_tuples {
|
|||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
#![allow(clippy::unused_unit)]
|
||||
let CanonicalType::Bundle(bundle) = canonical_type else {
|
||||
panic!("expected bundle");
|
||||
};
|
||||
|
@ -384,7 +358,7 @@ macro_rules! impl_tuples {
|
|||
};
|
||||
$(let BundleField { name, flipped, ty } = $var;
|
||||
assert_eq!(&*name, stringify!($num));
|
||||
assert!(!flipped);
|
||||
assert_eq!(flipped, false);
|
||||
let $var = $T::from_canonical(ty);)*
|
||||
($($var,)*)
|
||||
}
|
||||
|
@ -403,7 +377,7 @@ macro_rules! impl_tuples {
|
|||
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
|
||||
fn expr_deref(this: &Expr<Self>) -> &Self::MatchVariant {
|
||||
let _ = this;
|
||||
Interned::into_inner(($(Expr::field(*this, stringify!($num)),)*).intern_sized())
|
||||
Interned::<_>::into_inner(($(Expr::field(*this, stringify!($num)),)*).intern_sized())
|
||||
}
|
||||
}
|
||||
impl<$($T: StaticType,)*> StaticType for ($($T,)*) {
|
||||
|
@ -441,102 +415,6 @@ macro_rules! impl_tuples {
|
|||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValue<CanonicalType>,)*> ToSimValue<CanonicalType> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
ToSimValue::<Bundle>::to_sim_value(self, Bundle::from_canonical(ty)).into_canonical()
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value(self, ty: CanonicalType) -> SimValue<CanonicalType>
|
||||
{
|
||||
ToSimValue::<Bundle>::into_sim_value(self, Bundle::from_canonical(ty)).into_canonical()
|
||||
}
|
||||
#[track_caller]
|
||||
fn box_into_sim_value(self: Box<Self>, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
ToSimValue::<Bundle>::box_into_sim_value(self, Bundle::from_canonical(ty)).into_canonical()
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValue<CanonicalType>,)*> ToSimValue<Bundle> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
$(let $var = $var.to_sim_value($ty_var.ty);)*
|
||||
ToSimValue::into_sim_value(($($var,)*), ty)
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value(self, ty: Bundle) -> SimValue<Bundle> {
|
||||
#![allow(unused_mut)]
|
||||
#![allow(clippy::unused_unit)]
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
let mut bits: Option<BitVec> = None;
|
||||
$(let $var = $var.into_sim_value($ty_var.ty);
|
||||
assert_eq!($var.ty(), $ty_var.ty);
|
||||
if !$var.bits().is_empty() {
|
||||
if let Some(bits) = &mut bits {
|
||||
bits.extend_from_bitslice($var.bits());
|
||||
} else {
|
||||
let mut $var = $var.into_bits();
|
||||
$var.reserve(ty.type_properties().bit_width - $var.len());
|
||||
bits = Some($var);
|
||||
}
|
||||
}
|
||||
)*
|
||||
bits.unwrap_or_else(BitVec::new).into_sim_value(ty)
|
||||
}
|
||||
#[track_caller]
|
||||
fn box_into_sim_value(self: Box<Self>, ty: Bundle) -> SimValue<Bundle> {
|
||||
Self::into_sim_value(*self, ty)
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValue<$Ty>, $Ty: Type,)*> ToSimValue<($($Ty,)*)> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.to_sim_value($ty_var).into_canonical();)*
|
||||
SimValue::from_canonical(ToSimValue::into_sim_value(($($var,)*), ty.canonical()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.into_sim_value($ty_var).into_canonical();)*
|
||||
SimValue::from_canonical(ToSimValue::into_sim_value(($($var,)*), ty.canonical()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn box_into_sim_value(self: Box<Self>, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
Self::into_sim_value(*self, ty)
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: Type + ExprPartialEq<$Rhs>, $Rhs: Type,)*> ExprPartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_eq($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_ne($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
}
|
||||
}
|
||||
};
|
||||
([$($lhs:tt)*] [$rhs_first:tt $($rhs:tt)*]) => {
|
||||
impl_tuples!([$($lhs)*] []);
|
||||
|
@ -546,123 +424,17 @@ macro_rules! impl_tuples {
|
|||
|
||||
impl_tuples! {
|
||||
[] [
|
||||
{#[num = 0, field = field_0, ty = ty0: Ty0, lhs = lhs0: Lhs0, rhs = rhs0: Rhs0] v0: T0}
|
||||
{#[num = 1, field = field_1, ty = ty1: Ty1, lhs = lhs1: Lhs1, rhs = rhs1: Rhs1] v1: T1}
|
||||
{#[num = 2, field = field_2, ty = ty2: Ty2, lhs = lhs2: Lhs2, rhs = rhs2: Rhs2] v2: T2}
|
||||
{#[num = 3, field = field_3, ty = ty3: Ty3, lhs = lhs3: Lhs3, rhs = rhs3: Rhs3] v3: T3}
|
||||
{#[num = 4, field = field_4, ty = ty4: Ty4, lhs = lhs4: Lhs4, rhs = rhs4: Rhs4] v4: T4}
|
||||
{#[num = 5, field = field_5, ty = ty5: Ty5, lhs = lhs5: Lhs5, rhs = rhs5: Rhs5] v5: T5}
|
||||
{#[num = 6, field = field_6, ty = ty6: Ty6, lhs = lhs6: Lhs6, rhs = rhs6: Rhs6] v6: T6}
|
||||
{#[num = 7, field = field_7, ty = ty7: Ty7, lhs = lhs7: Lhs7, rhs = rhs7: Rhs7] v7: T7}
|
||||
{#[num = 8, field = field_8, ty = ty8: Ty8, lhs = lhs8: Lhs8, rhs = rhs8: Rhs8] v8: T8}
|
||||
{#[num = 9, field = field_9, ty = ty9: Ty9, lhs = lhs9: Lhs9, rhs = rhs9: Rhs9] v9: T9}
|
||||
{#[num = 10, field = field_10, ty = ty10: Ty10, lhs = lhs10: Lhs10, rhs = rhs10: Rhs10] v10: T10}
|
||||
{#[num = 11, field = field_11, ty = ty11: Ty11, lhs = lhs11: Lhs11, rhs = rhs11: Rhs11] v11: T11}
|
||||
{#[num = 0, field = field_0] v0: T0}
|
||||
{#[num = 1, field = field_1] v1: T1}
|
||||
{#[num = 2, field = field_2] v2: T2}
|
||||
{#[num = 3, field = field_3] v3: T3}
|
||||
{#[num = 4, field = field_4] v4: T4}
|
||||
{#[num = 5, field = field_5] v5: T5}
|
||||
{#[num = 6, field = field_6] v6: T6}
|
||||
{#[num = 7, field = field_7] v7: T7}
|
||||
{#[num = 8, field = field_8] v8: T8}
|
||||
{#[num = 9, field = field_9] v9: T9}
|
||||
{#[num = 10, field = field_10] v10: T10}
|
||||
{#[num = 11, field = field_11] v11: T11}
|
||||
]
|
||||
}
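The `impl_tuples!` invocation above is what lets plain Rust tuples act as bundle types. A small sketch grounded in the tuple-typed wires and `.0`/`.1` field accesses used by `HdlOption::unzip` later in this diff; the module wrapper and names are illustrative assumptions.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn tuple_example() {
    // A two-field bundle written as an ordinary tuple type.
    #[hdl]
    let pair: (UInt<8>, Bool) = wire();
    connect(pair.0, 0x45_hdl_u8);
    connect(pair.1, true);
}
```
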
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> Type for PhantomData<T> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ();
|
||||
type MatchVariant = PhantomData<T>;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
type MatchVariantsIter = std::iter::Once<Self::MatchVariantAndInactiveScope>;
|
||||
fn match_variants(
|
||||
this: Expr<Self>,
|
||||
source_location: SourceLocation,
|
||||
) -> Self::MatchVariantsIter {
|
||||
let _ = this;
|
||||
let _ = source_location;
|
||||
std::iter::once(MatchVariantWithoutScope(PhantomData))
|
||||
}
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
()
|
||||
}
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
Bundle::new(self.fields()).canonical()
|
||||
}
|
||||
#[track_caller]
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let CanonicalType::Bundle(bundle) = canonical_type else {
|
||||
panic!("expected bundle");
|
||||
};
|
||||
assert!(
|
||||
bundle.fields().is_empty(),
|
||||
"bundle has wrong number of fields"
|
||||
);
|
||||
PhantomData
|
||||
}
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PhantomDataBuilder<T: ?Sized + Send + Sync + 'static>(PhantomData<T>);
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> Default for PhantomDataBuilder<T> {
|
||||
fn default() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToExpr for PhantomDataBuilder<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
PhantomData.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> BundleType for PhantomData<T> {
|
||||
type Builder = PhantomDataBuilder<T>;
|
||||
type FilledBuilder = PhantomDataBuilder<T>;
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
Interned::default()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> TypeWithDeref for PhantomData<T> {
|
||||
fn expr_deref(_this: &Expr<Self>) -> &Self::MatchVariant {
|
||||
&PhantomData
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> StaticType for PhantomData<T> {
|
||||
const TYPE: Self = PhantomData;
|
||||
const MASK_TYPE: Self::MaskType = ();
|
||||
const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToExpr for PhantomData<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
BundleLiteral::new(PhantomData, Interned::default()).to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValue<Self> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self, ty: Self) -> SimValue<Self> {
|
||||
ToSimValue::into_sim_value(BitVec::new(), ty)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValue<Bundle> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
assert!(ty.fields().is_empty());
|
||||
ToSimValue::into_sim_value(BitVec::new(), ty)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValue<CanonicalType> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
let ty = Bundle::from_canonical(ty);
|
||||
assert!(ty.fields().is_empty());
|
||||
ToSimValue::into_sim_value(BitVec::new(), ty).into_canonical()
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,27 +1,15 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
bundle::{Bundle, BundleType},
|
||||
firrtl::{self, ExportOptions},
|
||||
firrtl,
|
||||
intern::Interned,
|
||||
module::Module,
|
||||
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
|
||||
};
|
||||
use clap::{
|
||||
builder::{OsStringValueParser, TypedValueParser},
|
||||
Parser, Subcommand, ValueEnum, ValueHint,
|
||||
Args, Parser, Subcommand, ValueEnum, ValueHint,
|
||||
};
|
||||
use eyre::{eyre, Report};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
error,
|
||||
ffi::OsString,
|
||||
fmt::{self, Write},
|
||||
fs, io, mem,
|
||||
path::{Path, PathBuf},
|
||||
process,
|
||||
};
|
||||
use tempfile::TempDir;
|
||||
use std::{error, ffi::OsString, fmt, io, path::PathBuf, process};
|
||||
|
||||
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
|
||||
|
||||
|
@ -49,157 +37,74 @@ impl From<io::Error> for CliError {
|
|||
|
||||
pub trait RunPhase<Arg> {
|
||||
type Output;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
||||
self.run_with_job(arg, &mut AcquiredJob::acquire())
|
||||
}
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output>;
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[derive(Args, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct BaseArgs {
|
||||
/// the directory to put the generated main output file and associated files in
|
||||
#[arg(short, long, value_hint = ValueHint::DirPath, required = true)]
|
||||
pub output: Option<PathBuf>,
|
||||
#[arg(short, long, value_hint = ValueHint::DirPath)]
|
||||
pub output: PathBuf,
|
||||
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
|
||||
#[arg(long)]
|
||||
pub file_stem: Option<String>,
|
||||
#[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")]
|
||||
pub keep_temp_dir: bool,
|
||||
#[arg(skip = false)]
|
||||
pub redirect_output_for_rust_test: bool,
|
||||
}
|
||||
|
||||
impl BaseArgs {
|
||||
fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option<TempDir>)> {
|
||||
let (dir_path, temp_dir) = match &self.output {
|
||||
Some(output) => (output.clone(), None),
|
||||
None => {
|
||||
let temp_dir = TempDir::new()?;
|
||||
if self.keep_temp_dir {
|
||||
let temp_dir = temp_dir.into_path();
|
||||
println!("created temporary directory: {}", temp_dir.display());
|
||||
(temp_dir, None)
|
||||
} else {
|
||||
(temp_dir.path().to_path_buf(), Some(temp_dir))
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok((
|
||||
pub fn to_firrtl_file_backend(&self) -> firrtl::FileBackend {
|
||||
firrtl::FileBackend {
|
||||
dir_path,
|
||||
dir_path: self.output.clone(),
|
||||
top_fir_file_stem: self.file_stem.clone(),
|
||||
circuit_name: None,
|
||||
},
|
||||
temp_dir,
|
||||
))
|
||||
}
|
||||
/// handles possibly redirecting the command's output for Rust tests
|
||||
pub fn run_external_command(
|
||||
&self,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
mut command: process::Command,
|
||||
mut captured_output: Option<&mut String>,
|
||||
) -> io::Result<process::ExitStatus> {
|
||||
if self.redirect_output_for_rust_test || captured_output.is_some() {
|
||||
let (reader, writer) = os_pipe::pipe()?;
|
||||
let mut reader = io::BufReader::new(reader);
|
||||
command.stderr(writer.try_clone()?);
|
||||
command.stdout(writer); // must not leave writer around after spawning child
|
||||
command.stdin(process::Stdio::null());
|
||||
let mut child = command.spawn()?;
|
||||
drop(command); // close writers
|
||||
Ok(loop {
|
||||
let status = child.try_wait()?;
|
||||
streaming_read_utf8(&mut reader, |s| {
|
||||
if let Some(captured_output) = captured_output.as_deref_mut() {
|
||||
captured_output.push_str(s);
|
||||
}
|
||||
// use print! so output goes to Rust test output capture
|
||||
print!("{s}");
|
||||
io::Result::Ok(())
|
||||
})?;
|
||||
if let Some(status) = status {
|
||||
break status;
|
||||
}
|
||||
})
|
||||
} else {
|
||||
command.status()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[derive(Args, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub base: BaseArgs,
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlOutput {
|
||||
pub file_stem: String,
|
||||
pub top_module: String,
|
||||
pub output_dir: PathBuf,
|
||||
pub temp_dir: Option<TempDir>,
|
||||
}
|
||||
|
||||
impl FirrtlOutput {
|
||||
pub fn file_with_ext(&self, ext: &str) -> PathBuf {
|
||||
let mut retval = self.output_dir.join(&self.file_stem);
|
||||
retval.set_extension(ext);
|
||||
pub fn firrtl_file(&self, args: &FirrtlArgs) -> PathBuf {
|
||||
let mut retval = args.base.output.join(&self.file_stem);
|
||||
retval.set_extension("fir");
|
||||
retval
|
||||
}
|
||||
pub fn firrtl_file(&self) -> PathBuf {
|
||||
self.file_with_ext("fir")
|
||||
}
|
||||
}
|
||||
|
||||
impl FirrtlArgs {
|
||||
fn run_impl(
|
||||
&self,
|
||||
top_module: Module<Bundle>,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> Result<FirrtlOutput> {
|
||||
let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?;
|
||||
fn run_impl(&self, top_module: Module<Bundle>) -> Result<FirrtlOutput> {
|
||||
let firrtl::FileBackend {
|
||||
top_fir_file_stem,
|
||||
circuit_name,
|
||||
dir_path,
|
||||
} = firrtl::export(file_backend, &top_module, self.export_options)?;
|
||||
top_fir_file_stem, ..
|
||||
} = firrtl::export(self.base.to_firrtl_file_backend(), &top_module)?;
|
||||
Ok(FirrtlOutput {
|
||||
file_stem: top_fir_file_stem.expect(
|
||||
"export is known to set the file stem from the circuit name if not provided",
|
||||
),
|
||||
top_module: circuit_name.expect("export is known to set the circuit name"),
|
||||
output_dir: dir_path,
|
||||
temp_dir,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run_with_job(
|
||||
&self,
|
||||
top_module: Module<T>,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<Self::Output> {
|
||||
self.run_impl(top_module.canonical(), acquired_job)
|
||||
fn run(&self, top_module: Module<T>) -> Result<Self::Output> {
|
||||
self.run_impl(top_module.canonical())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run_with_job(
|
||||
&self,
|
||||
top_module: Interned<Module<T>>,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<Self::Output> {
|
||||
self.run_with_job(*top_module, acquired_job)
|
||||
fn run(&self, top_module: Interned<Module<T>>) -> Result<Self::Output> {
|
||||
self.run(*top_module)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -215,22 +120,7 @@ pub enum VerilogDialect {
|
|||
Yosys,
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
VerilogDialect::Questa => "questa",
|
||||
VerilogDialect::Spyglass => "spyglass",
|
||||
VerilogDialect::Verilator => "verilator",
|
||||
VerilogDialect::Vivado => "vivado",
|
||||
VerilogDialect::Yosys => "yosys",
|
||||
}
|
||||
}
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
|
@ -248,7 +138,7 @@ impl VerilogDialect {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[derive(Args, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogArgs {
|
||||
#[command(flatten)]
|
||||
|
@ -266,94 +156,39 @@ pub struct VerilogArgs {
|
|||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
#[arg(long, short = 'g')]
|
||||
pub debug: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogOutput {
|
||||
pub firrtl: FirrtlOutput,
|
||||
pub verilog_files: Vec<PathBuf>,
|
||||
pub contents_hash: Option<blake3::Hash>,
|
||||
}
|
||||
|
||||
impl VerilogOutput {
|
||||
pub fn main_verilog_file(&self) -> PathBuf {
|
||||
self.firrtl.file_with_ext("v")
|
||||
}
|
||||
fn unadjusted_verilog_file(&self) -> PathBuf {
|
||||
self.firrtl.file_with_ext("unadjusted.v")
|
||||
pub fn verilog_file(&self, args: &VerilogArgs) -> PathBuf {
|
||||
let mut retval = args.firrtl.base.output.join(&self.firrtl.file_stem);
|
||||
retval.set_extension("v");
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogArgs {
|
||||
fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result<VerilogOutput> {
|
||||
let input = fs::read_to_string(output.unadjusted_verilog_file())?;
|
||||
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
|
||||
let file_separator_suffix = "\" ----- 8< -----\n\n";
|
||||
let mut input = &*input;
|
||||
output.contents_hash = Some(blake3::hash(input.as_bytes()));
|
||||
let main_verilog_file = output.main_verilog_file();
|
||||
let mut file_name: Option<&Path> = Some(&main_verilog_file);
|
||||
loop {
|
||||
let (chunk, next_file_name) = if let Some((chunk, rest)) =
|
||||
input.split_once(file_separator_prefix)
|
||||
{
|
||||
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
|
||||
return Err(CliError(eyre!("parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}")));
|
||||
};
|
||||
input = rest;
|
||||
(chunk, Some(next_file_name.as_ref()))
|
||||
} else {
|
||||
(mem::take(&mut input), None)
|
||||
};
|
||||
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
|
||||
break;
|
||||
};
|
||||
let file_name = output.firrtl.output_dir.join(file_name);
|
||||
fs::write(&file_name, chunk)?;
|
||||
if let Some(extension) = file_name.extension() {
|
||||
if extension == "v" || extension == "sv" {
|
||||
output.verilog_files.push(file_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
fn run_impl(
|
||||
&self,
|
||||
firrtl_output: FirrtlOutput,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<VerilogOutput> {
|
||||
let Self {
|
||||
firrtl,
|
||||
firtool,
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
debug,
|
||||
} = self;
|
||||
fn run_impl(&self, firrtl_output: FirrtlOutput) -> Result<VerilogOutput> {
|
||||
let output = VerilogOutput {
|
||||
firrtl: firrtl_output,
|
||||
verilog_files: vec![],
|
||||
contents_hash: None,
|
||||
};
|
||||
let mut cmd = process::Command::new(firtool);
|
||||
cmd.arg(output.firrtl.firrtl_file());
|
||||
let mut cmd = process::Command::new(&self.firtool);
|
||||
cmd.arg(output.firrtl.firrtl_file(&self.firrtl));
|
||||
cmd.arg("-o");
|
||||
cmd.arg(output.unadjusted_verilog_file());
|
||||
if *debug {
|
||||
cmd.arg("-g");
|
||||
cmd.arg("--preserve-values=all");
|
||||
}
|
||||
if let Some(dialect) = verilog_dialect {
|
||||
cmd.arg(output.verilog_file(self));
|
||||
if let Some(dialect) = self.verilog_dialect {
|
||||
cmd.args(dialect.firtool_extra_args());
|
||||
}
|
||||
cmd.args(firtool_extra_args);
|
||||
cmd.current_dir(&output.firrtl.output_dir);
|
||||
let status = firrtl.base.run_external_command(acquired_job, cmd, None)?;
|
||||
cmd.args(&self.firtool_extra_args);
|
||||
cmd.current_dir(&self.firrtl.base.output);
|
||||
let status = cmd.status()?;
|
||||
if status.success() {
|
||||
self.process_unadjusted_verilog_file(output)
|
||||
Ok(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
|
@ -368,316 +203,9 @@ where
|
|||
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = VerilogOutput;
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?;
|
||||
self.run_impl(firrtl_output, acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalMode {
|
||||
#[default]
|
||||
BMC,
|
||||
Prove,
|
||||
Live,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalMode {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
FormalMode::BMC => "bmc",
|
||||
FormalMode::Prove => "prove",
|
||||
FormalMode::Live => "live",
|
||||
FormalMode::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FormalMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct FormalAdjustArgs;
|
||||
|
||||
impl clap::FromArgMatches for FormalAdjustArgs {
|
||||
fn from_arg_matches(_matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
|
||||
Ok(Self)
|
||||
}
|
||||
|
||||
fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl clap::Args for FormalAdjustArgs {
|
||||
fn augment_args(cmd: clap::Command) -> clap::Command {
|
||||
cmd.mut_arg("output", |arg| arg.required(false))
|
||||
.mut_arg("verilog_dialect", |arg| {
|
||||
arg.default_value(VerilogDialect::Yosys.to_string())
|
||||
.hide(true)
|
||||
})
|
||||
}
|
||||
|
||||
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
|
||||
Self::augment_args(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalArgs {
|
||||
#[command(flatten)]
|
||||
pub verilog: VerilogArgs,
|
||||
#[arg(
|
||||
long,
|
||||
default_value = "sby",
|
||||
env = "SBY",
|
||||
value_hint = ValueHint::CommandName,
|
||||
value_parser = OsStringValueParser::new().try_map(which::which)
|
||||
)]
|
||||
pub sby: PathBuf,
|
||||
#[arg(long)]
|
||||
pub sby_extra_args: Vec<String>,
|
||||
#[arg(long, default_value_t)]
|
||||
pub mode: FormalMode,
|
||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
||||
pub depth: u64,
|
||||
#[arg(long, default_value = "z3")]
|
||||
pub solver: String,
|
||||
#[arg(long)]
|
||||
pub smtbmc_extra_args: Vec<String>,
|
||||
#[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")]
|
||||
pub cache_results: bool,
|
||||
#[command(flatten)]
|
||||
_formal_adjust_args: FormalAdjustArgs,
|
||||
}
|
||||
|
||||
impl fmt::Debug for FormalArgs {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
verilog,
|
||||
sby,
|
||||
sby_extra_args,
|
||||
mode,
|
||||
depth,
|
||||
solver,
|
||||
smtbmc_extra_args,
|
||||
cache_results,
|
||||
_formal_adjust_args: _,
|
||||
} = self;
|
||||
f.debug_struct("FormalArgs")
|
||||
.field("verilog", verilog)
|
||||
.field("sby", sby)
|
||||
.field("sby_extra_args", sby_extra_args)
|
||||
.field("mode", mode)
|
||||
.field("depth", depth)
|
||||
.field("solver", solver)
|
||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
||||
.field("cache_results", cache_results)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
pub const DEFAULT_DEPTH: u64 = 20;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalOutput {
|
||||
pub verilog: VerilogOutput,
|
||||
}
|
||||
|
||||
impl FormalOutput {
|
||||
pub fn sby_file(&self) -> PathBuf {
|
||||
self.verilog.firrtl.file_with_ext("sby")
|
||||
}
|
||||
pub fn cache_file(&self) -> PathBuf {
|
||||
self.verilog.firrtl.file_with_ext("cache.json")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalCacheOutput {}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalCacheVersion {
|
||||
V1,
|
||||
}
|
||||
|
||||
impl FormalCacheVersion {
|
||||
pub const CURRENT: Self = Self::V1;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalCache {
|
||||
pub version: FormalCacheVersion,
|
||||
pub contents_hash: blake3::Hash,
|
||||
pub stdout_stderr: String,
|
||||
pub result: Result<FormalCacheOutput, String>,
|
||||
}
|
||||
|
||||
impl FormalCache {
|
||||
pub fn new(
|
||||
version: FormalCacheVersion,
|
||||
contents_hash: blake3::Hash,
|
||||
stdout_stderr: String,
|
||||
result: Result<FormalCacheOutput, String>,
|
||||
) -> Self {
|
||||
Self {
|
||||
version,
|
||||
contents_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
fn sby_contents(&self, output: &FormalOutput) -> Result<String> {
|
||||
let Self {
|
||||
verilog: _,
|
||||
sby: _,
|
||||
sby_extra_args: _,
|
||||
mode,
|
||||
depth,
|
||||
smtbmc_extra_args,
|
||||
solver,
|
||||
cache_results: _,
|
||||
_formal_adjust_args: _,
|
||||
} = self;
|
||||
let smtbmc_options = smtbmc_extra_args.join(" ");
|
||||
let top_module = &output.verilog.firrtl.top_module;
|
||||
let mut retval = format!(
|
||||
"[options]\n\
|
||||
mode {mode}\n\
|
||||
depth {depth}\n\
|
||||
wait on\n\
|
||||
\n\
|
||||
[engines]\n\
|
||||
smtbmc {solver} -- -- {smtbmc_options}\n\
|
||||
\n\
|
||||
[script]\n"
|
||||
);
|
||||
for verilog_file in &output.verilog.verilog_files {
|
||||
let verilog_file = verilog_file
|
||||
.to_str()
|
||||
.ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?;
|
||||
if verilog_file.contains(|ch: char| {
|
||||
(ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
|
||||
}) {
|
||||
return Err(CliError(eyre!(
|
||||
"verilog file path contains characters that aren't permitted"
|
||||
)));
|
||||
}
|
||||
writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap();
|
||||
}
|
||||
// workaround for wires disappearing -- set `keep` on all wires
|
||||
writeln!(retval, "hierarchy -top {top_module}").unwrap();
|
||||
writeln!(retval, "proc").unwrap();
|
||||
writeln!(retval, "setattr -set keep 1 w:\\*").unwrap();
|
||||
writeln!(retval, "prep").unwrap();
|
||||
Ok(retval)
|
||||
}
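For reference, with the defaults above (`mode bmc`, `depth 20`, solver `z3`, no extra smtbmc arguments) and a single generated Verilog file, the format strings in `sby_contents` produce a SymbiYosys script along these lines (the file and top-module names are illustrative):

```
[options]
mode bmc
depth 20
wait on

[engines]
smtbmc z3 -- --

[script]
read_verilog -sv -formal "top.v"
hierarchy -top top
proc
setattr -set keep 1 w:\*
prep
```
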
|
||||
fn run_impl(
|
||||
&self,
|
||||
verilog_output: VerilogOutput,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<FormalOutput> {
|
||||
let output = FormalOutput {
|
||||
verilog: verilog_output,
|
||||
};
|
||||
let sby_file = output.sby_file();
|
||||
let sby_contents = self.sby_contents(&output)?;
|
||||
let contents_hash = output.verilog.contents_hash.map(|verilog_hash| {
|
||||
let mut hasher = blake3::Hasher::new();
|
||||
hasher.update(verilog_hash.as_bytes());
|
||||
hasher.update(sby_contents.as_bytes());
|
||||
hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes());
|
||||
for sby_extra_arg in self.sby_extra_args.iter() {
|
||||
hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes());
|
||||
hasher.update(sby_extra_arg.as_bytes());
|
||||
}
|
||||
hasher.finalize()
|
||||
});
|
||||
std::fs::write(&sby_file, sby_contents)?;
|
||||
let mut cmd = process::Command::new(&self.sby);
|
||||
cmd.arg("-j1"); // sby seems not to respect job count in parallel mode
|
||||
cmd.arg("-f");
|
||||
cmd.arg(sby_file.file_name().unwrap());
|
||||
cmd.args(&self.sby_extra_args);
|
||||
cmd.current_dir(&output.verilog.firrtl.output_dir);
|
||||
let mut captured_output = String::new();
|
||||
let cache_file = output.cache_file();
|
||||
let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) {
|
||||
if let Some(FormalCache {
|
||||
version: FormalCacheVersion::CURRENT,
|
||||
contents_hash: cache_contents_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}) = fs::read(&cache_file)
|
||||
.ok()
|
||||
.and_then(|v| serde_json::from_slice(&v).ok())
|
||||
{
|
||||
if cache_contents_hash == contents_hash {
|
||||
println!("Using cached formal result:\n{stdout_stderr}");
|
||||
return match result {
|
||||
Ok(FormalCacheOutput {}) => Ok(output),
|
||||
Err(error) => Err(CliError(eyre::Report::msg(error))),
|
||||
};
|
||||
}
|
||||
}
|
||||
true
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
let status = self.verilog.firrtl.base.run_external_command(
|
||||
acquired_job,
|
||||
cmd,
|
||||
do_cache.then_some(&mut captured_output),
|
||||
)?;
|
||||
let result = if status.success() {
|
||||
Ok(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
self.sby.display()
|
||||
)))
|
||||
};
|
||||
fs::write(
|
||||
cache_file,
|
||||
serde_json::to_string_pretty(&FormalCache {
|
||||
version: FormalCacheVersion::CURRENT,
|
||||
contents_hash: contents_hash.unwrap(),
|
||||
stdout_stderr: captured_output,
|
||||
result: match &result {
|
||||
Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}),
|
||||
Err(error) => Err(error.to_string()),
|
||||
},
|
||||
})
|
||||
.expect("serialization shouldn't ever fail"),
|
||||
)?;
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
impl<Arg> RunPhase<Arg> for FormalArgs
|
||||
where
|
||||
VerilogArgs: RunPhase<Arg, Output = VerilogOutput>,
|
||||
{
|
||||
type Output = FormalOutput;
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
let verilog_output = self.verilog.run_with_job(arg, acquired_job)?;
|
||||
self.run_impl(verilog_output, acquired_job)
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
||||
let firrtl_output = self.firrtl.run(arg)?;
|
||||
self.run_impl(firrtl_output)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -687,8 +215,6 @@ enum CliCommand {
|
|||
Firrtl(FirrtlArgs),
|
||||
/// Generate Verilog
|
||||
Verilog(VerilogArgs),
|
||||
/// Run a formal proof
|
||||
Formal(FormalArgs),
|
||||
}
|
||||
|
||||
/// a simple CLI
|
||||
|
@ -766,16 +292,13 @@ where
|
|||
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = ();
|
||||
fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
fn run(&self, arg: T) -> Result<Self::Output> {
|
||||
match &self.subcommand {
|
||||
CliCommand::Firrtl(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
c.run(arg)?;
|
||||
}
|
||||
CliCommand::Verilog(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
CliCommand::Formal(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
c.run(arg)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
|
|
|
@ -4,7 +4,7 @@ use crate::{
    expr::{Expr, ToExpr},
    hdl,
    int::Bool,
    reset::{Reset, ResetType},
    reset::Reset,
    source_location::SourceLocation,
    ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
};

@ -88,9 +88,9 @@ impl ToClock for Expr<Clock> {
}

#[hdl]
pub struct ClockDomain<R: ResetType = Reset> {
pub struct ClockDomain {
    pub clk: Clock,
    pub rst: R,
    pub rst: Reset,
}

impl ToClock for bool {

@ -2,22 +2,19 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
ops::{ExprPartialEq, VariantAccess},
|
||||
Expr, ToExpr,
|
||||
},
|
||||
expr::{ops::VariantAccess, Expr, ToExpr},
|
||||
hdl,
|
||||
int::Bool,
|
||||
intern::{Intern, Interned},
|
||||
module::{
|
||||
connect, enum_match_variants_helper, incomplete_wire, wire,
|
||||
EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope,
|
||||
enum_match_variants_helper, EnumMatchVariantAndInactiveScopeImpl,
|
||||
EnumMatchVariantsIterImpl, Scope,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, MatchVariantAndInactiveScope, StaticType, Type, TypeProperties},
|
||||
};
|
||||
use hashbrown::HashMap;
|
||||
use std::{convert::Infallible, fmt, iter::FusedIterator};
|
||||
use std::{fmt, iter::FusedIterator};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct EnumVariant {
|
||||
|
@ -172,12 +169,6 @@ impl EnumTypePropertiesBuilder {
|
|||
}
|
||||
}
|
||||
|
||||
impl Default for EnumTypePropertiesBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Enum {
|
||||
#[track_caller]
|
||||
pub fn new(variants: Interned<[EnumVariant]>) -> Self {
|
||||
|
@ -363,60 +354,6 @@ pub enum HdlOption<T: Type> {
|
|||
HdlSome(T),
|
||||
}
|
||||
|
||||
impl<Lhs: Type + ExprPartialEq<Rhs>, Rhs: Type> ExprPartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
#[hdl]
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_eq = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_eq, ExprPartialEq::cmp_eq(lhs, rhs)),
|
||||
HdlNone => connect(cmp_eq, false),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_eq, false),
|
||||
HdlNone => connect(cmp_eq, true),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_eq
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_ne = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_ne, ExprPartialEq::cmp_ne(lhs, rhs)),
|
||||
HdlNone => connect(cmp_ne, true),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_ne, true),
|
||||
HdlNone => connect(cmp_ne, false),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_ne
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub fn HdlNone<T: StaticType>() -> Expr<HdlOption<T>> {
|
||||
HdlOption[T::TYPE].HdlNone()
|
||||
|
@ -427,307 +364,3 @@ pub fn HdlSome<T: Type>(value: impl ToExpr<Type = T>) -> Expr<HdlOption<T>> {
|
|||
let value = value.to_expr();
|
||||
HdlOption[Expr::ty(value)].HdlSome(value)
|
||||
}
|
||||
|
||||
impl<T: Type> HdlOption<T> {
|
||||
#[track_caller]
|
||||
pub fn try_map<R: Type, E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<Expr<R>, E>,
|
||||
) -> Result<Expr<HdlOption<R>>, E> {
|
||||
Self::try_and_then(expr, |v| Ok(HdlSome(f(v)?)))
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn map<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<HdlOption<R>> {
|
||||
Self::and_then(expr, |v| HdlSome(f(v)))
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn try_and_then<R: Type, E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<Expr<HdlOption<R>>, E>,
|
||||
) -> Result<Expr<HdlOption<R>>, E> {
|
||||
// manually run match steps so we can extract the return type to construct HdlNone
|
||||
type Wrap<T> = T;
|
||||
#[hdl]
|
||||
let mut and_then_out = incomplete_wire();
|
||||
let mut iter = Self::match_variants(expr, SourceLocation::caller());
|
||||
let none = iter.next().unwrap();
|
||||
let some = iter.next().unwrap();
|
||||
assert!(iter.next().is_none());
|
||||
let (Wrap::<<Self as Type>::MatchVariant>::HdlSome(value), some_scope) =
|
||||
Self::match_activate_scope(some)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
let value = f(value).inspect_err(|_| {
|
||||
and_then_out.complete(()); // avoid error
|
||||
})?;
|
||||
let and_then_out = and_then_out.complete(Expr::ty(value));
|
||||
connect(and_then_out, value);
|
||||
drop(some_scope);
|
||||
let (Wrap::<<Self as Type>::MatchVariant>::HdlNone, none_scope) =
|
||||
Self::match_activate_scope(none)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
connect(and_then_out, Expr::ty(and_then_out).HdlNone());
|
||||
drop(none_scope);
|
||||
Ok(and_then_out)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn and_then<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<HdlOption<R>>,
|
||||
) -> Expr<HdlOption<R>> {
|
||||
match Self::try_and_then(expr, |v| Ok::<_, Infallible>(f(v))) {
|
||||
Ok(v) => v,
|
||||
Err(e) => match e {},
|
||||
}
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn and<U: Type>(expr: Expr<Self>, opt_b: Expr<HdlOption<U>>) -> Expr<HdlOption<U>> {
|
||||
#[hdl]
|
||||
let and_out = wire(Expr::ty(opt_b));
|
||||
connect(and_out, Expr::ty(opt_b).HdlNone());
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(and_out, opt_b);
|
||||
}
|
||||
and_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn try_filter<E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<Expr<Bool>, E>,
|
||||
) -> Result<Expr<Self>, E> {
|
||||
#[hdl]
|
||||
let filtered = wire(Expr::ty(expr));
|
||||
connect(filtered, Expr::ty(expr).HdlNone());
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
#[hdl]
|
||||
if f.take().unwrap()(v)? {
|
||||
connect(filtered, HdlSome(v));
|
||||
}
|
||||
}
|
||||
Ok(filtered)
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn filter(expr: Expr<Self>, f: impl FnOnce(Expr<T>) -> Expr<Bool>) -> Expr<Self> {
|
||||
match Self::try_filter(expr, |v| Ok::<_, Infallible>(f(v))) {
|
||||
Ok(v) => v,
|
||||
Err(e) => match e {},
|
||||
}
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn try_inspect<E>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Result<(), E>,
|
||||
) -> Result<Expr<Self>, E> {
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
f.take().unwrap()(v)?;
|
||||
}
|
||||
Ok(expr)
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn inspect(expr: Expr<Self>, f: impl FnOnce(Expr<T>)) -> Expr<Self> {
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
f.take().unwrap()(v);
|
||||
}
|
||||
expr
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn is_none(expr: Expr<Self>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let is_none_out: Bool = wire();
|
||||
connect(is_none_out, false);
|
||||
#[hdl]
|
||||
if let HdlNone = expr {
|
||||
connect(is_none_out, true);
|
||||
}
|
||||
is_none_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn is_some(expr: Expr<Self>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let is_some_out: Bool = wire();
|
||||
connect(is_some_out, false);
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(is_some_out, true);
|
||||
}
|
||||
is_some_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn map_or<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
default: Expr<R>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<R> {
|
||||
#[hdl]
|
||||
let mapped = wire(Expr::ty(default));
|
||||
let mut f = Some(f);
|
||||
#[hdl]
|
||||
match expr {
|
||||
HdlSome(v) => connect(mapped, f.take().unwrap()(v)),
|
||||
HdlNone => connect(mapped, default),
|
||||
}
|
||||
mapped
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn map_or_else<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
default: impl FnOnce() -> Expr<R>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<R> {
|
||||
#[hdl]
|
||||
let mut mapped = incomplete_wire();
|
||||
let mut default = Some(default);
|
||||
let mut f = Some(f);
|
||||
let mut retval = None;
|
||||
#[hdl]
|
||||
match expr {
|
||||
HdlSome(v) => {
|
||||
let v = f.take().unwrap()(v);
|
||||
let mapped = *retval.get_or_insert_with(|| mapped.complete(Expr::ty(v)));
|
||||
connect(mapped, v);
|
||||
}
|
||||
HdlNone => {
|
||||
let v = default.take().unwrap()();
|
||||
let mapped = *retval.get_or_insert_with(|| mapped.complete(Expr::ty(v)));
|
||||
connect(mapped, v);
|
||||
}
|
||||
}
|
||||
retval.unwrap()
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn or(expr: Expr<Self>, opt_b: Expr<Self>) -> Expr<Self> {
|
||||
#[hdl]
|
||||
let or_out = wire(Expr::ty(expr));
|
||||
connect(or_out, opt_b);
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(or_out, expr);
|
||||
}
|
||||
or_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn or_else(expr: Expr<Self>, f: impl FnOnce() -> Expr<Self>) -> Expr<Self> {
|
||||
#[hdl]
|
||||
let or_else_out = wire(Expr::ty(expr));
|
||||
connect(or_else_out, f());
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
connect(or_else_out, expr);
|
||||
}
|
||||
or_else_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn unwrap_or(expr: Expr<Self>, default: Expr<T>) -> Expr<T> {
|
||||
#[hdl]
|
||||
let unwrap_or_else_out = wire(Expr::ty(default));
|
||||
connect(unwrap_or_else_out, default);
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
connect(unwrap_or_else_out, v);
|
||||
}
|
||||
unwrap_or_else_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn unwrap_or_else(expr: Expr<Self>, f: impl FnOnce() -> Expr<T>) -> Expr<T> {
|
||||
#[hdl]
|
||||
let unwrap_or_else_out = wire(Expr::ty(expr).HdlSome);
|
||||
connect(unwrap_or_else_out, f());
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
connect(unwrap_or_else_out, v);
|
||||
}
|
||||
unwrap_or_else_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn xor(expr: Expr<Self>, opt_b: Expr<Self>) -> Expr<Self> {
|
||||
#[hdl]
|
||||
let xor_out = wire(Expr::ty(expr));
|
||||
#[hdl]
|
||||
if let HdlSome(_) = expr {
|
||||
#[hdl]
|
||||
if let HdlNone = opt_b {
|
||||
connect(xor_out, expr);
|
||||
} else {
|
||||
connect(xor_out, Expr::ty(expr).HdlNone());
|
||||
}
|
||||
} else {
|
||||
connect(xor_out, opt_b);
|
||||
}
|
||||
xor_out
|
||||
}
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn zip<U: Type>(expr: Expr<Self>, other: Expr<HdlOption<U>>) -> Expr<HdlOption<(T, U)>> {
|
||||
#[hdl]
|
||||
let zip_out = wire(HdlOption[(Expr::ty(expr).HdlSome, Expr::ty(other).HdlSome)]);
|
||||
connect(zip_out, Expr::ty(zip_out).HdlNone());
|
||||
#[hdl]
|
||||
if let HdlSome(l) = expr {
|
||||
#[hdl]
|
||||
if let HdlSome(r) = other {
|
||||
connect(zip_out, HdlSome((l, r)));
|
||||
}
|
||||
}
|
||||
zip_out
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type> HdlOption<HdlOption<T>> {
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn flatten(expr: Expr<Self>) -> Expr<HdlOption<T>> {
|
||||
#[hdl]
|
||||
let flattened = wire(Expr::ty(expr).HdlSome);
|
||||
#[hdl]
|
||||
match expr {
|
||||
HdlSome(v) => connect(flattened, v),
|
||||
HdlNone => connect(flattened, Expr::ty(expr).HdlSome.HdlNone()),
|
||||
}
|
||||
flattened
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, U: Type> HdlOption<(T, U)> {
|
||||
#[hdl]
|
||||
#[track_caller]
|
||||
pub fn unzip(expr: Expr<Self>) -> Expr<(HdlOption<T>, HdlOption<U>)> {
|
||||
let (t, u) = Expr::ty(expr).HdlSome;
|
||||
#[hdl]
|
||||
let unzipped = wire((HdlOption[t], HdlOption[u]));
|
||||
connect(unzipped, (HdlOption[t].HdlNone(), HdlOption[u].HdlNone()));
|
||||
#[hdl]
|
||||
if let HdlSome(v) = expr {
|
||||
connect(unzipped.0, HdlSome(v.0));
|
||||
connect(unzipped.1, HdlSome(v.1));
|
||||
}
|
||||
unzipped
|
||||
}
|
||||
}
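The combinators above are all associated functions taking an `Expr<HdlOption<T>>`. A short usage sketch built only from the signatures shown in this hunk; the module wrapper, port, and widths are assumptions for illustration.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn option_usage() {
    #[hdl]
    let maybe_byte: HdlOption<UInt<8>> = m.input();
    #[hdl]
    let fallback: UInt<8> = wire();
    connect(fallback, 0_hdl_u8);
    #[hdl]
    let valid: Bool = wire();
    #[hdl]
    let value: UInt<8> = wire();
    // `is_some` produces a Bool that is true in the HdlSome case;
    // `unwrap_or` substitutes `fallback` in the HdlNone case.
    connect(valid, HdlOption::is_some(maybe_byte));
    connect(value, HdlOption::unwrap_or(maybe_byte, fallback));
}
```
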
|
||||
|
|
|
@@ -9,7 +9,7 @@ use crate::{
ops::ExprCastTo,
target::{GetTarget, Target},
},
int::{Bool, DynSize, IntType, SIntType, SIntValue, Size, SizeType, UInt, UIntType, UIntValue},
int::{Bool, DynSize, IntType, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
intern::{Intern, Interned},
memory::{DynPortType, MemPort, PortType},
module::{
@@ -17,7 +17,6 @@ use crate::{
Instance, ModuleIO,
},
reg::Reg,
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
ty::{CanonicalType, StaticType, Type, TypeWithDeref},
wire::Wire,
};
@@ -112,7 +111,6 @@ expr_enum! {
BundleLiteral(ops::BundleLiteral),
ArrayLiteral(ops::ArrayLiteral<CanonicalType, DynSize>),
EnumLiteral(ops::EnumLiteral),
Uninit(ops::Uninit),
NotU(ops::NotU),
NotS(ops::NotS),
NotB(ops::NotB),
@@ -210,9 +208,7 @@ expr_enum! {
ModuleIO(ModuleIO<CanonicalType>),
Instance(Instance<Bundle>),
Wire(Wire<CanonicalType>),
Reg(Reg<CanonicalType, Reset>),
RegSync(Reg<CanonicalType, SyncReset>),
RegAsync(Reg<CanonicalType, AsyncReset>),
Reg(Reg<CanonicalType>),
MemPort(MemPort<DynPortType>),
}
}
@@ -596,42 +592,25 @@ impl<T: Type> GetTarget for Wire<T> {
}
}

impl<T: Type, R: ResetType> ToExpr for Reg<T, R> {
impl<T: Type> ToExpr for Reg<T> {
type Type = T;

fn to_expr(&self) -> Expr<Self::Type> {
struct Dispatch;
impl ResetTypeDispatch for Dispatch {
type Input<T: ResetType> = Reg<CanonicalType, T>;
type Output<T: ResetType> = ExprEnum;

fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
ExprEnum::Reg(input)
}

fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
ExprEnum::RegSync(input)
}

fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
ExprEnum::RegAsync(input)
}
}
Expr {
__enum: R::dispatch(self.canonical(), Dispatch).intern_sized(),
__enum: ExprEnum::Reg(self.canonical()).intern_sized(),
__ty: self.ty(),
__flow: self.flow(),
}
}
}

impl<T: Type, R: ResetType> ToLiteralBits for Reg<T, R> {
impl<T: Type> ToLiteralBits for Reg<T> {
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
Err(NotALiteralExpr)
}
}

impl<T: Type, R: ResetType> GetTarget for Reg<T, R> {
impl<T: Type> GetTarget for Reg<T> {
fn target(&self) -> Option<Interned<Target>> {
Some(Intern::intern_sized(self.canonical().into()))
}
@@ -661,18 +640,6 @@ impl<T: PortType> GetTarget for MemPort<T> {
}
}

pub trait HdlPartialEq<Rhs> {
fn cmp_eq(self, rhs: Rhs) -> Expr<Bool>;
fn cmp_ne(self, rhs: Rhs) -> Expr<Bool>;
}

pub trait HdlPartialOrd<Rhs>: HdlPartialEq<Rhs> {
fn cmp_lt(self, rhs: Rhs) -> Expr<Bool>;
fn cmp_le(self, rhs: Rhs) -> Expr<Bool>;
fn cmp_gt(self, rhs: Rhs) -> Expr<Bool>;
fn cmp_ge(self, rhs: Rhs) -> Expr<Bool>;
}

pub trait ReduceBits {
type UIntOutput;
type BoolOutput;
@@ -730,28 +697,3 @@ pub fn check_match_expr<T: Type>(
_check_fn: impl FnOnce(T::MatchVariant, Infallible),
) {
}

pub trait MakeUninitExpr: Type {
fn uninit(self) -> Expr<Self>;
}

impl<T: Type> MakeUninitExpr for T {
fn uninit(self) -> Expr<Self> {
ops::Uninit::new(self).to_expr()
}
}

pub fn repeat<T: Type, L: SizeType>(
element: impl ToExpr<Type = T>,
len: L,
) -> Expr<ArrayType<T, L::Size>> {
let element = element.to_expr();
let canonical_element = Expr::canonical(element);
ops::ArrayLiteral::new(
Expr::ty(element),
std::iter::repeat(canonical_element)
.take(L::Size::as_usize(len))
.collect(),
)
.to_expr()
}

@@ -11,18 +11,14 @@ use crate::{
GetTarget, Target, TargetPathArrayElement, TargetPathBundleField,
TargetPathDynArrayElement, TargetPathElement,
},
CastTo, Expr, ExprEnum, Flow, HdlPartialEq, HdlPartialOrd, NotALiteralExpr, ReduceBits,
ToExpr, ToLiteralBits,
CastTo, Expr, ExprEnum, Flow, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits,
},
int::{
Bool, BoolOrIntType, DynSize, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt,
UIntType, UIntValue,
Bool, BoolOrIntType, DynSize, IntCmp, IntType, KnownSize, SInt, SIntType, SIntValue, Size,
UInt, UIntType, UIntValue,
},
intern::{Intern, Interned},
reset::{
AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset, ToAsyncReset, ToReset,
ToSyncReset,
},
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
ty::{CanonicalType, StaticType, Type},
util::ConstUsize,
};
@@ -265,7 +261,7 @@ impl Neg {
};
let result_ty = retval.ty();
retval.literal_bits = arg.to_literal_bits().map(|bits| {
Intern::intern_owned(result_ty.bits_from_bigint_wrapping(&-SInt::bits_to_bigint(&bits)))
Intern::intern_owned(result_ty.bits_from_bigint_wrapping(-SInt::bits_to_bigint(&bits)))
});
retval
}
@@ -372,7 +368,7 @@ fn binary_op_literal_bits<ResultTy: BoolOrIntType, Lhs: BoolOrIntType, Rhs: Bool
let rhs = Rhs::bits_to_bigint(&rhs);
let result = f(lhs, rhs)?;
Ok(Intern::intern_owned(
result_ty.bits_from_bigint_wrapping(&result),
result_ty.bits_from_bigint_wrapping(result),
))
}

@@ -1240,11 +1236,10 @@ macro_rules! impl_dyn_shl {
}
}

impl<LhsWidth: Size, RhsWidth: Size> Shl<Expr<UIntType<RhsWidth>>> for Expr<$ty<LhsWidth>> {
type Output = Expr<$ty>;

fn shl(self, rhs: Expr<UIntType<RhsWidth>>) -> Self::Output {
$name::new(Expr::as_dyn_int(self), Expr::as_dyn_int(rhs)).to_expr()
impl_binary_op_trait! {
#[generics(LhsWidth: Size, RhsWidth: Size)]
fn Shl::shl(lhs: $ty<LhsWidth>, rhs: UIntType<RhsWidth>) -> $ty {
$name::new(Expr::as_dyn_int(lhs), Expr::as_dyn_int(rhs)).to_expr()
}
}
};
@@ -1313,11 +1308,10 @@ macro_rules! impl_dyn_shr {
}
}

impl<LhsWidth: Size, RhsWidth: Size> Shr<Expr<UIntType<RhsWidth>>> for Expr<$ty<LhsWidth>> {
type Output = Expr<$ty<LhsWidth>>;

fn shr(self, rhs: Expr<UIntType<RhsWidth>>) -> Self::Output {
$name::new(self, Expr::as_dyn_int(rhs)).to_expr()
impl_binary_op_trait! {
#[generics(LhsWidth: Size, RhsWidth: Size)]
fn Shr::shr(lhs: $ty<LhsWidth>, rhs: UIntType<RhsWidth>) -> $ty<LhsWidth> {
$name::new(lhs, Expr::as_dyn_int(rhs)).to_expr()
}
}
};
@@ -1347,7 +1341,7 @@ macro_rules! binary_op_fixed_shift {
literal_bits: Err(NotALiteralExpr),
};
retval.literal_bits = lhs.to_literal_bits().map(|bits| {
Intern::intern_owned(retval.ty().bits_from_bigint_wrapping(&$Trait::$method(
Intern::intern_owned(retval.ty().bits_from_bigint_wrapping($Trait::$method(
$ty::bits_to_bigint(&bits),
rhs,
)))
@@ -1426,45 +1420,36 @@ forward_value_to_expr_binary_op_trait! {
Shr::shr
}

pub trait ExprPartialEq<Rhs: Type>: Type {
pub trait IntCmpExpr<Rhs: Type>: Type {
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
}

pub trait ExprPartialOrd<Rhs: Type>: ExprPartialEq<Rhs> {
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
fn cmp_le(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Rhs>) -> Expr<Bool>;
}

impl<Lhs: ToExpr, Rhs: ToExpr> HdlPartialEq<Rhs> for Lhs
impl<Lhs: ToExpr, Rhs: ToExpr> IntCmp<Rhs> for Lhs
where
Lhs::Type: ExprPartialEq<Rhs::Type>,
Lhs::Type: IntCmpExpr<Rhs::Type>,
{
fn cmp_eq(self, rhs: Rhs) -> Expr<Bool> {
ExprPartialEq::cmp_eq(self.to_expr(), rhs.to_expr())
IntCmpExpr::cmp_eq(self.to_expr(), rhs.to_expr())
}
fn cmp_ne(self, rhs: Rhs) -> Expr<Bool> {
ExprPartialEq::cmp_ne(self.to_expr(), rhs.to_expr())
IntCmpExpr::cmp_ne(self.to_expr(), rhs.to_expr())
}
}

impl<Lhs: ToExpr, Rhs: ToExpr> HdlPartialOrd<Rhs> for Lhs
where
Lhs::Type: ExprPartialOrd<Rhs::Type>,
{
fn cmp_lt(self, rhs: Rhs) -> Expr<Bool> {
ExprPartialOrd::cmp_lt(self.to_expr(), rhs.to_expr())
IntCmpExpr::cmp_lt(self.to_expr(), rhs.to_expr())
}
fn cmp_le(self, rhs: Rhs) -> Expr<Bool> {
ExprPartialOrd::cmp_le(self.to_expr(), rhs.to_expr())
IntCmpExpr::cmp_le(self.to_expr(), rhs.to_expr())
}
fn cmp_gt(self, rhs: Rhs) -> Expr<Bool> {
ExprPartialOrd::cmp_gt(self.to_expr(), rhs.to_expr())
IntCmpExpr::cmp_gt(self.to_expr(), rhs.to_expr())
}
fn cmp_ge(self, rhs: Rhs) -> Expr<Bool> {
ExprPartialOrd::cmp_ge(self.to_expr(), rhs.to_expr())
IntCmpExpr::cmp_ge(self.to_expr(), rhs.to_expr())
}
}

@@ -1474,7 +1459,6 @@ macro_rules! impl_compare_op {
#[dyn_type($DynTy:ident)]
#[to_dyn_type($lhs:ident => $dyn_lhs:expr, $rhs:ident => $dyn_rhs:expr)]
#[type($Lhs:ty, $Rhs:ty)]
#[trait($Trait:ident)]
$(
struct $name:ident;
fn $method:ident();
@@ -1526,7 +1510,7 @@ macro_rules! impl_compare_op {
}
})*

impl$(<$LhsWidth: Size, $RhsWidth: Size>)? $Trait<$Rhs> for $Lhs {
impl$(<$LhsWidth: Size, $RhsWidth: Size>)? IntCmpExpr<$Rhs> for $Lhs {
$(fn $method($lhs: Expr<Self>, $rhs: Expr<$Rhs>) -> Expr<Bool> {
$name::new($dyn_lhs, $dyn_rhs).to_expr()
})*
@@ -1538,16 +1522,8 @@ impl_compare_op! {
#[dyn_type(Bool)]
#[to_dyn_type(lhs => lhs, rhs => rhs)]
#[type(Bool, Bool)]
#[trait(ExprPartialEq)]
struct CmpEqB; fn cmp_eq(); PartialEq::eq();
struct CmpNeB; fn cmp_ne(); PartialEq::ne();
}

impl_compare_op! {
#[dyn_type(Bool)]
#[to_dyn_type(lhs => lhs, rhs => rhs)]
#[type(Bool, Bool)]
#[trait(ExprPartialOrd)]
struct CmpLtB; fn cmp_lt(); PartialOrd::lt();
struct CmpLeB; fn cmp_le(); PartialOrd::le();
struct CmpGtB; fn cmp_gt(); PartialOrd::gt();
@@ -1559,17 +1535,8 @@ impl_compare_op! {
#[dyn_type(UInt)]
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
#[type(UIntType<LhsWidth>, UIntType<RhsWidth>)]
#[trait(ExprPartialEq)]
struct CmpEqU; fn cmp_eq(); PartialEq::eq();
struct CmpNeU; fn cmp_ne(); PartialEq::ne();
}

impl_compare_op! {
#[width(LhsWidth, RhsWidth)]
#[dyn_type(UInt)]
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
#[type(UIntType<LhsWidth>, UIntType<RhsWidth>)]
#[trait(ExprPartialOrd)]
struct CmpLtU; fn cmp_lt(); PartialOrd::lt();
struct CmpLeU; fn cmp_le(); PartialOrd::le();
struct CmpGtU; fn cmp_gt(); PartialOrd::gt();
@@ -1581,17 +1548,8 @@ impl_compare_op! {
#[dyn_type(SInt)]
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
#[type(SIntType<LhsWidth>, SIntType<RhsWidth>)]
#[trait(ExprPartialEq)]
struct CmpEqS; fn cmp_eq(); PartialEq::eq();
struct CmpNeS; fn cmp_ne(); PartialEq::ne();
}

impl_compare_op! {
#[width(LhsWidth, RhsWidth)]
#[dyn_type(SInt)]
#[to_dyn_type(lhs => Expr::as_dyn_int(lhs), rhs => Expr::as_dyn_int(rhs))]
#[type(SIntType<LhsWidth>, SIntType<RhsWidth>)]
#[trait(ExprPartialOrd)]
struct CmpLtS; fn cmp_lt(); PartialOrd::lt();
struct CmpLeS; fn cmp_le(); PartialOrd::le();
struct CmpGtS; fn cmp_gt(); PartialOrd::gt();
@@ -1624,7 +1582,7 @@ macro_rules! impl_cast_int_op {
ty,
literal_bits: arg.to_literal_bits().map(|bits| {
Intern::intern_owned(
ty.bits_from_bigint_wrapping(&$from::bits_to_bigint(&bits)),
ty.bits_from_bigint_wrapping($from::bits_to_bigint(&bits)),
)
}),
}
@@ -1776,11 +1734,11 @@ impl_cast_bit_op!(CastSIntToAsyncReset, SInt<1>, #[dyn] SInt, AsyncReset, #[trai
impl_cast_bit_op!(CastSyncResetToBool, SyncReset, Bool);
impl_cast_bit_op!(CastSyncResetToUInt, SyncReset, UInt<1>, #[dyn] UInt);
impl_cast_bit_op!(CastSyncResetToSInt, SyncReset, SInt<1>, #[dyn] SInt);
impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset);
impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset, #[trait] ToReset::to_reset);
impl_cast_bit_op!(CastAsyncResetToBool, AsyncReset, Bool);
impl_cast_bit_op!(CastAsyncResetToUInt, AsyncReset, UInt<1>, #[dyn] UInt);
impl_cast_bit_op!(CastAsyncResetToSInt, AsyncReset, SInt<1>, #[dyn] SInt);
impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset);
impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset, #[trait] ToReset::to_reset);
impl_cast_bit_op!(CastResetToBool, Reset, Bool);
impl_cast_bit_op!(CastResetToUInt, Reset, UInt<1>, #[dyn] UInt);
impl_cast_bit_op!(CastResetToSInt, Reset, SInt<1>, #[dyn] SInt);
@@ -1791,107 +1749,6 @@ impl_cast_bit_op!(CastClockToBool, Clock, Bool);
impl_cast_bit_op!(CastClockToUInt, Clock, UInt<1>, #[dyn] UInt);
impl_cast_bit_op!(CastClockToSInt, Clock, SInt<1>, #[dyn] SInt);

impl<T: ResetType> ToReset for Expr<T> {
fn to_reset(&self) -> Expr<Reset> {
struct Dispatch;
impl ResetTypeDispatch for Dispatch {
type Input<T: ResetType> = Expr<T>;
type Output<T: ResetType> = Expr<Reset>;

fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
input
}

fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
input.cast_to_static()
}

fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
input.cast_to_static()
}
}
T::dispatch(*self, Dispatch)
}
}

impl ExprCastTo<AsyncReset> for AsyncReset {
fn cast_to(src: Expr<Self>, _to_type: AsyncReset) -> Expr<AsyncReset> {
src
}
}

impl ExprCastTo<SyncReset> for AsyncReset {
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<Clock> for AsyncReset {
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<AsyncReset> for SyncReset {
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<SyncReset> for SyncReset {
fn cast_to(src: Expr<Self>, _to_type: SyncReset) -> Expr<SyncReset> {
src
}
}

impl ExprCastTo<Clock> for SyncReset {
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<AsyncReset> for Reset {
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<SyncReset> for Reset {
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<Reset> for Reset {
fn cast_to(src: Expr<Self>, _to_type: Reset) -> Expr<Reset> {
src
}
}

impl ExprCastTo<Clock> for Reset {
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<AsyncReset> for Clock {
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<SyncReset> for Clock {
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
src.cast_to(Bool).cast_to(to_type)
}
}

impl ExprCastTo<Clock> for Clock {
fn cast_to(src: Expr<Self>, _to_type: Clock) -> Expr<Clock> {
src
}
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct FieldAccess<FieldType: Type = CanonicalType> {
base: Expr<Bundle>,
@@ -2161,7 +2018,7 @@ impl<ElementType: Type, Len: Size> ExprIndex<usize> for ArrayType<ElementType, L

#[track_caller]
fn expr_index(this: &Expr<Self>, index: usize) -> &Expr<Self::Output> {
Interned::into_inner(
Interned::<_>::into_inner(
ArrayIndex::<ElementType>::new(Expr::as_dyn_array(*this), index)
.to_expr()
.intern_sized(),
@@ -2258,7 +2115,7 @@ impl<ElementType: Type, Len: Size, Width: Size> ExprIndex<Expr<UIntType<Width>>>
type Output = ElementType;

fn expr_index(this: &Expr<Self>, index: Expr<UIntType<Width>>) -> &Expr<Self::Output> {
Interned::into_inner(
Interned::<_>::into_inner(
DynArrayIndex::<ElementType>::new(Expr::as_dyn_array(*this), Expr::as_dyn_int(index))
.to_expr()
.intern_sized(),
@@ -2383,7 +2240,7 @@ macro_rules! impl_int_slice {
let base = Expr::as_dyn_int(*this);
let base_ty = Expr::ty(base);
let range = base_ty.slice_index_to_range(index);
Interned::into_inner($name::new(base, range).to_expr().intern_sized())
Interned::<_>::into_inner($name::new(base, range).to_expr().intern_sized())
}
}

@@ -2395,7 +2252,7 @@ macro_rules! impl_int_slice {
let base = Expr::as_dyn_int(*this);
let base_ty = Expr::ty(base);
assert!(index < base_ty.width());
Interned::into_inner(
Interned::<_>::into_inner(
$name::new(base, index..(index + 1))
.to_expr()
.cast_to_static::<Bool>()
@@ -2670,41 +2527,3 @@ impl<T: Type> ToExpr for CastBitsTo<T> {
}
}
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct Uninit<T: Type = CanonicalType> {
ty: T,
}

impl<T: Type> Uninit<T> {
#[track_caller]
pub fn new(ty: T) -> Self {
Self { ty }
}
pub fn ty(self) -> T {
self.ty
}
}

impl<T: Type> ToLiteralBits for Uninit<T> {
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
Err(NotALiteralExpr)
}
}

impl_get_target_none!([T: Type] Uninit<T>);

impl<T: Type> ToExpr for Uninit<T> {
type Type = T;

fn to_expr(&self) -> Expr<Self::Type> {
Expr {
__enum: ExprEnum::Uninit(Uninit {
ty: self.ty.canonical(),
})
.intern(),
__ty: self.ty,
__flow: Flow::Source,
}
}
}

@@ -1,21 +1,18 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
array::Array,
bundle::{Bundle, BundleField},
expr::{Expr, Flow, ToExpr},
expr::Flow,
intern::{Intern, Interned},
memory::{DynPortType, MemPort},
module::{Instance, ModuleIO, TargetName},
reg::Reg,
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
source_location::SourceLocation,
ty::{CanonicalType, Type},
wire::Wire,
};
use std::fmt;

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathBundleField {
pub name: Interned<str>,
}
@@ -26,7 +23,7 @@ impl fmt::Display for TargetPathBundleField {
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathArrayElement {
pub index: usize,
}
@@ -37,7 +34,7 @@ impl fmt::Display for TargetPathArrayElement {
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathDynArrayElement {}

impl fmt::Display for TargetPathDynArrayElement {
@@ -46,7 +43,7 @@ impl fmt::Display for TargetPathDynArrayElement {
}
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum TargetPathElement {
BundleField(TargetPathBundleField),
ArrayElement(TargetPathArrayElement),
@@ -128,7 +125,6 @@ macro_rules! impl_target_base {
$(#[$enum_meta:meta])*
$enum_vis:vis enum $TargetBase:ident {
$(
$(#[from = $from:ident])?
#[is = $is_fn:ident]
#[to = $to_fn:ident]
$(#[$variant_meta:meta])*
@@ -152,19 +148,19 @@ macro_rules! impl_target_base {
}
}

$($(
$(
impl From<$VariantTy> for $TargetBase {
fn $from(value: $VariantTy) -> Self {
fn from(value: $VariantTy) -> Self {
Self::$Variant(value)
}
}

impl From<$VariantTy> for Target {
fn $from(value: $VariantTy) -> Self {
fn from(value: $VariantTy) -> Self {
$TargetBase::$Variant(value).into()
}
}
)*)?
)*

impl $TargetBase {
$(
@@ -195,79 +191,30 @@ macro_rules! impl_target_base {
}
}
}

impl ToExpr for $TargetBase {
type Type = CanonicalType;

fn to_expr(&self) -> Expr<Self::Type> {
match self {
$(Self::$Variant(v) => Expr::canonical(v.to_expr()),)*
}
}
}
};
}

impl_target_base! {
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum TargetBase {
#[from = from]
#[is = is_module_io]
#[to = module_io]
ModuleIO(ModuleIO<CanonicalType>),
#[from = from]
#[is = is_mem_port]
#[to = mem_port]
MemPort(MemPort<DynPortType>),
#[is = is_reg]
#[to = reg]
Reg(Reg<CanonicalType, Reset>),
#[is = is_reg_sync]
#[to = reg_sync]
RegSync(Reg<CanonicalType, SyncReset>),
#[is = is_reg_async]
#[to = reg_async]
RegAsync(Reg<CanonicalType, AsyncReset>),
#[from = from]
Reg(Reg<CanonicalType>),
#[is = is_wire]
#[to = wire]
Wire(Wire<CanonicalType>),
#[from = from]
#[is = is_instance]
#[to = instance]
Instance(Instance<Bundle>),
}
}

impl<R: ResetType> From<Reg<CanonicalType, R>> for TargetBase {
fn from(value: Reg<CanonicalType, R>) -> Self {
struct Dispatch;
impl ResetTypeDispatch for Dispatch {
type Input<T: ResetType> = Reg<CanonicalType, T>;
type Output<T: ResetType> = TargetBase;

fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
TargetBase::Reg(input)
}

fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
TargetBase::RegSync(input)
}

fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
TargetBase::RegAsync(input)
}
}
R::dispatch(value, Dispatch)
}
}

impl<R: ResetType> From<Reg<CanonicalType, R>> for Target {
fn from(value: Reg<CanonicalType, R>) -> Self {
TargetBase::from(value).into()
}
}

impl fmt::Display for TargetBase {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self.target_name())
@@ -280,8 +227,6 @@ impl TargetBase {
TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None),
TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())),
TargetBase::Reg(v) => TargetName(v.scoped_name(), None),
TargetBase::RegSync(v) => TargetName(v.scoped_name(), None),
TargetBase::RegAsync(v) => TargetName(v.scoped_name(), None),
TargetBase::Wire(v) => TargetName(v.scoped_name(), None),
TargetBase::Instance(v) => TargetName(v.scoped_name(), None),
}
@@ -291,8 +236,6 @@ impl TargetBase {
TargetBase::ModuleIO(v) => v.ty(),
TargetBase::MemPort(v) => v.ty().canonical(),
TargetBase::Reg(v) => v.ty(),
TargetBase::RegSync(v) => v.ty(),
TargetBase::RegAsync(v) => v.ty(),
TargetBase::Wire(v) => v.ty(),
TargetBase::Instance(v) => v.ty().canonical(),
}
@@ -368,7 +311,7 @@ impl TargetChild {
}
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Clone, PartialEq, Eq, Hash)]
pub enum Target {
Base(Interned<TargetBase>),
Child(TargetChild),

@@ -2,10 +2,7 @@
// See Notices.txt for copyright information
#![allow(clippy::type_complexity)]
use crate::{
annotations::{
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
},
annotations::CustomFirrtlAnnotation,
array::Array,
bundle::{Bundle, BundleField, BundleType},
clock::Clock,
@@ -17,21 +14,16 @@ use crate::{
},
Expr, ExprEnum,
},
formal::FormalKind,
int::{Bool, DynSize, IntType, SIntValue, UInt, UIntValue},
intern::{Intern, Interned},
memory::{Mem, PortKind, PortName, ReadUnderWrite},
module::{
transform::{
simplify_enums::{simplify_enums, SimplifyEnumsError, SimplifyEnumsKind},
simplify_memories::simplify_memories,
transform::simplify_memories::simplify_memories, AnnotatedModuleIO, Block,
ExternModuleBody, ExternModuleParameter, ExternModuleParameterValue, Module, ModuleBody,
NameId, NormalModuleBody, Stmt, StmtConnect, StmtDeclaration, StmtIf, StmtInstance,
StmtMatch, StmtReg, StmtWire,
},
AnnotatedModuleIO, Block, ExternModuleBody, ExternModuleParameter,
ExternModuleParameterValue, Module, ModuleBody, NameOptId, NormalModuleBody, Stmt,
StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg,
StmtWire,
},
reset::{AsyncReset, Reset, ResetType, SyncReset},
reset::{AsyncReset, Reset, SyncReset},
source_location::SourceLocation,
ty::{CanonicalType, Type},
util::{
@@ -40,7 +32,6 @@ use crate::{
},
};
use bitvec::slice::BitSlice;
use clap::value_parser;
use hashbrown::{HashMap, HashSet};
use num_traits::Signed;
use serde::Serialize;
@@ -187,9 +178,9 @@ struct NameMaker {
}

impl NameMaker {
fn make(&mut self, name: impl Into<String>) -> Ident {
let mut num = 0usize;
let name: String = name.into();
fn make(&mut self, name: NameId) -> Ident {
let mut num: usize = name.1;
let name = String::from(&*name.0);
// remove all invalid characters -- all valid characters are ASCII, so we can just remove invalid bytes
let mut name = String::from_iter(
name.bytes()
@@ -221,7 +212,7 @@ impl NameMaker {
#[derive(Default)]
struct Namespace {
name_maker: NameMaker,
map: HashMap<NameOptId, Ident>,
map: HashMap<NameId, Ident>,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -247,11 +238,10 @@ impl From<PortName> for Ident {
}

impl Namespace {
fn get(&mut self, name: impl Into<NameOptId>) -> Ident {
let name: NameOptId = name.into();
fn get(&mut self, name: NameId) -> Ident {
#[cold]
fn make(name_maker: &mut NameMaker, name: NameOptId) -> Ident {
name_maker.make(name.0)
fn make(name_maker: &mut NameMaker, name: NameId) -> Ident {
name_maker.make(name)
}
*self
.map
@@ -259,7 +249,7 @@ impl Namespace {
.or_insert_with(|| make(&mut self.name_maker, name))
}
fn make_new(&mut self, prefix: &str) -> Ident {
self.name_maker.make(prefix)
self.name_maker.make(NameId(prefix.intern(), 0))
}
}

@@ -369,7 +359,7 @@ impl TypeState {
Ident(Intern::intern_owned(format!("Ty{id}")))
}
fn get_bundle_field(&mut self, ty: Bundle, name: Interned<str>) -> Ident {
self.bundle_ns(ty).borrow_mut().get(name)
self.bundle_ns(ty).borrow_mut().get(NameId(name, 0))
}
fn bundle_def(&self, ty: Bundle) -> (Ident, Rc<RefCell<Namespace>>) {
self.bundle_defs.get_or_make(ty, |&ty, definitions| {
@@ -383,7 +373,7 @@ impl TypeState {
if flipped {
body.push_str("flip ");
}
write!(body, "{}: ", ns.get(name)).unwrap();
write!(body, "{}: ", ns.get(NameId(name, 0))).unwrap();
body.push_str(&self.ty(ty));
}
body.push('}');
@@ -407,7 +397,7 @@ impl TypeState {
for EnumVariant { name, ty } in ty.variants() {
body.push_str(separator);
separator = ", ";
write!(body, "{}", variants.get(name)).unwrap();
write!(body, "{}", variants.get(NameId(name, 0))).unwrap();
if let Some(ty) = ty {
body.push_str(": ");
body.push_str(&self.ty(ty));
@@ -429,7 +419,11 @@ impl TypeState {
self.enum_def(ty).0
}
fn get_enum_variant(&mut self, ty: Enum, name: Interned<str>) -> Ident {
self.enum_def(ty).1.variants.borrow_mut().get(name)
self.enum_def(ty)
.1
.variants
.borrow_mut()
.get(NameId(name, 0))
}
fn ty<T: Type>(&self, ty: T) -> String {
match ty.canonical() {
@@ -482,7 +476,6 @@ trait WrappedFileBackendTrait {
circuit_name: String,
contents: String,
) -> Result<(), WrappedError>;
fn simplify_enums_error(&mut self, error: SimplifyEnumsError) -> WrappedError;
}

struct WrappedFileBackend<B: FileBackendTrait> {
@@ -540,11 +533,6 @@ impl<B: FileBackendTrait> WrappedFileBackendTrait for WrappedFileBackend<B> {
WrappedError
})
}

fn simplify_enums_error(&mut self, error: SimplifyEnumsError) -> WrappedError {
self.error = Err(error.into());
WrappedError
}
}

#[derive(Clone)]
@@ -666,17 +654,6 @@ enum AnnotationData {
},
#[serde(rename = "firrtl.transforms.DontTouchAnnotation")]
DontTouch,
#[serde(rename = "firrtl.AttributeAnnotation")]
AttributeAnnotation { description: Interned<str> },
#[serde(rename = "firrtl.transforms.BlackBoxInlineAnno")]
BlackBoxInlineAnno {
name: Interned<str>,
text: Interned<str>,
},
#[serde(rename = "firrtl.transforms.BlackBoxPathAnno")]
BlackBoxPathAnno { path: Interned<str> },
#[serde(rename = "firrtl.DocStringAnnotation")]
DocStringAnnotation { description: Interned<str> },
#[allow(dead_code)]
#[serde(untagged)]
Other {
@@ -687,7 +664,7 @@ enum AnnotationData {
}

#[derive(Serialize)]
struct FirrtlAnnotation {
struct Annotation {
#[serde(flatten)]
data: AnnotationData,
target: AnnotationTarget,
@@ -702,7 +679,7 @@ struct Exporter<'a> {
module: ModuleState,
type_state: TypeState,
circuit_name: Ident,
annotations: Vec<FirrtlAnnotation>,
annotations: Vec<Annotation>,
}

struct PushIndent<'a> {
@@ -926,7 +903,7 @@ impl<'a> Exporter<'a> {
) in expr.field_values().into_iter().zip(ty.fields())
{
debug_assert!(!flipped, "can't have bundle literal with flipped field -- this should have been caught in BundleLiteral::new_unchecked");
let name = bundle_ns.borrow_mut().get(name);
let name = bundle_ns.borrow_mut().get(NameId(name, 0));
let field_value = self.expr(Expr::canonical(field_value), definitions, const_ty);
definitions.add_definition_line(format_args!("connect {ident}.{name}, {field_value}"));
}
@@ -935,20 +912,6 @@ impl<'a> Exporter<'a> {
}
ident.to_string()
}
fn uninit_expr(
&mut self,
expr: ops::Uninit,
definitions: &RcDefinitions,
const_ty: bool,
) -> String {
let ident = self.module.ns.make_new("_uninit_expr");
let ty = expr.ty();
let ty_ident = self.type_state.ty(ty);
let const_ = if const_ty { "const " } else { "" };
definitions.add_definition_line(format_args!("wire {ident}: {const_}{ty_ident}"));
definitions.add_definition_line(format_args!("invalidate {ident}"));
ident.to_string()
}
fn enum_literal_expr(
&mut self,
expr: ops::EnumLiteral<Enum>,
@@ -1404,7 +1367,6 @@ impl<'a> Exporter<'a> {
ExprEnum::EnumLiteral(enum_literal) => {
self.enum_literal_expr(enum_literal, definitions, const_ty)
}
ExprEnum::Uninit(uninit) => self.uninit_expr(uninit, definitions, const_ty),
ExprEnum::NotU(expr) => self.expr_unary("not", expr.arg(), definitions, const_ty),
ExprEnum::NotS(expr) => self.expr_unary("not", expr.arg(), definitions, const_ty),
ExprEnum::NotB(expr) => self.expr_unary("not", expr.arg(), definitions, const_ty),
@@ -1739,14 +1701,6 @@ impl<'a> Exporter<'a> {
assert!(!const_ty, "not a constant");
self.module.ns.get(expr.scoped_name().1).to_string()
}
ExprEnum::RegSync(expr) => {
assert!(!const_ty, "not a constant");
self.module.ns.get(expr.scoped_name().1).to_string()
}
ExprEnum::RegAsync(expr) => {
assert!(!const_ty, "not a constant");
self.module.ns.get(expr.scoped_name().1).to_string()
}
ExprEnum::MemPort(expr) => {
assert!(!const_ty, "not a constant");
let mem_name = self.module.ns.get(expr.mem_name().1);
@@ -1790,7 +1744,7 @@ impl<'a> Exporter<'a> {
memory_name.0.to_string(),
contents,
)?;
self.annotations.push(FirrtlAnnotation {
self.annotations.push(Annotation {
data: AnnotationData::MemoryFileInline {
filename,
hex_or_binary,
@@ -1809,25 +1763,14 @@ impl<'a> Exporter<'a> {
});
Ok(())
}
fn annotation(&mut self, path: AnnotationTargetPath, annotation: &Annotation) {
fn annotation(
&mut self,
path: AnnotationTargetPath,
annotation: &crate::annotations::Annotation,
) {
let data = match annotation {
Annotation::DontTouch(DontTouchAnnotation {}) => AnnotationData::DontTouch,
Annotation::SVAttribute(SVAttributeAnnotation { text }) => {
AnnotationData::AttributeAnnotation { description: *text }
}
Annotation::BlackBoxInline(BlackBoxInlineAnnotation { path, text }) => {
AnnotationData::BlackBoxInlineAnno {
name: *path,
text: *text,
}
}
Annotation::BlackBoxPath(BlackBoxPathAnnotation { path }) => {
AnnotationData::BlackBoxPathAnno { path: *path }
}
Annotation::DocString(DocStringAnnotation { text }) => {
AnnotationData::DocStringAnnotation { description: *text }
}
Annotation::CustomFirrtl(CustomFirrtlAnnotation {
crate::annotations::Annotation::DontTouch => AnnotationData::DontTouch,
crate::annotations::Annotation::CustomFirrtl(CustomFirrtlAnnotation {
class,
additional_fields,
}) => AnnotationData::Other {
@@ -1835,7 +1778,7 @@ impl<'a> Exporter<'a> {
additional_fields: (*additional_fields).into(),
},
};
self.annotations.push(FirrtlAnnotation {
self.annotations.push(Annotation {
data,
target: AnnotationTarget {
circuit: self.circuit_name,
@@ -1856,8 +1799,6 @@ impl<'a> Exporter<'a> {
self.module.ns.get(v.mem_name().1)
}
TargetBase::Reg(v) => self.module.ns.get(v.name_id()),
TargetBase::RegSync(v) => self.module.ns.get(v.name_id()),
TargetBase::RegAsync(v) => self.module.ns.get(v.name_id()),
TargetBase::Wire(v) => self.module.ns.get(v.name_id()),
TargetBase::Instance(v) => self.module.ns.get(v.name_id()),
};
@@ -1966,37 +1907,6 @@ impl<'a> Exporter<'a> {
drop(memory_indent);
Ok(body)
}
fn stmt_reg<R: ResetType>(
&mut self,
stmt_reg: StmtReg<R>,
module_name: Ident,
definitions: &RcDefinitions,
body: &mut String,
) {
let StmtReg { annotations, reg } = stmt_reg;
let indent = self.indent;
self.targeted_annotations(module_name, vec![], &annotations);
let name = self.module.ns.get(reg.name_id());
let ty = self.type_state.ty(reg.ty());
let clk = self.expr(Expr::canonical(reg.clock_domain().clk), definitions, false);
if let Some(init) = reg.init() {
let rst = self.expr(Expr::canonical(reg.clock_domain().rst), definitions, false);
let init = self.expr(init, definitions, false);
writeln!(
body,
"{indent}regreset {name}: {ty}, {clk}, {rst}, {init}{}",
FileInfo::new(reg.source_location()),
)
.unwrap();
} else {
writeln!(
body,
"{indent}reg {name}: {ty}, {clk}{}",
FileInfo::new(reg.source_location()),
)
.unwrap();
}
}
fn block(
&mut self,
module: Interned<Module<Bundle>>,
@@ -2020,15 +1930,6 @@ impl<'a> Exporter<'a> {
rhs,
source_location,
}) => {
if Expr::ty(lhs) != Expr::ty(rhs) {
writeln!(
body,
"{indent}; connect different types:\n{indent}; lhs: {:?}\n{indent}; rhs: {:?}",
Expr::ty(lhs),
Expr::ty(rhs),
)
.unwrap();
}
let lhs = self.expr(lhs, &definitions, false);
let rhs = self.expr(rhs, &definitions, false);
writeln!(
@@ -2038,33 +1939,6 @@ impl<'a> Exporter<'a> {
)
.unwrap();
}
Stmt::Formal(StmtFormal {
kind,
clk,
pred,
en,
text,
source_location,
}) => {
let clk = self.expr(Expr::canonical(clk), &definitions, false);
let pred = self.expr(Expr::canonical(pred), &definitions, false);
let en = self.expr(Expr::canonical(en), &definitions, false);
let kind = match kind {
FormalKind::Assert => "assert",
FormalKind::Assume => "assume",
FormalKind::Cover => "cover",
};
let text = EscapedString {
value: &text,
raw: false,
};
writeln!(
body,
"{indent}{kind}({clk}, {pred}, {en}, {text}){}",
FileInfo::new(source_location),
)
.unwrap();
}
Stmt::If(StmtIf {
mut cond,
mut source_location,
@@ -2167,14 +2041,30 @@ impl<'a> Exporter<'a> {
)
.unwrap();
}
Stmt::Declaration(StmtDeclaration::Reg(stmt_reg)) => {
self.stmt_reg(stmt_reg, module_name, &definitions, &mut body);
Stmt::Declaration(StmtDeclaration::Reg(StmtReg { annotations, reg })) => {
self.targeted_annotations(module_name, vec![], &annotations);
let name = self.module.ns.get(reg.name_id());
let ty = self.type_state.ty(reg.ty());
let clk =
self.expr(Expr::canonical(reg.clock_domain().clk), &definitions, false);
if let Some(init) = reg.init() {
let rst =
self.expr(Expr::canonical(reg.clock_domain().rst), &definitions, false);
let init = self.expr(init, &definitions, false);
writeln!(
body,
"{indent}regreset {name}: {ty}, {clk}, {rst}, {init}{}",
FileInfo::new(reg.source_location()),
)
.unwrap();
} else {
writeln!(
body,
"{indent}reg {name}: {ty}, {clk}{}",
FileInfo::new(reg.source_location()),
)
.unwrap();
}
Stmt::Declaration(StmtDeclaration::RegSync(stmt_reg)) => {
self.stmt_reg(stmt_reg, module_name, &definitions, &mut body);
}
Stmt::Declaration(StmtDeclaration::RegAsync(stmt_reg)) => {
self.stmt_reg(stmt_reg, module_name, &definitions, &mut body);
}
Stmt::Declaration(StmtDeclaration::Instance(StmtInstance {
annotations,
@@ -2221,7 +2111,7 @@ impl<'a> Exporter<'a> {
} in module.module_io().iter()
{
self.targeted_annotations(module_name, vec![], annotations);
let name = self.module.ns.get(module_io.name_id());
let name = self.module.ns.get(NameId(module_io.name(), 0));
let ty = self.type_state.ty(module_io.ty());
if module_io.is_input() {
writeln!(
@@ -2293,7 +2183,7 @@ impl<'a> Exporter<'a> {
}

pub trait FileBackendTrait {
type Error: From<SimplifyEnumsError>;
type Error;
type Path: AsRef<Self::Path> + fmt::Debug + ?Sized;
type PathBuf: AsRef<Self::Path> + fmt::Debug;
fn path_to_string(&mut self, path: &Self::Path) -> Result<String, Self::Error>;
@@ -2368,7 +2258,6 @@ impl<T: ?Sized + FileBackendTrait> FileBackendTrait for &'_ mut T {
#[non_exhaustive]
pub struct FileBackend {
pub dir_path: PathBuf,
pub circuit_name: Option<String>,
pub top_fir_file_stem: Option<String>,
}

@@ -2376,7 +2265,6 @@ impl FileBackend {
pub fn new(dir_path: impl AsRef<Path>) -> Self {
Self {
dir_path: dir_path.as_ref().to_owned(),
circuit_name: None,
top_fir_file_stem: None,
}
}
@@ -2412,10 +2300,7 @@ impl FileBackendTrait for FileBackend {
circuit_name: String,
contents: String,
) -> Result<(), Self::Error> {
let top_fir_file_stem = self
.top_fir_file_stem
.get_or_insert_with(|| circuit_name.clone());
self.circuit_name = Some(circuit_name);
let top_fir_file_stem = self.top_fir_file_stem.get_or_insert(circuit_name);
let mut path = self.dir_path.join(top_fir_file_stem);
if let Some(parent) = path.parent().filter(|v| !v.as_os_str().is_empty()) {
fs::create_dir_all(parent)?;
@@ -2426,17 +2311,15 @@ impl FileBackendTrait for FileBackend {
}

#[doc(hidden)]
#[derive(PartialEq, Eq, Clone, Copy)]
#[derive(PartialEq, Eq)]
pub struct TestBackendPrivate {
pub module_var_name: &'static str,
pub included_fields: &'static [&'static str],
}

impl Default for TestBackendPrivate {
fn default() -> Self {
Self {
module_var_name: "m",
included_fields: &[],
}
}
}
@@ -2445,7 +2328,6 @@ impl Default for TestBackendPrivate {
pub struct TestBackend {
pub files: BTreeMap<String, String>,
pub error_after: Option<i64>,
pub options: ExportOptions,
#[doc(hidden)]
/// `#[non_exhaustive]` except allowing struct update syntax
pub __private: TestBackendPrivate,
@@ -2456,12 +2338,7 @@ impl fmt::Debug for TestBackend {
let Self {
files,
error_after,
options,
__private:
TestBackendPrivate {
module_var_name,
included_fields,
},
__private: TestBackendPrivate { module_var_name },
} = self;
writeln!(
f,
@@ -2469,44 +2346,12 @@ impl fmt::Debug for TestBackend {
)?;
writeln!(f, " assert_export_firrtl! {{")?;
writeln!(f, " {module_var_name} =>")?;
if *error_after != Option::default() || included_fields.contains(&"error_after") {
writeln!(f, " error_after: {error_after:?},")?;
}
if *options != ExportOptions::default() || included_fields.contains(&"options") {
struct DebugWithForceIncludeFields<'a> {
options: ExportOptions,
included_fields: &'a [&'a str],
}
impl fmt::Debug for DebugWithForceIncludeFields<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.options.debug_fmt(f, |field| {
self.included_fields.iter().any(|included_field| {
if let Some(("options", suffix)) = included_field.split_once(".") {
suffix == field
} else {
false
}
})
})
}
}
let options_str = format!(
"{:#?}",
DebugWithForceIncludeFields {
options: *options,
included_fields
}
);
let mut sep = " options: ";
for line in options_str.lines() {
write!(f, "{sep}{line}")?;
sep = "\n ";
}
writeln!(f, ",")?;
}
for (file, content) in files {
writeln!(f, " {file:?}: {:?},", DebugAsRawString(content))?;
}
if *error_after != Option::default() {
writeln!(f, " error_after: {error_after:?},")?;
}
write!(f, " }};")
}
}
@@ -2522,12 +2367,6 @@ impl fmt::Display for TestBackendError {

impl Error for TestBackendError {}

impl From<SimplifyEnumsError> for TestBackendError {
fn from(value: SimplifyEnumsError) -> Self {
TestBackendError(value.to_string())
}
}

impl TestBackend {
#[track_caller]
pub fn step_error_after(&mut self, args: &dyn fmt::Debug) -> Result<(), TestBackendError> {
@@ -2584,21 +2423,9 @@ impl FileBackendTrait for TestBackend {

fn export_impl(
file_backend: &mut dyn WrappedFileBackendTrait,
mut top_module: Interned<Module<Bundle>>,
options: ExportOptions,
top_module: Interned<Module<Bundle>>,
) -> Result<(), WrappedError> {
let ExportOptions {
simplify_memories: do_simplify_memories,
simplify_enums: do_simplify_enums,
__private: _,
} = options;
if let Some(kind) = do_simplify_enums {
top_module =
simplify_enums(top_module, kind).map_err(|e| file_backend.simplify_enums_error(e))?;
}
if do_simplify_memories {
top_module = simplify_memories(top_module);
}
let top_module = simplify_memories(top_module);
let indent_depth = Cell::new(0);
let mut global_ns = Namespace::default();
let circuit_name = global_ns.get(top_module.name_id());
@@ -2619,154 +2446,20 @@ fn export_impl(
.run(top_module)
}

#[derive(Clone)]
struct OptionSimplifyEnumsKindValueParser;

impl OptionSimplifyEnumsKindValueParser {
const NONE_NAME: &'static str = "off";
}

impl clap::builder::TypedValueParser for OptionSimplifyEnumsKindValueParser {
type Value = Option<SimplifyEnumsKind>;

fn parse_ref(
&self,
cmd: &clap::Command,
arg: Option<&clap::Arg>,
value: &std::ffi::OsStr,
) -> Result<Self::Value, clap::Error> {
if value == Self::NONE_NAME {
Ok(None)
} else {
Ok(Some(
value_parser!(SimplifyEnumsKind).parse_ref(cmd, arg, value)?,
))
}
}

fn possible_values(
&self,
) -> Option<Box<dyn Iterator<Item = clap::builder::PossibleValue> + '_>> {
Some(Box::new(
[Self::NONE_NAME.into()]
.into_iter()
.chain(value_parser!(SimplifyEnumsKind).possible_values()?)
.collect::<Vec<_>>()
.into_iter(),
))
}
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct ExportOptionsPrivate(());

#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ExportOptions {
#[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)]
pub simplify_memories: bool,
#[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")]
pub simplify_enums: std::option::Option<SimplifyEnumsKind>,
#[doc(hidden)]
#[clap(skip = ExportOptionsPrivate(()))]
/// `#[non_exhaustive]` except allowing struct update syntax
pub __private: ExportOptionsPrivate,
}

impl fmt::Debug for ExportOptions {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.debug_fmt(f, |_| false)
}
}

impl ExportOptions {
fn debug_fmt(
&self,
f: &mut fmt::Formatter<'_>,
force_include_field: impl Fn(&str) -> bool,
) -> fmt::Result {
let Self {
simplify_memories,
simplify_enums,
__private: _,
} = *self;
f.write_str("ExportOptions {")?;
let mut sep = if f.alternate() { "\n " } else { " " };
let comma_sep = if f.alternate() { ",\n " } else { ", " };
let default = ExportOptions::default();
if simplify_memories != default.simplify_memories
|| force_include_field("simplify_memories")
{
write!(f, "{sep}simplify_memories: {:?}", simplify_memories)?;
sep = comma_sep;
}
if simplify_enums != default.simplify_enums || force_include_field("simplify_enums") {
write!(f, "{sep}simplify_enums: ")?;
macro_rules! debug_cases {
($($ident:ident $(($($args:tt)*))?,)*) => {
match simplify_enums {
// use more complex stringify to avoid the compiler inserting spaces
$($ident $(($($args)*))? => {
f.write_str(concat!(
stringify!($ident),
$("(",
$(stringify!($args),)*
")")?
))?;
})*
}
};
}
debug_cases! {
Some(SimplifyEnumsKind::SimplifyToEnumsWithNoBody),
Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
Some(SimplifyEnumsKind::ReplaceWithUInt),
None,
}
sep = comma_sep;
}
write!(
f,
"{sep}..ExportOptions::default(){}",
if f.alternate() { "\n}" } else { " }" }
)
}
}

impl Default for ExportOptions {
fn default() -> Self {
Self {
simplify_memories: true,
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
__private: ExportOptionsPrivate(()),
}
}
}

pub fn export<T: BundleType, B: FileBackendTrait>(
file_backend: B,
top_module: &Module<T>,
options: ExportOptions,
) -> Result<B, B::Error> {
let top_module = Intern::intern_sized(top_module.canonical());
WrappedFileBackend::with(file_backend, |file_backend| {
export_impl(file_backend, top_module, options)
export_impl(file_backend, top_module)
})
}

#[doc(hidden)]
#[track_caller]
pub fn assert_export_firrtl_impl<T: BundleType>(top_module: &Module<T>, expected: TestBackend) {
let result = export(
TestBackend {
files: BTreeMap::default(),
error_after: expected.error_after,
options: expected.options,
__private: expected.__private,
},
top_module,
expected.options,
)
.unwrap();
let result = export(TestBackend::default(), top_module).unwrap();
if result != expected {
panic!(
"assert_export_firrtl failed:\nyou can update the expected output by using:\n-------START-------\n{result:?}\n-------END-------"
@@ -2783,69 +2476,21 @@ pub fn make_test_expected_files(v: &[(&str, &str)]) -> BTreeMap<String, String>
macro_rules! assert_export_firrtl {
{
$m:ident =>
$($field:ident: $value:expr,)*
@parsed_fields($($field_strings:expr,)*)
$($file_name:literal: $file_contents:literal,)*
$($field:ident: $value:expr,)*
} => {
$crate::firrtl::assert_export_firrtl_impl(
&$m,
$crate::firrtl::TestBackend {
$($field: $value,)*
files: $crate::firrtl::make_test_expected_files(&[
$(($file_name, $file_contents),)*
]),
$($field: $value,)*
__private: $crate::firrtl::TestBackendPrivate {
module_var_name: stringify!($m),
included_fields: &[$($field_strings,)*],
},
..<$crate::firrtl::TestBackend as $crate::__std::default::Default>::default()
},
);
};
{
$m:ident =>
$($parsed_fields:ident: $parsed_field_values:expr,)*
@parsed_fields($($field_strings:expr,)*)
options: ExportOptions {
$($export_option_fields:ident: $parsed_export_option_field_values:expr,)*
..$export_option_default:expr
},
$($rest:tt)*
} => {
$crate::assert_export_firrtl!(
$m =>
$($parsed_fields: $parsed_field_values,)*
options: ExportOptions {
$($export_option_fields: $parsed_export_option_field_values,)*
..$export_option_default
},
@parsed_fields($($field_strings,)* "options", $(concat!("options.", stringify!($export_option_fields)),)*)
$($rest)*
);
};
{
$m:ident =>
$($parsed_fields:ident: $parsed_field_values:expr,)*
@parsed_fields($($field_strings:expr,)*)
$field:ident: $field_value:expr,
$($rest:tt)*
} => {
$crate::assert_export_firrtl!(
$m =>
$($parsed_fields: $parsed_field_values,)*
$field: $field_value,
@parsed_fields($($field_strings,)* stringify!($field),)
$($rest)*
);
};
{
$m:ident =>
$($rest:tt)*
} => {
$crate::assert_export_firrtl!(
$m =>
@parsed_fields()
$($rest)*
);
};
}

@ -1,247 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
int::BoolOrIntType,
intern::{Intern, Interned, Memoize},
prelude::*,
};
use std::sync::OnceLock;

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormalKind {
Assert,
Assume,
Cover,
}

impl FormalKind {
pub fn as_str(self) -> &'static str {
match self {
Self::Assert => "assert",
Self::Assume => "assume",
Self::Cover => "cover",
}
}
}

#[track_caller]
pub fn formal_stmt_with_enable_and_loc(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
crate::module::add_stmt_formal(crate::module::StmtFormal {
kind,
clk,
pred,
en: en & !formal_reset().cast_to_static::<Bool>(),
text: text.intern(),
source_location,
});
}

#[track_caller]
pub fn formal_stmt_with_enable(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
) {
formal_stmt_with_enable_and_loc(kind, clk, pred, en, text, SourceLocation::caller());
}

#[track_caller]
pub fn formal_stmt_with_loc(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_enable_and_loc(kind, clk, pred, true.to_expr(), text, source_location);
}

#[track_caller]
pub fn formal_stmt(kind: FormalKind, clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
formal_stmt_with_loc(kind, clk, pred, text, SourceLocation::caller());
}

macro_rules! make_formal {
($kind:ident, $formal_stmt_with_enable_and_loc:ident, $formal_stmt_with_enable:ident, $formal_stmt_with_loc:ident, $formal_stmt:ident) => {
#[track_caller]
pub fn $formal_stmt_with_enable_and_loc(
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_enable_and_loc(
FormalKind::$kind,
clk,
pred,
en,
text,
source_location,
);
}
#[track_caller]
pub fn $formal_stmt_with_enable(
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
) {
formal_stmt_with_enable(FormalKind::$kind, clk, pred, en, text);
}
#[track_caller]
pub fn $formal_stmt_with_loc(
clk: Expr<Clock>,
pred: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_loc(FormalKind::$kind, clk, pred, text, source_location);
}
#[track_caller]
pub fn $formal_stmt(clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
formal_stmt(FormalKind::$kind, clk, pred, text);
}
};
}

make_formal!(
Assert,
hdl_assert_with_enable_and_loc,
hdl_assert_with_enable,
hdl_assert_with_loc,
hdl_assert
);

make_formal!(
Assume,
hdl_assume_with_enable_and_loc,
hdl_assume_with_enable,
hdl_assume_with_loc,
hdl_assume
);

make_formal!(
Cover,
hdl_cover_with_enable_and_loc,
hdl_cover_with_enable,
hdl_cover_with_loc,
hdl_cover
);

pub trait MakeFormalExpr: Type {}

impl<T: Type> MakeFormalExpr for T {}

#[hdl]
pub fn formal_global_clock() -> Expr<Clock> {
#[hdl_module(extern)]
fn formal_global_clock() {
#[hdl]
let clk: Clock = m.output();
m.annotate_module(BlackBoxInlineAnnotation {
path: "fayalite_formal_global_clock.v".intern(),
text: r"module __fayalite_formal_global_clock(output clk);
(* gclk *)
reg clk;
endmodule
"
.intern(),
});
m.verilog_name("__fayalite_formal_global_clock");
}
#[hdl]
let formal_global_clock = instance(formal_global_clock());
formal_global_clock.clk
}

#[hdl]
pub fn formal_reset() -> Expr<SyncReset> {
#[hdl_module(extern)]
fn formal_reset() {
#[hdl]
let rst: SyncReset = m.output();
m.annotate_module(BlackBoxInlineAnnotation {
path: "fayalite_formal_reset.v".intern(),
text: r"module __fayalite_formal_reset(output rst);
assign rst = $initstate;
endmodule
"
.intern(),
});
m.verilog_name("__fayalite_formal_reset");
}
static MOD: OnceLock<Interned<Module<formal_reset>>> = OnceLock::new();
#[hdl]
let formal_reset = instance(*MOD.get_or_init(formal_reset));
formal_reset.rst
}

macro_rules! make_any_const_fn {
($ident:ident, $verilog_attribute:literal) => {
#[hdl]
pub fn $ident<T: BoolOrIntType>(ty: T) -> Expr<T> {
#[hdl_module(extern)]
pub(super) fn $ident<T: BoolOrIntType>(ty: T) {
#[hdl]
let out: T = m.output(ty);
let width = ty.width();
let verilog_bitslice = if width == 1 {
String::new()
} else {
format!(" [{}:0]", width - 1)
};
m.annotate_module(BlackBoxInlineAnnotation {
path: Intern::intern_owned(format!(
"fayalite_{}_{width}.v",
stringify!($ident),
)),
text: Intern::intern_owned(format!(
r"module __fayalite_{}_{width}(output{verilog_bitslice} out);
(* {} *)
reg{verilog_bitslice} out;
endmodule
",
stringify!($ident),
$verilog_attribute,
)),
});
m.verilog_name(format!("__fayalite_{}_{width}", stringify!($ident)));
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct TheMemoize<T>(T);
impl<T: BoolOrIntType> Memoize for TheMemoize<T> {
type Input = ();
type InputOwned = ();
type Output = Option<Interned<Module<$ident<T>>>>;
fn inner(self, _input: &Self::Input) -> Self::Output {
if self.0.width() == 0 {
None
} else {
Some($ident(self.0))
}
}
}
let Some(module) = TheMemoize(ty).get_owned(()) else {
return 0_hdl_u0.cast_bits_to(ty);
};
#[hdl]
let $ident = instance(module);
$ident.out
}
};
}

make_any_const_fn!(any_const, "anyconst");
make_any_const_fn!(any_seq, "anyseq");
make_any_const_fn!(all_const, "allconst");
make_any_const_fn!(all_seq, "allseq");

@ -18,7 +18,6 @@ use std::{
borrow::{BorrowMut, Cow},
fmt,
marker::PhantomData,
num::NonZero,
ops::{Bound, Index, Not, Range, RangeBounds, RangeInclusive},
sync::Arc,
};
@ -32,23 +31,8 @@ mod sealed {
pub const DYN_SIZE: usize = !0;
pub type DynSize = ConstUsize<DYN_SIZE>;

pub trait KnownSize:
GenericConstUsize + sealed::SizeTypeSealed + sealed::SizeSealed + Default
{
pub trait KnownSize: GenericConstUsize + Size<SizeType = Self> {
const SIZE: Self;
type ArrayMatch<Element: Type>: AsRef<[Expr<Element>]>
+ AsMut<[Expr<Element>]>
+ BorrowMut<[Expr<Element>]>
+ 'static
+ Send
+ Sync
+ Eq
+ Clone
+ std::hash::Hash
+ std::fmt::Debug
+ IntoIterator<Item = Expr<Element>>
+ TryFrom<Vec<Expr<Element>>>
+ Into<Vec<Expr<Element>>>;
}

macro_rules! known_widths {
@ -59,7 +43,6 @@ macro_rules! known_widths {
|
|||
v
|
||||
}> {
|
||||
const SIZE: Self = Self;
|
||||
type ArrayMatch<Element: Type> = [Expr<Element>; Self::VALUE];
|
||||
}
|
||||
};
|
||||
([2 $($rest:tt)*] $($bits:literal)+) => {
|
||||
|
@ -71,7 +54,6 @@ macro_rules! known_widths {
|
|||
known_widths!([$($rest)*] 1);
|
||||
impl KnownSize for ConstUsize<{2 $(* $rest)*}> {
|
||||
const SIZE: Self = Self;
|
||||
type ArrayMatch<Element: Type> = [Expr<Element>; Self::VALUE];
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -141,24 +123,30 @@ impl<const VALUE: usize> sealed::SizeSealed for ConstUsize<VALUE> {}
|
|||
|
||||
impl<const VALUE: usize> sealed::SizeTypeSealed for ConstUsize<VALUE> {}
|
||||
|
||||
impl<T: KnownSize> SizeType for T {
|
||||
type Size = T;
|
||||
impl<const VALUE: usize> SizeType for ConstUsize<VALUE>
|
||||
where
|
||||
ConstUsize<VALUE>: KnownSize,
|
||||
{
|
||||
type Size = ConstUsize<VALUE>;
|
||||
}
|
||||
|
||||
impl<T: KnownSize> Size for T {
|
||||
type ArrayMatch<Element: Type> = <T as KnownSize>::ArrayMatch<Element>;
|
||||
impl<const VALUE: usize> Size for ConstUsize<VALUE>
|
||||
where
|
||||
ConstUsize<VALUE>: KnownSize,
|
||||
{
|
||||
type ArrayMatch<Element: Type> = [Expr<Element>; VALUE];
|
||||
|
||||
const KNOWN_VALUE: Option<usize> = Some(T::VALUE);
|
||||
const KNOWN_VALUE: Option<usize> = Some(VALUE);
|
||||
|
||||
type SizeType = T;
|
||||
type SizeType = ConstUsize<VALUE>;
|
||||
|
||||
fn as_usize(_size_type: Self::SizeType) -> usize {
|
||||
T::VALUE
|
||||
VALUE
|
||||
}
|
||||
|
||||
fn try_from_usize(v: usize) -> Option<Self::SizeType> {
|
||||
if v == T::VALUE {
|
||||
Some(T::SIZE)
|
||||
if v == VALUE {
|
||||
Some(Self::SizeType::default())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -202,17 +190,17 @@ macro_rules! impl_int {
|
|||
bit_width: self.width(),
|
||||
}
|
||||
}
|
||||
pub fn bits_from_bigint_wrapping(self, v: &BigInt) -> BitVec {
|
||||
pub fn bits_from_bigint_wrapping(self, v: BigInt) -> BitVec {
|
||||
BoolOrIntType::bits_from_bigint_wrapping(self, v)
|
||||
}
|
||||
pub fn from_bigint_wrapping(self, v: &BigInt) -> $value<Width> {
|
||||
pub fn from_bigint_wrapping(self, v: BigInt) -> $value<Width> {
|
||||
$value {
|
||||
bits: Arc::new(self.bits_from_bigint_wrapping(v)),
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn from_int_wrapping(self, v: impl Into<BigInt>) -> $value<Width> {
|
||||
self.from_bigint_wrapping(&v.into())
|
||||
self.from_bigint_wrapping(v.into())
|
||||
}
|
||||
pub fn zero(self) -> $value<Width> {
|
||||
self.from_int_wrapping(0u8)
|
||||
|
@ -227,29 +215,12 @@ macro_rules! impl_int {
|
|||
impl<Width: Size> BoolOrIntType for $name<Width> {
|
||||
type Width = Width;
|
||||
type Signed = ConstBool<$SIGNED>;
|
||||
type Value = $value<Width>;
|
||||
fn width(self) -> usize {
|
||||
$name::width(self)
|
||||
}
|
||||
fn new(width: Width::SizeType) -> Self {
|
||||
$name { width }
|
||||
}
|
||||
fn value_from_bigint_wrapping(self, v: &BigInt) -> Self::Value {
|
||||
$value::<Width>::from_bigint_wrapping(self, v)
|
||||
}
|
||||
fn bits_to_value(bits: Cow<'_, BitSlice>) -> Self::Value {
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
struct MemoizeBitsToValue;
|
||||
impl Memoize for MemoizeBitsToValue {
|
||||
type Input = BitSlice;
|
||||
type InputOwned = BitVec;
|
||||
type Output = Arc<BitVec>;
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
Arc::new(input.to_bitvec())
|
||||
}
|
||||
}
|
||||
$value::new(MemoizeBitsToValue.get_cow(bits))
|
||||
}
|
||||
fn bits_to_expr(bits: Cow<'_, BitSlice>) -> Expr<Self> {
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
struct MemoizeBitsToExpr;
|
||||
|
@ -280,7 +251,9 @@ macro_rules! impl_int {
|
|||
|
||||
impl<Width: KnownSize> $name<Width> {
|
||||
pub fn new_static() -> Self {
|
||||
Self { width: Width::SIZE }
|
||||
Self {
|
||||
width: Width::SizeType::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -327,7 +300,7 @@ macro_rules! impl_int {
|
|||
type Output = $name<Width::Size>;
|
||||
|
||||
fn index(&self, width: Width) -> &Self::Output {
|
||||
Interned::into_inner(Intern::intern_sized($name::new(width)))
|
||||
Interned::<_>::into_inner(Intern::intern_sized($name::new(width)))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -351,24 +324,6 @@ macro_rules! impl_int {
|
|||
}
|
||||
}
|
||||
|
||||
impl<Width: Size> PartialOrd for $value<Width> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Width: Size> Ord for $value<Width> {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.to_bigint().cmp(&other.to_bigint())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Width: Size> From<$value<Width>> for BigInt {
|
||||
fn from(v: $value<Width>) -> BigInt {
|
||||
v.to_bigint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Width: Size> $value<Width> {
|
||||
pub fn width(&self) -> usize {
|
||||
if let Some(retval) = Width::KNOWN_VALUE {
|
||||
|
@ -378,7 +333,7 @@ macro_rules! impl_int {
|
|||
self.bits.len()
|
||||
}
|
||||
}
|
||||
pub fn from_bigint_wrapping(ty: $name<Width>, v: &BigInt) -> $value<Width> {
|
||||
pub fn from_bigint_wrapping(ty: $name<Width>, v: BigInt) -> $value<Width> {
|
||||
ty.from_bigint_wrapping(v)
|
||||
}
|
||||
pub fn to_bigint(&self) -> BigInt {
|
||||
|
@ -488,10 +443,7 @@ impl SInt {
|
|||
v.not().bits().checked_add(1).expect("too big")
|
||||
}
|
||||
Sign::NoSign => 0,
|
||||
Sign::Plus => {
|
||||
// account for sign bit
|
||||
v.bits().checked_add(1).expect("too big")
|
||||
}
|
||||
Sign::Plus => v.bits(),
|
||||
}
|
||||
.try_into()
|
||||
.expect("too big"),
|
||||
|
@ -516,24 +468,7 @@ impl SInt {
|
|||
}
|
||||
|
||||
macro_rules! impl_prim_int {
|
||||
(
|
||||
$(#[$meta:meta])*
|
||||
$prim_int:ident, $ty:ty
|
||||
) => {
|
||||
impl From<$prim_int> for <$ty as BoolOrIntType>::Value {
|
||||
fn from(v: $prim_int) -> Self {
|
||||
<$ty>::le_bytes_to_value_wrapping(
|
||||
&v.to_le_bytes(),
|
||||
<$ty as BoolOrIntType>::Width::VALUE,
|
||||
)
|
||||
}
|
||||
}
|
||||
impl From<NonZero<$prim_int>> for <$ty as BoolOrIntType>::Value {
|
||||
fn from(v: NonZero<$prim_int>) -> Self {
|
||||
v.get().into()
|
||||
}
|
||||
}
|
||||
$(#[$meta])*
|
||||
($prim_int:ident, $ty:ty) => {
|
||||
impl ToExpr for $prim_int {
|
||||
type Type = $ty;
|
||||
|
||||
|
@ -544,14 +479,6 @@ macro_rules! impl_prim_int {
|
|||
)
|
||||
}
|
||||
}
|
||||
$(#[$meta])*
|
||||
impl ToExpr for NonZero<$prim_int> {
|
||||
type Type = $ty;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
self.get().to_expr()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -566,35 +493,16 @@ impl_prim_int!(i32, SInt<32>);
|
|||
impl_prim_int!(i64, SInt<64>);
|
||||
impl_prim_int!(i128, SInt<128>);
|
||||
|
||||
impl_prim_int!(
|
||||
/// for portability reasons, [`usize`] always translates to [`UInt<64>`][type@UInt]
|
||||
usize, UInt<64>
|
||||
);
|
||||
|
||||
impl_prim_int!(
|
||||
/// for portability reasons, [`isize`] always translates to [`SInt<64>`][type@SInt]
|
||||
isize, SInt<64>
|
||||
);
|
||||
|
||||
pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed {
|
||||
type Width: Size;
|
||||
type Signed: GenericConstBool;
|
||||
type Value: Clone
|
||||
+ Ord
|
||||
+ std::hash::Hash
|
||||
+ fmt::Debug
|
||||
+ Send
|
||||
+ Sync
|
||||
+ 'static
|
||||
+ ToExpr<Type = Self>
|
||||
+ Into<BigInt>;
|
||||
fn width(self) -> usize;
|
||||
fn new(width: <Self::Width as Size>::SizeType) -> Self;
|
||||
fn new_static() -> Self
|
||||
where
|
||||
Self::Width: KnownSize + Size<SizeType = Self::Width>,
|
||||
Self::Width: KnownSize,
|
||||
{
|
||||
Self::new(Self::Width::default())
|
||||
Self::new(<Self::Width as Size>::SizeType::default())
|
||||
}
|
||||
fn as_same_width_sint(self) -> SIntType<Self::Width> {
|
||||
SIntType::new(Self::Width::from_usize(self.width()))
|
||||
|
@ -602,24 +510,17 @@ pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed {
|
|||
fn as_same_width_uint(self) -> UIntType<Self::Width> {
|
||||
UIntType::new(Self::Width::from_usize(self.width()))
|
||||
}
|
||||
fn value_from_int_wrapping(self, v: impl Into<BigInt>) -> Self::Value {
|
||||
self.value_from_bigint_wrapping(&v.into())
|
||||
}
|
||||
fn value_from_bigint_wrapping(self, v: &BigInt) -> Self::Value;
|
||||
fn bits_from_bigint_wrapping(self, v: &BigInt) -> BitVec {
|
||||
let mut bits = BitVec::repeat(false, self.width());
|
||||
Self::copy_bits_from_bigint_wrapping(v, &mut bits);
|
||||
bits
|
||||
}
|
||||
fn copy_bits_from_bigint_wrapping(v: &BigInt, bits: &mut BitSlice) {
|
||||
let width = bits.len();
|
||||
fn bits_from_bigint_wrapping(self, v: BigInt) -> BitVec {
|
||||
let width = self.width();
|
||||
let mut bytes = v.to_signed_bytes_le();
|
||||
bytes.resize(
|
||||
width.div_ceil(u8::BITS as usize),
|
||||
if v.is_negative() { 0xFF } else { 0 },
|
||||
);
|
||||
let bitslice = &BitSlice::<u8, Lsb0>::from_slice(&bytes)[..width];
|
||||
bits.clone_from_bitslice(bitslice);
|
||||
let mut bits = BitVec::new();
|
||||
bits.extend_from_bitslice(bitslice);
|
||||
bits
|
||||
}
|
||||
fn bits_to_bigint(bits: &BitSlice) -> BigInt {
|
||||
let sign_byte = if Self::Signed::VALUE && bits.last().as_deref().copied().unwrap_or(false) {
|
||||
|
@ -631,10 +532,9 @@ pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed {
|
|||
BitSlice::<u8, Lsb0>::from_slice_mut(&mut bytes)[..bits.len()].clone_from_bitslice(bits);
|
||||
BigInt::from_signed_bytes_le(&bytes)
|
||||
}
|
||||
fn bits_to_value(bits: Cow<'_, BitSlice>) -> Self::Value;
|
||||
fn bits_to_expr(bits: Cow<'_, BitSlice>) -> Expr<Self>;
|
||||
fn le_bytes_to_bits_wrapping(bytes: &[u8], bit_width: usize) -> BitVec {
|
||||
let bitslice = BitSlice::<u8, Lsb0>::from_slice(bytes);
|
||||
fn le_bytes_to_expr_wrapping(bytes: &[u8], bit_width: usize) -> Expr<Self> {
|
||||
let bitslice = BitSlice::<u8, Lsb0>::from_slice(&bytes);
|
||||
let bitslice = &bitslice[..bit_width.min(bitslice.len())];
|
||||
let mut bits = BitVec::new();
|
||||
bits.extend_from_bitslice(bitslice);
|
||||
|
@ -642,17 +542,7 @@ pub trait BoolOrIntType: Type + sealed::BoolOrIntTypeSealed {
|
|||
bit_width,
|
||||
Self::Signed::VALUE && bits.last().as_deref().copied().unwrap_or(false),
|
||||
);
|
||||
bits
|
||||
}
|
||||
fn le_bytes_to_expr_wrapping(bytes: &[u8], bit_width: usize) -> Expr<Self> {
|
||||
Self::bits_to_expr(Cow::Owned(Self::le_bytes_to_bits_wrapping(
|
||||
bytes, bit_width,
|
||||
)))
|
||||
}
|
||||
fn le_bytes_to_value_wrapping(bytes: &[u8], bit_width: usize) -> Self::Value {
|
||||
Self::bits_to_value(Cow::Owned(Self::le_bytes_to_bits_wrapping(
|
||||
bytes, bit_width,
|
||||
)))
|
||||
Self::bits_to_expr(Cow::Owned(bits))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -704,7 +594,6 @@ impl sealed::BoolOrIntTypeSealed for Bool {}
|
|||
impl BoolOrIntType for Bool {
|
||||
type Width = ConstUsize<1>;
|
||||
type Signed = ConstBool<false>;
|
||||
type Value = bool;
|
||||
|
||||
fn width(self) -> usize {
|
||||
1
|
||||
|
@ -715,19 +604,10 @@ impl BoolOrIntType for Bool {
|
|||
Bool
|
||||
}
|
||||
|
||||
fn value_from_bigint_wrapping(self, v: &BigInt) -> Self::Value {
|
||||
v.bit(0)
|
||||
}
|
||||
|
||||
fn bits_to_expr(bits: Cow<'_, BitSlice>) -> Expr<Self> {
|
||||
assert_eq!(bits.len(), 1);
|
||||
bits[0].to_expr()
|
||||
}
|
||||
|
||||
fn bits_to_value(bits: Cow<'_, BitSlice>) -> Self::Value {
|
||||
assert_eq!(bits.len(), 1);
|
||||
bits[0]
|
||||
}
|
||||
}
|
||||
|
||||
impl Bool {
|
||||
|
@ -773,36 +653,17 @@ impl StaticType for Bool {
|
|||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
pub trait IntCmp<Rhs> {
|
||||
fn cmp_eq(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_ne(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_lt(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_le(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_gt(self, rhs: Rhs) -> Expr<Bool>;
|
||||
fn cmp_ge(self, rhs: Rhs) -> Expr<Bool>;
|
||||
}
|
||||
|
||||
impl ToLiteralBits for bool {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Ok(interned_bit(*self))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_uint_for_value() {
|
||||
assert_eq!(UInt::for_value(0u8).width, 0);
|
||||
assert_eq!(UInt::for_value(1u8).width, 1);
|
||||
assert_eq!(UInt::for_value(2u8).width, 2);
|
||||
assert_eq!(UInt::for_value(3u8).width, 2);
|
||||
assert_eq!(UInt::for_value(4u8).width, 3);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sint_for_value() {
|
||||
assert_eq!(SInt::for_value(-5).width, 4);
|
||||
assert_eq!(SInt::for_value(-4).width, 3);
|
||||
assert_eq!(SInt::for_value(-3).width, 3);
|
||||
assert_eq!(SInt::for_value(-2).width, 2);
|
||||
assert_eq!(SInt::for_value(-1).width, 1);
|
||||
assert_eq!(SInt::for_value(0).width, 0);
|
||||
assert_eq!(SInt::for_value(1).width, 2);
|
||||
assert_eq!(SInt::for_value(2).width, 3);
|
||||
assert_eq!(SInt::for_value(3).width, 3);
|
||||
assert_eq!(SInt::for_value(4).width, 4);
|
||||
}
|
||||
}
File diff suppressed because it is too large

@ -11,59 +11,6 @@ extern crate self as fayalite;
#[doc(hidden)]
pub use std as __std;

#[doc(hidden)]
#[macro_export]
macro_rules! __cfg_expansion_helper {
(
[
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
]
[
$cfg:ident($($expr:tt)*),
$($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)*
]
// pass as tt so we get right span for attribute
$after_evaluation_attr:tt $after_evaluation_body:tt
) => {
#[$cfg($($expr)*)]
$crate::__cfg_expansion_helper! {
[
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
$cfg($($expr)*) = true,
]
[
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
]
$after_evaluation_attr $after_evaluation_body
}
#[$cfg(not($($expr)*))]
$crate::__cfg_expansion_helper! {
[
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
$cfg($($expr)*) = false,
]
[
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
]
$after_evaluation_attr $after_evaluation_body
}
};
(
[
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
]
[]
// don't use #[...] so we get right span for `#` and `[]` of attribute
{$($after_evaluation_attr:tt)*} {$($after_evaluation_body:tt)*}
) => {
$($after_evaluation_attr)*
#[__evaluated_cfgs([
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
])]
$($after_evaluation_body)*
};
}

#[doc(inline)]
/// The `#[hdl_module]` attribute is applied to a Rust function so that that function creates
/// a [`Module`][`::fayalite::module::Module`] when called.
@ -83,6 +30,7 @@ pub struct __;
#[cfg(feature = "unstable-doc")]
pub mod _docs;

// FIXME: finish
pub mod annotations;
pub mod array;
pub mod bundle;
@ -91,17 +39,15 @@ pub mod clock;
pub mod enum_;
pub mod expr;
pub mod firrtl;
pub mod formal;
pub mod int;
pub mod intern;
pub mod memory;
pub mod module;
pub mod prelude;
pub mod reg;
pub mod reset;
pub mod sim;
pub mod source_location;
pub mod testing;
pub mod ty;
pub mod util;
//pub mod valueless;
pub mod prelude;
pub mod wire;

@ -7,7 +7,7 @@ use crate::{
array::{Array, ArrayType},
bundle::{Bundle, BundleType},
clock::Clock,
expr::{ops::BundleLiteral, repeat, Expr, Flow, ToExpr, ToLiteralBits},
expr::{Expr, Flow, ToExpr, ToLiteralBits},
hdl,
int::{Bool, DynSize, Size, UInt, UIntType},
intern::{Intern, Interned},
@ -22,7 +22,7 @@ use std::{
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
num::NonZeroUsize,
num::NonZeroU32,
rc::Rc,
};

@ -478,7 +478,7 @@ struct MemImpl<Element: Type, Len: Size, P> {
|
|||
initial_value: Option<Interned<BitSlice>>,
|
||||
ports: P,
|
||||
read_latency: usize,
|
||||
write_latency: NonZeroUsize,
|
||||
write_latency: NonZeroU32,
|
||||
read_under_write: ReadUnderWrite,
|
||||
port_annotations: Interned<[TargetedAnnotation]>,
|
||||
mem_annotations: Interned<[Annotation]>,
|
||||
|
@ -519,12 +519,7 @@ impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
|
|||
f.debug_struct("Mem")
|
||||
.field("name", scoped_name)
|
||||
.field("array_type", array_type)
|
||||
.field(
|
||||
"initial_value",
|
||||
&initial_value.as_ref().map(|initial_value| {
|
||||
DebugMemoryData::from_bit_slice(*array_type, initial_value)
|
||||
}),
|
||||
)
|
||||
.field("initial_value", initial_value)
|
||||
.field("read_latency", read_latency)
|
||||
.field("write_latency", write_latency)
|
||||
.field("read_under_write", read_under_write)
|
||||
|
@ -567,7 +562,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
|
|||
initial_value: Option<Interned<BitSlice>>,
|
||||
ports: Interned<[MemPort<DynPortType>]>,
|
||||
read_latency: usize,
|
||||
write_latency: NonZeroUsize,
|
||||
write_latency: NonZeroU32,
|
||||
read_under_write: ReadUnderWrite,
|
||||
port_annotations: Interned<[TargetedAnnotation]>,
|
||||
mem_annotations: Interned<[Annotation]>,
|
||||
|
@ -639,7 +634,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
|
|||
self.0.source_location
|
||||
}
|
||||
pub fn array_type(self) -> ArrayType<Element, Len> {
|
||||
self.0.array_type
|
||||
self.0.array_type.clone()
|
||||
}
|
||||
pub fn initial_value(self) -> Option<Interned<BitSlice>> {
|
||||
self.0.initial_value
|
||||
|
@ -650,7 +645,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
|
|||
pub fn read_latency(self) -> usize {
|
||||
self.0.read_latency
|
||||
}
|
||||
pub fn write_latency(self) -> NonZeroUsize {
|
||||
pub fn write_latency(self) -> NonZeroU32 {
|
||||
self.0.write_latency
|
||||
}
|
||||
pub fn read_under_write(self) -> ReadUnderWrite {
|
||||
|
@ -712,7 +707,7 @@ pub(crate) struct MemBuilderTarget {
|
|||
pub(crate) initial_value: Option<Interned<BitSlice>>,
|
||||
pub(crate) ports: Vec<MemPort<DynPortType>>,
|
||||
pub(crate) read_latency: usize,
|
||||
pub(crate) write_latency: NonZeroUsize,
|
||||
pub(crate) write_latency: NonZeroU32,
|
||||
pub(crate) read_under_write: ReadUnderWrite,
|
||||
pub(crate) port_annotations: Vec<TargetedAnnotation>,
|
||||
pub(crate) mem_annotations: Vec<Annotation>,
|
||||
|
@ -872,7 +867,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
initial_value: None,
|
||||
ports: vec![],
|
||||
read_latency: 0,
|
||||
write_latency: NonZeroUsize::new(1).unwrap(),
|
||||
write_latency: NonZeroU32::new(1).unwrap(),
|
||||
read_under_write: ReadUnderWrite::Old,
|
||||
port_annotations: vec![],
|
||||
mem_annotations: vec![],
|
||||
|
@ -992,7 +987,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
#[allow(clippy::result_unit_err)]
|
||||
pub fn get_array_type(&self) -> Result<ArrayType<Element, Len>, ()> {
|
||||
Ok(ArrayType::new(
|
||||
self.mem_element_type,
|
||||
self.mem_element_type.clone(),
|
||||
Len::from_usize(self.get_depth()?),
|
||||
))
|
||||
}
|
||||
|
@ -1035,10 +1030,10 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
pub fn read_latency(&mut self, read_latency: usize) {
|
||||
self.target.borrow_mut().read_latency = read_latency;
|
||||
}
|
||||
pub fn get_write_latency(&self) -> NonZeroUsize {
|
||||
pub fn get_write_latency(&self) -> NonZeroU32 {
|
||||
self.target.borrow().write_latency
|
||||
}
|
||||
pub fn write_latency(&mut self, write_latency: NonZeroUsize) {
|
||||
pub fn write_latency(&mut self, write_latency: NonZeroU32) {
|
||||
self.target.borrow_mut().write_latency = write_latency;
|
||||
}
|
||||
pub fn get_read_under_write(&self) -> ReadUnderWrite {
|
||||
|
@ -1055,90 +1050,3 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
|
|||
.extend(annotations.into_annotations());
|
||||
}
|
||||
}
|
||||
|
||||
pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
|
||||
let canonical_ty = ty.canonical();
|
||||
match canonical_ty {
|
||||
CanonicalType::UInt(_)
|
||||
| CanonicalType::SInt(_)
|
||||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::Enum(_) => Expr::from_canonical(Expr::canonical(value)),
|
||||
CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat(
|
||||
splat_mask(array.element(), value),
|
||||
array.len(),
|
||||
))),
|
||||
CanonicalType::Bundle(bundle) => Expr::from_canonical(Expr::canonical(
|
||||
BundleLiteral::new(
|
||||
bundle.mask_type(),
|
||||
bundle
|
||||
.fields()
|
||||
.iter()
|
||||
.map(|field| splat_mask(field.ty, value))
|
||||
.collect(),
|
||||
)
|
||||
.to_expr(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub trait DebugMemoryDataGetElement {
|
||||
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice;
|
||||
}
|
||||
|
||||
impl<'a, F: ?Sized + Fn(usize, Array) -> &'a BitSlice> DebugMemoryDataGetElement for &'a F {
|
||||
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
|
||||
self(element_index, array_type)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DebugMemoryData<GetElement: DebugMemoryDataGetElement> {
|
||||
pub array_type: Array,
|
||||
pub get_element: GetElement,
|
||||
}
|
||||
|
||||
impl DebugMemoryDataGetElement for &'_ BitSlice {
|
||||
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
|
||||
assert!(element_index < array_type.len());
|
||||
let stride = array_type.element().bit_width();
|
||||
let start = element_index
|
||||
.checked_mul(stride)
|
||||
.expect("memory is too big");
|
||||
let end = start.checked_add(stride).expect("memory is too big");
|
||||
&self[start..end]
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> DebugMemoryData<&'a BitSlice> {
|
||||
pub fn from_bit_slice<T: Type, Depth: Size>(
|
||||
array_type: ArrayType<T, Depth>,
|
||||
bit_slice: &'a BitSlice,
|
||||
) -> Self {
|
||||
let array_type = array_type.as_dyn_array();
|
||||
assert_eq!(bit_slice.len(), array_type.type_properties().bit_width);
|
||||
Self {
|
||||
array_type,
|
||||
get_element: bit_slice,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<GetElement: DebugMemoryDataGetElement> fmt::Debug for DebugMemoryData<GetElement> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if self.array_type.len() == 0 {
|
||||
return f.write_str("[]");
|
||||
}
|
||||
writeln!(f, "[\n // len = {:#x}", self.array_type.len())?;
|
||||
for element_index in 0..self.array_type.len() {
|
||||
let element = crate::util::BitSliceWriteWithBase(
|
||||
self.get_element.get_element(element_index, self.array_type),
|
||||
);
|
||||
writeln!(f, " [{element_index:#x}]: {element:#x},")?;
|
||||
}
|
||||
f.write_str("]")
|
||||
}
|
||||
}
|
||||
|
|
File diff suppressed because it is too large

@ -1,6 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub mod deduce_resets;
pub mod simplify_enums;
pub mod simplify_memories;
pub mod visit;

File diff suppressed because it is too large

@ -2,19 +2,19 @@
|
|||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
array::{Array, ArrayType},
|
||||
bundle::{Bundle, BundleField, BundleType},
|
||||
bundle::{Bundle, BundleType},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
ops::{self, EnumLiteral},
|
||||
CastBitsTo, CastTo, CastToBits, Expr, ExprEnum, HdlPartialEq, ToExpr,
|
||||
CastBitsTo, CastToBits, Expr, ExprEnum, ToExpr,
|
||||
},
|
||||
hdl,
|
||||
int::UInt,
|
||||
intern::{Intern, Interned, Memoize},
|
||||
int::{DynSize, IntCmp, Size, UInt, UIntType},
|
||||
intern::{Intern, Interned},
|
||||
memory::{DynPortType, Mem, MemPort},
|
||||
module::{
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
Block, Module, NameIdGen, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
|
@ -41,70 +41,25 @@ impl fmt::Display for SimplifyEnumsError {
|
|||
|
||||
impl std::error::Error for SimplifyEnumsError {}
|
||||
|
||||
impl From<SimplifyEnumsError> for std::io::Error {
|
||||
fn from(value: SimplifyEnumsError) -> Self {
|
||||
std::io::Error::new(std::io::ErrorKind::Other, value)
|
||||
}
|
||||
}
|
||||
|
||||
fn contains_any_enum_types(ty: CanonicalType) -> bool {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
struct TheMemoize;
|
||||
impl Memoize for TheMemoize {
|
||||
type Input = CanonicalType;
|
||||
type InputOwned = CanonicalType;
|
||||
type Output = bool;
|
||||
|
||||
fn inner(self, ty: &Self::Input) -> Self::Output {
|
||||
match *ty {
|
||||
CanonicalType::Array(array_type) => contains_any_enum_types(array_type.element()),
|
||||
CanonicalType::Enum(_) => true,
|
||||
CanonicalType::Bundle(bundle) => bundle
|
||||
.fields()
|
||||
.iter()
|
||||
.any(|field| contains_any_enum_types(field.ty)),
|
||||
CanonicalType::UInt(_)
|
||||
| CanonicalType::SInt(_)
|
||||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
TheMemoize.get_owned(ty)
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
struct TagAndBody<Tag, Body> {
|
||||
tag: Tag,
|
||||
body: Body,
|
||||
struct TagAndBody<T, BodyWidth: Size> {
|
||||
tag: T,
|
||||
body: UIntType<BodyWidth>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum EnumTypeState {
|
||||
TagEnumAndBody(TagAndBody<Enum, UInt>),
|
||||
TagUIntAndBody(TagAndBody<UInt, UInt>),
|
||||
TagEnumAndBody(TagAndBody<Enum, DynSize>),
|
||||
TagUIntAndBody(TagAndBody<UInt, DynSize>),
|
||||
UInt(UInt),
|
||||
Unchanged,
|
||||
}
|
||||
|
||||
struct ModuleState {
|
||||
module_name: NameId,
|
||||
}
|
||||
|
||||
impl ModuleState {
|
||||
fn gen_name(&mut self, name: &str) -> ScopedNameId {
|
||||
ScopedNameId(self.module_name, NameId(name.intern(), Id::new()))
|
||||
}
|
||||
}
|
||||
|
||||
struct State {
|
||||
enum_types: HashMap<Enum, EnumTypeState>,
|
||||
replacement_mem_ports: HashMap<MemPort<DynPortType>, Wire<CanonicalType>>,
|
||||
kind: SimplifyEnumsKind,
|
||||
module_state_stack: Vec<ModuleState>,
|
||||
name_id_gen: NameIdGen,
|
||||
}
|
||||
|
||||
impl State {
|
||||
|
@ -152,369 +107,6 @@ impl State {
|
|||
self.enum_types.insert(enum_type, retval.clone());
|
||||
Ok(retval)
|
||||
}
|
||||
#[hdl]
|
||||
fn handle_enum_literal(
|
||||
&mut self,
|
||||
unfolded_enum_type: Enum,
|
||||
variant_index: usize,
|
||||
folded_variant_value: Option<Expr<CanonicalType>>,
|
||||
) -> Result<Expr<CanonicalType>, SimplifyEnumsError> {
|
||||
Ok(
|
||||
match self.get_or_make_enum_type_state(unfolded_enum_type)? {
|
||||
EnumTypeState::TagEnumAndBody(TagAndBody { tag, body }) => Expr::canonical(
|
||||
#[hdl]
|
||||
TagAndBody {
|
||||
tag: EnumLiteral::new_by_index(tag, variant_index, None),
|
||||
body: match folded_variant_value {
|
||||
Some(variant_value) => variant_value.cast_to_bits().cast_to(body),
|
||||
None => body.zero().to_expr(),
|
||||
},
|
||||
},
|
||||
),
|
||||
EnumTypeState::TagUIntAndBody(TagAndBody { tag, body }) => Expr::canonical(
|
||||
#[hdl]
|
||||
TagAndBody {
|
||||
tag: tag.from_int_wrapping(variant_index),
|
||||
body: match folded_variant_value {
|
||||
Some(folded_variant_value) => {
|
||||
folded_variant_value.cast_to_bits().cast_to(body)
|
||||
}
|
||||
None => body.zero().to_expr(),
|
||||
},
|
||||
},
|
||||
),
|
||||
EnumTypeState::UInt(_) => {
|
||||
let tag = UInt[unfolded_enum_type.discriminant_bit_width()];
|
||||
let body = UInt[unfolded_enum_type.type_properties().bit_width - tag.width()];
|
||||
Expr::canonical(
|
||||
(#[hdl]
|
||||
TagAndBody {
|
||||
tag: tag.from_int_wrapping(variant_index),
|
||||
body: match folded_variant_value {
|
||||
Some(folded_variant_value) => {
|
||||
folded_variant_value.cast_to_bits().cast_to(body)
|
||||
}
|
||||
None => body.zero().to_expr(),
|
||||
},
|
||||
})
|
||||
.cast_to_bits(),
|
||||
)
|
||||
}
|
||||
EnumTypeState::Unchanged => Expr::canonical(
|
||||
ops::EnumLiteral::new_by_index(
|
||||
unfolded_enum_type,
|
||||
variant_index,
|
||||
folded_variant_value,
|
||||
)
|
||||
.to_expr(),
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
fn handle_variant_access(
|
||||
&mut self,
|
||||
unfolded_enum_type: Enum,
|
||||
folded_base_expr: Expr<CanonicalType>,
|
||||
variant_index: usize,
|
||||
) -> Result<Expr<CanonicalType>, SimplifyEnumsError> {
|
||||
let unfolded_variant_type = unfolded_enum_type.variants()[variant_index].ty;
|
||||
Ok(
|
||||
match self.get_or_make_enum_type_state(unfolded_enum_type)? {
|
||||
EnumTypeState::TagEnumAndBody(_) | EnumTypeState::TagUIntAndBody(_) => {
|
||||
match unfolded_variant_type {
|
||||
Some(variant_type) => Expr::canonical(
|
||||
Expr::<TagAndBody<CanonicalType, UInt>>::from_canonical(
|
||||
folded_base_expr,
|
||||
)
|
||||
.body[..variant_type.bit_width()]
|
||||
.cast_bits_to(variant_type.fold(self)?),
|
||||
),
|
||||
None => Expr::canonical(().to_expr()),
|
||||
}
|
||||
}
|
||||
EnumTypeState::UInt(_) => match unfolded_variant_type {
|
||||
Some(variant_type) => {
|
||||
let base_int = Expr::<UInt>::from_canonical(folded_base_expr);
|
||||
let variant_type_bit_width = variant_type.bit_width();
|
||||
Expr::canonical(
|
||||
base_int[unfolded_enum_type.discriminant_bit_width()..]
|
||||
[..variant_type_bit_width]
|
||||
.cast_bits_to(variant_type.fold(self)?),
|
||||
)
|
||||
}
|
||||
None => Expr::canonical(().to_expr()),
|
||||
},
|
||||
EnumTypeState::Unchanged => match unfolded_variant_type {
|
||||
Some(_) => ops::VariantAccess::new_by_index(
|
||||
Expr::from_canonical(folded_base_expr),
|
||||
variant_index,
|
||||
)
|
||||
.to_expr(),
|
||||
None => Expr::canonical(().to_expr()),
|
||||
},
|
||||
},
|
||||
)
|
||||
}
|
||||
fn handle_match(
|
||||
&mut self,
|
||||
unfolded_enum_type: Enum,
|
||||
folded_expr: Expr<CanonicalType>,
|
||||
source_location: SourceLocation,
|
||||
folded_blocks: &[Block],
|
||||
) -> Result<Stmt, SimplifyEnumsError> {
|
||||
match self.get_or_make_enum_type_state(unfolded_enum_type)? {
|
||||
EnumTypeState::TagEnumAndBody(_) => Ok(StmtMatch {
|
||||
expr: Expr::<TagAndBody<Enum, UInt>>::from_canonical(folded_expr).tag,
|
||||
source_location,
|
||||
blocks: folded_blocks.intern(),
|
||||
}
|
||||
.into()),
|
||||
EnumTypeState::TagUIntAndBody(_) => {
|
||||
let int_tag_expr = Expr::<TagAndBody<UInt, UInt>>::from_canonical(folded_expr).tag;
|
||||
Ok(match_int_tag(int_tag_expr, source_location, folded_blocks).into())
|
||||
}
|
||||
EnumTypeState::UInt(_) => {
|
||||
let int_tag_expr = Expr::<UInt>::from_canonical(folded_expr)
|
||||
[..unfolded_enum_type.discriminant_bit_width()];
|
||||
Ok(match_int_tag(int_tag_expr, source_location, folded_blocks).into())
|
||||
}
|
||||
EnumTypeState::Unchanged => Ok(StmtMatch {
|
||||
expr: Expr::from_canonical(folded_expr),
|
||||
source_location,
|
||||
blocks: folded_blocks.intern(),
|
||||
}
|
||||
.into()),
|
||||
}
|
||||
}
|
||||
fn handle_stmt_connect_array(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: Array,
|
||||
unfolded_rhs_ty: Array,
|
||||
folded_lhs: Expr<Array>,
|
||||
folded_rhs: Expr<Array>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
assert_eq!(unfolded_lhs_ty.len(), unfolded_rhs_ty.len());
|
||||
let unfolded_lhs_element_ty = unfolded_lhs_ty.element();
|
||||
let unfolded_rhs_element_ty = unfolded_rhs_ty.element();
|
||||
for array_index in 0..unfolded_lhs_ty.len() {
|
||||
self.handle_stmt_connect(
|
||||
unfolded_lhs_element_ty,
|
||||
unfolded_rhs_element_ty,
|
||||
folded_lhs[array_index],
|
||||
folded_rhs[array_index],
|
||||
source_location,
|
||||
output_stmts,
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
fn handle_stmt_connect_bundle(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: Bundle,
|
||||
unfolded_rhs_ty: Bundle,
|
||||
folded_lhs: Expr<Bundle>,
|
||||
folded_rhs: Expr<Bundle>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
let unfolded_lhs_fields = unfolded_lhs_ty.fields();
|
||||
let unfolded_rhs_fields = unfolded_rhs_ty.fields();
|
||||
assert_eq!(unfolded_lhs_fields.len(), unfolded_rhs_fields.len());
|
||||
for (
|
||||
field_index,
|
||||
(
|
||||
&BundleField {
|
||||
name,
|
||||
flipped,
|
||||
ty: unfolded_lhs_field_ty,
|
||||
},
|
||||
unfolded_rhs_field,
|
||||
),
|
||||
) in unfolded_lhs_fields
|
||||
.iter()
|
||||
.zip(&unfolded_rhs_fields)
|
||||
.enumerate()
|
||||
{
|
||||
assert_eq!(name, unfolded_rhs_field.name);
|
||||
assert_eq!(flipped, unfolded_rhs_field.flipped);
|
||||
let folded_lhs_field =
|
||||
ops::FieldAccess::new_by_index(folded_lhs, field_index).to_expr();
|
||||
let folded_rhs_field =
|
||||
ops::FieldAccess::new_by_index(folded_rhs, field_index).to_expr();
|
||||
if flipped {
|
||||
// swap lhs/rhs
|
||||
self.handle_stmt_connect(
|
||||
unfolded_rhs_field.ty,
|
||||
unfolded_lhs_field_ty,
|
||||
folded_rhs_field,
|
||||
folded_lhs_field,
|
||||
source_location,
|
||||
output_stmts,
|
||||
)?;
|
||||
} else {
|
||||
self.handle_stmt_connect(
|
||||
unfolded_lhs_field_ty,
|
||||
unfolded_rhs_field.ty,
|
||||
folded_lhs_field,
|
||||
folded_rhs_field,
|
||||
source_location,
|
||||
output_stmts,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
fn handle_stmt_connect_enum(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: Enum,
|
||||
unfolded_rhs_ty: Enum,
|
||||
folded_lhs: Expr<CanonicalType>,
|
||||
folded_rhs: Expr<CanonicalType>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
let unfolded_lhs_variants = unfolded_lhs_ty.variants();
|
||||
let unfolded_rhs_variants = unfolded_rhs_ty.variants();
|
||||
assert_eq!(unfolded_lhs_variants.len(), unfolded_rhs_variants.len());
|
||||
let mut folded_blocks = vec![];
|
||||
for (
|
||||
variant_index,
|
||||
(
|
||||
&EnumVariant {
|
||||
name,
|
||||
ty: unfolded_lhs_variant_ty,
|
||||
},
|
||||
unfolded_rhs_variant,
|
||||
),
|
||||
) in unfolded_lhs_variants
|
||||
.iter()
|
||||
.zip(&unfolded_rhs_variants)
|
||||
.enumerate()
|
||||
{
|
||||
let mut output_stmts = vec![];
|
||||
assert_eq!(name, unfolded_rhs_variant.name);
|
||||
assert_eq!(
|
||||
unfolded_lhs_variant_ty.is_some(),
|
||||
unfolded_rhs_variant.ty.is_some()
|
||||
);
|
||||
let folded_variant_value =
|
||||
if let (Some(unfolded_lhs_variant_ty), Some(unfolded_rhs_variant_ty)) =
|
||||
(unfolded_lhs_variant_ty, unfolded_rhs_variant.ty)
|
||||
{
|
||||
let lhs_wire = Wire::new_unchecked(
|
||||
self.module_state_stack
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.gen_name("__connect_variant_body"),
|
||||
source_location,
|
||||
unfolded_lhs_variant_ty.fold(self)?,
|
||||
);
|
||||
output_stmts.push(
|
||||
StmtWire {
|
||||
annotations: Interned::default(),
|
||||
wire: lhs_wire,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
let lhs_wire = lhs_wire.to_expr();
|
||||
let folded_rhs_variant =
|
||||
self.handle_variant_access(unfolded_rhs_ty, folded_rhs, variant_index)?;
|
||||
self.handle_stmt_connect(
|
||||
unfolded_lhs_variant_ty,
|
||||
unfolded_rhs_variant_ty,
|
||||
lhs_wire,
|
||||
folded_rhs_variant,
|
||||
source_location,
|
||||
&mut output_stmts,
|
||||
)?;
|
||||
Some(lhs_wire)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
output_stmts.push(
|
||||
StmtConnect {
|
||||
lhs: folded_lhs,
|
||||
rhs: self.handle_enum_literal(
|
||||
unfolded_lhs_ty,
|
||||
variant_index,
|
||||
folded_variant_value,
|
||||
)?,
|
||||
source_location,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
folded_blocks.push(Block {
|
||||
memories: Interned::default(),
|
||||
stmts: Intern::intern_owned(output_stmts),
|
||||
});
|
||||
}
|
||||
output_stmts.push(self.handle_match(
|
||||
unfolded_rhs_ty,
|
||||
folded_rhs,
|
||||
source_location,
|
||||
&folded_blocks,
|
||||
)?);
|
||||
Ok(())
|
||||
}
|
||||
fn handle_stmt_connect(
|
||||
&mut self,
|
||||
unfolded_lhs_ty: CanonicalType,
|
||||
unfolded_rhs_ty: CanonicalType,
|
||||
folded_lhs: Expr<CanonicalType>,
|
||||
folded_rhs: Expr<CanonicalType>,
|
||||
source_location: SourceLocation,
|
||||
output_stmts: &mut Vec<Stmt>,
|
||||
) -> Result<(), SimplifyEnumsError> {
|
||||
let needs_expansion = unfolded_lhs_ty != unfolded_rhs_ty
|
||||
&& (contains_any_enum_types(unfolded_lhs_ty)
|
||||
|| contains_any_enum_types(unfolded_rhs_ty));
|
||||
if !needs_expansion {
|
||||
output_stmts.push(
|
||||
StmtConnect {
|
||||
lhs: folded_lhs,
|
||||
rhs: folded_rhs,
|
||||
source_location,
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
match unfolded_lhs_ty {
|
||||
CanonicalType::Array(unfolded_lhs_ty) => self.handle_stmt_connect_array(
|
||||
unfolded_lhs_ty,
|
||||
Array::from_canonical(unfolded_rhs_ty),
|
||||
Expr::from_canonical(folded_lhs),
|
||||
Expr::from_canonical(folded_rhs),
|
||||
source_location,
|
||||
output_stmts,
|
||||
),
|
||||
CanonicalType::Enum(unfolded_lhs_ty) => self.handle_stmt_connect_enum(
|
||||
unfolded_lhs_ty,
|
||||
Enum::from_canonical(unfolded_rhs_ty),
|
||||
folded_lhs,
|
||||
folded_rhs,
|
||||
source_location,
|
||||
output_stmts,
|
||||
),
|
||||
CanonicalType::Bundle(unfolded_lhs_ty) => self.handle_stmt_connect_bundle(
|
||||
unfolded_lhs_ty,
|
||||
Bundle::from_canonical(unfolded_rhs_ty),
|
||||
Expr::from_canonical(folded_lhs),
|
||||
Expr::from_canonical(folded_rhs),
|
||||
source_location,
|
||||
output_stmts,
|
||||
),
|
||||
CanonicalType::UInt(_)
|
||||
| CanonicalType::SInt(_)
|
||||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn connect_port(
|
||||
|
@ -525,11 +117,11 @@ fn connect_port(
|
|||
) {
|
||||
if Expr::ty(lhs) == Expr::ty(rhs) {
|
||||
stmts.push(
|
||||
StmtConnect {
|
||||
dbg!(StmtConnect {
|
||||
lhs,
|
||||
rhs,
|
||||
source_location,
|
||||
}
|
||||
})
|
||||
.into(),
|
||||
);
|
||||
return;
|
||||
|
@ -585,42 +177,6 @@ fn connect_port(
|
|||
}
|
||||
}
|
||||
|
||||
fn match_int_tag(
|
||||
int_tag_expr: Expr<UInt>,
|
||||
source_location: SourceLocation,
|
||||
folded_blocks: &[Block],
|
||||
) -> StmtIf {
|
||||
let mut blocks_iter = folded_blocks.iter().copied().enumerate();
|
||||
let (_, last_block) = blocks_iter.next_back().unwrap_or_default();
|
||||
let Some((next_to_last_variant_index, next_to_last_block)) = blocks_iter.next_back() else {
|
||||
return StmtIf {
|
||||
cond: true.to_expr(),
|
||||
source_location,
|
||||
blocks: [last_block, Block::default()],
|
||||
};
|
||||
};
|
||||
let mut retval = StmtIf {
|
||||
cond: int_tag_expr
|
||||
.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(next_to_last_variant_index)),
|
||||
source_location,
|
||||
blocks: [next_to_last_block, last_block],
|
||||
};
|
||||
for (variant_index, block) in blocks_iter.rev() {
|
||||
retval = StmtIf {
|
||||
cond: int_tag_expr.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(variant_index)),
|
||||
source_location,
|
||||
blocks: [
|
||||
block,
|
||||
Block {
|
||||
memories: Default::default(),
|
||||
stmts: [Stmt::from(retval)][..].intern(),
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
retval
|
||||
}
|
||||
|
||||
impl Folder for State {
|
||||
type Error = SimplifyEnumsError;
|
||||
|
||||
|
@ -629,32 +185,96 @@ impl Folder for State {
|
|||
}
|
||||
|
||||
fn fold_module<T: BundleType>(&mut self, v: Module<T>) -> Result<Module<T>, Self::Error> {
|
||||
self.module_state_stack.push(ModuleState {
|
||||
module_name: v.name_id(),
|
||||
});
|
||||
let old_name_id_gen =
|
||||
std::mem::replace(&mut self.name_id_gen, NameIdGen::for_module(v.canonical()));
|
||||
let retval = Fold::default_fold(v, self);
|
||||
self.module_state_stack.pop();
|
||||
self.name_id_gen = old_name_id_gen;
|
||||
retval
|
||||
}
|
||||
|
||||
fn fold_expr_enum(&mut self, op: ExprEnum) -> Result<ExprEnum, Self::Error> {
|
||||
match op {
|
||||
ExprEnum::EnumLiteral(op) => {
|
||||
let folded_variant_value = op.variant_value().map(|v| v.fold(self)).transpose()?;
|
||||
Ok(*Expr::expr_enum(self.handle_enum_literal(
|
||||
ExprEnum::EnumLiteral(op) => Ok(match self.get_or_make_enum_type_state(op.ty())? {
|
||||
EnumTypeState::TagEnumAndBody(TagAndBody { tag, body }) => *Expr::expr_enum(
|
||||
<TagAndBody<Enum, DynSize> as BundleType>::Builder::default()
|
||||
.field_tag(EnumLiteral::new_by_index(tag, op.variant_index(), None))
|
||||
.field_body(match op.variant_value() {
|
||||
Some(variant_value) => variant_value.fold(self)?.cast_to_bits(),
|
||||
None => body.zero().to_expr(),
|
||||
})
|
||||
.to_expr(),
|
||||
),
|
||||
EnumTypeState::TagUIntAndBody(TagAndBody { tag, body }) => *Expr::expr_enum(
|
||||
<TagAndBody<UInt, DynSize> as BundleType>::Builder::default()
|
||||
.field_tag(tag.from_int_wrapping(op.variant_index()))
|
||||
.field_body(match op.variant_value() {
|
||||
Some(variant_value) => variant_value.fold(self)?.cast_to_bits(),
|
||||
None => body.zero().to_expr(),
|
||||
})
|
||||
.to_expr(),
|
||||
),
|
||||
EnumTypeState::UInt(_) => *Expr::expr_enum(
|
||||
<TagAndBody<UInt, DynSize> as BundleType>::Builder::default()
|
||||
.field_tag(
|
||||
UIntType::new(op.ty().discriminant_bit_width())
|
||||
.from_int_wrapping(op.variant_index()),
|
||||
)
|
||||
.field_body(match op.variant_value() {
|
||||
Some(variant_value) => variant_value.fold(self)?.cast_to_bits(),
|
||||
None => UIntType::new(
|
||||
op.ty().type_properties().bit_width
|
||||
- op.ty().discriminant_bit_width(),
|
||||
)
|
||||
.zero()
|
||||
.to_expr(),
|
||||
})
|
||||
.cast_to_bits(),
|
||||
),
|
||||
EnumTypeState::Unchanged => ExprEnum::EnumLiteral(ops::EnumLiteral::new_by_index(
|
||||
op.ty(),
|
||||
op.variant_index(),
|
||||
folded_variant_value,
|
||||
)?))
|
||||
op.variant_value().map(|v| v.fold(self)).transpose()?,
|
||||
)),
|
||||
}),
|
||||
ExprEnum::VariantAccess(op) => Ok(
|
||||
match self.get_or_make_enum_type_state(Expr::ty(op.base()))? {
|
||||
EnumTypeState::TagEnumAndBody(_) | EnumTypeState::TagUIntAndBody(_) => {
|
||||
match op.variant_type() {
|
||||
Some(variant_type) => *Expr::expr_enum(
|
||||
Expr::<TagAndBody<CanonicalType, DynSize>>::from_canonical(
|
||||
(*Expr::expr_enum(op.base())).fold(self)?.to_expr(),
|
||||
)
|
||||
.body[..variant_type.bit_width()]
|
||||
.cast_bits_to(variant_type),
|
||||
),
|
||||
None => *Expr::expr_enum(().to_expr()),
|
||||
}
|
||||
ExprEnum::VariantAccess(op) => {
|
||||
let folded_base_expr = Expr::canonical(op.base()).fold(self)?;
|
||||
Ok(*Expr::expr_enum(self.handle_variant_access(
|
||||
Expr::ty(op.base()),
|
||||
folded_base_expr,
|
||||
}
|
||||
EnumTypeState::UInt(_) => match op.variant_type() {
|
||||
Some(variant_type) => {
|
||||
let base_int = Expr::<UInt>::from_canonical(
|
||||
(*Expr::expr_enum(op.base())).fold(self)?.to_expr(),
|
||||
);
|
||||
dbg!(base_int);
|
||||
let base_ty = Expr::ty(op.base());
|
||||
let variant_type_bit_width = variant_type.bit_width();
|
||||
*Expr::expr_enum(
|
||||
base_int[base_ty.discriminant_bit_width()..]
|
||||
[..variant_type_bit_width]
|
||||
.cast_bits_to(variant_type),
|
||||
)
|
||||
}
|
||||
None => *Expr::expr_enum(().to_expr()),
|
||||
},
|
||||
EnumTypeState::Unchanged => match op.variant_type() {
|
||||
Some(_) => ExprEnum::VariantAccess(ops::VariantAccess::new_by_index(
|
||||
op.base().fold(self)?,
|
||||
op.variant_index(),
|
||||
)?))
|
||||
}
|
||||
)),
|
||||
None => *Expr::expr_enum(().to_expr()),
|
||||
},
|
||||
},
|
||||
),
|
||||
ExprEnum::MemPort(mem_port) => Ok(
|
||||
if let Some(&wire) = self.replacement_mem_ports.get(&mem_port) {
|
||||
ExprEnum::Wire(wire)
|
||||
|
@ -667,7 +287,6 @@ impl Folder for State {
|
|||
| ExprEnum::BoolLiteral(_)
|
||||
| ExprEnum::BundleLiteral(_)
|
||||
| ExprEnum::ArrayLiteral(_)
|
||||
| ExprEnum::Uninit(_)
|
||||
| ExprEnum::NotU(_)
|
||||
| ExprEnum::NotS(_)
|
||||
| ExprEnum::NotB(_)
|
||||
|
@ -764,9 +383,7 @@ impl Folder for State {
|
|||
| ExprEnum::ModuleIO(_)
|
||||
| ExprEnum::Instance(_)
|
||||
| ExprEnum::Wire(_)
|
||||
| ExprEnum::Reg(_)
|
||||
| ExprEnum::RegSync(_)
|
||||
| ExprEnum::RegAsync(_) => op.default_fold(self),
|
||||
| ExprEnum::Reg(_) => op.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -800,15 +417,11 @@ impl Folder for State {
|
|||
if wire_ty == new_port_ty {
|
||||
continue;
|
||||
}
|
||||
let wire_name = self.name_id_gen.gen(
|
||||
(*format!("{}_{}", memory.scoped_name().1 .0, port.port_name())).intern(),
|
||||
);
|
||||
let wire = Wire::new_unchecked(
|
||||
self.module_state_stack
|
||||
.last_mut()
|
||||
.unwrap()
|
||||
.gen_name(&format!(
|
||||
"{}_{}",
|
||||
memory.scoped_name().1 .0,
|
||||
port.port_name()
|
||||
)),
|
||||
ScopedNameId(memory.scoped_name().0, wire_name),
|
||||
port.source_location(),
|
||||
wire_ty,
|
||||
);
|
||||
|
@ -851,50 +464,80 @@ impl Folder for State {
|
|||
}
|
||||
|
||||
fn fold_stmt(&mut self, stmt: Stmt) -> Result<Stmt, Self::Error> {
|
||||
fn match_int_tag(
|
||||
state: &mut State,
|
||||
int_tag_expr: Expr<UInt>,
|
||||
source_location: SourceLocation,
|
||||
blocks: Interned<[Block]>,
|
||||
) -> Result<StmtIf, SimplifyEnumsError> {
|
||||
let mut blocks_iter = blocks.iter().copied().enumerate();
|
||||
let (_, last_block) = blocks_iter.next_back().unwrap_or_default();
|
||||
let Some((next_to_last_variant_index, next_to_last_block)) = blocks_iter.next_back()
|
||||
else {
|
||||
return Ok(StmtIf {
|
||||
cond: true.to_expr(),
|
||||
source_location,
|
||||
blocks: [last_block.fold(state)?, Block::default()],
|
||||
});
|
||||
};
|
||||
let mut retval = StmtIf {
|
||||
cond: int_tag_expr
|
||||
.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(next_to_last_variant_index)),
|
||||
source_location,
|
||||
blocks: [next_to_last_block.fold(state)?, last_block.fold(state)?],
|
||||
};
|
||||
for (variant_index, block) in blocks_iter.rev() {
|
||||
retval = StmtIf {
|
||||
cond: int_tag_expr
|
||||
.cmp_eq(Expr::ty(int_tag_expr).from_int_wrapping(variant_index)),
|
||||
source_location,
|
||||
blocks: [
|
||||
block.fold(state)?,
|
||||
Block {
|
||||
memories: Default::default(),
|
||||
stmts: [Stmt::from(retval)][..].intern(),
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
match stmt {
|
||||
Stmt::Match(StmtMatch {
|
||||
expr,
|
||||
source_location,
|
||||
blocks,
|
||||
}) => {
|
||||
let folded_expr = Expr::canonical(expr).fold(self)?;
|
||||
let folded_blocks = blocks.fold(self)?;
|
||||
self.handle_match(Expr::ty(expr), folded_expr, source_location, &folded_blocks)
|
||||
}) => match self.get_or_make_enum_type_state(Expr::ty(expr))? {
|
||||
EnumTypeState::TagEnumAndBody(_) => Ok(StmtMatch {
|
||||
expr: Expr::<TagAndBody<Enum, DynSize>>::from_canonical(
|
||||
Expr::canonical(expr).fold(self)?,
|
||||
)
|
||||
.tag,
|
||||
source_location,
|
||||
blocks: blocks.fold(self)?,
|
||||
}
|
||||
Stmt::Connect(StmtConnect {
|
||||
lhs,
|
||||
rhs,
|
||||
.into()),
|
||||
EnumTypeState::TagUIntAndBody(_) => {
|
||||
let int_tag_expr = Expr::<TagAndBody<UInt, DynSize>>::from_canonical(
|
||||
Expr::canonical(expr).fold(self)?,
|
||||
)
|
||||
.tag;
|
||||
Ok(match_int_tag(self, int_tag_expr, source_location, blocks)?.into())
|
||||
}
|
||||
EnumTypeState::UInt(_) => {
|
||||
let int_tag_expr =
|
||||
Expr::<UInt>::from_canonical(Expr::canonical(expr).fold(self)?)
|
||||
[..Expr::ty(expr).discriminant_bit_width()];
|
||||
Ok(match_int_tag(self, int_tag_expr, source_location, blocks)?.into())
|
||||
}
|
||||
EnumTypeState::Unchanged => Ok(StmtMatch {
|
||||
expr: expr.fold(self)?,
|
||||
source_location,
|
||||
}) => {
|
||||
let folded_lhs = lhs.fold(self)?;
|
||||
let folded_rhs = rhs.fold(self)?;
|
||||
let mut output_stmts = vec![];
|
||||
self.handle_stmt_connect(
|
||||
Expr::ty(lhs),
|
||||
Expr::ty(rhs),
|
||||
folded_lhs,
|
||||
folded_rhs,
|
||||
source_location,
|
||||
&mut output_stmts,
|
||||
)?;
|
||||
if output_stmts.len() == 1 {
|
||||
Ok(output_stmts.pop().unwrap())
|
||||
} else {
|
||||
Ok(StmtIf {
|
||||
cond: true.to_expr(),
|
||||
source_location,
|
||||
blocks: [
|
||||
Block {
|
||||
memories: Interned::default(),
|
||||
stmts: Intern::intern_owned(output_stmts),
|
||||
blocks: blocks.fold(self)?,
|
||||
}
|
||||
.into()),
|
||||
},
|
||||
Block::default(),
|
||||
],
|
||||
}
|
||||
.into())
|
||||
}
|
||||
}
|
||||
Stmt::Formal(_) | Stmt::If(_) | Stmt::Declaration(_) => stmt.default_fold(self),
|
||||
Stmt::Connect(_) | Stmt::If(_) | Stmt::Declaration(_) => stmt.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -931,10 +574,13 @@ impl Folder for State {
|
|||
unreachable!()
|
||||
}
|
||||
|
||||
fn fold_enum_literal<T: EnumType + Fold<Self>>(
|
||||
fn fold_enum_literal<T: EnumType>(
|
||||
&mut self,
|
||||
_v: ops::EnumLiteral<T>,
|
||||
) -> Result<ops::EnumLiteral<T>, Self::Error> {
|
||||
) -> Result<ops::EnumLiteral<T>, Self::Error>
|
||||
where
|
||||
T: Fold<Self>,
|
||||
{
|
||||
unreachable!()
|
||||
}
|
||||
|
||||
|
@ -946,12 +592,10 @@ impl Folder for State {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub enum SimplifyEnumsKind {
|
||||
SimplifyToEnumsWithNoBody,
|
||||
#[clap(name = "replace-with-bundle-of-uints")]
|
||||
ReplaceWithBundleOfUInts,
|
||||
#[clap(name = "replace-with-uint")]
|
||||
ReplaceWithUInt,
|
||||
}
|
||||
|
||||
|
@ -963,6 +607,6 @@ pub fn simplify_enums(
|
|||
enum_types: HashMap::new(),
|
||||
replacement_mem_ports: HashMap::new(),
|
||||
kind,
|
||||
module_state_stack: vec![],
|
||||
name_id_gen: NameIdGen::default(),
|
||||
})
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ use crate::{
memory::{Mem, MemPort, PortType},
module::{
transform::visit::{Fold, Folder},
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
Block, Module, NameId, NameIdGen, ScopedNameId, Stmt, StmtConnect, StmtWire,
},
source_location::SourceLocation,
ty::{CanonicalType, Type},
@ -417,6 +417,7 @@ impl SplitMemState<'_, '_> {
|
|||
|
||||
struct ModuleState {
|
||||
output_module: Option<Interned<Module<Bundle>>>,
|
||||
name_id_gen: NameIdGen,
|
||||
memories: HashMap<ScopedNameId, MemState>,
|
||||
}
|
||||
|
||||
|
@ -568,7 +569,7 @@ impl ModuleState {
|
|||
port_wmask.map(Expr::from_canonical),
|
||||
connect_read_enum,
|
||||
connect_write_enum,
|
||||
connect_write,
|
||||
connect_write_enum,
|
||||
),
|
||||
CanonicalType::Array(array_type) => {
|
||||
input_array_types.push(array_type);
|
||||
|
@ -625,10 +626,10 @@ impl ModuleState {
|
|||
mem_name_path: &str,
|
||||
split_state: &SplitState<'_>,
|
||||
) -> Mem {
|
||||
let mem_name = NameId(
|
||||
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1 .0)),
|
||||
Id::new(),
|
||||
);
|
||||
let mem_name = self.name_id_gen.gen(Intern::intern_owned(format!(
|
||||
"{}{mem_name_path}",
|
||||
input_mem.scoped_name().1 .0
|
||||
)));
|
||||
let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name);
|
||||
let output_element_type = match single_type {
|
||||
SingleType::UInt(ty) => ty.canonical(),
|
||||
|
@@ -752,10 +753,9 @@ impl ModuleState {
let port_ty = port.ty();
|
||||
let NameId(mem_name, _) = input_mem.scoped_name().1;
|
||||
let port_name = port.port_name();
|
||||
let wire_name = NameId(
|
||||
Intern::intern_owned(format!("{mem_name}_{port_name}")),
|
||||
Id::new(),
|
||||
);
|
||||
let wire_name = self
|
||||
.name_id_gen
|
||||
.gen(Intern::intern_owned(format!("{mem_name}_{port_name}")));
|
||||
let wire = Wire::new_unchecked(
|
||||
ScopedNameId(input_mem.scoped_name().0, wire_name),
|
||||
port.source_location(),
|
||||
|
@@ -766,7 +766,7 @@ impl ModuleState {
output_stmts.push(
|
||||
StmtWire {
|
||||
annotations: Default::default(),
|
||||
wire: canonical_wire,
|
||||
wire: canonical_wire.clone(),
|
||||
}
|
||||
.into(),
|
||||
);
|
||||
|
@@ -887,6 +887,7 @@ impl Folder for State {
module,
|
||||
ModuleState {
|
||||
output_module: None,
|
||||
name_id_gen: NameIdGen::for_module(*module),
|
||||
memories: HashMap::new(),
|
||||
},
|
||||
);
|
||||
|
|
|
@@ -2,10 +2,7 @@
// See Notices.txt for copyright information
|
||||
#![allow(clippy::multiple_bound_locations)]
|
||||
use crate::{
|
||||
annotations::{
|
||||
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
|
||||
},
|
||||
annotations::{Annotation, CustomFirrtlAnnotation, TargetedAnnotation},
|
||||
array::ArrayType,
|
||||
bundle::{Bundle, BundleField, BundleType},
|
||||
clock::Clock,
|
||||
|
@@ -18,18 +15,17 @@ use crate::{
},
|
||||
Expr, ExprEnum,
|
||||
},
|
||||
formal::FormalKind,
|
||||
int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue},
|
||||
intern::{Intern, Interned},
|
||||
memory::{Mem, MemPort, PortKind, PortName, PortType, ReadUnderWrite},
|
||||
module::{
|
||||
AnnotatedModuleIO, Block, BlockId, ExternModuleBody, ExternModuleParameter,
|
||||
ExternModuleParameterValue, Instance, Module, ModuleBody, ModuleIO, NameId,
|
||||
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf,
|
||||
StmtInstance, StmtMatch, StmtReg, StmtWire,
|
||||
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtIf, StmtInstance,
|
||||
StmtMatch, StmtReg, StmtWire,
|
||||
},
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, SyncReset},
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
wire::Wire,
|
||||
|
|
|
@@ -1,30 +1,16 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
pub use crate::{
|
||||
annotations::{
|
||||
BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
||||
},
|
||||
annotations::Annotation,
|
||||
array::{Array, ArrayType},
|
||||
bundle::Bundle,
|
||||
cli::Cli,
|
||||
clock::{Clock, ClockDomain, ToClock},
|
||||
enum_::{Enum, HdlNone, HdlOption, HdlSome},
|
||||
expr::{
|
||||
repeat, CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
|
||||
ReduceBits, ToExpr,
|
||||
},
|
||||
formal::{
|
||||
all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset, hdl_assert,
|
||||
hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
|
||||
hdl_cover_with_enable, MakeFormalExpr,
|
||||
},
|
||||
enum_::{HdlNone, HdlOption, HdlSome},
|
||||
expr::{CastBitsTo, CastTo, CastToBits, Expr, ReduceBits, ToExpr},
|
||||
hdl, hdl_module,
|
||||
int::{Bool, DynSize, KnownSize, SInt, SIntType, Size, UInt, UIntType},
|
||||
int::{Bool, DynSize, IntCmp, KnownSize, SInt, SIntType, Size, UInt, UIntType},
|
||||
memory::{Mem, MemBuilder, ReadUnderWrite},
|
||||
module::{
|
||||
annotate, connect, connect_any, incomplete_wire, instance, memory, memory_array,
|
||||
memory_with_init, reg_builder, wire, Instance, Module, ModuleBuilder,
|
||||
annotate, connect, connect_any, instance, memory, memory_array, memory_with_init,
|
||||
reg_builder, wire, Instance, Module, ModuleBuilder,
|
||||
},
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
|
||||
|
|
|
@@ -5,22 +5,21 @@ use crate::{
expr::{Expr, Flow},
|
||||
intern::Interned,
|
||||
module::{NameId, ScopedNameId},
|
||||
reset::{Reset, ResetType},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Reg<T: Type, R: ResetType = Reset> {
|
||||
pub struct Reg<T: Type> {
|
||||
name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
ty: T,
|
||||
clock_domain: Expr<ClockDomain<R>>,
|
||||
clock_domain: Expr<ClockDomain>,
|
||||
init: Option<Expr<T>>,
|
||||
}
|
||||
|
||||
impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
|
||||
impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
name,
|
||||
|
@@ -38,8 +37,8 @@ impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
}
|
||||
}
|
||||
|
||||
impl<T: Type, R: ResetType> Reg<T, R> {
|
||||
pub fn canonical(&self) -> Reg<CanonicalType, R> {
|
||||
impl<T: Type> Reg<T> {
|
||||
pub fn canonical(&self) -> Reg<CanonicalType> {
|
||||
let Self {
|
||||
name,
|
||||
source_location,
|
||||
|
@@ -60,7 +59,7 @@ impl<T: Type, R: ResetType> Reg<T, R> {
scoped_name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
ty: T,
|
||||
clock_domain: Expr<ClockDomain<R>>,
|
||||
clock_domain: Expr<ClockDomain>,
|
||||
init: Option<Expr<T>>,
|
||||
) -> Self {
|
||||
assert!(
|
||||
|
@@ -99,7 +98,7 @@ impl<T: Type, R: ResetType> Reg<T, R> {
pub fn scoped_name(&self) -> ScopedNameId {
|
||||
self.name
|
||||
}
|
||||
pub fn clock_domain(&self) -> Expr<ClockDomain<R>> {
|
||||
pub fn clock_domain(&self) -> Expr<ClockDomain> {
|
||||
self.clock_domain
|
||||
}
|
||||
pub fn init(&self) -> Option<Expr<T>> {
|
||||
|
|
|
@@ -1,9 +1,8 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
clock::Clock,
|
||||
expr::{ops, Expr, ToExpr},
|
||||
int::{Bool, SInt, UInt},
|
||||
expr::{Expr, ToExpr},
|
||||
int::Bool,
|
||||
source_location::SourceLocation,
|
||||
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
|
||||
};
|
||||
|
@@ -12,33 +11,10 @@ mod sealed {
pub trait ResetTypeSealed {}
|
||||
}
|
||||
|
||||
pub trait ResetType:
|
||||
StaticType<MaskType = Bool>
|
||||
+ sealed::ResetTypeSealed
|
||||
+ ops::ExprCastTo<Bool>
|
||||
+ ops::ExprCastTo<Reset>
|
||||
+ ops::ExprCastTo<SyncReset>
|
||||
+ ops::ExprCastTo<AsyncReset>
|
||||
+ ops::ExprCastTo<Clock>
|
||||
+ ops::ExprCastTo<UInt<1>>
|
||||
+ ops::ExprCastTo<SInt<1>>
|
||||
+ ops::ExprCastTo<UInt>
|
||||
+ ops::ExprCastTo<SInt>
|
||||
{
|
||||
fn dispatch<D: ResetTypeDispatch>(input: D::Input<Self>, dispatch: D) -> D::Output<Self>;
|
||||
}
|
||||
|
||||
pub trait ResetTypeDispatch: Sized {
|
||||
type Input<T: ResetType>;
|
||||
type Output<T: ResetType>;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset>;
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset>;
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset>;
|
||||
}
|
||||
pub trait ResetType: StaticType<MaskType = Bool> + sealed::ResetTypeSealed {}
|
||||
|
||||
macro_rules! reset_type {
|
||||
($name:ident, $(#[$impl_trait:ident])? $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal, $dispatch_fn:ident) => {
|
||||
($name:ident, $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal) => {
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
|
||||
pub struct $name;
|
||||
|
||||
|
@@ -91,14 +67,7 @@ macro_rules! reset_type {
|
||||
impl sealed::ResetTypeSealed for $name {}
|
||||
|
||||
impl ResetType for $name {
|
||||
fn dispatch<D: ResetTypeDispatch>(
|
||||
input: D::Input<Self>,
|
||||
dispatch: D,
|
||||
) -> D::Output<Self> {
|
||||
dispatch.$dispatch_fn(input)
|
||||
}
|
||||
}
|
||||
impl ResetType for $name {}
|
||||
|
||||
pub trait $Trait {
|
||||
fn $trait_fn(&self) -> Expr<$name>;
|
||||
|
@@ -122,21 +91,20 @@ macro_rules! reset_type {
}
|
||||
}
|
||||
|
||||
$($impl_trait $Trait for Expr<$name> {
|
||||
impl $Trait for Expr<$name> {
|
||||
fn $trait_fn(&self) -> Expr<$name> {
|
||||
*self
|
||||
}
|
||||
})?
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
reset_type!(AsyncReset, #[impl] ToAsyncReset::to_async_reset, true, async_reset);
|
||||
reset_type!(SyncReset, #[impl] ToSyncReset::to_sync_reset, true, sync_reset);
|
||||
reset_type!(AsyncReset, ToAsyncReset::to_async_reset, true);
|
||||
reset_type!(SyncReset, ToSyncReset::to_sync_reset, true);
|
||||
reset_type!(
|
||||
Reset,
|
||||
ToReset::to_reset,
|
||||
false, // Reset is not castable from bits because we don't know if it's async or sync
|
||||
reset
|
||||
false // Reset is not castable from bits because we don't know if it's async or sync
|
||||
);
|
||||
|
||||
impl ToSyncReset for bool {
|
||||
|
|
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,397 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use std::{
|
||||
fmt,
|
||||
ops::{Add, AddAssign, Sub, SubAssign},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct SimInstant {
|
||||
time_since_start: SimDuration,
|
||||
}
|
||||
|
||||
impl SimInstant {
|
||||
pub const fn checked_add(self, duration: SimDuration) -> Option<Self> {
|
||||
let Some(time_since_start) = self.time_since_start.checked_add(duration) else {
|
||||
return None;
|
||||
};
|
||||
Some(SimInstant { time_since_start })
|
||||
}
|
||||
pub const fn checked_duration_since(self, earlier: Self) -> Option<SimDuration> {
|
||||
self.time_since_start.checked_sub(earlier.time_since_start)
|
||||
}
|
||||
pub const fn checked_sub(self, duration: SimDuration) -> Option<Self> {
|
||||
let Some(time_since_start) = self.time_since_start.checked_sub(duration) else {
|
||||
return None;
|
||||
};
|
||||
Some(SimInstant { time_since_start })
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn duration_since(self, earlier: Self) -> SimDuration {
|
||||
let Some(retval) = self.checked_duration_since(earlier) else {
|
||||
panic!(
|
||||
"tried to compute the duration since a later time -- durations can't be negative"
|
||||
);
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn saturating_duration_since(self, earlier: Self) -> SimDuration {
|
||||
let Some(retval) = self.checked_duration_since(earlier) else {
|
||||
return SimDuration::ZERO;
|
||||
};
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Add<SimDuration> for SimInstant {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn add(mut self, rhs: SimDuration) -> Self::Output {
|
||||
self += rhs;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<SimDuration> for SimInstant {
|
||||
#[track_caller]
|
||||
fn add_assign(&mut self, rhs: SimDuration) {
|
||||
self.time_since_start += rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Add<SimInstant> for SimDuration {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn add(self, rhs: SimInstant) -> Self::Output {
|
||||
rhs.add(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for SimInstant {
|
||||
type Output = SimDuration;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: SimInstant) -> Self::Output {
|
||||
self.duration_since(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<SimDuration> for SimInstant {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: SimDuration) -> Self::Output {
|
||||
let Some(retval) = self.checked_sub(rhs) else {
|
||||
panic!("SimInstant underflow");
|
||||
};
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign<SimDuration> for SimInstant {
|
||||
#[track_caller]
|
||||
fn sub_assign(&mut self, rhs: SimDuration) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl SimInstant {
|
||||
pub const START: SimInstant = SimInstant {
|
||||
time_since_start: SimDuration::ZERO,
|
||||
};
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimInstant {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.time_since_start.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct SimDuration {
|
||||
attos: u128,
|
||||
}
|
||||
|
||||
impl AddAssign for SimDuration {
|
||||
#[track_caller]
|
||||
fn add_assign(&mut self, rhs: SimDuration) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for SimDuration {
|
||||
type Output = SimDuration;
|
||||
|
||||
#[track_caller]
|
||||
fn add(self, rhs: SimDuration) -> Self::Output {
|
||||
SimDuration {
|
||||
attos: self
|
||||
.attos
|
||||
.checked_add(rhs.attos)
|
||||
.expect("overflow adding durations"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for SimDuration {
|
||||
type Output = Self;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: Self) -> Self::Output {
|
||||
SimDuration {
|
||||
attos: self
|
||||
.attos
|
||||
.checked_sub(rhs.attos)
|
||||
.expect("underflow subtracting durations -- durations can't be negative"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign for SimDuration {
|
||||
#[track_caller]
|
||||
fn sub_assign(&mut self, rhs: Self) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct SimDurationParts {
|
||||
pub attos: u16,
|
||||
pub femtos: u16,
|
||||
pub picos: u16,
|
||||
pub nanos: u16,
|
||||
pub micros: u16,
|
||||
pub millis: u16,
|
||||
pub secs: u128,
|
||||
}
|
||||
|
||||
macro_rules! impl_duration_units {
|
||||
(
|
||||
$(
|
||||
#[unit_const = $UNIT:ident, from_units = $from_units:ident, as_units = $as_units:ident, units = $units:ident, suffix = $suffix:literal]
|
||||
const $log10_units_per_sec:ident: u32 = $log10_units_per_sec_value:expr;
|
||||
)*
|
||||
) => {
|
||||
impl SimDuration {
|
||||
$(
|
||||
const $log10_units_per_sec: u32 = $log10_units_per_sec_value;
|
||||
pub const fn $from_units($units: u128) -> Self {
|
||||
Self::from_units_helper::<{ Self::$log10_units_per_sec }>($units)
|
||||
}
|
||||
pub const fn $as_units(self) -> u128 {
|
||||
self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }
|
||||
}
|
||||
)*
|
||||
pub const fn to_parts(mut self) -> SimDurationParts {
|
||||
$(
|
||||
let $units = self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
self.attos %= const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
)*
|
||||
SimDurationParts {
|
||||
$($units: $units as _,)*
|
||||
}
|
||||
}
|
||||
pub const fn from_parts_checked(parts: SimDurationParts) -> Option<Self> {
|
||||
let attos = 0u128;
|
||||
$(
|
||||
let Some(product) = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }.checked_mul(parts.$units as u128) else {
|
||||
return None;
|
||||
};
|
||||
let Some(attos) = attos.checked_add(product) else {
|
||||
return None;
|
||||
};
|
||||
)*
|
||||
Some(Self {
|
||||
attos,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimDuration {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let ilog10_attos = match self.attos.checked_ilog10() {
|
||||
Some(v) => v,
|
||||
None => Self::LOG10_ATTOS_PER_SEC,
|
||||
};
|
||||
let (suffix, int, fraction, fraction_digits) =
|
||||
match Self::LOG10_ATTOS_PER_SEC.saturating_sub(ilog10_attos) {
|
||||
$(
|
||||
..=Self::$log10_units_per_sec => {
|
||||
let divisor = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
(
|
||||
$suffix,
|
||||
self.attos / divisor,
|
||||
self.attos % divisor,
|
||||
(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) as usize,
|
||||
)
|
||||
},
|
||||
)*
|
||||
_ => unreachable!(),
|
||||
};
|
||||
write!(f, "{int}")?;
|
||||
if fraction != 0 {
|
||||
write!(f, ".{fraction:0fraction_digits$}")?;
|
||||
}
|
||||
write!(f, " {suffix}")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[test]
|
||||
fn test_duration_debug() {
|
||||
$(
|
||||
assert_eq!(
|
||||
format!("{:?}", SimDuration::$from_units(123)),
|
||||
concat!("123 ", $suffix)
|
||||
);
|
||||
assert_eq!(
|
||||
format!("{:?}", SimDuration::$from_units(1)),
|
||||
concat!("1 ", $suffix),
|
||||
);
|
||||
let mut v = SimDuration::$from_units(1);
|
||||
if v.attos < 1 << 53 {
|
||||
v.attos += 1;
|
||||
assert_eq!(
|
||||
format!("{v:?}"),
|
||||
format!("{} {}", v.attos as f64 / 10.0f64.powf((SimDuration::LOG10_ATTOS_PER_SEC - SimDuration::$log10_units_per_sec) as f64), $suffix),
|
||||
"1 {} + 1 as == {} as", $suffix, v.attos,
|
||||
);
|
||||
}
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_duration_units! {
|
||||
#[unit_const = SECOND, from_units = from_secs, as_units = as_secs, units = secs, suffix = "s"]
|
||||
const LOG10_SECS_PER_SEC: u32 = 0;
|
||||
#[unit_const = MILLISECOND, from_units = from_millis, as_units = as_millis, units = millis, suffix = "ms"]
|
||||
const LOG10_MILLIS_PER_SEC: u32 = 3;
|
||||
#[unit_const = MICROSECOND, from_units = from_micros, as_units = as_micros, units = micros, suffix = "μs"]
|
||||
const LOG10_MICROS_PER_SEC: u32 = 6;
|
||||
#[unit_const = NANOSECOND, from_units = from_nanos, as_units = as_nanos, units = nanos, suffix = "ns"]
|
||||
const LOG10_NANOS_PER_SEC: u32 = 9;
|
||||
#[unit_const = PICOSECOND, from_units = from_picos, as_units = as_picos, units = picos, suffix = "ps"]
|
||||
const LOG10_PICOS_PER_SEC: u32 = 12;
|
||||
#[unit_const = FEMTOSECOND, from_units = from_femtos, as_units = as_femtos, units = femtos, suffix = "fs"]
|
||||
const LOG10_FEMTOS_PER_SEC: u32 = 15;
|
||||
#[unit_const = ATTOSECOND, from_units = from_attos, as_units = as_attos, units = attos, suffix = "as"]
|
||||
const LOG10_ATTOS_PER_SEC: u32 = 18;
|
||||
}
|
||||
|
||||
impl SimDuration {
|
||||
const fn from_units_helper<const UNITS_PER_SEC: u32>(units: u128) -> Self {
|
||||
let Some(attos) =
|
||||
units.checked_mul(const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - UNITS_PER_SEC) })
|
||||
else {
|
||||
panic!("duration too big");
|
||||
};
|
||||
Self { attos }
|
||||
}
|
||||
pub const ZERO: SimDuration = SimDuration::from_secs(0);
|
||||
pub const fn from_parts(parts: SimDurationParts) -> Self {
|
||||
match Self::from_parts_checked(parts) {
|
||||
Some(v) => v,
|
||||
None => panic!("duration too big"),
|
||||
}
|
||||
}
|
||||
pub const fn abs_diff(self, other: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.abs_diff(other.attos),
|
||||
}
|
||||
}
|
||||
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
|
||||
let Some(attos) = self.attos.checked_add(rhs.attos) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
|
||||
let Some(attos) = self.attos.checked_sub(rhs.attos) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
pub const fn is_zero(self) -> bool {
|
||||
self.attos == 0
|
||||
}
|
||||
pub const fn saturating_add(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.saturating_add(rhs.attos),
|
||||
}
|
||||
}
|
||||
pub const fn saturating_sub(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.saturating_sub(rhs.attos),
|
||||
}
|
||||
}
|
||||
pub const fn checked_ilog10(self) -> Option<i32> {
|
||||
let Some(ilog10_attos) = self.attos.checked_ilog10() else {
|
||||
return None;
|
||||
};
|
||||
Some(ilog10_attos as i32 - Self::LOG10_ATTOS_PER_SEC as i32)
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn ilog10(self) -> i32 {
|
||||
let Some(retval) = self.checked_ilog10() else {
|
||||
panic!("tried to take the ilog10 of 0");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn checked_pow10(log10: i32, underflow_is_zero: bool) -> Option<Self> {
|
||||
let Some(log10) = Self::LOG10_ATTOS_PER_SEC.checked_add_signed(log10) else {
|
||||
return if log10 < 0 && underflow_is_zero {
|
||||
Some(Self::ZERO)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
};
|
||||
let Some(attos) = 10u128.checked_pow(log10) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn pow10(log10: i32) -> Self {
|
||||
let Some(retval) = Self::checked_pow10(log10, true) else {
|
||||
panic!("pow10 overflowed");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn is_power_of_ten(self) -> bool {
|
||||
const TEN: u128 = 10;
|
||||
const NUMBER_OF_POWERS_OF_TEN: usize = {
|
||||
let mut n = 0;
|
||||
while let Some(_) = TEN.checked_pow(n as u32) {
|
||||
n += 1;
|
||||
}
|
||||
n
|
||||
};
|
||||
const POWERS_OF_TEN: [u128; NUMBER_OF_POWERS_OF_TEN] = {
|
||||
let mut retval = [0; NUMBER_OF_POWERS_OF_TEN];
|
||||
let mut i = 0;
|
||||
while i < NUMBER_OF_POWERS_OF_TEN {
|
||||
retval[i] = TEN.pow(i as u32);
|
||||
i += 1;
|
||||
}
|
||||
retval
|
||||
};
|
||||
let mut i = 0;
|
||||
while i < NUMBER_OF_POWERS_OF_TEN {
|
||||
if self.attos == POWERS_OF_TEN[i] {
|
||||
return true;
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Duration> for SimDuration {
|
||||
fn from(duration: Duration) -> Self {
|
||||
Self::from_nanos(duration.as_nanos())
|
||||
}
|
||||
}
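// --- Illustrative usage sketch (not part of the diff above) ---
// A minimal example of the SimInstant/SimDuration API shown in this deleted
// file; the free function name is hypothetical and only calls visible in the
// hunk above are used.
fn sim_time_sketch() {
    let t0 = SimInstant::START;
    let step = SimDuration::from_nanos(500);
    // instants advance by adding durations
    let t1 = t0 + step + step;
    assert_eq!(t1.duration_since(t0), SimDuration::from_micros(1));
    // Debug formatting picks the coarsest unit that fits, as the macro's own test checks
    assert_eq!(format!("{:?}", SimDuration::from_millis(123)), "123 ms");
    // to_parts splits a duration into its per-unit fields
    let parts = SimDuration::from_nanos(1_500).to_parts();
    assert_eq!((parts.micros, parts.nanos), (1, 500));
}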
|
File diff suppressed because it is too large
@@ -1,122 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
cli::{FormalArgs, FormalMode, FormalOutput, RunPhase},
|
||||
firrtl::ExportOptions,
|
||||
};
|
||||
use clap::Parser;
|
||||
use hashbrown::HashMap;
|
||||
use serde::Deserialize;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::{Mutex, OnceLock},
|
||||
};
|
||||
|
||||
fn assert_formal_helper() -> FormalArgs {
|
||||
static FORMAL_ARGS: OnceLock<FormalArgs> = OnceLock::new();
|
||||
// ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads
|
||||
FORMAL_ARGS
|
||||
.get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"]))
|
||||
.clone()
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct CargoMetadata {
|
||||
target_directory: String,
|
||||
}
|
||||
|
||||
fn get_cargo_target_dir() -> &'static Path {
|
||||
static RETVAL: OnceLock<PathBuf> = OnceLock::new();
|
||||
RETVAL.get_or_init(|| {
|
||||
let output = Command::new(
|
||||
std::env::var_os("CARGO")
|
||||
.as_deref()
|
||||
.unwrap_or("cargo".as_ref()),
|
||||
)
|
||||
.arg("metadata")
|
||||
.output()
|
||||
.expect("can't run `cargo metadata`");
|
||||
if !output.status.success() {
|
||||
panic!(
|
||||
"can't run `cargo metadata`:\n{}\nexited with status: {}",
|
||||
String::from_utf8_lossy(&output.stderr),
|
||||
output.status
|
||||
);
|
||||
}
|
||||
let CargoMetadata { target_directory } =
|
||||
serde_json::from_slice(&output.stdout).expect("can't parse output of `cargo metadata`");
|
||||
PathBuf::from(target_directory)
|
||||
})
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
||||
static DIRS: Mutex<Option<HashMap<String, u64>>> = Mutex::new(None);
|
||||
let test_name = test_name.to_string();
|
||||
// don't use line/column numbers since that constantly changes as you edit tests
|
||||
let file = std::panic::Location::caller().file();
|
||||
// simple reproducible hash
|
||||
let simple_hash = file.bytes().chain(test_name.bytes()).fold(
|
||||
((file.len() as u32) << 16).wrapping_add(test_name.len() as u32),
|
||||
|mut h, b| {
|
||||
h = h.wrapping_mul(0xaa0d184b);
|
||||
h ^= h.rotate_right(5);
|
||||
h ^= h.rotate_right(13);
|
||||
h.wrapping_add(b as u32)
|
||||
},
|
||||
);
|
||||
let mut dir = String::with_capacity(64);
|
||||
for ch in Path::new(file)
|
||||
.file_stem()
|
||||
.unwrap_or_default()
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.chars()
|
||||
.chain(['-'])
|
||||
.chain(test_name.chars())
|
||||
{
|
||||
dir.push(match ch {
|
||||
ch if ch.is_alphanumeric() => ch,
|
||||
'_' | '-' | '+' | '.' | ',' | ' ' => ch,
|
||||
_ => '_',
|
||||
});
|
||||
}
|
||||
write!(dir, "-{simple_hash:08x}").unwrap();
|
||||
let index = *DIRS
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_or_insert_with(HashMap::new)
|
||||
.entry_ref(&dir)
|
||||
.and_modify(|v| *v += 1)
|
||||
.or_insert(0);
|
||||
write!(dir, "-{index}").unwrap();
|
||||
get_cargo_target_dir()
|
||||
.join("fayalite_assert_formal")
|
||||
.join(dir)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_formal<M>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
mode: FormalMode,
|
||||
depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) where
|
||||
FormalArgs: RunPhase<M, Output = FormalOutput>,
|
||||
{
|
||||
let mut args = assert_formal_helper();
|
||||
args.verilog.firrtl.base.redirect_output_for_rust_test = true;
|
||||
args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name));
|
||||
args.verilog.firrtl.export_options = export_options;
|
||||
args.verilog.debug = true;
|
||||
args.mode = mode;
|
||||
args.depth = depth;
|
||||
if let Some(solver) = solver {
|
||||
args.solver = solver.into();
|
||||
}
|
||||
args.run(module).expect("testing::assert_formal() failed");
|
||||
}
|
|
@@ -210,9 +210,7 @@ impl sealed::BaseTypeSealed for CanonicalType {}
|
||||
impl BaseType for CanonicalType {}
|
||||
|
||||
pub trait TypeOrDefault<D: Type>:
|
||||
sealed::TypeOrDefaultSealed + Copy + Eq + Hash + fmt::Debug
|
||||
{
|
||||
pub trait TypeOrDefault<D: Type>: sealed::TypeOrDefaultSealed {
|
||||
type Type: Type;
|
||||
fn get<F: FnOnce() -> D>(self, default: F) -> Self::Type;
|
||||
}
|
||||
|
@@ -330,6 +328,6 @@ impl<T: Type> Index<T> for AsMaskWithoutGenerics {
type Output = T::MaskType;
|
||||
|
||||
fn index(&self, ty: T) -> &Self::Output {
|
||||
Interned::into_inner(Intern::intern_sized(ty.mask_type()))
|
||||
Interned::<_>::into_inner(Intern::intern_sized(ty.mask_type()))
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -6,7 +6,6 @@ mod const_cmp;
mod const_usize;
|
||||
mod misc;
|
||||
mod scoped_ref;
|
||||
pub(crate) mod streaming_read_utf8;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use const_bool::{ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool};
|
||||
|
@@ -24,9 +23,5 @@ pub use scoped_ref::ScopedRef;
|
||||
#[doc(inline)]
|
||||
pub use misc::{
|
||||
get_many_mut, interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay,
|
||||
DebugAsRawString, MakeMutSlice, RcWriter,
|
||||
interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice,
|
||||
};
|
||||
|
||||
pub mod job_server;
|
||||
pub mod ready_valid;
|
||||
|
|
|
@@ -1,193 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use ctor::ctor;
|
||||
use jobslot::{Acquired, Client};
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
mem,
|
||||
num::NonZeroUsize,
|
||||
sync::{Condvar, Mutex, Once, OnceLock},
|
||||
thread::spawn,
|
||||
};
|
||||
|
||||
fn get_or_make_client() -> &'static Client {
|
||||
#[ctor]
|
||||
static CLIENT: OnceLock<Client> = unsafe {
|
||||
match Client::from_env() {
|
||||
Some(client) => OnceLock::from(client),
|
||||
None => OnceLock::new(),
|
||||
}
|
||||
};
|
||||
|
||||
CLIENT.get_or_init(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
|
||||
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
|
||||
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
|
||||
Some(b'=') => {
|
||||
let mut arg = arg.into_encoded_bytes();
|
||||
arg.drain(..=TEST_THREADS_OPTION.len());
|
||||
available_parallelism = Some(arg);
|
||||
break;
|
||||
}
|
||||
None => {
|
||||
available_parallelism = args.next().map(OsString::into_encoded_bytes);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
let available_parallelism = if let Some(available_parallelism) = available_parallelism
|
||||
.as_deref()
|
||||
.and_then(|v| std::str::from_utf8(v).ok())
|
||||
.and_then(|v| v.parse().ok())
|
||||
{
|
||||
available_parallelism
|
||||
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
|
||||
available_parallelism
|
||||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
|
||||
})
|
||||
}
|
||||
|
||||
struct State {
|
||||
waiting_count: usize,
|
||||
available: Vec<Acquired>,
|
||||
implicit_available: bool,
|
||||
}
|
||||
|
||||
impl State {
|
||||
fn total_available(&self) -> usize {
|
||||
self.available.len() + self.implicit_available as usize
|
||||
}
|
||||
fn additional_waiting(&self) -> usize {
|
||||
self.waiting_count.saturating_sub(self.total_available())
|
||||
}
|
||||
}
|
||||
|
||||
static STATE: Mutex<State> = Mutex::new(State {
|
||||
waiting_count: 0,
|
||||
available: Vec::new(),
|
||||
implicit_available: true,
|
||||
});
|
||||
static COND_VAR: Condvar = Condvar::new();
|
||||
|
||||
#[derive(Debug)]
|
||||
enum AcquiredJobInner {
|
||||
FromJobServer(Acquired),
|
||||
ImplicitJob,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AcquiredJob {
|
||||
job: AcquiredJobInner,
|
||||
}
|
||||
|
||||
impl AcquiredJob {
|
||||
fn start_acquire_thread() {
|
||||
static STARTED_THREAD: Once = Once::new();
|
||||
STARTED_THREAD.call_once(|| {
|
||||
spawn(|| {
|
||||
let mut acquired = None;
|
||||
let client = get_or_make_client();
|
||||
let mut state = STATE.lock().unwrap();
|
||||
loop {
|
||||
state = if state.additional_waiting() == 0 {
|
||||
if acquired.is_some() {
|
||||
drop(state);
|
||||
drop(acquired.take()); // drop Acquired outside of lock
|
||||
STATE.lock().unwrap()
|
||||
} else {
|
||||
COND_VAR.wait(state).unwrap()
|
||||
}
|
||||
} else if acquired.is_some() {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
state.available.push(acquired.take().unwrap());
|
||||
COND_VAR.notify_all();
|
||||
state
|
||||
} else {
|
||||
drop(state);
|
||||
acquired = Some(
|
||||
client
|
||||
.acquire()
|
||||
.expect("can't acquire token from job server"),
|
||||
);
|
||||
STATE.lock().unwrap()
|
||||
};
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
fn acquire_inner(block: bool) -> Option<Self> {
|
||||
Self::start_acquire_thread();
|
||||
let mut state = STATE.lock().unwrap();
|
||||
loop {
|
||||
if let Some(acquired) = state.available.pop() {
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::FromJobServer(acquired),
|
||||
});
|
||||
}
|
||||
if state.implicit_available {
|
||||
state.implicit_available = false;
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::ImplicitJob,
|
||||
});
|
||||
}
|
||||
if !block {
|
||||
return None;
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
state = COND_VAR.wait(state).unwrap();
|
||||
state.waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
pub fn try_acquire() -> Option<Self> {
|
||||
Self::acquire_inner(false)
|
||||
}
|
||||
pub fn acquire() -> Self {
|
||||
Self::acquire_inner(true).expect("failed to acquire token")
|
||||
}
|
||||
pub fn run_command<R>(
|
||||
&mut self,
|
||||
cmd: std::process::Command,
|
||||
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
||||
) -> std::io::Result<R> {
|
||||
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AcquiredJob {
|
||||
fn drop(&mut self) {
|
||||
let mut state = STATE.lock().unwrap();
|
||||
match &self.job {
|
||||
AcquiredJobInner::FromJobServer(_) => {
|
||||
if state.waiting_count > state.available.len() + state.implicit_available as usize {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
let AcquiredJobInner::FromJobServer(acquired) =
|
||||
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
state.available.push(acquired);
|
||||
COND_VAR.notify_all();
|
||||
}
|
||||
}
|
||||
AcquiredJobInner::ImplicitJob => {
|
||||
state.implicit_available = true;
|
||||
if state.waiting_count > state.available.len() {
|
||||
COND_VAR.notify_all();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
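// --- Illustrative usage sketch (not part of the diff above) ---
// How a caller might hold a job-server token around an external build step;
// the function and the `make` invocation are hypothetical, and only the
// AcquiredJob API from the deleted module above is assumed.
fn run_build_step() -> std::io::Result<std::process::ExitStatus> {
    // blocks until a token is free; the token returns to the pool on drop
    let mut job = AcquiredJob::acquire();
    job.run_command(std::process::Command::new("make"), |cmd| {
        // runs while holding the token, keeping total parallelism bounded
        cmd.arg("-j1").status()
    })
}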
|
|
@@ -3,7 +3,6 @@
use crate::intern::{Intern, Interned};
|
||||
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
fmt::{self, Debug, Write},
|
||||
rc::Rc,
|
||||
sync::{Arc, OnceLock},
|
||||
|
@@ -95,15 +94,9 @@ pub fn interned_bit(v: bool) -> Interned<BitSlice> {
RETVAL.get_or_init(|| [bits![0; 1].intern(), bits![1; 1].intern()])[v as usize]
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct BitSliceWriteWithBase<'a>(pub &'a BitSlice);
|
||||
|
||||
impl<'a> Debug for BitSliceWriteWithBase<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{self:#x}")
|
||||
}
|
||||
}
|
||||
|
||||
impl BitSliceWriteWithBase<'_> {
|
||||
fn fmt_with_base<const BITS_PER_DIGIT: usize, const UPPER_CASE: bool>(
|
||||
self,
|
||||
|
@@ -162,66 +155,3 @@ impl fmt::UpperHex for BitSliceWriteWithBase<'_> {
self.fmt_with_base::<4, true>(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[track_caller]
|
||||
pub fn get_many_mut<T, const N: usize>(slice: &mut [T], indexes: [usize; N]) -> [&mut T; N] {
|
||||
for i in 0..N {
|
||||
for j in 0..i {
|
||||
assert!(indexes[i] != indexes[j], "duplicate index");
|
||||
}
|
||||
assert!(indexes[i] < slice.len(), "index out of bounds");
|
||||
}
|
||||
// Safety: checked that no indexes are duplicates and no indexes are out of bounds
|
||||
unsafe {
|
||||
let base = slice.as_mut_ptr(); // convert to a raw pointer before loop to avoid aliasing with &mut [T]
|
||||
std::array::from_fn(|i| &mut *base.add(indexes[i]))
|
||||
}
|
||||
}
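// --- Illustrative usage sketch (not part of the diff above) ---
// get_many_mut hands out disjoint mutable references into one slice; distinct
// in-bounds indices are required, otherwise it panics.
fn get_many_mut_sketch() {
    let mut data = [10, 20, 30, 40];
    let [a, b] = get_many_mut(&mut data, [3, 1]);
    std::mem::swap(a, b);
    assert_eq!(data, [10, 40, 30, 20]);
}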
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct RcWriter(Rc<Cell<Vec<u8>>>);
|
||||
|
||||
impl Debug for RcWriter {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.borrow_impl(|buf| {
|
||||
f.debug_tuple("RcWriter")
|
||||
.field(&DebugAsDisplay(format_args!("b\"{}\"", buf.escape_ascii())))
|
||||
.finish()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl RcWriter {
|
||||
fn borrow_impl<R>(&self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
|
||||
let buf = Cell::take(&self.0);
|
||||
struct PutBackOnDrop<'a> {
|
||||
buf: Vec<u8>,
|
||||
this: &'a RcWriter,
|
||||
}
|
||||
impl Drop for PutBackOnDrop<'_> {
|
||||
fn drop(&mut self) {
|
||||
self.this.0.set(std::mem::take(&mut self.buf));
|
||||
}
|
||||
}
|
||||
let mut buf = PutBackOnDrop { buf, this: self };
|
||||
f(&mut buf.buf)
|
||||
}
|
||||
pub fn borrow<R>(&mut self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
|
||||
self.borrow_impl(f)
|
||||
}
|
||||
pub fn take(&mut self) -> Vec<u8> {
|
||||
Cell::take(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::io::Write for RcWriter {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self.borrow(|v| v.extend_from_slice(buf));
|
||||
Ok(buf.len())
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
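// --- Illustrative usage sketch (not part of the diff above) ---
// RcWriter clones share one buffer, so a writer handed to a serializer can be
// drained later through any surviving clone; the function name is hypothetical.
fn rc_writer_sketch() -> Vec<u8> {
    use std::io::Write;
    let mut writer = RcWriter::default();
    let mut clone = writer.clone();
    writer.write_all(b"hello ").unwrap();
    clone.write_all(b"world").unwrap();
    // yields b"hello world" and leaves the shared buffer empty
    writer.take()
}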
|
||||
|
|
|
@@ -1,566 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{memory::splat_mask, prelude::*};
|
||||
use std::num::NonZeroUsize;
|
||||
|
||||
#[hdl]
|
||||
pub struct ReadyValid<T> {
|
||||
pub data: HdlOption<T>,
|
||||
#[hdl(flip)]
|
||||
pub ready: Bool,
|
||||
}
|
||||
|
||||
impl<T: Type> ReadyValid<T> {
|
||||
#[hdl]
|
||||
pub fn firing(expr: Expr<Self>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let firing: Bool = wire();
|
||||
#[hdl]
|
||||
match expr.data {
|
||||
HdlNone => connect(firing, false),
|
||||
HdlSome(_) => connect(firing, expr.ready),
|
||||
}
|
||||
firing
|
||||
}
|
||||
#[hdl]
|
||||
pub fn firing_data(expr: impl ToExpr<Type = Self>) -> Expr<HdlOption<T>> {
|
||||
let expr = expr.to_expr();
|
||||
let option_ty = Expr::ty(expr).data;
|
||||
#[hdl]
|
||||
let firing_data = wire(option_ty);
|
||||
connect(firing_data, option_ty.HdlNone());
|
||||
#[hdl]
|
||||
if expr.ready {
|
||||
connect(firing_data, expr.data);
|
||||
}
|
||||
firing_data
|
||||
}
|
||||
#[hdl]
|
||||
pub fn map<R: Type>(
|
||||
expr: Expr<Self>,
|
||||
f: impl FnOnce(Expr<T>) -> Expr<R>,
|
||||
) -> Expr<ReadyValid<R>> {
|
||||
let data = HdlOption::map(expr.data, f);
|
||||
#[hdl]
|
||||
let mapped = wire(ReadyValid[Expr::ty(data).HdlSome]);
|
||||
connect(mapped.data, data);
|
||||
connect(expr.ready, mapped.ready);
|
||||
mapped
|
||||
}
|
||||
}
|
||||
|
||||
/// This debug port is only meant to assist the formal proof of the queue.
|
||||
#[cfg(test)]
|
||||
#[doc(hidden)]
|
||||
#[hdl]
|
||||
pub struct QueueDebugPort<Element, Index> {
|
||||
#[hdl(flip)]
|
||||
index_to_check: Index,
|
||||
stored: Element,
|
||||
inp_index: Index,
|
||||
out_index: Index,
|
||||
}
|
||||
|
||||
#[hdl_module]
|
||||
pub fn queue<T: Type>(
|
||||
ty: T,
|
||||
capacity: NonZeroUsize,
|
||||
inp_ready_is_comb: bool,
|
||||
out_valid_is_comb: bool,
|
||||
) {
|
||||
let count_ty = UInt::range_inclusive(0..=capacity.get());
|
||||
let index_ty = UInt::range(0..capacity.get());
|
||||
|
||||
#[hdl]
|
||||
let cd: ClockDomain = m.input();
|
||||
#[hdl]
|
||||
let inp: ReadyValid<T> = m.input(ReadyValid[ty]);
|
||||
#[hdl]
|
||||
let out: ReadyValid<T> = m.output(ReadyValid[ty]);
|
||||
#[hdl]
|
||||
let count: UInt = m.output(count_ty);
|
||||
|
||||
#[hdl]
|
||||
let inp_index_reg = reg_builder().clock_domain(cd).reset(0.cast_to(index_ty));
|
||||
#[hdl]
|
||||
let out_index_reg = reg_builder().clock_domain(cd).reset(0.cast_to(index_ty));
|
||||
#[hdl]
|
||||
let maybe_full_reg = reg_builder().clock_domain(cd).reset(false);
|
||||
|
||||
#[hdl]
|
||||
let mut mem = memory(ty);
|
||||
mem.depth(capacity.get());
|
||||
let read_port = mem.new_read_port();
|
||||
let write_port = mem.new_write_port();
|
||||
|
||||
#[hdl]
|
||||
let inp_firing: Bool = wire();
|
||||
connect(inp_firing, ReadyValid::firing(inp));
|
||||
#[hdl]
|
||||
let out_firing: Bool = wire();
|
||||
connect(out_firing, ReadyValid::firing(out));
|
||||
#[hdl]
|
||||
let indexes_equal: Bool = wire();
|
||||
connect(indexes_equal, inp_index_reg.cmp_eq(out_index_reg));
|
||||
#[hdl]
|
||||
let empty: Bool = wire();
|
||||
connect(empty, indexes_equal & !maybe_full_reg);
|
||||
#[hdl]
|
||||
let full: Bool = wire();
|
||||
connect(full, indexes_equal & maybe_full_reg);
|
||||
|
||||
connect(read_port.addr, out_index_reg);
|
||||
connect(read_port.en, true);
|
||||
connect(read_port.clk, cd.clk);
|
||||
connect(write_port.addr, inp_index_reg);
|
||||
connect(write_port.en, inp_firing);
|
||||
connect(write_port.clk, cd.clk);
|
||||
connect(write_port.data, HdlOption::unwrap_or(inp.data, ty.uninit()));
|
||||
connect(write_port.mask, splat_mask(ty, true.to_expr()));
|
||||
|
||||
connect(inp.ready, !full);
|
||||
if inp_ready_is_comb {
|
||||
#[hdl]
|
||||
if out.ready {
|
||||
connect(inp.ready, true);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if !empty {
|
||||
connect(out.data, HdlSome(read_port.data));
|
||||
} else {
|
||||
if out_valid_is_comb {
|
||||
connect(out.data, inp.data);
|
||||
} else {
|
||||
connect(out.data, HdlOption[ty].HdlNone());
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if inp_firing.cmp_ne(out_firing) {
|
||||
connect(maybe_full_reg, inp_firing);
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if inp_firing {
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_eq(capacity.get() - 1) {
|
||||
connect_any(inp_index_reg, 0_hdl_u0);
|
||||
} else {
|
||||
connect_any(inp_index_reg, inp_index_reg + 1_hdl_u1);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if out_firing {
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_eq(capacity.get() - 1) {
|
||||
connect_any(out_index_reg, 0_hdl_u0);
|
||||
} else {
|
||||
connect_any(out_index_reg, out_index_reg + 1_hdl_u1);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if indexes_equal {
|
||||
#[hdl]
|
||||
if maybe_full_reg {
|
||||
connect_any(count, capacity);
|
||||
} else {
|
||||
connect_any(count, 0_hdl_u0);
|
||||
}
|
||||
} else {
|
||||
if capacity.is_power_of_two() {
|
||||
debug_assert_eq!(count_ty.width(), index_ty.width() + 1);
|
||||
#[hdl]
|
||||
let count_lower = wire(index_ty);
|
||||
connect(
|
||||
count_lower,
|
||||
(inp_index_reg - out_index_reg).cast_to(index_ty),
|
||||
); // wrap
|
||||
connect(count, count_lower.cast_to(count_ty));
|
||||
} else {
|
||||
debug_assert_eq!(count_ty.width(), index_ty.width());
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_lt(out_index_reg) {
|
||||
connect(count, inp_index_reg + capacity - out_index_reg);
|
||||
} else {
|
||||
connect(count, inp_index_reg - out_index_reg);
|
||||
}
|
||||
}
|
||||
}
|
||||
// These debug ports expose some internal state during the Induction phase
|
||||
// of Formal Verification. They are not present in normal use.
|
||||
#[cfg(test)]
|
||||
{
|
||||
#[hdl]
|
||||
let dbg: QueueDebugPort<T, UInt> = m.output(QueueDebugPort[ty][index_ty]);
|
||||
// read the memory word currently stored at some fixed index
|
||||
let debug_port = mem.new_read_port();
|
||||
connect(debug_port.addr, dbg.index_to_check);
|
||||
connect(debug_port.en, true);
|
||||
connect(debug_port.clk, cd.clk);
|
||||
connect(dbg.stored, debug_port.data);
|
||||
// also expose the current read and write indices
|
||||
connect(dbg.inp_index, inp_index_reg);
|
||||
connect(dbg.out_index, out_index_reg);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
cli::FormalMode, firrtl::ExportOptions,
|
||||
module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
|
||||
ty::StaticType,
|
||||
};
|
||||
use std::num::NonZero;
|
||||
|
||||
#[track_caller]
|
||||
fn test_queue(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
|
||||
assert_formal(
|
||||
format_args!("test_queue_{capacity}_{inp_ready_is_comb}_{out_valid_is_comb}"),
|
||||
queue_test(capacity, inp_ready_is_comb, out_valid_is_comb),
|
||||
FormalMode::Prove,
|
||||
2,
|
||||
None,
|
||||
ExportOptions {
|
||||
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
|
||||
..ExportOptions::default()
|
||||
},
|
||||
);
|
||||
/// Formal verification of the FIFO queue
|
||||
///
|
||||
/// The strategy derives from the observation that, if we filter its
|
||||
/// input and output streams to consider just one in every N reads and
|
||||
/// writes (where N is the FIFO capacity), then the FIFO effectively
|
||||
/// behaves as a one-entry FIFO.
|
||||
///
|
||||
/// In particular, any counterexample of the full FIFO behaving badly
|
||||
/// will also be caught by one of the filtered versions (one which
|
||||
/// happens to be in phase with the offending input or output).
|
||||
#[hdl_module]
|
||||
fn queue_test(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let cd = wire();
|
||||
connect(
|
||||
cd,
|
||||
#[hdl]
|
||||
ClockDomain {
|
||||
clk,
|
||||
rst: formal_reset().to_reset(),
|
||||
},
|
||||
);
|
||||
|
||||
// random input data
|
||||
#[hdl]
|
||||
let inp_data: HdlOption<UInt<8>> = wire();
|
||||
#[hdl]
|
||||
if any_seq(Bool) {
|
||||
connect(inp_data, HdlSome(any_seq(UInt::<8>::TYPE)));
|
||||
} else {
|
||||
connect(inp_data, HdlNone());
|
||||
}
|
||||
|
||||
// assert output ready at random
|
||||
#[hdl]
|
||||
let out_ready: Bool = wire();
|
||||
connect(out_ready, any_seq(Bool));
|
||||
|
||||
// The current number of elements in the FIFO ranges from zero to
|
||||
// maximum capacity, inclusive.
|
||||
let count_ty = UInt::range_inclusive(0..=capacity.get());
|
||||
// type for counters that wrap around at the FIFO capacity
|
||||
let index_ty = UInt::range(0..capacity.get());
|
||||
|
||||
// among all entries of the FIFO internal circular memory, choose
|
||||
// one at random to check
|
||||
#[hdl]
|
||||
let index_to_check = wire(index_ty);
|
||||
connect(index_to_check, any_const(index_ty));
|
||||
hdl_assume(clk, index_to_check.cmp_lt(capacity.get()), "");
|
||||
|
||||
// instantiate and connect the queue
|
||||
#[hdl]
|
||||
let dut = instance(queue(
|
||||
UInt[ConstUsize::<8>],
|
||||
capacity,
|
||||
inp_ready_is_comb,
|
||||
out_valid_is_comb,
|
||||
));
|
||||
connect(dut.cd, cd);
|
||||
connect(dut.inp.data, inp_data);
|
||||
connect(dut.out.ready, out_ready);
|
||||
|
||||
// Keep an independent count of words in the FIFO. Ensure that
|
||||
// it's always correct, and never overflows.
|
||||
#[hdl]
|
||||
let expected_count_reg = reg_builder().clock_domain(cd).reset(count_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.inp) & !ReadyValid::firing(dut.out) {
|
||||
hdl_assert(clk, expected_count_reg.cmp_ne(capacity.get()), "");
|
||||
connect_any(expected_count_reg, expected_count_reg + 1u8);
|
||||
} else if !ReadyValid::firing(dut.inp) & ReadyValid::firing(dut.out) {
|
||||
hdl_assert(clk, expected_count_reg.cmp_ne(count_ty.zero()), "");
|
||||
connect_any(expected_count_reg, expected_count_reg - 1u8);
|
||||
}
|
||||
hdl_assert(clk, expected_count_reg.cmp_eq(dut.count), "");
|
||||
|
||||
// keep an independent write index into the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let inp_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.inp) {
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_ne(capacity.get() - 1) {
|
||||
connect_any(inp_index_reg, inp_index_reg + 1u8);
|
||||
} else {
|
||||
connect_any(inp_index_reg, 0_hdl_u0);
|
||||
}
|
||||
}
|
||||
|
||||
// keep an independent read index into the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let out_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.out) {
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_ne(capacity.get() - 1) {
|
||||
connect_any(out_index_reg, out_index_reg + 1u8);
|
||||
} else {
|
||||
connect_any(out_index_reg, 0_hdl_u0);
|
||||
}
|
||||
}
|
||||
|
||||
// filter the input data stream, predicated by the read index
|
||||
// matching the chosen position in the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let inp_index_matches = wire();
|
||||
connect(inp_index_matches, inp_index_reg.cmp_eq(index_to_check));
|
||||
#[hdl]
|
||||
let inp_firing_data = wire();
|
||||
connect(inp_firing_data, HdlNone());
|
||||
#[hdl]
|
||||
if inp_index_matches {
|
||||
connect(inp_firing_data, ReadyValid::firing_data(dut.inp));
|
||||
}
|
||||
|
||||
// filter the output data stream, predicated by the write index
|
||||
// matching the chosen position in the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let out_index_matches = wire();
|
||||
connect(out_index_matches, out_index_reg.cmp_eq(index_to_check));
|
||||
#[hdl]
|
||||
let out_firing_data = wire();
|
||||
connect(out_firing_data, HdlNone());
|
||||
#[hdl]
|
||||
if out_index_matches {
|
||||
connect(out_firing_data, ReadyValid::firing_data(dut.out));
|
||||
}
|
||||
|
||||
// Implement a one-entry FIFO and ensure its equivalence to the
|
||||
// filtered FIFO.
|
||||
//
|
||||
// the holding register for our one-entry FIFO
|
||||
#[hdl]
|
||||
let stored_reg = reg_builder().clock_domain(cd).reset(HdlNone());
|
||||
#[hdl]
|
||||
match stored_reg {
|
||||
// If the holding register is empty...
|
||||
HdlNone => {
|
||||
#[hdl]
|
||||
match inp_firing_data {
|
||||
// ... and we are not receiving data, then we must not
|
||||
// transmit any data.
|
||||
HdlNone => hdl_assert(clk, HdlOption::is_none(out_firing_data), ""),
|
||||
// If we are indeed receiving some data...
|
||||
HdlSome(data_in) => {
|
||||
#[hdl]
|
||||
match out_firing_data {
|
||||
// ... and transmitting at the same time, we
|
||||
// must be transmitting the input data itself,
|
||||
// since the holding register is empty.
|
||||
HdlSome(data_out) => hdl_assert(clk, data_out.cmp_eq(data_in), ""),
|
||||
// If we are receiving, but not transmitting,
|
||||
// store the received data in the holding
|
||||
// register.
|
||||
HdlNone => connect(stored_reg, HdlSome(data_in)),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If there is some value stored in the holding register...
|
||||
HdlSome(stored) => {
|
||||
#[hdl]
|
||||
match out_firing_data {
|
||||
// ... and we are not transmitting it, we cannot
|
||||
// receive any more data.
|
||||
HdlNone => hdl_assert(clk, HdlOption::is_none(inp_firing_data), ""),
|
||||
// If we are transmitting a previously stored value...
|
||||
HdlSome(data_out) => {
|
||||
// ... it must be the same data we stored earlier.
|
||||
hdl_assert(clk, data_out.cmp_eq(stored), "");
|
||||
// Also, accept new data, if any. Otherwise,
|
||||
// let the holding register become empty.
|
||||
connect(stored_reg, inp_firing_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// from now on, some extra assertions in order to pass induction
|
||||
|
||||
// sync the holding register, when it's occupied, to the
|
||||
// corresponding entry in the FIFO's circular buffer
|
||||
connect(dut.dbg.index_to_check, index_to_check);
|
||||
#[hdl]
|
||||
if let HdlSome(stored) = stored_reg {
|
||||
hdl_assert(clk, stored.cmp_eq(dut.dbg.stored), "");
|
||||
}
|
||||
|
||||
// sync the read and write indices
|
||||
hdl_assert(clk, inp_index_reg.cmp_eq(dut.dbg.inp_index), "");
|
||||
hdl_assert(clk, out_index_reg.cmp_eq(dut.dbg.out_index), "");
|
||||
|
||||
// the indices should never go past the capacity, but induction
|
||||
// doesn't know that...
|
||||
hdl_assert(clk, inp_index_reg.cmp_lt(capacity.get()), "");
|
||||
hdl_assert(clk, out_index_reg.cmp_lt(capacity.get()), "");
|
||||
|
||||
// strongly constrain the state of the holding register
|
||||
//
|
||||
// The holding register is full if and only if the corresponding
|
||||
// FIFO entry was written to and not yet read. In other words, if
|
||||
// the number of pending reads until the chosen entry is read out
|
||||
// is greater than the current FIFO count, then the entry couldn't
|
||||
// be in the FIFO in the first place.
|
||||
#[hdl]
|
||||
let pending_reads: UInt = wire(index_ty);
|
||||
// take care of wrap-around when subtracting indices, add the
|
||||
// capacity amount to keep the result positive if necessary
|
||||
#[hdl]
|
||||
if index_to_check.cmp_ge(out_index_reg) {
|
||||
connect(pending_reads, index_to_check - out_index_reg);
|
||||
} else {
|
||||
connect(
|
||||
pending_reads,
|
||||
index_to_check + capacity.get() - out_index_reg,
|
||||
);
|
||||
}
|
||||
// check whether the chosen entry is in the FIFO
|
||||
#[hdl]
|
||||
let expected_stored: Bool = wire();
|
||||
connect(expected_stored, pending_reads.cmp_lt(dut.count));
|
||||
// sync with the state of the holding register
|
||||
hdl_assert(
|
||||
clk,
|
||||
expected_stored.cmp_eq(HdlOption::is_some(stored_reg)),
|
||||
"",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_false_false() {
|
||||
test_queue(NonZero::new(1).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_false_true() {
|
||||
test_queue(NonZero::new(1).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_true_false() {
|
||||
test_queue(NonZero::new(1).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_1_true_true() {
|
||||
test_queue(NonZero::new(1).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_false_false() {
|
||||
test_queue(NonZero::new(2).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_false_true() {
|
||||
test_queue(NonZero::new(2).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_true_false() {
|
||||
test_queue(NonZero::new(2).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_2_true_true() {
|
||||
test_queue(NonZero::new(2).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_false_false() {
|
||||
test_queue(NonZero::new(3).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_false_true() {
|
||||
test_queue(NonZero::new(3).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_true_false() {
|
||||
test_queue(NonZero::new(3).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_3_true_true() {
|
||||
test_queue(NonZero::new(3).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_false_false() {
|
||||
test_queue(NonZero::new(4).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_false_true() {
|
||||
test_queue(NonZero::new(4).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_true_false() {
|
||||
test_queue(NonZero::new(4).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_4_true_true() {
|
||||
test_queue(NonZero::new(4).unwrap(), true, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_false_false() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_false_true() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_true_false() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, false);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_many_true_true() {
|
||||
test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, true);
|
||||
}
|
||||
}
|
|
@@ -1,5 +1,3 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
mod safety_boundary {
|
||||
use std::{cell::Cell, ptr::NonNull};
|
||||
|
||||
|
@@ -106,9 +104,3 @@ impl<T: ?Sized> ScopedRef<T> {
self.0.with_opt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> Default for ScopedRef<T> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -1,31 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use std::{
|
||||
io::{self, BufRead},
|
||||
str,
|
||||
};
|
||||
|
||||
pub(crate) fn streaming_read_utf8<R: BufRead, E: From<io::Error>>(
|
||||
reader: R,
|
||||
mut callback: impl FnMut(&str) -> Result<(), E>,
|
||||
) -> Result<(), E> {
|
||||
let mut buf = [0; 4];
|
||||
let mut buf_len = 0;
|
||||
for byte in reader.bytes() {
|
||||
buf[buf_len] = byte?;
|
||||
buf_len += 1;
|
||||
match str::from_utf8(&buf[..buf_len]) {
|
||||
Ok(buf) => {
|
||||
callback(buf)?;
|
||||
buf_len = 0;
|
||||
}
|
||||
Err(e) => {
|
||||
if e.error_len().is_some() {
|
||||
callback("\u{FFFD}")?; // replacement character
|
||||
buf_len = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
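// --- Illustrative usage sketch (not part of the diff above) ---
// Driving the crate-internal helper above: it re-slices a byte stream into
// valid UTF-8 chunks and substitutes U+FFFD for bytes that cannot form a
// valid sequence. The wrapper function is hypothetical.
fn collect_utf8(bytes: &[u8]) -> std::io::Result<String> {
    let mut out = String::new();
    streaming_read_utf8(bytes, |chunk| {
        out.push_str(chunk);
        Ok::<(), std::io::Error>(())
    })?;
    Ok(out)
}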
|
88  crates/fayalite/src/valueless.rs  Normal file
@@ -0,0 +1,88 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
int::{DynIntType, DynSIntType, DynUIntType, IntTypeTrait, SIntType},
|
||||
ty::{Type, Value},
|
||||
};
|
||||
use std::ops::RangeBounds;
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq, Default)]
|
||||
pub struct Valueless<T> {
|
||||
pub ty: T,
|
||||
}
|
||||
|
||||
impl<T: Type> Valueless<T> {
|
||||
pub fn to_canonical(&self) -> Valueless<T::CanonicalType> {
|
||||
Valueless {
|
||||
ty: self.ty.canonical(),
|
||||
}
|
||||
}
|
||||
pub fn from_canonical(v: Valueless<T::CanonicalType>) -> Self {
|
||||
Valueless {
|
||||
ty: T::from_canonical_type(v.ty),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod sealed {
|
||||
pub trait Sealed {}
|
||||
}
|
||||
|
||||
pub trait ValuelessTr: sealed::Sealed {
|
||||
type Type: Type<Value = Self::Value>;
|
||||
type Value: Value<Type = Self::Type>;
|
||||
}
|
||||
|
||||
impl<T> sealed::Sealed for Valueless<T> {}
|
||||
|
||||
impl<T: Type> ValuelessTr for Valueless<T> {
|
||||
type Type = T;
|
||||
type Value = T::Value;
|
||||
}
|
||||
|
||||
impl<T: IntTypeTrait> Valueless<T> {
|
||||
pub fn signum(&self) -> Valueless<SIntType<2>> {
|
||||
Valueless::default()
|
||||
}
|
||||
pub fn as_same_width_uint(self) -> Valueless<T::SameWidthUInt> {
|
||||
Valueless {
|
||||
ty: self.ty.as_same_width_uint(),
|
||||
}
|
||||
}
|
||||
pub fn as_same_width_sint(self) -> Valueless<T::SameWidthSInt> {
|
||||
Valueless {
|
||||
ty: self.ty.as_same_width_sint(),
|
||||
}
|
||||
}
|
||||
pub fn as_same_value_uint(self) -> Valueless<DynUIntType> {
|
||||
Valueless {
|
||||
ty: self.ty.as_same_value_uint(),
|
||||
}
|
||||
}
|
||||
pub fn as_same_value_sint(self) -> Valueless<DynSIntType> {
|
||||
Valueless {
|
||||
ty: self.ty.as_same_value_sint(),
|
||||
}
|
||||
}
|
||||
pub fn concat<HighType: IntTypeTrait>(
|
||||
&self,
|
||||
high_part: Valueless<HighType>,
|
||||
) -> Valueless<DynIntType<HighType::Signed>> {
|
||||
let ty = DynIntType::new(
|
||||
self.ty
|
||||
.width()
|
||||
.checked_add(high_part.ty.width())
|
||||
.expect("result has too many bits"),
|
||||
);
|
||||
Valueless { ty }
|
||||
}
|
||||
pub fn repeat(&self, count: usize) -> Valueless<DynIntType<T::Signed>> {
|
||||
let width = self.ty.width();
|
||||
let ty = DynIntType::new(width.checked_mul(count).expect("result has too many bits"));
|
||||
Valueless { ty }
|
||||
}
|
||||
pub fn slice<I: RangeBounds<usize>>(&self, index: I) -> Valueless<DynUIntType> {
|
||||
let ty = self.ty.slice(index);
|
||||
Valueless { ty }
|
||||
}
|
||||
}
|
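The `concat` and `repeat` methods above only do width bookkeeping. A standalone sketch of that arithmetic and its overflow guard, written as plain functions invented here for illustration (they are not part of the crate):

// Illustrative only: the width computations `Valueless::concat` and
// `Valueless::repeat` perform, as plain checked arithmetic.
fn concat_width(low_width: usize, high_width: usize) -> usize {
    // concatenation widths add; overflow panics, matching the `expect` above
    low_width
        .checked_add(high_width)
        .expect("result has too many bits")
}

fn repeat_width(width: usize, count: usize) -> usize {
    // repetition multiplies the width by the repeat count
    width
        .checked_mul(count)
        .expect("result has too many bits")
}

fn main() {
    assert_eq!(concat_width(8, 4), 12);
    assert_eq!(repeat_width(8, 3), 24);
}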

@@ -1,13 +1,13 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
    expr::{Expr, Flow, ToExpr},
    expr::Flow,
    intern::Interned,
    module::{IncompleteDeclaration, NameId, ScopedNameId, StmtDeclaration, StmtWire},
    module::{NameId, ScopedNameId},
    source_location::SourceLocation,
    ty::{CanonicalType, Type},
};
use std::{cell::RefCell, fmt, rc::Rc};
use std::fmt;

#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Wire<T: Type> {

@@ -37,18 +37,6 @@ impl<T: Type> Wire<T> {
            ty: ty.canonical(),
        }
    }
    pub fn from_canonical(v: Wire<CanonicalType>) -> Self {
        let Wire {
            name,
            source_location,
            ty,
        } = v;
        Self {
            name,
            source_location,
            ty: T::from_canonical(ty),
        }
    }
    pub fn ty(&self) -> T {
        self.ty
    }

@@ -88,57 +76,3 @@ impl<T: Type> Wire<T> {
        true
    }
}

#[derive(Clone)]
pub struct IncompleteWire {
    pub(crate) declaration: Rc<RefCell<IncompleteDeclaration>>,
}

impl IncompleteWire {
    #[track_caller]
    pub fn complete<T: Type>(&mut self, ty: T) -> Expr<T> {
        let canonical_type = ty.canonical();
        let mut declaration = self.declaration.borrow_mut();
        if let IncompleteDeclaration::Incomplete {
            name,
            source_location,
        } = *declaration
        {
            *declaration = IncompleteDeclaration::Complete(
                StmtWire {
                    annotations: (),
                    wire: Wire {
                        name,
                        source_location,
                        ty: canonical_type,
                    },
                }
                .into(),
            );
        }
        match *declaration {
            IncompleteDeclaration::Complete(StmtDeclaration::Wire(StmtWire {
                wire:
                    Wire {
                        name,
                        source_location,
                        ty: wire_ty,
                    },
                ..
            })) => {
                drop(declaration);
                assert_eq!(wire_ty, canonical_type, "type mismatch");
                Wire {
                    name,
                    source_location,
                    ty,
                }
                .to_expr()
            }
            IncompleteDeclaration::Taken => panic!("can't use wire outside of containing module"),
            IncompleteDeclaration::Complete(_) | IncompleteDeclaration::Incomplete { .. } => {
                unreachable!()
            }
        }
    }
}

@@ -1,295 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! Formal tests in Fayalite

use fayalite::{
    cli::FormalMode,
    clock::{Clock, ClockDomain},
    expr::{CastTo, HdlPartialEq},
    firrtl::ExportOptions,
    formal::{any_const, any_seq, formal_reset, hdl_assert, hdl_assume},
    hdl, hdl_module,
    int::{Bool, DynSize, Size, UInt, UIntType},
    module::{connect, connect_any, instance, memory, reg_builder, wire},
    reset::ToReset,
    testing::assert_formal,
    ty::StaticType,
};

/// Test hidden state
///
/// Hidden state can cause problems for induction, since the formal engine
/// can assign invalid values to the state registers, making it traverse
/// valid but unreachable states.
///
/// One solution is to go sufficiently far into the past that the engine is
/// forced to eventually take a reachable state. This may be hampered by the
/// existence of loops; assumptions may then be added to break them.
///
/// Another solution is to "open the black box" and add additional
/// assertions involving the hidden state, so that the unreachable states
/// become invalid as well.
///
/// Both approaches are taken here.
///
/// See [Claire Wolf's presentation] and [Zipcpu blog article].
///
/// [Claire Wolf's presentation]: https://web.archive.org/web/20200115081517fw_/http://www.clifford.at/papers/2017/smtbmc-sby/
/// [Zipcpu blog article]: https://zipcpu.com/blog/2018/03/10/induction-exercise.html
mod hidden_state {
    use super::*;
    /// Test hidden state by shift registers
    ///
    /// The code implements the ideas from an article in the [Zipcpu blog]. Two
    /// shift registers are fed from the same input, so they should always have
    /// the same value. However, the only observable is a comparison of their
    /// last bits; all the other bits are hidden. To complicate matters, an
    /// enable signal causes a loop in state space.
    ///
    /// [Zipcpu blog]: https://zipcpu.com/blog/2018/03/10/induction-exercise.html
    #[test]
    fn shift_register() {
        enum ConstraintMode {
            WithExtraAssertions,
            WithExtraAssumptions,
        }
        use ConstraintMode::*;
        #[hdl_module]
        fn test_module(constraint_mode: ConstraintMode) {
            #[hdl]
            let clk: Clock = m.input();
            #[hdl]
            let cd = wire();
            connect(
                cd,
                #[hdl]
                ClockDomain {
                    clk,
                    rst: formal_reset().to_reset(),
                },
            );
            // input signal for the shift registers
            #[hdl]
            let i: Bool = wire();
            connect(i, any_seq(Bool));
            // shift enable signal
            #[hdl]
            let en: Bool = wire();
            connect(en, any_seq(Bool));
            // comparison output
            #[hdl]
            let o: Bool = wire();
            // shift registers, with enable
            #[hdl]
            let r1 = reg_builder().clock_domain(cd).reset(0u8);
            #[hdl]
            let r2 = reg_builder().clock_domain(cd).reset(0u8);
            #[hdl]
            if en {
                connect_any(r1, (r1 << 1) | i.cast_to(UInt[1]));
                connect_any(r2, (r2 << 1) | i.cast_to(UInt[1]));
            }
            // compare last bits of both shift registers
            connect(o, r1[7].cmp_eq(r2[7]));

            // what we want to prove: last bits are always equal
            hdl_assert(clk, o, "");

            // additional terms below are only needed to assist with the induction proof
            match constraint_mode {
                WithExtraAssertions => {
                    // "Open the box": add assertions about hidden state.
                    // In this case, the hidden bits are also always equal.
                    hdl_assert(clk, r1.cmp_eq(r2), "");
                }
                WithExtraAssumptions => {
                    // Break the loop, do not allow "en" to remain low forever
                    #[hdl]
                    let past_en_reg = reg_builder().clock_domain(cd).reset(false);
                    connect(past_en_reg, en);
                    hdl_assume(clk, past_en_reg | en, "");
                }
            }
        }
        // we need a minimum of 16 steps so we can constrain all eight shift register bits,
        // given that we are allowed to disable the shift once every two cycles.
        assert_formal(
            "shift_register_with_assumptions",
            test_module(WithExtraAssumptions),
            FormalMode::Prove,
            16,
            None,
            ExportOptions::default(),
        );
        // here a couple of cycles is enough
        assert_formal(
            "shift_register_with_assertions",
            test_module(WithExtraAssertions),
            FormalMode::Prove,
            2,
            None,
            ExportOptions::default(),
        );
    }
}

/// Formal verification of designs containing memories
///
/// There is a trick for memories, described in the [Zipcpu blog].
/// First, select a fixed but arbitrary memory address, monitoring all reads
/// and writes made to it. Then, assert that anything read from that location
/// matches the last stored value.
///
/// A difficulty for induction is that the memory represents [hidden_state]. A
/// solution is to add an additional read port to the memory and assert
/// that the memory location effectively contains the last stored value.
/// This additional debug port is present only to assist the proof and is
/// unused (optimized out) in actual use.
///
/// [Zipcpu blog]: <https://zipcpu.com/zipcpu/2018/07/13/memories.html>
mod memory {
    use super::*;

    /// Test a simple 8-bit SRAM model
    #[test]
    fn test_sram() {
        #[hdl]
        struct WritePort<AddrWidth: Size> {
            addr: UIntType<AddrWidth>,
            data: UInt<8>,
            en: Bool,
        }
        #[hdl]
        struct ReadPort<AddrWidth: Size> {
            addr: UIntType<AddrWidth>,
            #[hdl(flip)]
            data: UInt<8>,
        }
        /// This debug port is only meant to assist the proof.
        /// For normal use in a design, a wrapper could be provided,
        /// omitting this port.
        /// The implementation is forbidden to use any information
        /// provided on this port in its internal workings.
        #[hdl]
        struct DebugPort<AddrWidth: Size> {
            selected: UIntType<AddrWidth>,
            stored: UInt<8>,
            wrote: Bool,
        }
        /// simple 1R1W SRAM model (one asynchronous read port and one
        /// independent write port) with `n`-bit address width
        #[hdl_module]
        fn example_sram(n: usize) {
            #[hdl]
            let wr: WritePort<DynSize> = m.input(WritePort[n]);
            #[hdl]
            let rd: ReadPort<DynSize> = m.input(ReadPort[n]);
            #[hdl]
            let cd: ClockDomain = m.input();

            // declare and connect the backing memory
            #[hdl]
            let mut mem = memory();
            mem.depth(1 << n);
            let read_port = mem.new_read_port();
            let write_port = mem.new_write_port();
            connect(write_port.clk, cd.clk);
            connect(write_port.addr, wr.addr);
            connect(write_port.en, wr.en);
            connect(write_port.data, wr.data);
            connect(write_port.mask, true);
            connect(read_port.clk, cd.clk);
            connect(read_port.addr, rd.addr);
            connect(read_port.en, true);
            connect(rd.data, read_port.data);

            // To assist with induction, ensure that the chosen memory location
            // really contains, always, the last value written to it.
            #[hdl]
            let dbg: DebugPort<DynSize> = m.input(DebugPort[n]);
            let debug_port = mem.new_read_port();
            connect(debug_port.en, true);
            connect(debug_port.clk, cd.clk);
            connect(debug_port.addr, dbg.selected);
            #[hdl]
            if dbg.wrote {
                hdl_assert(cd.clk, debug_port.data.cmp_eq(dbg.stored), "");
                // Try commenting out the assert above; induction will fail.
                // Opening the trace, it can be seen that the memory contents
                // and the stored value don't match, which is an unreachable
                // state. By asserting the above, it will become invalid
                // as well, so induction will skip this kind of situation.
            }
        }

        /// formal verification of the SRAM module, parametrized by the
        /// address bit-width
        #[hdl_module]
        fn test_module(n: usize) {
            #[hdl]
            let clk: Clock = m.input();
            let cd = #[hdl]
            ClockDomain {
                clk,
                rst: formal_reset().to_reset(),
            };

            // instantiate the SRAM model, connecting its inputs to
            // a random sequence
            #[hdl]
            let rd: ReadPort<DynSize> = wire(ReadPort[n]);
            connect(rd.addr, any_seq(UInt[n]));
            #[hdl]
            let wr: WritePort<DynSize> = wire(WritePort[n]);
            connect(wr.addr, any_seq(UInt[n]));
            connect(wr.data, any_seq(UInt::<8>::TYPE));
            connect(wr.en, any_seq(Bool));
            #[hdl]
            let dut = instance(example_sram(n));
            connect(dut.cd, cd);
            connect(dut.rd, rd);
            connect(dut.wr, wr);

            // select a fixed but arbitrary test address
            #[hdl]
            let selected = wire(UInt[n]);
            connect(selected, any_const(UInt[n]));
            // store the last value written to that address
            #[hdl]
            let stored: UInt<8> = reg_builder().clock_domain(cd).reset(0u8);
            // since memories are not initialized, track whether we wrote to the
            // memory at least once
            #[hdl]
            let wrote: Bool = reg_builder().clock_domain(cd).reset(false);
            // on a write, capture the last written value
            #[hdl]
            if wr.en & wr.addr.cmp_eq(selected) {
                connect(stored, wr.data);
                connect(wrote, true);
            }
            // on a read, assert that the read value is the same which was stored
            #[hdl]
            if rd.addr.cmp_eq(selected) & wrote {
                hdl_assert(clk, rd.data.cmp_eq(stored), "");
            }

            // to assist induction, pass our state to the underlying instance
            let dbg = #[hdl]
            DebugPort {
                selected,
                stored,
                wrote,
            };
            connect(dut.dbg, dbg);
        }

        assert_formal(
            "sram",
            test_module(8),
            FormalMode::Prove,
            2,
            None,
            ExportOptions::default(),
        );
    }
}
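The "fixed but arbitrary address" observer used in the memory proof above is not specific to hardware; a plain-Rust analogue of the same bookkeeping (illustrative only, not fayalite API) is sketched here:

// Illustrative software analogue of the formal memory-checking trick:
// watch one arbitrary address and compare every read against the last write.
struct MemObserver {
    selected: usize,
    stored: Option<u8>, // None until the first write to `selected`
}

impl MemObserver {
    fn on_write(&mut self, addr: usize, data: u8) {
        if addr == self.selected {
            self.stored = Some(data);
        }
    }

    fn on_read(&self, addr: usize, data: u8) {
        if addr == self.selected {
            if let Some(expected) = self.stored {
                assert_eq!(data, expected, "memory returned stale data");
            }
        }
    }
}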

@@ -1,13 +1,6 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use fayalite::{
    bundle::BundleType,
    enum_::EnumType,
    int::{BoolOrIntType, IntType},
    prelude::*,
    ty::StaticType,
};
use std::marker::PhantomData;
use fayalite::prelude::*;

#[hdl(outline_generated)]
pub struct S<T, Len: Size, T2> {

@@ -15,7 +8,6 @@ pub struct S<T, Len: Size, T2> {
    b: UInt<3>,
    pub(crate) c: ArrayType<UInt<1>, Len>,
    pub d: T2,
    pub _phantom: PhantomData<(T, Len)>,
}

#[hdl(outline_generated)]

@@ -31,163 +23,9 @@ pub enum E<T> {
    A,
    B(UInt<3>),
    C(T),
    D(TyAlias2),
    E(TyAlias<Bool, ConstUsize<1>, { 1 + 2 }>),
}

#[hdl(outline_generated)]
pub struct S2<T = ()> {
    pub v: E<T>,
}

#[hdl(outline_generated)]
pub type TyAlias<T, Sz: Size, const C: usize, D = ()> = Array<S<T, Sz, D>, C>;

#[hdl(outline_generated)]
pub type TyAlias2 = TyAlias<UInt<8>, ConstUsize<24>, 5>;

// check that #[hdl] properly handles hygiene
macro_rules! types_in_macros {
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident, $D:ident, $E:ident, $F:ident) => {
        #[hdl]
        struct $F {}
        #[hdl]
        struct $A<$B, $C: Size, const $D: usize, $E = $F> {
            $a: $B,
            $b: UIntType<$C>,
            $c: SInt<$D>,
            $d: HdlOption<$E>,
            $e: $E,
            $f: $F,
        }
        #[allow(non_camel_case_types)]
        #[hdl]
        enum $B<$C: Size, const $D: usize, $E = $F> {
            $a($A<(), $C, $D, $E>),
            $b(UIntType<$C>),
            $c(SInt<$D>),
            $d,
            $e($E),
            $f($F),
        }
    };
    // ensure every identifier has different hygiene
    () => {
        types_in_macros!(a);
    };
    ($a:ident) => {
        types_in_macros!($a, b);
    };
    ($a:ident, $b:ident) => {
        types_in_macros!($a, $b, c);
    };
    ($a:ident, $b:ident, $c:ident) => {
        types_in_macros!($a, $b, $c, d);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident) => {
        types_in_macros!($a, $b, $c, $d, e);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, f);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, $f, A);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, $f, $A, B);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, C);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, $C, D);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident, $D:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, $C, $D, E);
    };
    ($a:ident, $b:ident, $c:ident, $d:ident, $e:ident, $f:ident, $A:ident, $B:ident, $C:ident, $D:ident, $E:ident) => {
        types_in_macros!($a, $b, $c, $d, $e, $f, $A, $B, $C, $D, $E, F);
    };
}

types_in_macros!();

mod bound_kind {
    use fayalite::prelude::*;

    #[hdl]
    pub struct Type<T> {
        v: T,
    }

    #[hdl]
    pub struct Size<T: ::fayalite::int::Size> {
        v: UIntType<T>,
    }
}

macro_rules! check_bounds {
    ($name:ident<$(#[$field:ident, $kind:ident] $var:ident: $($bound:ident +)*),*>) => {
        #[hdl(outline_generated)]
        struct $name<$($var: $($bound +)*,)*> {
            $($field: bound_kind::$kind<$var>,)*
        }
    };
}
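For reference, each `check_bounds!` line below expands to a struct whose fields use `bound_kind` to force the named bounds to hold. Hand-expanded sketch of one invocation (roughly what the macro generates for `CheckBoundsST0`, modulo trailing commas):

// Hand-expanded sketch of
// check_bounds!(CheckBoundsST0<#[a, Size] A: Size +, #[b, Type] B: Type +>);
#[hdl(outline_generated)]
struct CheckBoundsST0<A: Size, B: Type> {
    a: bound_kind::Size<A>,
    b: bound_kind::Type<B>,
}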
check_bounds!(CheckBoundsS0<#[a, Size] A: Size +>);
check_bounds!(CheckBoundsS1<#[a, Size] A: KnownSize +>);
check_bounds!(CheckBoundsT0<#[a, Type] A: Type +>);
check_bounds!(CheckBoundsT1<#[a, Type] A: BoolOrIntType +>);
check_bounds!(CheckBoundsT2<#[a, Type] A: BundleType +>);
check_bounds!(CheckBoundsT3<#[a, Type] A: EnumType +>);
check_bounds!(CheckBoundsT4<#[a, Type] A: IntType +>);
check_bounds!(CheckBoundsT5<#[a, Type] A: StaticType +>);
check_bounds!(CheckBoundsSS0<#[a, Size] A: Size +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsSS1<#[a, Size] A: KnownSize +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsST0<#[a, Size] A: Size +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsST1<#[a, Size] A: KnownSize +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTS0<#[a, Type] A: Type +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS1<#[a, Type] A: BoolOrIntType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS2<#[a, Type] A: BundleType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS3<#[a, Type] A: EnumType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS4<#[a, Type] A: IntType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTS5<#[a, Type] A: StaticType +, #[b, Size] B: Size +>);
check_bounds!(CheckBoundsTT0<#[a, Type] A: Type +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT1<#[a, Type] A: BoolOrIntType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT2<#[a, Type] A: BundleType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT3<#[a, Type] A: EnumType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT4<#[a, Type] A: IntType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsTT5<#[a, Type] A: StaticType +, #[b, Type] B: Type +>);
check_bounds!(CheckBoundsSSS0<#[a, Size] A: Size +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSSS1<#[a, Size] A: KnownSize +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSST0<#[a, Size] A: Size +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsSST1<#[a, Size] A: KnownSize +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsSTS0<#[a, Size] A: Size +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSTS1<#[a, Size] A: KnownSize +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsSTT0<#[a, Size] A: Size +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsSTT1<#[a, Size] A: KnownSize +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTSS0<#[a, Type] A: Type +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS1<#[a, Type] A: BoolOrIntType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS2<#[a, Type] A: BundleType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS3<#[a, Type] A: EnumType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS4<#[a, Type] A: IntType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTSS5<#[a, Type] A: StaticType +, #[b, Size] B: Size +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTST0<#[a, Type] A: Type +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST1<#[a, Type] A: BoolOrIntType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST2<#[a, Type] A: BundleType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST3<#[a, Type] A: EnumType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST4<#[a, Type] A: IntType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTST5<#[a, Type] A: StaticType +, #[b, Size] B: Size +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTS0<#[a, Type] A: Type +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS1<#[a, Type] A: BoolOrIntType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS2<#[a, Type] A: BundleType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS3<#[a, Type] A: EnumType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS4<#[a, Type] A: IntType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTS5<#[a, Type] A: StaticType +, #[b, Type] B: Type +, #[c, Size] C: Size +>);
check_bounds!(CheckBoundsTTT0<#[a, Type] A: Type +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT1<#[a, Type] A: BoolOrIntType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT2<#[a, Type] A: BundleType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT3<#[a, Type] A: EnumType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT4<#[a, Type] A: IntType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT5<#[a, Type] A: StaticType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
@ -1,283 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module array_rw $end
|
||||
$scope struct array_in $end
|
||||
$var wire 8 ! \[0] $end
|
||||
$var wire 8 " \[1] $end
|
||||
$var wire 8 # \[2] $end
|
||||
$var wire 8 $ \[3] $end
|
||||
$var wire 8 % \[4] $end
|
||||
$var wire 8 & \[5] $end
|
||||
$var wire 8 ' \[6] $end
|
||||
$var wire 8 ( \[7] $end
|
||||
$var wire 8 ) \[8] $end
|
||||
$var wire 8 * \[9] $end
|
||||
$var wire 8 + \[10] $end
|
||||
$var wire 8 , \[11] $end
|
||||
$var wire 8 - \[12] $end
|
||||
$var wire 8 . \[13] $end
|
||||
$var wire 8 / \[14] $end
|
||||
$var wire 8 0 \[15] $end
|
||||
$upscope $end
|
||||
$scope struct array_out $end
|
||||
$var wire 8 1 \[0] $end
|
||||
$var wire 8 2 \[1] $end
|
||||
$var wire 8 3 \[2] $end
|
||||
$var wire 8 4 \[3] $end
|
||||
$var wire 8 5 \[4] $end
|
||||
$var wire 8 6 \[5] $end
|
||||
$var wire 8 7 \[6] $end
|
||||
$var wire 8 8 \[7] $end
|
||||
$var wire 8 9 \[8] $end
|
||||
$var wire 8 : \[9] $end
|
||||
$var wire 8 ; \[10] $end
|
||||
$var wire 8 < \[11] $end
|
||||
$var wire 8 = \[12] $end
|
||||
$var wire 8 > \[13] $end
|
||||
$var wire 8 ? \[14] $end
|
||||
$var wire 8 @ \[15] $end
|
||||
$upscope $end
|
||||
$var wire 8 A read_index $end
|
||||
$var wire 8 B read_data $end
|
||||
$var wire 8 C write_index $end
|
||||
$var wire 8 D write_data $end
|
||||
$var wire 1 E write_en $end
|
||||
$scope struct array_wire $end
|
||||
$var wire 8 F \[0] $end
|
||||
$var wire 8 G \[1] $end
|
||||
$var wire 8 H \[2] $end
|
||||
$var wire 8 I \[3] $end
|
||||
$var wire 8 J \[4] $end
|
||||
$var wire 8 K \[5] $end
|
||||
$var wire 8 L \[6] $end
|
||||
$var wire 8 M \[7] $end
|
||||
$var wire 8 N \[8] $end
|
||||
$var wire 8 O \[9] $end
|
||||
$var wire 8 P \[10] $end
|
||||
$var wire 8 Q \[11] $end
|
||||
$var wire 8 R \[12] $end
|
||||
$var wire 8 S \[13] $end
|
||||
$var wire 8 T \[14] $end
|
||||
$var wire 8 U \[15] $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
b11111111 !
|
||||
b1111111 "
|
||||
b111111 #
|
||||
b11111 $
|
||||
b1111 %
|
||||
b111 &
|
||||
b11 '
|
||||
b1 (
|
||||
b0 )
|
||||
b10000000 *
|
||||
b11000000 +
|
||||
b11100000 ,
|
||||
b11110000 -
|
||||
b11111000 .
|
||||
b11111100 /
|
||||
b11111110 0
|
||||
b11111111 1
|
||||
b1111111 2
|
||||
b111111 3
|
||||
b11111 4
|
||||
b1111 5
|
||||
b111 6
|
||||
b11 7
|
||||
b1 8
|
||||
b0 9
|
||||
b10000000 :
|
||||
b11000000 ;
|
||||
b11100000 <
|
||||
b11110000 =
|
||||
b11111000 >
|
||||
b11111100 ?
|
||||
b11111110 @
|
||||
b0 A
|
||||
b11111111 B
|
||||
b0 C
|
||||
b0 D
|
||||
0E
|
||||
b11111111 F
|
||||
b1111111 G
|
||||
b111111 H
|
||||
b11111 I
|
||||
b1111 J
|
||||
b111 K
|
||||
b11 L
|
||||
b1 M
|
||||
b0 N
|
||||
b10000000 O
|
||||
b11000000 P
|
||||
b11100000 Q
|
||||
b11110000 R
|
||||
b11111000 S
|
||||
b11111100 T
|
||||
b11111110 U
|
||||
$end
|
||||
#1000000
|
||||
b1 A
|
||||
b1111111 B
|
||||
#2000000
|
||||
b10 A
|
||||
b111111 B
|
||||
#3000000
|
||||
b11 A
|
||||
b11111 B
|
||||
#4000000
|
||||
b100 A
|
||||
b1111 B
|
||||
#5000000
|
||||
b101 A
|
||||
b111 B
|
||||
#6000000
|
||||
b110 A
|
||||
b11 B
|
||||
#7000000
|
||||
b111 A
|
||||
b1 B
|
||||
#8000000
|
||||
b1000 A
|
||||
b0 B
|
||||
#9000000
|
||||
b1001 A
|
||||
b10000000 B
|
||||
#10000000
|
||||
b1010 A
|
||||
b11000000 B
|
||||
#11000000
|
||||
b1011 A
|
||||
b11100000 B
|
||||
#12000000
|
||||
b1100 A
|
||||
b11110000 B
|
||||
#13000000
|
||||
b1101 A
|
||||
b11111000 B
|
||||
#14000000
|
||||
b1110 A
|
||||
b11111100 B
|
||||
#15000000
|
||||
b1111 A
|
||||
b11111110 B
|
||||
#16000000
|
||||
b10000 A
|
||||
b0 B
|
||||
#17000000
|
||||
b0 1
|
||||
b0 A
|
||||
1E
|
||||
b0 F
|
||||
#18000000
|
||||
b11111111 1
|
||||
b1 2
|
||||
b11111111 B
|
||||
b1 C
|
||||
b1 D
|
||||
b11111111 F
|
||||
b1 G
|
||||
#19000000
|
||||
b1111111 2
|
||||
b100 3
|
||||
b10 C
|
||||
b100 D
|
||||
b1111111 G
|
||||
b100 H
|
||||
#20000000
|
||||
b111111 3
|
||||
b1001 4
|
||||
b11 C
|
||||
b1001 D
|
||||
b111111 H
|
||||
b1001 I
|
||||
#21000000
|
||||
b11111 4
|
||||
b10000 5
|
||||
b100 C
|
||||
b10000 D
|
||||
b11111 I
|
||||
b10000 J
|
||||
#22000000
|
||||
b1111 5
|
||||
b11001 6
|
||||
b101 C
|
||||
b11001 D
|
||||
b1111 J
|
||||
b11001 K
|
||||
#23000000
|
||||
b111 6
|
||||
b100100 7
|
||||
b110 C
|
||||
b100100 D
|
||||
b111 K
|
||||
b100100 L
|
||||
#24000000
|
||||
b11 7
|
||||
b110001 8
|
||||
b111 C
|
||||
b110001 D
|
||||
b11 L
|
||||
b110001 M
|
||||
#25000000
|
||||
b1 8
|
||||
b1000000 9
|
||||
b1000 C
|
||||
b1000000 D
|
||||
b1 M
|
||||
b1000000 N
|
||||
#26000000
|
||||
b0 9
|
||||
b1010001 :
|
||||
b1001 C
|
||||
b1010001 D
|
||||
b0 N
|
||||
b1010001 O
|
||||
#27000000
|
||||
b10000000 :
|
||||
b1100100 ;
|
||||
b1010 C
|
||||
b1100100 D
|
||||
b10000000 O
|
||||
b1100100 P
|
||||
#28000000
|
||||
b11000000 ;
|
||||
b1111001 <
|
||||
b1011 C
|
||||
b1111001 D
|
||||
b11000000 P
|
||||
b1111001 Q
|
||||
#29000000
|
||||
b11100000 <
|
||||
b10010000 =
|
||||
b1100 C
|
||||
b10010000 D
|
||||
b11100000 Q
|
||||
b10010000 R
|
||||
#30000000
|
||||
b11110000 =
|
||||
b10101001 >
|
||||
b1101 C
|
||||
b10101001 D
|
||||
b11110000 R
|
||||
b10101001 S
|
||||
#31000000
|
||||
b11111000 >
|
||||
b11000100 ?
|
||||
b1110 C
|
||||
b11000100 D
|
||||
b11111000 S
|
||||
b11000100 T
|
||||
#32000000
|
||||
b11111100 ?
|
||||
b11100001 @
|
||||
b1111 C
|
||||
b11100001 D
|
||||
b11111100 T
|
||||
b11100001 U
|
||||
#33000000
|
||||
b11111110 @
|
||||
b10000 C
|
||||
b0 D
|
||||
b11111110 U
|
||||
#34000000
|
|
@ -1,189 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x0,
|
||||
},
|
||||
1: Const {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
2: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
3: BranchIfZero {
|
||||
target: 5,
|
||||
value: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
5: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 5,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
},
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
}.i: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bool,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
},
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "conditional_assignment_last",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "i",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(0),
|
||||
name: "i",
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Source,
|
||||
},
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(1),
|
||||
name: "w",
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x1,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 2 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
|
|
@ -1,14 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module conditional_assignment_last $end
|
||||
$var wire 1 ! i $end
|
||||
$var wire 1 " w $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
0!
|
||||
1"
|
||||
$end
|
||||
#1000000
|
||||
1!
|
||||
0"
|
||||
#2000000
|
|
@ -1,142 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const: connect_const).connect_const::o",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
value: 0x5,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(0), // (0x5) SlotDebugData { name: "InstantiatedModule(connect_const: connect_const).connect_const::o", ty: UInt<8> },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
2: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 2,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
5,
|
||||
5,
|
||||
],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
},
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
}.o: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: UInt<8>,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const: connect_const).connect_const::o",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
},
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "connect_const",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "o",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(0),
|
||||
name: "o",
|
||||
ty: UInt<8>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
flow: Sink,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x05,
|
||||
last_state: 0x05,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [],
|
||||
instant: 0 s,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
|
|
@ -1,229 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 5,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::bit_out",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x1,
|
||||
},
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x1) SlotDebugData { name: "", ty: AsyncReset },
|
||||
src: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
2: Copy {
|
||||
dest: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out", ty: AsyncReset },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x1) SlotDebugData { name: "", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
3: Copy {
|
||||
dest: StatePartIndex<BigSlots>(4), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x1) SlotDebugData { name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::bit_out", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(4), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
5: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 5,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
},
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.bit_out: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bool,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::bit_out",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.reset_out: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: AsyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
},
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "connect_const_reset",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "reset_out",
|
||||
child: TraceAsyncReset {
|
||||
location: TraceScalarId(0),
|
||||
name: "reset_out",
|
||||
flow: Sink,
|
||||
},
|
||||
ty: AsyncReset,
|
||||
flow: Sink,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "bit_out",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(1),
|
||||
name: "bit_out",
|
||||
flow: Sink,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Sink,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigAsyncReset {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 1 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module connect_const_reset $end
|
||||
$var wire 1 ! reset_out $end
|
||||
$var wire 1 " bit_out $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
1!
|
||||
1"
|
||||
$end
|
||||
#1000000
|
|
@ -1,522 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 10,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg$next",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<1>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<5>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(7), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
value: 0x1,
|
||||
},
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(6), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.rst", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
2: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.rst", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
3: Const {
|
||||
dest: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
value: 0x3,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
4: BranchIfZero {
|
||||
target: 6,
|
||||
value: StatePartIndex<BigSlots>(6), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
5: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
6: Add {
|
||||
dest: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
lhs: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
rhs: StatePartIndex<BigSlots>(7), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
},
|
||||
7: CastToUInt {
|
||||
dest: StatePartIndex<BigSlots>(9), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
dest_width: 4,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
8: Copy {
|
||||
dest: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(9), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
9: Copy {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
10: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.clk", ty: Clock },
|
||||
},
|
||||
11: AndSmall {
|
||||
dest: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
12: BranchIfSmallNonZero {
|
||||
target: 16,
|
||||
value: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
13: BranchIfSmallZero {
|
||||
target: 17,
|
||||
value: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
14: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
},
|
||||
15: Branch {
|
||||
target: 17,
|
||||
},
|
||||
16: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
17: XorSmallImmediate {
|
||||
dest: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
18: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 18,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
3,
|
||||
3,
|
||||
4,
|
||||
3,
|
||||
0,
|
||||
1,
|
||||
4,
|
||||
4,
|
||||
],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
},
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: AsyncReset,
|
||||
},
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Bundle {
|
||||
fields: [
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: AsyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 2 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: AsyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: UInt<4>,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 2, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
},
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "counter",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "cd",
|
||||
child: TraceBundle {
|
||||
name: "cd",
|
||||
fields: [
|
||||
TraceClock {
|
||||
location: TraceScalarId(0),
|
||||
name: "clk",
|
||||
flow: Source,
|
||||
},
|
||||
TraceAsyncReset {
|
||||
location: TraceScalarId(1),
|
||||
name: "rst",
|
||||
flow: Source,
|
||||
},
|
||||
],
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: AsyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: AsyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "count",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(2),
|
||||
name: "count",
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
TraceReg {
|
||||
name: "count_reg",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(3),
|
||||
name: "count_reg",
|
||||
ty: UInt<4>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigClock {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigAsyncReset {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(2),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x2,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(3),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(3),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x3,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 66 μs,
|
||||
clocks_triggered: [
|
||||
StatePartIndex<SmallSlots>(1),
|
||||
],
|
||||
..
|
||||
}
|
|
@ -1,217 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module counter $end
|
||||
$scope struct cd $end
|
||||
$var wire 1 ! clk $end
|
||||
$var wire 1 " rst $end
|
||||
$upscope $end
|
||||
$var wire 4 # count $end
|
||||
$var reg 4 $ count_reg $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
0!
|
||||
0"
|
||||
b0 #
|
||||
b0 $
|
||||
$end
|
||||
#500000
|
||||
1"
|
||||
b11 #
|
||||
b11 $
|
||||
#1000000
|
||||
1!
|
||||
#1500000
|
||||
0"
|
||||
#2000000
|
||||
0!
|
||||
#3000000
|
||||
1!
|
||||
b100 $
|
||||
b100 #
|
||||
#4000000
|
||||
0!
|
||||
#5000000
|
||||
1!
|
||||
b101 $
|
||||
b101 #
|
||||
#6000000
|
||||
0!
|
||||
#7000000
|
||||
1!
|
||||
b110 $
|
||||
b110 #
|
||||
#8000000
|
||||
0!
|
||||
#9000000
|
||||
1!
|
||||
b111 $
|
||||
b111 #
|
||||
#10000000
|
||||
0!
|
||||
#11000000
|
||||
1!
|
||||
b1000 $
|
||||
b1000 #
|
||||
#12000000
|
||||
0!
|
||||
#13000000
|
||||
1!
|
||||
b1001 $
|
||||
b1001 #
|
||||
#14000000
|
||||
0!
|
||||
#15000000
|
||||
1!
|
||||
b1010 $
|
||||
b1010 #
|
||||
#16000000
|
||||
0!
|
||||
#17000000
|
||||
1!
|
||||
b1011 $
|
||||
b1011 #
|
||||
#18000000
|
||||
0!
|
||||
#19000000
|
||||
1!
|
||||
b1100 $
|
||||
b1100 #
|
||||
#20000000
|
||||
0!
|
||||
#21000000
|
||||
1!
|
||||
b1101 $
|
||||
b1101 #
|
||||
#22000000
|
||||
0!
|
||||
#23000000
|
||||
1!
|
||||
b1110 $
|
||||
b1110 #
|
||||
#24000000
|
||||
0!
|
||||
#25000000
|
||||
1!
|
||||
b1111 $
|
||||
b1111 #
|
||||
#26000000
|
||||
0!
|
||||
#27000000
|
||||
1!
|
||||
b0 $
|
||||
b0 #
|
||||
#28000000
|
||||
0!
|
||||
#29000000
|
||||
1!
|
||||
b1 $
|
||||
b1 #
|
||||
#30000000
|
||||
0!
|
||||
#31000000
|
||||
1!
|
||||
b10 $
|
||||
b10 #
|
||||
#32000000
|
||||
0!
|
||||
#33000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
#34000000
|
||||
0!
|
||||
#35000000
|
||||
1!
|
||||
b100 $
|
||||
b100 #
|
||||
#36000000
|
||||
0!
|
||||
#37000000
|
||||
1!
|
||||
b101 $
|
||||
b101 #
|
||||
#38000000
|
||||
0!
|
||||
#39000000
|
||||
1!
|
||||
b110 $
|
||||
b110 #
|
||||
#40000000
|
||||
0!
|
||||
#41000000
|
||||
1!
|
||||
b111 $
|
||||
b111 #
|
||||
#42000000
|
||||
0!
|
||||
#43000000
|
||||
1!
|
||||
b1000 $
|
||||
b1000 #
|
||||
#44000000
|
||||
0!
|
||||
#45000000
|
||||
1!
|
||||
b1001 $
|
||||
b1001 #
|
||||
#46000000
|
||||
0!
|
||||
#47000000
|
||||
1!
|
||||
b1010 $
|
||||
b1010 #
|
||||
#48000000
|
||||
0!
|
||||
#49000000
|
||||
1!
|
||||
b1011 $
|
||||
b1011 #
|
||||
#50000000
|
||||
0!
|
||||
#51000000
|
||||
1!
|
||||
b1100 $
|
||||
b1100 #
|
||||
#52000000
|
||||
0!
|
||||
#53000000
|
||||
1!
|
||||
b1101 $
|
||||
b1101 #
|
||||
#54000000
|
||||
0!
|
||||
#55000000
|
||||
1!
|
||||
b1110 $
|
||||
b1110 #
|
||||
#56000000
|
||||
0!
|
||||
#57000000
|
||||
1!
|
||||
b1111 $
|
||||
b1111 #
|
||||
#58000000
|
||||
0!
|
||||
#59000000
|
||||
1!
|
||||
b0 $
|
||||
b0 #
|
||||
#60000000
|
||||
0!
|
||||
#61000000
|
||||
1!
|
||||
b1 $
|
||||
b1 #
|
||||
#62000000
|
||||
0!
|
||||
#63000000
|
||||
1!
|
||||
b10 $
|
||||
b10 #
|
||||
#64000000
|
||||
0!
|
||||
#65000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
#66000000
|
|
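The deleted snapshot above is a VCD trace of the counter module: per its $var declarations, identifier ! is cd.clk, " is cd.rst, # is count, and $ is count_reg, and each #<time> line advances simulation time in picoseconds. As a minimal, hypothetical sketch (not part of the repository, and ignoring VCD features such as x/z bits and multi-character identifier codes), the value-change records can be decoded roughly like this:

/// Decode one VCD value-change line from the trace above,
/// e.g. "1!" (scalar change) or "b1010 #" (vector change).
/// Returns the identifier code and the new value.
fn parse_change(line: &str) -> Option<(char, u64)> {
    let line = line.trim();
    if let Some(rest) = line.strip_prefix('b') {
        // Vector change: "b<binary digits> <identifier>"
        let (bits, id) = rest.split_once(' ')?;
        Some((id.chars().next()?, u64::from_str_radix(bits, 2).ok()?))
    } else {
        // Scalar change: "<0|1><identifier>"
        let mut chars = line.chars();
        let value = chars.next()?.to_digit(2)? as u64;
        Some((chars.next()?, value))
    }
}

fn main() {
    assert_eq!(parse_change("1!"), Some(('!', 1)));       // clk rises
    assert_eq!(parse_change("b1010 #"), Some(('#', 10))); // count = 10
}

Decoding the body of the trace this way reproduces the count sequence 3, 4, ..., 15, 0, 1, ... recorded above.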
@ -1,503 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 9,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: SyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg$next",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<1>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<5>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
0: Copy {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
1: Const {
|
||||
dest: StatePartIndex<BigSlots>(6), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
value: 0x1,
|
||||
},
|
||||
2: Add {
|
||||
dest: StatePartIndex<BigSlots>(7), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
lhs: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
rhs: StatePartIndex<BigSlots>(6), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
},
|
||||
3: CastToUInt {
|
||||
dest: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(7), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
dest_width: 4,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
5: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.rst", ty: SyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
6: Const {
|
||||
dest: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
value: 0x3,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
7: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.clk", ty: Clock },
|
||||
},
|
||||
8: AndSmall {
|
||||
dest: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
9: BranchIfSmallZero {
|
||||
target: 14,
|
||||
value: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
10: BranchIfSmallNonZero {
|
||||
target: 13,
|
||||
value: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
11: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
},
|
||||
12: Branch {
|
||||
target: 14,
|
||||
},
|
||||
13: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
14: XorSmallImmediate {
|
||||
dest: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
15: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 15,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
3,
|
||||
3,
|
||||
4,
|
||||
3,
|
||||
1,
|
||||
4,
|
||||
4,
|
||||
],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
},
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: SyncReset,
|
||||
},
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: SyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Bundle {
|
||||
fields: [
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: SyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: SyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 2 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: SyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: SyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: UInt<4>,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 2, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
},
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "counter",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "cd",
|
||||
child: TraceBundle {
|
||||
name: "cd",
|
||||
fields: [
|
||||
TraceClock {
|
||||
location: TraceScalarId(0),
|
||||
name: "clk",
|
||||
flow: Source,
|
||||
},
|
||||
TraceSyncReset {
|
||||
location: TraceScalarId(1),
|
||||
name: "rst",
|
||||
flow: Source,
|
||||
},
|
||||
],
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: SyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: SyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "count",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(2),
|
||||
name: "count",
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
TraceReg {
|
||||
name: "count_reg",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(3),
|
||||
name: "count_reg",
|
||||
ty: UInt<4>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigClock {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigSyncReset {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(2),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x2,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(3),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(3),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x3,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 66 μs,
|
||||
clocks_triggered: [
|
||||
StatePartIndex<SmallSlots>(1),
|
||||
],
|
||||
..
|
||||
}
|
|
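For orientation (this note is not part of the deleted snapshot): the instruction listing in the dump above is the compiled body of the counter. On a rising edge of cd.clk it either reloads count_reg with the reset constant 0x3 when cd.rst is set, or stores count_reg + 1 truncated back to 4 bits (the Add produces a UInt<5> temporary, which CastToUInt { dest_width: 4 } narrows again). A minimal stand-alone model of that update step, assuming the widths shown in the dump:

/// Hypothetical stand-alone model of one clock edge of the 4-bit counter
/// whose compiled instructions appear above (sync reset, reset value 3).
fn counter_step(count_reg: u8, rst: bool) -> u8 {
    debug_assert!(count_reg < 0x10, "count_reg is a 4-bit value");
    if rst {
        0x3 // insn 13: Copy from the Const 0x3 slot into count_reg
    } else {
        // insn 2 (Add) widens to UInt<5>; insn 3 (CastToUInt, dest_width: 4)
        // truncates, so the counter wraps from 15 back to 0.
        (count_reg + 1) & 0xF
    }
}

fn main() {
    assert_eq!(counter_step(0xF, false), 0x0); // the wrap-around seen in the VCD below
    assert_eq!(counter_step(0x7, true), 0x3);  // reset reloads 3
}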
@ -1,214 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module counter $end
|
||||
$scope struct cd $end
|
||||
$var wire 1 ! clk $end
|
||||
$var wire 1 " rst $end
|
||||
$upscope $end
|
||||
$var wire 4 # count $end
|
||||
$var reg 4 $ count_reg $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
0!
|
||||
1"
|
||||
b0 #
|
||||
b0 $
|
||||
$end
|
||||
#1000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
0"
|
||||
#2000000
|
||||
0!
|
||||
#3000000
|
||||
1!
|
||||
b100 $
|
||||
b100 #
|
||||
#4000000
|
||||
0!
|
||||
#5000000
|
||||
1!
|
||||
b101 $
|
||||
b101 #
|
||||
#6000000
|
||||
0!
|
||||
#7000000
|
||||
1!
|
||||
b110 $
|
||||
b110 #
|
||||
#8000000
|
||||
0!
|
||||
#9000000
|
||||
1!
|
||||
b111 $
|
||||
b111 #
|
||||
#10000000
|
||||
0!
|
||||
#11000000
|
||||
1!
|
||||
b1000 $
|
||||
b1000 #
|
||||
#12000000
|
||||
0!
|
||||
#13000000
|
||||
1!
|
||||
b1001 $
|
||||
b1001 #
|
||||
#14000000
|
||||
0!
|
||||
#15000000
|
||||
1!
|
||||
b1010 $
|
||||
b1010 #
|
||||
#16000000
|
||||
0!
|
||||
#17000000
|
||||
1!
|
||||
b1011 $
|
||||
b1011 #
|
||||
#18000000
|
||||
0!
|
||||
#19000000
|
||||
1!
|
||||
b1100 $
|
||||
b1100 #
|
||||
#20000000
|
||||
0!
|
||||
#21000000
|
||||
1!
|
||||
b1101 $
|
||||
b1101 #
|
||||
#22000000
|
||||
0!
|
||||
#23000000
|
||||
1!
|
||||
b1110 $
|
||||
b1110 #
|
||||
#24000000
|
||||
0!
|
||||
#25000000
|
||||
1!
|
||||
b1111 $
|
||||
b1111 #
|
||||
#26000000
|
||||
0!
|
||||
#27000000
|
||||
1!
|
||||
b0 $
|
||||
b0 #
|
||||
#28000000
|
||||
0!
|
||||
#29000000
|
||||
1!
|
||||
b1 $
|
||||
b1 #
|
||||
#30000000
|
||||
0!
|
||||
#31000000
|
||||
1!
|
||||
b10 $
|
||||
b10 #
|
||||
#32000000
|
||||
0!
|
||||
#33000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
#34000000
|
||||
0!
|
||||
#35000000
|
||||
1!
|
||||
b100 $
|
||||
b100 #
|
||||
#36000000
|
||||
0!
|
||||
#37000000
|
||||
1!
|
||||
b101 $
|
||||
b101 #
|
||||
#38000000
|
||||
0!
|
||||
#39000000
|
||||
1!
|
||||
b110 $
|
||||
b110 #
|
||||
#40000000
|
||||
0!
|
||||
#41000000
|
||||
1!
|
||||
b111 $
|
||||
b111 #
|
||||
#42000000
|
||||
0!
|
||||
#43000000
|
||||
1!
|
||||
b1000 $
|
||||
b1000 #
|
||||
#44000000
|
||||
0!
|
||||
#45000000
|
||||
1!
|
||||
b1001 $
|
||||
b1001 #
|
||||
#46000000
|
||||
0!
|
||||
#47000000
|
||||
1!
|
||||
b1010 $
|
||||
b1010 #
|
||||
#48000000
|
||||
0!
|
||||
#49000000
|
||||
1!
|
||||
b1011 $
|
||||
b1011 #
|
||||
#50000000
|
||||
0!
|
||||
#51000000
|
||||
1!
|
||||
b1100 $
|
||||
b1100 #
|
||||
#52000000
|
||||
0!
|
||||
#53000000
|
||||
1!
|
||||
b1101 $
|
||||
b1101 #
|
||||
#54000000
|
||||
0!
|
||||
#55000000
|
||||
1!
|
||||
b1110 $
|
||||
b1110 #
|
||||
#56000000
|
||||
0!
|
||||
#57000000
|
||||
1!
|
||||
b1111 $
|
||||
b1111 #
|
||||
#58000000
|
||||
0!
|
||||
#59000000
|
||||
1!
|
||||
b0 $
|
||||
b0 #
|
||||
#60000000
|
||||
0!
|
||||
#61000000
|
||||
1!
|
||||
b1 $
|
||||
b1 #
|
||||
#62000000
|
||||
0!
|
||||
#63000000
|
||||
1!
|
||||
b10 $
|
||||
b10 #
|
||||
#64000000
|
||||
0!
|
||||
#65000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
#66000000
|
|
@ -1,153 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x6) SlotDebugData { name: "", ty: UInt<8> },
|
||||
value: 0x6,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x6) SlotDebugData { name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w", ty: UInt<8> },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x6) SlotDebugData { name: "", ty: UInt<8> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
2: Const {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
value: 0x5,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
3: Copy {
|
||||
dest: StatePartIndex<BigSlots>(0), // (0x5) SlotDebugData { name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w", ty: UInt<8> },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
4: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 4,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
5,
|
||||
5,
|
||||
6,
|
||||
6,
|
||||
],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::duplicate_names,
|
||||
instantiated: Module {
|
||||
name: duplicate_names,
|
||||
..
|
||||
},
|
||||
},
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {},
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "duplicate_names",
|
||||
children: [
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(0),
|
||||
name: "w",
|
||||
ty: UInt<8>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
},
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(1),
|
||||
name: "w",
|
||||
ty: UInt<8>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x05,
|
||||
last_state: 0x05,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x06,
|
||||
last_state: 0x06,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 1 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
|
|
@ -1,11 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module duplicate_names $end
|
||||
$var wire 8 ! w $end
|
||||
$var wire 8 " w_2 $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
b101 !
|
||||
b110 "
|
||||
$end
|
||||
#1000000
|
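The pair of deleted files above records how the simulator's VCD writer handles the two distinct wires that are both named w in the duplicate_names module: the trace declarations keep both names, while the VCD scope disambiguates the second one as w_2. The renaming logic itself is not shown in this diff; the following is a purely hypothetical sketch of that kind of uniquification, not fayalite's actual implementation:

use std::collections::HashMap;

/// Hypothetical sketch only: give the first use of a name verbatim and
/// suffix later uses, matching the "w" / "w_2" pair in the VCD header above.
/// A real implementation would also need to avoid colliding with names
/// like "w_2" that already exist in the scope.
fn uniquify(names: &[&str]) -> Vec<String> {
    let mut seen: HashMap<&str, u32> = HashMap::new();
    names
        .iter()
        .map(|&name| {
            let n = seen.entry(name).or_insert(0);
            *n += 1;
            let n = *n;
            if n == 1 {
                name.to_string()
            } else {
                format!("{}_{}", name, n)
            }
        })
        .collect()
}

fn main() {
    assert_eq!(uniquify(&["w", "w"]), vec!["w".to_string(), "w_2".to_string()]);
}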
File diff suppressed because it is too large
|
@ -1,110 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module enums $end
|
||||
$scope struct cd $end
|
||||
$var wire 1 ! clk $end
|
||||
$var wire 1 " rst $end
|
||||
$upscope $end
|
||||
$var wire 1 # en $end
|
||||
$var wire 2 $ which_in $end
|
||||
$var wire 4 % data_in $end
|
||||
$var wire 2 & which_out $end
|
||||
$var wire 4 ' data_out $end
|
||||
$scope struct b_out $end
|
||||
$var string 1 ( \$tag $end
|
||||
$scope struct HdlSome $end
|
||||
$var wire 1 ) \0 $end
|
||||
$var wire 1 * \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct the_reg $end
|
||||
$var string 1 + \$tag $end
|
||||
$scope struct B $end
|
||||
$var reg 1 , \0 $end
|
||||
$var reg 1 - \1 $end
|
||||
$upscope $end
|
||||
$scope struct C $end
|
||||
$scope struct a $end
|
||||
$var reg 1 . \[0] $end
|
||||
$var reg 1 / \[1] $end
|
||||
$upscope $end
|
||||
$var reg 2 0 b $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
0!
|
||||
1"
|
||||
0#
|
||||
b0 $
|
||||
b0 %
|
||||
b0 &
|
||||
b0 '
|
||||
sHdlNone\x20(0) (
|
||||
0)
|
||||
0*
|
||||
sA\x20(0) +
|
||||
0,
|
||||
0-
|
||||
0.
|
||||
0/
|
||||
b0 0
|
||||
$end
|
||||
#1000000
|
||||
1!
|
||||
#1100000
|
||||
0"
|
||||
#2000000
|
||||
0!
|
||||
#3000000
|
||||
1!
|
||||
#4000000
|
||||
1#
|
||||
b1 $
|
||||
0!
|
||||
#5000000
|
||||
1!
|
||||
b1 &
|
||||
sHdlSome\x20(1) (
|
||||
sB\x20(1) +
|
||||
#6000000
|
||||
0#
|
||||
b0 $
|
||||
0!
|
||||
#7000000
|
||||
1!
|
||||
#8000000
|
||||
1#
|
||||
b1 $
|
||||
b1111 %
|
||||
0!
|
||||
#9000000
|
||||
1!
|
||||
b11 '
|
||||
1)
|
||||
1*
|
||||
1,
|
||||
1-
|
||||
1.
|
||||
1/
|
||||
#10000000
|
||||
0!
|
||||
#11000000
|
||||
1!
|
||||
#12000000
|
||||
b10 $
|
||||
0!
|
||||
#13000000
|
||||
1!
|
||||
b10 &
|
||||
b1111 '
|
||||
sHdlNone\x20(0) (
|
||||
0)
|
||||
0*
|
||||
sC\x20(2) +
|
||||
b11 0
|
||||
#14000000
|
||||
0!
|
||||
#15000000
|
||||
1!
|
||||
#16000000
|
File diff suppressed because it is too large
|
@ -1,408 +0,0 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module memories $end
|
||||
$scope struct r $end
|
||||
$var wire 4 ! addr $end
|
||||
$var wire 1 " en $end
|
||||
$var wire 1 # clk $end
|
||||
$scope struct data $end
|
||||
$var wire 8 $ \0 $end
|
||||
$var wire 8 % \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct w $end
|
||||
$var wire 4 & addr $end
|
||||
$var wire 1 ' en $end
|
||||
$var wire 1 ( clk $end
|
||||
$scope struct data $end
|
||||
$var wire 8 ) \0 $end
|
||||
$var wire 8 * \1 $end
|
||||
$upscope $end
|
||||
$scope struct mask $end
|
||||
$var wire 1 + \0 $end
|
||||
$var wire 1 , \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct mem $end
|
||||
$scope struct contents $end
|
||||
$scope struct \[0] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 9 \0 $end
|
||||
$var reg 8 I \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[1] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 : \0 $end
|
||||
$var reg 8 J \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[2] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 ; \0 $end
|
||||
$var reg 8 K \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[3] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 < \0 $end
|
||||
$var reg 8 L \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[4] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 = \0 $end
|
||||
$var reg 8 M \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[5] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 > \0 $end
|
||||
$var reg 8 N \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[6] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 ? \0 $end
|
||||
$var reg 8 O \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[7] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 @ \0 $end
|
||||
$var reg 8 P \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[8] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 A \0 $end
|
||||
$var reg 8 Q \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[9] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 B \0 $end
|
||||
$var reg 8 R \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[10] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 C \0 $end
|
||||
$var reg 8 S \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[11] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 D \0 $end
|
||||
$var reg 8 T \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[12] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 E \0 $end
|
||||
$var reg 8 U \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[13] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 F \0 $end
|
||||
$var reg 8 V \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[14] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 G \0 $end
|
||||
$var reg 8 W \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct \[15] $end
|
||||
$scope struct mem $end
|
||||
$var reg 8 H \0 $end
|
||||
$var reg 8 X \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct r0 $end
|
||||
$var wire 4 - addr $end
|
||||
$var wire 1 . en $end
|
||||
$var wire 1 / clk $end
|
||||
$scope struct data $end
|
||||
$var wire 8 0 \0 $end
|
||||
$var wire 8 1 \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$scope struct w1 $end
|
||||
$var wire 4 2 addr $end
|
||||
$var wire 1 3 en $end
|
||||
$var wire 1 4 clk $end
|
||||
$scope struct data $end
|
||||
$var wire 8 5 \0 $end
|
||||
$var wire 8 6 \1 $end
|
||||
$upscope $end
|
||||
$scope struct mask $end
|
||||
$var wire 1 7 \0 $end
|
||||
$var wire 1 8 \1 $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
b1 9
|
||||
b100011 I
|
||||
b1 :
|
||||
b100011 J
|
||||
b1 ;
|
||||
b100011 K
|
||||
b1 <
|
||||
b100011 L
|
||||
b1 =
|
||||
b100011 M
|
||||
b1 >
|
||||
b100011 N
|
||||
b1 ?
|
||||
b100011 O
|
||||
b1 @
|
||||
b100011 P
|
||||
b1 A
|
||||
b100011 Q
|
||||
b1 B
|
||||
b100011 R
|
||||
b1 C
|
||||
b100011 S
|
||||
b1 D
|
||||
b100011 T
|
||||
b1 E
|
||||
b100011 U
|
||||
b1 F
|
||||
b100011 V
|
||||
b1 G
|
||||
b100011 W
|
||||
b1 H
|
||||
b100011 X
|
||||
b0 !
|
||||
0"
|
||||
0#
|
||||
b0 $
|
||||
b0 %
|
||||
b0 &
|
||||
0'
|
||||
0(
|
||||
b0 )
|
||||
b0 *
|
||||
0+
|
||||
0,
|
||||
b0 -
|
||||
0.
|
||||
0/
|
||||
b0 0
|
||||
b0 1
|
||||
b0 2
|
||||
03
|
||||
04
|
||||
b0 5
|
||||
b0 6
|
||||
07
|
||||
08
|
||||
$end
|
||||
#1000000
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#2000000
|
||||
1"
|
||||
0#
|
||||
b1 $
|
||||
b100011 %
|
||||
1'
|
||||
0(
|
||||
b10000 )
|
||||
b100000 *
|
||||
1+
|
||||
1,
|
||||
1.
|
||||
0/
|
||||
b1 0
|
||||
b100011 1
|
||||
13
|
||||
04
|
||||
b10000 5
|
||||
b100000 6
|
||||
17
|
||||
18
|
||||
#3000000
|
||||
b10000 9
|
||||
b100000 I
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
b10000 $
|
||||
b100000 %
|
||||
b10000 0
|
||||
b100000 1
|
||||
#4000000
|
||||
0#
|
||||
0(
|
||||
b110000 )
|
||||
b1000000 *
|
||||
0+
|
||||
0/
|
||||
04
|
||||
b110000 5
|
||||
b1000000 6
|
||||
07
|
||||
#5000000
|
||||
b10000 9
|
||||
b1000000 I
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
b1000000 %
|
||||
b1000000 1
|
||||
#6000000
|
||||
0#
|
||||
0(
|
||||
b1010000 )
|
||||
b1100000 *
|
||||
1+
|
||||
0,
|
||||
0/
|
||||
04
|
||||
b1010000 5
|
||||
b1100000 6
|
||||
17
|
||||
08
|
||||
#7000000
|
||||
b1010000 9
|
||||
b1000000 I
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
b1010000 $
|
||||
b1010000 0
|
||||
#8000000
|
||||
0#
|
||||
0(
|
||||
b1110000 )
|
||||
b10000000 *
|
||||
0+
|
||||
0/
|
||||
04
|
||||
b1110000 5
|
||||
b10000000 6
|
||||
07
|
||||
#9000000
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#10000000
|
||||
0#
|
||||
0'
|
||||
0(
|
||||
b10010000 )
|
||||
b10100000 *
|
||||
0/
|
||||
03
|
||||
04
|
||||
b10010000 5
|
||||
b10100000 6
|
||||
#11000000
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#12000000
|
||||
0#
|
||||
b1 &
|
||||
1'
|
||||
0(
|
||||
1+
|
||||
1,
|
||||
0/
|
||||
b1 2
|
||||
13
|
||||
04
|
||||
17
|
||||
18
|
||||
#13000000
|
||||
b10010000 :
|
||||
b10100000 J
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#14000000
|
||||
0#
|
||||
b10 &
|
||||
0(
|
||||
b10110000 )
|
||||
b11000000 *
|
||||
0/
|
||||
b10 2
|
||||
04
|
||||
b10110000 5
|
||||
b11000000 6
|
||||
#15000000
|
||||
b10110000 ;
|
||||
b11000000 K
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#16000000
|
||||
0#
|
||||
0'
|
||||
0(
|
||||
b11010000 )
|
||||
b11100000 *
|
||||
0/
|
||||
03
|
||||
04
|
||||
b11010000 5
|
||||
b11100000 6
|
||||
#17000000
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#18000000
|
||||
b1 !
|
||||
0#
|
||||
b10010000 $
|
||||
b10100000 %
|
||||
0(
|
||||
b1 -
|
||||
0/
|
||||
b10010000 0
|
||||
b10100000 1
|
||||
04
|
||||
#19000000
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#20000000
|
||||
b10 !
|
||||
0#
|
||||
b10110000 $
|
||||
b11000000 %
|
||||
0(
|
||||
b10 -
|
||||
0/
|
||||
b10110000 0
|
||||
b11000000 1
|
||||
04
|
||||
#21000000
|
||||
1#
|
||||
1(
|
||||
1/
|
||||
14
|
||||
#22000000
|
||||
0#
|
||||
0(
|
||||
0/
|
||||
04
|
Some files were not shown because too many files have changed in this diff.