forked from libre-chip/fayalite
Compare commits
163 commits
4b24a88641, 094c77e26e, d2c8b023bf, c043ee54d0, edcc5927a5, 7dc4417874, 838bd469ce, b6e4cd0614,
3e5b2f126a, 040cefea21, 3267cb38c4, b3cc28e2b6, 26840daf13, 4d9e8d3b47, c6feea6d51, 409992961c,
2bdc8a7c72, 477a1f2d29, 4d54f903be, 3f5dd61e46, def406ab52, a565be1b09, 676c1e3b7d, 169be960f8,
2b52799f5c, 35f98f3229, 8a63ea89d0, 84c5978eaf, 42e3179a60, 53ae3ff670, 7af9abfb6f, aacd05378f,
908ccef674, 057670c12a, f8ac78abd6, 64ec6c0dcc, c06ef56482, db9b1c202c, d3dd66cbf0, b5b1ee866c,
f0e3aef061, 6d36698adf, e7e831cf00, 4008c311bf, ef85d11327, ae7c4be9dc, 65f9ab32f4, 67e66ac3bd,
668e714dc9, 88323a8c16, 91e1b619e8, e2d2d4110b, b1f9706e4e, 4eda4366c8, 122c08d3cf, b08a747e20,
e0c9939147, 07725ab489, 36f1b9bbb6, 9a1b047d2f, 5967e812a2, 001fd31451, 57aae7b7fb, 6929352be7,
62058dc141, c4b6a0fee6, 9092e45447, a40eaaa2da, 5028401a5a, e0f978fbb6, ec3a61513b, fdc73b5f3b,
a115585d5a, ab9ff4f2db, d1bd176b28, 920d8d875f, d453755bb2, 450e1004b6, c0c5b550bc, 2fa0ea6192,
bd75fdfefd, 50c86e18dc, 60734cc9d1, 3458c21f44, 43797db36e, cdd84953d0, 86a1bb46be, 209d5b5fe1,
d4ea826051, 404a2ee043, e3a2ccd41c, 3771cea78e, dcf865caec, 31d01046a8, c16726cee6, b63676d0ca,
7005fa3330, 2ab8428062, 9b06019bf5, 36bad52978, 21c73051ec, 304d8da0e8, 2af38de900, c756aeec70,
903ca1bf30, 8d030ac65d, 562c479b62, 393f78a14d, 8616ee4737, 5087f16099, 6b31e6d515, 564ccb30bc,
ca759168ff, e4cf66adf8, cd0dd7b7ee, 2e7d685dc7, 9654167ca3, 3ed7827485, e504cfebfe, 9f42cab471,
259bee39c2, 643816d5b5, 42afd2da0e, 15bc304bb6, 4422157db8, d3f52292a1, fd45465d35, 5e0548db26,
12b3ba57f1, 965fe53077, 3abba7f9eb, 6446b71afd, d36cf92d7f, d744d85c66, 358cdd10c8, 9128a84284,
546010739a, 9b5f1218fd, 89d84551f8, c45624e3c2, 7851bf545c, 3e3da53bd2, fa50930ff8, 9516fe03a1,
52ab134673, 698b8adc23, 59be3bd645, 913baa37e9, 11ddbc43c7, c4b5d00419, 09aa9fbc78, 288a6b71b9,
0095570f19, f54e55a143, a6e40839ac, 3106a6fff6, f338f37d3e, 277d3e0d4d, b288d6f8f2, 479d59b287,
6f904148c4, 3ea0d98924, c1f1a8b749
131 changed files with 75905 additions and 2210 deletions

@@ -1,77 +0,0 @@
```yaml
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on:
  workflow_call:
    outputs:
      cache-primary-key:
        value: ${{ jobs.deps.outputs.cache-primary-key }}

jobs:
  deps:
    runs-on: debian-12
    outputs:
      cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }}
    steps:
      - uses: https://code.forgejo.org/actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: https://code.forgejo.org/actions/cache/restore@v3
        id: restore-deps
        with:
          path: deps
          key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }}
          lookup-only: true
      - name: Install Apt packages
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          apt-get update -qq
          apt-get install -qq \
            bison \
            build-essential \
            ccache \
            clang \
            cvc5 \
            flex \
            gawk \
            g++ \
            git \
            libboost-filesystem-dev \
            libboost-python-dev \
            libboost-system-dev \
            libffi-dev \
            libreadline-dev \
            lld \
            pkg-config \
            python3 \
            python3-click \
            tcl-dev \
            zlib1g-dev
      - name: Install Firtool
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          mkdir -p deps
          wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz
          sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz'
          tar -C deps -xvaf deps/firrtl.tar.gz
          rm -rf deps/firtool
          mv deps/firtool-1.86.0 deps/firtool
      - name: Get SymbiYosys
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --branch=yosys-0.45 https://github.com/YosysHQ/sby.git deps/sby
      - name: Build Z3
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --recursive --branch=z3-4.13.3 https://github.com/Z3Prover/z3.git deps/z3
          (cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local)
          make -C deps/z3/build -j"$(nproc)"
      - name: Build Yosys
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --recursive --branch=0.45 https://github.com/YosysHQ/yosys.git deps/yosys
          make -C deps/yosys -j"$(nproc)"
      - uses: https://code.forgejo.org/actions/cache/save@v3
        if: steps.restore-deps.outputs.cache-hit != 'true'
        with:
          path: deps
          key: ${{ steps.restore-deps.outputs.cache-primary-key }}
```
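
For reference, the pinned firtool archive above can be fetched and verified outside of CI with the same URL and SHA-256 that the workflow records; a minimal sketch, with illustrative local paths and the standard two-space `sha256sum -c` format:

```bash
# Download the firtool 1.86.0 release archive pinned by the workflow.
wget -O firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz
# Verify it against the checksum recorded in the workflow before unpacking.
sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14  firrtl.tar.gz'
tar -xaf firrtl.tar.gz   # unpacks to firtool-1.86.0/
```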

@@ -3,58 +3,23 @@
```yaml
on: [push, pull_request]

jobs:
  deps:
    uses: ./.forgejo/workflows/deps.yml
  test:
    runs-on: debian-12
    needs: deps
    container:
      image: git.libre-chip.org/libre-chip/fayalite-deps:latest
    steps:
      - uses: https://code.forgejo.org/actions/checkout@v3
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - run: |
          scripts/check-copyright.sh
      - run: |
          apt-get update -qq
          apt-get install -qq \
            bison \
            build-essential \
            ccache \
            clang \
            cvc5 \
            flex \
            gawk \
            git \
            libboost-filesystem-dev \
            libboost-python-dev \
            libboost-system-dev \
            libffi-dev \
            libreadline-dev \
            lld \
            pkg-config \
            python3 \
            python3-click \
            tcl-dev \
            z3 \
            zlib1g-dev
      - run: |
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.80.1
          source "$HOME/.cargo/env"
          echo "$PATH" >> "$GITHUB_PATH"
      - uses: https://code.forgejo.org/actions/cache/restore@v3
        with:
          path: deps
          key: ${{ needs.deps.outputs.cache-primary-key }}
          fail-on-cache-miss: true
      - run: |
          make -C deps/z3/build install
          make -C deps/sby install
          make -C deps/yosys install
          export PATH="$(realpath deps/firtool/bin):$PATH"
          echo "$PATH" >> "$GITHUB_PATH"
      - uses: https://github.com/Swatinem/rust-cache@v2
      - uses: https://git.libre-chip.org/mirrors/rust-cache@v2
        with:
          save-if: ${{ github.ref == 'refs/heads/master' }}
      - run: cargo test
      - run: cargo build --tests --features=unstable-doc
      - run: cargo test --doc --features=unstable-doc
      - run: cargo doc --features=unstable-doc
      - run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
      - run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out
      - run: cargo run --example tx_only_uart yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/tx_only_uart-out
```
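
The cargo steps above can also be reproduced locally from a checkout, assuming the Rust toolchain and the external tools provided by the `fayalite-deps` image (firtool, Yosys, SymbiYosys, Z3) are already on `PATH`; a minimal sketch:

```bash
# Same test and documentation checks as the workflow, run from the repository root.
cargo test
cargo build --tests --features=unstable-doc
cargo test --doc --features=unstable-doc
cargo doc --features=unstable-doc
# Module tests with the test-hasher override used in CI.
FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
```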

Cargo.lock (generated), 224 changed lines
```diff
@@ -1,18 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3

[[package]]
name = "ahash"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
dependencies = [
 "cfg-if",
 "once_cell",
 "version_check",
 "zerocopy",
]
version = 4

[[package]]
name = "allocator-api2"

@@ -37,9 +25,9 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.7"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"

[[package]]
name = "anstyle-parse"

@@ -93,6 +81,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"

[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"

[[package]]
name = "basic-toml"
version = "0.1.8"

@@ -161,9 +155,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.5.9"
version = "4.5.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
dependencies = [
 "clap_builder",
 "clap_derive",

@@ -171,9 +165,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.9"
version = "4.5.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
dependencies = [
 "anstream",
 "anstyle",

@@ -182,10 +176,19 @@ dependencies = [
]

[[package]]
name = "clap_derive"
version = "4.5.8"
name = "clap_complete"
version = "4.5.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a"
dependencies = [
 "clap",
]

[[package]]
name = "clap_derive"
version = "4.5.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
dependencies = [
 "heck",
 "proc-macro2",

@@ -195,9 +198,9 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"

[[package]]
name = "colorchoice"

@@ -301,11 +304,13 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fayalite"
version = "0.2.1"
version = "0.3.0"
dependencies = [
 "base64",
 "bitvec",
 "blake3",
 "clap",
 "clap_complete",
 "ctor",
 "eyre",
 "fayalite-proc-macros",

@@ -314,24 +319,26 @@ dependencies = [
 "jobslot",
 "num-bigint",
 "num-traits",
 "os_pipe",
 "ordered-float",
 "petgraph",
 "serde",
 "serde_json",
 "tempfile",
 "trybuild",
 "vec_map",
 "which",
]

[[package]]
name = "fayalite-proc-macros"
version = "0.2.1"
version = "0.3.0"
dependencies = [
 "fayalite-proc-macros-impl",
]

[[package]]
name = "fayalite-proc-macros-impl"
version = "0.2.1"
version = "0.3.0"
dependencies = [
 "base16ct",
 "num-bigint",

@@ -345,7 +352,7 @@ dependencies = [
[[package]]
name = "fayalite-visit-gen"
version = "0.2.1"
version = "0.3.0"
dependencies = [
 "indexmap",
 "prettyplease",

@@ -357,6 +364,18 @@ dependencies = [
 "thiserror",
]

[[package]]
name = "fixedbitset"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"

[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"

[[package]]
name = "funty"
version = "2.0.0"

@@ -375,12 +394,13 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.14"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [
 "cfg-if",
 "libc",
 "r-efi",
 "wasi",
]

@@ -392,12 +412,13 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "hashbrown"
version = "0.14.3"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
dependencies = [
 "ahash",
 "allocator-api2",
 "equivalent",
 "foldhash",
]

[[package]]

@@ -423,9 +444,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "2.2.6"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
 "equivalent",
 "hashbrown",

@@ -446,23 +467,23 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "jobslot"
version = "0.2.19"
version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d"
checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c"
dependencies = [
 "cfg-if",
 "derive_destructure2",
 "getrandom",
 "libc",
 "scopeguard",
 "windows-sys 0.59.0",
 "windows-sys 0.61.2",
]

[[package]]
name = "libc"
version = "0.2.153"
version = "0.2.176"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"

[[package]]
name = "linux-raw-sys"

@@ -472,11 +493,10 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "num-bigint"
version = "0.4.4"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
 "autocfg",
 "num-integer",
 "num-traits",
]

@@ -506,13 +526,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"

[[package]]
name = "os_pipe"
version = "1.2.1"
name = "ordered-float"
version = "5.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982"
checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d"
dependencies = [
 "libc",
 "windows-sys 0.59.0",
 "num-traits",
 "rand",
 "serde",
]

[[package]]
name = "petgraph"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06"
dependencies = [
 "fixedbitset",
 "hashbrown",
 "indexmap",
 "serde",
]

[[package]]

@@ -527,9 +560,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.83"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
 "unicode-ident",
]

@@ -543,12 +576,37 @@ dependencies = [
 "proc-macro2",
]

[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"

[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"

[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
 "rand_core",
 "serde",
]

[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
 "serde",
]

[[package]]
name = "rustix"
version = "0.38.31"

@@ -631,9 +689,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.66"
version = "2.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058"
dependencies = [
 "proc-macro2",
 "quote",

@@ -720,6 +778,12 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"

[[package]]
name = "version_check"
version = "0.9.4"

@@ -728,9 +792,21 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
version = "0.14.7+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
dependencies = [
 "wasip2",
]

[[package]]
name = "wasip2"
version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
 "wit-bindgen",
]

[[package]]
name = "which"

@@ -775,6 +851,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"

[[package]]
name = "windows-link"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"

[[package]]
name = "windows-sys"
version = "0.52.0"

@@ -786,11 +868,11 @@ dependencies = [
[[package]]
name = "windows-sys"
version = "0.59.0"
version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
 "windows-targets",
 "windows-link",
]

[[package]]

@@ -863,6 +945,12 @@ version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"

[[package]]
name = "wit-bindgen"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"

[[package]]
name = "wyz"
version = "0.5.1"

@@ -871,23 +959,3 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = [
 "tap",
]

[[package]]
name = "zerocopy"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
dependencies = [
 "zerocopy-derive",
]

[[package]]
name = "zerocopy-derive"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
 "proc-macro2",
 "quote",
 "syn",
]
```

Cargo.toml, 28 changed lines
@@ -5,38 +5,42 @@ resolver = "2"
```toml
members = ["crates/*"]

[workspace.package]
version = "0.2.1"
version = "0.3.0"
license = "LGPL-3.0-or-later"
edition = "2021"
edition = "2024"
repository = "https://git.libre-chip.org/libre-chip/fayalite"
keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
categories = ["simulation", "development-tools", "compilers"]
rust-version = "1.80.1"
rust-version = "1.89.0"

[workspace.dependencies]
fayalite-proc-macros = { version = "=0.2.1", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.2.1", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.2.1", path = "crates/fayalite-visit-gen" }
fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
base16ct = "0.2.0"
base64 = "0.22.1"
bitvec = { version = "1.0.1", features = ["serde"] }
blake3 = { version = "1.5.4", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
clap_complete = "4.5.58"
ctor = "0.2.8"
eyre = "0.6.12"
hashbrown = "0.14.3"
indexmap = { version = "2.2.6", features = ["serde"] }
jobslot = "0.2.19"
num-bigint = "0.4.4"
hashbrown = "0.15.2"
indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.23"
num-bigint = "0.4.6"
num-traits = "0.2.16"
os_pipe = "1.2.1"
ordered-float = { version = "5.1.0", features = ["serde"] }
petgraph = "0.8.1"
prettyplease = "0.2.20"
proc-macro2 = "1.0.83"
quote = "1.0.36"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = { version = "1.0.117", features = ["preserve_order"] }
sha2 = "0.10.8"
syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] }
syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] }
tempfile = "3.10.1"
thiserror = "1.0.61"
trybuild = "1.0"
vec_map = "0.8.2"
which = "6.0.1"
```

README.md, 75 changed lines
@@ -7,3 +7,78 @@ See Notices.txt for copyright information

Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).

[FIRRTL]: https://github.com/chipsalliance/firrtl-spec

# Building the [Blinky example] for the Arty A7 100T on Linux

[Blinky example]: crates/fayalite/examples/blinky.rs

This uses the container image containing all the external programs and files that Fayalite needs to build for FPGAs; the sources for the container image are in <https://git.libre-chip.org/libre-chip/fayalite-deps>.

Steps:

Install podman (or docker).

Run:
```bash
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out
```

To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS:

[openFPGALoader]: https://github.com/trabucayre/openFPGALoader

On Debian 12:
```bash
sudo apt update && sudo apt install openfpgaloader
```

Then program the FPGA:
```bash
sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit
```

This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert when the board is power-cycled.

To program the Flash also, so it stays programmed when power-cycling the board:

```bash
sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit
```

# Building the [Transmit-only UART example] for the Arty A7 100T on Linux

[Transmit-only UART example]: crates/fayalite/examples/tx_only_uart.rs

Follow the steps above for building the Blinky example, but replace `blinky` with `tx_only_uart`; the exact command is shown below.
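
For example, the container invocation from the Blinky section becomes the following (same image, chip-database paths, and output directory as the CI invocation):

```bash
# Build the tx_only_uart example for the Arty A7 100T inside the fayalite-deps container.
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example tx_only_uart yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/tx_only_uart-out
```

Programming the board then uses the same `openFPGALoader` commands as above, pointed at the bitstream in `target/tx_only_uart-out` (presumably `tx_only_uart.bit`).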

View the output using [tio](https://github.com/tio/tio), which you can install on Debian using `apt`.

Find the correct USB device:
```bash
sudo tio --list
```

You want the device with a name like the following (note the `if01`; `if00` is presumably the JTAG port):
`/dev/serial/by-id/usb-Digilent_Digilent_USB_Device_210319B4A51E-if01-port0`

Connect to the serial port:
```bash
sudo tio -b115200 /dev/serial/by-id/put-your-device-id-here
```

You'll see (repeating endlessly):
```text
Hello World from Fayalite!!!
Hello World from Fayalite!!!
Hello World from Fayalite!!!
```

Press Ctrl+T then `q` to exit tio.

# Funding

## NLnet Grants

* [Libre-Chip CPU with proof of No Spectre bugs](https://nlnet.nl/project/Libre-Chip-proof/) 2024-12-324 [(progress)](https://git.libre-chip.org/libre-chip/grant-tracking/src/branch/master/nlnet-2024-12-324/progress.md)

This project was funded through the [NGI0 Commons Fund](https://nlnet.nl/commonsfund), a fund established by [NLnet](https://nlnet.nl/) with financial support from the European Commission's [Next Generation Internet](https://ngi.eu) programme, under the aegis of [DG Communications Networks, Content and Technology](https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en) under grant agreement № [101135429](https://cordis.europa.eu/project/id/101135429). Additional funding is made available by the [Swiss State Secretariat for Education, Research and Innovation](https://www.sbfi.admin.ch/sbfi/en/home.html) (SERI).

@@ -220,6 +220,7 @@ forward_fold!(syn::ExprArray => fold_expr_array);
```rust
forward_fold!(syn::ExprCall => fold_expr_call);
forward_fold!(syn::ExprIf => fold_expr_if);
forward_fold!(syn::ExprMatch => fold_expr_match);
forward_fold!(syn::ExprMethodCall => fold_expr_method_call);
forward_fold!(syn::ExprPath => fold_expr_path);
forward_fold!(syn::ExprRepeat => fold_expr_repeat);
forward_fold!(syn::ExprStruct => fold_expr_struct);
```

@@ -1,21 +1,22 @@
```rust
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
    Errors, HdlAttr, PairsIterExt,
    hdl_type_common::{
        common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField,
        ParsedFieldsNamed, ParsedGenerics, SplitForImpl, TypesParser, WrappedInConst,
        ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField, ParsedFieldsNamed, ParsedGenerics,
        SplitForImpl, TypesParser, WrappedInConst, common_derives, get_target,
    },
    kw, Errors, HdlAttr, PairsIterExt,
    kw,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote_spanned, ToTokens};
use quote::{ToTokens, format_ident, quote_spanned};
use syn::{
    parse_quote, parse_quote_spanned,
    AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed,
    GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility, parse_quote,
    parse_quote_spanned,
    punctuated::{Pair, Punctuated},
    spanned::Spanned,
    token::Brace,
    AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed,
    GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility,
};

#[derive(Clone, Debug)]
```
@ -30,7 +31,9 @@ pub(crate) struct ParsedBundle {
|
|||
pub(crate) field_flips: Vec<Option<HdlAttr<kw::flip, kw::hdl>>>,
|
||||
pub(crate) mask_type_ident: Ident,
|
||||
pub(crate) mask_type_match_variant_ident: Ident,
|
||||
pub(crate) mask_type_sim_value_ident: Ident,
|
||||
pub(crate) match_variant_ident: Ident,
|
||||
pub(crate) sim_value_ident: Ident,
|
||||
pub(crate) builder_ident: Ident,
|
||||
pub(crate) mask_type_builder_ident: Ident,
|
||||
}
|
||||
|
|
@ -83,7 +86,12 @@ impl ParsedBundle {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
ref get,
|
||||
} = options.body;
|
||||
if let Some((get, ..)) = get {
|
||||
errors.error(get, "#[hdl(get(...))] is not allowed on structs");
|
||||
}
|
||||
let mut fields = match fields {
|
||||
syn::Fields::Named(fields) => fields,
|
||||
syn::Fields::Unnamed(fields) => {
|
||||
|
|
@ -124,7 +132,9 @@ impl ParsedBundle {
|
|||
field_flips,
|
||||
mask_type_ident: format_ident!("__{}__MaskType", ident),
|
||||
mask_type_match_variant_ident: format_ident!("__{}__MaskType__MatchVariant", ident),
|
||||
mask_type_sim_value_ident: format_ident!("__{}__MaskType__SimValue", ident),
|
||||
match_variant_ident: format_ident!("__{}__MatchVariant", ident),
|
||||
sim_value_ident: format_ident!("__{}__SimValue", ident),
|
||||
mask_type_builder_ident: format_ident!("__{}__MaskType__Builder", ident),
|
||||
builder_ident: format_ident!("__{}__Builder", ident),
|
||||
ident,
|
||||
|
|
@ -339,7 +349,6 @@ impl ToTokens for Builder {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {self.ident.span()=>
|
||||
#[automatically_derived]
|
||||
#[allow(non_camel_case_types, non_snake_case, dead_code)]
|
||||
impl #impl_generics #unfilled_ty
|
||||
#where_clause
|
||||
|
|
@ -426,7 +435,9 @@ impl ToTokens for ParsedBundle {
|
|||
field_flips,
|
||||
mask_type_ident,
|
||||
mask_type_match_variant_ident,
|
||||
mask_type_sim_value_ident,
|
||||
match_variant_ident,
|
||||
sim_value_ident,
|
||||
builder_ident,
|
||||
mask_type_builder_ident,
|
||||
} = self;
|
||||
|
|
@ -437,6 +448,8 @@ impl ToTokens for ParsedBundle {
|
|||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut item_attrs = attrs.clone();
|
||||
|
|
@ -521,7 +534,7 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_match_variant_fields = mask_type_fields;
|
||||
let mut mask_type_match_variant_fields = mask_type_fields.clone();
|
||||
for Field { ty, .. } in &mut mask_type_match_variant_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
|
|
@ -563,6 +576,58 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_sim_value_fields = mask_type_fields;
|
||||
for Field { ty, .. } in &mut mask_type_sim_value_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
},
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: mask_type_sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: Fields::Named(mask_type_sim_value_fields),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut sim_value_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut sim_value_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
},
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: Fields::Named(sim_value_fields),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let this_token = Ident::new("__this", span);
|
||||
let fields_token = Ident::new("__fields", span);
|
||||
let self_token = Token;
|
||||
|
|
@ -613,6 +678,32 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
},
|
||||
));
|
||||
let sim_value_from_opaque_fields =
|
||||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: v.field_from_opaque(),
|
||||
}
|
||||
}));
|
||||
let sim_value_clone_from_opaque_fields =
|
||||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
v.field_clone_from_opaque(&mut value.#ident);
|
||||
}
|
||||
}));
|
||||
let sim_value_to_opaque_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
v.field(&value.#ident);
|
||||
}
|
||||
}));
|
||||
let to_sim_value_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::sim::value::SimValue::ty(&self.#ident),
|
||||
}
|
||||
}));
|
||||
let fields_len = fields.named().into_iter().len();
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
|
|
@ -621,6 +712,7 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type BaseType = ::fayalite::bundle::Bundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type SimValue = #mask_type_sim_value_ident #type_generics;
|
||||
type MatchVariant = #mask_type_match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
|
||||
|
|
@ -658,6 +750,35 @@ impl ToTokens for ParsedBundle {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#mask_type_sim_value_ident {
|
||||
#(#sim_value_from_opaque_fields)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#(#sim_value_clone_from_opaque_fields)*
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer);
|
||||
#(#sim_value_to_opaque_fields)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #mask_type_ident #type_generics
|
||||
|
|
@ -689,11 +810,57 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValue for #mask_type_sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #mask_type_ident #type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #mask_type_ident {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #mask_type_ident {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#mask_type_ident #type_generics>
|
||||
for #mask_type_sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #mask_type_ident #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #mask_type_ident #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type BaseType = ::fayalite::bundle::Bundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type SimValue = #sim_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
|
||||
|
|
@ -733,6 +900,35 @@ impl ToTokens for ParsedBundle {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#sim_value_ident {
|
||||
#(#sim_value_from_opaque_fields)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#(#sim_value_clone_from_opaque_fields)*
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer);
|
||||
#(#sim_value_to_opaque_fields)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #target #type_generics
|
||||
|
|
@ -763,8 +959,144 @@ impl ToTokens for ParsedBundle {
|
|||
::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValue for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #target #type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #target {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #target {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics>
|
||||
for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
let mut expr_where_clause =
|
||||
Generics::from(generics)
|
||||
.where_clause
|
||||
.unwrap_or_else(|| syn::WhereClause {
|
||||
where_token: Token,
|
||||
predicates: Punctuated::new(),
|
||||
});
|
||||
let mut sim_value_where_clause = expr_where_clause.clone();
|
||||
let mut fields_sim_value_eq = vec![];
|
||||
let mut fields_cmp_eq = vec![];
|
||||
let mut fields_cmp_ne = vec![];
|
||||
for field in fields.named() {
|
||||
let field_ident = field.ident();
|
||||
let field_ty = field.ty();
|
||||
expr_where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::expr::ops::ExprPartialEq<#field_ty>
|
||||
});
|
||||
sim_value_where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::sim::value::SimValuePartialEq<#field_ty>
|
||||
});
|
||||
fields_sim_value_eq.push(quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValuePartialEq::sim_value_eq(&__lhs.#field_ident, &__rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_eq.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_ne.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_ne(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
}
|
||||
let sim_value_eq_body;
|
||||
let cmp_eq_body;
|
||||
let cmp_ne_body;
|
||||
if fields_len == 0 {
|
||||
sim_value_eq_body = quote_spanned! {span=>
|
||||
true
|
||||
};
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&true)
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&false)
|
||||
};
|
||||
} else {
|
||||
sim_value_eq_body = quote_spanned! {span=>
|
||||
#(#fields_sim_value_eq)&&*
|
||||
};
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_eq)&*
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_ne)|*
|
||||
};
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::expr::ops::ExprPartialEq<Self> for #target #type_generics
|
||||
#expr_where_clause
|
||||
{
|
||||
fn cmp_eq(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_eq_body
|
||||
}
|
||||
fn cmp_ne(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_ne_body
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::SimValuePartialEq<Self> for #target #type_generics
|
||||
#sim_value_where_clause
|
||||
{
|
||||
fn sim_value_eq(
|
||||
__lhs: &::fayalite::sim::value::SimValue<Self>,
|
||||
__rhs: &::fayalite::sim::value::SimValue<Self>,
|
||||
) -> bool {
|
||||
#sim_value_eq_body
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
let static_generics = generics.clone().for_static_type();
|
||||
let (static_impl_generics, static_type_generics, static_where_clause) =
|
||||
|
|
@ -800,6 +1132,14 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
|
|
@ -822,6 +1162,15 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default
|
||||
for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
|
|
|
|||
|
|

@@ -1,20 +1,20 @@
```rust
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
    Errors, HdlAttr, PairsIterExt,
    hdl_type_common::{
        common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics,
        ParsedType, SplitForImpl, TypesParser, WrappedInConst,
        ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, SplitForImpl,
        TypesParser, WrappedInConst, common_derives, get_target,
    },
    kw, Errors, HdlAttr, PairsIterExt,
    kw,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote_spanned, ToTokens};
use quote::{ToTokens, format_ident, quote_spanned};
use syn::{
    parse_quote_spanned,
    Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident,
    ItemEnum, ItemStruct, Token, Type, Variant, Visibility, parse_quote_spanned,
    punctuated::{Pair, Punctuated},
    token::{Brace, Paren},
    Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident,
    ItemEnum, ItemStruct, Token, Type, Variant, Visibility,
};

crate::options! {
```
@ -129,6 +129,9 @@ pub(crate) struct ParsedEnum {
|
|||
pub(crate) brace_token: Brace,
|
||||
pub(crate) variants: Punctuated<ParsedVariant, Token![,]>,
|
||||
pub(crate) match_variant_ident: Ident,
|
||||
pub(crate) sim_value_ident: Ident,
|
||||
pub(crate) sim_builder_ident: Ident,
|
||||
pub(crate) sim_builder_ty_field_ident: Ident,
|
||||
}
|
||||
|
||||
impl ParsedEnum {
|
||||
|
|
@ -155,7 +158,15 @@ impl ParsedEnum {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
ref get,
|
||||
} = options.body;
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "#[hdl(cmp_eq)] is not yet implemented for enums");
|
||||
}
|
||||
if let Some((get, ..)) = get {
|
||||
errors.error(get, "#[hdl(get(...))] is not allowed on enums");
|
||||
}
|
||||
attrs.retain(|attr| {
|
||||
if attr.path().is_ident("repr") {
|
||||
errors.error(attr, "#[repr] is not supported on #[hdl] enums");
|
||||
|
|
@ -186,6 +197,9 @@ impl ParsedEnum {
|
|||
brace_token,
|
||||
variants,
|
||||
match_variant_ident: format_ident!("__{}__MatchVariant", ident),
|
||||
sim_value_ident: format_ident!("__{}__SimValue", ident),
|
||||
sim_builder_ident: format_ident!("__{}__SimBuilder", ident),
|
||||
sim_builder_ty_field_ident: format_ident!("__ty", span = ident.span()),
|
||||
ident,
|
||||
})
|
||||
}
|
||||
|
|
@ -203,6 +217,9 @@ impl ToTokens for ParsedEnum {
|
|||
brace_token,
|
||||
variants,
|
||||
match_variant_ident,
|
||||
sim_value_ident,
|
||||
sim_builder_ident,
|
||||
sim_builder_ty_field_ident,
|
||||
} = self;
|
||||
let span = ident.span();
|
||||
let ItemOptions {
|
||||
|
|
@ -211,6 +228,8 @@ impl ToTokens for ParsedEnum {
|
|||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _, // TODO: implement cmp_eq for enums
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
|
|
@ -404,11 +423,137 @@ impl ToTokens for ParsedEnum {
|
|||
)),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
struct_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
ItemStruct {
|
||||
attrs: struct_attrs,
|
||||
vis: vis.clone(),
|
||||
struct_token: Token,
|
||||
ident: sim_builder_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: FieldsNamed {
|
||||
brace_token: *brace_token,
|
||||
named: Punctuated::from_iter([Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(sim_builder_ty_field_ident.clone()),
|
||||
colon_token: Some(Token),
|
||||
ty: parse_quote_spanned! {span=>
|
||||
#target #type_generics
|
||||
},
|
||||
}]),
|
||||
}
|
||||
.into(),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
});
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
let sim_value_has_unknown_variant = !variants.len().is_power_of_two();
|
||||
let sim_value_unknown_variant_name = sim_value_has_unknown_variant.then(|| {
|
||||
let mut name = String::new();
|
||||
let unknown = "Unknown";
|
||||
loop {
|
||||
let orig_len = name.len();
|
||||
name.push_str(unknown);
|
||||
if variants.iter().all(|v| v.ident != name) {
|
||||
break Ident::new(&name, span);
|
||||
}
|
||||
name.truncate(orig_len);
|
||||
name.push('_');
|
||||
}
|
||||
});
|
||||
let sim_value_unknown_variant =
|
||||
sim_value_unknown_variant_name
|
||||
.as_ref()
|
||||
.map(|unknown_variant_name| {
|
||||
Pair::End(parse_quote_spanned! {span=>
|
||||
#unknown_variant_name(::fayalite::enum_::UnknownVariantSimValue)
|
||||
})
|
||||
});
|
||||
ItemEnum {
|
||||
attrs: enum_attrs,
|
||||
vis: vis.clone(),
|
||||
enum_token: *enum_token,
|
||||
ident: sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
brace_token: *brace_token,
|
||||
variants: Punctuated::from_iter(
|
||||
variants
|
||||
.pairs()
|
||||
.map_pair_value_ref(
|
||||
|ParsedVariant {
|
||||
attrs,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
}| Variant {
|
||||
attrs: attrs.clone(),
|
||||
ident: ident.clone(),
|
||||
fields: match field {
|
||||
Some(ParsedVariantField {
|
||||
paren_token,
|
||||
attrs,
|
||||
options: _,
|
||||
ty,
|
||||
comma_token,
|
||||
}) => Fields::Unnamed(FieldsUnnamed {
|
||||
paren_token: *paren_token,
|
||||
unnamed: Punctuated::from_iter([
|
||||
Pair::new(
|
||||
Field {
|
||||
attrs: attrs.clone(),
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
},
|
||||
},
|
||||
Some(comma_token.unwrap_or(Token))),
|
||||
),
|
||||
Pair::new(
|
||||
Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::EnumPaddingSimValue
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
]),
|
||||
}),
|
||||
None => Fields::Unnamed(parse_quote_spanned! {span=>
|
||||
(::fayalite::enum_::EnumPaddingSimValue)
|
||||
}),
|
||||
},
|
||||
discriminant: None,
|
||||
},
|
||||
)
|
||||
.chain(sim_value_unknown_variant),
|
||||
),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let self_token = Token![self](span);
|
||||
for (index, ParsedVariant { ident, field, .. }) in variants.iter().enumerate() {
|
||||
if let Some(ParsedVariantField { ty, .. }) = field {
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
|
|
@ -430,10 +575,27 @@ impl ToTokens for ParsedEnum {
|
|||
)
|
||||
}
|
||||
}
|
||||
impl #impl_generics #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident<__V: ::fayalite::sim::value::ToSimValueWithType<#ty>>(
|
||||
#self_token,
|
||||
v: __V,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
let v = ::fayalite::sim::value::ToSimValueWithType::into_sim_value_with_type(
|
||||
v,
|
||||
#self_token.#sim_builder_ty_field_ident.#ident,
|
||||
);
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
#self_token.#sim_builder_ty_field_ident,
|
||||
#sim_value_ident::#ident(v, ::fayalite::enum_::EnumPaddingSimValue::new()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
|
|
@ -448,6 +610,17 @@ impl ToTokens for ParsedEnum {
|
|||
)
|
||||
}
|
||||
}
|
||||
impl #impl_generics #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident(#self_token) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
#self_token.#sim_builder_ty_field_ident,
|
||||
#sim_value_ident::#ident(::fayalite::enum_::EnumPaddingSimValue::new()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
|
|
@ -529,6 +702,142 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
},
|
||||
));
|
||||
let sim_value_from_opaque_unknown_match_arm = if let Some(sim_value_unknown_variant_name) =
|
||||
&sim_value_unknown_variant_name
|
||||
{
|
||||
quote_spanned! {span=>
|
||||
_ => #sim_value_ident::#sim_value_unknown_variant_name(v.unknown_variant_from_opaque()),
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
_ => ::fayalite::__std::unreachable!(),
|
||||
}
|
||||
};
|
||||
let sim_value_from_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#index => {
|
||||
let (field, padding) = v.variant_with_field_from_opaque();
|
||||
#sim_value_ident::#ident(field, padding)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#index => #sim_value_ident::#ident(
|
||||
v.variant_no_field_from_opaque(),
|
||||
),
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain([sim_value_from_opaque_unknown_match_arm]),
|
||||
);
|
||||
let sim_value_clone_from_opaque_unknown_match_arm =
|
||||
if let Some(sim_value_unknown_variant_name) = &sim_value_unknown_variant_name {
|
||||
quote_spanned! {span=>
|
||||
_ => if let #sim_value_ident::#sim_value_unknown_variant_name(value) = value {
|
||||
v.unknown_variant_clone_from_opaque(value);
|
||||
} else {
|
||||
*value = #sim_value_ident::#sim_value_unknown_variant_name(
|
||||
v.unknown_variant_from_opaque(),
|
||||
);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
_ => ::fayalite::__std::unreachable!(),
|
||||
}
|
||||
};
|
||||
let sim_value_clone_from_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#index => if let #sim_value_ident::#ident(field, padding) = value {
|
||||
v.variant_with_field_clone_from_opaque(field, padding);
|
||||
} else {
|
||||
let (field, padding) = v.variant_with_field_from_opaque();
|
||||
*value = #sim_value_ident::#ident(field, padding);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#index => if let #sim_value_ident::#ident(padding) = value {
|
||||
v.variant_no_field_clone_from_opaque(padding);
|
||||
} else {
|
||||
*value = #sim_value_ident::#ident(
|
||||
v.variant_no_field_from_opaque(),
|
||||
);
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain([sim_value_clone_from_opaque_unknown_match_arm]),
|
||||
);
|
||||
let sim_value_to_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#ident(field, padding) => {
|
||||
v.variant_with_field_to_opaque(#index, field, padding)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#ident(padding) => {
|
||||
v.variant_no_field_to_opaque(#index, padding)
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain(sim_value_unknown_variant_name.as_ref().map(
|
||||
|sim_value_unknown_variant_name| {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#sim_value_unknown_variant_name(value) => {
|
||||
v.unknown_variant_to_opaque(value)
|
||||
}
|
||||
}
|
||||
},
|
||||
)),
|
||||
);
|
||||
let variants_len = variants.len();
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
|
|
@ -537,6 +846,7 @@ impl ToTokens for ParsedEnum {
|
|||
{
|
||||
type BaseType = ::fayalite::enum_::Enum;
|
||||
type MaskType = ::fayalite::int::Bool;
|
||||
type SimValue = #sim_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ::fayalite::module::Scope;
|
||||
type MatchVariantAndInactiveScope = ::fayalite::enum_::EnumMatchVariantAndInactiveScope<Self>;
|
||||
|
|
@ -569,11 +879,41 @@ impl ToTokens for ParsedEnum {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque);
|
||||
match v.discriminant() {
|
||||
#(#sim_value_from_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque);
|
||||
match v.discriminant() {
|
||||
#(#sim_value_clone_from_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
let v = ::fayalite::enum_::EnumSimValueToOpaque::new(*self, writer);
|
||||
match value {
|
||||
#(#sim_value_to_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::enum_::EnumType for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type SimBuilder = #sim_builder_ident #type_generics;
|
||||
fn match_activate_scope(
|
||||
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
) -> (<Self as ::fayalite::ty::Type>::MatchVariant, <Self as ::fayalite::ty::Type>::MatchActiveScope) {
|
||||
|
|
@ -592,6 +932,33 @@ impl ToTokens for ParsedEnum {
|
|||
][..])
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics>
|
||||
for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::convert::From<#target #type_generics>
|
||||
for #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn from(#sim_builder_ty_field_ident: #target #type_generics) -> Self {
|
||||
Self { #sim_builder_ty_field_ident }
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
|
|
@ -629,6 +996,15 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default
|
||||
for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType
|
||||
for #target #static_type_generics
|
||||
|
|
@ -647,6 +1023,34 @@ impl ToTokens for ParsedEnum {
|
|||
const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties =
|
||||
<::fayalite::int::Bool as ::fayalite::ty::StaticType>::TYPE_PROPERTIES;
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::sim::value::ToSimValue
|
||||
for #sim_value_ident #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
type Type = #target #static_type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
::fayalite::ty::StaticType::TYPE,
|
||||
::fayalite::__std::clone::Clone::clone(self),
|
||||
)
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
::fayalite::ty::StaticType::TYPE,
|
||||
self,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,30 +1,356 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr,
|
||||
hdl_type_common::{
|
||||
get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType,
|
||||
TypesParser,
|
||||
ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, TypesParser,
|
||||
WrappedInConst, common_derives, get_target, known_items,
|
||||
},
|
||||
kw, Errors, HdlAttr,
|
||||
kw,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::ToTokens;
|
||||
use syn::{parse_quote_spanned, Attribute, Generics, Ident, ItemType, Token, Type, Visibility};
|
||||
use quote::{ToTokens, format_ident, quote_spanned};
|
||||
use syn::{
|
||||
AngleBracketedGenericArguments, Attribute, Expr, Fields, GenericArgument, GenericParam,
|
||||
Generics, Ident, ItemStruct, ItemType, Path, PathArguments, Token, TraitBound,
|
||||
TraitBoundModifier, Type, TypeGroup, TypeParam, TypeParamBound, TypeParen, Visibility,
|
||||
parse_quote_spanned, punctuated::Pair, token::Paren,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct ParsedTypeAlias {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) type_token: Token![type],
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
pub(crate) eq_token: Token![=],
|
||||
pub(crate) ty: MaybeParsed<ParsedType, Type>,
|
||||
pub(crate) semi_token: Token![;],
|
||||
pub(crate) struct PhantomConstGetBound {
|
||||
pub(crate) phantom_const_get: known_items::PhantomConstGet,
|
||||
pub(crate) colon2_token: Option<Token![::]>,
|
||||
pub(crate) lt_token: Token![<],
|
||||
pub(crate) ty: Type,
|
||||
pub(crate) comma_token: Option<Token![,]>,
|
||||
pub(crate) gt_token: Token![>],
|
||||
}
|
||||
|
||||
impl From<PhantomConstGetBound> for Path {
|
||||
fn from(value: PhantomConstGetBound) -> Self {
|
||||
let PhantomConstGetBound {
|
||||
phantom_const_get,
|
||||
colon2_token,
|
||||
lt_token,
|
||||
ty,
|
||||
comma_token,
|
||||
gt_token,
|
||||
} = value;
|
||||
let mut path = phantom_const_get.path;
|
||||
path.segments.last_mut().expect("known to exist").arguments =
|
||||
PathArguments::AngleBracketed(AngleBracketedGenericArguments {
|
||||
colon2_token,
|
||||
lt_token,
|
||||
args: FromIterator::from_iter([Pair::new(GenericArgument::Type(ty), comma_token)]),
|
||||
gt_token,
|
||||
});
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstGetBound> for TraitBound {
|
||||
fn from(value: PhantomConstGetBound) -> Self {
|
||||
let path = Path::from(value);
|
||||
TraitBound {
|
||||
paren_token: None,
|
||||
modifier: TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstGetBound> for TypeParamBound {
|
||||
fn from(value: PhantomConstGetBound) -> Self {
|
||||
TraitBound::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstGetBound {
|
||||
fn parse_opt(bound: TypeParamBound) -> Option<Self> {
|
||||
let TypeParamBound::Trait(TraitBound {
|
||||
paren_token: None,
|
||||
modifier: TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}) = bound
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let Ok((
|
||||
phantom_const_get,
|
||||
PathArguments::AngleBracketed(AngleBracketedGenericArguments {
|
||||
colon2_token,
|
||||
lt_token,
|
||||
args,
|
||||
gt_token,
|
||||
}),
|
||||
)) = known_items::PhantomConstGet::parse_path_with_arguments(path)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let mut args = args.into_pairs();
|
||||
let (GenericArgument::Type(ty), comma_token) = args.next()?.into_tuple() else {
|
||||
return None;
|
||||
};
|
||||
let None = args.next() else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
phantom_const_get,
|
||||
colon2_token,
|
||||
lt_token,
|
||||
ty,
|
||||
comma_token,
|
||||
gt_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstAccessorTypeParam {
|
||||
attrs: Vec<Attribute>,
|
||||
ident: Ident,
|
||||
colon_token: Token![:],
|
||||
phantom_const_get_bound: PhantomConstGetBound,
|
||||
plus_token: Option<Token![+]>,
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorTypeParam> for TypeParam {
|
||||
fn from(value: PhantomConstAccessorTypeParam) -> Self {
|
||||
let PhantomConstAccessorTypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
phantom_const_get_bound,
|
||||
plus_token,
|
||||
} = value;
|
||||
TypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token: Some(colon_token),
|
||||
bounds: FromIterator::from_iter([Pair::new(
|
||||
phantom_const_get_bound.into(),
|
||||
plus_token,
|
||||
)]),
|
||||
eq_token: None,
|
||||
default: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorTypeParam> for GenericParam {
|
||||
fn from(value: PhantomConstAccessorTypeParam) -> Self {
|
||||
TypeParam::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstAccessorTypeParam {
|
||||
fn parse_opt(generic_param: GenericParam) -> Option<Self> {
|
||||
let GenericParam::Type(TypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
bounds,
|
||||
eq_token: None,
|
||||
default: None,
|
||||
}) = generic_param
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let colon_token = colon_token.unwrap_or(Token![:](ident.span()));
|
||||
let mut bounds = bounds.into_pairs();
|
||||
let (bound, plus_token) = bounds.next()?.into_tuple();
|
||||
let phantom_const_get_bound = PhantomConstGetBound::parse_opt(bound)?;
|
||||
let None = bounds.next() else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
phantom_const_get_bound,
|
||||
plus_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstAccessorGenerics {
|
||||
lt_token: Token![<],
|
||||
type_param: PhantomConstAccessorTypeParam,
|
||||
comma_token: Option<Token![,]>,
|
||||
gt_token: Token![>],
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorGenerics> for Generics {
|
||||
fn from(value: PhantomConstAccessorGenerics) -> Self {
|
||||
let PhantomConstAccessorGenerics {
|
||||
lt_token,
|
||||
type_param,
|
||||
comma_token,
|
||||
gt_token,
|
||||
} = value;
|
||||
Generics {
|
||||
lt_token: Some(lt_token),
|
||||
params: FromIterator::from_iter([Pair::new(type_param.into(), comma_token)]),
|
||||
gt_token: Some(gt_token),
|
||||
where_clause: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a PhantomConstAccessorGenerics> for Generics {
|
||||
fn from(value: &'a PhantomConstAccessorGenerics) -> Self {
|
||||
value.clone().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstAccessorGenerics {
|
||||
fn parse_opt(generics: Generics) -> Option<Self> {
|
||||
let Generics {
|
||||
lt_token,
|
||||
params,
|
||||
gt_token,
|
||||
where_clause: None,
|
||||
} = generics
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
let mut params = params.into_pairs();
|
||||
let (generic_param, comma_token) = params.next()?.into_tuple();
|
||||
let type_param = PhantomConstAccessorTypeParam::parse_opt(generic_param)?;
|
||||
let span = type_param.ident.span();
|
||||
let lt_token = lt_token.unwrap_or(Token![<](span));
|
||||
let gt_token = gt_token.unwrap_or(Token![>](span));
|
||||
let None = params.next() else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
lt_token,
|
||||
type_param,
|
||||
comma_token,
|
||||
gt_token,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum ParsedTypeAlias {
|
||||
TypeAlias {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
vis: Visibility,
|
||||
type_token: Token![type],
|
||||
ident: Ident,
|
||||
generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
eq_token: Token![=],
|
||||
ty: MaybeParsed<ParsedType, Type>,
|
||||
semi_token: Token![;],
|
||||
},
|
||||
PhantomConstAccessor {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
get: (kw::get, Paren, Expr),
|
||||
vis: Visibility,
|
||||
type_token: Token![type],
|
||||
ident: Ident,
|
||||
generics: PhantomConstAccessorGenerics,
|
||||
eq_token: Token![=],
|
||||
ty: Type,
|
||||
ty_is_dyn_size: Option<known_items::DynSize>,
|
||||
semi_token: Token![;],
|
||||
},
|
||||
}
|
||||
|
||||
impl ParsedTypeAlias {
|
||||
fn ty_is_dyn_size(ty: &Type) -> Option<known_items::DynSize> {
|
||||
match ty {
|
||||
Type::Group(TypeGroup {
|
||||
group_token: _,
|
||||
elem,
|
||||
}) => Self::ty_is_dyn_size(elem),
|
||||
Type::Paren(TypeParen {
|
||||
paren_token: _,
|
||||
elem,
|
||||
}) => Self::ty_is_dyn_size(elem),
|
||||
Type::Path(syn::TypePath { qself: None, path }) => {
|
||||
known_items::DynSize::parse_path(path.clone()).ok()
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn parse_phantom_const_accessor(
|
||||
item: ItemType,
|
||||
mut errors: Errors,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
get: (kw::get, Paren, Expr),
|
||||
) -> syn::Result<Self> {
|
||||
let ItemType {
|
||||
attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} = item;
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
ref target,
|
||||
custom_bounds,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
get: _,
|
||||
} = options.body;
|
||||
if let Some((no_static,)) = no_static {
|
||||
errors.error(no_static, "no_static is not valid on type aliases");
|
||||
}
|
||||
if let Some((target, ..)) = target {
|
||||
errors.error(
|
||||
target,
|
||||
"target is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
if let Some((no_runtime_generics,)) = no_runtime_generics {
|
||||
errors.error(
|
||||
no_runtime_generics,
|
||||
"no_runtime_generics is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
|
||||
}
|
||||
if let Some((custom_bounds,)) = custom_bounds {
|
||||
errors.error(
|
||||
custom_bounds,
|
||||
"custom_bounds is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
let Some(generics) = PhantomConstAccessorGenerics::parse_opt(generics) else {
|
||||
errors.error(ident, "#[hdl(get(...))] type alias must be of the form:\ntype MyTypeGetter<P: PhantomConstGet<MyType>> = RetType;");
|
||||
errors.finish()?;
|
||||
unreachable!();
|
||||
};
|
||||
errors.finish()?;
|
||||
let ty_is_dyn_size = Self::ty_is_dyn_size(&ty);
|
||||
Ok(Self::PhantomConstAccessor {
|
||||
attrs,
|
||||
options,
|
||||
get,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty: *ty,
|
||||
ty_is_dyn_size,
|
||||
semi_token,
|
||||
})
|
||||
}
|
||||
fn parse(item: ItemType) -> syn::Result<Self> {
|
||||
let ItemType {
|
||||
mut attrs,
|
||||
|
|
@ -49,10 +375,32 @@ impl ParsedTypeAlias {
|
|||
custom_bounds,
|
||||
no_static,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
ref mut get,
|
||||
} = options.body;
|
||||
if let Some(get) = get.take() {
|
||||
return Self::parse_phantom_const_accessor(
|
||||
ItemType {
|
||||
attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
},
|
||||
errors,
|
||||
options,
|
||||
get,
|
||||
);
|
||||
}
|
||||
if let Some((no_static,)) = no_static {
|
||||
errors.error(no_static, "no_static is not valid on type aliases");
|
||||
}
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
|
||||
}
|
||||
let generics = if custom_bounds.is_some() {
|
||||
MaybeParsed::Unrecognized(generics)
|
||||
} else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) {
|
||||
|
|
@ -62,7 +410,7 @@ impl ParsedTypeAlias {
|
|||
};
|
||||
let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors);
|
||||
errors.finish()?;
|
||||
Ok(Self {
|
||||
Ok(Self::TypeAlias {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
|
|
@ -78,53 +426,155 @@ impl ParsedTypeAlias {
|
|||
|
||||
impl ToTokens for ParsedTypeAlias {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} = self;
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: generics.into(),
|
||||
eq_token: *eq_token,
|
||||
ty: Box::new(ty.clone().into()),
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
|
||||
(generics, ty, no_runtime_generics)
|
||||
{
|
||||
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
|
||||
ty.make_hdl_type_expr(context)
|
||||
})
|
||||
match self {
|
||||
Self::TypeAlias {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} => {
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: generics.into(),
|
||||
eq_token: *eq_token,
|
||||
ty: Box::new(ty.clone().into()),
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
|
||||
(generics, ty, no_runtime_generics)
|
||||
{
|
||||
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
|
||||
ty.make_hdl_type_expr(context)
|
||||
})
|
||||
}
|
||||
}
|
||||
Self::PhantomConstAccessor {
|
||||
attrs,
|
||||
options,
|
||||
get: (_get_kw, _get_paren, get_expr),
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
ty_is_dyn_size,
|
||||
semi_token,
|
||||
} => {
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target: _,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let span = ident.span();
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
let type_param_ident = &generics.type_param.ident;
|
||||
let syn_generics = Generics::from(generics);
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: syn_generics.clone(),
|
||||
eq_token: *eq_token,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
<#ty as ::fayalite::phantom_const::ReturnSelfUnchanged<#type_param_ident>>::Type
|
||||
},
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let generics_accumulation_ident =
|
||||
format_ident!("__{}__GenericsAccumulation", ident);
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
common_derives(span),
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: Token![struct](span),
|
||||
ident: generics_accumulation_ident.clone(),
|
||||
generics: Generics::default(),
|
||||
fields: Fields::Unnamed(parse_quote_spanned! {span=>
|
||||
(())
|
||||
}),
|
||||
semi_token: Some(Token![;](span)),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_upper_case_globals, dead_code)]
|
||||
#vis const #ident: #generics_accumulation_ident = #generics_accumulation_ident(());
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let (impl_generics, _type_generics, where_clause) = syn_generics.split_for_impl();
|
||||
let phantom_const_get_ty = &generics.type_param.phantom_const_get_bound.ty;
|
||||
let index_output = if let Some(ty_is_dyn_size) = ty_is_dyn_size {
|
||||
known_items::usize(ty_is_dyn_size.span).to_token_stream()
|
||||
} else {
|
||||
ty.to_token_stream()
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_upper_case_globals)]
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::ops::Index<#type_param_ident>
|
||||
for #generics_accumulation_ident
|
||||
#where_clause
|
||||
{
|
||||
type Output = #index_output;
|
||||
|
||||
fn index(&self, __param: #type_param_ident) -> &Self::Output {
|
||||
::fayalite::phantom_const::type_alias_phantom_const_get_helper::<#phantom_const_get_ty, #index_output>(
|
||||
__param,
|
||||
#get_expr,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result<TokenStream> {
|
||||
let item = ParsedTypeAlias::parse(item)?;
|
||||
let outline_generated = item.options.body.outline_generated;
|
||||
let outline_generated = match &item {
|
||||
ParsedTypeAlias::TypeAlias { options, .. }
|
||||
| ParsedTypeAlias::PhantomConstAccessor { options, .. } => options.body.outline_generated,
|
||||
};
|
||||
let mut contents = item.to_token_stream();
|
||||
if outline_generated.is_some() {
|
||||
contents = crate::outline_generated(contents, "hdl-type-alias-");
|
||||
|
|
|
|||
|
|
@ -1,21 +1,21 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{fold::impl_fold, kw, Errors, HdlAttr, PairsIterExt};
|
||||
use crate::{Errors, HdlAttr, PairsIterExt, fold::impl_fold, kw};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote_spanned, ToTokens};
|
||||
use quote::{ToTokens, format_ident, quote_spanned};
|
||||
use std::{collections::HashMap, fmt, mem};
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::{Brace, Bracket, Paren},
|
||||
AngleBracketedGenericArguments, Attribute, Block, ConstParam, Expr, ExprBlock, ExprGroup,
|
||||
ExprIndex, ExprParen, ExprPath, ExprTuple, Field, FieldMutability, Fields, FieldsNamed,
|
||||
FieldsUnnamed, GenericArgument, GenericParam, Generics, Ident, ImplGenerics, Index, ItemStruct,
|
||||
Path, PathArguments, PathSegment, PredicateType, QSelf, Stmt, Token, Turbofish, Type,
|
||||
TypeGenerics, TypeGroup, TypeParam, TypeParen, TypePath, TypeTuple, Visibility, WhereClause,
|
||||
WherePredicate,
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::{Brace, Bracket, Paren},
|
||||
};
|
||||
|
||||
crate::options! {
|
||||
|
|
@ -26,6 +26,8 @@ crate::options! {
|
|||
CustomBounds(custom_bounds),
|
||||
NoStatic(no_static),
|
||||
NoRuntimeGenerics(no_runtime_generics),
|
||||
CmpEq(cmp_eq),
|
||||
Get(get, Expr),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -298,7 +300,7 @@ impl ParseTypes<Expr> for ParsedExpr {
|
|||
return Ok(ParsedExpr::Delimited(ParsedExprDelimited {
|
||||
delim: ExprDelimiter::Group(*group_token),
|
||||
expr: parser.parse(expr)?,
|
||||
}))
|
||||
}));
|
||||
}
|
||||
Expr::Paren(ExprParen {
|
||||
attrs,
|
||||
|
|
@ -308,7 +310,7 @@ impl ParseTypes<Expr> for ParsedExpr {
|
|||
return Ok(ParsedExpr::Delimited(ParsedExprDelimited {
|
||||
delim: ExprDelimiter::Paren(*paren_token),
|
||||
expr: parser.parse(expr)?,
|
||||
}))
|
||||
}));
|
||||
}
|
||||
Expr::Path(ExprPath {
|
||||
attrs,
|
||||
|
|
@ -1901,8 +1903,8 @@ pub(crate) mod known_items {
|
|||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::ToTokens;
|
||||
use syn::{
|
||||
parse::{Parse, ParseStream},
|
||||
Path, PathArguments, PathSegment, Token,
|
||||
parse::{Parse, ParseStream},
|
||||
};
|
||||
|
||||
macro_rules! impl_known_item_body {
|
||||
|
|
@ -2044,6 +2046,8 @@ pub(crate) mod known_items {
|
|||
impl_known_item!(::fayalite::int::Size);
|
||||
impl_known_item!(::fayalite::int::UInt);
|
||||
impl_known_item!(::fayalite::int::UIntType);
|
||||
impl_known_item!(::fayalite::phantom_const::PhantomConstGet);
|
||||
impl_known_item!(::fayalite::reset::ResetType);
|
||||
impl_known_item!(::fayalite::ty::CanonicalType);
|
||||
impl_known_item!(::fayalite::ty::StaticType);
|
||||
impl_known_item!(::fayalite::ty::Type);
|
||||
|
|
@ -2068,11 +2072,16 @@ macro_rules! impl_bounds {
|
|||
$(
|
||||
$Variant:ident,
|
||||
)*
|
||||
$(
|
||||
#[unknown]
|
||||
$Unknown:ident,
|
||||
)?
|
||||
}
|
||||
) => {
|
||||
#[derive(Clone, Debug)]
|
||||
$vis enum $enum_type {
|
||||
$($Variant(known_items::$Variant),)*
|
||||
$($Unknown(syn::TypeParamBound),)?
|
||||
}
|
||||
|
||||
$(impl From<known_items::$Variant> for $enum_type {
|
||||
|
|
@ -2085,28 +2094,54 @@ macro_rules! impl_bounds {
|
|||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
$(Self::$Variant(v) => v.to_tokens(tokens),)*
|
||||
$(Self::$Unknown(v) => v.to_tokens(tokens),)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $enum_type {
|
||||
$vis fn parse_path(path: Path) -> Result<Self, Path> {
|
||||
#![allow(unreachable_code)]
|
||||
$(let path = match known_items::$Variant::parse_path(path) {
|
||||
Ok(v) => return Ok(Self::$Variant(v)),
|
||||
Err(path) => path,
|
||||
};)*
|
||||
$(return Ok(Self::$Unknown(syn::TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}.into()));)?
|
||||
Err(path)
|
||||
}
|
||||
$vis fn parse_type_param_bound(mut type_param_bound: syn::TypeParamBound) -> Result<Self, syn::TypeParamBound> {
|
||||
#![allow(unreachable_code)]
|
||||
if let syn::TypeParamBound::Trait(mut trait_bound) = type_param_bound {
|
||||
if let syn::TraitBound {
|
||||
paren_token: _,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path: _,
|
||||
} = trait_bound {
|
||||
match Self::parse_path(trait_bound.path) {
|
||||
Ok(retval) => return Ok(retval),
|
||||
Err(path) => trait_bound.path = path,
|
||||
}
|
||||
}
|
||||
type_param_bound = trait_bound.into();
|
||||
}
|
||||
$(return Ok(Self::$Unknown(type_param_bound));)?
|
||||
Err(type_param_bound)
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for $enum_type {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Self::parse_path(Path::parse_mod_style(input)?).map_err(|path| {
|
||||
syn::Error::new_spanned(
|
||||
path,
|
||||
Self::parse_type_param_bound(input.parse()?)
|
||||
.map_err(|type_param_bound| syn::Error::new_spanned(
|
||||
type_param_bound,
|
||||
format_args!("expected one of: {}", [$(stringify!($Variant)),*].join(", ")),
|
||||
)
|
||||
})
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2114,6 +2149,7 @@ macro_rules! impl_bounds {
|
|||
#[allow(non_snake_case)]
|
||||
$vis struct $struct_type {
|
||||
$($vis $Variant: Option<known_items::$Variant>,)*
|
||||
$($vis $Unknown: Vec<syn::TypeParamBound>,)?
|
||||
}
|
||||
|
||||
impl ToTokens for $struct_type {
|
||||
|
|
@ -2125,42 +2161,63 @@ macro_rules! impl_bounds {
|
|||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
$(for v in &self.$Unknown {
|
||||
separator.to_tokens(tokens);
|
||||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
}
|
||||
}
|
||||
|
||||
const _: () = {
|
||||
#[derive(Clone, Debug)]
|
||||
$vis struct Iter($vis $struct_type);
|
||||
#[allow(non_snake_case)]
|
||||
$vis struct Iter {
|
||||
$($Variant: Option<known_items::$Variant>,)*
|
||||
$($Unknown: std::vec::IntoIter<syn::TypeParamBound>,)?
|
||||
}
|
||||
|
||||
impl IntoIterator for $struct_type {
|
||||
type Item = $enum_type;
|
||||
type IntoIter = Iter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Iter(self)
|
||||
Iter {
|
||||
$($Variant: self.$Variant,)*
|
||||
$($Unknown: self.$Unknown.into_iter(),)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Iter {
|
||||
type Item = $enum_type;
|
||||
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
$(
|
||||
if let Some(value) = self.0.$Variant.take() {
|
||||
if let Some(value) = self.$Variant.take() {
|
||||
return Some($enum_type::$Variant(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$Unknown.next() {
|
||||
return Some($enum_type::$Unknown(value));
|
||||
}
|
||||
)?
|
||||
None
|
||||
}
|
||||
|
||||
#[allow(unused_mut, unused_variables)]
|
||||
fn fold<B, F: FnMut(B, Self::Item) -> B>(mut self, mut init: B, mut f: F) -> B {
|
||||
$(
|
||||
if let Some(value) = self.0.$Variant.take() {
|
||||
if let Some(value) = self.$Variant.take() {
|
||||
init = f(init, $enum_type::$Variant(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$Unknown.next() {
|
||||
init = f(init, $enum_type::$Unknown(value));
|
||||
}
|
||||
)?
|
||||
init
|
||||
}
|
||||
}
|
||||
|
|
@ -2172,6 +2229,9 @@ macro_rules! impl_bounds {
|
|||
$($enum_type::$Variant(v) => {
|
||||
self.$Variant = Some(v);
|
||||
})*
|
||||
$($enum_type::$Unknown(v) => {
|
||||
self.$Unknown.push(v);
|
||||
})?
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -2190,6 +2250,7 @@ macro_rules! impl_bounds {
|
|||
$(if let Some(v) = v.$Variant {
|
||||
self.$Variant = Some(v);
|
||||
})*
|
||||
$(self.$Unknown.extend(v.$Unknown);)*
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -2239,9 +2300,12 @@ impl_bounds! {
|
|||
EnumType,
|
||||
IntType,
|
||||
KnownSize,
|
||||
ResetType,
|
||||
Size,
|
||||
StaticType,
|
||||
Type,
|
||||
#[unknown]
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2252,8 +2316,11 @@ impl_bounds! {
|
|||
BundleType,
|
||||
EnumType,
|
||||
IntType,
|
||||
ResetType,
|
||||
StaticType,
|
||||
Type,
|
||||
#[unknown]
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2264,8 +2331,10 @@ impl From<ParsedTypeBound> for ParsedBound {
|
|||
ParsedTypeBound::BundleType(v) => ParsedBound::BundleType(v),
|
||||
ParsedTypeBound::EnumType(v) => ParsedBound::EnumType(v),
|
||||
ParsedTypeBound::IntType(v) => ParsedBound::IntType(v),
|
||||
ParsedTypeBound::ResetType(v) => ParsedBound::ResetType(v),
|
||||
ParsedTypeBound::StaticType(v) => ParsedBound::StaticType(v),
|
||||
ParsedTypeBound::Type(v) => ParsedBound::Type(v),
|
||||
ParsedTypeBound::Unknown(v) => ParsedBound::Unknown(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2277,8 +2346,10 @@ impl From<ParsedTypeBounds> for ParsedBounds {
|
|||
BundleType,
|
||||
EnumType,
|
||||
IntType,
|
||||
ResetType,
|
||||
StaticType,
|
||||
Type,
|
||||
Unknown,
|
||||
} = value;
|
||||
Self {
|
||||
BoolOrIntType,
|
||||
|
|
@ -2286,9 +2357,11 @@ impl From<ParsedTypeBounds> for ParsedBounds {
|
|||
EnumType,
|
||||
IntType,
|
||||
KnownSize: None,
|
||||
ResetType,
|
||||
Size: None,
|
||||
StaticType,
|
||||
Type,
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2314,11 +2387,17 @@ impl ParsedTypeBound {
|
|||
ParsedTypeBound::BoolOrIntType(known_items::BoolOrIntType(span)),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::ResetType(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::StaticType(known_items::StaticType(span)),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::StaticType(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::Type(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::from(v)]),
|
||||
Self::Unknown(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::Unknown(v)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2349,9 +2428,11 @@ impl From<ParsedSizeTypeBounds> for ParsedBounds {
|
|||
EnumType: None,
|
||||
IntType: None,
|
||||
KnownSize,
|
||||
ResetType: None,
|
||||
Size,
|
||||
StaticType: None,
|
||||
Type: None,
|
||||
Unknown: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2379,6 +2460,7 @@ impl ParsedBounds {
|
|||
fn categorize(self, errors: &mut Errors, span: Span) -> ParsedBoundsCategory {
|
||||
let mut type_bounds = None;
|
||||
let mut size_type_bounds = None;
|
||||
let mut unknown_bounds = vec![];
|
||||
self.into_iter().for_each(|bound| match bound.categorize() {
|
||||
ParsedBoundCategory::Type(bound) => {
|
||||
type_bounds
|
||||
|
|
@ -2390,15 +2472,37 @@ impl ParsedBounds {
|
|||
.get_or_insert_with(ParsedSizeTypeBounds::default)
|
||||
.extend([bound]);
|
||||
}
|
||||
ParsedBoundCategory::Unknown(bound) => unknown_bounds.push(bound),
|
||||
});
|
||||
match (type_bounds, size_type_bounds) {
|
||||
(None, None) => ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
match (type_bounds, size_type_bounds, unknown_bounds.is_empty()) {
|
||||
(None, None, true) => ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
Type: Some(known_items::Type(span)),
|
||||
..Default::default()
|
||||
}),
|
||||
(None, Some(bounds)) => ParsedBoundsCategory::SizeType(bounds),
|
||||
(Some(bounds), None) => ParsedBoundsCategory::Type(bounds),
|
||||
(Some(type_bounds), Some(size_type_bounds)) => {
|
||||
(None, None, false) => {
|
||||
errors.error(
|
||||
unknown_bounds.remove(0),
|
||||
"unknown bounds: must use at least one known bound (such as `Type`) with any unknown bounds",
|
||||
);
|
||||
ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
Unknown: unknown_bounds,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
(None, Some(bounds), true) => ParsedBoundsCategory::SizeType(bounds),
|
||||
(None, Some(bounds), false) => {
|
||||
// TODO: implement
|
||||
errors.error(
|
||||
unknown_bounds.remove(0),
|
||||
"unknown bounds with `Size` bounds are not implemented",
|
||||
);
|
||||
ParsedBoundsCategory::SizeType(bounds)
|
||||
}
|
||||
(Some(bounds), None, _) => ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
Unknown: unknown_bounds,
|
||||
..bounds
|
||||
}),
|
||||
(Some(type_bounds), Some(size_type_bounds), _) => {
|
||||
errors.error(
|
||||
size_type_bounds
|
||||
.Size
|
||||
|
|
@ -2415,6 +2519,7 @@ impl ParsedBounds {
|
|||
pub(crate) enum ParsedBoundCategory {
|
||||
Type(ParsedTypeBound),
|
||||
SizeType(ParsedSizeTypeBound),
|
||||
Unknown(syn::TypeParamBound),
|
||||
}
|
||||
|
||||
impl ParsedBound {
|
||||
|
|
@ -2425,15 +2530,18 @@ impl ParsedBound {
|
|||
Self::EnumType(v) => ParsedBoundCategory::Type(ParsedTypeBound::EnumType(v)),
|
||||
Self::IntType(v) => ParsedBoundCategory::Type(ParsedTypeBound::IntType(v)),
|
||||
Self::KnownSize(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::KnownSize(v)),
|
||||
Self::ResetType(v) => ParsedBoundCategory::Type(ParsedTypeBound::ResetType(v)),
|
||||
Self::Size(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::Size(v)),
|
||||
Self::StaticType(v) => ParsedBoundCategory::Type(ParsedTypeBound::StaticType(v)),
|
||||
Self::Type(v) => ParsedBoundCategory::Type(ParsedTypeBound::Type(v)),
|
||||
Self::Unknown(v) => ParsedBoundCategory::Unknown(v),
|
||||
}
|
||||
}
|
||||
fn implied_bounds(self) -> ParsedBounds {
|
||||
match self.categorize() {
|
||||
ParsedBoundCategory::Type(v) => v.implied_bounds().into(),
|
||||
ParsedBoundCategory::SizeType(v) => v.implied_bounds().into(),
|
||||
ParsedBoundCategory::Unknown(v) => ParsedBounds::from_iter([ParsedBound::Unknown(v)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -3310,8 +3418,9 @@ impl ParsedGenerics {
|
|||
ParsedTypeBound::BoolOrIntType(_)
|
||||
| ParsedTypeBound::BundleType(_)
|
||||
| ParsedTypeBound::EnumType(_)
|
||||
| ParsedTypeBound::IntType(_) => {
|
||||
errors.error(bound, "bound on mask type not implemented");
|
||||
| ParsedTypeBound::IntType(_)
|
||||
| ParsedTypeBound::ResetType(_) => {
|
||||
errors.error(bound, "bounds on mask types are not implemented");
|
||||
}
|
||||
ParsedTypeBound::StaticType(bound) => {
|
||||
if bounds.StaticType.is_none() {
|
||||
|
|
@ -3323,6 +3432,12 @@ impl ParsedGenerics {
|
|||
}
|
||||
}
|
||||
ParsedTypeBound::Type(_) => {}
|
||||
ParsedTypeBound::Unknown(_) => {
|
||||
errors.error(
|
||||
bound,
|
||||
"unknown bounds on mask types are not implemented",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
bounds.add_implied_bounds();
|
||||
|
|
@ -3648,7 +3763,10 @@ pub(crate) trait AsTurbofish {
|
|||
}
|
||||
|
||||
impl AsTurbofish for TypeGenerics<'_> {
|
||||
type Turbofish<'a> = Turbofish<'a> where Self: 'a;
|
||||
type Turbofish<'a>
|
||||
= Turbofish<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
fn as_turbofish(&self) -> Self::Turbofish<'_> {
|
||||
TypeGenerics::as_turbofish(self)
|
||||
|
|
@ -3656,7 +3774,8 @@ impl AsTurbofish for TypeGenerics<'_> {
|
|||
}
|
||||
|
||||
impl AsTurbofish for ParsedGenericsTypeGenerics<'_> {
|
||||
type Turbofish<'a> = ParsedGenericsTurbofish<'a>
|
||||
type Turbofish<'a>
|
||||
= ParsedGenericsTurbofish<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
@ -3707,15 +3826,18 @@ impl SplitForImpl for Generics {
|
|||
}
|
||||
|
||||
impl SplitForImpl for ParsedGenerics {
|
||||
type ImplGenerics<'a> = ParsedGenericsImplGenerics<'a>
|
||||
type ImplGenerics<'a>
|
||||
= ParsedGenericsImplGenerics<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
type TypeGenerics<'a> = ParsedGenericsTypeGenerics<'a>
|
||||
type TypeGenerics<'a>
|
||||
= ParsedGenericsTypeGenerics<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
type WhereClause<'a> = ParsedGenericsWhereClause<'a>
|
||||
type WhereClause<'a>
|
||||
= ParsedGenericsWhereClause<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
@ -3932,7 +4054,8 @@ impl<P: ToTokens, U: ToTokens> ToTokens for MaybeParsed<P, U> {
|
|||
}
|
||||
|
||||
impl<P: AsTurbofish, U: AsTurbofish> AsTurbofish for MaybeParsed<P, U> {
|
||||
type Turbofish<'a> = MaybeParsed<P::Turbofish<'a>, U::Turbofish<'a>>
|
||||
type Turbofish<'a>
|
||||
= MaybeParsed<P::Turbofish<'a>, U::Turbofish<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
@ -3945,13 +4068,16 @@ impl<P: AsTurbofish, U: AsTurbofish> AsTurbofish for MaybeParsed<P, U> {
|
|||
}
|
||||
|
||||
impl<P: SplitForImpl, U: SplitForImpl> SplitForImpl for MaybeParsed<P, U> {
|
||||
type ImplGenerics<'a> = MaybeParsed<P::ImplGenerics<'a>, U::ImplGenerics<'a>>
|
||||
type ImplGenerics<'a>
|
||||
= MaybeParsed<P::ImplGenerics<'a>, U::ImplGenerics<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
type TypeGenerics<'a> = MaybeParsed<P::TypeGenerics<'a>, U::TypeGenerics<'a>>
|
||||
type TypeGenerics<'a>
|
||||
= MaybeParsed<P::TypeGenerics<'a>, U::TypeGenerics<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
type WhereClause<'a> = MaybeParsed<P::WhereClause<'a>, U::WhereClause<'a>>
|
||||
type WhereClause<'a>
|
||||
= MaybeParsed<P::WhereClause<'a>, U::WhereClause<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
|
|||
|
|
@ -2,15 +2,20 @@
|
|||
// See Notices.txt for copyright information
|
||||
#![cfg_attr(test, recursion_limit = "512")]
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{quote, ToTokens};
|
||||
use std::io::{ErrorKind, Write};
|
||||
use quote::{ToTokens, quote};
|
||||
use std::{
|
||||
collections::{HashMap, hash_map::Entry},
|
||||
io::{ErrorKind, Write},
|
||||
};
|
||||
use syn::{
|
||||
bracketed, parenthesized,
|
||||
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token, bracketed,
|
||||
ext::IdentExt,
|
||||
parenthesized,
|
||||
parse::{Parse, ParseStream, Parser},
|
||||
parse_quote,
|
||||
punctuated::Pair,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
AttrStyle, Attribute, Error, Item, ItemFn, Token,
|
||||
token::{Bracket, Paren},
|
||||
};
|
||||
|
||||
mod fold;
|
||||
|
|
@ -19,6 +24,7 @@ mod hdl_enum;
|
|||
mod hdl_type_alias;
|
||||
mod hdl_type_common;
|
||||
mod module;
|
||||
mod process_cfg;
|
||||
|
||||
pub(crate) trait CustomToken:
|
||||
Copy
|
||||
|
|
@ -59,14 +65,22 @@ mod kw {
|
|||
};
|
||||
}
|
||||
|
||||
custom_keyword!(__evaluated_cfgs);
|
||||
custom_keyword!(add_platform_io);
|
||||
custom_keyword!(all);
|
||||
custom_keyword!(any);
|
||||
custom_keyword!(cfg);
|
||||
custom_keyword!(cfg_attr);
|
||||
custom_keyword!(clock_domain);
|
||||
custom_keyword!(cmp_eq);
|
||||
custom_keyword!(connect_inexact);
|
||||
custom_keyword!(custom_bounds);
|
||||
custom_keyword!(flip);
|
||||
custom_keyword!(get);
|
||||
custom_keyword!(hdl);
|
||||
custom_keyword!(hdl_module);
|
||||
custom_keyword!(input);
|
||||
custom_keyword!(incomplete_wire);
|
||||
custom_keyword!(input);
|
||||
custom_keyword!(instance);
|
||||
custom_keyword!(m);
|
||||
custom_keyword!(memory);
|
||||
|
|
@ -75,10 +89,12 @@ mod kw {
|
|||
custom_keyword!(no_reset);
|
||||
custom_keyword!(no_runtime_generics);
|
||||
custom_keyword!(no_static);
|
||||
custom_keyword!(not);
|
||||
custom_keyword!(outline_generated);
|
||||
custom_keyword!(output);
|
||||
custom_keyword!(reg_builder);
|
||||
custom_keyword!(reset);
|
||||
custom_keyword!(sim);
|
||||
custom_keyword!(skip);
|
||||
custom_keyword!(target);
|
||||
custom_keyword!(wire);
|
||||
|
|
@ -901,15 +917,346 @@ fn hdl_module_impl(item: ItemFn) -> syn::Result<TokenStream> {
|
|||
Ok(contents)
|
||||
}
|
||||
|
||||
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let kw = kw::hdl_module::default();
|
||||
hdl_module_impl(syn::parse2(quote! { #[#kw(#attr)] #item })?)
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) enum CfgExpr {
|
||||
Option {
|
||||
ident: Ident,
|
||||
value: Option<(Token![=], LitStr)>,
|
||||
},
|
||||
All {
|
||||
all: kw::all,
|
||||
paren: Paren,
|
||||
exprs: Punctuated<CfgExpr, Token![,]>,
|
||||
},
|
||||
Any {
|
||||
any: kw::any,
|
||||
paren: Paren,
|
||||
exprs: Punctuated<CfgExpr, Token![,]>,
|
||||
},
|
||||
Not {
|
||||
not: kw::not,
|
||||
paren: Paren,
|
||||
expr: Box<CfgExpr>,
|
||||
trailing_comma: Option<Token![,]>,
|
||||
},
|
||||
}
|
||||
|
||||
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
let kw = kw::hdl::default();
|
||||
let item = quote! { #[#kw(#attr)] #item };
|
||||
let item = syn::parse2::<Item>(item)?;
|
||||
impl Parse for CfgExpr {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
match input.cursor().ident() {
|
||||
Some((_, cursor)) if cursor.eof() => {
|
||||
return Ok(CfgExpr::Option {
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
value: None,
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
|
||||
return Ok(CfgExpr::Option {
|
||||
ident: input.call(Ident::parse_any)?,
|
||||
value: Some((input.parse()?, input.parse()?)),
|
||||
});
|
||||
}
|
||||
let contents;
|
||||
if input.peek(kw::all) {
|
||||
Ok(CfgExpr::All {
|
||||
all: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
exprs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
} else if input.peek(kw::any) {
|
||||
Ok(CfgExpr::Any {
|
||||
any: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
exprs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
} else if input.peek(kw::not) {
|
||||
Ok(CfgExpr::Not {
|
||||
not: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
trailing_comma: contents.parse()?,
|
||||
})
|
||||
} else {
|
||||
Err(input.error("expected cfg-pattern"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for CfgExpr {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
CfgExpr::Option { ident, value } => {
|
||||
ident.to_tokens(tokens);
|
||||
if let Some((eq, value)) = value {
|
||||
eq.to_tokens(tokens);
|
||||
value.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
CfgExpr::All { all, paren, exprs } => {
|
||||
all.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
|
||||
}
|
||||
CfgExpr::Any { any, paren, exprs } => {
|
||||
any.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
|
||||
}
|
||||
CfgExpr::Not {
|
||||
not,
|
||||
paren,
|
||||
expr,
|
||||
trailing_comma,
|
||||
} => {
|
||||
not.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| {
|
||||
expr.to_tokens(tokens);
|
||||
trailing_comma.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) struct Cfg {
|
||||
cfg: kw::cfg,
|
||||
paren: Paren,
|
||||
expr: CfgExpr,
|
||||
trailing_comma: Option<Token![,]>,
|
||||
}
|
||||
|
||||
impl Cfg {
|
||||
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
|
||||
syn::parse2(meta.to_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Cfg {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
cfg,
|
||||
paren,
|
||||
expr,
|
||||
trailing_comma,
|
||||
} = self;
|
||||
cfg.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| {
|
||||
expr.to_tokens(tokens);
|
||||
trailing_comma.to_tokens(tokens);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfg {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
Ok(Self {
|
||||
cfg: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
trailing_comma: contents.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub(crate) struct CfgAttr {
|
||||
cfg_attr: kw::cfg_attr,
|
||||
paren: Paren,
|
||||
expr: CfgExpr,
|
||||
comma: Token![,],
|
||||
attrs: Punctuated<Meta, Token![,]>,
|
||||
}
|
||||
|
||||
impl CfgAttr {
|
||||
pub(crate) fn to_cfg(&self) -> Cfg {
|
||||
Cfg {
|
||||
cfg: kw::cfg(self.cfg_attr.span),
|
||||
paren: self.paren,
|
||||
expr: self.expr.clone(),
|
||||
trailing_comma: None,
|
||||
}
|
||||
}
|
||||
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
|
||||
syn::parse2(meta.to_token_stream())
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for CfgAttr {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
Ok(Self {
|
||||
cfg_attr: input.parse()?,
|
||||
paren: parenthesized!(contents in input),
|
||||
expr: contents.parse()?,
|
||||
comma: contents.parse()?,
|
||||
attrs: contents.call(Punctuated::parse_terminated)?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct CfgAndValue {
|
||||
cfg: Cfg,
|
||||
eq_token: Token![=],
|
||||
value: LitBool,
|
||||
}
|
||||
|
||||
impl Parse for CfgAndValue {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Ok(Self {
|
||||
cfg: input.parse()?,
|
||||
eq_token: input.parse()?,
|
||||
value: input.parse()?,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct Cfgs<T> {
|
||||
pub(crate) bracket: Bracket,
|
||||
pub(crate) cfgs_map: HashMap<Cfg, T>,
|
||||
pub(crate) cfgs_list: Vec<Cfg>,
|
||||
}
|
||||
|
||||
impl<T> Default for Cfgs<T> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
bracket: Default::default(),
|
||||
cfgs_map: Default::default(),
|
||||
cfgs_list: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Cfgs<T> {
|
||||
fn insert_cfg(&mut self, cfg: Cfg, value: T) {
|
||||
match self.cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
self.cfgs_list.push(entry.key().clone());
|
||||
entry.insert(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfgs<bool> {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
let bracket = bracketed!(contents in input);
|
||||
let mut cfgs_map = HashMap::new();
|
||||
let mut cfgs_list = Vec::new();
|
||||
for CfgAndValue {
|
||||
cfg,
|
||||
eq_token,
|
||||
value,
|
||||
} in contents.call(Punctuated::<CfgAndValue, Token![,]>::parse_terminated)?
|
||||
{
|
||||
let _ = eq_token;
|
||||
match cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
cfgs_list.push(entry.key().clone());
|
||||
entry.insert(value.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
bracket,
|
||||
cfgs_map,
|
||||
cfgs_list,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for Cfgs<()> {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
let contents;
|
||||
let bracket = bracketed!(contents in input);
|
||||
let mut cfgs_map = HashMap::new();
|
||||
let mut cfgs_list = Vec::new();
|
||||
for cfg in contents.call(Punctuated::<Cfg, Token![,]>::parse_terminated)? {
|
||||
match cfgs_map.entry(cfg) {
|
||||
Entry::Occupied(_) => {}
|
||||
Entry::Vacant(entry) => {
|
||||
cfgs_list.push(entry.key().clone());
|
||||
entry.insert(());
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Self {
|
||||
bracket,
|
||||
cfgs_map,
|
||||
cfgs_list,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for Cfgs<()> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
bracket,
|
||||
cfgs_map: _,
|
||||
cfgs_list,
|
||||
} = self;
|
||||
bracket.surround(tokens, |tokens| {
|
||||
for cfg in cfgs_list {
|
||||
cfg.to_tokens(tokens);
|
||||
<Token![,]>::default().to_tokens(tokens);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
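Taken together, `CfgAttr`, `CfgAndValue`, and `Cfgs<T>` parse and re-emit a bracketed list of `cfg` clauses. The following is a minimal standalone sketch (using the `quote` crate; it does not touch the private types in this diff) of the two token shapes involved:

// Sketch only: the shape `Cfgs<()>::to_tokens` emits (no values, trailing comma per entry)
// and the shape `Cfgs<bool>::parse` accepts once every cfg has been evaluated to a bool.
use quote::quote;

fn main() {
    // re-emitted by `Cfgs<()>`: each cfg followed by a comma, the whole list bracketed
    let unevaluated = quote! { [cfg(feature = "foo"), cfg(test),] };
    // parsed by `Cfgs<bool>`: same list, but with `= true` / `= false` per entry
    let evaluated = quote! { [cfg(feature = "foo") = true, cfg(test) = false,] };
    println!("{unevaluated}");
    println!("{evaluated}");
}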
|
||||
fn hdl_main(
|
||||
kw: impl CustomToken,
|
||||
attr: TokenStream,
|
||||
item: TokenStream,
|
||||
) -> syn::Result<TokenStream> {
|
||||
fn parse_evaluated_cfgs_attr<R>(
|
||||
input: ParseStream,
|
||||
parse_inner: impl FnOnce(ParseStream) -> syn::Result<R>,
|
||||
) -> syn::Result<R> {
|
||||
let _: Token![#] = input.parse()?;
|
||||
let bracket_content;
|
||||
bracketed!(bracket_content in input);
|
||||
let _: kw::__evaluated_cfgs = bracket_content.parse()?;
|
||||
let paren_content;
|
||||
parenthesized!(paren_content in bracket_content);
|
||||
parse_inner(&paren_content)
|
||||
}
|
||||
let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2(
|
||||
|input: ParseStream| {
|
||||
let peek = input.fork();
|
||||
if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() {
|
||||
let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::<bool>::parse)?;
|
||||
Ok((Some(evaluated_cfgs), input.parse()?))
|
||||
} else {
|
||||
Ok((None, input.parse()?))
|
||||
}
|
||||
},
|
||||
item,
|
||||
)?;
|
||||
let cfgs = if let Some(cfgs) = evaluated_cfgs {
|
||||
cfgs
|
||||
} else {
|
||||
let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?;
|
||||
if cfgs.cfgs_list.is_empty() {
|
||||
Cfgs::default()
|
||||
} else {
|
||||
return Ok(quote! {
|
||||
::fayalite::__cfg_expansion_helper! {
|
||||
[]
|
||||
#cfgs
|
||||
{#[::fayalite::#kw(#attr)]} { #item }
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
let item = syn::parse2(quote! { #[#kw(#attr)] #item })?;
|
||||
let Some(item) = process_cfg::process_cfgs(item, cfgs)? else {
|
||||
return Ok(TokenStream::new());
|
||||
};
|
||||
match item {
|
||||
Item::Enum(item) => hdl_enum::hdl_enum(item),
|
||||
Item::Struct(item) => hdl_bundle::hdl_bundle(item),
|
||||
|
|
@ -921,3 +1268,11 @@ pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream
|
|||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
hdl_main(kw::hdl_module::default(), attr, item)
|
||||
}
|
||||
|
||||
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
|
||||
hdl_main(kw::hdl::default(), attr, item)
|
||||
}
|
||||
@ -1,19 +1,20 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
hdl_type_common::{ParsedGenerics, SplitForImpl},
|
||||
kw,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO},
|
||||
options, Errors, HdlAttr, PairsIterExt,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO},
|
||||
options,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use quote::{ToTokens, format_ident, quote, quote_spanned};
|
||||
use std::collections::HashSet;
|
||||
use syn::{
|
||||
parse_quote,
|
||||
visit::{visit_pat, Visit},
|
||||
Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct,
|
||||
LifetimeParam, ReturnType, Signature, TypeParam, Visibility, WhereClause, WherePredicate,
|
||||
parse_quote,
|
||||
visit::{Visit, visit_pat},
|
||||
};
|
||||
|
||||
mod transform_body;
|
||||
|
|
@ -38,7 +39,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res
|
|||
if name == "m" {
|
||||
Err(Error::new_spanned(
|
||||
name,
|
||||
"name conflicts with implicit `m: &mut ModuleBuilder<_>`",
|
||||
"name conflicts with implicit `m: &ModuleBuilder`",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
@ -66,7 +67,7 @@ struct ModuleFnModule {
|
|||
vis: Visibility,
|
||||
sig: Signature,
|
||||
block: Box<Block>,
|
||||
struct_generics: ParsedGenerics,
|
||||
struct_generics: Option<ParsedGenerics>,
|
||||
the_struct: TokenStream,
|
||||
}
|
||||
|
||||
|
|
@ -289,7 +290,7 @@ impl ModuleFn {
|
|||
paren_token,
|
||||
body,
|
||||
} => {
|
||||
debug_assert!(io.is_empty());
|
||||
debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty()));
|
||||
return Ok(Self(ModuleFnImpl::Fn {
|
||||
attrs,
|
||||
config_options: HdlAttr {
|
||||
|
|
@ -321,6 +322,21 @@ impl ModuleFn {
|
|||
body,
|
||||
},
|
||||
};
|
||||
let io = match io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => io,
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
return Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind: module_kind.unwrap(),
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics: None,
|
||||
the_struct: TokenStream::new(),
|
||||
})));
|
||||
}
|
||||
};
|
||||
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
|
||||
|
|
@ -363,7 +379,7 @@ impl ModuleFn {
|
|||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics,
|
||||
struct_generics: Some(struct_generics),
|
||||
the_struct,
|
||||
})))
|
||||
}
|
||||
|
|
@ -377,7 +393,7 @@ impl ModuleFn {
|
|||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
mut block,
|
||||
struct_generics,
|
||||
the_struct,
|
||||
} = match self.0 {
|
||||
|
|
@ -432,13 +448,24 @@ impl ModuleFn {
|
|||
ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal },
|
||||
};
|
||||
let fn_name = &outer_sig.ident;
|
||||
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_ty = quote! {#fn_name #struct_type_generics};
|
||||
let struct_ty = match struct_generics {
|
||||
Some(struct_generics) => {
|
||||
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
quote! {#fn_name #struct_type_generics}
|
||||
}
|
||||
None => quote! {::fayalite::bundle::Bundle},
|
||||
};
|
||||
body_sig.ident = parse_quote! {__body};
|
||||
body_sig
|
||||
.inputs
|
||||
.insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder });
|
||||
block.stmts.insert(
|
||||
0,
|
||||
parse_quote! {
|
||||
let _ = m;
|
||||
},
|
||||
);
|
||||
let body_fn = ItemFn {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
|
@ -1,36 +1,45 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
fold::{impl_fold, DoFold},
|
||||
Errors, HdlAttr,
|
||||
fold::{DoFold, impl_fold},
|
||||
hdl_type_common::{
|
||||
known_items, ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser,
|
||||
ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser, known_items,
|
||||
},
|
||||
is_hdl_attr, kw,
|
||||
module::{check_name_conflicts_with_module_builder, ModuleIO, ModuleIOKind, ModuleKind},
|
||||
options, Errors, HdlAttr,
|
||||
module::{ModuleIO, ModuleIOKind, ModuleKind, check_name_conflicts_with_module_builder},
|
||||
options,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use quote::{ToTokens, quote, quote_spanned};
|
||||
use std::{borrow::Borrow, convert::Infallible};
|
||||
use syn::{
|
||||
fold::{fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt, Fold},
|
||||
Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary,
|
||||
GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp,
|
||||
fold::{Fold, fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt},
|
||||
parenthesized,
|
||||
parse::{Nothing, Parse, ParseStream},
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
spanned::Spanned,
|
||||
token::Paren,
|
||||
Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary,
|
||||
GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp,
|
||||
};
|
||||
|
||||
mod expand_aggregate_literals;
|
||||
mod expand_match;
|
||||
|
||||
options! {
|
||||
#[options = ExprOptions]
|
||||
pub(crate) enum ExprOption {
|
||||
Sim(sim),
|
||||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum LetFnKind {
|
||||
Input(input),
|
||||
Output(output),
|
||||
AddPlatformIO(add_platform_io),
|
||||
Instance(instance),
|
||||
RegBuilder(reg_builder),
|
||||
Wire(wire),
|
||||
|
|
@ -208,6 +217,49 @@ impl HdlLetKindToTokens for HdlLetKindInstance {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct HdlLetKindAddPlatformIO {
|
||||
pub(crate) m: kw::m,
|
||||
pub(crate) dot_token: Token![.],
|
||||
pub(crate) add_platform_io: kw::add_platform_io,
|
||||
pub(crate) paren: Paren,
|
||||
pub(crate) platform_io_builder: Box<Expr>,
|
||||
}
|
||||
|
||||
impl ParseTypes<Self> for HdlLetKindAddPlatformIO {
|
||||
fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result<Self, ParseFailed> {
|
||||
Ok(input.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
struct HdlLetKindAddPlatformIO<> {
|
||||
m: kw::m,
|
||||
dot_token: Token![.],
|
||||
add_platform_io: kw::add_platform_io,
|
||||
paren: Paren,
|
||||
platform_io_builder: Box<Expr>,
|
||||
}
|
||||
}
|
||||
|
||||
impl HdlLetKindToTokens for HdlLetKindAddPlatformIO {
|
||||
fn ty_to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
|
||||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren,
|
||||
platform_io_builder,
|
||||
} = self;
|
||||
m.to_tokens(tokens);
|
||||
dot_token.to_tokens(tokens);
|
||||
add_platform_io.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| platform_io_builder.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct RegBuilderClockDomain {
|
||||
pub(crate) dot_token: Token![.],
|
||||
|
|
@ -703,6 +755,7 @@ impl HdlLetKindMemory {
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
AddPlatformIO(HdlLetKindAddPlatformIO),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
|
|
@ -713,6 +766,7 @@ pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
|||
impl_fold! {
|
||||
enum HdlLetKind<IOType,> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
AddPlatformIO(HdlLetKindAddPlatformIO),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
|
|
@ -728,6 +782,9 @@ impl<T: ParseTypes<I>, I> ParseTypes<HdlLetKind<I>> for HdlLetKind<T> {
|
|||
) -> Result<Self, ParseFailed> {
|
||||
match input {
|
||||
HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO),
|
||||
HdlLetKind::AddPlatformIO(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::AddPlatformIO)
|
||||
}
|
||||
HdlLetKind::Incomplete(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete)
|
||||
}
|
||||
|
|
@ -853,6 +910,23 @@ impl HdlLetKindParse for HdlLetKind<Type> {
|
|||
ModuleIOKind::Output(output),
|
||||
)
|
||||
.map(Self::IO),
|
||||
LetFnKind::AddPlatformIO((add_platform_io,)) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
parsed_ty.1,
|
||||
"type annotation not allowed for instance",
|
||||
));
|
||||
}
|
||||
let (m, dot_token) = unwrap_m_dot(m_dot, kind)?;
|
||||
let paren_contents;
|
||||
Ok(Self::AddPlatformIO(HdlLetKindAddPlatformIO {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren: parenthesized!(paren_contents in input),
|
||||
platform_io_builder: paren_contents.call(parse_single_fn_arg)?,
|
||||
}))
|
||||
}
|
||||
LetFnKind::Instance((instance,)) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
|
|
@ -928,6 +1002,7 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn ty_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::AddPlatformIO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens),
|
||||
|
|
@ -939,6 +1014,7 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::AddPlatformIO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens),
|
||||
|
|
@ -952,7 +1028,7 @@ with_debug_clone_and_fold! {
|
|||
#[allow(dead_code)]
|
||||
pub(crate) struct HdlLet<Kind = HdlLetKind> {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
pub(crate) hdl_attr: HdlAttr<syn::parse::Nothing, kw::hdl>,
|
||||
pub(crate) let_token: Token![let],
|
||||
pub(crate) mut_token: Option<Token![mut]>,
|
||||
pub(crate) name: Ident,
|
||||
|
|
@ -1109,7 +1185,7 @@ fn parse_quote_let_pat<T, R: ToTokens, C: Borrow<Token![:]>>(
|
|||
}
|
||||
}
|
||||
|
||||
fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
pub(crate) fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
parse_quote_spanned! {ty.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
}
|
||||
|
|
@ -1141,7 +1217,7 @@ impl<T: ToString> ToTokens for ImplicitName<T> {
|
|||
struct Visitor<'a> {
|
||||
module_kind: Option<ModuleKind>,
|
||||
errors: Errors,
|
||||
io: Vec<ModuleIO>,
|
||||
io: ModuleIOOrAddPlatformIO,
|
||||
block_depth: usize,
|
||||
parsed_generics: &'a ParsedGenerics,
|
||||
}
|
||||
|
|
@ -1173,7 +1249,7 @@ impl Visitor<'_> {
|
|||
Some(_) => {}
|
||||
}
|
||||
}
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<Nothing, kw::hdl>, expr_if: ExprIf) -> Expr {
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<ExprOptions, kw::hdl>, expr_if: ExprIf) -> Expr {
|
||||
let ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
|
|
@ -1181,10 +1257,10 @@ impl Visitor<'_> {
|
|||
then_branch,
|
||||
else_branch,
|
||||
} = expr_if;
|
||||
self.require_normal_module_or_fn(if_token);
|
||||
let else_expr = else_branch.unzip().1.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => self.process_hdl_if(hdl_attr.clone(), expr_if),
|
||||
expr => expr,
|
||||
let (else_token, else_expr) = else_branch.unzip();
|
||||
let else_expr = else_expr.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => Box::new(self.process_hdl_if(hdl_attr.clone(), expr_if)),
|
||||
_ => else_expr,
|
||||
});
|
||||
if let Expr::Let(ExprLet {
|
||||
attrs: let_attrs,
|
||||
|
|
@ -1206,7 +1282,19 @@ impl Visitor<'_> {
|
|||
},
|
||||
);
|
||||
}
|
||||
if let Some(else_expr) = else_expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
cond: parse_quote_spanned! {if_token.span=>
|
||||
*::fayalite::sim::value::SimValue::<::fayalite::int::Bool>::value(&::fayalite::sim::value::ToSimValue::into_sim_value(#cond))
|
||||
},
|
||||
then_branch,
|
||||
else_branch: else_token.zip(else_expr),
|
||||
}
|
||||
.into()
|
||||
} else if let Some(else_expr) = else_expr {
|
||||
parse_quote_spanned! {if_token.span=>
|
||||
#(#attrs)*
|
||||
{
|
||||
|
|
@ -1269,7 +1357,81 @@ impl Visitor<'_> {
|
|||
}),
|
||||
semi_token: hdl_let.semi_token,
|
||||
};
|
||||
self.io.push(hdl_let);
|
||||
match &mut self.io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => io.push(hdl_let),
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
self.errors.error(
|
||||
kind,
|
||||
"can't have other inputs/outputs in a module using m.add_platform_io()",
|
||||
);
|
||||
}
|
||||
}
|
||||
let_stmt
|
||||
}
|
||||
fn process_hdl_let_add_platform_io(
|
||||
&mut self,
|
||||
hdl_let: HdlLet<HdlLetKindAddPlatformIO>,
|
||||
) -> Local {
|
||||
let HdlLet {
|
||||
mut attrs,
|
||||
hdl_attr: _,
|
||||
let_token,
|
||||
mut_token,
|
||||
ref name,
|
||||
eq_token,
|
||||
kind:
|
||||
HdlLetKindAddPlatformIO {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren,
|
||||
platform_io_builder,
|
||||
},
|
||||
semi_token,
|
||||
} = hdl_let;
|
||||
let mut expr = quote! {#m #dot_token #add_platform_io};
|
||||
paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
name,
|
||||
span: name.span(),
|
||||
};
|
||||
quote_spanned! {name.span()=>
|
||||
#name_str, #platform_io_builder
|
||||
}
|
||||
.to_tokens(expr);
|
||||
});
|
||||
self.require_module(add_platform_io);
|
||||
attrs.push(parse_quote_spanned! {let_token.span=>
|
||||
#[allow(unused_variables)]
|
||||
});
|
||||
let let_stmt = Local {
|
||||
attrs,
|
||||
let_token,
|
||||
pat: parse_quote! { #mut_token #name },
|
||||
init: Some(LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote! { #expr },
|
||||
diverge: None,
|
||||
}),
|
||||
semi_token,
|
||||
};
|
||||
match &mut self.io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => {
|
||||
for io in io {
|
||||
self.errors.error(
|
||||
io.kind.kind,
|
||||
"can't have other inputs/outputs in a module using m.add_platform_io()",
|
||||
);
|
||||
}
|
||||
}
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
self.errors.error(
|
||||
add_platform_io,
|
||||
"can't use m.add_platform_io() more than once in a single module",
|
||||
);
|
||||
}
|
||||
}
|
||||
self.io = ModuleIOOrAddPlatformIO::AddPlatformIO;
|
||||
let_stmt
|
||||
}
|
||||
fn process_hdl_let_instance(&mut self, hdl_let: HdlLet<HdlLetKindInstance>) -> Local {
|
||||
|
|
@ -1490,6 +1652,7 @@ impl Visitor<'_> {
|
|||
}
|
||||
the_match! {
|
||||
IO => process_hdl_let_io,
|
||||
AddPlatformIO => process_hdl_let_add_platform_io,
|
||||
Incomplete => process_hdl_let_incomplete,
|
||||
Instance => process_hdl_let_instance,
|
||||
RegBuilder => process_hdl_let_reg_builder,
|
||||
|
|
@ -1586,7 +1749,7 @@ impl Visitor<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
fn empty_let() -> Local {
|
||||
pub(crate) fn empty_let() -> Local {
|
||||
Local {
|
||||
attrs: vec![],
|
||||
let_token: Default::default(),
|
||||
|
|
@ -1668,20 +1831,42 @@ impl Fold for Visitor<'_> {
|
|||
Repeat => process_hdl_repeat,
|
||||
Struct => process_hdl_struct,
|
||||
Tuple => process_hdl_tuple,
|
||||
MethodCall => process_hdl_method_call,
|
||||
Call => process_hdl_call,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_local(&mut self, let_stmt: Local) -> Local {
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
match self
|
||||
.errors
|
||||
.ok(HdlAttr::<Nothing, kw::hdl>::parse_and_leave_attr(
|
||||
.ok(HdlAttr::<ExprOptions, kw::hdl>::parse_and_leave_attr(
|
||||
&let_stmt.attrs,
|
||||
)) {
|
||||
None => return empty_let(),
|
||||
Some(None) => return fold_local(self, let_stmt),
|
||||
Some(Some(HdlAttr { .. })) => {}
|
||||
};
|
||||
let mut pat = &let_stmt.pat;
|
||||
if let Pat::Type(pat_type) = pat {
|
||||
pat = &pat_type.pat;
|
||||
}
|
||||
let Pat::Ident(syn::PatIdent {
|
||||
attrs: _,
|
||||
by_ref: None,
|
||||
mutability: _,
|
||||
ident: _,
|
||||
subpat: None,
|
||||
}) = pat
|
||||
else {
|
||||
let hdl_attr =
|
||||
HdlAttr::<ExprOptions, kw::hdl>::parse_and_take_attr(&mut let_stmt.attrs)
|
||||
.ok()
|
||||
.flatten()
|
||||
.expect("already checked above");
|
||||
let let_stmt = fold_local(self, let_stmt);
|
||||
return self.process_hdl_let_pat(hdl_attr, let_stmt);
|
||||
};
|
||||
let hdl_let = syn::parse2::<HdlLet<HdlLetKind<Type>>>(let_stmt.into_token_stream());
|
||||
let Some(hdl_let) = self.errors.ok(hdl_let) else {
|
||||
return empty_let();
|
||||
|
|
@ -1711,15 +1896,20 @@ impl Fold for Visitor<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) enum ModuleIOOrAddPlatformIO {
|
||||
ModuleIO(Vec<ModuleIO>),
|
||||
AddPlatformIO,
|
||||
}
|
||||
|
||||
pub(crate) fn transform_body(
|
||||
module_kind: Option<ModuleKind>,
|
||||
mut body: Box<Block>,
|
||||
parsed_generics: &ParsedGenerics,
|
||||
) -> syn::Result<(Box<Block>, Vec<ModuleIO>)> {
|
||||
) -> syn::Result<(Box<Block>, ModuleIOOrAddPlatformIO)> {
|
||||
let mut visitor = Visitor {
|
||||
module_kind,
|
||||
errors: Errors::new(),
|
||||
io: vec![],
|
||||
io: ModuleIOOrAddPlatformIO::ModuleIO(vec![]),
|
||||
block_depth: 0,
|
||||
parsed_generics,
|
||||
};
|
||||
|
@ -1,45 +1,102 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{kw, module::transform_body::Visitor, HdlAttr};
|
||||
|
||||
use crate::{
|
||||
HdlAttr, kw,
|
||||
module::transform_body::{
|
||||
ExprOptions, Visitor,
|
||||
expand_match::{EnumPath, parse_enum_path},
|
||||
},
|
||||
};
|
||||
use quote::{format_ident, quote_spanned};
|
||||
use std::mem;
|
||||
use syn::{
|
||||
parse::Nothing, parse_quote, parse_quote_spanned, spanned::Spanned, Expr, ExprArray, ExprPath,
|
||||
ExprRepeat, ExprStruct, ExprTuple, FieldValue, TypePath,
|
||||
Expr, ExprArray, ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat,
|
||||
ExprStruct, ExprTuple, FieldValue, Token, TypePath, parse_quote_spanned,
|
||||
punctuated::Punctuated, spanned::Spanned, token::Paren,
|
||||
};
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_array(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_array: ExprArray,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
if sim.is_some() {
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array)
|
||||
}
|
||||
} else {
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_array)
|
||||
}
|
||||
}
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)}
|
||||
}
|
||||
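For reference, these are the two expansions `process_hdl_array` chooses between, written out for a two-element array. This is a standalone sketch using `quote`; the identifiers `a` and `b` are placeholders for illustration, not anything produced by the macro itself:

// Sketch only: `#[hdl] [a, b]` vs `#[hdl(sim)] [a, b]`, per the branches above.
use quote::quote;

fn main() {
    // without `sim`: every element goes through ToExpr, then the whole array does too
    let hw = quote! {
        ::fayalite::expr::ToExpr::to_expr(&[
            ::fayalite::expr::ToExpr::to_expr(&(a)),
            ::fayalite::expr::ToExpr::to_expr(&(b)),
        ])
    };
    // with `sim`: elements become sim values and the array is converted with into_sim_value
    let sim = quote! {
        ::fayalite::sim::value::ToSimValue::into_sim_value([
            ::fayalite::sim::value::ToSimValue::to_sim_value(&(a)),
            ::fayalite::sim::value::ToSimValue::to_sim_value(&(b)),
        ])
    };
    println!("{hw}");
    println!("{sim}");
}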
pub(crate) fn process_hdl_repeat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_repeat: ExprRepeat,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
let repeated_value = &expr_repeat.expr;
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
};
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_repeat)}
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
if sim.is_some() {
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value))
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat)
|
||||
}
|
||||
} else {
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_repeat)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_struct(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
expr_struct: ExprStruct,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_struct: ExprStruct,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(&hdl_attr);
|
||||
let name_span = expr_struct.path.segments.last().unwrap().ident.span();
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
let ty_path = TypePath {
|
||||
qself: expr_struct.qself.take(),
|
||||
path: expr_struct.path,
|
||||
};
|
||||
expr_struct.path = parse_quote_spanned! {name_span=>
|
||||
__SimValue::<#ty_path>
|
||||
};
|
||||
for field in &mut expr_struct.fields {
|
||||
let expr = &field.expr;
|
||||
field.expr = parse_quote_spanned! {field.member.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr))
|
||||
};
|
||||
}
|
||||
return parse_quote_spanned! {name_span=>
|
||||
{
|
||||
type __SimValue<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct);
|
||||
value
|
||||
}
|
||||
};
|
||||
}
|
||||
let builder_ident = format_ident!("__builder", span = name_span);
|
||||
let empty_builder = if expr_struct.qself.is_some()
|
||||
|| expr_struct
|
||||
|
|
@ -91,12 +148,126 @@ impl Visitor<'_> {
|
|||
}
|
||||
pub(crate) fn process_hdl_tuple(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
expr_tuple: ExprTuple,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_tuple: ExprTuple,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
for element in &mut expr_tuple.elems {
|
||||
*element = parse_quote_spanned! {element.span()=>
|
||||
&(#element)
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple)
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_call: ExprCall,
|
||||
) -> Expr {
|
||||
let span = hdl_attr.kw.span;
|
||||
let mut func = &mut *expr_call.func;
|
||||
let EnumPath {
|
||||
variant_path: _,
|
||||
enum_path,
|
||||
variant_name,
|
||||
} = loop {
|
||||
match func {
|
||||
Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. }) => {
|
||||
func = &mut **expr;
|
||||
}
|
||||
Expr::Path(_) => {
|
||||
let Expr::Path(ExprPath { attrs, qself, path }) =
|
||||
mem::replace(func, Expr::PLACEHOLDER)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
match parse_enum_path(TypePath { qself, path }) {
|
||||
Ok(path) => break path,
|
||||
Err(path) => {
|
||||
self.errors.error(&path, "unsupported enum variant path");
|
||||
let TypePath { qself, path } = path;
|
||||
*func = ExprPath { attrs, qself, path }.into();
|
||||
return expr_call.into();
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.errors.error(
|
||||
&expr_call.func,
|
||||
"#[hdl] function call -- function must be a possibly-parenthesized path",
|
||||
);
|
||||
return expr_call.into();
|
||||
}
|
||||
}
|
||||
};
|
||||
self.process_hdl_method_call(
|
||||
hdl_attr,
|
||||
ExprMethodCall {
|
||||
attrs: expr_call.attrs,
|
||||
receiver: parse_quote_spanned! {span=>
|
||||
<#enum_path as ::fayalite::ty::StaticType>::TYPE
|
||||
},
|
||||
dot_token: Token![.](span),
|
||||
method: variant_name,
|
||||
turbofish: None,
|
||||
paren_token: expr_call.paren_token,
|
||||
args: expr_call.args,
|
||||
},
|
||||
)
|
||||
}
|
||||
pub(crate) fn process_hdl_method_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_method_call: ExprMethodCall,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
// remove any number of groups and up to one paren
|
||||
let mut receiver = &mut *expr_method_call.receiver;
|
||||
let mut has_group = false;
|
||||
let receiver = loop {
|
||||
match receiver {
|
||||
Expr::Group(ExprGroup { expr, .. }) => {
|
||||
has_group = true;
|
||||
receiver = expr;
|
||||
}
|
||||
Expr::Paren(ExprParen { expr, .. }) => break &mut **expr,
|
||||
receiver @ Expr::Path(_) => break receiver,
|
||||
_ => {
|
||||
if !has_group {
|
||||
self.errors.error(
|
||||
&expr_method_call.receiver,
|
||||
"#[hdl] on a method call needs parenthesized receiver",
|
||||
);
|
||||
}
|
||||
break &mut *expr_method_call.receiver;
|
||||
}
|
||||
}
|
||||
};
|
||||
let func = if sim.is_some() {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::enum_type_to_sim_builder
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::assert_is_enum_type
|
||||
}
|
||||
};
|
||||
*expr_method_call.receiver = ExprCall {
|
||||
attrs: vec![],
|
||||
func,
|
||||
paren_token: Paren(span),
|
||||
args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]),
|
||||
}
|
||||
.into();
|
||||
expr_method_call.into()
|
||||
}
|
||||
}
|
||||
@ -1,24 +1,121 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
fold::{impl_fold, DoFold},
|
||||
kw,
|
||||
module::transform_body::{with_debug_clone_and_fold, Visitor},
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
fold::{DoFold, impl_fold},
|
||||
kw,
|
||||
module::transform_body::{
|
||||
ExprOptions, Visitor, empty_let, with_debug_clone_and_fold, wrap_ty_with_expr,
|
||||
},
|
||||
};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
|
||||
use quote::{ToTokens, TokenStreamExt, format_ident, quote_spanned};
|
||||
use std::collections::BTreeSet;
|
||||
use syn::{
|
||||
fold::{fold_arm, fold_expr_match, fold_pat, Fold},
|
||||
parse::Nothing,
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Local, Member, Pat, PatIdent, PatOr,
|
||||
PatParen, PatPath, PatRest, PatStruct, PatTuple, PatTupleStruct, PatWild, Path, PathSegment,
|
||||
Token, TypePath,
|
||||
fold::{Fold, fold_arm, fold_expr_match, fold_local, fold_pat},
|
||||
parse_quote_spanned,
|
||||
punctuated::Punctuated,
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren},
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Member, Pat, PatIdent, PatOr, PatParen,
|
||||
PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, PathSegment, Token, TypePath,
|
||||
};
|
||||
|
||||
macro_rules! visit_trait {
|
||||
(
|
||||
$($vis:vis fn $fn:ident($state:ident: _, $value:ident: &$Value:ty) $block:block)*
|
||||
) => {
|
||||
trait VisitMatchPat<'a> {
|
||||
$(fn $fn(&mut self, $value: &'a $Value) {
|
||||
$fn(self, $value);
|
||||
})*
|
||||
}
|
||||
|
||||
$($vis fn $fn<'a>($state: &mut (impl ?Sized + VisitMatchPat<'a>), $value: &'a $Value) $block)*
|
||||
};
|
||||
}
|
||||
|
||||
visit_trait! {
|
||||
fn visit_match_pat_binding(_state: _, v: &MatchPatBinding) {
|
||||
let MatchPatBinding { ident: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_wild(_state: _, v: &MatchPatWild) {
|
||||
let MatchPatWild { underscore_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_rest(_state: _, v: &MatchPatRest) {
|
||||
let MatchPatRest { dot2_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_paren(state: _, v: &MatchPatParen<MatchPat>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat(pat);
|
||||
}
|
||||
fn visit_match_pat_paren_simple(state: _, v: &MatchPatParen<MatchPatSimple>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_or(state: _, v: &MatchPatOr<MatchPat>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_or_simple(state: _, v: &MatchPatOr<MatchPatSimple>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_struct_field(state: _, v: &MatchPatStructField) {
|
||||
let MatchPatStructField { field_name: _, colon_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_struct(state: _, v: &MatchPatStruct) {
|
||||
let MatchPatStruct { match_span: _, path: _, brace_token: _, fields, rest: _ } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_struct_field(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_tuple(state: _, v: &MatchPatTuple) {
|
||||
let MatchPatTuple { paren_token: _, fields } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_enum_variant(state: _, v: &MatchPatEnumVariant) {
|
||||
let MatchPatEnumVariant {
|
||||
match_span: _,
|
||||
sim: _,
|
||||
variant_path: _,
|
||||
enum_path: _,
|
||||
variant_name: _,
|
||||
field,
|
||||
} = v;
|
||||
if let Some((_, v)) = field {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_simple(state: _, v: &MatchPatSimple) {
|
||||
match v {
|
||||
MatchPatSimple::Paren(v) => state.visit_match_pat_paren_simple(v),
|
||||
MatchPatSimple::Or(v) => state.visit_match_pat_or_simple(v),
|
||||
MatchPatSimple::Binding(v) => state.visit_match_pat_binding(v),
|
||||
MatchPatSimple::Wild(v) => state.visit_match_pat_wild(v),
|
||||
MatchPatSimple::Rest(v) => state.visit_match_pat_rest(v),
|
||||
}
|
||||
}
|
||||
fn visit_match_pat(state: _, v: &MatchPat) {
|
||||
match v {
|
||||
MatchPat::Simple(v) => state.visit_match_pat_simple(v),
|
||||
MatchPat::Or(v) => state.visit_match_pat_or(v),
|
||||
MatchPat::Paren(v) => state.visit_match_pat_paren(v),
|
||||
MatchPat::Struct(v) => state.visit_match_pat_struct(v),
|
||||
MatchPat::Tuple(v) => state.visit_match_pat_tuple(v),
|
||||
MatchPat::EnumVariant(v) => state.visit_match_pat_enum_variant(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatBinding<> {
|
||||
ident: Ident,
|
||||
|
|
@ -53,6 +150,15 @@ with_debug_clone_and_fold! {
|
|||
}
|
||||
}
|
||||
|
||||
impl<P> MatchPatOr<P> {
|
||||
/// returns the first `|` between two patterns
|
||||
fn first_inner_vert(&self) -> Option<Token![|]> {
|
||||
let mut pairs = self.cases.pairs();
|
||||
pairs.next_back();
|
||||
pairs.next().and_then(|v| v.into_tuple().1.copied())
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ToTokens> ToTokens for MatchPatOr<P> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
|
|
@ -77,6 +183,19 @@ impl ToTokens for MatchPatWild {
|
|||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatRest<> {
|
||||
dot2_token: Token![..],
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatRest {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { dot2_token } = self;
|
||||
dot2_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatStructField<> {
|
||||
field_name: Ident,
|
||||
|
|
@ -159,9 +278,29 @@ impl ToTokens for MatchPatStruct {
|
|||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatTuple<> {
|
||||
paren_token: Paren,
|
||||
fields: Punctuated<MatchPatSimple, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
paren_token,
|
||||
fields,
|
||||
} = self;
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
fields.to_tokens(tokens);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatEnumVariant<> {
|
||||
match_span: Span,
|
||||
sim: Option<(kw::sim,)>,
|
||||
variant_path: Path,
|
||||
enum_path: Path,
|
||||
variant_name: Ident,
|
||||
|
|
@ -173,6 +312,7 @@ impl ToTokens for MatchPatEnumVariant {
|
|||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
match_span,
|
||||
sim,
|
||||
variant_path: _,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -182,7 +322,28 @@ impl ToTokens for MatchPatEnumVariant {
|
|||
__MatchTy::<#enum_path>::#variant_name
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let Some((paren_token, field)) = field {
|
||||
if sim.is_some() {
|
||||
if let Some((paren_token, field)) = field {
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
field.to_tokens(tokens);
|
||||
match field {
|
||||
MatchPatSimple::Paren(_)
|
||||
| MatchPatSimple::Or(_)
|
||||
| MatchPatSimple::Binding(_)
|
||||
| MatchPatSimple::Wild(_) => quote_spanned! {*match_span=>
|
||||
, _
|
||||
}
|
||||
.to_tokens(tokens),
|
||||
MatchPatSimple::Rest(_) => {}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
quote_spanned! {*match_span=>
|
||||
(_)
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
} else if let Some((paren_token, field)) = field {
|
||||
paren_token.surround(tokens, |tokens| field.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
|
|
@ -194,6 +355,7 @@ enum MatchPatSimple {
|
|||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
|
|
@ -202,6 +364,7 @@ impl_fold! {
|
|||
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -212,17 +375,18 @@ impl ToTokens for MatchPatSimple {
|
|||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Binding(v) => v.to_tokens(tokens),
|
||||
Self::Wild(v) => v.to_tokens(tokens),
|
||||
Self::Rest(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct EnumPath {
|
||||
variant_path: Path,
|
||||
enum_path: Path,
|
||||
variant_name: Ident,
|
||||
pub(crate) struct EnumPath {
|
||||
pub(crate) variant_path: Path,
|
||||
pub(crate) enum_path: Path,
|
||||
pub(crate) variant_name: Ident,
|
||||
}
|
||||
|
||||
fn parse_enum_path(variant_path: TypePath) -> Result<EnumPath, TypePath> {
|
||||
pub(crate) fn parse_enum_path(variant_path: TypePath) -> Result<EnumPath, TypePath> {
|
||||
let TypePath {
|
||||
qself: None,
|
||||
path: variant_path,
|
||||
|
|
@ -278,8 +442,9 @@ trait ParseMatchPat: Sized {
|
|||
fn or(v: MatchPatOr<Self>) -> Self;
|
||||
fn paren(v: MatchPatParen<Self>) -> Self;
|
||||
fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result<Self, ()>;
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()>;
|
||||
fn enum_variant(state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant)
|
||||
-> Result<Self, ()>;
|
||||
-> Result<Self, ()>;
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result<Self, ()> {
|
||||
match pat {
|
||||
Pat::Ident(PatIdent {
|
||||
|
|
@ -313,6 +478,7 @@ trait ParseMatchPat: Sized {
|
|||
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -359,6 +525,7 @@ trait ParseMatchPat: Sized {
|
|||
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -443,6 +610,7 @@ trait ParseMatchPat: Sized {
|
|||
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
|
|
@ -462,7 +630,34 @@ trait ParseMatchPat: Sized {
|
|||
}) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild {
|
||||
underscore_token,
|
||||
}))),
|
||||
Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
Pat::Tuple(PatTuple {
|
||||
attrs: _,
|
||||
paren_token,
|
||||
elems,
|
||||
}) => {
|
||||
let fields = elems
|
||||
.into_pairs()
|
||||
.filter_map_pair_value(|field_pat| {
|
||||
if let Pat::Rest(PatRest {
|
||||
attrs: _,
|
||||
dot2_token,
|
||||
}) = field_pat
|
||||
{
|
||||
Some(MatchPatSimple::Rest(MatchPatRest { dot2_token }))
|
||||
} else {
|
||||
MatchPatSimple::parse(state, field_pat).ok()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
Self::tuple(
|
||||
state,
|
||||
MatchPatTuple {
|
||||
paren_token,
|
||||
fields,
|
||||
},
|
||||
)
|
||||
}
|
||||
Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
state
|
||||
.errors
|
||||
.error(pat, "not yet implemented in #[hdl] patterns");
|
||||
|
|
@ -497,6 +692,14 @@ impl ParseMatchPat for MatchPatSimple {
|
|||
Err(())
|
||||
}
|
||||
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
state.errors.push(syn::Error::new(
|
||||
v.paren_token.span.open(),
|
||||
"matching tuples is not yet implemented inside structs/enums in #[hdl] patterns",
|
||||
));
|
||||
Err(())
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
|
|
@ -515,6 +718,7 @@ enum MatchPat {
|
|||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
|
||||
|
|
@ -524,6 +728,7 @@ impl_fold! {
|
|||
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
}
|
||||
|
|
@ -545,6 +750,10 @@ impl ParseMatchPat for MatchPat {
|
|||
Ok(Self::Struct(v))
|
||||
}
|
||||
|
||||
fn tuple(_state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
Ok(Self::Tuple(v))
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
_state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
|
|
@ -560,6 +769,7 @@ impl ToTokens for MatchPat {
|
|||
Self::Or(v) => v.to_tokens(tokens),
|
||||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Struct(v) => v.to_tokens(tokens),
|
||||
Self::Tuple(v) => v.to_tokens(tokens),
|
||||
Self::EnumVariant(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
|
|
@ -622,10 +832,6 @@ struct RewriteAsCheckMatch {
|
|||
}
|
||||
|
||||
impl Fold for RewriteAsCheckMatch {
|
||||
fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat {
|
||||
i.colon_token = Some(Token![:](i.member.span()));
|
||||
i
|
||||
}
|
||||
fn fold_pat(&mut self, pat: Pat) -> Pat {
|
||||
match pat {
|
||||
Pat::Ident(mut pat_ident) => match parse_enum_ident(pat_ident.ident) {
|
||||
|
|
@ -740,17 +946,177 @@ impl Fold for RewriteAsCheckMatch {
|
|||
// don't recurse into expressions
|
||||
i
|
||||
}
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
if let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: _,
|
||||
diverge,
|
||||
}) = let_stmt.init.take()
|
||||
{
|
||||
let_stmt.init = Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote_spanned! {self.span=>
|
||||
__match_value
|
||||
},
|
||||
diverge: diverge.map(|(else_, _expr)| {
|
||||
(
|
||||
else_,
|
||||
parse_quote_spanned! {self.span=>
|
||||
match __infallible {}
|
||||
},
|
||||
)
|
||||
}),
|
||||
});
|
||||
}
|
||||
fold_local(self, let_stmt)
|
||||
}
|
||||
}
|
||||
|
||||
struct HdlMatchParseState<'a> {
|
||||
sim: Option<(kw::sim,)>,
|
||||
match_span: Span,
|
||||
errors: &'a mut Errors,
|
||||
}
|
||||
|
||||
struct HdlLetPatVisitState<'a> {
|
||||
errors: &'a mut Errors,
|
||||
bindings: BTreeSet<&'a Ident>,
|
||||
}
|
||||
|
||||
impl<'a> VisitMatchPat<'a> for HdlLetPatVisitState<'a> {
|
||||
fn visit_match_pat_binding(&mut self, v: &'a MatchPatBinding) {
|
||||
self.bindings.insert(&v.ident);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or(&mut self, v: &'a MatchPatOr<MatchPat>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or_simple(&mut self, v: &'a MatchPatOr<MatchPatSimple>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or_simple(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_enum_variant(&mut self, v: &'a MatchPatEnumVariant) {
|
||||
self.errors.error(v, "refutable pattern in let statement");
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_let_pat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut let_stmt: Local,
|
||||
) -> Local {
|
||||
let span = let_stmt.let_token.span();
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if let Pat::Type(pat) = &mut let_stmt.pat {
|
||||
*pat.ty = wrap_ty_with_expr((*pat.ty).clone());
|
||||
}
|
||||
let check_let_stmt = RewriteAsCheckMatch { span }.fold_local(let_stmt.clone());
|
||||
let Local {
|
||||
attrs: _,
|
||||
let_token,
|
||||
pat,
|
||||
init,
|
||||
semi_token,
|
||||
} = let_stmt;
|
||||
let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr,
|
||||
diverge,
|
||||
}) = init
|
||||
else {
|
||||
self.errors
|
||||
.error(let_token, "#[hdl] let must be assigned a value");
|
||||
return empty_let();
|
||||
};
|
||||
if let Some((else_, _)) = diverge {
|
||||
// TODO: implement let-else
|
||||
self.errors
|
||||
.error(else_, "#[hdl] let ... else { ... } is not implemented");
|
||||
return empty_let();
|
||||
}
|
||||
let Ok(pat) = MatchPat::parse(
|
||||
&mut HdlMatchParseState {
|
||||
sim,
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
},
|
||||
pat,
|
||||
) else {
|
||||
return empty_let();
|
||||
};
|
||||
let mut state = HdlLetPatVisitState {
|
||||
errors: &mut self.errors,
|
||||
bindings: BTreeSet::new(),
|
||||
};
|
||||
state.visit_match_pat(&pat);
|
||||
let HdlLetPatVisitState {
|
||||
errors: _,
|
||||
bindings,
|
||||
} = state;
|
||||
let retval = if sim.is_some() {
|
||||
parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)*) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let __match_value = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr));
|
||||
#let_token #pat #eq_token ::fayalite::sim::value::SimValue::into_value(__match_value) #semi_token
|
||||
(#(#bindings,)*)
|
||||
};
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)* __scope,) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(
|
||||
__match_expr,
|
||||
|__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_let_stmt
|
||||
match __infallible {}
|
||||
},
|
||||
);
|
||||
let mut __match_iter = ::fayalite::module::match_(__match_expr);
|
||||
let ::fayalite::__std::option::Option::Some(__match_variant) =
|
||||
::fayalite::__std::iter::Iterator::next(&mut __match_iter)
|
||||
else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with uninhabited type");
|
||||
};
|
||||
let ::fayalite::__std::option::Option::None =
|
||||
::fayalite::__std::iter::Iterator::next(&mut __match_iter)
|
||||
else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with refutable pattern");
|
||||
};
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#let_token #pat #eq_token __match_variant #semi_token
|
||||
(#(#bindings,)* __scope,)
|
||||
};
|
||||
}
|
||||
};
|
||||
match retval {
|
||||
syn::Stmt::Local(retval) => retval,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_match(
|
||||
&mut self,
|
||||
_hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
expr_match: ExprMatch,
|
||||
) -> Expr {
|
||||
let span = expr_match.match_token.span();
|
||||
|
|
@ -762,8 +1128,9 @@ impl Visitor<'_> {
|
|||
brace_token: _,
|
||||
arms,
|
||||
} = expr_match;
|
||||
self.require_normal_module_or_fn(match_token);
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let mut state = HdlMatchParseState {
|
||||
sim,
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
};
|
||||
|
|
@ -771,24 +1138,36 @@ impl Visitor<'_> {
|
|||
arms.into_iter()
|
||||
.filter_map(|arm| MatchArm::parse(&mut state, arm).ok()),
|
||||
);
|
||||
let expr = quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in ::fayalite::module::match_(__match_expr) {
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#match_token __match_variant {
|
||||
let expr = if sim.is_some() {
|
||||
quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let __match_expr = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr));
|
||||
#match_token ::fayalite::sim::value::SimValue::into_value(__match_expr) {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in ::fayalite::module::match_(__match_expr) {
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#match_token __match_variant {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
syn::parse2(expr).unwrap()
|
||||
}
|
||||
crates/fayalite-proc-macros-impl/src/process_cfg.rs (new file, 2527 lines): diff suppressed because it is too large
@ -1,7 +1,7 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{format_ident, quote, ToTokens};
|
||||
use quote::{ToTokens, format_ident, quote};
|
||||
use std::{collections::BTreeMap, fs};
|
||||
use syn::{fold::Fold, parse_quote};
|
||||
|
||||
@ -14,9 +14,11 @@ rust-version.workspace = true
|
|||
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
base64.workspace = true
|
||||
bitvec.workspace = true
|
||||
blake3.workspace = true
|
||||
clap.workspace = true
|
||||
clap_complete.workspace = true
|
||||
ctor.workspace = true
|
||||
eyre.workspace = true
|
||||
fayalite-proc-macros.workspace = true
|
||||
|
|
@ -24,20 +26,24 @@ hashbrown.workspace = true
|
|||
jobslot.workspace = true
|
||||
num-bigint.workspace = true
|
||||
num-traits.workspace = true
|
||||
os_pipe.workspace = true
|
||||
ordered-float.workspace = true
|
||||
petgraph.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde.workspace = true
|
||||
tempfile.workspace = true
|
||||
vec_map.workspace = true
|
||||
which.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
trybuild.workspace = true
|
||||
serde = { workspace = true, features = ["rc"] }
|
||||
|
||||
[build-dependencies]
|
||||
fayalite-visit-gen.workspace = true
|
||||
|
||||
[features]
|
||||
unstable-doc = []
|
||||
unstable-test-hasher = []
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
features = ["unstable-doc"]
|
||||
@ -5,6 +5,9 @@ use std::{env, fs, path::Path};
|
|||
|
||||
fn main() {
|
||||
println!("cargo::rustc-check-cfg=cfg(todo)");
|
||||
println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)");
|
||||
println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)");
|
||||
println!("cargo::rustc-cfg=cfg_true_for_tests");
|
||||
let path = "visit_types.json";
|
||||
println!("cargo::rerun-if-changed={path}");
|
||||
println!("cargo::rerun-if-changed=build.rs");
|
||||
@ -1,47 +1,64 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use clap::Parser;
|
||||
use fayalite::{cli, prelude::*};
|
||||
use fayalite::prelude::*;
|
||||
|
||||
#[hdl_module]
|
||||
fn blinky(clock_frequency: u64) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let rst: SyncReset = m.input();
|
||||
fn blinky(platform_io_builder: PlatformIOBuilder<'_>) {
|
||||
let clk_input =
|
||||
platform_io_builder.peripherals_with_type::<peripherals::ClockInput>()[0].use_peripheral();
|
||||
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk,
|
||||
rst: rst.to_reset(),
|
||||
clk: clk_input.clk,
|
||||
rst,
|
||||
};
|
||||
let max_value = clock_frequency / 2 - 1;
|
||||
let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1;
|
||||
let int_ty = UInt::range_inclusive(0..=max_value);
|
||||
#[hdl]
|
||||
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
|
||||
#[hdl]
|
||||
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
|
||||
#[hdl]
|
||||
let rgb_output_reg = reg_builder().clock_domain(cd).reset(
|
||||
#[hdl]
|
||||
peripherals::RgbLed {
|
||||
r: false,
|
||||
g: false,
|
||||
b: false,
|
||||
},
|
||||
);
|
||||
#[hdl]
|
||||
if counter_reg.cmp_eq(max_value) {
|
||||
connect_any(counter_reg, 0u8);
|
||||
connect(output_reg, !output_reg);
|
||||
connect(rgb_output_reg.r, !rgb_output_reg.r);
|
||||
#[hdl]
|
||||
if rgb_output_reg.r {
|
||||
connect(rgb_output_reg.g, !rgb_output_reg.g);
|
||||
#[hdl]
|
||||
if rgb_output_reg.g {
|
||||
connect(rgb_output_reg.b, !rgb_output_reg.b);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
connect_any(counter_reg, counter_reg + 1_hdl_u1);
|
||||
}
|
||||
for led in platform_io_builder.peripherals_with_type::<peripherals::Led>() {
|
||||
if let Ok(led) = led.try_use_peripheral() {
|
||||
connect(led.on, output_reg);
|
||||
}
|
||||
}
|
||||
for rgb_led in platform_io_builder.peripherals_with_type::<peripherals::RgbLed>() {
|
||||
if let Ok(rgb_led) = rgb_led.try_use_peripheral() {
|
||||
connect(rgb_led, rgb_output_reg);
|
||||
}
|
||||
}
|
||||
#[hdl]
|
||||
let led: Bool = m.output();
|
||||
connect(led, output_reg);
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
}
|
||||
|
||||
#[derive(Parser)]
|
||||
struct Cli {
|
||||
/// clock frequency in hertz
|
||||
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
|
||||
clock_frequency: u64,
|
||||
#[command(subcommand)]
|
||||
cli: cli::Cli,
|
||||
}
|
||||
|
||||
fn main() -> cli::Result {
|
||||
let cli = Cli::parse();
|
||||
cli.cli.run(blinky(cli.clock_frequency))
|
||||
fn main() {
|
||||
<BuildCli>::main("blinky", |_, platform, _| {
|
||||
Ok(JobParams::new(platform.wrap_main_module(blinky)))
|
||||
});
|
||||
}
|
||||
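As a quick sanity check of the counter limit computed in `blinky` above, the same arithmetic in plain Rust, assuming a 25 MHz `ClockInput` (the frequency value is only an example):

// Sketch only: the `max_value` computation from `blinky`, for an assumed 25 MHz clock.
fn main() {
    let frequency_hz = 25_000_000.0_f64; // example value; the real one comes from the platform
    let max_value = (frequency_hz / 2.0).round_ties_even() as u64 - 1;
    assert_eq!(max_value, 12_499_999);
    // counter_reg counts 0..=max_value, so output_reg toggles twice per second: a 1 Hz blink
    println!("counter wraps every {} cycles", max_value + 1);
}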
crates/fayalite/examples/tx_only_uart.rs (new file, 188 lines)
@ -0,0 +1,188 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use clap::builder::TypedValueParser;
|
||||
use fayalite::{
|
||||
build::{ToArgs, WriteArgs},
|
||||
platform::PeripheralRef,
|
||||
prelude::*,
|
||||
};
|
||||
use ordered_float::NotNan;
|
||||
|
||||
fn pick_clock<'a>(
|
||||
platform_io_builder: &PlatformIOBuilder<'a>,
|
||||
) -> PeripheralRef<'a, peripherals::ClockInput> {
|
||||
let mut clks = platform_io_builder.peripherals_with_type::<peripherals::ClockInput>();
|
||||
clks.sort_by_key(|clk| {
|
||||
// sort clocks by preference, smaller return values means higher preference
|
||||
let mut frequency = clk.ty().frequency();
|
||||
let priority;
|
||||
if frequency < 10e6 {
|
||||
frequency = -frequency; // prefer bigger frequencies
|
||||
priority = 1;
|
||||
} else if frequency > 50e6 {
|
||||
// prefer smaller frequencies
|
||||
priority = 2; // least preferred
|
||||
} else {
|
||||
priority = 0; // most preferred
|
||||
frequency = (frequency - 25e6).abs(); // prefer closer to 25MHz
|
||||
}
|
||||
(priority, NotNan::new(frequency).expect("should be valid"))
|
||||
});
|
||||
clks[0]
|
||||
}
|
||||
|
||||
#[hdl_module]
|
||||
fn tx_only_uart(
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
divisor: f64,
|
||||
message: impl AsRef<[u8]>,
|
||||
) {
|
||||
let message = message.as_ref();
|
||||
let clk_input = pick_clock(&platform_io_builder).use_peripheral();
|
||||
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk: clk_input.clk,
|
||||
rst,
|
||||
};
|
||||
let numerator = 1u128 << 16;
|
||||
let denominator = (divisor * numerator as f64).round() as u128;
|
||||
|
||||
#[hdl]
|
||||
let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128);
|
||||
|
||||
#[hdl]
|
||||
let sum: UInt<128> = wire();
|
||||
connect_any(sum, remainder_reg + numerator);
|
||||
|
||||
#[hdl]
|
||||
let tick_reg = reg_builder().clock_domain(cd).reset(false);
|
||||
connect(tick_reg, false);
|
||||
|
||||
#[hdl]
|
||||
let next_remainder: UInt<128> = wire();
|
||||
connect(remainder_reg, next_remainder);
|
||||
|
||||
#[hdl]
|
||||
if sum.cmp_ge(denominator) {
|
||||
connect_any(next_remainder, sum - denominator);
|
||||
connect(tick_reg, true);
|
||||
} else {
|
||||
connect(next_remainder, sum);
|
||||
}
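// This is a fractional (accumulator-based) baud-rate divider: each clock cycle the
// accumulator gains `numerator` (2^16), and whenever it reaches
// `denominator` = round(divisor * 2^16) it subtracts `denominator` and pulses
// `tick_reg` for one cycle, so ticks average one per `divisor` clock cycles,
// i.e. at the requested baud rate.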
|
||||
|
||||
#[hdl]
|
||||
let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4);
|
||||
#[hdl]
|
||||
let next_uart_state: UInt<4> = wire();
|
||||
|
||||
connect_any(next_uart_state, uart_state_reg + 1u8);
|
||||
|
||||
#[hdl]
|
||||
let message_mem: Array<UInt<8>> = wire(Array[UInt::new_static()][message.len()]);
|
||||
for (message, message_mem) in message.iter().zip(message_mem) {
|
||||
connect(message_mem, *message);
|
||||
}
|
||||
#[hdl]
|
||||
let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32);
|
||||
#[hdl]
|
||||
let next_addr: UInt<32> = wire();
|
||||
connect(next_addr, addr_reg);
|
||||
|
||||
#[hdl]
|
||||
let tx = reg_builder().clock_domain(cd).reset(true);
|
||||
|
||||
#[hdl]
|
||||
let tx_bits: Array<Bool, 10> = wire();
|
||||
|
||||
connect(tx_bits[0], false); // start bit
|
||||
connect(tx_bits[9], true); // stop bit
|
||||
|
||||
for i in 0..8 {
|
||||
connect(tx_bits[i + 1], message_mem[addr_reg][i]); // data bits
|
||||
}
|
||||
|
||||
connect(tx, tx_bits[uart_state_reg]);
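// Standard 8N1 framing: `uart_state_reg` steps through the 10-entry frame
// (start bit, 8 data bits LSB first, stop bit), advancing one bit per baud tick.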
|
||||
|
||||
#[hdl]
|
||||
if uart_state_reg.cmp_eq(Expr::ty(tx_bits).len() - 1) {
|
||||
connect(next_uart_state, 0_hdl_u4);
|
||||
let next_addr_val = addr_reg + 1u8;
|
||||
#[hdl]
|
||||
if next_addr_val.cmp_lt(message.len()) {
|
||||
connect_any(next_addr, next_addr_val);
|
||||
} else {
|
||||
connect(next_addr, 0u32);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if tick_reg {
|
||||
connect(uart_state_reg, next_uart_state);
|
||||
connect(addr_reg, next_addr);
|
||||
}
|
||||
|
||||
for uart in platform_io_builder.peripherals_with_type::<peripherals::Uart>() {
|
||||
connect(uart.use_peripheral().tx, tx);
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
}
|
||||
|
||||
fn parse_baud_rate(
|
||||
v: impl AsRef<str>,
|
||||
) -> Result<NotNan<f64>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let retval: NotNan<f64> = v
|
||||
.as_ref()
|
||||
.parse()
|
||||
.map_err(|_| "invalid baud rate, must be a finite positive floating-point value")?;
|
||||
if *retval > 0.0 && retval.is_finite() {
|
||||
Ok(retval)
|
||||
} else {
|
||||
Err("baud rate must be finite and positive".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||
pub struct ExtraArgs {
|
||||
#[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")]
|
||||
pub baud_rate: NotNan<f64>,
|
||||
#[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())]
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl ToArgs for ExtraArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self { baud_rate, message } = self;
|
||||
args.write_display_arg(format_args!("--baud-rate={baud_rate}"));
|
||||
args.write_long_option_eq("message", message);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
type Cli = BuildCli<ExtraArgs>;
|
||||
Cli::main(
|
||||
"tx_only_uart",
|
||||
|_, platform, ExtraArgs { baud_rate, message }| {
|
||||
Ok(JobParams::new(platform.try_wrap_main_module(|io| {
|
||||
let clk = pick_clock(&io).ty();
|
||||
let divisor = clk.frequency() / *baud_rate;
|
||||
let baud_rate_error = |msg| {
|
||||
<Cli as clap::CommandFactory>::command()
|
||||
.error(clap::error::ErrorKind::ValueValidation, msg)
|
||||
};
|
||||
const HUGE_DIVISOR: f64 = u64::MAX as f64;
|
||||
match divisor {
|
||||
divisor if !divisor.is_finite() => {
|
||||
return Err(baud_rate_error("bad baud rate"));
|
||||
}
|
||||
HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")),
|
||||
4.0.. => {}
|
||||
_ => return Err(baud_rate_error("baud rate is too large")),
|
||||
}
|
||||
Ok(tx_only_uart(io, divisor, message))
|
||||
})?))
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
@ -2,6 +2,7 @@
|
|||
// See Notices.txt for copyright information
|
||||
//! ## `#[hdl] let` statements
|
||||
|
||||
pub mod destructuring;
|
||||
pub mod inputs_outputs;
|
||||
pub mod instances;
|
||||
pub mod memories;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Destructuring Let
|
||||
//!
|
||||
//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns.
|
||||
//!
|
||||
//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now,
|
||||
//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`.
|
||||
//!
|
||||
//! ```
|
||||
//! # use fayalite::prelude::*;
|
||||
//! #[hdl]
|
||||
//! struct MyStruct {
|
||||
//! a: UInt<8>,
|
||||
//! b: Bool,
|
||||
//! }
|
||||
//!
|
||||
//! #[hdl_module]
|
||||
//! fn my_module() {
|
||||
//! #[hdl]
|
||||
//! let my_input: MyStruct = m.input();
|
||||
//! #[hdl]
|
||||
//! let my_output: UInt<8> = m.output();
|
||||
//! #[hdl]
|
||||
//! let MyStruct { a, b } = my_input;
|
||||
//! #[hdl]
|
||||
//! if b {
|
||||
//! connect(my_output, a);
|
||||
//! } else {
|
||||
//! connect(my_output, 0_hdl_u8);
|
||||
//! }
|
||||
//! }
|
||||
//! ```
|
||||
|
|
@ -7,5 +7,5 @@
|
|||
//!
|
||||
//! `#[hdl] match` statements' bodies must evaluate to type `()` for now.
|
||||
//!
|
||||
//! `#[hdl] match` statements can only match one level of struct/enum pattern for now,
|
||||
//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now,
|
||||
//! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`.
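//!
//! As a rough illustrative sketch (assuming `HdlOption` from the prelude and the same
//! `connect`/`m.input()`/`m.output()` conventions used in the other doc examples),
//! matching on an optional value might look like:
//!
//! ```
//! # use fayalite::prelude::*;
//! #[hdl_module]
//! fn my_module() {
//!     #[hdl]
//!     let my_input: HdlOption<UInt<8>> = m.input();
//!     #[hdl]
//!     let my_output: UInt<8> = m.output();
//!     #[hdl]
//!     match my_input {
//!         HdlSome(v) => connect(my_output, v),
//!         HdlNone => connect(my_output, 0_hdl_u8),
//!     }
//! }
//! ```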
|
||||
|
|
|
|||
|
|
@ -12,7 +12,7 @@ use std::{
|
|||
ops::Deref,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
struct CustomFirrtlAnnotationFieldsImpl {
|
||||
value: serde_json::Map<String, serde_json::Value>,
|
||||
serialized: Interned<str>,
|
||||
|
|
@ -145,52 +145,73 @@ pub struct DocStringAnnotation {
|
|||
|
||||
macro_rules! make_annotation_enum {
|
||||
(
|
||||
#[$non_exhaustive:ident]
|
||||
$(#[$meta:meta])*
|
||||
$vis:vis enum $Annotation:ident {
|
||||
$($Variant:ident($T:ident),)*
|
||||
$vis:vis enum $AnnotationEnum:ident {
|
||||
$($Variant:ident($T:ty),)*
|
||||
}
|
||||
) => {
|
||||
crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive);
|
||||
|
||||
#[$non_exhaustive]
|
||||
$(#[$meta])*
|
||||
$vis enum $Annotation {
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
$vis enum $AnnotationEnum {
|
||||
$($Variant($T),)*
|
||||
}
|
||||
|
||||
$(impl IntoAnnotations for $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(self)]
|
||||
impl std::fmt::Debug for $AnnotationEnum {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
$(Self::$Variant(v) => v.fmt(f),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for &'_ $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(*self)]
|
||||
$(impl From<$T> for crate::annotations::Annotation {
|
||||
fn from(v: $T) -> Self {
|
||||
$AnnotationEnum::$Variant(v).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for &'_ mut $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
impl crate::annotations::IntoAnnotations for $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(*self)]
|
||||
[self.into()]
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoAnnotations for Box<$T> {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
impl crate::annotations::IntoAnnotations for &'_ $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[$Annotation::$Variant(*self)]
|
||||
[crate::annotations::Annotation::from(self.clone())]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for &'_ mut $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(self.clone())]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for Box<$T> {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(*self)]
|
||||
}
|
||||
})*
|
||||
};
|
||||
(@require_non_exhaustive non_exhaustive) => {};
|
||||
}
|
||||
|
||||
pub(crate) use make_annotation_enum;
|
||||
|
||||
make_annotation_enum! {
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum Annotation {
|
||||
DontTouch(DontTouchAnnotation),
|
||||
|
|
@ -199,6 +220,7 @@ make_annotation_enum! {
|
|||
BlackBoxPath(BlackBoxPathAnnotation),
|
||||
DocString(DocStringAnnotation),
|
||||
CustomFirrtl(CustomFirrtlAnnotation),
|
||||
Xilinx(crate::vendor::xilinx::XilinxAnnotation),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -314,10 +336,8 @@ impl<T: Iterator<Item: IntoAnnotations>> Iterator for IterIntoAnnotations<T> {
|
|||
}
|
||||
|
||||
impl<
|
||||
T: FusedIterator<
|
||||
Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>,
|
||||
>,
|
||||
> FusedIterator for IterIntoAnnotations<T>
|
||||
T: FusedIterator<Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>>,
|
||||
> FusedIterator for IterIntoAnnotations<T>
|
||||
{
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,17 +2,24 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{ops::ArrayIndex, Expr, ToExpr},
|
||||
int::{DynSize, KnownSize, Size, SizeType, DYN_SIZE},
|
||||
expr::{
|
||||
CastToBits, Expr, HdlPartialEq, ReduceBits, ToExpr,
|
||||
ops::{ArrayLiteral, ExprFromIterator, ExprIntoIterator, ExprPartialEq},
|
||||
},
|
||||
int::{Bool, DYN_SIZE, DynSize, KnownSize, Size, SizeType},
|
||||
intern::{Intern, Interned, LazyInterned},
|
||||
module::transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
sim::value::{SimValue, SimValuePartialEq},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, MatchVariantWithoutScope, StaticType, Type, TypeProperties, TypeWithDeref,
|
||||
CanonicalType, MatchVariantWithoutScope, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, TypeWithDeref,
|
||||
serde_impls::SerdeCanonicalType,
|
||||
},
|
||||
util::ConstUsize,
|
||||
};
|
||||
use std::ops::Index;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error};
|
||||
use std::{iter::FusedIterator, ops::Index};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ArrayType<T: Type = CanonicalType, Len: Size = DynSize> {
|
||||
|
|
@ -41,15 +48,20 @@ impl<T: Type, Len: Size> ArrayType<T, Len> {
|
|||
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = element;
|
||||
let Some(bit_width) = bit_width.checked_mul(len) else {
|
||||
panic!("array too big");
|
||||
};
|
||||
let Some(sim_only_values_len) = sim_only_values_len.checked_mul(len) else {
|
||||
panic!("array too big");
|
||||
};
|
||||
TypeProperties {
|
||||
is_passive,
|
||||
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub fn new(element: T, len: Len::SizeType) -> Self {
|
||||
|
|
@ -91,6 +103,12 @@ impl<T: Type, Len: KnownSize + Size<SizeType = Len>> ArrayType<T, Len> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: StaticType, Len: KnownSize> Default for ArrayType<T, Len> {
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: StaticType, Len: KnownSize> StaticType for ArrayType<T, Len> {
|
||||
const TYPE: Self = Self {
|
||||
element: LazyInterned::new_lazy(&|| T::TYPE.intern_sized()),
|
||||
|
|
@ -139,6 +157,7 @@ impl<T: Type + Visit<State>, Len: Size, State: Visitor + ?Sized> Visit<State>
|
|||
impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
|
||||
type BaseType = Array;
|
||||
type MaskType = ArrayType<T::MaskType, Len>;
|
||||
type SimValue = Len::ArraySimValue<T>;
|
||||
type MatchVariant = Len::ArrayMatch<T>;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Len::ArrayMatch<T>>;
|
||||
|
|
@ -148,10 +167,8 @@ impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
|
|||
this: Expr<Self>,
|
||||
source_location: SourceLocation,
|
||||
) -> Self::MatchVariantsIter {
|
||||
let base = Expr::as_dyn_array(this);
|
||||
let base_ty = Expr::ty(base);
|
||||
let _ = source_location;
|
||||
let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
|
||||
let retval = Vec::from_iter(this);
|
||||
std::iter::once(MatchVariantWithoutScope(
|
||||
Len::ArrayMatch::<T>::try_from(retval)
|
||||
.ok()
|
||||
|
|
@ -177,16 +194,106 @@ impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
|
|||
Len::from_usize(array.len()),
|
||||
)
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, mut opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
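// the opaque storage holds `self.len()` element values laid out back-to-back;
// peel off one element-sized chunk per element and build a SimValue for each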
|
||||
let element_ty = self.element();
|
||||
let element_size = element_ty.canonical().size();
|
||||
let mut value = Vec::with_capacity(self.len());
|
||||
for _ in 0..self.len() {
|
||||
let (element_opaque, rest) = opaque.split_at(element_size);
|
||||
value.push(SimValue::from_opaque(element_ty, element_opaque.to_owned()));
|
||||
opaque = rest;
|
||||
}
|
||||
value.try_into().ok().expect("used correct length")
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
mut opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
let element_ty = self.element();
|
||||
let element_size = element_ty.canonical().size();
|
||||
let value = AsMut::<[SimValue<T>]>::as_mut(value);
|
||||
assert_eq!(self.len(), value.len());
|
||||
for element_value in value {
|
||||
assert_eq!(SimValue::ty(element_value), element_ty);
|
||||
let (element_opaque, rest) = opaque.split_at(element_size);
|
||||
SimValue::opaque_mut(element_value).clone_from_slice(element_opaque);
|
||||
opaque = rest;
|
||||
}
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
mut writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
let element_ty = self.element();
|
||||
let element_size = element_ty.canonical().size();
|
||||
let value = AsRef::<[SimValue<T>]>::as_ref(value);
|
||||
assert_eq!(self.len(), value.len());
|
||||
for element_value in value {
|
||||
assert_eq!(SimValue::ty(element_value), element_ty);
|
||||
writer.fill_prefix_with(element_size, |writer| {
|
||||
writer.fill_cloned_from_slice(SimValue::opaque(element_value).as_slice())
|
||||
});
|
||||
}
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type + Serialize, Len: Size> Serialize for ArrayType<T, Len> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
SerdeCanonicalType::<T>::Array {
|
||||
element: self.element(),
|
||||
len: self.len(),
|
||||
}
|
||||
.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: Type + Deserialize<'de>, Len: Size> Deserialize<'de> for ArrayType<T, Len> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let name = |len| -> String {
|
||||
if let Some(len) = len {
|
||||
format!("an Array<_, {len}>")
|
||||
} else {
|
||||
"an Array<_>".to_string()
|
||||
}
|
||||
};
|
||||
match SerdeCanonicalType::<T>::deserialize(deserializer)? {
|
||||
SerdeCanonicalType::Array { element, len } => {
|
||||
if let Some(len) = Len::try_from_usize(len) {
|
||||
Ok(Self::new(element, len))
|
||||
} else {
|
||||
Err(Error::invalid_value(
|
||||
serde::de::Unexpected::Other(&name(Some(len))),
|
||||
&&*name(Len::KNOWN_VALUE),
|
||||
))
|
||||
}
|
||||
}
|
||||
ty => Err(Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&&*name(Len::KNOWN_VALUE),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> TypeWithDeref for ArrayType<T, Len> {
|
||||
fn expr_deref(this: &Expr<Self>) -> &Self::MatchVariant {
|
||||
let base = Expr::as_dyn_array(*this);
|
||||
let base_ty = Expr::ty(base);
|
||||
let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
|
||||
let retval = Vec::from_iter(*this);
|
||||
Interned::into_inner(Intern::intern_sized(
|
||||
Len::ArrayMatch::<T>::try_from(retval)
|
||||
.ok()
|
||||
|
|
@ -218,3 +325,143 @@ impl<T: Type, L: SizeType> Index<L> for ArrayWithoutLen<T> {
|
|||
Interned::into_inner(Intern::intern_sized(ArrayType::new(self.element, len)))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: Type, Rhs: Type, Len: Size> ExprPartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
|
||||
where
|
||||
Lhs: ExprPartialEq<Rhs>,
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
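// compare element-wise, collect the per-element results into an `Array<Bool>`,
// then reduce: the arrays are equal iff every element comparison is true
// (`cmp_ne` below is the dual, reducing with "any bit set")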
|
||||
let lhs_ty = Expr::ty(lhs);
|
||||
let rhs_ty = Expr::ty(rhs);
|
||||
assert_eq!(lhs_ty.len(), rhs_ty.len());
|
||||
lhs.into_iter()
|
||||
.zip(rhs)
|
||||
.map(|(l, r)| l.cmp_eq(r))
|
||||
.collect::<Expr<Array<Bool>>>()
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
|
||||
let lhs_ty = Expr::ty(lhs);
|
||||
let rhs_ty = Expr::ty(rhs);
|
||||
assert_eq!(lhs_ty.len(), rhs_ty.len());
|
||||
lhs.into_iter()
|
||||
.zip(rhs)
|
||||
.map(|(l, r)| l.cmp_ne(r))
|
||||
.collect::<Expr<Array<Bool>>>()
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: Type, Rhs: Type, Len: Size> SimValuePartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
|
||||
where
|
||||
Lhs: SimValuePartialEq<Rhs>,
|
||||
{
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<ArrayType<Rhs, Len>>) -> bool {
|
||||
AsRef::<[_]>::as_ref(&**this)
|
||||
.iter()
|
||||
.zip(AsRef::<[_]>::as_ref(&**other))
|
||||
.all(|(l, r)| SimValuePartialEq::sim_value_eq(l, r))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> ExprIntoIterator for ArrayType<T, Len> {
|
||||
type Item = T;
|
||||
type ExprIntoIter = ExprArrayIter<T, Len>;
|
||||
|
||||
fn expr_into_iter(e: Expr<Self>) -> Self::ExprIntoIter {
|
||||
ExprArrayIter {
|
||||
base: e,
|
||||
indexes: 0..Expr::ty(e).len(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ExprArrayIter<T: Type, Len: Size> {
|
||||
base: Expr<ArrayType<T, Len>>,
|
||||
indexes: std::ops::Range<usize>,
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> ExprArrayIter<T, Len> {
|
||||
pub fn base(&self) -> Expr<ArrayType<T, Len>> {
|
||||
self.base
|
||||
}
|
||||
pub fn indexes(&self) -> std::ops::Range<usize> {
|
||||
self.indexes.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> Iterator for ExprArrayIter<T, Len> {
|
||||
type Item = Expr<T>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.indexes.next().map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.indexes.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize {
|
||||
self.indexes.count()
|
||||
}
|
||||
|
||||
fn last(mut self) -> Option<Self::Item> {
|
||||
self.next_back()
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.indexes.nth(n).map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, mut f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.indexes.fold(init, |b, i| f(b, self.base[i]))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> DoubleEndedIterator for ExprArrayIter<T, Len> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.indexes.next_back().map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.indexes.nth_back(n).map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, mut f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.indexes.rfold(init, |b, i| f(b, self.base[i]))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> ExactSizeIterator for ExprArrayIter<T, Len> {
|
||||
fn len(&self) -> usize {
|
||||
self.indexes.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> FusedIterator for ExprArrayIter<T, Len> {}
|
||||
|
||||
impl<A: StaticType> ExprFromIterator<Expr<A>> for Array<A> {
|
||||
fn expr_from_iter<T: IntoIterator<Item = Expr<A>>>(iter: T) -> Expr<Self> {
|
||||
ArrayLiteral::new(
|
||||
A::TYPE,
|
||||
iter.into_iter().map(|v| Expr::canonical(v)).collect(),
|
||||
)
|
||||
.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: StaticType> ExprFromIterator<&'a Expr<A>> for Array<A> {
|
||||
fn expr_from_iter<T: IntoIterator<Item = &'a Expr<A>>>(iter: T) -> Expr<Self> {
|
||||
iter.into_iter().copied().collect()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
2803 crates/fayalite/src/build.rs (new file; diff suppressed because it is too large)
1177 crates/fayalite/src/build/external.rs (new file; diff suppressed because it is too large)
128 crates/fayalite/src/build/firrtl.rs (new file)
|
|
@ -0,0 +1,128 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies,
|
||||
JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
|
||||
ToArgs, WriteArgs,
|
||||
},
|
||||
firrtl::{ExportOptions, FileBackend},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
|
||||
pub struct FirrtlJobKind;
|
||||
|
||||
#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
|
||||
#[group(id = "Firrtl")]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl ToArgs for FirrtlArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self { export_options } = self;
|
||||
export_options.to_args(args);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct Firrtl {
|
||||
base: BaseJob,
|
||||
export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl Firrtl {
|
||||
fn make_firrtl_file_backend(&self) -> FileBackend {
|
||||
FileBackend {
|
||||
dir_path: PathBuf::from(&*self.base.output_dir()),
|
||||
top_fir_file_stem: Some(self.base.file_stem().into()),
|
||||
circuit_name: None,
|
||||
}
|
||||
}
|
||||
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||
self.base.file_with_ext("fir")
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for FirrtlJobKind {
|
||||
type Args = FirrtlArgs;
|
||||
type Job = Firrtl;
|
||||
type Dependencies = JobKindAndDependencies<BaseJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
JobKindAndDependencies::new(BaseJobKind)
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(
|
||||
params,
|
||||
global_params,
|
||||
|_kind, FirrtlArgs { export_options }, dependencies| {
|
||||
Ok(Firrtl {
|
||||
base: dependencies.get_job::<BaseJob, _>().clone(),
|
||||
export_options,
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.base.output_dir(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.firrtl_file(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"firrtl".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
let [JobItem::Path { path: input_path }] = *inputs else {
|
||||
panic!("wrong inputs, expected a single `Path`");
|
||||
};
|
||||
assert_eq!(input_path, job.base.output_dir());
|
||||
crate::firrtl::export(
|
||||
job.make_firrtl_file_backend(),
|
||||
params.main_module(),
|
||||
job.export_options,
|
||||
)?;
|
||||
Ok(vec![JobItem::Path {
|
||||
path: job.firrtl_file(),
|
||||
}])
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[DynJobKind::new(FirrtlJobKind)]
|
||||
}
|
||||
388 crates/fayalite/src/build/formal.rs (new file)
|
|
@ -0,0 +1,388 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams,
|
||||
JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
|
||||
JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||
external::{
|
||||
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||
},
|
||||
verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
|
||||
},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
module::NameId,
|
||||
testing::FormalMode,
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::Context;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
fmt::{self, Write},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalArgs {
|
||||
#[arg(long = "sby-extra-arg", value_name = "ARG")]
|
||||
pub sby_extra_args: Vec<OsString>,
|
||||
#[arg(long, default_value_t)]
|
||||
pub formal_mode: FormalMode,
|
||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
||||
pub formal_depth: u64,
|
||||
#[arg(long, default_value = Self::DEFAULT_SOLVER)]
|
||||
pub formal_solver: String,
|
||||
#[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
|
||||
pub smtbmc_extra_args: Vec<OsString>,
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
pub const DEFAULT_DEPTH: u64 = 20;
|
||||
pub const DEFAULT_SOLVER: &'static str = "z3";
|
||||
}
|
||||
|
||||
impl ToArgs for FormalArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
} = self;
|
||||
for arg in sby_extra_args {
|
||||
args.write_long_option_eq("sby-extra-arg", arg);
|
||||
}
|
||||
args.write_display_args([
|
||||
format_args!("--formal-mode={formal_mode}"),
|
||||
format_args!("--formal-depth={formal_depth}"),
|
||||
format_args!("--formal-solver={formal_solver}"),
|
||||
]);
|
||||
for arg in smtbmc_extra_args {
|
||||
args.write_long_option_eq("smtbmc-extra-arg", arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct WriteSbyFileJobKind;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
|
||||
pub struct WriteSbyFileJob {
|
||||
sby_extra_args: Interned<[Interned<OsStr>]>,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
formal_solver: Interned<str>,
|
||||
smtbmc_extra_args: Interned<[Interned<OsStr>]>,
|
||||
sby_file: Interned<Path>,
|
||||
output_dir: Interned<Path>,
|
||||
main_verilog_file: Interned<Path>,
|
||||
}
|
||||
|
||||
impl WriteSbyFileJob {
|
||||
pub fn sby_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.sby_extra_args
|
||||
}
|
||||
pub fn formal_mode(&self) -> FormalMode {
|
||||
self.formal_mode
|
||||
}
|
||||
pub fn formal_depth(&self) -> u64 {
|
||||
self.formal_depth
|
||||
}
|
||||
pub fn formal_solver(&self) -> Interned<str> {
|
||||
self.formal_solver
|
||||
}
|
||||
pub fn smtbmc_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.smtbmc_extra_args
|
||||
}
|
||||
pub fn sby_file(&self) -> Interned<Path> {
|
||||
self.sby_file
|
||||
}
|
||||
pub fn output_dir(&self) -> Interned<Path> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||
self.main_verilog_file
|
||||
}
|
||||
fn write_sby(
|
||||
&self,
|
||||
output: &mut OsString,
|
||||
additional_files: &[Interned<Path>],
|
||||
main_module_name_id: NameId,
|
||||
) -> eyre::Result<()> {
|
||||
let Self {
|
||||
sby_extra_args: _,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
sby_file: _,
|
||||
output_dir: _,
|
||||
main_verilog_file,
|
||||
} = self;
|
||||
write!(
|
||||
output,
|
||||
"[options]\n\
|
||||
mode {formal_mode}\n\
|
||||
depth {formal_depth}\n\
|
||||
wait on\n\
|
||||
\n\
|
||||
[engines]\n\
|
||||
smtbmc {formal_solver} -- --"
|
||||
)
|
||||
.expect("writing to OsString can't fail");
|
||||
for i in smtbmc_extra_args {
|
||||
output.push(" ");
|
||||
output.push(i);
|
||||
}
|
||||
output.push(
|
||||
"\n\
|
||||
\n\
|
||||
[script]\n",
|
||||
);
|
||||
for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
|
||||
output.push("read_verilog -sv -formal \"");
|
||||
output.push(verilog_file);
|
||||
output.push("\"\n");
|
||||
}
|
||||
let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
|
||||
// workaround for wires disappearing -- set `keep` on all wires
|
||||
writeln!(
|
||||
output,
|
||||
"hierarchy -top {circuit_name}\n\
|
||||
proc\n\
|
||||
setattr -set keep 1 w:\\*\n\
|
||||
prep",
|
||||
)
|
||||
.expect("writing to OsString can't fail");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for WriteSbyFileJobKind {
|
||||
type Args = FormalArgs;
|
||||
type Job = WriteSbyFileJob;
|
||||
type Dependencies = JobKindAndDependencies<VerilogJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
mut args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.dependencies
|
||||
.dependencies
|
||||
.args
|
||||
.args
|
||||
.additional_args
|
||||
.verilog_dialect
|
||||
.get_or_insert(VerilogDialect::Yosys);
|
||||
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
|
||||
let FormalArgs {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
} = args;
|
||||
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||
Ok(WriteSbyFileJob {
|
||||
sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(),
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver: formal_solver.intern_deref(),
|
||||
smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(),
|
||||
sby_file: base_job.file_with_ext("sby"),
|
||||
output_dir: base_job.output_dir(),
|
||||
main_verilog_file: dependencies.get_job::<VerilogJob, _>().main_verilog_file(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::DynamicPaths {
|
||||
source_job_name: VerilogJobKind.name(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path { path: job.sby_file }].intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"write-sby-file".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||
let [additional_files] = inputs else {
|
||||
unreachable!();
|
||||
};
|
||||
let additional_files = VerilogJob::unwrap_additional_files(additional_files);
|
||||
let mut contents = OsString::new();
|
||||
job.write_sby(
|
||||
&mut contents,
|
||||
additional_files,
|
||||
params.main_module().name_id(),
|
||||
)?;
|
||||
let path = job.sby_file;
|
||||
std::fs::write(path, contents.as_encoded_bytes())
|
||||
.wrap_err_with(|| format!("writing {path:?} failed"))?;
|
||||
Ok(vec![JobItem::Path { path }])
|
||||
}
|
||||
|
||||
fn subcommand_hidden(self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Formal {
|
||||
#[serde(flatten)]
|
||||
write_sby_file: WriteSbyFileJob,
|
||||
sby_file_name: Interned<OsStr>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for Formal {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
write_sby_file:
|
||||
WriteSbyFileJob {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
sby_file,
|
||||
output_dir: _,
|
||||
main_verilog_file,
|
||||
},
|
||||
sby_file_name,
|
||||
} = self;
|
||||
f.debug_struct("Formal")
|
||||
.field("sby_extra_args", sby_extra_args)
|
||||
.field("formal_mode", formal_mode)
|
||||
.field("formal_depth", formal_depth)
|
||||
.field("formal_solver", formal_solver)
|
||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
||||
.field("sby_file", sby_file)
|
||||
.field("sby_file_name", sby_file_name)
|
||||
.field("main_verilog_file", main_verilog_file)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct Symbiyosys;
|
||||
|
||||
impl ExternalProgramTrait for Symbiyosys {
|
||||
fn default_program_name() -> Interned<str> {
|
||||
"sby".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)]
|
||||
pub struct FormalAdditionalArgs {}
|
||||
|
||||
impl ToArgs for FormalAdditionalArgs {
|
||||
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {} = self;
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalCommand for Formal {
|
||||
type AdditionalArgs = FormalAdditionalArgs;
|
||||
type AdditionalJobData = Formal;
|
||||
type BaseJobPosition = GetJobPositionDependencies<
|
||||
GetJobPositionDependencies<
|
||||
GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
|
||||
>,
|
||||
>;
|
||||
type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
|
||||
type ExternalProgram = Symbiyosys;
|
||||
|
||||
fn dependencies() -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)> {
|
||||
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
|
||||
let FormalAdditionalArgs {} = args.additional_args;
|
||||
let write_sby_file = dependencies.get_job::<WriteSbyFileJob, _>().clone();
|
||||
Ok(Formal {
|
||||
sby_file_name: write_sby_file
|
||||
.sby_file()
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
write_sby_file,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||
[
|
||||
JobItemName::Path {
|
||||
path: job.additional_job_data().write_sby_file.sby_file(),
|
||||
},
|
||||
JobItemName::Path {
|
||||
path: job.additional_job_data().write_sby_file.main_verilog_file(),
|
||||
},
|
||||
JobItemName::DynamicPaths {
|
||||
source_job_name: VerilogJobKind.name(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||
Interned::default()
|
||||
}
|
||||
|
||||
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||
// args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode
|
||||
args.write_arg("-f");
|
||||
args.write_interned_arg(job.additional_job_data().sby_file_name);
|
||||
args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args());
|
||||
}
|
||||
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||
Some(job.output_dir())
|
||||
}
|
||||
|
||||
fn job_kind_name() -> Interned<str> {
|
||||
"formal".intern()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[
|
||||
DynJobKind::new(WriteSbyFileJobKind),
|
||||
DynJobKind::new(ExternalCommandJobKind::<Formal>::new()),
|
||||
]
|
||||
}
|
||||
847 crates/fayalite/src/build/graph.rs (new file)
|
|
@ -0,0 +1,847 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs,
|
||||
},
|
||||
intern::Interned,
|
||||
platform::DynPlatform,
|
||||
util::{HashMap, HashSet, job_server::AcquiredJob},
|
||||
};
|
||||
use eyre::{ContextCompat, eyre};
|
||||
use petgraph::{
|
||||
algo::{DfsSpace, kosaraju_scc, toposort},
|
||||
graph::DiGraph,
|
||||
visit::{GraphBase, Visitable},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
|
||||
use std::{
|
||||
cell::OnceCell,
|
||||
collections::{BTreeMap, BTreeSet, VecDeque},
|
||||
convert::Infallible,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Write},
|
||||
panic,
|
||||
rc::Rc,
|
||||
str::Utf8Error,
|
||||
sync::mpsc,
|
||||
thread::{self, ScopedJoinHandle},
|
||||
};
|
||||
|
||||
macro_rules! write_str {
|
||||
($s:expr, $($rest:tt)*) => {
|
||||
write!($s, $($rest)*).expect("String::write_fmt can't fail")
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum JobGraphNode {
|
||||
Job(DynJob),
|
||||
Item {
|
||||
#[allow(dead_code, reason = "name used for debugging")]
|
||||
name: JobItemName,
|
||||
source_job: Option<DynJob>,
|
||||
},
|
||||
}
|
||||
|
||||
type JobGraphInner = DiGraph<JobGraphNode, ()>;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct JobGraph {
|
||||
jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
|
||||
items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
|
||||
graph: JobGraphInner,
|
||||
topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
|
||||
space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for JobGraph {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
jobs: _,
|
||||
items: _,
|
||||
graph,
|
||||
topological_order,
|
||||
space: _,
|
||||
} = self;
|
||||
f.debug_struct("JobGraph")
|
||||
.field("graph", graph)
|
||||
.field("topological_order", topological_order)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum JobGraphError {
|
||||
CycleError {
|
||||
job: DynJob,
|
||||
output: JobItemName,
|
||||
},
|
||||
MultipleJobsCreateSameOutput {
|
||||
output_item: JobItemName,
|
||||
existing_job: DynJob,
|
||||
new_job: DynJob,
|
||||
},
|
||||
}
|
||||
|
||||
impl std::error::Error for JobGraphError {}
|
||||
|
||||
impl fmt::Display for JobGraphError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::CycleError { job, output } => write!(
|
||||
f,
|
||||
"job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
|
||||
{output:?}\n\
|
||||
job:\n{job:?}",
|
||||
),
|
||||
JobGraphError::MultipleJobsCreateSameOutput {
|
||||
output_item,
|
||||
existing_job,
|
||||
new_job,
|
||||
} => write!(
|
||||
f,
|
||||
"job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
|
||||
conflicting output:\n\
|
||||
{output_item:?}\n\
|
||||
existing job:\n\
|
||||
{existing_job:?}\n\
|
||||
new job:\n\
|
||||
{new_job:?}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum EscapeForUnixShellState {
|
||||
DollarSingleQuote,
|
||||
SingleQuote,
|
||||
Unquoted,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EscapeForUnixShell<'a> {
|
||||
state: EscapeForUnixShellState,
|
||||
prefix: [u8; 3],
|
||||
bytes: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for c in self.clone() {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> EscapeForUnixShell<'a> {
|
||||
pub fn new(s: &'a (impl ?Sized + AsRef<OsStr>)) -> Self {
|
||||
Self::from_bytes(s.as_ref().as_encoded_bytes())
|
||||
}
|
||||
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
|
||||
let mut prefix = [0; 3];
|
||||
prefix[..bytes.len()].copy_from_slice(bytes);
|
||||
prefix
|
||||
}
|
||||
pub fn from_bytes(bytes: &'a [u8]) -> Self {
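// pick the cheapest quoting strategy that can represent `bytes`:
// control or non-ASCII bytes force `$'...'` quoting, bytes containing
// `!`, `'`, `"`, or spaces (or an empty string) use plain single quotes,
// and anything else is emitted unquoted with per-character backslash escapes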
|
||||
let mut needs_single_quote = bytes.is_empty();
|
||||
for &b in bytes {
|
||||
match b {
|
||||
b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
|
||||
0..0x20 | 0x7F.. => {
|
||||
return Self {
|
||||
state: EscapeForUnixShellState::DollarSingleQuote,
|
||||
prefix: Self::make_prefix(b"$'"),
|
||||
bytes,
|
||||
};
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
if needs_single_quote {
|
||||
Self {
|
||||
state: EscapeForUnixShellState::SingleQuote,
|
||||
prefix: Self::make_prefix(b"'"),
|
||||
bytes,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
state: EscapeForUnixShellState::Unquoted,
|
||||
prefix: Self::make_prefix(b""),
|
||||
bytes,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for EscapeForUnixShell<'_> {
|
||||
type Item = char;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match &mut self.prefix {
|
||||
[0, 0, 0] => {}
|
||||
[0, 0, v] | // find first
|
||||
[0, v, _] | // non-zero byte
|
||||
[v, _, _] => {
|
||||
let retval = *v as char;
|
||||
*v = 0;
|
||||
return Some(retval);
|
||||
}
|
||||
}
|
||||
let Some(&next_byte) = self.bytes.split_off_first() else {
|
||||
return match self.state {
|
||||
EscapeForUnixShellState::DollarSingleQuote
|
||||
| EscapeForUnixShellState::SingleQuote => {
|
||||
self.state = EscapeForUnixShellState::Unquoted;
|
||||
Some('\'')
|
||||
}
|
||||
EscapeForUnixShellState::Unquoted => None,
|
||||
};
|
||||
};
|
||||
match self.state {
|
||||
EscapeForUnixShellState::DollarSingleQuote => match next_byte {
|
||||
b'\'' | b'\\' => {
|
||||
self.prefix = Self::make_prefix(&[next_byte]);
|
||||
Some('\\')
|
||||
}
|
||||
b'\t' => {
|
||||
self.prefix = Self::make_prefix(b"t");
|
||||
Some('\\')
|
||||
}
|
||||
b'\n' => {
|
||||
self.prefix = Self::make_prefix(b"n");
|
||||
Some('\\')
|
||||
}
|
||||
b'\r' => {
|
||||
self.prefix = Self::make_prefix(b"r");
|
||||
Some('\\')
|
||||
}
|
||||
0x20..=0x7E => Some(next_byte as char),
|
||||
_ => {
|
||||
self.prefix = [
|
||||
b'x',
|
||||
char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
|
||||
as u8,
|
||||
char::from_digit(next_byte as u32 & 0xF, 0x10)
|
||||
.expect("known to be in range") as u8,
|
||||
];
|
||||
Some('\\')
|
||||
}
|
||||
},
|
||||
EscapeForUnixShellState::SingleQuote => {
|
||||
if next_byte == b'\'' {
|
||||
self.prefix = Self::make_prefix(b"\\''");
|
||||
Some('\'')
|
||||
} else {
|
||||
Some(next_byte as char)
|
||||
}
|
||||
}
|
||||
EscapeForUnixShellState::Unquoted => match next_byte {
|
||||
b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
|
||||
| b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
|
||||
| b'}' | b'~' => {
|
||||
self.prefix = Self::make_prefix(&[next_byte]);
|
||||
Some('\\')
|
||||
}
|
||||
_ => Some(next_byte as char),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum UnixMakefileEscapeKind {
|
||||
NonRecipe,
|
||||
RecipeWithoutShellEscaping,
|
||||
RecipeWithShellEscaping,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct EscapeForUnixMakefile<'a> {
|
||||
s: &'a OsStr,
|
||||
kind: UnixMakefileEscapeKind,
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.do_write(
|
||||
f,
|
||||
fmt::Write::write_str,
|
||||
fmt::Write::write_char,
|
||||
|_, _| Ok(()),
|
||||
|_| unreachable!("already checked that the input causes no UTF-8 errors"),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> EscapeForUnixMakefile<'a> {
|
||||
fn do_write<S: ?Sized, E>(
|
||||
&self,
|
||||
state: &mut S,
|
||||
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
|
||||
write_char: impl Fn(&mut S, char) -> Result<(), E>,
|
||||
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
|
||||
utf8_error: impl Fn(Utf8Error) -> E,
|
||||
) -> Result<(), E> {
|
||||
let escape_recipe_char = |c| match c {
|
||||
'$' => write_str(state, "$$"),
|
||||
'\0'..='\x1F' | '\x7F' => {
|
||||
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||
}
|
||||
_ => write_char(state, c),
|
||||
};
|
||||
match self.kind {
|
||||
UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes())
|
||||
.map_err(&utf8_error)?
|
||||
.chars()
|
||||
.try_for_each(|c| match c {
|
||||
'=' => {
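// a literal `=` can't be backslash-escaped in a makefile target/prerequisite
// list, so it is emitted via a helper variable (`EQUALS = =`) and referenced
// as `$(EQUALS)` instead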
|
||||
add_variable(state, "EQUALS = =")?;
|
||||
write_str(state, "$(EQUALS)")
|
||||
}
|
||||
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
|
||||
'$' => write_str(state, "$$"),
|
||||
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
|
||||
write_char(state, '\\')?;
|
||||
write_char(state, c)
|
||||
}
|
||||
'\0'..='\x1F' | '\x7F' => {
|
||||
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||
}
|
||||
_ => write_char(state, c),
|
||||
}),
|
||||
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
|
||||
str::from_utf8(self.s.as_encoded_bytes())
|
||||
.map_err(&utf8_error)?
|
||||
.chars()
|
||||
.try_for_each(escape_recipe_char)
|
||||
}
|
||||
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
|
||||
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn new(
|
||||
s: &'a (impl ?Sized + AsRef<OsStr>),
|
||||
kind: UnixMakefileEscapeKind,
|
||||
needed_variables: &mut BTreeSet<&'static str>,
|
||||
) -> Result<Self, Utf8Error> {
|
||||
let s = s.as_ref();
|
||||
let retval = Self { s, kind };
|
||||
retval.do_write(
|
||||
needed_variables,
|
||||
|_, _| Ok(()),
|
||||
|_, _| Ok(()),
|
||||
|needed_variables, variable| {
|
||||
needed_variables.insert(variable);
|
||||
Ok(())
|
||||
},
|
||||
|e| e,
|
||||
)?;
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
||||
impl JobGraph {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
fn try_add_item_node(
|
||||
&mut self,
|
||||
name: JobItemName,
|
||||
new_source_job: Option<DynJob>,
|
||||
new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||
) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
|
||||
use hashbrown::hash_map::Entry;
|
||||
match self.items.entry(name) {
|
||||
Entry::Occupied(item_entry) => {
|
||||
let node_id = *item_entry.get();
|
||||
let JobGraphNode::Item {
|
||||
name: _,
|
||||
source_job,
|
||||
} = &mut self.graph[node_id]
|
||||
else {
|
||||
unreachable!("known to be an item");
|
||||
};
|
||||
if let Some(new_source_job) = new_source_job {
|
||||
if let Some(source_job) = source_job {
|
||||
return Err(JobGraphError::MultipleJobsCreateSameOutput {
|
||||
output_item: item_entry.key().clone(),
|
||||
existing_job: source_job.clone(),
|
||||
new_job: new_source_job,
|
||||
});
|
||||
} else {
|
||||
*source_job = Some(new_source_job);
|
||||
}
|
||||
}
|
||||
Ok(node_id)
|
||||
}
|
||||
Entry::Vacant(item_entry) => {
|
||||
let node_id = self.graph.add_node(JobGraphNode::Item {
|
||||
name,
|
||||
source_job: new_source_job,
|
||||
});
|
||||
new_nodes.insert(node_id);
|
||||
item_entry.insert(node_id);
|
||||
Ok(node_id)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
|
||||
&mut self,
|
||||
jobs: I,
|
||||
) -> Result<(), JobGraphError> {
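// the graph is bipartite: edges run from a job to each item it produces and from
// each item a job consumes to that job, so a topological sort over it yields a
// valid execution order (or proves that adding these jobs would create a cycle)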
|
||||
use hashbrown::hash_map::Entry;
|
||||
let jobs = jobs.into_iter();
|
||||
struct RemoveNewNodesOnError<'a> {
|
||||
this: &'a mut JobGraph,
|
||||
new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||
}
|
||||
impl Drop for RemoveNewNodesOnError<'_> {
|
||||
fn drop(&mut self) {
|
||||
for node in self.new_nodes.drain() {
|
||||
self.this.graph.remove_node(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
|
||||
this: self,
|
||||
new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
|
||||
};
|
||||
let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
|
||||
let this = &mut *remove_new_nodes_on_error.this;
|
||||
for job in jobs {
|
||||
let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
|
||||
continue;
|
||||
};
|
||||
let job_node_id = this
|
||||
.graph
|
||||
.add_node(JobGraphNode::Job(job_entry.key().clone()));
|
||||
new_nodes.insert(job_node_id);
|
||||
job_entry.insert(job_node_id);
|
||||
for name in job.outputs() {
|
||||
let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
|
||||
this.graph.add_edge(job_node_id, item_node_id, ());
|
||||
}
|
||||
for name in job.inputs() {
|
||||
let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
|
||||
this.graph.add_edge(item_node_id, job_node_id, ());
|
||||
}
|
||||
}
|
||||
match toposort(&this.graph, Some(&mut this.space)) {
|
||||
Ok(v) => {
|
||||
this.topological_order = v;
|
||||
// no need to remove any of the new nodes on drop since we didn't encounter any errors
|
||||
remove_new_nodes_on_error.new_nodes.clear();
|
||||
Ok(())
|
||||
}
|
||||
Err(_) => {
|
||||
// there's at least one cycle, find one!
|
||||
let cycle = kosaraju_scc(&this.graph)
|
||||
.into_iter()
|
||||
.find_map(|scc| {
|
||||
if scc.len() <= 1 {
|
||||
// can't be a cycle since our graph is bipartite --
|
||||
// jobs only connect to items, never jobs to jobs or items to items
|
||||
None
|
||||
} else {
|
||||
Some(scc)
|
||||
}
|
||||
})
|
||||
.expect("we know there's a cycle");
|
||||
let cycle_set = HashSet::from_iter(cycle.iter().copied());
|
||||
let job = cycle
|
||||
.into_iter()
|
||||
.find_map(|node_id| {
|
||||
if let JobGraphNode::Job(job) = &this.graph[node_id] {
|
||||
Some(job.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.expect("a job must be part of the cycle");
|
||||
let output = job
|
||||
.outputs()
|
||||
.into_iter()
|
||||
.find(|output| cycle_set.contains(&this.items[output]))
|
||||
.expect("an output must be part of the cycle");
|
||||
Err(JobGraphError::CycleError { job, output })
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
|
||||
match self.try_add_jobs(jobs) {
|
||||
Ok(()) => {}
|
||||
Err(e) => panic!("error: {e}"),
|
||||
}
|
||||
}
|
||||
pub fn to_unix_makefile(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> Result<String, Utf8Error> {
|
||||
self.to_unix_makefile_with_internal_program_prefix(
|
||||
&[program_name_for_internal_jobs()],
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
}
|
||||
pub fn to_unix_makefile_with_internal_program_prefix(
|
||||
&self,
|
||||
internal_program_prefix: &[Interned<OsStr>],
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> Result<String, Utf8Error> {
|
||||
let mut retval = String::new();
|
||||
let mut needed_variables = BTreeSet::new();
|
||||
let mut phony_targets = BTreeSet::new();
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let outputs = job.outputs();
|
||||
if outputs.is_empty() {
|
||||
retval.push_str(":");
|
||||
} else {
|
||||
for output in job.outputs() {
|
||||
match output {
|
||||
JobItemName::Path { path } => {
|
||||
write_str!(
|
||||
retval,
|
||||
"{} ",
|
||||
EscapeForUnixMakefile::new(
|
||||
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
JobItemName::DynamicPaths { source_job_name } => {
|
||||
write_str!(
|
||||
retval,
|
||||
"{} ",
|
||||
EscapeForUnixMakefile::new(
|
||||
&source_job_name,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
if outputs.len() == 1 {
|
||||
retval.push_str(":");
|
||||
} else {
|
||||
retval.push_str("&:");
|
||||
}
|
||||
}
|
||||
for input in job.inputs() {
|
||||
match input {
|
||||
JobItemName::Path { path } => {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
JobItemName::DynamicPaths { source_job_name } => {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
&source_job_name,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
retval.push_str("\n\t");
|
||||
job.command_params_with_internal_program_prefix(
|
||||
internal_program_prefix,
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
.to_unix_shell_line(&mut retval, |arg, output| {
|
||||
write_str!(
|
||||
output,
|
||||
"{}",
|
||||
EscapeForUnixMakefile::new(
|
||||
arg,
|
||||
UnixMakefileEscapeKind::RecipeWithShellEscaping,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
Ok(())
|
||||
})?;
|
||||
retval.push_str("\n\n");
|
||||
}
|
||||
if !phony_targets.is_empty() {
|
||||
retval.push_str("\n.PHONY:");
|
||||
for phony_target in phony_targets {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
phony_target,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
retval.push_str("\n");
|
||||
}
|
||||
if !needed_variables.is_empty() {
|
||||
retval.insert_str(
|
||||
0,
|
||||
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
|
||||
);
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
pub fn to_unix_shell_script(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> String {
|
||||
self.to_unix_shell_script_with_internal_program_prefix(
|
||||
&[program_name_for_internal_jobs()],
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
}
|
||||
pub fn to_unix_shell_script_with_internal_program_prefix(
|
||||
&self,
|
||||
internal_program_prefix: &[Interned<OsStr>],
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> String {
|
||||
let mut retval = String::from(
|
||||
"#!/bin/sh\n\
|
||||
set -ex\n",
|
||||
);
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let Ok(()) = job
|
||||
.command_params_with_internal_program_prefix(
|
||||
internal_program_prefix,
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
.to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> {
|
||||
write_str!(output, "{}", EscapeForUnixShell::new(&arg));
|
||||
Ok(())
|
||||
});
|
||||
retval.push_str("\n");
|
||||
}
|
||||
retval
|
||||
}
|
||||
pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> {
|
||||
// use scope to auto-join threads on errors
|
||||
thread::scope(|scope| {
|
||||
struct WaitingJobState {
|
||||
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||
job: DynJob,
|
||||
inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
|
||||
}
|
||||
let mut ready_jobs = VecDeque::new();
|
||||
let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let waiting_job = WaitingJobState {
|
||||
job_node_id: node_id,
|
||||
job: job.clone(),
|
||||
inputs: job
|
||||
.inputs()
|
||||
.iter()
|
||||
.map(|&name| (name, OnceCell::new()))
|
||||
.collect(),
|
||||
};
|
||||
if waiting_job.inputs.is_empty() {
|
||||
ready_jobs.push_back(waiting_job);
|
||||
} else {
|
||||
let waiting_job = Rc::new(waiting_job);
|
||||
for &input_item in waiting_job.inputs.keys() {
|
||||
item_name_to_waiting_jobs_map
|
||||
.entry(input_item)
|
||||
.or_default()
|
||||
.push(waiting_job.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
struct RunningJob<'scope> {
|
||||
job: DynJob,
|
||||
thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
|
||||
}
|
||||
let mut running_jobs = HashMap::default();
|
||||
let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
|
||||
loop {
|
||||
while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() {
|
||||
let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
let output_items = thread.join().map_err(panic::resume_unwind)??;
|
||||
assert!(
|
||||
output_items.iter().map(JobItem::name).eq(job.outputs()),
|
||||
"job's run() method returned the wrong output items:\n\
|
||||
output items:\n\
|
||||
{output_items:?}\n\
|
||||
expected outputs:\n\
|
||||
{:?}\n\
|
||||
job:\n\
|
||||
{job:?}",
|
||||
job.outputs(),
|
||||
);
|
||||
for output_item in output_items {
|
||||
for waiting_job in item_name_to_waiting_jobs_map
|
||||
.remove(&output_item.name())
|
||||
.unwrap_or_default()
|
||||
{
|
||||
let Ok(()) =
|
||||
waiting_job.inputs[&output_item.name()].set(output_item.clone())
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
if let Some(waiting_job) = Rc::into_inner(waiting_job) {
|
||||
ready_jobs.push_back(waiting_job);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if let Some(WaitingJobState {
|
||||
job_node_id,
|
||||
job,
|
||||
inputs,
|
||||
}) = ready_jobs.pop_front()
|
||||
{
|
||||
struct RunningJobInThread<'a> {
|
||||
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||
job: DynJob,
|
||||
inputs: Vec<JobItem>,
|
||||
params: &'a JobParams,
|
||||
global_params: &'a GlobalParams,
|
||||
acquired_job: AcquiredJob,
|
||||
finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
|
||||
}
|
||||
impl RunningJobInThread<'_> {
|
||||
fn run(mut self) -> eyre::Result<Vec<JobItem>> {
|
||||
self.job.run(
|
||||
&self.inputs,
|
||||
self.params,
|
||||
self.global_params,
|
||||
&mut self.acquired_job,
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Drop for RunningJobInThread<'_> {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.finished_jobs_sender.send(self.job_node_id);
|
||||
}
|
||||
}
|
||||
let name = job.kind().name();
|
||||
let running_job_in_thread = RunningJobInThread {
|
||||
job_node_id,
|
||||
job: job.clone(),
|
||||
inputs: Result::from_iter(job.inputs().iter().map(|input_name| {
|
||||
inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| {
|
||||
eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
|
||||
})
|
||||
}))?,
|
||||
params,
|
||||
global_params,
|
||||
acquired_job: AcquiredJob::acquire()?,
|
||||
finished_jobs_sender: finished_jobs_sender.clone(),
|
||||
};
|
||||
running_jobs.insert(
|
||||
job_node_id,
|
||||
RunningJob {
|
||||
job,
|
||||
thread: thread::Builder::new()
|
||||
.name(format!("job:{name}"))
|
||||
.spawn_scoped(scope, move || running_job_in_thread.run())
|
||||
.expect("failed to spawn thread for job"),
|
||||
},
|
||||
);
|
||||
}
|
||||
if running_jobs.is_empty() {
|
||||
assert!(item_name_to_waiting_jobs_map.is_empty());
|
||||
assert!(ready_jobs.is_empty());
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<DynJob> for JobGraph {
|
||||
#[track_caller]
|
||||
fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
|
||||
self.add_jobs(iter);
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<DynJob> for JobGraph {
|
||||
#[track_caller]
|
||||
fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
|
||||
let mut retval = Self::new();
|
||||
retval.add_jobs(iter);
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for JobGraph {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
serializer.serialize_element(job)?;
|
||||
}
|
||||
serializer.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for JobGraph {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let jobs = Vec::<DynJob>::deserialize(deserializer)?;
|
||||
let mut retval = JobGraph::new();
|
||||
retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
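The JobGraph API added above (new, try_add_jobs, to_unix_makefile, to_unix_shell_script) can be driven roughly as in the following sketch. This is illustrative only: the fayalite::build module path, the eyre-based error handling, and the output file names are assumptions, not part of this change.

// Minimal sketch of using the JobGraph API from this change; module paths,
// file names, and error handling are assumed for illustration.
use fayalite::build::{DynJob, JobGraph};

fn export_build_files(jobs: Vec<DynJob>) -> eyre::Result<()> {
    let mut graph = JobGraph::new();
    // try_add_jobs reports errors such as the cycle error above instead of panicking.
    graph
        .try_add_jobs(jobs)
        .map_err(|e| eyre::eyre!("{e}"))?;
    // The same dependency graph can be emitted as a makefile or a shell script.
    let makefile = graph.to_unix_makefile(None, &[])?;
    std::fs::write("build.mk", makefile)?;
    let script = graph.to_unix_shell_script(None, &[]);
    std::fs::write("build.sh", script)?;
    Ok(())
}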
313 crates/fayalite/src/build/registry.rs Normal file
@@ -0,0 +1,313 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{DynJobKind, JobKind, built_in_job_kinds},
|
||||
intern::Interned,
|
||||
util::InternedStrCompareAsStr,
|
||||
};
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
fmt,
|
||||
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
impl DynJobKind {
|
||||
pub fn registry() -> JobKindRegistrySnapshot {
|
||||
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn register(self) {
|
||||
JobKindRegistry::register(JobKindRegistry::lock(), self);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct JobKindRegistry {
|
||||
job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
|
||||
}
|
||||
|
||||
enum JobKindRegisterError {
|
||||
SameName {
|
||||
name: InternedStrCompareAsStr,
|
||||
old_job_kind: DynJobKind,
|
||||
new_job_kind: DynJobKind,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for JobKindRegisterError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::SameName {
|
||||
name,
|
||||
old_job_kind,
|
||||
new_job_kind,
|
||||
} => write!(
|
||||
f,
|
||||
"two different `JobKind` can't share the same name:\n\
|
||||
{name:?}\n\
|
||||
old job kind:\n\
|
||||
{old_job_kind:?}\n\
|
||||
new job kind:\n\
|
||||
{new_job_kind:?}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait JobKindRegistryRegisterLock {
|
||||
type Locked;
|
||||
fn lock(self) -> Self::Locked;
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
|
||||
}
|
||||
|
||||
impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
|
||||
type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
|
||||
fn lock(self) -> Self::Locked {
|
||||
self.write().expect("shouldn't be poisoned")
|
||||
}
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||
Arc::make_mut(locked)
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
|
||||
type Locked = Self;
|
||||
|
||||
fn lock(self) -> Self::Locked {
|
||||
self
|
||||
}
|
||||
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||
locked
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKindRegistry {
|
||||
fn lock() -> &'static RwLock<Arc<Self>> {
|
||||
static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
|
||||
REGISTRY.get_or_init(Default::default)
|
||||
}
|
||||
fn try_register<L: JobKindRegistryRegisterLock>(
|
||||
lock: L,
|
||||
job_kind: DynJobKind,
|
||||
) -> Result<(), JobKindRegisterError> {
|
||||
use std::collections::btree_map::Entry;
|
||||
let name = InternedStrCompareAsStr(job_kind.name());
|
||||
// run user code only outside of lock
|
||||
let mut locked = lock.lock();
|
||||
let this = L::make_mut(&mut locked);
|
||||
let result = match this.job_kinds.entry(name) {
|
||||
Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
|
||||
name,
|
||||
old_job_kind: entry.get().clone(),
|
||||
new_job_kind: job_kind,
|
||||
}),
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(job_kind);
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
drop(locked);
|
||||
// outside of lock now, so we can test if it's the same DynJobKind
|
||||
match result {
|
||||
Err(JobKindRegisterError::SameName {
|
||||
name: _,
|
||||
old_job_kind,
|
||||
new_job_kind,
|
||||
}) if old_job_kind == new_job_kind => Ok(()),
|
||||
result => result,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
|
||||
match Self::try_register(lock, job_kind) {
|
||||
Err(e) => panic!("{e}"),
|
||||
Ok(()) => {}
|
||||
}
|
||||
}
|
||||
fn get() -> Arc<Self> {
|
||||
Self::lock().read().expect("shouldn't be poisoned").clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for JobKindRegistry {
|
||||
fn default() -> Self {
|
||||
let mut retval = Self {
|
||||
job_kinds: BTreeMap::new(),
|
||||
};
|
||||
for job_kind in built_in_job_kinds() {
|
||||
Self::register(&mut retval, job_kind);
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);
|
||||
|
||||
impl JobKindRegistrySnapshot {
|
||||
pub fn get() -> Self {
|
||||
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||
}
|
||||
pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
|
||||
self.0.job_kinds.get(name)
|
||||
}
|
||||
pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
|
||||
JobKindRegistryIterWithNames(self.0.job_kinds.iter())
|
||||
}
|
||||
pub fn iter(&self) -> JobKindRegistryIter<'_> {
|
||||
JobKindRegistryIter(self.0.job_kinds.values())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
|
||||
type Item = &'a DynJobKind;
|
||||
type IntoIter = JobKindRegistryIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
|
||||
type Item = &'a DynJobKind;
|
||||
type IntoIter = JobKindRegistryIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistryIter<'a>(
|
||||
std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||
);
|
||||
|
||||
impl<'a> Iterator for JobKindRegistryIter<'a> {
|
||||
type Item = &'a DynJobKind;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.0.count()
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item> {
|
||||
self.0.last()
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth(n)
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}
|
||||
|
||||
impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}
|
||||
|
||||
impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.0.next_back()
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth_back(n)
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0.rfold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistryIterWithNames<'a>(
|
||||
std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||
);
|
||||
|
||||
impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
|
||||
type Item = (Interned<str>, &'a DynJobKind);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.0.count()
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item> {
|
||||
self.0.last().map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}
|
||||
|
||||
impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}
|
||||
|
||||
impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.0
|
||||
.next_back()
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0
|
||||
.nth_back(n)
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
.rfold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn register_job_kind<K: JobKind>(kind: K) {
|
||||
DynJobKind::new(kind).register();
|
||||
}
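register_job_kind above is the entry point for adding job kinds beyond built_in_job_kinds(). A rough usage sketch follows; the import paths (fayalite::build and its registry module) are assumed, and the lookup name is whatever the JobKind reports from its name() method.

// Sketch only: the import paths are assumed; any type implementing JobKind works here.
use fayalite::build::{registry::register_job_kind, DynJobKind, JobKind};

fn install_and_find<K: JobKind>(kind: K, name: &str) {
    // Registering the same DynJobKind twice is accepted; two different kinds
    // sharing one name panic, per JobKindRegistry::try_register above.
    register_job_kind(kind);

    // Snapshots are cheap Arc clones; name lookups go through the BTreeMap.
    let registry = DynJobKind::registry();
    if let Some(found) = registry.get_by_name(name) {
        println!("job kind {name:?} is registered: {found:?}");
    }
}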
418 crates/fayalite/src/build/verilog.rs Normal file
@@ -0,0 +1,418 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob,
|
||||
GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem,
|
||||
JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||
external::{
|
||||
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||
},
|
||||
firrtl::{Firrtl, FirrtlJobKind},
|
||||
},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::{Context, bail};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
fmt, mem,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
/// based on [LLVM Circt's recommended lowering options][lowering-options]
|
||||
///
|
||||
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
|
||||
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub enum VerilogDialect {
|
||||
Questa,
|
||||
Spyglass,
|
||||
Verilator,
|
||||
Vivado,
|
||||
Yosys,
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
VerilogDialect::Questa => "questa",
|
||||
VerilogDialect::Spyglass => "spyglass",
|
||||
VerilogDialect::Verilator => "verilator",
|
||||
VerilogDialect::Vivado => "vivado",
|
||||
VerilogDialect::Yosys => "yosys",
|
||||
}
|
||||
}
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
VerilogDialect::Spyglass => {
|
||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
||||
}
|
||||
VerilogDialect::Verilator => &[
|
||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
||||
],
|
||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
||||
VerilogDialect::Yosys => {
|
||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
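For reference, the dialect-to-flags mapping above can be inspected directly. A small sketch; the fayalite::build::verilog module path is assumed from this file's location.

// Prints the extra firtool arguments contributed by each --verilog-dialect value.
// Sketch only: the module path is assumed.
use fayalite::build::verilog::VerilogDialect;

fn main() {
    for dialect in [
        VerilogDialect::Questa,
        VerilogDialect::Spyglass,
        VerilogDialect::Verilator,
        VerilogDialect::Vivado,
        VerilogDialect::Yosys,
    ] {
        println!("--verilog-dialect={dialect}: {:?}", dialect.firtool_extra_args());
    }
}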
|
||||
|
||||
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub struct UnadjustedVerilogArgs {
|
||||
#[arg(long = "firtool-extra-arg", value_name = "ARG")]
|
||||
pub firtool_extra_args: Vec<OsString>,
|
||||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
#[arg(long)]
|
||||
pub verilog_debug: bool,
|
||||
}
|
||||
|
||||
impl ToArgs for UnadjustedVerilogArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {
|
||||
ref firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *self;
|
||||
for arg in firtool_extra_args {
|
||||
args.write_long_option_eq("firtool-extra-arg", arg);
|
||||
}
|
||||
if let Some(verilog_dialect) = verilog_dialect {
|
||||
args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str());
|
||||
}
|
||||
if verilog_debug {
|
||||
args.write_arg("--verilog-debug");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct Firtool;
|
||||
|
||||
impl ExternalProgramTrait for Firtool {
|
||||
fn default_program_name() -> Interned<str> {
|
||||
"firtool".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
|
||||
pub struct UnadjustedVerilog {
|
||||
firrtl_file: Interned<Path>,
|
||||
firrtl_file_name: Interned<OsStr>,
|
||||
unadjusted_verilog_file: Interned<Path>,
|
||||
unadjusted_verilog_file_name: Interned<OsStr>,
|
||||
firtool_extra_args: Interned<[Interned<OsStr>]>,
|
||||
verilog_dialect: Option<VerilogDialect>,
|
||||
verilog_debug: bool,
|
||||
}
|
||||
|
||||
impl UnadjustedVerilog {
|
||||
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||
self.firrtl_file
|
||||
}
|
||||
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||
self.unadjusted_verilog_file
|
||||
}
|
||||
pub fn firtool_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.firtool_extra_args
|
||||
}
|
||||
pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
|
||||
self.verilog_dialect
|
||||
}
|
||||
pub fn verilog_debug(&self) -> bool {
|
||||
self.verilog_debug
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalCommand for UnadjustedVerilog {
|
||||
type AdditionalArgs = UnadjustedVerilogArgs;
|
||||
type AdditionalJobData = UnadjustedVerilog;
|
||||
type BaseJobPosition = GetJobPositionDependencies<GetJobPositionJob>;
|
||||
type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
|
||||
type ExternalProgram = Firtool;
|
||||
|
||||
fn dependencies() -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)> {
|
||||
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
|
||||
let UnadjustedVerilogArgs {
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = args.additional_args;
|
||||
let unadjusted_verilog_file = dependencies
|
||||
.dependencies
|
||||
.job
|
||||
.job
|
||||
.file_with_ext("unadjusted.v");
|
||||
let firrtl_job = dependencies.get_job::<Firrtl, _>();
|
||||
Ok(UnadjustedVerilog {
|
||||
firrtl_file: firrtl_job.firrtl_file(),
|
||||
firrtl_file_name: firrtl_job
|
||||
.firrtl_file()
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
unadjusted_verilog_file,
|
||||
unadjusted_verilog_file_name: unadjusted_verilog_file
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(),
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.additional_job_data().firrtl_file,
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||
[job.additional_job_data().unadjusted_verilog_file].intern_slice()
|
||||
}
|
||||
|
||||
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||
let UnadjustedVerilog {
|
||||
firrtl_file: _,
|
||||
firrtl_file_name,
|
||||
unadjusted_verilog_file: _,
|
||||
unadjusted_verilog_file_name,
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *job.additional_job_data();
|
||||
args.write_interned_arg(firrtl_file_name);
|
||||
args.write_arg("-o");
|
||||
args.write_interned_arg(unadjusted_verilog_file_name);
|
||||
if verilog_debug {
|
||||
args.write_args(["-g", "--preserve-values=all"]);
|
||||
}
|
||||
if let Some(dialect) = verilog_dialect {
|
||||
args.write_args(dialect.firtool_extra_args().iter().copied());
|
||||
}
|
||||
args.write_interned_args(firtool_extra_args);
|
||||
}
|
||||
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||
Some(job.output_dir())
|
||||
}
|
||||
|
||||
fn job_kind_name() -> Interned<str> {
|
||||
"unadjusted-verilog".intern()
|
||||
}
|
||||
|
||||
fn subcommand_hidden() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run_even_if_cached_arg_name() -> Interned<str> {
|
||||
"firtool-run-even-if-cached".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct VerilogJobKind;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogJobArgs {}
|
||||
|
||||
impl ToArgs for VerilogJobArgs {
|
||||
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {} = self;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct VerilogJob {
|
||||
output_dir: Interned<Path>,
|
||||
unadjusted_verilog_file: Interned<Path>,
|
||||
main_verilog_file: Interned<Path>,
|
||||
}
|
||||
|
||||
impl VerilogJob {
|
||||
pub fn output_dir(&self) -> Interned<Path> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||
self.unadjusted_verilog_file
|
||||
}
|
||||
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||
self.main_verilog_file
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<Path>] {
|
||||
match additional_files {
|
||||
JobItem::DynamicPaths {
|
||||
paths,
|
||||
source_job_name,
|
||||
} if *source_job_name == VerilogJobKind.name() => paths,
|
||||
v => panic!("expected VerilogJob's additional files JobItem: {v:?}"),
|
||||
}
|
||||
}
|
||||
pub fn all_verilog_files(
|
||||
main_verilog_file: Interned<Path>,
|
||||
additional_files: &[Interned<Path>],
|
||||
) -> eyre::Result<Interned<[Interned<Path>]>> {
|
||||
let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1));
|
||||
for verilog_file in [main_verilog_file].iter().chain(additional_files) {
|
||||
if !["v", "sv"]
|
||||
.iter()
|
||||
.any(|extension| verilog_file.extension() == Some(extension.as_ref()))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| {
|
||||
format!("converting {verilog_file:?} to an absolute path failed")
|
||||
})?;
|
||||
if verilog_file
|
||||
.as_os_str()
|
||||
.as_encoded_bytes()
|
||||
.iter()
|
||||
.any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
|
||||
{
|
||||
bail!("verilog file path contains characters that aren't permitted");
|
||||
}
|
||||
retval.push(verilog_file.intern_deref());
|
||||
}
|
||||
Ok(retval.intern_slice())
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for VerilogJobKind {
|
||||
type Args = VerilogJobArgs;
|
||||
type Job = VerilogJob;
|
||||
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
|
||||
let VerilogJobArgs {} = args;
|
||||
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||
Ok(VerilogJob {
|
||||
output_dir: base_job.output_dir(),
|
||||
unadjusted_verilog_file: dependencies
|
||||
.job
|
||||
.job
|
||||
.additional_job_data()
|
||||
.unadjusted_verilog_file(),
|
||||
main_verilog_file: base_job.file_with_ext("v"),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.unadjusted_verilog_file,
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[
|
||||
JobItemName::Path {
|
||||
path: job.main_verilog_file,
|
||||
},
|
||||
JobItemName::DynamicPaths {
|
||||
source_job_name: self.name(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"verilog".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
_params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||
let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
|
||||
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
|
||||
let file_separator_suffix = "\" ----- 8< -----\n\n";
|
||||
let mut input = &*input;
|
||||
let main_verilog_file = job.main_verilog_file();
|
||||
let mut file_name = Some(main_verilog_file);
|
||||
let mut additional_outputs = Vec::new();
|
||||
loop {
|
||||
let (chunk, next_file_name) = if let Some((chunk, rest)) =
|
||||
input.split_once(file_separator_prefix)
|
||||
{
|
||||
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
|
||||
bail!(
|
||||
"parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
|
||||
);
|
||||
};
|
||||
input = rest;
|
||||
let next_file_name = job.output_dir.join(next_file_name).intern_deref();
|
||||
additional_outputs.push(next_file_name);
|
||||
(chunk, Some(next_file_name))
|
||||
} else {
|
||||
(mem::take(&mut input), None)
|
||||
};
|
||||
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
|
||||
break;
|
||||
};
|
||||
std::fs::write(&file_name, chunk)?;
|
||||
}
|
||||
Ok(vec![
|
||||
JobItem::Path {
|
||||
path: main_verilog_file,
|
||||
},
|
||||
JobItem::DynamicPaths {
|
||||
paths: additional_outputs,
|
||||
source_job_name: self.name(),
|
||||
},
|
||||
])
|
||||
}
|
||||
}
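VerilogJobKind::run above splits firtool's single output stream on `// ----- 8< ----- FILE "<name>" ----- 8< -----` separators: the text before the first separator goes to the main .v file, and each later chunk goes to the file named in the preceding separator, resolved against the output directory. The following is a standalone sketch of that splitting logic; the sample input and file names are invented, and the error handling is simplified to an expect where the job bails.

// Standalone illustration of the separator format handled by VerilogJobKind::run above.
// The input string and file names here are made up; only the splitting logic mirrors the job.
fn split_firtool_output(input: &str) -> Vec<(Option<String>, String)> {
    const PREFIX: &str = "\n// ----- 8< ----- FILE \"";
    const SUFFIX: &str = "\" ----- 8< -----\n\n";
    let mut chunks = Vec::new();
    let mut rest = input;
    // None marks the main output file; Some(name) marks an additional file.
    let mut current_name: Option<String> = None;
    loop {
        match rest.split_once(PREFIX) {
            Some((chunk, after)) => {
                let (next_name, after) = after
                    .split_once(SUFFIX)
                    .expect("separator prefix without matching suffix");
                chunks.push((current_name.take(), chunk.to_string()));
                current_name = Some(next_name.to_string());
                rest = after;
            }
            None => {
                chunks.push((current_name.take(), rest.to_string()));
                break;
            }
        }
    }
    chunks
}

fn main() {
    let sample = concat!(
        "module top;\nendmodule\n",
        "\n// ----- 8< ----- FILE \"extra.sv\" ----- 8< -----\n\n",
        "module extra;\nendmodule\n",
    );
    for (name, text) in split_firtool_output(sample) {
        println!("{name:?}: {} bytes", text.len());
    }
}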
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[
|
||||
DynJobKind::new(ExternalCommandJobKind::<UnadjustedVerilog>::new()),
|
||||
DynJobKind::new(VerilogJobKind),
|
||||
]
|
||||
}
|
||||
|
|
@@ -2,18 +2,25 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{ops::BundleLiteral, Expr, ToExpr},
|
||||
intern::{Intern, Interned},
|
||||
expr::{
|
||||
CastToBits, Expr, ReduceBits, ToExpr,
|
||||
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
|
||||
},
|
||||
int::{Bool, DynSize},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
impl_match_variant_as_self, CanonicalType, MatchVariantWithoutScope, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref,
|
||||
CanonicalType, MatchVariantWithoutScope, OpaqueSimValue, OpaqueSimValueSize,
|
||||
OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref, impl_match_variant_as_self,
|
||||
},
|
||||
util::HashMap,
|
||||
};
|
||||
use hashbrown::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt, marker::PhantomData};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct BundleField {
|
||||
pub name: Interned<str>,
|
||||
pub flipped: bool,
|
||||
|
|
@@ -62,7 +69,7 @@ impl fmt::Display for FmtDebugInStruct
|
|||
struct BundleImpl {
|
||||
fields: Interned<[BundleField]>,
|
||||
name_indexes: HashMap<Interned<str>, usize>,
|
||||
field_offsets: Interned<[usize]>,
|
||||
field_offsets: Interned<[OpaqueSimValueSize]>,
|
||||
type_properties: TypeProperties,
|
||||
}
|
||||
|
||||
|
|
@@ -82,12 +89,9 @@ impl std::fmt::Debug for BundleImpl
|
|||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("Bundle ")?;
|
||||
f.debug_set()
|
||||
.entries(
|
||||
self.fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, field)| field.fmt_debug_in_struct(self.field_offsets[index])),
|
||||
)
|
||||
.entries(self.fields.iter().enumerate().map(|(index, field)| {
|
||||
field.fmt_debug_in_struct(self.field_offsets[index].bit_width)
|
||||
}))
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@@ -112,6 +116,7 @@ impl BundleTypePropertiesBuilder
|
|||
is_storable: true,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
})
|
||||
}
|
||||
pub const fn clone(&self) -> Self {
|
||||
|
|
@@ -119,8 +124,12 @@ impl BundleTypePropertiesBuilder
|
|||
}
|
||||
#[must_use]
|
||||
pub const fn field(self, flipped: bool, field_props: TypeProperties) -> Self {
|
||||
let Some(bit_width) = self.0.bit_width.checked_add(field_props.bit_width) else {
|
||||
panic!("bundle is too big: bit-width overflowed");
|
||||
let Some(OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}) = self.0.size().checked_add(field_props.size())
|
||||
else {
|
||||
panic!("bundle is too big: size overflowed");
|
||||
};
|
||||
if flipped {
|
||||
Self(TypeProperties {
|
||||
|
|
@@ -128,6 +137,7 @@ impl BundleTypePropertiesBuilder
|
|||
is_storable: false,
|
||||
is_castable_from_bits: false,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
} else {
|
||||
Self(TypeProperties {
|
||||
|
|
@@ -136,6 +146,7 @@ impl BundleTypePropertiesBuilder
|
|||
is_castable_from_bits: self.0.is_castable_from_bits
|
||||
& field_props.is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@@ -153,14 +164,14 @@ impl Default for BundleTypePropertiesBuilder
|
|||
impl Bundle {
|
||||
#[track_caller]
|
||||
pub fn new(fields: Interned<[BundleField]>) -> Self {
|
||||
let mut name_indexes = HashMap::with_capacity(fields.len());
|
||||
let mut name_indexes = HashMap::with_capacity_and_hasher(fields.len(), Default::default());
|
||||
let mut field_offsets = Vec::with_capacity(fields.len());
|
||||
let mut type_props_builder = BundleTypePropertiesBuilder::new();
|
||||
for (index, &BundleField { name, flipped, ty }) in fields.iter().enumerate() {
|
||||
if let Some(old_index) = name_indexes.insert(name, index) {
|
||||
panic!("duplicate field name {name:?}: at both index {old_index} and {index}");
|
||||
}
|
||||
field_offsets.push(type_props_builder.0.bit_width);
|
||||
field_offsets.push(type_props_builder.0.size());
|
||||
type_props_builder = type_props_builder.field(flipped, ty.type_properties());
|
||||
}
|
||||
Self(Intern::intern_sized(BundleImpl {
|
||||
|
|
@@ -176,7 +187,7 @@ impl Bundle
|
|||
pub fn field_by_name(&self, name: Interned<str>) -> Option<BundleField> {
|
||||
Some(self.0.fields[*self.0.name_indexes.get(&name)?])
|
||||
}
|
||||
pub fn field_offsets(self) -> Interned<[usize]> {
|
||||
pub fn field_offsets(self) -> Interned<[OpaqueSimValueSize]> {
|
||||
self.0.field_offsets
|
||||
}
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
|
|
@@ -210,6 +221,7 @@ impl Bundle
|
|||
impl Type for Bundle {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = Bundle;
|
||||
type SimValue = OpaqueSimValue;
|
||||
impl_match_variant_as_self!();
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
Self::new(Interned::from_iter(self.0.fields.into_iter().map(
|
||||
|
|
@@ -233,6 +245,28 @@ impl Type for Bundle
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
pub trait BundleType: Type<BaseType = Bundle> {
|
||||
|
|
@@ -241,6 +275,102 @@ pub trait BundleType: Type<BaseType = Bundle> {
|
|||
fn fields(&self) -> Interned<[BundleField]>;
|
||||
}
|
||||
|
||||
pub struct BundleSimValueFromOpaque<'a> {
|
||||
fields: std::slice::Iter<'static, BundleField>,
|
||||
opaque: OpaqueSimValueSlice<'a>,
|
||||
}
|
||||
|
||||
impl<'a> BundleSimValueFromOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: BundleType>(bundle_ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self {
|
||||
let fields = bundle_ty.fields();
|
||||
assert_eq!(
|
||||
opaque.size(),
|
||||
fields
|
||||
.iter()
|
||||
.map(|BundleField { ty, .. }| ty.size())
|
||||
.sum::<OpaqueSimValueSize>()
|
||||
);
|
||||
Self {
|
||||
fields: Interned::into_inner(fields).iter(),
|
||||
opaque,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn field_ty_and_opaque<T: Type>(&mut self) -> (T, OpaqueSimValueSlice<'a>) {
|
||||
let Some(&BundleField {
|
||||
name: _,
|
||||
flipped: _,
|
||||
ty,
|
||||
}) = self.fields.next()
|
||||
else {
|
||||
panic!("tried to read too many fields from BundleSimValueFromBits");
|
||||
};
|
||||
let (field_opaque, rest) = self.opaque.split_at(ty.size());
|
||||
self.opaque = rest;
|
||||
(T::from_canonical(ty), field_opaque)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field_from_opaque<T: Type>(&mut self) -> SimValue<T> {
|
||||
let (field_ty, field_opaque) = self.field_ty_and_opaque::<T>();
|
||||
SimValue::from_opaque(field_ty, field_opaque.to_owned())
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field_clone_from_opaque<T: Type>(&mut self, field_value: &mut SimValue<T>) {
|
||||
let (field_ty, field_opaque) = self.field_ty_and_opaque::<T>();
|
||||
assert_eq!(field_ty, SimValue::ty(field_value));
|
||||
SimValue::opaque_mut(field_value).clone_from_slice(field_opaque);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BundleSimValueToOpaque<'a> {
|
||||
fields: std::slice::Iter<'static, BundleField>,
|
||||
writer: OpaqueSimValueWriter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> BundleSimValueToOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: BundleType>(bundle_ty: T, writer: OpaqueSimValueWriter<'a>) -> Self {
|
||||
let fields = bundle_ty.fields();
|
||||
assert_eq!(
|
||||
writer.size(),
|
||||
fields
|
||||
.iter()
|
||||
.map(|BundleField { ty, .. }| ty.size())
|
||||
.sum::<OpaqueSimValueSize>()
|
||||
);
|
||||
Self {
|
||||
fields: Interned::into_inner(fields).iter(),
|
||||
writer,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field<T: Type>(&mut self, field_value: &SimValue<T>) {
|
||||
let Some(&BundleField {
|
||||
name: _,
|
||||
flipped: _,
|
||||
ty,
|
||||
}) = self.fields.next()
|
||||
else {
|
||||
panic!("tried to write too many fields with BundleSimValueToOpaque");
|
||||
};
|
||||
assert_eq!(T::from_canonical(ty), SimValue::ty(field_value));
|
||||
self.writer.fill_prefix_with(ty.size(), |writer| {
|
||||
writer.fill_cloned_from_slice(SimValue::opaque(field_value).as_slice())
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn finish(mut self) -> OpaqueSimValueWritten<'a> {
|
||||
assert_eq!(
|
||||
self.fields.next(),
|
||||
None,
|
||||
"wrote too few fields with BundleSimValueToOpaque"
|
||||
);
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct NoBuilder;
|
||||
|
||||
|
|
@@ -323,7 +453,19 @@ macro_rules! impl_tuple_builder_fields {
|
|||
}
|
||||
|
||||
macro_rules! impl_tuples {
|
||||
([$({#[num = $num:literal, field = $field:ident] $var:ident: $T:ident})*] []) => {
|
||||
(
|
||||
[$({
|
||||
#[
|
||||
num = $num:tt,
|
||||
field = $field:ident,
|
||||
ty = $ty_var:ident: $Ty:ident,
|
||||
lhs = $lhs_var:ident: $Lhs:ident,
|
||||
rhs = $rhs_var:ident: $Rhs:ident
|
||||
]
|
||||
$var:ident: $T:ident
|
||||
})*]
|
||||
[]
|
||||
) => {
|
||||
impl_tuple_builder_fields! {
|
||||
{}
|
||||
[$({
|
||||
|
|
@@ -335,6 +477,7 @@ macro_rules! impl_tuples {
|
|||
impl<$($T: Type,)*> Type for ($($T,)*) {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ($($T::MaskType,)*);
|
||||
type SimValue = ($(SimValue<$T>,)*);
|
||||
type MatchVariant = ($(Expr<$T>,)*);
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
|
|
@@ -373,13 +516,40 @@ macro_rules! impl_tuples {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueFromOpaque::new(*self, opaque);
|
||||
$(let $var = v.field_from_opaque();)*
|
||||
($($var,)*)
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueFromOpaque::new(*self, opaque);
|
||||
let ($($var,)*) = value;
|
||||
$(v.field_clone_from_opaque($var);)*
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueToOpaque::new(*self, writer);
|
||||
let ($($var,)*) = value;
|
||||
$(v.field($var);)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> BundleType for ($($T,)*) {
|
||||
type Builder = TupleBuilder<($(Unfilled<$T>,)*)>;
|
||||
type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>;
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let ($($var,)*) = self;
|
||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*][..].intern()
|
||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
|
||||
|
|
@@ -410,7 +580,7 @@ macro_rules! impl_tuples {
|
|||
$(let $var = $var.to_expr();)*
|
||||
let ty = ($(Expr::ty($var),)*);
|
||||
let field_values = [$(Expr::canonical($var)),*];
|
||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> {
|
||||
|
|
@@ -420,7 +590,107 @@ macro_rules! impl_tuples {
|
|||
let ($($var,)*) = self.0;
|
||||
let ty = ($(Expr::ty($var),)*);
|
||||
let field_values = [$(Expr::canonical($var)),*];
|
||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<CanonicalType> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
SimValue::into_canonical(ToSimValueWithType::<Bundle>::to_sim_value_with_type(self, Bundle::from_canonical(ty)))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType>
|
||||
{
|
||||
SimValue::into_canonical(ToSimValueWithType::<Bundle>::into_sim_value_with_type(self, Bundle::from_canonical(ty)))
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<Bundle> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
$(let $var = $var.to_sim_value_with_type($ty_var.ty);)*
|
||||
ToSimValueWithType::into_sim_value_with_type(($($var,)*), ty)
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: Bundle) -> SimValue<Bundle> {
|
||||
#![allow(unused_mut)]
|
||||
#![allow(clippy::unused_unit)]
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
let mut opaque = OpaqueSimValue::empty();
|
||||
$(let $var = $var.into_sim_value_with_type($ty_var.ty);
|
||||
assert_eq!(SimValue::ty(&$var), $ty_var.ty);
|
||||
opaque.extend_from_slice(SimValue::opaque(&$var).as_slice());
|
||||
)*
|
||||
SimValue::from_opaque(ty, opaque)
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<$Ty>, $Ty: Type,)*> ToSimValueWithType<($($Ty,)*)> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.to_sim_value_with_type($ty_var);)*
|
||||
SimValue::from_value(ty, ($($var,)*))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.into_sim_value_with_type($ty_var);)*
|
||||
SimValue::from_value(ty, ($($var,)*))
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValue,)*> ToSimValue for ($($T,)*) {
|
||||
type Type = ($($T::Type,)*);
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self) -> SimValue<Self::Type> {
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.to_sim_value();)*
|
||||
SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value(self) -> SimValue<Self::Type> {
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.to_sim_value();)*
|
||||
SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*))
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: Type + ExprPartialEq<$Rhs>, $Rhs: Type,)*> ExprPartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_eq($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_ne($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: SimValuePartialEq<$Rhs>, $Rhs: Type,)*> SimValuePartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn sim_value_eq(lhs: &SimValue<Self>, rhs: &SimValue<($($Rhs,)*)>) -> bool {
|
||||
let ($($lhs_var,)*) = &**lhs;
|
||||
let ($($rhs_var,)*) = &**rhs;
|
||||
let retval = true;
|
||||
$(let retval = retval && $lhs_var == $rhs_var;)*
|
||||
retval
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@@ -432,24 +702,25 @@ macro_rules! impl_tuples {
|
|||
|
||||
impl_tuples! {
|
||||
[] [
|
||||
{#[num = 0, field = field_0] v0: T0}
|
||||
{#[num = 1, field = field_1] v1: T1}
|
||||
{#[num = 2, field = field_2] v2: T2}
|
||||
{#[num = 3, field = field_3] v3: T3}
|
||||
{#[num = 4, field = field_4] v4: T4}
|
||||
{#[num = 5, field = field_5] v5: T5}
|
||||
{#[num = 6, field = field_6] v6: T6}
|
||||
{#[num = 7, field = field_7] v7: T7}
|
||||
{#[num = 8, field = field_8] v8: T8}
|
||||
{#[num = 9, field = field_9] v9: T9}
|
||||
{#[num = 10, field = field_10] v10: T10}
|
||||
{#[num = 11, field = field_11] v11: T11}
|
||||
{#[num = 0, field = field_0, ty = ty0: Ty0, lhs = lhs0: Lhs0, rhs = rhs0: Rhs0] v0: T0}
|
||||
{#[num = 1, field = field_1, ty = ty1: Ty1, lhs = lhs1: Lhs1, rhs = rhs1: Rhs1] v1: T1}
|
||||
{#[num = 2, field = field_2, ty = ty2: Ty2, lhs = lhs2: Lhs2, rhs = rhs2: Rhs2] v2: T2}
|
||||
{#[num = 3, field = field_3, ty = ty3: Ty3, lhs = lhs3: Lhs3, rhs = rhs3: Rhs3] v3: T3}
|
||||
{#[num = 4, field = field_4, ty = ty4: Ty4, lhs = lhs4: Lhs4, rhs = rhs4: Rhs4] v4: T4}
|
||||
{#[num = 5, field = field_5, ty = ty5: Ty5, lhs = lhs5: Lhs5, rhs = rhs5: Rhs5] v5: T5}
|
||||
{#[num = 6, field = field_6, ty = ty6: Ty6, lhs = lhs6: Lhs6, rhs = rhs6: Rhs6] v6: T6}
|
||||
{#[num = 7, field = field_7, ty = ty7: Ty7, lhs = lhs7: Lhs7, rhs = rhs7: Rhs7] v7: T7}
|
||||
{#[num = 8, field = field_8, ty = ty8: Ty8, lhs = lhs8: Lhs8, rhs = rhs8: Rhs8] v8: T8}
|
||||
{#[num = 9, field = field_9, ty = ty9: Ty9, lhs = lhs9: Lhs9, rhs = rhs9: Rhs9] v9: T9}
|
||||
{#[num = 10, field = field_10, ty = ty10: Ty10, lhs = lhs10: Lhs10, rhs = rhs10: Rhs10] v10: T10}
|
||||
{#[num = 11, field = field_11, ty = ty11: Ty11, lhs = lhs11: Lhs11, rhs = rhs11: Rhs11] v11: T11}
|
||||
]
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> Type for PhantomData<T> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ();
|
||||
type SimValue = PhantomData<T>;
|
||||
type MatchVariant = PhantomData<T>;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
|
|
@ -482,6 +753,24 @@ impl<T: ?Sized + Send + Sync + 'static> Type for PhantomData<T> {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert!(opaque.is_empty());
|
||||
*self
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
_value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert!(opaque.is_empty());
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
_value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PhantomDataBuilder<T: ?Sized + Send + Sync + 'static>(PhantomData<T>);
|
||||
|
|
@@ -528,3 +817,36 @@ impl<T: ?Sized + Send + Sync + 'static> ToExpr for PhantomData<T> {
|
|||
BundleLiteral::new(PhantomData, Interned::default()).to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValue for PhantomData<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self) -> SimValue<Self> {
|
||||
SimValue::from_value(*self, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValueWithType<Self> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Self) -> SimValue<Self> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValueWithType<Bundle> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
assert!(ty.fields().is_empty());
|
||||
SimValue::from_opaque(ty, OpaqueSimValue::empty())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValueWithType<CanonicalType> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, canonical_ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
let ty = Bundle::from_canonical(canonical_ty);
|
||||
assert!(ty.fields().is_empty());
|
||||
SimValue::from_opaque(canonical_ty, OpaqueSimValue::empty())
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,799 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
bundle::{Bundle, BundleType},
|
||||
firrtl::{self, ExportOptions},
|
||||
intern::Interned,
|
||||
module::Module,
|
||||
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
|
||||
};
|
||||
use clap::{
|
||||
builder::{OsStringValueParser, TypedValueParser},
|
||||
Parser, Subcommand, ValueEnum, ValueHint,
|
||||
};
|
||||
use eyre::{eyre, Report};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
error,
|
||||
ffi::OsString,
|
||||
fmt::{self, Write},
|
||||
fs, io, mem,
|
||||
path::{Path, PathBuf},
|
||||
process,
|
||||
};
|
||||
use tempfile::TempDir;
|
||||
|
||||
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
|
||||
|
||||
pub struct CliError(Report);
|
||||
|
||||
impl fmt::Debug for CliError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for CliError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl error::Error for CliError {}
|
||||
|
||||
impl From<io::Error> for CliError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
CliError(Report::new(value))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait RunPhase<Arg> {
|
||||
type Output;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
||||
self.run_with_job(arg, &mut AcquiredJob::acquire())
|
||||
}
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct BaseArgs {
|
||||
/// the directory to put the generated main output file and associated files in
|
||||
#[arg(short, long, value_hint = ValueHint::DirPath, required = true)]
|
||||
pub output: Option<PathBuf>,
|
||||
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
|
||||
#[arg(long)]
|
||||
pub file_stem: Option<String>,
|
||||
#[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")]
|
||||
pub keep_temp_dir: bool,
|
||||
#[arg(skip = false)]
|
||||
pub redirect_output_for_rust_test: bool,
|
||||
}
|
||||
|
||||
impl BaseArgs {
|
||||
fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option<TempDir>)> {
|
||||
let (dir_path, temp_dir) = match &self.output {
|
||||
Some(output) => (output.clone(), None),
|
||||
None => {
|
||||
let temp_dir = TempDir::new()?;
|
||||
if self.keep_temp_dir {
|
||||
let temp_dir = temp_dir.into_path();
|
||||
println!("created temporary directory: {}", temp_dir.display());
|
||||
(temp_dir, None)
|
||||
} else {
|
||||
(temp_dir.path().to_path_buf(), Some(temp_dir))
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok((
|
||||
firrtl::FileBackend {
|
||||
dir_path,
|
||||
top_fir_file_stem: self.file_stem.clone(),
|
||||
circuit_name: None,
|
||||
},
|
||||
temp_dir,
|
||||
))
|
||||
}
|
||||
/// handles possibly redirecting the command's output for Rust tests
|
||||
pub fn run_external_command(
|
||||
&self,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
mut command: process::Command,
|
||||
mut captured_output: Option<&mut String>,
|
||||
) -> io::Result<process::ExitStatus> {
|
||||
if self.redirect_output_for_rust_test || captured_output.is_some() {
|
||||
let (reader, writer) = os_pipe::pipe()?;
|
||||
let mut reader = io::BufReader::new(reader);
|
||||
command.stderr(writer.try_clone()?);
|
||||
command.stdout(writer); // must not leave writer around after spawning child
|
||||
command.stdin(process::Stdio::null());
|
||||
let mut child = command.spawn()?;
|
||||
drop(command); // close writers
|
||||
Ok(loop {
|
||||
let status = child.try_wait()?;
|
||||
streaming_read_utf8(&mut reader, |s| {
|
||||
if let Some(captured_output) = captured_output.as_deref_mut() {
|
||||
captured_output.push_str(s);
|
||||
}
|
||||
// use print! so output goes to Rust test output capture
|
||||
print!("{s}");
|
||||
io::Result::Ok(())
|
||||
})?;
|
||||
if let Some(status) = status {
|
||||
break status;
|
||||
}
|
||||
})
|
||||
} else {
|
||||
command.status()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub base: BaseArgs,
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlOutput {
|
||||
pub file_stem: String,
|
||||
pub top_module: String,
|
||||
pub output_dir: PathBuf,
|
||||
pub temp_dir: Option<TempDir>,
|
||||
}
|
||||
|
||||
impl FirrtlOutput {
|
||||
pub fn file_with_ext(&self, ext: &str) -> PathBuf {
|
||||
let mut retval = self.output_dir.join(&self.file_stem);
|
||||
retval.set_extension(ext);
|
||||
retval
|
||||
}
|
||||
pub fn firrtl_file(&self) -> PathBuf {
|
||||
self.file_with_ext("fir")
|
||||
}
|
||||
}
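Editor's note: a standalone illustration of the path derivation in `file_with_ext`, using plain `std::path` rather than the crate type (which is `#[non_exhaustive]`); the directory and stem are made up:

```rust
use std::path::PathBuf;

fn main() {
    // join the stem onto the output directory, then swap in the extension
    let output_dir = PathBuf::from("build");
    let file_stem = "top";
    let mut path = output_dir.join(file_stem);
    path.set_extension("fir");
    assert_eq!(path, PathBuf::from("build/top.fir")); // what `firrtl_file()` returns
    path.set_extension("v");
    assert_eq!(path, PathBuf::from("build/top.v")); // the main Verilog output
}
```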
|
||||
|
||||
impl FirrtlArgs {
|
||||
fn run_impl(
|
||||
&self,
|
||||
top_module: Module<Bundle>,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> Result<FirrtlOutput> {
|
||||
let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?;
|
||||
let firrtl::FileBackend {
|
||||
top_fir_file_stem,
|
||||
circuit_name,
|
||||
dir_path,
|
||||
} = firrtl::export(file_backend, &top_module, self.export_options)?;
|
||||
Ok(FirrtlOutput {
|
||||
file_stem: top_fir_file_stem.expect(
|
||||
"export is known to set the file stem from the circuit name if not provided",
|
||||
),
|
||||
top_module: circuit_name.expect("export is known to set the circuit name"),
|
||||
output_dir: dir_path,
|
||||
temp_dir,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run_with_job(
|
||||
&self,
|
||||
top_module: Module<T>,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<Self::Output> {
|
||||
self.run_impl(top_module.canonical(), acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run_with_job(
|
||||
&self,
|
||||
top_module: Interned<Module<T>>,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<Self::Output> {
|
||||
self.run_with_job(*top_module, acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
/// based on [LLVM CIRCT's recommended lowering options
|
||||
/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target)
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum VerilogDialect {
|
||||
Questa,
|
||||
Spyglass,
|
||||
Verilator,
|
||||
Vivado,
|
||||
Yosys,
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
VerilogDialect::Questa => "questa",
|
||||
VerilogDialect::Spyglass => "spyglass",
|
||||
VerilogDialect::Verilator => "verilator",
|
||||
VerilogDialect::Vivado => "vivado",
|
||||
VerilogDialect::Yosys => "yosys",
|
||||
}
|
||||
}
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
VerilogDialect::Spyglass => {
|
||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
||||
}
|
||||
VerilogDialect::Verilator => &[
|
||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
||||
],
|
||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
||||
VerilogDialect::Yosys => {
|
||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
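Editor's note: a small sketch of how a selected dialect ends up on the `firtool` command line; `run_impl` further down does the real work, and this helper is hypothetical:

```rust
use std::process::Command;

// Hypothetical helper: build a firtool command with the dialect-specific lowering options.
fn firtool_command_for(dialect: Option<VerilogDialect>) -> Command {
    let mut cmd = Command::new("firtool");
    if let Some(dialect) = dialect {
        // e.g. Yosys adds --lowering-options=disallowLocalVariables,disallowPackedArrays
        cmd.args(dialect.firtool_extra_args());
    }
    cmd
}
```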
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogArgs {
|
||||
#[command(flatten)]
|
||||
pub firrtl: FirrtlArgs,
|
||||
#[arg(
|
||||
long,
|
||||
default_value = "firtool",
|
||||
env = "FIRTOOL",
|
||||
value_hint = ValueHint::CommandName,
|
||||
value_parser = OsStringValueParser::new().try_map(which::which)
|
||||
)]
|
||||
pub firtool: PathBuf,
|
||||
#[arg(long)]
|
||||
pub firtool_extra_args: Vec<OsString>,
|
||||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
#[arg(long, short = 'g')]
|
||||
pub debug: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogOutput {
|
||||
pub firrtl: FirrtlOutput,
|
||||
pub verilog_files: Vec<PathBuf>,
|
||||
pub contents_hash: Option<blake3::Hash>,
|
||||
}
|
||||
|
||||
impl VerilogOutput {
|
||||
pub fn main_verilog_file(&self) -> PathBuf {
|
||||
self.firrtl.file_with_ext("v")
|
||||
}
|
||||
fn unadjusted_verilog_file(&self) -> PathBuf {
|
||||
self.firrtl.file_with_ext("unadjusted.v")
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogArgs {
|
||||
fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result<VerilogOutput> {
|
||||
let input = fs::read_to_string(output.unadjusted_verilog_file())?;
|
||||
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
|
||||
let file_separator_suffix = "\" ----- 8< -----\n\n";
|
||||
let mut input = &*input;
|
||||
output.contents_hash = Some(blake3::hash(input.as_bytes()));
|
||||
let main_verilog_file = output.main_verilog_file();
|
||||
let mut file_name: Option<&Path> = Some(&main_verilog_file);
|
||||
loop {
|
||||
let (chunk, next_file_name) = if let Some((chunk, rest)) =
|
||||
input.split_once(file_separator_prefix)
|
||||
{
|
||||
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
|
||||
return Err(CliError(eyre!("parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}")));
|
||||
};
|
||||
input = rest;
|
||||
(chunk, Some(next_file_name.as_ref()))
|
||||
} else {
|
||||
(mem::take(&mut input), None)
|
||||
};
|
||||
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
|
||||
break;
|
||||
};
|
||||
let file_name = output.firrtl.output_dir.join(file_name);
|
||||
fs::write(&file_name, chunk)?;
|
||||
if let Some(extension) = file_name.extension() {
|
||||
if extension == "v" || extension == "sv" {
|
||||
output.verilog_files.push(file_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
fn run_impl(
|
||||
&self,
|
||||
firrtl_output: FirrtlOutput,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<VerilogOutput> {
|
||||
let Self {
|
||||
firrtl,
|
||||
firtool,
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
debug,
|
||||
} = self;
|
||||
let output = VerilogOutput {
|
||||
firrtl: firrtl_output,
|
||||
verilog_files: vec![],
|
||||
contents_hash: None,
|
||||
};
|
||||
let mut cmd = process::Command::new(firtool);
|
||||
cmd.arg(output.firrtl.firrtl_file());
|
||||
cmd.arg("-o");
|
||||
cmd.arg(output.unadjusted_verilog_file());
|
||||
if *debug {
|
||||
cmd.arg("-g");
|
||||
cmd.arg("--preserve-values=all");
|
||||
}
|
||||
if let Some(dialect) = verilog_dialect {
|
||||
cmd.args(dialect.firtool_extra_args());
|
||||
}
|
||||
cmd.args(firtool_extra_args);
|
||||
cmd.current_dir(&output.firrtl.output_dir);
|
||||
let status = firrtl.base.run_external_command(acquired_job, cmd, None)?;
|
||||
if status.success() {
|
||||
self.process_unadjusted_verilog_file(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
self.firtool.display()
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
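Editor's note: the separator handling in `process_unadjusted_verilog_file` is easier to follow in isolation. A standalone sketch of the same splitting over firtool's `----- 8< ----- FILE` markers (the function name is made up; `None` stands for the main output file):

```rust
fn split_firtool_output(mut input: &str) -> Vec<(Option<&str>, &str)> {
    const PREFIX: &str = "\n// ----- 8< ----- FILE \"";
    const SUFFIX: &str = "\" ----- 8< -----\n\n";
    let mut chunks = Vec::new();
    let mut name = None; // everything before the first separator belongs to the main file
    loop {
        match input.split_once(PREFIX) {
            Some((chunk, rest)) => {
                let (next_name, rest) = rest.split_once(SUFFIX).expect("unterminated separator");
                chunks.push((name, chunk));
                name = Some(next_name);
                input = rest;
            }
            None => {
                chunks.push((name, input));
                return chunks;
            }
        }
    }
}
```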
|
||||
|
||||
impl<Arg> RunPhase<Arg> for VerilogArgs
|
||||
where
|
||||
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = VerilogOutput;
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?;
|
||||
self.run_impl(firrtl_output, acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalMode {
|
||||
#[default]
|
||||
BMC,
|
||||
Prove,
|
||||
Live,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalMode {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
FormalMode::BMC => "bmc",
|
||||
FormalMode::Prove => "prove",
|
||||
FormalMode::Live => "live",
|
||||
FormalMode::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FormalMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct FormalAdjustArgs;
|
||||
|
||||
impl clap::FromArgMatches for FormalAdjustArgs {
|
||||
fn from_arg_matches(_matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
|
||||
Ok(Self)
|
||||
}
|
||||
|
||||
fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl clap::Args for FormalAdjustArgs {
|
||||
fn augment_args(cmd: clap::Command) -> clap::Command {
|
||||
cmd.mut_arg("output", |arg| arg.required(false))
|
||||
.mut_arg("verilog_dialect", |arg| {
|
||||
arg.default_value(VerilogDialect::Yosys.to_string())
|
||||
.hide(true)
|
||||
})
|
||||
}
|
||||
|
||||
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
|
||||
Self::augment_args(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalArgs {
|
||||
#[command(flatten)]
|
||||
pub verilog: VerilogArgs,
|
||||
#[arg(
|
||||
long,
|
||||
default_value = "sby",
|
||||
env = "SBY",
|
||||
value_hint = ValueHint::CommandName,
|
||||
value_parser = OsStringValueParser::new().try_map(which::which)
|
||||
)]
|
||||
pub sby: PathBuf,
|
||||
#[arg(long)]
|
||||
pub sby_extra_args: Vec<String>,
|
||||
#[arg(long, default_value_t)]
|
||||
pub mode: FormalMode,
|
||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
||||
pub depth: u64,
|
||||
#[arg(long, default_value = "z3")]
|
||||
pub solver: String,
|
||||
#[arg(long)]
|
||||
pub smtbmc_extra_args: Vec<String>,
|
||||
#[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")]
|
||||
pub cache_results: bool,
|
||||
#[command(flatten)]
|
||||
_formal_adjust_args: FormalAdjustArgs,
|
||||
}
|
||||
|
||||
impl fmt::Debug for FormalArgs {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
verilog,
|
||||
sby,
|
||||
sby_extra_args,
|
||||
mode,
|
||||
depth,
|
||||
solver,
|
||||
smtbmc_extra_args,
|
||||
cache_results,
|
||||
_formal_adjust_args: _,
|
||||
} = self;
|
||||
f.debug_struct("FormalArgs")
|
||||
.field("verilog", verilog)
|
||||
.field("sby", sby)
|
||||
.field("sby_extra_args", sby_extra_args)
|
||||
.field("mode", mode)
|
||||
.field("depth", depth)
|
||||
.field("solver", solver)
|
||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
||||
.field("cache_results", cache_results)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
pub const DEFAULT_DEPTH: u64 = 20;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalOutput {
|
||||
pub verilog: VerilogOutput,
|
||||
}
|
||||
|
||||
impl FormalOutput {
|
||||
pub fn sby_file(&self) -> PathBuf {
|
||||
self.verilog.firrtl.file_with_ext("sby")
|
||||
}
|
||||
pub fn cache_file(&self) -> PathBuf {
|
||||
self.verilog.firrtl.file_with_ext("cache.json")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalCacheOutput {}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalCacheVersion {
|
||||
V1,
|
||||
}
|
||||
|
||||
impl FormalCacheVersion {
|
||||
pub const CURRENT: Self = Self::V1;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalCache {
|
||||
pub version: FormalCacheVersion,
|
||||
pub contents_hash: blake3::Hash,
|
||||
pub stdout_stderr: String,
|
||||
pub result: Result<FormalCacheOutput, String>,
|
||||
}
|
||||
|
||||
impl FormalCache {
|
||||
pub fn new(
|
||||
version: FormalCacheVersion,
|
||||
contents_hash: blake3::Hash,
|
||||
stdout_stderr: String,
|
||||
result: Result<FormalCacheOutput, String>,
|
||||
) -> Self {
|
||||
Self {
|
||||
version,
|
||||
contents_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}
|
||||
}
|
||||
}
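Editor's note: a sketch of the read side of this cache, mirroring what `run_impl` below does before re-running sby; the helper is illustrative, not part of the crate:

```rust
use std::path::Path;

// Reuse a cached result only if it was produced from identical inputs.
fn load_cached(cache_file: &Path, expected_hash: blake3::Hash) -> Option<FormalCache> {
    let bytes = std::fs::read(cache_file).ok()?;
    let cache: FormalCache = serde_json::from_slice(&bytes).ok()?;
    (cache.version == FormalCacheVersion::CURRENT && cache.contents_hash == expected_hash)
        .then_some(cache)
}
```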
|
||||
|
||||
impl FormalArgs {
|
||||
fn sby_contents(&self, output: &FormalOutput) -> Result<String> {
|
||||
let Self {
|
||||
verilog: _,
|
||||
sby: _,
|
||||
sby_extra_args: _,
|
||||
mode,
|
||||
depth,
|
||||
smtbmc_extra_args,
|
||||
solver,
|
||||
cache_results: _,
|
||||
_formal_adjust_args: _,
|
||||
} = self;
|
||||
let smtbmc_options = smtbmc_extra_args.join(" ");
|
||||
let top_module = &output.verilog.firrtl.top_module;
|
||||
let mut retval = format!(
|
||||
"[options]\n\
|
||||
mode {mode}\n\
|
||||
depth {depth}\n\
|
||||
wait on\n\
|
||||
\n\
|
||||
[engines]\n\
|
||||
smtbmc {solver} -- -- {smtbmc_options}\n\
|
||||
\n\
|
||||
[script]\n"
|
||||
);
|
||||
for verilog_file in &output.verilog.verilog_files {
|
||||
let verilog_file = verilog_file
|
||||
.to_str()
|
||||
.ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?;
|
||||
if verilog_file.contains(|ch: char| {
|
||||
(ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
|
||||
}) {
|
||||
return Err(CliError(eyre!(
|
||||
"verilog file path contains characters that aren't permitted"
|
||||
)));
|
||||
}
|
||||
writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap();
|
||||
}
|
||||
// workaround for wires disappearing -- set `keep` on all wires
|
||||
writeln!(retval, "hierarchy -top {top_module}").unwrap();
|
||||
writeln!(retval, "proc").unwrap();
|
||||
writeln!(retval, "setattr -set keep 1 w:\\*").unwrap();
|
||||
writeln!(retval, "prep").unwrap();
|
||||
Ok(retval)
|
||||
}
|
||||
fn run_impl(
|
||||
&self,
|
||||
verilog_output: VerilogOutput,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<FormalOutput> {
|
||||
let output = FormalOutput {
|
||||
verilog: verilog_output,
|
||||
};
|
||||
let sby_file = output.sby_file();
|
||||
let sby_contents = self.sby_contents(&output)?;
|
||||
let contents_hash = output.verilog.contents_hash.map(|verilog_hash| {
|
||||
let mut hasher = blake3::Hasher::new();
|
||||
hasher.update(verilog_hash.as_bytes());
|
||||
hasher.update(sby_contents.as_bytes());
|
||||
hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes());
|
||||
for sby_extra_arg in self.sby_extra_args.iter() {
|
||||
hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes());
|
||||
hasher.update(sby_extra_arg.as_bytes());
|
||||
}
|
||||
hasher.finalize()
|
||||
});
|
||||
std::fs::write(&sby_file, sby_contents)?;
|
||||
let mut cmd = process::Command::new(&self.sby);
|
||||
cmd.arg("-j1"); // sby seems not to respect job count in parallel mode
|
||||
cmd.arg("-f");
|
||||
cmd.arg(sby_file.file_name().unwrap());
|
||||
cmd.args(&self.sby_extra_args);
|
||||
cmd.current_dir(&output.verilog.firrtl.output_dir);
|
||||
let mut captured_output = String::new();
|
||||
let cache_file = output.cache_file();
|
||||
let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) {
|
||||
if let Some(FormalCache {
|
||||
version: FormalCacheVersion::CURRENT,
|
||||
contents_hash: cache_contents_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}) = fs::read(&cache_file)
|
||||
.ok()
|
||||
.and_then(|v| serde_json::from_slice(&v).ok())
|
||||
{
|
||||
if cache_contents_hash == contents_hash {
|
||||
println!("Using cached formal result:\n{stdout_stderr}");
|
||||
return match result {
|
||||
Ok(FormalCacheOutput {}) => Ok(output),
|
||||
Err(error) => Err(CliError(eyre::Report::msg(error))),
|
||||
};
|
||||
}
|
||||
}
|
||||
true
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
let status = self.verilog.firrtl.base.run_external_command(
|
||||
acquired_job,
|
||||
cmd,
|
||||
do_cache.then_some(&mut captured_output),
|
||||
)?;
|
||||
let result = if status.success() {
|
||||
Ok(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
self.sby.display()
|
||||
)))
|
||||
};
|
||||
if do_cache {
|
||||
fs::write(
|
||||
cache_file,
|
||||
serde_json::to_string_pretty(&FormalCache {
|
||||
version: FormalCacheVersion::CURRENT,
|
||||
contents_hash: contents_hash.unwrap(),
|
||||
stdout_stderr: captured_output,
|
||||
result: match &result {
|
||||
Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}),
|
||||
Err(error) => Err(error.to_string()),
|
||||
},
|
||||
})
|
||||
.expect("serialization shouldn't ever fail"),
|
||||
)?;
|
||||
}
|
||||
result
|
||||
}
|
||||
}
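Editor's note: for orientation, roughly what `sby_contents` produces with the default options (`bmc`, depth 20, `z3`, no extra smtbmc args) and a single generated Verilog file; the file and module names here are illustrative:

```rust
// Approximate .sby output for the defaults; real runs may list several read_verilog lines.
const EXAMPLE_SBY: &str = "\
[options]
mode bmc
depth 20
wait on

[engines]
smtbmc z3 -- --

[script]
read_verilog -sv -formal \"top.v\"
hierarchy -top top
proc
setattr -set keep 1 w:\\*
prep
";
```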
|
||||
|
||||
impl<Arg> RunPhase<Arg> for FormalArgs
|
||||
where
|
||||
VerilogArgs: RunPhase<Arg, Output = VerilogOutput>,
|
||||
{
|
||||
type Output = FormalOutput;
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
let verilog_output = self.verilog.run_with_job(arg, acquired_job)?;
|
||||
self.run_impl(verilog_output, acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Subcommand, Debug)]
|
||||
enum CliCommand {
|
||||
/// Generate FIRRTL
|
||||
Firrtl(FirrtlArgs),
|
||||
/// Generate Verilog
|
||||
Verilog(VerilogArgs),
|
||||
/// Run a formal proof
|
||||
Formal(FormalArgs),
|
||||
}
|
||||
|
||||
/// a simple CLI
|
||||
///
|
||||
/// Use like:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl_module]
|
||||
/// # fn my_module() {}
|
||||
/// use fayalite::cli;
|
||||
///
|
||||
/// fn main() -> cli::Result {
|
||||
/// cli::Cli::parse().run(my_module())
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// You can also use it with a larger [`clap`]-based CLI like so:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl_module]
|
||||
/// # fn my_module() {}
|
||||
/// use clap::{Subcommand, Parser};
|
||||
/// use fayalite::cli;
|
||||
///
|
||||
/// #[derive(Subcommand)]
|
||||
/// pub enum Cmd {
|
||||
/// #[command(flatten)]
|
||||
/// Fayalite(cli::Cli),
|
||||
/// MySpecialCommand {
|
||||
/// #[arg(long)]
|
||||
/// foo: bool,
|
||||
/// },
|
||||
/// }
|
||||
///
|
||||
/// #[derive(Parser)]
|
||||
/// pub struct Cli {
|
||||
/// #[command(subcommand)]
|
||||
/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands
|
||||
/// }
|
||||
///
|
||||
/// fn main() -> cli::Result {
|
||||
/// match Cli::parse().cmd {
|
||||
/// Cmd::Fayalite(v) => v.run(my_module())?,
|
||||
/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"),
|
||||
/// }
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Parser, Debug)]
|
||||
// clear things that would be crate-specific
|
||||
#[command(name = "Fayalite Simple CLI", about = None, long_about = None)]
|
||||
pub struct Cli {
|
||||
#[command(subcommand)]
|
||||
subcommand: CliCommand,
|
||||
}
|
||||
|
||||
impl clap::Subcommand for Cli {
|
||||
fn augment_subcommands(cmd: clap::Command) -> clap::Command {
|
||||
CliCommand::augment_subcommands(cmd)
|
||||
}
|
||||
|
||||
fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command {
|
||||
CliCommand::augment_subcommands_for_update(cmd)
|
||||
}
|
||||
|
||||
fn has_subcommand(name: &str) -> bool {
|
||||
CliCommand::has_subcommand(name)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> RunPhase<T> for Cli
|
||||
where
|
||||
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = ();
|
||||
fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
match &self.subcommand {
|
||||
CliCommand::Firrtl(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
CliCommand::Verilog(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
CliCommand::Formal(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Cli {
|
||||
/// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`]
|
||||
pub fn parse() -> Self {
|
||||
clap::Parser::parse()
|
||||
}
|
||||
/// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`]
|
||||
pub fn run<T>(&self, top_module: T) -> Result<()>
|
||||
where
|
||||
Self: RunPhase<T, Output = ()>,
|
||||
{
|
||||
RunPhase::run(self, top_module)
|
||||
}
|
||||
}
|
||||
|
|
@@ -4,10 +4,14 @@ use crate::{
|
|||
expr::{Expr, ToExpr},
|
||||
hdl,
|
||||
int::Bool,
|
||||
reset::Reset,
|
||||
reset::{Reset, ResetType},
|
||||
source_location::SourceLocation,
|
||||
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
};
|
||||
use bitvec::{bits, order::Lsb0};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
|
||||
pub struct Clock;
|
||||
|
|
@@ -15,6 +19,7 @@ pub struct Clock;
|
|||
impl Type for Clock {
|
||||
type BaseType = Clock;
|
||||
type MaskType = Bool;
|
||||
type SimValue = bool;
|
||||
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
|
|
@@ -36,6 +41,31 @@ impl Type for Clock {
|
|||
};
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
opaque.bits()[0]
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
*value = opaque.bits()[0];
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
[bits![0], bits![1]][*value as usize],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl Clock {
|
||||
|
|
@@ -55,6 +85,7 @@ impl StaticType for Clock {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 1,
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
|
@@ -88,9 +119,9 @@ impl ToClock for Expr<Clock> {
|
|||
}
|
||||
|
||||
#[hdl]
|
||||
pub struct ClockDomain {
|
||||
pub struct ClockDomain<R: ResetType = Reset> {
|
||||
pub clk: Clock,
|
||||
pub rst: Reset,
|
||||
pub rst: R,
|
||||
}
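Editor's note: with the new `R: ResetType` parameter (defaulting to `Reset`), existing code keeps compiling while new code can pin down the reset flavor at the type level. A minimal sketch, assuming `SyncReset` and `AsyncReset` are imported from the `reset` module:

```rust
// the default parameter keeps the old spelling valid
type DynResetDomain = ClockDomain;               // same as ClockDomain<Reset>
type SyncResetDomain = ClockDomain<SyncReset>;   // rst is a SyncReset
type AsyncResetDomain = ClockDomain<AsyncReset>; // rst is an AsyncReset
```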
|
||||
|
||||
impl ToClock for bool {
|
||||
|
|
|
|||
|
|
@@ -2,21 +2,31 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{ops::VariantAccess, Expr, ToExpr},
|
||||
expr::{
|
||||
Expr, ToExpr,
|
||||
ops::{ExprPartialEq, VariantAccess},
|
||||
},
|
||||
hdl,
|
||||
int::Bool,
|
||||
int::{Bool, UIntValue},
|
||||
intern::{Intern, Interned},
|
||||
module::{
|
||||
connect, enum_match_variants_helper, incomplete_wire, wire,
|
||||
EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope,
|
||||
EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope, connect,
|
||||
enum_match_variants_helper, incomplete_wire, wire,
|
||||
},
|
||||
sim::value::{SimValue, SimValuePartialEq},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, MatchVariantAndInactiveScope, StaticType, Type, TypeProperties},
|
||||
ty::{
|
||||
CanonicalType, MatchVariantAndInactiveScope, OpaqueSimValue, OpaqueSimValueSize,
|
||||
OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type,
|
||||
TypeProperties,
|
||||
},
|
||||
util::HashMap,
|
||||
};
|
||||
use hashbrown::HashMap;
|
||||
use std::{convert::Infallible, fmt, iter::FusedIterator};
|
||||
use bitvec::{order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{convert::Infallible, fmt, iter::FusedIterator, sync::Arc};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
pub struct EnumVariant {
|
||||
pub name: Interned<str>,
|
||||
pub ty: Option<CanonicalType>,
|
||||
|
|
@@ -111,6 +121,7 @@ impl EnumTypePropertiesBuilder {
|
|||
is_storable: true,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
},
|
||||
variant_count: 0,
|
||||
}
|
||||
|
|
@@ -129,9 +140,14 @@ impl EnumTypePropertiesBuilder {
|
|||
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}) = field_props
|
||||
{
|
||||
assert!(is_passive, "variant type must be a passive type");
|
||||
assert!(
|
||||
sim_only_values_len == 0,
|
||||
"can't have `SimOnlyValue`s in an Enum"
|
||||
);
|
||||
type_properties = TypeProperties {
|
||||
is_passive: true,
|
||||
is_storable: type_properties.is_storable & is_storable,
|
||||
|
|
@@ -142,6 +158,7 @@ impl EnumTypePropertiesBuilder {
|
|||
} else {
|
||||
type_properties.bit_width
|
||||
},
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
}
|
||||
Self {
|
||||
|
|
@@ -149,6 +166,12 @@ impl EnumTypePropertiesBuilder {
|
|||
variant_count: variant_count + 1,
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn variants(self, variants: impl IntoIterator<Item = EnumVariant>) -> Self {
|
||||
variants.into_iter().fold(self, |this, variant| {
|
||||
this.variant(variant.ty.map(CanonicalType::type_properties))
|
||||
})
|
||||
}
|
||||
pub const fn finish(self) -> TypeProperties {
|
||||
assert!(
|
||||
self.variant_count != 0,
|
||||
|
|
@@ -178,7 +201,8 @@ impl Default for EnumTypePropertiesBuilder {
|
|||
impl Enum {
|
||||
#[track_caller]
|
||||
pub fn new(variants: Interned<[EnumVariant]>) -> Self {
|
||||
let mut name_indexes = HashMap::with_capacity(variants.len());
|
||||
let mut name_indexes =
|
||||
HashMap::with_capacity_and_hasher(variants.len(), Default::default());
|
||||
let mut type_props_builder = EnumTypePropertiesBuilder::new();
|
||||
for (index, EnumVariant { name, ty }) in variants.iter().enumerate() {
|
||||
if let Some(old_index) = name_indexes.insert(*name, index) {
|
||||
|
|
@@ -238,13 +262,14 @@ impl Enum {
|
|||
|
||||
pub trait EnumType:
|
||||
Type<
|
||||
BaseType = Enum,
|
||||
MaskType = Bool,
|
||||
MatchActiveScope = Scope,
|
||||
MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>,
|
||||
MatchVariantsIter = EnumMatchVariantsIter<Self>,
|
||||
>
|
||||
BaseType = Enum,
|
||||
MaskType = Bool,
|
||||
MatchActiveScope = Scope,
|
||||
MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>,
|
||||
MatchVariantsIter = EnumMatchVariantsIter<Self>,
|
||||
>
|
||||
{
|
||||
type SimBuilder: From<Self>;
|
||||
fn variants(&self) -> Interned<[EnumVariant]>;
|
||||
fn match_activate_scope(
|
||||
v: Self::MatchVariantAndInactiveScope,
|
||||
|
|
@@ -307,7 +332,18 @@ impl<T: EnumType> DoubleEndedIterator for EnumMatchVariantsIter<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct NoBuilder {
|
||||
_ty: Enum,
|
||||
}
|
||||
|
||||
impl From<Enum> for NoBuilder {
|
||||
fn from(_ty: Enum) -> Self {
|
||||
Self { _ty }
|
||||
}
|
||||
}
|
||||
|
||||
impl EnumType for Enum {
|
||||
type SimBuilder = NoBuilder;
|
||||
fn match_activate_scope(
|
||||
v: Self::MatchVariantAndInactiveScope,
|
||||
) -> (Self::MatchVariant, Self::MatchActiveScope) {
|
||||
|
|
@@ -322,6 +358,7 @@ impl EnumType for Enum {
|
|||
impl Type for Enum {
|
||||
type BaseType = Enum;
|
||||
type MaskType = Bool;
|
||||
type SimValue = OpaqueSimValue;
|
||||
type MatchVariant = Option<Expr<CanonicalType>>;
|
||||
type MatchActiveScope = Scope;
|
||||
type MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>;
|
||||
|
|
@@ -352,6 +389,341 @@ impl Type for Enum {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct EnumPaddingSimValue {
|
||||
bits: Option<UIntValue>,
|
||||
}
|
||||
|
||||
impl EnumPaddingSimValue {
|
||||
pub const fn new() -> Self {
|
||||
Self { bits: None }
|
||||
}
|
||||
pub fn bit_width(&self) -> Option<usize> {
|
||||
self.bits.as_ref().map(UIntValue::width)
|
||||
}
|
||||
pub fn bits(&self) -> &Option<UIntValue> {
|
||||
&self.bits
|
||||
}
|
||||
pub fn bits_mut(&mut self) -> &mut Option<UIntValue> {
|
||||
&mut self.bits
|
||||
}
|
||||
pub fn into_bits(self) -> Option<UIntValue> {
|
||||
self.bits
|
||||
}
|
||||
pub fn from_bits(bits: Option<UIntValue>) -> Self {
|
||||
Self { bits }
|
||||
}
|
||||
pub fn from_bitslice(v: &BitSlice) -> Self {
|
||||
Self {
|
||||
bits: Some(UIntValue::new(Arc::new(v.to_bitvec()))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct UnknownVariantSimValue {
|
||||
discriminant: usize,
|
||||
body_bits: UIntValue,
|
||||
}
|
||||
|
||||
impl UnknownVariantSimValue {
|
||||
pub fn discriminant(&self) -> usize {
|
||||
self.discriminant
|
||||
}
|
||||
pub fn body_bits(&self) -> &UIntValue {
|
||||
&self.body_bits
|
||||
}
|
||||
pub fn body_bits_mut(&mut self) -> &mut UIntValue {
|
||||
&mut self.body_bits
|
||||
}
|
||||
pub fn into_body_bits(self) -> UIntValue {
|
||||
self.body_bits
|
||||
}
|
||||
pub fn into_parts(self) -> (usize, UIntValue) {
|
||||
(self.discriminant, self.body_bits)
|
||||
}
|
||||
pub fn new(discriminant: usize, body_bits: UIntValue) -> Self {
|
||||
Self {
|
||||
discriminant,
|
||||
body_bits,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EnumSimValueFromOpaque<'a> {
|
||||
variants: Interned<[EnumVariant]>,
|
||||
discriminant: usize,
|
||||
body_bits: &'a BitSlice,
|
||||
}
|
||||
|
||||
impl<'a> EnumSimValueFromOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: EnumType>(ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self {
|
||||
let variants = ty.variants();
|
||||
let size = EnumTypePropertiesBuilder::new()
|
||||
.variants(variants)
|
||||
.finish()
|
||||
.size();
|
||||
assert!(size.only_bit_width().is_some());
|
||||
assert_eq!(size, opaque.size());
|
||||
let (discriminant_bits, body_bits) = opaque
|
||||
.bits()
|
||||
.split_at(discriminant_bit_width_impl(variants.len()));
|
||||
let mut discriminant = 0usize;
|
||||
discriminant.view_bits_mut::<Lsb0>()[..discriminant_bits.len()]
|
||||
.copy_from_bitslice(discriminant_bits);
|
||||
Self {
|
||||
variants,
|
||||
discriminant,
|
||||
body_bits,
|
||||
}
|
||||
}
|
||||
pub fn discriminant(&self) -> usize {
|
||||
self.discriminant
|
||||
}
|
||||
#[track_caller]
|
||||
#[cold]
|
||||
fn usage_error(&self, clone: bool) -> ! {
|
||||
let clone = if clone { "clone_" } else { "" };
|
||||
match self.variants.get(self.discriminant) {
|
||||
None => {
|
||||
panic!("should have called EnumSimValueFromBits::unknown_variant_{clone}from_bits");
|
||||
}
|
||||
Some(EnumVariant { ty: None, .. }) => {
|
||||
panic!(
|
||||
"should have called EnumSimValueFromBits::variant_no_field_{clone}from_bits"
|
||||
);
|
||||
}
|
||||
Some(EnumVariant { ty: Some(_), .. }) => {
|
||||
panic!(
|
||||
"should have called EnumSimValueFromBits::variant_with_field_{clone}from_bits"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn known_variant(&self, clone: bool) -> (Option<CanonicalType>, &'a BitSlice, &'a BitSlice) {
|
||||
let Some(EnumVariant { ty, .. }) = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(clone);
|
||||
};
|
||||
let variant_bit_width = ty.map_or(0, CanonicalType::bit_width);
|
||||
let (variant_bits, padding_bits) = self.body_bits.split_at(variant_bit_width);
|
||||
(*ty, variant_bits, padding_bits)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_from_opaque(self) -> UnknownVariantSimValue {
|
||||
let None = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
UnknownVariantSimValue::new(
|
||||
self.discriminant,
|
||||
UIntValue::new(Arc::new(self.body_bits.to_bitvec())),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_clone_from_opaque(self, value: &mut UnknownVariantSimValue) {
|
||||
let None = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
value.discriminant = self.discriminant;
|
||||
assert_eq!(value.body_bits.width(), self.body_bits.len());
|
||||
value
|
||||
.body_bits
|
||||
.bits_mut()
|
||||
.copy_from_bitslice(self.body_bits);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_from_opaque(self) -> EnumPaddingSimValue {
|
||||
let (None, _variant_bits, padding_bits) = self.known_variant(false) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
EnumPaddingSimValue::from_bitslice(padding_bits)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_from_opaque<T: Type>(self) -> (SimValue<T>, EnumPaddingSimValue) {
|
||||
let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(false) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
(
|
||||
SimValue::from_bitslice(T::from_canonical(variant_ty), variant_bits),
|
||||
EnumPaddingSimValue::from_bitslice(padding_bits),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
fn clone_padding_from_bits(padding: &mut EnumPaddingSimValue, padding_bits: &BitSlice) {
|
||||
match padding.bits_mut() {
|
||||
None => *padding = EnumPaddingSimValue::from_bitslice(padding_bits),
|
||||
Some(padding) => {
|
||||
assert_eq!(padding.width(), padding_bits.len());
|
||||
padding.bits_mut().copy_from_bitslice(padding_bits);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_clone_from_opaque(self, padding: &mut EnumPaddingSimValue) {
|
||||
let (None, _variant_bits, padding_bits) = self.known_variant(true) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
Self::clone_padding_from_bits(padding, padding_bits);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_clone_from_opaque<T: Type>(
|
||||
self,
|
||||
value: &mut SimValue<T>,
|
||||
padding: &mut EnumPaddingSimValue,
|
||||
) {
|
||||
let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(true) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty));
|
||||
SimValue::bits_mut(value)
|
||||
.bits_mut()
|
||||
.copy_from_bitslice(variant_bits);
|
||||
Self::clone_padding_from_bits(padding, padding_bits);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EnumSimValueToOpaque<'a> {
|
||||
variants: Interned<[EnumVariant]>,
|
||||
bit_width: usize,
|
||||
discriminant_bit_width: usize,
|
||||
writer: OpaqueSimValueWriter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> EnumSimValueToOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: EnumType>(ty: T, writer: OpaqueSimValueWriter<'a>) -> Self {
|
||||
let variants = ty.variants();
|
||||
let size = EnumTypePropertiesBuilder::new()
|
||||
.variants(variants)
|
||||
.finish()
|
||||
.size();
|
||||
assert_eq!(size, writer.size());
|
||||
Self {
|
||||
variants,
|
||||
bit_width: size
|
||||
.only_bit_width()
|
||||
.expect("enums should only contain bits"),
|
||||
discriminant_bit_width: discriminant_bit_width_impl(variants.len()),
|
||||
writer,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn write_discriminant(&mut self, mut discriminant: usize) {
|
||||
let orig_discriminant = discriminant;
|
||||
let discriminant_bits =
|
||||
&mut discriminant.view_bits_mut::<Lsb0>()[..self.discriminant_bit_width];
|
||||
self.writer.fill_prefix_with(
|
||||
OpaqueSimValueSize::from_bit_width(self.discriminant_bit_width),
|
||||
|writer| {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(discriminant_bits))
|
||||
},
|
||||
);
|
||||
discriminant_bits.fill(false);
|
||||
assert!(
|
||||
discriminant == 0,
|
||||
"{orig_discriminant:#x} is too big to fit in enum discriminant bits",
|
||||
);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_to_opaque(
|
||||
mut self,
|
||||
value: &UnknownVariantSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.write_discriminant(value.discriminant);
|
||||
let None = self.variants.get(value.discriminant) else {
|
||||
panic!("can't use UnknownVariantSimValue to set known discriminant");
|
||||
};
|
||||
assert_eq!(
|
||||
self.bit_width - self.discriminant_bit_width,
|
||||
value.body_bits.width()
|
||||
);
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(value.body_bits.bits()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn known_variant(
|
||||
mut self,
|
||||
discriminant: usize,
|
||||
value: Option<&OpaqueSimValue>,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.write_discriminant(discriminant);
|
||||
let variant_ty = self.variants[discriminant].ty;
|
||||
let variant_size = variant_ty.map_or(OpaqueSimValueSize::empty(), CanonicalType::size);
|
||||
if let Some(value) = value {
|
||||
if variant_ty.is_none() {
|
||||
panic!("expected variant to have no field");
|
||||
}
|
||||
self.writer.fill_prefix_with(variant_size, |writer| {
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
});
|
||||
} else if variant_ty.is_some() {
|
||||
panic!("expected variant to have a field");
|
||||
}
|
||||
if let Some(padding) = padding.bits() {
|
||||
assert_eq!(padding.ty().type_properties().size(), self.writer.size());
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(padding.bits()))
|
||||
} else {
|
||||
self.writer.fill_with_zeros()
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_to_opaque(
|
||||
self,
|
||||
discriminant: usize,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.known_variant(discriminant, None, padding)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_to_opaque<T: Type>(
|
||||
self,
|
||||
discriminant: usize,
|
||||
value: &SimValue<T>,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
let Some(variant_ty) = self.variants[discriminant].ty else {
|
||||
panic!("expected variant to have no field");
|
||||
};
|
||||
assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty));
|
||||
self.known_variant(discriminant, Some(SimValue::opaque(value)), padding)
|
||||
}
|
||||
}
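Editor's note: both helpers above agree on one packed layout: the low `discriminant_bit_width_impl(variant_count)` bits hold the variant index, stored LSB first, followed by the selected variant's body and padding. A standalone illustration with `bitvec` (the widths are made up):

```rust
use bitvec::prelude::*;

fn main() {
    // 3 variants -> 2 discriminant bits; pretend the widest variant body is 4 bits wide
    let discriminant_bit_width = 2;
    let mut bits = bitvec![0; discriminant_bit_width + 4];
    let discriminant = 2usize; // selecting the third variant
    bits[..discriminant_bit_width]
        .copy_from_bitslice(&discriminant.view_bits::<Lsb0>()[..discriminant_bit_width]);
    let (disc_bits, body_bits) = bits.split_at(discriminant_bit_width);
    assert_eq!(disc_bits, bits![0, 1]); // 2 == 0b10, stored LSB first
    assert_eq!(body_bits.len(), 4); // the variant value plus padding live here
}
```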
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn assert_is_enum_type<T: EnumType>(v: T) -> T {
|
||||
v
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn enum_type_to_sim_builder<T: EnumType>(v: T) -> T::SimBuilder {
|
||||
v.into()
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
|
|
@@ -360,6 +732,79 @@ pub enum HdlOption<T: Type> {
|
|||
HdlSome(T),
|
||||
}
|
||||
|
||||
impl<Lhs: Type + ExprPartialEq<Rhs>, Rhs: Type> ExprPartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
#[hdl]
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_eq = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_eq, ExprPartialEq::cmp_eq(lhs, rhs)),
|
||||
HdlNone => connect(cmp_eq, false),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_eq, false),
|
||||
HdlNone => connect(cmp_eq, true),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_eq
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_ne = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_ne, ExprPartialEq::cmp_ne(lhs, rhs)),
|
||||
HdlNone => connect(cmp_ne, true),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_ne, true),
|
||||
HdlNone => connect(cmp_ne, false),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_ne
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: SimValuePartialEq<Rhs>, Rhs: Type> SimValuePartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<HdlOption<Rhs>>) -> bool {
|
||||
type SimValueMatch<T> = <T as Type>::SimValue;
|
||||
match (&**this, &**other) {
|
||||
(SimValueMatch::<Self>::HdlNone(_), SimValueMatch::<HdlOption<Rhs>>::HdlNone(_)) => {
|
||||
true
|
||||
}
|
||||
(SimValueMatch::<Self>::HdlSome(..), SimValueMatch::<HdlOption<Rhs>>::HdlNone(_))
|
||||
| (SimValueMatch::<Self>::HdlNone(_), SimValueMatch::<HdlOption<Rhs>>::HdlSome(..)) => {
|
||||
false
|
||||
}
|
||||
(
|
||||
SimValueMatch::<Self>::HdlSome(l, _),
|
||||
SimValueMatch::<HdlOption<Rhs>>::HdlSome(r, _),
|
||||
) => l == r,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub fn HdlNone<T: StaticType>() -> Expr<HdlOption<T>> {
|
||||
HdlOption[T::TYPE].HdlNone()
|
||||
|
|
|
|||
|
|
@@ -13,10 +13,12 @@ use crate::{
|
|||
intern::{Intern, Interned},
|
||||
memory::{DynPortType, MemPort, PortType},
|
||||
module::{
|
||||
transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
Instance, ModuleIO,
|
||||
transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
},
|
||||
phantom_const::PhantomConst,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
|
||||
ty::{CanonicalType, StaticType, Type, TypeWithDeref},
|
||||
wire::Wire,
|
||||
};
|
||||
|
|
@@ -108,6 +110,7 @@ expr_enum! {
|
|||
UIntLiteral(Interned<UIntValue>),
|
||||
SIntLiteral(Interned<SIntValue>),
|
||||
BoolLiteral(bool),
|
||||
PhantomConst(PhantomConst),
|
||||
BundleLiteral(ops::BundleLiteral),
|
||||
ArrayLiteral(ops::ArrayLiteral<CanonicalType, DynSize>),
|
||||
EnumLiteral(ops::EnumLiteral),
|
||||
|
|
@@ -209,7 +212,9 @@ expr_enum! {
|
|||
ModuleIO(ModuleIO<CanonicalType>),
|
||||
Instance(Instance<Bundle>),
|
||||
Wire(Wire<CanonicalType>),
|
||||
Reg(Reg<CanonicalType>),
|
||||
Reg(Reg<CanonicalType, Reset>),
|
||||
RegSync(Reg<CanonicalType, SyncReset>),
|
||||
RegAsync(Reg<CanonicalType, AsyncReset>),
|
||||
MemPort(MemPort<DynPortType>),
|
||||
}
|
||||
}
|
||||
|
|
@@ -269,6 +274,20 @@ pub struct Expr<T: Type> {
|
|||
|
||||
impl<T: Type + fmt::Debug> fmt::Debug for Expr<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
let Self {
|
||||
__enum,
|
||||
__ty,
|
||||
__flow,
|
||||
} = self;
|
||||
let expr_ty = __ty.canonical();
|
||||
let enum_ty = __enum.to_expr().__ty;
|
||||
assert_eq!(
|
||||
expr_ty, enum_ty,
|
||||
"expr ty mismatch:\nExpr {{\n__enum: {__enum:?},\n__ty: {__ty:?},\n__flow: {__flow:?}\n}}"
|
||||
);
|
||||
}
|
||||
self.__enum.fmt(f)
|
||||
}
|
||||
}
|
||||
|
|
@@ -513,11 +532,7 @@ impl Flow {
|
|||
}
|
||||
}
|
||||
pub const fn flip_if(self, flipped: bool) -> Flow {
|
||||
if flipped {
|
||||
self.flip()
|
||||
} else {
|
||||
self
|
||||
}
|
||||
if flipped { self.flip() } else { self }
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -593,25 +608,42 @@ impl<T: Type> GetTarget for Wire<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Type> ToExpr for Reg<T> {
|
||||
impl<T: Type, R: ResetType> ToExpr for Reg<T, R> {
|
||||
type Type = T;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
struct Dispatch;
|
||||
impl ResetTypeDispatch for Dispatch {
|
||||
type Input<T: ResetType> = Reg<CanonicalType, T>;
|
||||
type Output<T: ResetType> = ExprEnum;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
|
||||
ExprEnum::Reg(input)
|
||||
}
|
||||
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
|
||||
ExprEnum::RegSync(input)
|
||||
}
|
||||
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
|
||||
ExprEnum::RegAsync(input)
|
||||
}
|
||||
}
|
||||
Expr {
|
||||
__enum: ExprEnum::Reg(self.canonical()).intern_sized(),
|
||||
__enum: R::dispatch(self.canonical(), Dispatch).intern_sized(),
|
||||
__ty: self.ty(),
|
||||
__flow: self.flow(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type> ToLiteralBits for Reg<T> {
|
||||
impl<T: Type, R: ResetType> ToLiteralBits for Reg<T, R> {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Err(NotALiteralExpr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type> GetTarget for Reg<T> {
|
||||
impl<T: Type, R: ResetType> GetTarget for Reg<T, R> {
|
||||
fn target(&self) -> Option<Interned<Target>> {
|
||||
Some(Intern::intern_sized(self.canonical().into()))
|
||||
}
|
||||
|
|
@@ -678,6 +710,7 @@ impl<T: ToExpr + ?Sized> CastToBits for T {
|
|||
}
|
||||
|
||||
pub trait CastBitsTo {
|
||||
#[track_caller]
|
||||
fn cast_bits_to<T: Type>(&self, ty: T) -> Expr<T>;
|
||||
}
|
||||
|
||||
|
|
@@ -735,3 +768,27 @@ pub fn repeat<T: Type, L: SizeType>(
|
|||
)
|
||||
.to_expr()
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> ToExpr for PhantomConst<T> {
|
||||
type Type = Self;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
Expr {
|
||||
__enum: ExprEnum::PhantomConst(self.canonical_phantom_const()).intern_sized(),
|
||||
__ty: *self,
|
||||
__flow: Flow::Source,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> GetTarget for PhantomConst<T> {
|
||||
fn target(&self) -> Option<Interned<Target>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> ToLiteralBits for PhantomConst<T> {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Ok(Interned::default())
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -7,19 +7,23 @@ use crate::{
|
|||
clock::{Clock, ToClock},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
CastBitsTo as _, CastTo, CastToBits as _, Expr, ExprEnum, Flow, HdlPartialEq,
|
||||
HdlPartialOrd, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits,
|
||||
target::{
|
||||
GetTarget, Target, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
CastTo, Expr, ExprEnum, Flow, HdlPartialEq, HdlPartialOrd, NotALiteralExpr, ReduceBits,
|
||||
ToExpr, ToLiteralBits,
|
||||
},
|
||||
int::{
|
||||
Bool, BoolOrIntType, DynSize, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt,
|
||||
UIntType, UIntValue,
|
||||
},
|
||||
intern::{Intern, Interned},
|
||||
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
|
||||
phantom_const::{PhantomConst, PhantomConstValue},
|
||||
reset::{
|
||||
AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset, ToAsyncReset, ToReset,
|
||||
ToSyncReset,
|
||||
},
|
||||
ty::{CanonicalType, StaticType, Type},
|
||||
util::ConstUsize,
|
||||
};
|
||||
|
|
@@ -262,7 +266,7 @@ impl Neg {
|
|||
};
|
||||
let result_ty = retval.ty();
|
||||
retval.literal_bits = arg.to_literal_bits().map(|bits| {
|
||||
Intern::intern_owned(result_ty.bits_from_bigint_wrapping(-SInt::bits_to_bigint(&bits)))
|
||||
Intern::intern_owned(result_ty.bits_from_bigint_wrapping(&-SInt::bits_to_bigint(&bits)))
|
||||
});
|
||||
retval
|
||||
}
|
||||
|
|
@@ -369,7 +373,7 @@ fn binary_op_literal_bits<ResultTy: BoolOrIntType, Lhs: BoolOrIntType, Rhs: Bool
|
|||
let rhs = Rhs::bits_to_bigint(&rhs);
|
||||
let result = f(lhs, rhs)?;
|
||||
Ok(Intern::intern_owned(
|
||||
result_ty.bits_from_bigint_wrapping(result),
|
||||
result_ty.bits_from_bigint_wrapping(&result),
|
||||
))
|
||||
}
|
||||
|
||||
|
|
@@ -1344,7 +1348,7 @@ macro_rules! binary_op_fixed_shift {
|
|||
literal_bits: Err(NotALiteralExpr),
|
||||
};
|
||||
retval.literal_bits = lhs.to_literal_bits().map(|bits| {
|
||||
Intern::intern_owned(retval.ty().bits_from_bigint_wrapping($Trait::$method(
|
||||
Intern::intern_owned(retval.ty().bits_from_bigint_wrapping(&$Trait::$method(
|
||||
$ty::bits_to_bigint(&bits),
|
||||
rhs,
|
||||
)))
|
||||
|
|
@@ -1621,7 +1625,7 @@ macro_rules! impl_cast_int_op {
|
|||
ty,
|
||||
literal_bits: arg.to_literal_bits().map(|bits| {
|
||||
Intern::intern_owned(
|
||||
ty.bits_from_bigint_wrapping($from::bits_to_bigint(&bits)),
|
||||
ty.bits_from_bigint_wrapping(&$from::bits_to_bigint(&bits)),
|
||||
)
|
||||
}),
|
||||
}
|
||||
|
|
@@ -1773,11 +1777,11 @@ impl_cast_bit_op!(CastSIntToAsyncReset, SInt<1>, #[dyn] SInt, AsyncReset, #[trai
|
|||
impl_cast_bit_op!(CastSyncResetToBool, SyncReset, Bool);
|
||||
impl_cast_bit_op!(CastSyncResetToUInt, SyncReset, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastSyncResetToSInt, SyncReset, SInt<1>, #[dyn] SInt);
|
||||
impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset, #[trait] ToReset::to_reset);
|
||||
impl_cast_bit_op!(CastSyncResetToReset, SyncReset, Reset);
|
||||
impl_cast_bit_op!(CastAsyncResetToBool, AsyncReset, Bool);
|
||||
impl_cast_bit_op!(CastAsyncResetToUInt, AsyncReset, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastAsyncResetToSInt, AsyncReset, SInt<1>, #[dyn] SInt);
|
||||
impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset, #[trait] ToReset::to_reset);
|
||||
impl_cast_bit_op!(CastAsyncResetToReset, AsyncReset, Reset);
|
||||
impl_cast_bit_op!(CastResetToBool, Reset, Bool);
|
||||
impl_cast_bit_op!(CastResetToUInt, Reset, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastResetToSInt, Reset, SInt<1>, #[dyn] SInt);
|
||||
|
|
@@ -1788,6 +1792,127 @@ impl_cast_bit_op!(CastClockToBool, Clock, Bool);
|
|||
impl_cast_bit_op!(CastClockToUInt, Clock, UInt<1>, #[dyn] UInt);
|
||||
impl_cast_bit_op!(CastClockToSInt, Clock, SInt<1>, #[dyn] SInt);
|
||||
|
||||
impl<T: ResetType> ToReset for Expr<T> {
|
||||
fn to_reset(&self) -> Expr<Reset> {
|
||||
struct Dispatch;
|
||||
impl ResetTypeDispatch for Dispatch {
|
||||
type Input<T: ResetType> = Expr<T>;
|
||||
type Output<T: ResetType> = Expr<Reset>;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
|
||||
input
|
||||
}
|
||||
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
|
||||
input.cast_to_static()
|
||||
}
|
||||
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
|
||||
input.cast_to_static()
|
||||
}
|
||||
}
|
||||
T::dispatch(*self, Dispatch)
|
||||
}
|
||||
}
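Editor's note: the same `ResetTypeDispatch` pattern used above generalizes to any behavior that must differ per reset flavor. A hypothetical example, not part of the crate, kept to the trait shape shown in this diff:

```rust
// Name the reset flavor of an expression at runtime by dispatching on R.
fn reset_flavor_name<R: ResetType>(expr: Expr<R>) -> &'static str {
    struct Dispatch;
    impl ResetTypeDispatch for Dispatch {
        type Input<T: ResetType> = Expr<T>;
        type Output<T: ResetType> = &'static str;

        fn reset(self, _input: Self::Input<Reset>) -> Self::Output<Reset> {
            "reset (dynamic)"
        }

        fn sync_reset(self, _input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
            "sync reset"
        }

        fn async_reset(self, _input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
            "async reset"
        }
    }
    R::dispatch(expr, Dispatch)
}
```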
|
||||
|
||||
impl ExprCastTo<AsyncReset> for AsyncReset {
|
||||
fn cast_to(src: Expr<Self>, _to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for AsyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for AsyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for SyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for SyncReset {
|
||||
fn cast_to(src: Expr<Self>, _to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for SyncReset {
|
||||
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for Reset {
|
||||
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for Reset {
|
||||
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Reset> for Reset {
|
||||
fn cast_to(src: Expr<Self>, _to_type: Reset) -> Expr<Reset> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for Reset {
|
||||
fn cast_to(src: Expr<Self>, to_type: Clock) -> Expr<Clock> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<AsyncReset> for Clock {
|
||||
fn cast_to(src: Expr<Self>, to_type: AsyncReset) -> Expr<AsyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<SyncReset> for Clock {
|
||||
fn cast_to(src: Expr<Self>, to_type: SyncReset) -> Expr<SyncReset> {
|
||||
src.cast_to(Bool).cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Clock> for Clock {
|
||||
fn cast_to(src: Expr<Self>, _to_type: Clock) -> Expr<Clock> {
|
||||
src
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprCastTo<()> for PhantomConst<T> {
|
||||
fn cast_to(src: Expr<Self>, to_type: ()) -> Expr<()> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprCastTo<PhantomConst<T>> for () {
|
||||
fn cast_to(src: Expr<Self>, to_type: PhantomConst<T>) -> Expr<PhantomConst<T>> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, U: ?Sized + PhantomConstValue> ExprCastTo<PhantomConst<T>>
|
||||
for PhantomConst<U>
|
||||
{
|
||||
fn cast_to(src: Expr<Self>, to_type: PhantomConst<T>) -> Expr<PhantomConst<T>> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct FieldAccess<FieldType: Type = CanonicalType> {
|
||||
base: Expr<Bundle>,
|
||||
|
|
@@ -1812,7 +1937,8 @@ impl<FieldType: Type> FieldAccess<FieldType> {
|
|||
let field = Expr::ty(base).fields()[field_index];
|
||||
let field_type = FieldType::from_canonical(field.ty);
|
||||
let literal_bits = base.to_literal_bits().map(|bits| {
|
||||
bits[Expr::ty(base).field_offsets()[field_index]..][..field.ty.bit_width()].intern()
|
||||
bits[Expr::ty(base).field_offsets()[field_index].bit_width..][..field.ty.bit_width()]
|
||||
.intern()
|
||||
});
|
||||
let target = base.target().map(|base| {
|
||||
Intern::intern_sized(base.join(TargetPathElement::intern_sized(
|
||||
|
|
@@ -2604,3 +2730,47 @@ impl<T: Type> ToExpr for Uninit<T> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExprIntoIterator: Type {
|
||||
type Item: Type;
|
||||
type ExprIntoIter: Iterator<Item = Expr<Self::Item>>;
|
||||
|
||||
fn expr_into_iter(e: Expr<Self>) -> Self::ExprIntoIter;
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for &'_ Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for &'_ mut Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExprFromIterator<A>: Type {
|
||||
fn expr_from_iter<T: IntoIterator<Item = A>>(iter: T) -> Expr<Self>;
|
||||
}
|
||||
|
||||
impl<This: ExprFromIterator<A>, A> FromIterator<A> for Expr<This> {
|
||||
fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
|
||||
This::expr_from_iter(iter)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -3,18 +3,19 @@
|
|||
use crate::{
|
||||
array::Array,
|
||||
bundle::{Bundle, BundleField},
|
||||
expr::Flow,
|
||||
expr::{Expr, Flow, ToExpr},
|
||||
intern::{Intern, Interned},
|
||||
memory::{DynPortType, MemPort},
|
||||
module::{Instance, ModuleIO, TargetName},
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
wire::Wire,
|
||||
};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct TargetPathBundleField {
|
||||
pub name: Interned<str>,
|
||||
}
|
||||
|
|
@ -25,7 +26,7 @@ impl fmt::Display for TargetPathBundleField {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct TargetPathArrayElement {
|
||||
pub index: usize,
|
||||
}
|
||||
|
|
@ -36,7 +37,7 @@ impl fmt::Display for TargetPathArrayElement {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct TargetPathDynArrayElement {}
|
||||
|
||||
impl fmt::Display for TargetPathDynArrayElement {
|
||||
|
|
@ -45,7 +46,7 @@ impl fmt::Display for TargetPathDynArrayElement {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum TargetPathElement {
|
||||
BundleField(TargetPathBundleField),
|
||||
ArrayElement(TargetPathArrayElement),
|
||||
|
|
@ -127,6 +128,7 @@ macro_rules! impl_target_base {
|
|||
$(#[$enum_meta:meta])*
|
||||
$enum_vis:vis enum $TargetBase:ident {
|
||||
$(
|
||||
$(#[from = $from:ident])?
|
||||
#[is = $is_fn:ident]
|
||||
#[to = $to_fn:ident]
|
||||
$(#[$variant_meta:meta])*
|
||||
|
|
@ -150,19 +152,19 @@ macro_rules! impl_target_base {
|
|||
}
|
||||
}
|
||||
|
||||
$(
|
||||
$($(
|
||||
impl From<$VariantTy> for $TargetBase {
|
||||
fn from(value: $VariantTy) -> Self {
|
||||
fn $from(value: $VariantTy) -> Self {
|
||||
Self::$Variant(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<$VariantTy> for Target {
|
||||
fn from(value: $VariantTy) -> Self {
|
||||
fn $from(value: $VariantTy) -> Self {
|
||||
$TargetBase::$Variant(value).into()
|
||||
}
|
||||
}
|
||||
)*
|
||||
)*)?
|
||||
|
||||
impl $TargetBase {
|
||||
$(
|
||||
|
|
@ -193,30 +195,79 @@ macro_rules! impl_target_base {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToExpr for $TargetBase {
|
||||
type Type = CanonicalType;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
match self {
|
||||
$(Self::$Variant(v) => Expr::canonical(v.to_expr()),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_target_base! {
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum TargetBase {
|
||||
#[from = from]
|
||||
#[is = is_module_io]
|
||||
#[to = module_io]
|
||||
ModuleIO(ModuleIO<CanonicalType>),
|
||||
#[from = from]
|
||||
#[is = is_mem_port]
|
||||
#[to = mem_port]
|
||||
MemPort(MemPort<DynPortType>),
|
||||
#[is = is_reg]
|
||||
#[to = reg]
|
||||
Reg(Reg<CanonicalType>),
|
||||
Reg(Reg<CanonicalType, Reset>),
|
||||
#[is = is_reg_sync]
|
||||
#[to = reg_sync]
|
||||
RegSync(Reg<CanonicalType, SyncReset>),
|
||||
#[is = is_reg_async]
|
||||
#[to = reg_async]
|
||||
RegAsync(Reg<CanonicalType, AsyncReset>),
|
||||
#[from = from]
|
||||
#[is = is_wire]
|
||||
#[to = wire]
|
||||
Wire(Wire<CanonicalType>),
|
||||
#[from = from]
|
||||
#[is = is_instance]
|
||||
#[to = instance]
|
||||
Instance(Instance<Bundle>),
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: ResetType> From<Reg<CanonicalType, R>> for TargetBase {
|
||||
fn from(value: Reg<CanonicalType, R>) -> Self {
|
||||
struct Dispatch;
|
||||
impl ResetTypeDispatch for Dispatch {
|
||||
type Input<T: ResetType> = Reg<CanonicalType, T>;
|
||||
type Output<T: ResetType> = TargetBase;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
|
||||
TargetBase::Reg(input)
|
||||
}
|
||||
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
|
||||
TargetBase::RegSync(input)
|
||||
}
|
||||
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
|
||||
TargetBase::RegAsync(input)
|
||||
}
|
||||
}
|
||||
R::dispatch(value, Dispatch)
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: ResetType> From<Reg<CanonicalType, R>> for Target {
|
||||
fn from(value: Reg<CanonicalType, R>) -> Self {
|
||||
TargetBase::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for TargetBase {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{:?}", self.target_name())
|
||||
|
|
@ -229,6 +280,8 @@ impl TargetBase {
|
|||
TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None),
|
||||
TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())),
|
||||
TargetBase::Reg(v) => TargetName(v.scoped_name(), None),
|
||||
TargetBase::RegSync(v) => TargetName(v.scoped_name(), None),
|
||||
TargetBase::RegAsync(v) => TargetName(v.scoped_name(), None),
|
||||
TargetBase::Wire(v) => TargetName(v.scoped_name(), None),
|
||||
TargetBase::Instance(v) => TargetName(v.scoped_name(), None),
|
||||
}
|
||||
|
|
@ -238,6 +291,8 @@ impl TargetBase {
|
|||
TargetBase::ModuleIO(v) => v.ty(),
|
||||
TargetBase::MemPort(v) => v.ty().canonical(),
|
||||
TargetBase::Reg(v) => v.ty(),
|
||||
TargetBase::RegSync(v) => v.ty(),
|
||||
TargetBase::RegAsync(v) => v.ty(),
|
||||
TargetBase::Wire(v) => v.ty(),
|
||||
TargetBase::Instance(v) => v.ty().canonical(),
|
||||
}
|
||||
|
|
@ -313,7 +368,7 @@ impl TargetChild {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub enum Target {
|
||||
Base(Interned<TargetBase>),
|
||||
Child(TargetChild),
|
||||
|
|
|
|||
File diff suppressed because it is too large
656 crates/fayalite/src/int/uint_in_range.rs Normal file

|
|
@ -0,0 +1,656 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
bundle::{Bundle, BundleField, BundleType, BundleTypePropertiesBuilder, NoBuilder},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd,
|
||||
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
|
||||
},
|
||||
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
phantom_const::PhantomConst,
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
|
||||
StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
};
|
||||
use bitvec::{order::Lsb0, view::BitView};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{Error, Visitor, value::UsizeDeserializer},
|
||||
};
|
||||
use std::{fmt, marker::PhantomData, ops::Index};
|
||||
|
||||
const UINT_IN_RANGE_TYPE_FIELD_NAMES: [&'static str; 2] = ["value", "range"];
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct UIntInRangeMaskType {
|
||||
value: Bool,
|
||||
range: PhantomConstRangeMaskType,
|
||||
}
|
||||
|
||||
impl Type for UIntInRangeMaskType {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = Self;
|
||||
type SimValue = bool;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
*self
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::Bundle(Bundle::new(self.fields()))
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let fields = Bundle::from_canonical(canonical_type).fields();
|
||||
let [
|
||||
BundleField {
|
||||
name: value_name,
|
||||
flipped: false,
|
||||
ty: value,
|
||||
},
|
||||
BundleField {
|
||||
name: range_name,
|
||||
flipped: false,
|
||||
ty: range,
|
||||
},
|
||||
] = *fields
|
||||
else {
|
||||
panic!("expected UIntInRangeMaskType");
|
||||
};
|
||||
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
|
||||
let value = Bool::from_canonical(value);
|
||||
let range = PhantomConstRangeMaskType::from_canonical(range);
|
||||
Self { value, range }
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
Bool.sim_value_from_opaque(opaque)
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
Bool.sim_value_clone_from_opaque(value, opaque);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
Bool.sim_value_to_opaque(value, writer)
|
||||
}
|
||||
}
|
||||
|
||||
impl BundleType for UIntInRangeMaskType {
|
||||
type Builder = NoBuilder;
|
||||
type FilledBuilder = Expr<UIntInRangeMaskType>;
|
||||
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
|
||||
let Self { value, range } = self;
|
||||
[
|
||||
BundleField {
|
||||
name: value_name.intern(),
|
||||
flipped: false,
|
||||
ty: value.canonical(),
|
||||
},
|
||||
BundleField {
|
||||
name: range_name.intern(),
|
||||
flipped: false,
|
||||
ty: range.canonical(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl StaticType for UIntInRangeMaskType {
|
||||
const TYPE: Self = Self {
|
||||
value: Bool,
|
||||
range: PhantomConstRangeMaskType::TYPE,
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = Self::TYPE;
|
||||
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
|
||||
.field(false, Bool::TYPE_PROPERTIES)
|
||||
.field(false, PhantomConstRangeMaskType::TYPE_PROPERTIES)
|
||||
.finish();
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Self::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl ToSimValueWithType<UIntInRangeMaskType> for bool {
|
||||
fn to_sim_value_with_type(&self, ty: UIntInRangeMaskType) -> SimValue<UIntInRangeMaskType> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Bool> for UIntInRangeMaskType {
|
||||
fn cast_to(src: Expr<Self>, to_type: Bool) -> Expr<Bool> {
|
||||
src.cast_to_bits().cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<UIntInRangeMaskType> for Bool {
|
||||
fn cast_to(src: Expr<Self>, to_type: UIntInRangeMaskType) -> Expr<UIntInRangeMaskType> {
|
||||
src.cast_to_static::<UInt<1>>().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprPartialEq<Self> for UIntInRangeMaskType {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl SimValuePartialEq<Self> for UIntInRangeMaskType {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
|
||||
**this == **other
|
||||
}
|
||||
}
|
||||
|
||||
type PhantomConstRangeMaskType = <PhantomConst<SerdeRange<DynSize, DynSize>> as Type>::MaskType;
|
||||
|
||||
#[derive(Default, Copy, Clone, Debug)]
|
||||
struct RangeParseError;
|
||||
|
||||
macro_rules! define_uint_in_range_type {
|
||||
(
|
||||
$UIntInRange:ident,
|
||||
$UIntInRangeType:ident,
|
||||
$UIntInRangeTypeWithoutGenerics:ident,
|
||||
$UIntInRangeTypeWithStart:ident,
|
||||
$SerdeRange:ident,
|
||||
$range_operator_str:literal,
|
||||
|$uint_range_usize_start:ident, $uint_range_usize_end:ident| $uint_range_usize:expr,
|
||||
) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct $SerdeRange<Start: Size, End: Size> {
|
||||
start: Start::SizeType,
|
||||
end: End::SizeType,
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> Default for $SerdeRange<Start, End> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
start: Start::SIZE,
|
||||
end: End::SIZE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for $SerdeRange<DynSize, DynSize> {
|
||||
type Err = RangeParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let Some((start, end)) = s.split_once($range_operator_str) else {
|
||||
return Err(RangeParseError);
|
||||
};
|
||||
if start.is_empty()
|
||||
|| start.bytes().any(|b| !b.is_ascii_digit())
|
||||
|| end.is_empty()
|
||||
|| end.bytes().any(|b| !b.is_ascii_digit())
|
||||
{
|
||||
return Err(RangeParseError);
|
||||
}
|
||||
let start = start.parse().map_err(|_| RangeParseError)?;
|
||||
let end = end.parse().map_err(|_| RangeParseError)?;
|
||||
let retval = Self { start, end };
|
||||
if retval.is_empty() {
|
||||
Err(RangeParseError)
|
||||
} else {
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> fmt::Display for $SerdeRange<Start, End> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self { start, end } = *self;
|
||||
write!(
|
||||
f,
|
||||
"{}{}{}",
|
||||
Start::as_usize(start),
|
||||
$range_operator_str,
|
||||
End::as_usize(end),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> Serialize for $SerdeRange<Start, End> {
|
||||
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
|
||||
serializer.collect_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, Start: Size, End: Size> Deserialize<'de> for $SerdeRange<Start, End> {
|
||||
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
|
||||
struct SerdeRangeVisitor<Start: Size, End: Size>(PhantomData<(Start, End)>);
|
||||
impl<'de, Start: Size, End: Size> Visitor<'de> for SerdeRangeVisitor<Start, End> {
|
||||
type Value = $SerdeRange<Start, End>;
|
||||
|
||||
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("a string with format \"")?;
|
||||
if let Some(start) = Start::KNOWN_VALUE {
|
||||
write!(f, "{start}")?;
|
||||
} else {
|
||||
f.write_str("<int>")?;
|
||||
};
|
||||
f.write_str($range_operator_str)?;
|
||||
if let Some(end) = End::KNOWN_VALUE {
|
||||
write!(f, "{end}")?;
|
||||
} else {
|
||||
f.write_str("<int>")?;
|
||||
};
|
||||
f.write_str("\" that is a non-empty range")
|
||||
}
|
||||
|
||||
fn visit_str<E: Error>(self, v: &str) -> Result<Self::Value, E> {
|
||||
let $SerdeRange::<DynSize, DynSize> { start, end } =
|
||||
v.parse().map_err(|_| {
|
||||
Error::invalid_value(serde::de::Unexpected::Str(v), &self)
|
||||
})?;
|
||||
let start =
|
||||
Start::SizeType::deserialize(UsizeDeserializer::<E>::new(start))?;
|
||||
let end = End::SizeType::deserialize(UsizeDeserializer::<E>::new(end))?;
|
||||
Ok($SerdeRange { start, end })
|
||||
}
|
||||
|
||||
fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
|
||||
match std::str::from_utf8(v) {
|
||||
Ok(v) => self.visit_str(v),
|
||||
Err(_) => {
|
||||
Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_str(SerdeRangeVisitor(PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct $UIntInRangeType<Start: Size, End: Size> {
|
||||
value: UInt,
|
||||
range: PhantomConst<$SerdeRange<Start, End>>,
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> $UIntInRangeType<Start, End> {
|
||||
fn from_phantom_const_range(range: PhantomConst<$SerdeRange<Start, End>>) -> Self {
|
||||
let $SerdeRange { start, end } = *range.get();
|
||||
let $uint_range_usize_start = Start::as_usize(start);
|
||||
let $uint_range_usize_end = End::as_usize(end);
|
||||
Self {
|
||||
value: $uint_range_usize,
|
||||
range,
|
||||
}
|
||||
}
|
||||
pub fn new(start: Start::SizeType, end: End::SizeType) -> Self {
|
||||
Self::from_phantom_const_range(PhantomConst::new(
|
||||
$SerdeRange { start, end }.intern_sized(),
|
||||
))
|
||||
}
|
||||
pub fn bit_width(self) -> usize {
|
||||
self.value.width()
|
||||
}
|
||||
pub fn start(self) -> Start::SizeType {
|
||||
self.range.get().start
|
||||
}
|
||||
pub fn end(self) -> End::SizeType {
|
||||
self.range.get().end
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> fmt::Debug for $UIntInRangeType<Start, End> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self { value, range } = self;
|
||||
let $SerdeRange { start, end } = *range.get();
|
||||
f.debug_struct(&format!(
|
||||
"{}<{}, {}>",
|
||||
stringify!($UIntInRange),
|
||||
Start::as_usize(start),
|
||||
End::as_usize(end),
|
||||
))
|
||||
.field("value", value)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> Type for $UIntInRangeType<Start, End> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = UIntInRangeMaskType;
|
||||
type SimValue = usize;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
UIntInRangeMaskType::TYPE
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::Bundle(Bundle::new(self.fields()))
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let fields = Bundle::from_canonical(canonical_type).fields();
|
||||
let [
|
||||
BundleField {
|
||||
name: value_name,
|
||||
flipped: false,
|
||||
ty: value,
|
||||
},
|
||||
BundleField {
|
||||
name: range_name,
|
||||
flipped: false,
|
||||
ty: range,
|
||||
},
|
||||
] = *fields
|
||||
else {
|
||||
panic!("expected {}", stringify!($UIntInRange));
|
||||
};
|
||||
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
|
||||
let value = UInt::from_canonical(value);
|
||||
let range = PhantomConst::<$SerdeRange<Start, End>>::from_canonical(range);
|
||||
let retval = Self::from_phantom_const_range(range);
|
||||
assert_eq!(retval, Self { value, range });
|
||||
retval
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), self.value.type_properties().size());
|
||||
let mut retval = 0usize;
|
||||
retval.view_bits_mut::<Lsb0>()[..opaque.bit_width()]
|
||||
.clone_from_bitslice(opaque.bits());
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
*value = self.sim_value_from_opaque(opaque);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
&value.view_bits::<Lsb0>()[..self.value.width()],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> BundleType for $UIntInRangeType<Start, End> {
|
||||
type Builder = NoBuilder;
|
||||
type FilledBuilder = Expr<Self>;
|
||||
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
|
||||
let Self { value, range } = self;
|
||||
[
|
||||
BundleField {
|
||||
name: value_name.intern(),
|
||||
flipped: false,
|
||||
ty: value.canonical(),
|
||||
},
|
||||
BundleField {
|
||||
name: range_name.intern(),
|
||||
flipped: false,
|
||||
ty: range.canonical(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> Default for $UIntInRangeType<Start, End> {
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> StaticType for $UIntInRangeType<Start, End> {
|
||||
const TYPE: Self = {
|
||||
let $uint_range_usize_start = Start::VALUE;
|
||||
let $uint_range_usize_end = End::VALUE;
|
||||
Self {
|
||||
value: $uint_range_usize,
|
||||
range: PhantomConst::<$SerdeRange<Start, End>>::TYPE,
|
||||
}
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = UIntInRangeMaskType::TYPE;
|
||||
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
|
||||
.field(false, Self::TYPE.value.type_properties_dyn())
|
||||
.field(
|
||||
false,
|
||||
PhantomConst::<$SerdeRange<Start, End>>::TYPE_PROPERTIES,
|
||||
)
|
||||
.finish();
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = UIntInRangeMaskType::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> ToSimValueWithType<$UIntInRangeType<Start, End>> for usize {
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: $UIntInRangeType<Start, End>,
|
||||
) -> SimValue<$UIntInRangeType<Start, End>> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct $UIntInRangeTypeWithoutGenerics;
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const $UIntInRangeType: $UIntInRangeTypeWithoutGenerics =
|
||||
$UIntInRangeTypeWithoutGenerics;
|
||||
|
||||
impl<StartSize: SizeType> Index<StartSize> for $UIntInRangeTypeWithoutGenerics {
|
||||
type Output = $UIntInRangeTypeWithStart<StartSize::Size>;
|
||||
|
||||
fn index(&self, start: StartSize) -> &Self::Output {
|
||||
Interned::into_inner($UIntInRangeTypeWithStart(start).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct $UIntInRangeTypeWithStart<Start: Size>(Start::SizeType);
|
||||
|
||||
impl<Start: Size, EndSize: SizeType<Size = End>, End: Size<SizeType = EndSize>>
|
||||
Index<EndSize> for $UIntInRangeTypeWithStart<Start>
|
||||
{
|
||||
type Output = $UIntInRangeType<Start, End>;
|
||||
|
||||
fn index(&self, end: EndSize) -> &Self::Output {
|
||||
Interned::into_inner($UIntInRangeType::new(self.0, end).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprCastTo<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cast_to(src: Expr<Self>, to_type: UIntType<Width>) -> Expr<UIntType<Width>> {
|
||||
src.cast_to_bits().cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprCastTo<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cast_to(
|
||||
src: Expr<Self>,
|
||||
to_type: $UIntInRangeType<Start, End>,
|
||||
) -> Expr<$UIntInRangeType<Start, End>> {
|
||||
src.cast_to(to_type.value).cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
ExprPartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn cmp_eq(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
ExprPartialOrd<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn cmp_lt(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_lt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_le(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_le(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_gt(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_gt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ge(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ge(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
SimValuePartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn sim_value_eq(
|
||||
this: &SimValue<Self>,
|
||||
other: &SimValue<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> bool {
|
||||
**this == **other
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs)
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_lt(rhs)
|
||||
}
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_le(rhs)
|
||||
}
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_gt(rhs)
|
||||
}
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ge(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_lt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_le(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_gt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_ge(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
define_uint_in_range_type! {
|
||||
UIntInRange,
|
||||
UIntInRangeType,
|
||||
UIntInRangeTypeWithoutGenerics,
|
||||
UIntInRangeTypeWithStart,
|
||||
SerdeRange,
|
||||
"..",
|
||||
|start, end| UInt::range_usize(start..end),
|
||||
}
|
||||
|
||||
define_uint_in_range_type! {
|
||||
UIntInRangeInclusive,
|
||||
UIntInRangeInclusiveType,
|
||||
UIntInRangeInclusiveTypeWithoutGenerics,
|
||||
UIntInRangeInclusiveTypeWithStart,
|
||||
SerdeRangeInclusive,
|
||||
"..=",
|
||||
|start, end| UInt::range_inclusive_usize(start..=end),
|
||||
}
|
||||
|
||||
impl SerdeRange<DynSize, DynSize> {
|
||||
fn is_empty(self) -> bool {
|
||||
self.start >= self.end
|
||||
}
|
||||
}
|
||||
|
||||
impl SerdeRangeInclusive<DynSize, DynSize> {
|
||||
fn is_empty(self) -> bool {
|
||||
self.start > self.end
|
||||
}
|
||||
}
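A rough usage sketch of the generated range types, assuming `usize` is the runtime `SizeType` used for `DynSize` (that impl is outside this hunk); the concrete bounds are arbitrary:

```rust
// Indexing syntax comes from the generated `Index` impls: start first, then end.
let ty = UIntInRangeType[3][10]; // exclusive: holds values in 3..10
assert_eq!(ty.start(), 3);
assert_eq!(ty.end(), 10);
// The `value` field is a `UInt` just wide enough for the range.
assert_eq!(ty.bit_width(), UInt::range_usize(3..10).width());

// The inclusive flavor uses `..=` semantics instead.
let inclusive = UIntInRangeInclusiveType[0][15]; // holds values in 0..=15
assert_eq!(inclusive.bit_width(), 4);
```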
|
||||
|
|
@ -1,23 +1,25 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![allow(clippy::type_complexity)]
|
||||
use crate::intern::type_map::TypeIdMap;
|
||||
use crate::{intern::type_map::TypeIdMap, util::DefaultBuildHasher};
|
||||
use bitvec::{ptr::BitPtr, slice::BitSlice, vec::BitVec};
|
||||
use hashbrown::{hash_map::RawEntryMut, HashMap, HashTable};
|
||||
use hashbrown::HashTable;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
borrow::{Borrow, Cow},
|
||||
cmp::Ordering,
|
||||
ffi::{OsStr, OsString},
|
||||
fmt,
|
||||
hash::{BuildHasher, Hash, Hasher},
|
||||
iter::FusedIterator,
|
||||
marker::PhantomData,
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
sync::{Mutex, RwLock},
|
||||
};
|
||||
|
||||
pub mod type_map;
|
||||
mod type_map;
|
||||
|
||||
pub trait LazyInternedTrait<T: ?Sized + Send + Sync + 'static>: Send + Sync + Any {
|
||||
fn get(&self) -> Interned<T>;
|
||||
|
|
@ -287,15 +289,266 @@ impl InternedCompare for BitSlice {
|
|||
}
|
||||
}
|
||||
|
||||
impl InternedCompare for str {
|
||||
type InternedCompareKey = PtrEqWithMetadata<Self>;
|
||||
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
|
||||
PtrEqWithMetadata(this)
|
||||
/// Safety: `as_bytes` and `from_bytes_unchecked` must return the same pointer as the input.
|
||||
/// All values returned by `as_bytes` must be valid to pass to `from_bytes_unchecked`.
|
||||
/// `into_bytes` must return the exact same thing as `as_bytes`.
|
||||
/// `Interned<Self>` must contain the exact same references as `Interned<[u8]>`,
|
||||
/// so they can be safely interconverted without needing re-interning.
|
||||
unsafe trait InternStrLike: ToOwned {
|
||||
fn as_bytes(this: &Self) -> &[u8];
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8>;
|
||||
/// Safety: `bytes` must be a valid sequence of bytes for this type. All UTF-8 sequences are valid.
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self;
|
||||
}
|
||||
|
||||
macro_rules! impl_intern_str_like {
|
||||
($ty:ty, owned = $Owned:ty) => {
|
||||
impl InternedCompare for $ty {
|
||||
type InternedCompareKey = PtrEqWithMetadata<[u8]>;
|
||||
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
|
||||
PtrEqWithMetadata(InternStrLike::as_bytes(this))
|
||||
}
|
||||
}
|
||||
impl Intern for $ty {
|
||||
fn intern(&self) -> Interned<Self> {
|
||||
Self::intern_cow(Cow::Borrowed(self))
|
||||
}
|
||||
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self> {
|
||||
Interned::cast_unchecked(
|
||||
<[u8]>::intern_cow(match this {
|
||||
Cow::Borrowed(v) => Cow::Borrowed(<Self as InternStrLike>::as_bytes(v)),
|
||||
Cow::Owned(v) => {
|
||||
// verify $Owned is correct
|
||||
let v: $Owned = v;
|
||||
Cow::Owned(<Self as InternStrLike>::into_bytes(v))
|
||||
}
|
||||
}),
|
||||
// Safety: guaranteed safe because we got the bytes from `as_bytes`/`into_bytes`
|
||||
|v| unsafe { <Self as InternStrLike>::from_bytes_unchecked(v) },
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Default for Interned<$ty> {
|
||||
fn default() -> Self {
|
||||
// Safety: safe because the empty sequence is valid UTF-8
|
||||
unsafe { <$ty as InternStrLike>::from_bytes_unchecked(&[]) }.intern()
|
||||
}
|
||||
}
|
||||
impl<'de> Deserialize<'de> for Interned<$ty> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Cow::<'de, $ty>::deserialize(deserializer).map(Intern::intern_cow)
|
||||
}
|
||||
}
|
||||
impl From<$Owned> for Interned<$ty> {
|
||||
fn from(v: $Owned) -> Self {
|
||||
v.intern_deref()
|
||||
}
|
||||
}
|
||||
impl From<Interned<$ty>> for $Owned {
|
||||
fn from(v: Interned<$ty>) -> Self {
|
||||
Interned::into_inner(v).into()
|
||||
}
|
||||
}
|
||||
impl From<Interned<$ty>> for Box<$ty> {
|
||||
fn from(v: Interned<$ty>) -> Self {
|
||||
Interned::into_inner(v).into()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` match `str`
|
||||
unsafe impl InternStrLike for str {
|
||||
fn as_bytes(this: &Self) -> &[u8] {
|
||||
this.as_bytes()
|
||||
}
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||
this.into_bytes()
|
||||
}
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
// Safety: `bytes` is guaranteed UTF-8 by the caller
|
||||
unsafe { str::from_utf8_unchecked(bytes) }
|
||||
}
|
||||
}
|
||||
|
||||
impl_intern_str_like!(str, owned = String);
|
||||
|
||||
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` match `OsStr`
|
||||
unsafe impl InternStrLike for OsStr {
|
||||
fn as_bytes(this: &Self) -> &[u8] {
|
||||
this.as_encoded_bytes()
|
||||
}
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||
this.into_encoded_bytes()
|
||||
}
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
// Safety: `bytes` is guaranteed valid for `OsStr` by the caller
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
}
|
||||
}
|
||||
|
||||
impl_intern_str_like!(OsStr, owned = OsString);
|
||||
|
||||
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` match `OsStr`
|
||||
unsafe impl InternStrLike for Path {
|
||||
fn as_bytes(this: &Self) -> &[u8] {
|
||||
this.as_os_str().as_encoded_bytes()
|
||||
}
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||
this.into_os_string().into_encoded_bytes()
|
||||
}
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
// Safety: `bytes` is guaranteed valid for `OsStr` by the caller
|
||||
unsafe { Path::new(OsStr::from_encoded_bytes_unchecked(bytes)) }
|
||||
}
|
||||
}
|
||||
|
||||
impl_intern_str_like!(Path, owned = PathBuf);
|
||||
|
||||
impl Interned<str> {
|
||||
pub fn from_utf8(v: Interned<[u8]>) -> Result<Self, std::str::Utf8Error> {
|
||||
Interned::try_cast_unchecked(v, str::from_utf8)
|
||||
}
|
||||
pub fn as_interned_bytes(self) -> Interned<[u8]> {
|
||||
Interned::cast_unchecked(self, str::as_bytes)
|
||||
}
|
||||
pub fn as_interned_os_str(self) -> Interned<OsStr> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
pub fn as_interned_path(self) -> Interned<Path> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for Interned<OsStr> {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
value.as_interned_os_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for Interned<Path> {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
value.as_interned_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl Interned<OsStr> {
|
||||
pub fn as_interned_encoded_bytes(self) -> Interned<[u8]> {
|
||||
Interned::cast_unchecked(self, OsStr::as_encoded_bytes)
|
||||
}
|
||||
pub fn to_interned_str(self) -> Option<Interned<str>> {
|
||||
Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
|
||||
}
|
||||
pub fn display(self) -> std::ffi::os_str::Display<'static> {
|
||||
Self::into_inner(self).display()
|
||||
}
|
||||
pub fn as_interned_path(self) -> Interned<Path> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<OsStr>> for Interned<Path> {
|
||||
fn from(value: Interned<OsStr>) -> Self {
|
||||
value.as_interned_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl Interned<Path> {
|
||||
pub fn as_interned_os_str(self) -> Interned<OsStr> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
pub fn to_interned_str(self) -> Option<Interned<str>> {
|
||||
Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
|
||||
}
|
||||
pub fn display(self) -> std::path::Display<'static> {
|
||||
Self::into_inner(self).display()
|
||||
}
|
||||
pub fn interned_file_name(self) -> Option<Interned<OsStr>> {
|
||||
Some(self.file_name()?.intern())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<Path>> for Interned<OsStr> {
|
||||
fn from(value: Interned<Path>) -> Self {
|
||||
value.as_interned_os_str()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait InternSlice: Sized {
|
||||
type Element: 'static + Send + Sync + Clone + Hash + Eq;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]>;
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Box<[T]> {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.into_vec().intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Vec<T> {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.intern_deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ [T] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ mut [T] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for [T; N] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
(&self).intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for Box<[T; N]> {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
let this: Box<[T]> = self;
|
||||
this.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ [T; N] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
let this: &[T] = self;
|
||||
this.intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ mut [T; N] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
let this: &[T] = self;
|
||||
this.intern()
|
||||
}
|
||||
}
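A small sketch of the `InternSlice` entry points above; the element values are arbitrary, and the equality checks assume interning deduplicates equal contents (its `Eq` impl is not in this hunk):

```rust
// Owned, fixed-size, and borrowed inputs all intern to the same slice.
let from_vec: Interned<[u32]> = vec![1, 2, 3].intern_slice();
let from_array: Interned<[u32]> = [1, 2, 3].intern_slice();
let from_slice: Interned<[u32]> = (&[1, 2, 3][..]).intern_slice();
assert_eq!(from_vec, from_array);
assert_eq!(from_vec, from_slice);
```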
|
||||
|
||||
pub trait Intern: Any + Send + Sync {
|
||||
fn intern(&self) -> Interned<Self>;
|
||||
fn intern_deref(self) -> Interned<Self::Target>
|
||||
where
|
||||
Self: Sized + Deref<Target: Intern + ToOwned<Owned = Self>>,
|
||||
{
|
||||
Self::Target::intern_owned(self)
|
||||
}
|
||||
fn intern_sized(self) -> Interned<Self>
|
||||
where
|
||||
Self: Clone,
|
||||
|
|
@ -316,8 +569,37 @@ pub trait Intern: Any + Send + Sync {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Intern + ToOwned> From<Cow<'_, T>> for Interned<T> {
|
||||
fn from(value: Cow<'_, T>) -> Self {
|
||||
Intern::intern_cow(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Intern> From<&'_ T> for Interned<T> {
|
||||
fn from(value: &'_ T) -> Self {
|
||||
Intern::intern(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Intern + Clone> From<T> for Interned<T> {
|
||||
fn from(value: T) -> Self {
|
||||
Intern::intern_sized(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + 'static + Send + Sync + ToOwned> From<Interned<T>> for Cow<'_, T> {
|
||||
fn from(value: Interned<T>) -> Self {
|
||||
Cow::Borrowed(Interned::into_inner(value))
|
||||
}
|
||||
}
|
||||
|
||||
struct InternerState<T: ?Sized + 'static + Send + Sync> {
|
||||
table: HashTable<&'static T>,
|
||||
hasher: DefaultBuildHasher,
|
||||
}
|
||||
|
||||
pub struct Interner<T: ?Sized + 'static + Send + Sync> {
|
||||
map: Mutex<HashMap<&'static T, ()>>,
|
||||
state: Mutex<InternerState<T>>,
|
||||
}
|
||||
|
||||
impl<T: ?Sized + 'static + Send + Sync> Interner<T> {
|
||||
|
|
@ -330,7 +612,10 @@ impl<T: ?Sized + 'static + Send + Sync> Interner<T> {
|
|||
impl<T: ?Sized + 'static + Send + Sync> Default for Interner<T> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
map: Default::default(),
|
||||
state: Mutex::new(InternerState {
|
||||
table: HashTable::new(),
|
||||
hasher: Default::default(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -341,17 +626,16 @@ impl<T: ?Sized + 'static + Send + Sync + Hash + Eq + ToOwned> Interner<T> {
|
|||
alloc: F,
|
||||
value: Cow<'_, T>,
|
||||
) -> Interned<T> {
|
||||
let mut map = self.map.lock().unwrap();
|
||||
let hasher = map.hasher().clone();
|
||||
let hash = hasher.hash_one(&*value);
|
||||
let inner = match map.raw_entry_mut().from_hash(hash, |k| **k == *value) {
|
||||
RawEntryMut::Occupied(entry) => *entry.key(),
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
*entry
|
||||
.insert_with_hasher(hash, alloc(value), (), |k| hasher.hash_one(&**k))
|
||||
.0
|
||||
}
|
||||
};
|
||||
let mut state = self.state.lock().unwrap();
|
||||
let InternerState { table, hasher } = &mut *state;
|
||||
let inner = *table
|
||||
.entry(
|
||||
hasher.hash_one(&*value),
|
||||
|k| **k == *value,
|
||||
|k| hasher.hash_one(&**k),
|
||||
)
|
||||
.or_insert_with(|| alloc(value))
|
||||
.get();
|
||||
Interned { inner }
|
||||
}
|
||||
}
|
||||
|
|
@ -374,12 +658,6 @@ impl Interner<BitSlice> {
|
|||
}
|
||||
}
|
||||
|
||||
impl Interner<str> {
|
||||
fn intern_str(&self, value: Cow<'_, str>) -> Interned<str> {
|
||||
self.intern(|value| value.into_owned().leak(), value)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Interned<T: ?Sized + 'static + Send + Sync> {
|
||||
inner: &'static T,
|
||||
}
|
||||
|
|
@ -409,6 +687,12 @@ forward_fmt_trait!(Pointer);
|
|||
forward_fmt_trait!(UpperExp);
|
||||
forward_fmt_trait!(UpperHex);
|
||||
|
||||
impl<T: ?Sized + 'static + Send + Sync + AsRef<U>, U: ?Sized> AsRef<U> for Interned<T> {
|
||||
fn as_ref(&self) -> &U {
|
||||
T::as_ref(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct InternedSliceIter<T: Clone + 'static + Send + Sync> {
|
||||
slice: Interned<[T]>,
|
||||
|
|
@ -478,6 +762,57 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
impl<I> FromIterator<I> for Interned<str>
|
||||
where
|
||||
String: FromIterator<I>,
|
||||
{
|
||||
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||
String::from_iter(iter).intern_deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> FromIterator<I> for Interned<Path>
|
||||
where
|
||||
PathBuf: FromIterator<I>,
|
||||
{
|
||||
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||
PathBuf::from_iter(iter).intern_deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> FromIterator<I> for Interned<OsStr>
|
||||
where
|
||||
OsString: FromIterator<I>,
|
||||
{
|
||||
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||
OsString::from_iter(iter).intern_deref()
|
||||
}
|
||||
}
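These impls let the interned string-like types act as `collect()` targets directly; a minimal sketch with arbitrary input data:

```rust
// Collect pieces straight into interned values.
let greeting: Interned<str> = ["Hello", ", ", "world"].into_iter().collect();
assert_eq!(&*greeting, "Hello, world");

// `PathBuf: FromIterator<P: AsRef<Path>>` collects path components, so this
// builds `crates/fayalite/src` as an interned path.
let path: Interned<Path> = ["crates", "fayalite", "src"].into_iter().collect();
assert_eq!(&*path, Path::new("crates/fayalite/src"));
```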
|
||||
|
||||
impl From<Interned<str>> for clap::builder::Str {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for clap::builder::OsStr {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for clap::builder::StyledStr {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for clap::Id {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Vec<T> {
|
||||
fn from(value: Interned<[T]>) -> Self {
|
||||
Vec::from(&*value)
|
||||
|
|
@ -490,24 +825,12 @@ impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Box<[T]> {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for String {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
String::from(&*value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> Default for Interned<[I]>
|
||||
where
|
||||
[I]: Intern,
|
||||
{
|
||||
fn default() -> Self {
|
||||
[][..].intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Interned<str> {
|
||||
fn default() -> Self {
|
||||
"".intern()
|
||||
Intern::intern(&[])
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -638,15 +961,6 @@ impl<'de> Deserialize<'de> for Interned<BitSlice> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Interned<str> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
String::deserialize(deserializer).map(Intern::intern_owned)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Clone + Send + Sync + 'static + Hash + Eq> Intern for T {
|
||||
fn intern(&self) -> Interned<Self> {
|
||||
Self::intern_cow(Cow::Borrowed(self))
|
||||
|
|
@ -707,26 +1021,6 @@ impl Intern for BitSlice {
|
|||
}
|
||||
}
|
||||
|
||||
impl Intern for str {
|
||||
fn intern(&self) -> Interned<Self> {
|
||||
Self::intern_cow(Cow::Borrowed(self))
|
||||
}
|
||||
|
||||
fn intern_owned(this: <Self as ToOwned>::Owned) -> Interned<Self>
|
||||
where
|
||||
Self: ToOwned,
|
||||
{
|
||||
Self::intern_cow(Cow::Owned(this))
|
||||
}
|
||||
|
||||
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self>
|
||||
where
|
||||
Self: ToOwned,
|
||||
{
|
||||
Interner::get().intern_str(this)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
|
||||
type InputRef<'a>: 'a + Send + Sync + Hash + Copy;
|
||||
type InputOwned: 'static + Send + Sync;
|
||||
|
|
@ -742,7 +1036,7 @@ pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
|
|||
fn get_cow(self, input: Self::InputCow<'_>) -> Self::Output {
|
||||
static TYPE_ID_MAP: TypeIdMap = TypeIdMap::new();
|
||||
let map: &RwLock<(
|
||||
hashbrown::hash_map::DefaultHashBuilder,
|
||||
DefaultBuildHasher,
|
||||
HashTable<(Self, Self::InputOwned, Self::Output)>,
|
||||
)> = TYPE_ID_MAP.get_or_insert_default();
|
||||
fn hash_eq_key<'a, 'b, T: MemoizeGeneric>(
|
||||
|
|
|
|||
|
|
@ -1,10 +1,8 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use hashbrown::HashMap;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
hash::{BuildHasher, Hasher},
|
||||
ptr::NonNull,
|
||||
sync::RwLock,
|
||||
};
|
||||
|
||||
|
|
@ -75,59 +73,36 @@ impl BuildHasher for TypeIdBuildHasher {
|
|||
}
|
||||
}
|
||||
|
||||
struct Value(NonNull<dyn Any + Send + Sync>);
|
||||
|
||||
impl Value {
|
||||
unsafe fn get_transmute_lifetime<'b>(&self) -> &'b (dyn Any + Send + Sync) {
|
||||
unsafe { &*self.0.as_ptr() }
|
||||
}
|
||||
fn new(v: Box<dyn Any + Send + Sync>) -> Self {
|
||||
unsafe { Self(NonNull::new_unchecked(Box::into_raw(v))) }
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl Send for Value {}
|
||||
unsafe impl Sync for Value {}
|
||||
|
||||
impl Drop for Value {
|
||||
fn drop(&mut self) {
|
||||
unsafe { std::ptr::drop_in_place(self.0.as_ptr()) }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TypeIdMap(RwLock<HashMap<TypeId, Value, TypeIdBuildHasher>>);
|
||||
pub(crate) struct TypeIdMap(
|
||||
RwLock<hashbrown::HashMap<TypeId, &'static (dyn Any + Send + Sync), TypeIdBuildHasher>>,
|
||||
);
|
||||
|
||||
impl TypeIdMap {
|
||||
pub const fn new() -> Self {
|
||||
Self(RwLock::new(HashMap::with_hasher(TypeIdBuildHasher)))
|
||||
pub(crate) const fn new() -> Self {
|
||||
Self(RwLock::new(hashbrown::HashMap::with_hasher(
|
||||
TypeIdBuildHasher,
|
||||
)))
|
||||
}
|
||||
#[cold]
|
||||
unsafe fn insert_slow(
|
||||
fn insert_slow(
|
||||
&self,
|
||||
type_id: TypeId,
|
||||
make: fn() -> Box<dyn Any + Sync + Send>,
|
||||
) -> &(dyn Any + Sync + Send) {
|
||||
let value = Value::new(make());
|
||||
) -> &'static (dyn Any + Sync + Send) {
|
||||
let value = Box::leak(make());
|
||||
let mut write_guard = self.0.write().unwrap();
|
||||
unsafe {
|
||||
write_guard
|
||||
.entry(type_id)
|
||||
.or_insert(value)
|
||||
.get_transmute_lifetime()
|
||||
}
|
||||
*write_guard.entry(type_id).or_insert(value)
|
||||
}
|
||||
pub fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
|
||||
pub(crate) fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
|
||||
let type_id = TypeId::of::<T>();
|
||||
let read_guard = self.0.read().unwrap();
|
||||
let retval = read_guard
|
||||
.get(&type_id)
|
||||
.map(|v| unsafe { Value::get_transmute_lifetime(v) });
|
||||
let retval = read_guard.get(&type_id).map(|v| *v);
|
||||
drop(read_guard);
|
||||
let retval = match retval {
|
||||
Some(retval) => retval,
|
||||
None => unsafe { self.insert_slow(type_id, move || Box::new(T::default())) },
|
||||
None => self.insert_slow(type_id, move || Box::new(T::default())),
|
||||
};
|
||||
unsafe { &*(retval as *const dyn Any as *const T) }
|
||||
retval.downcast_ref().expect("known to have correct TypeId")
|
||||
}
|
||||
}
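A minimal sketch of how the crate-internal `TypeIdMap` is used after this change, mirroring the `TYPE_ID_MAP` pattern in `MemoizeGeneric::get_cow`; the `RwLock<Vec<u64>>` value type is an arbitrary stand-in:

```rust
// One process-wide map keyed by `TypeId`; values are leaked, so the
// returned reference is `'static` once inserted.
static TYPE_ID_MAP: TypeIdMap = TypeIdMap::new();

fn counters() -> &'static RwLock<Vec<u64>> {
    // The first call for a given `T` allocates (and leaks) `T::default()`;
    // every later call returns that same reference.
    TYPE_ID_MAP.get_or_insert_default()
}
```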
|
||||
|
||||
|
|
|
|||
|
|
@ -4,6 +4,18 @@
|
|||
// TODO: enable:
|
||||
// #![warn(missing_docs)]
|
||||
|
||||
#![deny(
|
||||
rustdoc::bare_urls,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
rustdoc::invalid_codeblock_attributes,
|
||||
rustdoc::invalid_html_tags,
|
||||
rustdoc::invalid_rust_codeblocks,
|
||||
rustdoc::private_doc_tests,
|
||||
rustdoc::private_intra_doc_links,
|
||||
rustdoc::redundant_explicit_links,
|
||||
rustdoc::unescaped_backticks
|
||||
)]
|
||||
|
||||
//! [Main Documentation][_docs]
|
||||
|
||||
extern crate self as fayalite;
|
||||
|
|
@ -11,6 +23,59 @@ extern crate self as fayalite;
|
|||
#[doc(hidden)]
|
||||
pub use std as __std;
|
||||
|
||||
#[doc(hidden)]
|
||||
#[macro_export]
|
||||
macro_rules! __cfg_expansion_helper {
|
||||
(
|
||||
[
|
||||
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
|
||||
]
|
||||
[
|
||||
$cfg:ident($($expr:tt)*),
|
||||
$($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)*
|
||||
]
|
||||
// pass as tt so we get right span for attribute
|
||||
$after_evaluation_attr:tt $after_evaluation_body:tt
|
||||
) => {
|
||||
#[$cfg($($expr)*)]
|
||||
$crate::__cfg_expansion_helper! {
|
||||
[
|
||||
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
|
||||
$cfg($($expr)*) = true,
|
||||
]
|
||||
[
|
||||
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
|
||||
]
|
||||
$after_evaluation_attr $after_evaluation_body
|
||||
}
|
||||
#[$cfg(not($($expr)*))]
|
||||
$crate::__cfg_expansion_helper! {
|
||||
[
|
||||
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
|
||||
$cfg($($expr)*) = false,
|
||||
]
|
||||
[
|
||||
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
|
||||
]
|
||||
$after_evaluation_attr $after_evaluation_body
|
||||
}
|
||||
};
|
||||
(
|
||||
[
|
||||
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
|
||||
]
|
||||
[]
|
||||
// don't use #[...] so we get right span for `#` and `[]` of attribute
|
||||
{$($after_evaluation_attr:tt)*} {$($after_evaluation_body:tt)*}
|
||||
) => {
|
||||
$($after_evaluation_attr)*
|
||||
#[__evaluated_cfgs([
|
||||
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
|
||||
])]
|
||||
$($after_evaluation_body)*
|
||||
};
|
||||
}
|
||||
|
||||
#[doc(inline)]
|
||||
/// The `#[hdl_module]` attribute is applied to a Rust function so that the function creates
|
||||
/// a [`Module`][`::fayalite::module::Module`] when called.
|
||||
|
|
@ -21,8 +86,139 @@ pub use std as __std;
|
|||
pub use fayalite_proc_macros::hdl_module;
|
||||
|
||||
#[doc(inline)]
|
||||
/// The `#[hdl]` attribute is supported on several different kinds of [Rust Items](https://doc.rust-lang.org/reference/items.html):
|
||||
///
|
||||
/// # Functions and Methods
|
||||
/// Enables the constructs that you can use inside a [module's body](crate::_docs::modules::module_bodies),
|
||||
/// but without being a module or changing the function's signature.
|
||||
/// The only exception is that you can't use anything that requires the automatically-provided `m` variable.
|
||||
///
|
||||
/// # Structs
|
||||
// TODO: expand on struct docs
|
||||
/// e.g.:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct OtherStruct {}
|
||||
/// #[hdl]
|
||||
/// pub struct MyStruct {
|
||||
/// #[hdl(flip)]
|
||||
/// pub a: UInt<5>,
|
||||
/// pub b: Bool,
|
||||
/// #[hdl(flip)]
|
||||
/// pub c: OtherStruct,
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Enums
|
||||
// TODO: expand on enum docs
|
||||
/// e.g.:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct MyStruct {}
|
||||
/// #[hdl]
|
||||
/// pub enum MyEnum {
|
||||
/// A(UInt<3>),
|
||||
/// B,
|
||||
/// C(MyStruct),
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Type Aliases
|
||||
///
|
||||
/// There are three different ways you can create a type alias:
|
||||
///
|
||||
/// # Normal Type Alias
|
||||
///
|
||||
/// This works exactly how you'd expect:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct MyStruct<T: Type> {
|
||||
/// # v: T,
|
||||
/// # }
|
||||
/// #[hdl]
|
||||
/// pub type MyType<T: Type> = MyStruct<T>;
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
///
|
||||
/// let ty = MyType[UInt[3]];
|
||||
/// assert_eq!(ty, MyStruct[UInt[3]]);
|
||||
/// ```
|
||||
///
|
||||
/// # Type Alias that gets a [`Type`] from a [`PhantomConst`]
|
||||
///
|
||||
/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Type`] that you can use in other `#[hdl]` types.
|
||||
///
|
||||
/// ```
|
||||
/// # use fayalite::{intern::Intern, prelude::*};
|
||||
/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// pub struct Config {
|
||||
/// pub foo: usize,
|
||||
/// pub bar: Bundle,
|
||||
/// }
|
||||
///
|
||||
/// // the expression inside `get` is called with `Interned<Config>` and returns `Array<Bundle>`
|
||||
/// #[hdl(get(|config| Array[config.bar][config.foo]))]
|
||||
/// pub type GetMyArray<P: PhantomConstGet<Config>> = Array<Bundle>;
|
||||
///
|
||||
/// // you can then use it in other types:
|
||||
///
|
||||
/// #[hdl(no_static)]
|
||||
/// pub struct WrapMyArray<P: Type + PhantomConstGet<Config>> {
|
||||
/// pub my_array: GetMyArray<P>,
|
||||
/// }
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
/// let bar = Bundle::new(Default::default());
|
||||
/// let config = PhantomConst::new(Config { foo: 12, bar }.intern_sized());
|
||||
/// let ty = WrapMyArray[config];
|
||||
/// assert_eq!(ty.my_array, Array[bar][12]);
|
||||
/// ```
|
||||
///
|
||||
/// # Type Alias that gets a [`Size`] from a [`PhantomConst`]
|
||||
///
|
||||
/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Size`] that you can use in other `#[hdl]` types.
|
||||
///
|
||||
/// ```
|
||||
/// # use fayalite::{intern::Intern, prelude::*};
|
||||
/// # #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// # pub struct ConfigItem {}
|
||||
/// # impl ConfigItem {
|
||||
/// # pub fn new() -> Self {
|
||||
/// # Self {}
|
||||
/// # }
|
||||
/// # }
|
||||
/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// pub struct Config {
|
||||
/// pub items: Vec<ConfigItem>,
|
||||
/// }
|
||||
///
|
||||
/// // the expression inside `get` is called with `Interned<Config>` and returns `usize` (not DynSize)
|
||||
/// #[hdl(get(|config| config.items.len()))]
|
||||
/// pub type GetItemsLen<P: PhantomConstGet<Config>> = DynSize; // must be DynSize
|
||||
///
|
||||
/// // you can then use it in other types:
|
||||
///
|
||||
/// #[hdl(no_static)]
|
||||
/// pub struct FlagPerItem<P: Type + PhantomConstGet<Config>> {
|
||||
/// pub flags: ArrayType<Bool, GetItemsLen<P>>,
|
||||
/// }
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
/// let config = PhantomConst::new(Config { items: vec![ConfigItem::new(); 5] }.intern_sized());
|
||||
/// let ty = FlagPerItem[config];
|
||||
/// assert_eq!(ty.flags, Array[Bool][5]);
|
||||
/// ```
|
||||
///
|
||||
/// [`PhantomConst`]: crate::phantom_const::PhantomConst
|
||||
/// [`Size`]: crate::int::Size
|
||||
/// [`Type`]: crate::ty::Type
|
||||
pub use fayalite_proc_macros::hdl;
|
||||
|
||||
pub use bitvec;
|
||||
|
||||
/// Struct used as a placeholder when applying defaults.
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct __;
|
||||
|
|
@@ -32,8 +228,8 @@ pub mod _docs;
pub mod annotations;
pub mod array;
pub mod build;
pub mod bundle;
pub mod cli;
pub mod clock;
pub mod enum_;
pub mod expr;
@@ -43,11 +239,15 @@ pub mod int;
pub mod intern;
pub mod memory;
pub mod module;
pub mod phantom_const;
pub mod platform;
pub mod prelude;
pub mod reg;
pub mod reset;
pub mod sim;
pub mod source_location;
pub mod testing;
pub mod ty;
pub mod util;
pub mod vendor;
pub mod wire;

@@ -7,7 +7,7 @@ use crate::{
    array::{Array, ArrayType},
    bundle::{Bundle, BundleType},
    clock::Clock,
    expr::{ops::BundleLiteral, repeat, Expr, Flow, ToExpr, ToLiteralBits},
    expr::{Expr, Flow, ToExpr, ToLiteralBits, ops::BundleLiteral, repeat},
    hdl,
    int::{Bool, DynSize, Size, UInt, UIntType},
    intern::{Intern, Interned},
@@ -22,7 +22,7 @@ use std::{
    fmt,
    hash::{Hash, Hasher},
    marker::PhantomData,
    num::NonZeroU32,
    num::NonZeroUsize,
    rc::Rc,
};
@@ -470,7 +470,7 @@ pub enum ReadUnderWrite {
    Undefined,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MemImpl<Element: Type, Len: Size, P> {
    scoped_name: ScopedNameId,
    source_location: SourceLocation,
@@ -478,7 +478,7 @@ struct MemImpl<Element: Type, Len: Size, P> {
    initial_value: Option<Interned<BitSlice>>,
    ports: P,
    read_latency: usize,
    write_latency: NonZeroU32,
    write_latency: NonZeroUsize,
    read_under_write: ReadUnderWrite,
    port_annotations: Interned<[TargetedAnnotation]>,
    mem_annotations: Interned<[Annotation]>,
@@ -519,7 +519,12 @@ impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
        f.debug_struct("Mem")
            .field("name", scoped_name)
            .field("array_type", array_type)
            .field("initial_value", initial_value)
            .field(
                "initial_value",
                &initial_value.as_ref().map(|initial_value| {
                    DebugMemoryData::from_bit_slice(*array_type, initial_value)
                }),
            )
            .field("read_latency", read_latency)
            .field("write_latency", write_latency)
            .field("read_under_write", read_under_write)
@@ -562,7 +567,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
        initial_value: Option<Interned<BitSlice>>,
        ports: Interned<[MemPort<DynPortType>]>,
        read_latency: usize,
        write_latency: NonZeroU32,
        write_latency: NonZeroUsize,
        read_under_write: ReadUnderWrite,
        port_annotations: Interned<[TargetedAnnotation]>,
        mem_annotations: Interned<[Annotation]>,
@@ -645,7 +650,7 @@ impl<Element: Type, Len: Size> Mem<Element, Len> {
    pub fn read_latency(self) -> usize {
        self.0.read_latency
    }
    pub fn write_latency(self) -> NonZeroU32 {
    pub fn write_latency(self) -> NonZeroUsize {
        self.0.write_latency
    }
    pub fn read_under_write(self) -> ReadUnderWrite {
@@ -707,7 +712,7 @@ pub(crate) struct MemBuilderTarget {
    pub(crate) initial_value: Option<Interned<BitSlice>>,
    pub(crate) ports: Vec<MemPort<DynPortType>>,
    pub(crate) read_latency: usize,
    pub(crate) write_latency: NonZeroU32,
    pub(crate) write_latency: NonZeroUsize,
    pub(crate) read_under_write: ReadUnderWrite,
    pub(crate) port_annotations: Vec<TargetedAnnotation>,
    pub(crate) mem_annotations: Vec<Annotation>,
@@ -867,7 +872,7 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
            initial_value: None,
            ports: vec![],
            read_latency: 0,
            write_latency: NonZeroU32::new(1).unwrap(),
            write_latency: NonZeroUsize::new(1).unwrap(),
            read_under_write: ReadUnderWrite::Old,
            port_annotations: vec![],
            mem_annotations: vec![],
@@ -1030,10 +1035,10 @@ impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
    pub fn read_latency(&mut self, read_latency: usize) {
        self.target.borrow_mut().read_latency = read_latency;
    }
    pub fn get_write_latency(&self) -> NonZeroU32 {
    pub fn get_write_latency(&self) -> NonZeroUsize {
        self.target.borrow().write_latency
    }
    pub fn write_latency(&mut self, write_latency: NonZeroU32) {
    pub fn write_latency(&mut self, write_latency: NonZeroUsize) {
        self.target.borrow_mut().write_latency = write_latency;
    }
    pub fn get_read_under_write(&self) -> ReadUnderWrite {
@@ -1061,7 +1066,8 @@ pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
        | CanonicalType::SyncReset(_)
        | CanonicalType::Reset(_)
        | CanonicalType::Clock(_)
        | CanonicalType::Enum(_) => Expr::from_canonical(Expr::canonical(value)),
        | CanonicalType::Enum(_)
        | CanonicalType::DynSimOnly(_) => Expr::from_canonical(Expr::canonical(value)),
        CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat(
            splat_mask(array.element(), value),
            array.len(),
@@ -1077,5 +1083,64 @@ pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
            )
            .to_expr(),
        )),
        CanonicalType::PhantomConst(_) => Expr::from_canonical(Expr::canonical(().to_expr())),
    }
}

pub trait DebugMemoryDataGetElement {
    fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice;
}

impl<'a, F: ?Sized + Fn(usize, Array) -> &'a BitSlice> DebugMemoryDataGetElement for &'a F {
    fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
        self(element_index, array_type)
    }
}

#[derive(Clone)]
pub struct DebugMemoryData<GetElement: DebugMemoryDataGetElement> {
    pub array_type: Array,
    pub get_element: GetElement,
}

impl DebugMemoryDataGetElement for &'_ BitSlice {
    fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
        assert!(element_index < array_type.len());
        let stride = array_type.element().bit_width();
        let start = element_index
            .checked_mul(stride)
            .expect("memory is too big");
        let end = start.checked_add(stride).expect("memory is too big");
        &self[start..end]
    }
}

impl<'a> DebugMemoryData<&'a BitSlice> {
    pub fn from_bit_slice<T: Type, Depth: Size>(
        array_type: ArrayType<T, Depth>,
        bit_slice: &'a BitSlice,
    ) -> Self {
        let array_type = array_type.as_dyn_array();
        assert_eq!(bit_slice.len(), array_type.type_properties().bit_width);
        Self {
            array_type,
            get_element: bit_slice,
        }
    }
}

impl<GetElement: DebugMemoryDataGetElement> fmt::Debug for DebugMemoryData<GetElement> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.array_type.len() == 0 {
            return f.write_str("[]");
        }
        writeln!(f, "[\n // len = {:#x}", self.array_type.len())?;
        for element_index in 0..self.array_type.len() {
            let element = crate::util::BitSliceWriteWithBase(
                self.get_element.get_element(element_index, self.array_type),
            );
            writeln!(f, " [{element_index:#x}]: {element:#x},")?;
        }
        f.write_str("]")
    }
}
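The `DebugMemoryData` helper above is only exercised indirectly in this diff (through `Mem`'s `Debug` impl), so here is a minimal usage sketch that is not part of the change: the element type, length, bit pattern, and the `dump_memory_bits` function are made up, and it assumes the `bits!` macro from the re-exported `bitvec` crate plus `Array` and `Bool` from the prelude.

```rust
use fayalite::{bitvec::bits, memory::DebugMemoryData, prelude::*};

fn dump_memory_bits() {
    // An 8-element memory of Bool, so the backing bit slice is 8 bits long in total.
    let array_type = Array[Bool][8];
    let bits = bits![0, 1, 0, 1, 1, 0, 0, 1];
    // `from_bit_slice` asserts that the slice length matches the array's bit width;
    // the `Debug` impl then prints one hex-formatted line per element.
    let debug_data = DebugMemoryData::from_bit_slice(array_type, bits);
    println!("{debug_data:?}");
}
```

For an empty memory (`len() == 0`) the `Debug` impl above simply prints `[]`.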

@@ -8,30 +8,34 @@ use crate::{
    clock::{Clock, ClockDomain},
    enum_::{Enum, EnumMatchVariantsIter, EnumType},
    expr::{
        Expr, Flow, ToExpr,
        ops::VariantAccess,
        target::{
            GetTarget, Target, TargetBase, TargetPathArrayElement, TargetPathBundleField,
            TargetPathElement,
        },
        Expr, Flow, ToExpr,
    },
    formal::FormalKind,
    int::{Bool, DynSize, Size},
    intern::{Intern, Interned},
    memory::{Mem, MemBuilder, MemBuilderTarget, PortName},
    platform::PlatformIOBuilder,
    reg::Reg,
    reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
    sim::{ExternModuleSimGenerator, ExternModuleSimulation},
    source_location::SourceLocation,
    ty::{CanonicalType, Type},
    util::ScopedRef,
    util::{HashMap, HashSet, ScopedRef},
    wire::{IncompleteWire, Wire},
};
use hashbrown::{hash_map::Entry, HashMap, HashSet};
use hashbrown::hash_map::Entry;
use num_bigint::BigInt;
use std::{
    cell::RefCell,
    collections::VecDeque,
    collections::{BTreeMap, VecDeque},
    convert::Infallible,
    fmt,
    future::IntoFuture,
    hash::{Hash, Hasher},
    iter::FusedIterator,
    marker::PhantomData,
@ -180,7 +184,7 @@ impl Block {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct StmtConnect {
|
||||
pub lhs: Expr<CanonicalType>,
|
||||
pub rhs: Expr<CanonicalType>,
|
||||
|
|
@ -235,7 +239,7 @@ impl fmt::Debug for StmtConnect {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct StmtFormal {
|
||||
pub kind: FormalKind,
|
||||
pub clk: Expr<Clock>,
|
||||
|
|
@ -284,6 +288,8 @@ pub struct StmtIf<S: ModuleBuildingStatus = ModuleBuilt> {
|
|||
pub blocks: [S::Block; 2],
|
||||
}
|
||||
|
||||
impl Copy for StmtIf {}
|
||||
|
||||
impl<S: ModuleBuildingStatus> StmtIf<S> {
|
||||
pub fn then_block(&self) -> S::Block {
|
||||
self.blocks[0]
|
||||
|
|
@ -315,6 +321,8 @@ pub struct StmtMatch<S: ModuleBuildingStatus = ModuleBuilt> {
|
|||
pub blocks: Interned<[S::Block]>,
|
||||
}
|
||||
|
||||
impl Copy for StmtMatch {}
|
||||
|
||||
impl StmtMatch {
|
||||
#[track_caller]
|
||||
fn assert_validity(&self) {
|
||||
|
|
@ -346,7 +354,7 @@ macro_rules! wrapper_enum {
|
|||
$(#[$enum_meta:meta])*
|
||||
$vis:vis enum $enum_name:ident<$T_enum:ident: $T_bound:ident = $T_enum_default:ident> {
|
||||
$(
|
||||
#[is = $is_fn:ident, as_ref = $as_ref_fn:ident]
|
||||
#[is = $is_fn:ident, as_ref = $as_ref_fn:ident $(, from = $from:ident)?]
|
||||
$(#[$variant_meta:meta])*
|
||||
$Variant:ident($VariantTy:ty),
|
||||
)*
|
||||
|
|
@ -358,7 +366,7 @@ macro_rules! wrapper_enum {
|
|||
$(#[$enum_meta])*
|
||||
$vis enum $enum_name<$T_enum: $T_bound = $T_enum_default> {
|
||||
$(
|
||||
#[is = $is_fn, as_ref = $as_ref_fn]
|
||||
#[is = $is_fn, as_ref = $as_ref_fn $(, from = $from)?]
|
||||
$(#[$variant_meta])*
|
||||
$Variant($VariantTy),
|
||||
)*
|
||||
|
|
@ -385,7 +393,7 @@ macro_rules! wrapper_enum {
|
|||
$(#[$enum_meta:meta])*
|
||||
$vis:vis enum $enum_name:ident<$T_enum:ident: $T_bound:ident = $T_enum_default:ident> {
|
||||
$(
|
||||
#[is = $is_fn:ident, as_ref = $as_ref_fn:ident]
|
||||
#[is = $is_fn:ident, as_ref = $as_ref_fn:ident $(, from = $from:ident)?]
|
||||
$(#[$variant_meta:meta])*
|
||||
$Variant:ident($VariantTy:ty),
|
||||
)*
|
||||
|
|
@ -397,22 +405,22 @@ macro_rules! wrapper_enum {
|
|||
$(#[$enum_meta])*
|
||||
$vis enum $enum_name<$T_enum: $T_bound = $T_enum_default> {
|
||||
$(
|
||||
#[is = $is_fn, as_ref = $as_ref_fn]
|
||||
#[is = $is_fn, as_ref = $as_ref_fn $(, from = $from)?]
|
||||
$(#[$variant_meta])*
|
||||
$Variant($VariantTy),
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
$(
|
||||
$($(
|
||||
wrapper_enum! {
|
||||
impl $T_to From<$VariantTy> for $to_type {
|
||||
fn from(value: $VariantTy) -> Self {
|
||||
fn $from(value: $VariantTy) -> Self {
|
||||
$enum_name::$Variant(value).into()
|
||||
}
|
||||
}
|
||||
}
|
||||
)*
|
||||
)?)*
|
||||
};
|
||||
(
|
||||
#[impl()]
|
||||
|
|
@ -420,7 +428,7 @@ macro_rules! wrapper_enum {
|
|||
$(#[$enum_meta:meta])*
|
||||
$vis:vis enum $enum_name:ident<$T_enum:ident: $T_bound:ident = $T_enum_default:ident> {
|
||||
$(
|
||||
#[is = $is_fn:ident, as_ref = $as_ref_fn:ident]
|
||||
#[is = $is_fn:ident, as_ref = $as_ref_fn:ident $(, from = $from:ident)?]
|
||||
$(#[$variant_meta:meta])*
|
||||
$Variant:ident($VariantTy:ty),
|
||||
)*
|
||||
|
|
@ -459,13 +467,15 @@ pub struct StmtWire<S: ModuleBuildingStatus = ModuleBuilt> {
|
|||
pub wire: Wire<CanonicalType>,
|
||||
}
|
||||
|
||||
impl Copy for StmtWire {}
|
||||
|
||||
#[derive(Hash, Clone, PartialEq, Eq, Debug)]
|
||||
pub struct StmtReg<S: ModuleBuildingStatus = ModuleBuilt> {
|
||||
pub struct StmtReg<R: ResetType, S: ModuleBuildingStatus = ModuleBuilt> {
|
||||
pub annotations: S::StmtAnnotations,
|
||||
pub reg: Reg<CanonicalType>,
|
||||
pub reg: Reg<CanonicalType, R>,
|
||||
}
|
||||
|
||||
impl Copy for StmtReg {}
|
||||
impl<R: ResetType> Copy for StmtReg<R> {}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct StmtInstance<S: ModuleBuildingStatus = ModuleBuilt> {
|
||||
|
|
@ -473,6 +483,8 @@ pub struct StmtInstance<S: ModuleBuildingStatus = ModuleBuilt> {
|
|||
pub instance: Instance<Bundle>,
|
||||
}
|
||||
|
||||
impl Copy for StmtInstance {}
|
||||
|
||||
wrapper_enum! {
|
||||
#[impl(
|
||||
(<S: ModuleBuildingStatus>) self: StmtDeclaration<S> = self,
|
||||
|
|
@ -481,20 +493,57 @@ wrapper_enum! {
|
|||
#[to((<S: ModuleBuildingStatus>) StmtDeclaration<S>, (<S: ModuleBuildingStatus>) Stmt<S>)]
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
pub enum StmtDeclaration<S: ModuleBuildingStatus = ModuleBuilt> {
|
||||
#[is = is_wire, as_ref = wire]
|
||||
#[is = is_wire, as_ref = wire, from = from]
|
||||
Wire(StmtWire<S>),
|
||||
#[is = is_reg, as_ref = reg]
|
||||
Reg(StmtReg<S>),
|
||||
#[is = is_instance, as_ref = instance]
|
||||
Reg(StmtReg<Reset, S>),
|
||||
#[is = is_reg_sync, as_ref = reg_sync]
|
||||
RegSync(StmtReg<SyncReset, S>),
|
||||
#[is = is_reg_async, as_ref = reg_async]
|
||||
RegAsync(StmtReg<AsyncReset, S>),
|
||||
#[is = is_instance, as_ref = instance, from = from]
|
||||
Instance(StmtInstance<S>),
|
||||
}
|
||||
}
|
||||
|
||||
impl Copy for StmtDeclaration {}
|
||||
|
||||
impl<S: ModuleBuildingStatus, R: ResetType> From<StmtReg<R, S>> for Stmt<S> {
|
||||
fn from(value: StmtReg<R, S>) -> Self {
|
||||
StmtDeclaration::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: ModuleBuildingStatus, R: ResetType> From<StmtReg<R, S>> for StmtDeclaration<S> {
|
||||
fn from(value: StmtReg<R, S>) -> Self {
|
||||
struct Dispatch<S>(PhantomData<S>);
|
||||
impl<S: ModuleBuildingStatus> ResetTypeDispatch for Dispatch<S> {
|
||||
type Input<T: ResetType> = StmtReg<T, S>;
|
||||
type Output<T: ResetType> = StmtDeclaration<S>;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
|
||||
StmtDeclaration::Reg(input)
|
||||
}
|
||||
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
|
||||
StmtDeclaration::RegSync(input)
|
||||
}
|
||||
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
|
||||
StmtDeclaration::RegAsync(input)
|
||||
}
|
||||
}
|
||||
R::dispatch(value, Dispatch(PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: ModuleBuildingStatus> StmtDeclaration<S> {
|
||||
pub fn annotations(&self) -> S::StmtAnnotations {
|
||||
match self {
|
||||
StmtDeclaration::Wire(v) => v.annotations,
|
||||
StmtDeclaration::Reg(v) => v.annotations,
|
||||
StmtDeclaration::RegSync(v) => v.annotations,
|
||||
StmtDeclaration::RegAsync(v) => v.annotations,
|
||||
StmtDeclaration::Instance(v) => v.annotations,
|
||||
}
|
||||
}
|
||||
|
|
@ -502,6 +551,8 @@ impl<S: ModuleBuildingStatus> StmtDeclaration<S> {
|
|||
match self {
|
||||
StmtDeclaration::Wire(v) => v.wire.source_location(),
|
||||
StmtDeclaration::Reg(v) => v.reg.source_location(),
|
||||
StmtDeclaration::RegSync(v) => v.reg.source_location(),
|
||||
StmtDeclaration::RegAsync(v) => v.reg.source_location(),
|
||||
StmtDeclaration::Instance(v) => v.instance.source_location(),
|
||||
}
|
||||
}
|
||||
|
|
@ -509,20 +560,26 @@ impl<S: ModuleBuildingStatus> StmtDeclaration<S> {
|
|||
match self {
|
||||
StmtDeclaration::Wire(v) => v.wire.scoped_name(),
|
||||
StmtDeclaration::Reg(v) => v.reg.scoped_name(),
|
||||
StmtDeclaration::RegSync(v) => v.reg.scoped_name(),
|
||||
StmtDeclaration::RegAsync(v) => v.reg.scoped_name(),
|
||||
StmtDeclaration::Instance(v) => v.instance.scoped_name(),
|
||||
}
|
||||
}
|
||||
pub fn sub_stmt_blocks(&self) -> &[S::Block] {
|
||||
match self {
|
||||
StmtDeclaration::Wire(_) | StmtDeclaration::Reg(_) | StmtDeclaration::Instance(_) => {
|
||||
&[]
|
||||
}
|
||||
StmtDeclaration::Wire(_)
|
||||
| StmtDeclaration::Reg(_)
|
||||
| StmtDeclaration::RegSync(_)
|
||||
| StmtDeclaration::RegAsync(_)
|
||||
| StmtDeclaration::Instance(_) => &[],
|
||||
}
|
||||
}
|
||||
pub fn canonical_ty(&self) -> CanonicalType {
|
||||
match self {
|
||||
StmtDeclaration::Wire(v) => v.wire.ty(),
|
||||
StmtDeclaration::Reg(v) => v.reg.ty(),
|
||||
StmtDeclaration::RegSync(v) => v.reg.ty(),
|
||||
StmtDeclaration::RegAsync(v) => v.reg.ty(),
|
||||
StmtDeclaration::Instance(v) => CanonicalType::Bundle(v.instance.ty()),
|
||||
}
|
||||
}
|
||||
|
|
@ -533,19 +590,21 @@ wrapper_enum! {
|
|||
#[to((<S: ModuleBuildingStatus>) Stmt<S>)]
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
pub enum Stmt<S: ModuleBuildingStatus = ModuleBuilt> {
|
||||
#[is = is_connect, as_ref = connect]
|
||||
#[is = is_connect, as_ref = connect, from = from]
|
||||
Connect(StmtConnect),
|
||||
#[is = is_formal, as_ref = formal]
|
||||
#[is = is_formal, as_ref = formal, from = from]
|
||||
Formal(StmtFormal),
|
||||
#[is = is_if, as_ref = if_]
|
||||
#[is = is_if, as_ref = if_, from = from]
|
||||
If(StmtIf<S>),
|
||||
#[is = is_match, as_ref = match_]
|
||||
#[is = is_match, as_ref = match_, from = from]
|
||||
Match(StmtMatch<S>),
|
||||
#[is = is_declaration, as_ref = declaration]
|
||||
#[is = is_declaration, as_ref = declaration, from = from]
|
||||
Declaration(StmtDeclaration<S>),
|
||||
}
|
||||
}
|
||||
|
||||
impl Copy for Stmt {}
|
||||
|
||||
impl<S: ModuleBuildingStatus> Stmt<S> {
|
||||
pub fn sub_stmt_blocks(&self) -> &[S::Block] {
|
||||
match self {
|
||||
|
|
@ -714,6 +773,18 @@ impl<T: BundleType> Instance<T> {
|
|||
source_location,
|
||||
}
|
||||
}
|
||||
pub fn from_canonical(v: Instance<Bundle>) -> Self {
|
||||
let Instance {
|
||||
scoped_name,
|
||||
instantiated,
|
||||
source_location,
|
||||
} = v;
|
||||
Self {
|
||||
scoped_name,
|
||||
instantiated: Module::from_canonical(*instantiated).intern_sized(),
|
||||
source_location,
|
||||
}
|
||||
}
|
||||
pub fn containing_module_name(self) -> Interned<str> {
|
||||
self.containing_module_name_id().0
|
||||
}
|
||||
|
|
@ -763,6 +834,8 @@ pub struct AnnotatedModuleIO<S: ModuleBuildingStatus = ModuleBuilt> {
|
|||
pub module_io: ModuleIO<CanonicalType>,
|
||||
}
|
||||
|
||||
impl Copy for AnnotatedModuleIO {}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub enum ModuleKind {
|
||||
Extern,
|
||||
|
|
@ -958,6 +1031,14 @@ impl From<NormalModuleBody<ModuleBuilding>> for NormalModuleBody {
|
|||
annotations: (),
|
||||
reg,
|
||||
}) => StmtReg { annotations, reg }.into(),
|
||||
StmtDeclaration::RegSync(StmtReg {
|
||||
annotations: (),
|
||||
reg,
|
||||
}) => StmtReg { annotations, reg }.into(),
|
||||
StmtDeclaration::RegAsync(StmtReg {
|
||||
annotations: (),
|
||||
reg,
|
||||
}) => StmtReg { annotations, reg }.into(),
|
||||
StmtDeclaration::Instance(StmtInstance {
|
||||
annotations: (),
|
||||
instance,
|
||||
|
|
@ -1005,6 +1086,7 @@ pub struct ExternModuleBody<
|
|||
> {
|
||||
pub verilog_name: Interned<str>,
|
||||
pub parameters: P,
|
||||
pub simulation: Option<ExternModuleSimulation>,
|
||||
}
|
||||
|
||||
impl From<ExternModuleBody<Vec<ExternModuleParameter>>> for ExternModuleBody {
|
||||
|
|
@ -1012,11 +1094,13 @@ impl From<ExternModuleBody<Vec<ExternModuleParameter>>> for ExternModuleBody {
|
|||
let ExternModuleBody {
|
||||
verilog_name,
|
||||
parameters,
|
||||
simulation,
|
||||
} = value;
|
||||
let parameters = Intern::intern_owned(parameters);
|
||||
Self {
|
||||
verilog_name,
|
||||
parameters,
|
||||
simulation,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1131,6 +1215,12 @@ pub struct Module<T: BundleType> {
|
|||
module_annotations: Interned<[Annotation]>,
|
||||
}
|
||||
|
||||
impl<T: BundleType> AsRef<Self> for Module<T> {
|
||||
fn as_ref(&self) -> &Self {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct DebugFmtModulesState {
|
||||
seen: HashSet<NameId>,
|
||||
|
|
@ -1207,10 +1297,12 @@ impl<T: BundleType> fmt::Debug for DebugModuleBody<T> {
|
|||
ModuleBody::Extern(ExternModuleBody {
|
||||
verilog_name,
|
||||
parameters,
|
||||
simulation,
|
||||
}) => {
|
||||
debug_struct
|
||||
.field("verilog_name", verilog_name)
|
||||
.field("parameters", parameters);
|
||||
.field("parameters", parameters)
|
||||
.field("simulation", simulation);
|
||||
}
|
||||
}
|
||||
debug_struct.finish_non_exhaustive()
|
||||
|
|
@ -1376,7 +1468,9 @@ impl TargetState {
|
|||
})
|
||||
.reduce(TargetWritten::conditional_merge_written)
|
||||
else {
|
||||
unreachable!("merge_conditional_sub_blocks_into_block must be called with at least one sub-block");
|
||||
unreachable!(
|
||||
"merge_conditional_sub_blocks_into_block must be called with at least one sub-block"
|
||||
);
|
||||
};
|
||||
let mut written_in_blocks = written_in_blocks.borrow_mut();
|
||||
if target_block >= written_in_blocks.len() {
|
||||
|
|
@ -1414,6 +1508,9 @@ impl TargetState {
|
|||
})
|
||||
.collect(),
|
||||
},
|
||||
CanonicalType::PhantomConst(_) => TargetStateInner::Decomposed {
|
||||
subtargets: HashMap::default(),
|
||||
},
|
||||
CanonicalType::Array(ty) => TargetStateInner::Decomposed {
|
||||
subtargets: (0..ty.len())
|
||||
.map(|index| {
|
||||
|
|
@ -1436,7 +1533,8 @@ impl TargetState {
|
|||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => TargetStateInner::Single {
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::DynSimOnly(_) => TargetStateInner::Single {
|
||||
declared_in_block,
|
||||
written_in_blocks: RefCell::default(),
|
||||
},
|
||||
|
|
@ -1661,6 +1759,14 @@ impl AssertValidityState {
|
|||
annotations: _,
|
||||
reg,
|
||||
})) => self.insert_new_base(TargetBase::intern_sized(reg.into()), block),
|
||||
Stmt::Declaration(StmtDeclaration::RegSync(StmtReg {
|
||||
annotations: _,
|
||||
reg,
|
||||
})) => self.insert_new_base(TargetBase::intern_sized(reg.into()), block),
|
||||
Stmt::Declaration(StmtDeclaration::RegAsync(StmtReg {
|
||||
annotations: _,
|
||||
reg,
|
||||
})) => self.insert_new_base(TargetBase::intern_sized(reg.into()), block),
|
||||
Stmt::Declaration(StmtDeclaration::Instance(StmtInstance {
|
||||
annotations: _,
|
||||
instance,
|
||||
|
|
@ -1674,6 +1780,7 @@ impl AssertValidityState {
|
|||
ModuleBody::Extern(ExternModuleBody {
|
||||
verilog_name: _,
|
||||
parameters: _,
|
||||
simulation: _,
|
||||
}) => {}
|
||||
ModuleBody::Normal(NormalModuleBody { body }) => {
|
||||
let body = self.make_block_index(body);
|
||||
|
|
@ -1695,12 +1802,49 @@ impl<T: BundleType> Module<T> {
|
|||
pub fn new_unchecked(
|
||||
name_id: NameId,
|
||||
source_location: SourceLocation,
|
||||
body: ModuleBody,
|
||||
mut body: ModuleBody,
|
||||
module_io: impl IntoIterator<Item = AnnotatedModuleIO>,
|
||||
module_annotations: impl IntoAnnotations,
|
||||
) -> Module<T> {
|
||||
let module_io: Interned<[_]> = module_io.into_iter().collect();
|
||||
let module_annotations = module_annotations.into_annotations().into_iter().collect();
|
||||
match &mut body {
|
||||
ModuleBody::Normal(_) => {}
|
||||
ModuleBody::Extern(ExternModuleBody {
|
||||
simulation: Some(simulation),
|
||||
..
|
||||
}) => {
|
||||
if module_io.iter().any(|io| {
|
||||
!simulation
|
||||
.sim_io_to_generator_map
|
||||
.contains_key(&io.module_io.intern())
|
||||
}) {
|
||||
let mut sim_io_to_generator_map =
|
||||
BTreeMap::clone(&simulation.sim_io_to_generator_map);
|
||||
for io in module_io.iter() {
|
||||
let io = io.module_io.intern();
|
||||
sim_io_to_generator_map.entry(io).or_insert(io);
|
||||
}
|
||||
simulation.sim_io_to_generator_map = sim_io_to_generator_map.intern_sized();
|
||||
}
|
||||
if simulation.sim_io_to_generator_map.len() > module_io.len() {
|
||||
// if sim_io_to_generator_map is bigger, then there must be a key that's not in module_io
|
||||
let module_io_set = HashSet::from_iter(module_io.iter().map(|v| v.module_io));
|
||||
for (sim_io, generator_io) in simulation.sim_io_to_generator_map.iter() {
|
||||
if !module_io_set.contains(&**sim_io) {
|
||||
panic!(
|
||||
"extern module has invalid `sim_io_to_generator_map`: key is not in containing module's `module_io`:\n\
|
||||
key={sim_io:?}\nvalue={generator_io:?}\nmodule location: {source_location}"
|
||||
);
|
||||
}
|
||||
}
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
ModuleBody::Extern(ExternModuleBody {
|
||||
simulation: None, ..
|
||||
}) => {}
|
||||
}
|
||||
let retval = Module {
|
||||
name: name_id,
|
||||
source_location,
|
||||
|
|
@ -1769,7 +1913,7 @@ impl<T: BundleType> Module<T> {
|
|||
AssertValidityState {
|
||||
module: self.canonical(),
|
||||
blocks: vec![],
|
||||
target_states: HashMap::with_capacity(64),
|
||||
target_states: HashMap::with_capacity_and_hasher(64, Default::default()),
|
||||
}
|
||||
.assert_validity();
|
||||
}
|
||||
|
|
@ -1842,10 +1986,10 @@ impl<CD> RegBuilder<CD, (), ()> {
|
|||
}
|
||||
|
||||
impl<I, T: Type> RegBuilder<(), I, T> {
|
||||
pub fn clock_domain(
|
||||
pub fn clock_domain<R: ResetType>(
|
||||
self,
|
||||
clock_domain: impl ToExpr<Type = ClockDomain>,
|
||||
) -> RegBuilder<Expr<ClockDomain>, I, T> {
|
||||
clock_domain: impl ToExpr<Type = ClockDomain<R>>,
|
||||
) -> RegBuilder<Expr<ClockDomain<R>>, I, T> {
|
||||
let Self {
|
||||
name,
|
||||
source_location,
|
||||
|
|
@ -1863,7 +2007,7 @@ impl<I, T: Type> RegBuilder<(), I, T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Type> RegBuilder<Expr<ClockDomain>, Option<Expr<T>>, T> {
|
||||
impl<T: Type, R: ResetType> RegBuilder<Expr<ClockDomain<R>>, Option<Expr<T>>, T> {
|
||||
#[track_caller]
|
||||
pub fn build(self) -> Expr<T> {
|
||||
let Self {
|
||||
|
|
@ -1976,6 +2120,27 @@ impl ModuleBuilder {
|
|||
self.output_with_loc(implicit_name.0, SourceLocation::caller(), ty)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn add_platform_io_with_loc(
|
||||
&self,
|
||||
name: &str,
|
||||
source_location: SourceLocation,
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
) -> Expr<Bundle> {
|
||||
platform_io_builder.add_platform_io(name, source_location, self)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn add_platform_io(
|
||||
&self,
|
||||
implicit_name: ImplicitName<'_>,
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
) -> Expr<Bundle> {
|
||||
self.add_platform_io_with_loc(
|
||||
implicit_name.0,
|
||||
SourceLocation::caller(),
|
||||
platform_io_builder,
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn run<T: BundleType>(
|
||||
name: &str,
|
||||
module_kind: ModuleKind,
|
||||
|
|
@ -2021,6 +2186,7 @@ impl ModuleBuilder {
|
|||
ModuleKind::Extern => ModuleBody::Extern(ExternModuleBody {
|
||||
verilog_name: name.0,
|
||||
parameters: vec![],
|
||||
simulation: None,
|
||||
}),
|
||||
ModuleKind::Normal => ModuleBody::Normal(NormalModuleBody {
|
||||
body: BuilderModuleBody {
|
||||
|
|
@ -2029,8 +2195,8 @@ impl ModuleBuilder {
|
|||
incomplete_declarations: vec![],
|
||||
stmts: vec![],
|
||||
}],
|
||||
annotations_map: HashMap::new(),
|
||||
memory_map: HashMap::new(),
|
||||
annotations_map: HashMap::default(),
|
||||
memory_map: HashMap::default(),
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
|
@ -2040,7 +2206,7 @@ impl ModuleBuilder {
|
|||
impl_: RefCell::new(ModuleBuilderImpl {
|
||||
body,
|
||||
io: vec![],
|
||||
io_indexes: HashMap::new(),
|
||||
io_indexes: HashMap::default(),
|
||||
module_annotations: vec![],
|
||||
}),
|
||||
};
|
||||
|
|
@ -2087,6 +2253,7 @@ impl ModuleBuilder {
|
|||
.builder_extern_body()
|
||||
.verilog_name = name.intern();
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter(&self, name: impl AsRef<str>, value: ExternModuleParameterValue) {
|
||||
let name = name.as_ref();
|
||||
self.impl_
|
||||
|
|
@ -2099,6 +2266,7 @@ impl ModuleBuilder {
|
|||
value,
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter_int(&self, name: impl AsRef<str>, value: impl Into<BigInt>) {
|
||||
let name = name.as_ref();
|
||||
let value = value.into();
|
||||
|
|
@ -2112,6 +2280,7 @@ impl ModuleBuilder {
|
|||
value: ExternModuleParameterValue::Integer(value),
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter_str(&self, name: impl AsRef<str>, value: impl AsRef<str>) {
|
||||
let name = name.as_ref();
|
||||
let value = value.as_ref();
|
||||
|
|
@ -2125,6 +2294,7 @@ impl ModuleBuilder {
|
|||
value: ExternModuleParameterValue::String(value.intern()),
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter_raw_verilog(&self, name: impl AsRef<str>, raw_verilog: impl AsRef<str>) {
|
||||
let name = name.as_ref();
|
||||
let raw_verilog = raw_verilog.as_ref();
|
||||
|
|
@ -2138,6 +2308,26 @@ impl ModuleBuilder {
|
|||
value: ExternModuleParameterValue::RawVerilog(raw_verilog.intern()),
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn extern_module_simulation<G: ExternModuleSimGenerator>(&self, generator: G) {
|
||||
let mut impl_ = self.impl_.borrow_mut();
|
||||
let simulation = &mut impl_.body.builder_extern_body().simulation;
|
||||
if simulation.is_some() {
|
||||
panic!("already added an extern module simulation");
|
||||
}
|
||||
*simulation = Some(ExternModuleSimulation::new(generator));
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn extern_module_simulation_fn<
|
||||
Args: fmt::Debug + Clone + Hash + Eq + Send + Sync + 'static,
|
||||
Fut: IntoFuture<Output = ()> + 'static,
|
||||
>(
|
||||
&self,
|
||||
args: Args,
|
||||
f: fn(Args, crate::sim::ExternModuleSimulationState) -> Fut,
|
||||
) {
|
||||
self.extern_module_simulation(crate::sim::SimGeneratorFn { args, f });
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
|
@ -2170,14 +2360,12 @@ pub fn annotate<T: Type>(target: Expr<T>, annotations: impl IntoAnnotations) {
|
|||
}
|
||||
TargetBase::MemPort(v) => {
|
||||
ModuleBuilder::with(|m| {
|
||||
RefCell::borrow_mut(unwrap!(unwrap!(m
|
||||
.impl_
|
||||
.borrow_mut()
|
||||
.body
|
||||
.builder_normal_body_opt())
|
||||
.body
|
||||
.memory_map
|
||||
.get_mut(&v.mem_name())))
|
||||
RefCell::borrow_mut(unwrap!(
|
||||
unwrap!(m.impl_.borrow_mut().body.builder_normal_body_opt())
|
||||
.body
|
||||
.memory_map
|
||||
.get_mut(&v.mem_name())
|
||||
))
|
||||
.port_annotations
|
||||
.extend(annotations)
|
||||
});
|
||||
|
|
@ -2188,6 +2376,16 @@ pub fn annotate<T: Type>(target: Expr<T>, annotations: impl IntoAnnotations) {
|
|||
reg,
|
||||
}
|
||||
.into(),
|
||||
TargetBase::RegSync(reg) => StmtReg {
|
||||
annotations: (),
|
||||
reg,
|
||||
}
|
||||
.into(),
|
||||
TargetBase::RegAsync(reg) => StmtReg {
|
||||
annotations: (),
|
||||
reg,
|
||||
}
|
||||
.into(),
|
||||
TargetBase::Wire(wire) => StmtWire {
|
||||
annotations: (),
|
||||
wire,
|
||||
|
|
@ -2567,6 +2765,22 @@ impl<T: Type> ModuleIO<T> {
|
|||
source_location,
|
||||
}
|
||||
}
|
||||
pub fn from_canonical(canonical_module_io: ModuleIO<CanonicalType>) -> Self {
|
||||
let ModuleIO {
|
||||
containing_module_name,
|
||||
bundle_field,
|
||||
id,
|
||||
ty,
|
||||
source_location,
|
||||
} = canonical_module_io;
|
||||
Self {
|
||||
containing_module_name,
|
||||
bundle_field,
|
||||
id,
|
||||
ty: T::from_canonical(ty),
|
||||
source_location,
|
||||
}
|
||||
}
|
||||
pub fn bundle_field(&self) -> BundleField {
|
||||
self.bundle_field
|
||||
}
|
||||
|
|
@ -2629,3 +2843,50 @@ impl<T: Type> ModuleIO<T> {
|
|||
self.ty
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
|
||||
pub enum InstantiatedModule {
|
||||
Base(Interned<Module<Bundle>>),
|
||||
Child {
|
||||
parent: Interned<InstantiatedModule>,
|
||||
instance: Interned<Instance<Bundle>>,
|
||||
},
|
||||
}
|
||||
|
||||
impl InstantiatedModule {
|
||||
pub fn leaf_module(self) -> Interned<Module<Bundle>> {
|
||||
match self {
|
||||
InstantiatedModule::Base(base) => base,
|
||||
InstantiatedModule::Child { instance, .. } => instance.instantiated(),
|
||||
}
|
||||
}
|
||||
fn write_path(self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
InstantiatedModule::Base(base) => fmt::Debug::fmt(&base.name_id(), f),
|
||||
InstantiatedModule::Child { parent, instance } => {
|
||||
parent.write_path(f)?;
|
||||
write!(f, ".{}", instance.name_id())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for InstantiatedModule {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "InstantiatedModule(")?;
|
||||
self.write_path(f)?;
|
||||
write!(f, ": {})", self.leaf_module().name_id())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
|
||||
pub struct TargetInInstantiatedModule {
|
||||
pub instantiated_module: InstantiatedModule,
|
||||
pub target: Target,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]
|
||||
pub struct ExprInInstantiatedModule<T: Type> {
|
||||
pub instantiated_module: InstantiatedModule,
|
||||
pub expr: Expr<T>,
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,6 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub mod deduce_resets;
pub mod simplify_enums;
pub mod simplify_memories;
pub mod visit;

crates/fayalite/src/module/transform/deduce_resets.rs: new file, 2331 lines (diff suppressed because it is too large)

@ -5,23 +5,24 @@ use crate::{
|
|||
bundle::{Bundle, BundleField, BundleType},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
ops::{self, EnumLiteral},
|
||||
CastBitsTo, CastTo, CastToBits, Expr, ExprEnum, HdlPartialEq, ToExpr,
|
||||
ops::{self, EnumLiteral},
|
||||
},
|
||||
hdl,
|
||||
int::UInt,
|
||||
intern::{Intern, Interned, Memoize},
|
||||
intern::{Intern, InternSlice, Interned, Memoize},
|
||||
memory::{DynPortType, Mem, MemPort},
|
||||
module::{
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
transform::visit::{Fold, Folder},
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::HashMap,
|
||||
wire::Wire,
|
||||
};
|
||||
use core::fmt;
|
||||
use hashbrown::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SimplifyEnumsError {
|
||||
|
|
@ -69,7 +70,9 @@ fn contains_any_enum_types(ty: CanonicalType) -> bool {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_) => false,
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -512,7 +515,9 @@ impl State {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_) => unreachable!(),
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -577,7 +582,9 @@ fn connect_port(
|
|||
| (CanonicalType::Clock(_), _)
|
||||
| (CanonicalType::AsyncReset(_), _)
|
||||
| (CanonicalType::SyncReset(_), _)
|
||||
| (CanonicalType::Reset(_), _) => unreachable!(
|
||||
| (CanonicalType::Reset(_), _)
|
||||
| (CanonicalType::PhantomConst(_), _)
|
||||
| (CanonicalType::DynSimOnly(_), _) => unreachable!(
|
||||
"trying to connect memory ports:\n{:?}\n{:?}",
|
||||
Expr::ty(lhs),
|
||||
Expr::ty(rhs),
|
||||
|
|
@ -613,7 +620,7 @@ fn match_int_tag(
|
|||
block,
|
||||
Block {
|
||||
memories: Default::default(),
|
||||
stmts: [Stmt::from(retval)][..].intern(),
|
||||
stmts: [Stmt::from(retval)].intern_slice(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -665,6 +672,7 @@ impl Folder for State {
|
|||
ExprEnum::UIntLiteral(_)
|
||||
| ExprEnum::SIntLiteral(_)
|
||||
| ExprEnum::BoolLiteral(_)
|
||||
| ExprEnum::PhantomConst(_)
|
||||
| ExprEnum::BundleLiteral(_)
|
||||
| ExprEnum::ArrayLiteral(_)
|
||||
| ExprEnum::Uninit(_)
|
||||
|
|
@ -764,7 +772,9 @@ impl Folder for State {
|
|||
| ExprEnum::ModuleIO(_)
|
||||
| ExprEnum::Instance(_)
|
||||
| ExprEnum::Wire(_)
|
||||
| ExprEnum::Reg(_) => op.default_fold(self),
|
||||
| ExprEnum::Reg(_)
|
||||
| ExprEnum::RegSync(_)
|
||||
| ExprEnum::RegAsync(_) => op.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -804,7 +814,7 @@ impl Folder for State {
|
|||
.unwrap()
|
||||
.gen_name(&format!(
|
||||
"{}_{}",
|
||||
memory.scoped_name().1 .0,
|
||||
memory.scoped_name().1.0,
|
||||
port.port_name()
|
||||
)),
|
||||
port.source_location(),
|
||||
|
|
@ -921,7 +931,9 @@ impl Folder for State {
|
|||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => canonical_type.default_fold(self),
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => canonical_type.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -944,12 +956,15 @@ impl Folder for State {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum SimplifyEnumsKind {
|
||||
SimplifyToEnumsWithNoBody,
|
||||
#[clap(name = "replace-with-bundle-of-uints")]
|
||||
#[serde(rename = "replace-with-bundle-of-uints")]
|
||||
ReplaceWithBundleOfUInts,
|
||||
#[clap(name = "replace-with-uint")]
|
||||
#[serde(rename = "replace-with-uint")]
|
||||
ReplaceWithUInt,
|
||||
}
|
||||
|
||||
|
|
@ -958,8 +973,8 @@ pub fn simplify_enums(
|
|||
kind: SimplifyEnumsKind,
|
||||
) -> Result<Interned<Module<Bundle>>, SimplifyEnumsError> {
|
||||
module.fold(&mut State {
|
||||
enum_types: HashMap::new(),
|
||||
replacement_mem_ports: HashMap::new(),
|
||||
enum_types: HashMap::default(),
|
||||
replacement_mem_ports: HashMap::default(),
|
||||
kind,
|
||||
module_state_stack: vec![],
|
||||
})
|
||||
|
|
|
|||
|
|
@ -9,16 +9,15 @@ use crate::{
|
|||
intern::{Intern, Interned},
|
||||
memory::{Mem, MemPort, PortType},
|
||||
module::{
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
|
||||
transform::visit::{Fold, Folder},
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::MakeMutSlice,
|
||||
util::{HashMap, MakeMutSlice},
|
||||
wire::Wire,
|
||||
};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use hashbrown::HashMap;
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
fmt::Write,
|
||||
|
|
@ -62,6 +61,7 @@ enum MemSplit {
|
|||
Bundle {
|
||||
fields: Rc<[MemSplit]>,
|
||||
},
|
||||
PhantomConst,
|
||||
Single {
|
||||
output_mem: Option<Mem>,
|
||||
element_type: SingleType,
|
||||
|
|
@ -76,6 +76,7 @@ impl MemSplit {
|
|||
fn mark_changed_element_type(self) -> Self {
|
||||
match self {
|
||||
MemSplit::Bundle { fields: _ } => self,
|
||||
MemSplit::PhantomConst => self,
|
||||
MemSplit::Single {
|
||||
output_mem,
|
||||
element_type,
|
||||
|
|
@ -97,6 +98,7 @@ impl MemSplit {
|
|||
.map(|field| Self::new(field.ty).mark_changed_element_type())
|
||||
.collect(),
|
||||
},
|
||||
CanonicalType::PhantomConst(_) => MemSplit::PhantomConst,
|
||||
CanonicalType::Array(ty) => {
|
||||
let element = MemSplit::new(ty.element());
|
||||
if let Self::Single {
|
||||
|
|
@ -192,6 +194,7 @@ impl MemSplit {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
|
||||
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -321,6 +324,9 @@ impl SplitMemState<'_, '_> {
|
|||
Expr::field(Expr::<Bundle>::from_canonical(e), &field.name)
|
||||
},
|
||||
|initial_value_element| {
|
||||
let Some(field_offset) = field_offset.only_bit_width() else {
|
||||
todo!("memory containing sim-only values");
|
||||
};
|
||||
&initial_value_element[field_offset..][..field_ty_bit_width]
|
||||
},
|
||||
);
|
||||
|
|
@ -339,6 +345,7 @@ impl SplitMemState<'_, '_> {
|
|||
self.split_state_stack.pop();
|
||||
}
|
||||
}
|
||||
MemSplit::PhantomConst => {}
|
||||
MemSplit::Single {
|
||||
output_mem,
|
||||
element_type: single_type,
|
||||
|
|
@ -538,7 +545,12 @@ impl ModuleState {
|
|||
};
|
||||
loop {
|
||||
match input_element_type {
|
||||
CanonicalType::Bundle(_) => unreachable!("bundle types are always split"),
|
||||
CanonicalType::Bundle(_) => {
|
||||
unreachable!("bundle types are always split")
|
||||
}
|
||||
CanonicalType::PhantomConst(_) => {
|
||||
unreachable!("PhantomConst are always removed")
|
||||
}
|
||||
CanonicalType::Enum(_)
|
||||
if input_array_types
|
||||
.first()
|
||||
|
|
@ -612,6 +624,7 @@ impl ModuleState {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
|
||||
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -626,7 +639,7 @@ impl ModuleState {
|
|||
split_state: &SplitState<'_>,
|
||||
) -> Mem {
|
||||
let mem_name = NameId(
|
||||
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1 .0)),
|
||||
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1.0)),
|
||||
Id::new(),
|
||||
);
|
||||
let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name);
|
||||
|
|
@ -743,7 +756,8 @@ impl ModuleState {
|
|||
..
|
||||
}
|
||||
| MemSplit::Bundle { .. }
|
||||
| MemSplit::Array { .. } => {
|
||||
| MemSplit::Array { .. }
|
||||
| MemSplit::PhantomConst => {
|
||||
let mut replacement_ports = Vec::with_capacity(input_mem.ports().len());
|
||||
let mut wire_port_rdata = Vec::with_capacity(input_mem.ports().len());
|
||||
let mut wire_port_wdata = Vec::with_capacity(input_mem.ports().len());
|
||||
|
|
@ -887,7 +901,7 @@ impl Folder for State {
|
|||
module,
|
||||
ModuleState {
|
||||
output_module: None,
|
||||
memories: HashMap::new(),
|
||||
memories: HashMap::default(),
|
||||
},
|
||||
);
|
||||
let mut this = PushedState::push_module(self, module);
|
||||
|
|
|
|||
|
|
@ -11,12 +11,11 @@ use crate::{
|
|||
clock::Clock,
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
ops,
|
||||
Expr, ExprEnum, ops,
|
||||
target::{
|
||||
Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
Expr, ExprEnum,
|
||||
},
|
||||
formal::FormalKind,
|
||||
int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue},
|
||||
|
|
@ -28,10 +27,15 @@ use crate::{
|
|||
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf,
|
||||
StmtInstance, StmtMatch, StmtReg, StmtWire,
|
||||
},
|
||||
phantom_const::PhantomConst,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
reset::{AsyncReset, Reset, ResetType, SyncReset},
|
||||
sim::{ExternModuleSimulation, value::DynSimOnly},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
vendor::xilinx::{
|
||||
XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation,
|
||||
},
|
||||
wire::Wire,
|
||||
};
|
||||
use num_bigint::{BigInt, BigUint};
|
||||
|
crates/fayalite/src/phantom_const.rs: new file, 485 lines

|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
Expr, ToExpr,
|
||||
ops::{ExprPartialEq, ExprPartialOrd},
|
||||
},
|
||||
int::Bool,
|
||||
intern::{Intern, Interned, InternedCompare, LazyInterned, LazyInternedTrait, Memoize},
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
|
||||
StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
serde_impls::{SerdeCanonicalType, SerdePhantomConst},
|
||||
},
|
||||
};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error},
|
||||
};
|
||||
use std::{
|
||||
any::Any,
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
ops::Index,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PhantomConstCanonicalValue {
|
||||
parsed: serde_json::Value,
|
||||
serialized: Interned<str>,
|
||||
}
|
||||
|
||||
impl PhantomConstCanonicalValue {
|
||||
pub fn from_json_value(parsed: serde_json::Value) -> Self {
|
||||
let serialized = Intern::intern_owned(
|
||||
serde_json::to_string(&parsed)
|
||||
.expect("conversion from json value to text shouldn't fail"),
|
||||
);
|
||||
Self { parsed, serialized }
|
||||
}
|
||||
pub fn as_json_value(&self) -> &serde_json::Value {
|
||||
&self.parsed
|
||||
}
|
||||
pub fn as_str(&self) -> Interned<str> {
|
||||
self.serialized
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for PhantomConstCanonicalValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.serialized)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PhantomConstCanonicalValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.serialized)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for PhantomConstCanonicalValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.serialized == other.serialized
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for PhantomConstCanonicalValue {}
|
||||
|
||||
impl Hash for PhantomConstCanonicalValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.serialized.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for PhantomConstCanonicalValue {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.parsed.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for PhantomConstCanonicalValue {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(Self::from_json_value(serde_json::Value::deserialize(
|
||||
deserializer,
|
||||
)?))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PhantomConstValue: Intern + InternedCompare + Serialize + fmt::Debug {
|
||||
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>;
|
||||
}
|
||||
|
||||
impl<T> PhantomConstValue for T
|
||||
where
|
||||
T: ?Sized + Intern + InternedCompare + Serialize + fmt::Debug,
|
||||
Interned<T>: DeserializeOwned,
|
||||
{
|
||||
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
<Interned<T> as Deserialize<'de>>::deserialize(deserializer)
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper type that allows any Rust value to be smuggled as a HDL [`Type`].
/// This only works for values that can be [serialized][Serialize] to and [deserialized][Deserialize] from [JSON][serde_json].
pub struct PhantomConst<T: ?Sized + PhantomConstValue = PhantomConstCanonicalValue> {
    value: LazyInterned<T>,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct PhantomConstWithoutGenerics;

#[allow(non_upper_case_globals)]
pub const PhantomConst: PhantomConstWithoutGenerics = PhantomConstWithoutGenerics;
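To make the doc comment above concrete, here is a small sketch that is not part of this diff: the `MyConfig` type, its `lanes` field, and the `demo` function are hypothetical, and the imports mirror the `#[hdl]` doctests in `lib.rs`.

```rust
use fayalite::{intern::Intern, prelude::*};

// Hypothetical configuration value; any `Serialize + Deserialize + Eq + Hash + Debug` data works.
#[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
pub struct MyConfig {
    pub lanes: usize,
}

fn demo() {
    // Wrap an interned value so it can travel through the HDL type system without occupying any bits.
    let config = PhantomConst::new(MyConfig { lanes: 4 }.intern_sized());
    // The wrapped value can be read back, and equal values compare equal after interning.
    assert_eq!(config.get().lanes, 4);
    assert_eq!(config, PhantomConst::new(MyConfig { lanes: 4 }.intern_sized()));
}
```

Since a `PhantomConst` carries no runtime bits, comparing two of them in expressions (via `ExprPartialEq` further down) is a formality: `cmp_eq` asserts the two types are equal and then returns a constant `true`.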
|
||||
impl<T: Type + PhantomConstValue> Index<T> for PhantomConstWithoutGenerics {
|
||||
type Output = PhantomConst<T>;
|
||||
|
||||
fn index(&self, value: T) -> &Self::Output {
|
||||
Interned::into_inner(PhantomConst::new(value.intern()).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> fmt::Debug for PhantomConst<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("PhantomConst").field(&self.get()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Clone for PhantomConst<T> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Copy for PhantomConst<T> {}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> PartialEq for PhantomConst<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.get() == other.get()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Eq for PhantomConst<T> {}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Hash for PhantomConst<T> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.get().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
struct PhantomConstCanonicalMemoize<T: ?Sized, const IS_FROM_CANONICAL: bool>(PhantomData<T>);
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Copy
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Clone
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Eq
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> PartialEq
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn eq(&self, _other: &Self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Hash
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, false> {
|
||||
type Input = Interned<T>;
|
||||
type InputOwned = Interned<T>;
|
||||
type Output = Interned<PhantomConstCanonicalValue>;
|
||||
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
Intern::intern_sized(PhantomConstCanonicalValue::from_json_value(
|
||||
serde_json::to_value(input)
|
||||
.expect("serialization failed when constructing a canonical PhantomConst"),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, true> {
|
||||
type Input = Interned<PhantomConstCanonicalValue>;
|
||||
type InputOwned = Interned<PhantomConstCanonicalValue>;
|
||||
type Output = Interned<T>;
|
||||
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
PhantomConstValue::deserialize_value(input.as_json_value())
|
||||
.expect("deserialization failed ")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> PhantomConst<T> {
|
||||
pub fn new(value: Interned<T>) -> Self {
|
||||
Self {
|
||||
value: LazyInterned::Interned(value),
|
||||
}
|
||||
}
|
||||
pub const fn new_lazy(v: &'static dyn LazyInternedTrait<T>) -> Self {
|
||||
Self {
|
||||
value: LazyInterned::new_lazy(v),
|
||||
}
|
||||
}
|
||||
pub fn get(self) -> Interned<T> {
|
||||
self.value.interned()
|
||||
}
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
<()>::TYPE_PROPERTIES
|
||||
}
|
||||
pub fn can_connect(self, other: Self) -> bool {
|
||||
self == other
|
||||
}
|
||||
pub fn canonical_phantom_const(self) -> PhantomConst {
|
||||
if let Some(&retval) = <dyn Any>::downcast_ref::<PhantomConst>(&self) {
|
||||
return retval;
|
||||
}
|
||||
<PhantomConst>::new(
|
||||
PhantomConstCanonicalMemoize::<T, false>(PhantomData).get_owned(self.get()),
|
||||
)
|
||||
}
|
||||
pub fn from_canonical_phantom_const(canonical_type: PhantomConst) -> Self {
|
||||
if let Some(&retval) = <dyn Any>::downcast_ref::<Self>(&canonical_type) {
|
||||
return retval;
|
||||
}
|
||||
Self::new(
|
||||
PhantomConstCanonicalMemoize::<T, true>(PhantomData).get_owned(canonical_type.get()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Type for PhantomConst<T> {
|
||||
type BaseType = PhantomConst;
|
||||
type MaskType = ();
|
||||
type SimValue = PhantomConst<T>;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
()
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::PhantomConst(self.canonical_phantom_const())
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let CanonicalType::PhantomConst(phantom_const) = canonical_type else {
|
||||
panic!("expected PhantomConst");
|
||||
};
|
||||
Self::from_canonical_phantom_const(phantom_const)
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert!(opaque.is_empty());
|
||||
*self
|
||||
    }

    fn sim_value_clone_from_opaque(
        &self,
        value: &mut Self::SimValue,
        opaque: OpaqueSimValueSlice<'_>,
    ) {
        assert!(opaque.is_empty());
        assert_eq!(*value, *self);
    }

    fn sim_value_to_opaque<'w>(
        &self,
        value: &Self::SimValue,
        writer: OpaqueSimValueWriter<'w>,
    ) -> OpaqueSimValueWritten<'w> {
        assert_eq!(*value, *self);
        writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
    }
}

impl<T: ?Sized + PhantomConstValue> Default for PhantomConst<T>
where
    Interned<T>: Default,
{
    fn default() -> Self {
        Self::TYPE
    }
}

impl<T: ?Sized + PhantomConstValue> StaticType for PhantomConst<T>
where
    Interned<T>: Default,
{
    const TYPE: Self = PhantomConst {
        value: LazyInterned::new_lazy(&Interned::<T>::default),
    };
    const MASK_TYPE: Self::MaskType = ();
    const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
    const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
}

type SerdeType<T> = SerdeCanonicalType<CanonicalType, SerdePhantomConst<Interned<T>>>;

impl<T: ?Sized + PhantomConstValue> Serialize for PhantomConst<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        SerdeType::<T>::PhantomConst(SerdePhantomConst(self.get())).serialize(serializer)
    }
}

impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for PhantomConst<T> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        match SerdeType::<T>::deserialize(deserializer)? {
            SerdeCanonicalType::PhantomConst(SerdePhantomConst(value)) => Ok(Self::new(value)),
            ty => Err(Error::invalid_value(
                serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
                &"a PhantomConst",
            )),
        }
    }
}

impl<T: ?Sized + PhantomConstValue> ExprPartialEq<Self> for PhantomConst<T> {
    fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
        assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
        true.to_expr()
    }

    fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
        assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
        false.to_expr()
    }
}

impl<T: ?Sized + PhantomConstValue> ExprPartialOrd<Self> for PhantomConst<T> {
    fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
        assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
        false.to_expr()
    }

    fn cmp_le(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
        assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
        true.to_expr()
    }

    fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
        assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
        false.to_expr()
    }

    fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
        assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
        true.to_expr()
    }
}

impl<T: ?Sized + PhantomConstValue> SimValuePartialEq<Self> for PhantomConst<T> {
    fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
        assert_eq!(SimValue::ty(this), SimValue::ty(other));
        true
    }
}

impl<T: ?Sized + PhantomConstValue> ToSimValue for PhantomConst<T> {
    type Type = PhantomConst<T>;

    fn to_sim_value(&self) -> SimValue<Self::Type> {
        SimValue::from_value(*self, *self)
    }
}

impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<PhantomConst<T>> for PhantomConst<T> {
    fn to_sim_value_with_type(&self, ty: PhantomConst<T>) -> SimValue<PhantomConst<T>> {
        SimValue::from_value(ty, *self)
    }
}

impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<CanonicalType> for PhantomConst<T> {
    fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
        SimValue::into_canonical(SimValue::from_value(Self::from_canonical(ty), *self))
    }
}

mod sealed {
    pub trait Sealed<T: ?Sized> {}
}

pub trait PhantomConstGet<T: ?Sized + PhantomConstValue>: sealed::Sealed<T> {
    fn get(&self) -> Interned<T>;
}

impl<T: ?Sized + PhantomConstValue, This: ?Sized + std::ops::Deref<Target: PhantomConstGet<T>>>
    sealed::Sealed<T> for This
{
}

impl<T: ?Sized + PhantomConstValue, This: ?Sized + std::ops::Deref<Target: PhantomConstGet<T>>>
    PhantomConstGet<T> for This
{
    fn get(&self) -> Interned<T> {
        This::Target::get(&**self)
    }
}

macro_rules! impl_phantom_const_get {
    (
        impl PhantomConstGet<$T:ident> for $ty:ty {
            fn $get:ident(&$get_self:ident) -> _ $get_body:block
        }
    ) => {
        impl<$T: ?Sized + PhantomConstValue> sealed::Sealed<$T> for $ty {}

        impl<$T: ?Sized + PhantomConstValue> PhantomConstGet<$T> for $ty {
            fn $get(&$get_self) -> Interned<$T> $get_body
        }
    };
}

impl_phantom_const_get! {
    impl PhantomConstGet<T> for PhantomConst<T> {
        fn get(&self) -> _ {
            PhantomConst::get(*self)
        }
    }
}

impl_phantom_const_get! {
    impl PhantomConstGet<T> for Expr<PhantomConst<T>> {
        fn get(&self) -> _ {
            PhantomConst::get(Expr::ty(*self))
        }
    }
}

#[doc(hidden)]
pub trait ReturnSelfUnchanged<T: ?Sized> {
    type Type: ?Sized;
}

impl<This: ?Sized, T: ?Sized> ReturnSelfUnchanged<T> for This {
    type Type = This;
}

#[doc(hidden)]
pub fn type_alias_phantom_const_get_helper<T: ?Sized + PhantomConstValue, R: Intern + Clone>(
    param: impl PhantomConstGet<T>,
    get: impl FnOnce(Interned<T>) -> R,
) -> &'static R {
    Interned::into_inner(get(param.get()).intern_sized())
}
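A minimal usage sketch (not part of the diff) for the `PhantomConstGet` plumbing above. The helper `read_phantom` is hypothetical and only assumes some `PhantomConstValue` type `T`; both calls resolve through the impls generated above and yield the same `Interned<T>` whenever the expression's type matches `ty`.

// Sketch only: `read_phantom` is illustrative and not defined in the crate.
fn read_phantom<T: ?Sized + PhantomConstValue>(
    ty: PhantomConst<T>,
    expr: Expr<PhantomConst<T>>,
) -> (Interned<T>, Interned<T>) {
    // `ty.get()` uses the inherent method, `expr.get()` goes through the
    // `PhantomConstGet` impl generated for `Expr<PhantomConst<T>>` above.
    (ty.get(), expr.get())
}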
crates/fayalite/src/platform.rs (new file, 1923 lines): diff suppressed because it is too large
crates/fayalite/src/platform/peripherals.rs (new file, 62 lines):
@@ -0,0 +1,62 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

use crate::{intern::Intern, prelude::*};
use ordered_float::NotNan;
use serde::{Deserialize, Serialize};

#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ClockInputProperties {
    pub frequency: NotNan<f64>,
}

#[hdl(no_runtime_generics, no_static)]
pub struct ClockInput {
    pub clk: Clock,
    pub properties: PhantomConst<ClockInputProperties>,
}

impl ClockInput {
    #[track_caller]
    pub fn new(frequency: f64) -> Self {
        assert!(
            frequency > 0.0 && frequency.is_finite(),
            "invalid clock frequency: {frequency}"
        );
        Self {
            clk: Clock,
            properties: PhantomConst::new(
                ClockInputProperties {
                    frequency: NotNan::new(frequency).expect("just checked"),
                }
                .intern_sized(),
            ),
        }
    }
    pub fn frequency(self) -> f64 {
        self.properties.get().frequency.into_inner()
    }
}

#[hdl]
pub struct Led {
    pub on: Bool,
}

#[hdl]
pub struct RgbLed {
    pub r: Bool,
    pub g: Bool,
    pub b: Bool,
}

#[hdl]
/// UART, used as an output from the FPGA
pub struct Uart {
    /// transmit from the FPGA's perspective
    pub tx: Bool,
    /// receive from the FPGA's perspective
    #[hdl(flip)]
    pub rx: Bool,
}
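A short usage sketch for `ClockInput` (hypothetical values, not from the diff): the frequency handed to `new` is stored in the interned `ClockInputProperties` and read back by `frequency()`.

// Sketch only: hypothetical test exercising ClockInput::new and frequency().
#[test]
fn clock_input_sketch() {
    let clk_in = ClockInput::new(100_000_000.0); // 100 MHz
    assert_eq!(clk_in.frequency(), 100_000_000.0);
}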
@ -1,36 +1,45 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
pub use crate::{
|
||||
__,
|
||||
annotations::{
|
||||
BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
||||
},
|
||||
array::{Array, ArrayType},
|
||||
build::{BuildCli, JobParams, RunBuild},
|
||||
bundle::Bundle,
|
||||
cli::Cli,
|
||||
clock::{Clock, ClockDomain, ToClock},
|
||||
enum_::{Enum, HdlNone, HdlOption, HdlSome},
|
||||
expr::{
|
||||
repeat, CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
|
||||
ReduceBits, ToExpr,
|
||||
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
|
||||
ReduceBits, ToExpr, repeat,
|
||||
},
|
||||
formal::{
|
||||
all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset, hdl_assert,
|
||||
hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
|
||||
hdl_cover_with_enable, MakeFormalExpr,
|
||||
MakeFormalExpr, all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset,
|
||||
hdl_assert, hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
|
||||
hdl_cover_with_enable,
|
||||
},
|
||||
hdl, hdl_module,
|
||||
int::{Bool, DynSize, KnownSize, SInt, SIntType, Size, UInt, UIntType},
|
||||
int::{Bool, DynSize, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
|
||||
memory::{Mem, MemBuilder, ReadUnderWrite},
|
||||
module::{
|
||||
annotate, connect, connect_any, incomplete_wire, instance, memory, memory_array,
|
||||
memory_with_init, reg_builder, wire, Instance, Module, ModuleBuilder,
|
||||
Instance, Module, ModuleBuilder, annotate, connect, connect_any, incomplete_wire, instance,
|
||||
memory, memory_array, memory_with_init, reg_builder, wire,
|
||||
},
|
||||
phantom_const::{PhantomConst, PhantomConstGet},
|
||||
platform::{DynPlatform, Platform, PlatformIOBuilder, peripherals},
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
|
||||
sim::{
|
||||
ExternModuleSimulationState, Simulation,
|
||||
time::{SimDuration, SimInstant},
|
||||
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
testing::{FormalMode, assert_formal},
|
||||
ty::{AsMask, CanonicalType, Type},
|
||||
util::{ConstUsize, GenericConstUsize},
|
||||
wire::Wire,
|
||||
__,
|
||||
};
|
||||
pub use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
|
|
|
|||
|
|
@ -5,21 +5,22 @@ use crate::{
|
|||
expr::{Expr, Flow},
|
||||
intern::Interned,
|
||||
module::{NameId, ScopedNameId},
|
||||
reset::{Reset, ResetType},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
|
||||
pub struct Reg<T: Type> {
|
||||
pub struct Reg<T: Type, R: ResetType = Reset> {
|
||||
name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
ty: T,
|
||||
clock_domain: Expr<ClockDomain>,
|
||||
clock_domain: Expr<ClockDomain<R>>,
|
||||
init: Option<Expr<T>>,
|
||||
}
|
||||
|
||||
impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
|
||||
impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
name,
|
||||
|
|
@ -37,8 +38,8 @@ impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: Type> Reg<T> {
|
||||
pub fn canonical(&self) -> Reg<CanonicalType> {
|
||||
impl<T: Type, R: ResetType> Reg<T, R> {
|
||||
pub fn canonical(&self) -> Reg<CanonicalType, R> {
|
||||
let Self {
|
||||
name,
|
||||
source_location,
|
||||
|
|
@ -59,7 +60,7 @@ impl<T: Type> Reg<T> {
|
|||
scoped_name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
ty: T,
|
||||
clock_domain: Expr<ClockDomain>,
|
||||
clock_domain: Expr<ClockDomain<R>>,
|
||||
init: Option<Expr<T>>,
|
||||
) -> Self {
|
||||
assert!(
|
||||
|
|
@ -98,7 +99,7 @@ impl<T: Type> Reg<T> {
|
|||
pub fn scoped_name(&self) -> ScopedNameId {
|
||||
self.name
|
||||
}
|
||||
pub fn clock_domain(&self) -> Expr<ClockDomain> {
|
||||
pub fn clock_domain(&self) -> Expr<ClockDomain<R>> {
|
||||
self.clock_domain
|
||||
}
|
||||
pub fn init(&self) -> Option<Expr<T>> {
|
||||
|
|
|
|||
|
|
@ -1,26 +1,55 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
expr::{Expr, ToExpr},
|
||||
int::Bool,
|
||||
clock::Clock,
|
||||
expr::{Expr, ToExpr, ops},
|
||||
int::{Bool, SInt, UInt},
|
||||
source_location::SourceLocation,
|
||||
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
};
|
||||
use bitvec::{bits, order::Lsb0};
|
||||
|
||||
mod sealed {
|
||||
pub trait ResetTypeSealed {}
|
||||
}
|
||||
|
||||
pub trait ResetType: StaticType<MaskType = Bool> + sealed::ResetTypeSealed {}
|
||||
pub trait ResetType:
|
||||
StaticType<MaskType = Bool>
|
||||
+ sealed::ResetTypeSealed
|
||||
+ ops::ExprCastTo<Bool>
|
||||
+ ops::ExprCastTo<Reset>
|
||||
+ ops::ExprCastTo<SyncReset>
|
||||
+ ops::ExprCastTo<AsyncReset>
|
||||
+ ops::ExprCastTo<Clock>
|
||||
+ ops::ExprCastTo<UInt<1>>
|
||||
+ ops::ExprCastTo<SInt<1>>
|
||||
+ ops::ExprCastTo<UInt>
|
||||
+ ops::ExprCastTo<SInt>
|
||||
{
|
||||
fn dispatch<D: ResetTypeDispatch>(input: D::Input<Self>, dispatch: D) -> D::Output<Self>;
|
||||
}
|
||||
|
||||
pub trait ResetTypeDispatch: Sized {
|
||||
type Input<T: ResetType>;
|
||||
type Output<T: ResetType>;
|
||||
|
||||
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset>;
|
||||
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset>;
|
||||
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset>;
|
||||
}
|
||||
|
||||
macro_rules! reset_type {
|
||||
($name:ident, $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal) => {
|
||||
($name:ident, $(#[$impl_trait:ident])? $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal, $dispatch_fn:ident) => {
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
|
||||
pub struct $name;
|
||||
|
||||
impl Type for $name {
|
||||
type BaseType = $name;
|
||||
type MaskType = Bool;
|
||||
type SimValue = bool;
|
||||
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
|
|
@ -42,6 +71,31 @@ macro_rules! reset_type {
|
|||
};
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
opaque.bits()[0]
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
*value = opaque.bits()[0];
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
[bits![0], bits![1]][*value as usize],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
|
|
@ -61,13 +115,21 @@ macro_rules! reset_type {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: $is_castable_from_bits,
|
||||
bit_width: 1,
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl sealed::ResetTypeSealed for $name {}
|
||||
|
||||
impl ResetType for $name {}
|
||||
impl ResetType for $name {
|
||||
fn dispatch<D: ResetTypeDispatch>(
|
||||
input: D::Input<Self>,
|
||||
dispatch: D,
|
||||
) -> D::Output<Self> {
|
||||
dispatch.$dispatch_fn(input)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait $Trait {
|
||||
fn $trait_fn(&self) -> Expr<$name>;
|
||||
|
|
@ -91,20 +153,21 @@ macro_rules! reset_type {
|
|||
}
|
||||
}
|
||||
|
||||
impl $Trait for Expr<$name> {
|
||||
$($impl_trait $Trait for Expr<$name> {
|
||||
fn $trait_fn(&self) -> Expr<$name> {
|
||||
*self
|
||||
}
|
||||
}
|
||||
})?
|
||||
};
|
||||
}
|
||||
|
||||
reset_type!(AsyncReset, ToAsyncReset::to_async_reset, true);
|
||||
reset_type!(SyncReset, ToSyncReset::to_sync_reset, true);
|
||||
reset_type!(AsyncReset, #[impl] ToAsyncReset::to_async_reset, true, async_reset);
|
||||
reset_type!(SyncReset, #[impl] ToSyncReset::to_sync_reset, true, sync_reset);
|
||||
reset_type!(
|
||||
Reset,
|
||||
ToReset::to_reset,
|
||||
false // Reset is not castable from bits because we don't know if it's async or sync
|
||||
false, // Reset is not castable from bits because we don't know if it's async or sync
|
||||
reset
|
||||
);
|
||||
|
||||
impl ToSyncReset for bool {
|
||||
|
|
|
|||
crates/fayalite/src/sim.rs (new file, 3038 lines): diff suppressed because it is too large
crates/fayalite/src/sim/compiler.rs (new file, 5161 lines): diff suppressed because it is too large
crates/fayalite/src/sim/interpreter.rs (new file, 2096 lines): diff suppressed because it is too large
crates/fayalite/src/sim/interpreter/parts.rs (new file, 1052 lines): diff suppressed because it is too large
crates/fayalite/src/sim/time.rs (new file, 397 lines):
@ -0,0 +1,397 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use std::{
|
||||
fmt,
|
||||
ops::{Add, AddAssign, Sub, SubAssign},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct SimInstant {
|
||||
time_since_start: SimDuration,
|
||||
}
|
||||
|
||||
impl SimInstant {
|
||||
pub const fn checked_add(self, duration: SimDuration) -> Option<Self> {
|
||||
let Some(time_since_start) = self.time_since_start.checked_add(duration) else {
|
||||
return None;
|
||||
};
|
||||
Some(SimInstant { time_since_start })
|
||||
}
|
||||
pub const fn checked_duration_since(self, earlier: Self) -> Option<SimDuration> {
|
||||
self.time_since_start.checked_sub(earlier.time_since_start)
|
||||
}
|
||||
pub const fn checked_sub(self, duration: SimDuration) -> Option<Self> {
|
||||
let Some(time_since_start) = self.time_since_start.checked_sub(duration) else {
|
||||
return None;
|
||||
};
|
||||
Some(SimInstant { time_since_start })
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn duration_since(self, earlier: Self) -> SimDuration {
|
||||
let Some(retval) = self.checked_duration_since(earlier) else {
|
||||
panic!(
|
||||
"tried to compute the duration since a later time -- durations can't be negative"
|
||||
);
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn saturating_duration_since(self, earlier: Self) -> SimDuration {
|
||||
let Some(retval) = self.checked_duration_since(earlier) else {
|
||||
return SimDuration::ZERO;
|
||||
};
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Add<SimDuration> for SimInstant {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn add(mut self, rhs: SimDuration) -> Self::Output {
|
||||
self += rhs;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<SimDuration> for SimInstant {
|
||||
#[track_caller]
|
||||
fn add_assign(&mut self, rhs: SimDuration) {
|
||||
self.time_since_start += rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Add<SimInstant> for SimDuration {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn add(self, rhs: SimInstant) -> Self::Output {
|
||||
rhs.add(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for SimInstant {
|
||||
type Output = SimDuration;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: SimInstant) -> Self::Output {
|
||||
self.duration_since(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<SimDuration> for SimInstant {
|
||||
type Output = SimInstant;
|
||||
|
||||
#[track_caller]
|
||||
fn sub(self, rhs: SimDuration) -> Self::Output {
|
||||
let Some(retval) = self.checked_sub(rhs) else {
|
||||
panic!("SimInstant underflow");
|
||||
};
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign<SimDuration> for SimInstant {
|
||||
#[track_caller]
|
||||
fn sub_assign(&mut self, rhs: SimDuration) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl SimInstant {
|
||||
pub const START: SimInstant = SimInstant {
|
||||
time_since_start: SimDuration::ZERO,
|
||||
};
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimInstant {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.time_since_start.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct SimDuration {
|
||||
attos: u128,
|
||||
}
|
||||
|
||||
impl AddAssign for SimDuration {
|
||||
#[track_caller]
|
||||
fn add_assign(&mut self, rhs: SimDuration) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for SimDuration {
|
||||
type Output = SimDuration;
|
||||
|
||||
#[track_caller]
|
||||
fn add(self, rhs: SimDuration) -> Self::Output {
|
||||
SimDuration {
|
||||
attos: self
|
||||
.attos
|
||||
.checked_add(rhs.attos)
|
||||
.expect("overflow adding durations"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for SimDuration {
    type Output = Self;

    #[track_caller]
    fn sub(self, rhs: Self) -> Self::Output {
        SimDuration {
            attos: self
                .attos
                .checked_sub(rhs.attos)
                .expect("underflow subtracting durations -- durations can't be negative"),
        }
    }
}
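A sketch (hypothetical values) of the instant/duration arithmetic defined above: adding a duration to `SimInstant::START` and then taking the difference recovers the same duration.

// Sketch only: exercises the Add/Sub impls above; from_nanos is generated further down.
#[test]
fn sim_time_arithmetic_sketch() {
    let dt = SimDuration::from_nanos(5);
    let t = SimInstant::START + dt;
    assert_eq!(t - SimInstant::START, dt);
    assert_eq!(t.checked_sub(dt), Some(SimInstant::START));
}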
|
||||
|
||||
impl SubAssign for SimDuration {
|
||||
#[track_caller]
|
||||
fn sub_assign(&mut self, rhs: Self) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct SimDurationParts {
|
||||
pub attos: u16,
|
||||
pub femtos: u16,
|
||||
pub picos: u16,
|
||||
pub nanos: u16,
|
||||
pub micros: u16,
|
||||
pub millis: u16,
|
||||
pub secs: u128,
|
||||
}
|
||||
|
||||
macro_rules! impl_duration_units {
|
||||
(
|
||||
$(
|
||||
#[unit_const = $UNIT:ident, from_units = $from_units:ident, as_units = $as_units:ident, units = $units:ident, suffix = $suffix:literal]
|
||||
const $log10_units_per_sec:ident: u32 = $log10_units_per_sec_value:expr;
|
||||
)*
|
||||
) => {
|
||||
impl SimDuration {
|
||||
$(
|
||||
const $log10_units_per_sec: u32 = $log10_units_per_sec_value;
|
||||
pub const fn $from_units($units: u128) -> Self {
|
||||
Self::from_units_helper::<{ Self::$log10_units_per_sec }>($units)
|
||||
}
|
||||
pub const fn $as_units(self) -> u128 {
|
||||
self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }
|
||||
}
|
||||
)*
|
||||
pub const fn to_parts(mut self) -> SimDurationParts {
|
||||
$(
|
||||
let $units = self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
self.attos %= const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
)*
|
||||
SimDurationParts {
|
||||
$($units: $units as _,)*
|
||||
}
|
||||
}
|
||||
pub const fn from_parts_checked(parts: SimDurationParts) -> Option<Self> {
|
||||
let attos = 0u128;
|
||||
$(
|
||||
let Some(product) = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }.checked_mul(parts.$units as u128) else {
|
||||
return None;
|
||||
};
|
||||
let Some(attos) = attos.checked_add(product) else {
|
||||
return None;
|
||||
};
|
||||
)*
|
||||
Some(Self {
|
||||
attos,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for SimDuration {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let ilog10_attos = match self.attos.checked_ilog10() {
|
||||
Some(v) => v,
|
||||
None => Self::LOG10_ATTOS_PER_SEC,
|
||||
};
|
||||
let (suffix, int, fraction, fraction_digits) =
|
||||
match Self::LOG10_ATTOS_PER_SEC.saturating_sub(ilog10_attos) {
|
||||
$(
|
||||
..=Self::$log10_units_per_sec => {
|
||||
let divisor = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
|
||||
(
|
||||
$suffix,
|
||||
self.attos / divisor,
|
||||
self.attos % divisor,
|
||||
(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) as usize,
|
||||
)
|
||||
},
|
||||
)*
|
||||
_ => unreachable!(),
|
||||
};
|
||||
write!(f, "{int}")?;
|
||||
if fraction != 0 {
|
||||
write!(f, ".{fraction:0fraction_digits$}")?;
|
||||
}
|
||||
write!(f, " {suffix}")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[test]
|
||||
fn test_duration_debug() {
|
||||
$(
|
||||
assert_eq!(
|
||||
format!("{:?}", SimDuration::$from_units(123)),
|
||||
concat!("123 ", $suffix)
|
||||
);
|
||||
assert_eq!(
|
||||
format!("{:?}", SimDuration::$from_units(1)),
|
||||
concat!("1 ", $suffix),
|
||||
);
|
||||
let mut v = SimDuration::$from_units(1);
|
||||
if v.attos < 1 << 53 {
|
||||
v.attos += 1;
|
||||
assert_eq!(
|
||||
format!("{v:?}"),
|
||||
format!("{} {}", v.attos as f64 / 10.0f64.powf((SimDuration::LOG10_ATTOS_PER_SEC - SimDuration::$log10_units_per_sec) as f64), $suffix),
|
||||
"1 {} + 1 as == {} as", $suffix, v.attos,
|
||||
);
|
||||
}
|
||||
)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_duration_units! {
|
||||
#[unit_const = SECOND, from_units = from_secs, as_units = as_secs, units = secs, suffix = "s"]
|
||||
const LOG10_SECS_PER_SEC: u32 = 0;
|
||||
#[unit_const = MILLISECOND, from_units = from_millis, as_units = as_millis, units = millis, suffix = "ms"]
|
||||
const LOG10_MILLIS_PER_SEC: u32 = 3;
|
||||
#[unit_const = MICROSECOND, from_units = from_micros, as_units = as_micros, units = micros, suffix = "μs"]
|
||||
const LOG10_MICROS_PER_SEC: u32 = 6;
|
||||
#[unit_const = NANOSECOND, from_units = from_nanos, as_units = as_nanos, units = nanos, suffix = "ns"]
|
||||
const LOG10_NANOS_PER_SEC: u32 = 9;
|
||||
#[unit_const = PICOSECOND, from_units = from_picos, as_units = as_picos, units = picos, suffix = "ps"]
|
||||
const LOG10_PICOS_PER_SEC: u32 = 12;
|
||||
#[unit_const = FEMTOSECOND, from_units = from_femtos, as_units = as_femtos, units = femtos, suffix = "fs"]
|
||||
const LOG10_FEMTOS_PER_SEC: u32 = 15;
|
||||
#[unit_const = ATTOSECOND, from_units = from_attos, as_units = as_attos, units = attos, suffix = "as"]
|
||||
const LOG10_ATTOS_PER_SEC: u32 = 18;
|
||||
}
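A sketch with hypothetical values of the conversions generated by `impl_duration_units!` above: `to_parts` splits a duration into per-unit fields, and `from_parts` reassembles the same duration.

// Sketch only: 1.5 ms expressed through the generated unit constructors.
#[test]
fn duration_parts_sketch() {
    let d = SimDuration::from_micros(1500);
    let parts = d.to_parts();
    assert_eq!((parts.millis, parts.micros), (1, 500));
    assert_eq!(SimDuration::from_parts(parts), d);
}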
|
||||
|
||||
impl SimDuration {
|
||||
const fn from_units_helper<const UNITS_PER_SEC: u32>(units: u128) -> Self {
|
||||
let Some(attos) =
|
||||
units.checked_mul(const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - UNITS_PER_SEC) })
|
||||
else {
|
||||
panic!("duration too big");
|
||||
};
|
||||
Self { attos }
|
||||
}
|
||||
pub const ZERO: SimDuration = SimDuration::from_secs(0);
|
||||
pub const fn from_parts(parts: SimDurationParts) -> Self {
|
||||
match Self::from_parts_checked(parts) {
|
||||
Some(v) => v,
|
||||
None => panic!("duration too big"),
|
||||
}
|
||||
}
|
||||
pub const fn abs_diff(self, other: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.abs_diff(other.attos),
|
||||
}
|
||||
}
|
||||
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
|
||||
let Some(attos) = self.attos.checked_add(rhs.attos) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
|
||||
let Some(attos) = self.attos.checked_sub(rhs.attos) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
pub const fn is_zero(self) -> bool {
|
||||
self.attos == 0
|
||||
}
|
||||
pub const fn saturating_add(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.saturating_add(rhs.attos),
|
||||
}
|
||||
}
|
||||
pub const fn saturating_sub(self, rhs: Self) -> Self {
|
||||
Self {
|
||||
attos: self.attos.saturating_sub(rhs.attos),
|
||||
}
|
||||
}
|
||||
pub const fn checked_ilog10(self) -> Option<i32> {
|
||||
let Some(ilog10_attos) = self.attos.checked_ilog10() else {
|
||||
return None;
|
||||
};
|
||||
Some(ilog10_attos as i32 - Self::LOG10_ATTOS_PER_SEC as i32)
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn ilog10(self) -> i32 {
|
||||
let Some(retval) = self.checked_ilog10() else {
|
||||
panic!("tried to take the ilog10 of 0");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn checked_pow10(log10: i32, underflow_is_zero: bool) -> Option<Self> {
|
||||
let Some(log10) = Self::LOG10_ATTOS_PER_SEC.checked_add_signed(log10) else {
|
||||
return if log10 < 0 && underflow_is_zero {
|
||||
Some(Self::ZERO)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
};
|
||||
let Some(attos) = 10u128.checked_pow(log10) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self { attos })
|
||||
}
|
||||
#[track_caller]
|
||||
pub const fn pow10(log10: i32) -> Self {
|
||||
let Some(retval) = Self::checked_pow10(log10, true) else {
|
||||
panic!("pow10 overflowed");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub const fn is_power_of_ten(self) -> bool {
|
||||
const TEN: u128 = 10;
|
||||
const NUMBER_OF_POWERS_OF_TEN: usize = {
|
||||
let mut n = 0;
|
||||
while let Some(_) = TEN.checked_pow(n as u32) {
|
||||
n += 1;
|
||||
}
|
||||
n
|
||||
};
|
||||
const POWERS_OF_TEN: [u128; NUMBER_OF_POWERS_OF_TEN] = {
|
||||
let mut retval = [0; NUMBER_OF_POWERS_OF_TEN];
|
||||
let mut i = 0;
|
||||
while i < NUMBER_OF_POWERS_OF_TEN {
|
||||
retval[i] = TEN.pow(i as u32);
|
||||
i += 1;
|
||||
}
|
||||
retval
|
||||
};
|
||||
let mut i = 0;
|
||||
while i < NUMBER_OF_POWERS_OF_TEN {
|
||||
if self.attos == POWERS_OF_TEN[i] {
|
||||
return true;
|
||||
}
|
||||
i += 1;
|
||||
}
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Duration> for SimDuration {
|
||||
fn from(duration: Duration) -> Self {
|
||||
Self::from_nanos(duration.as_nanos())
|
||||
}
|
||||
}
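A sketch (hypothetical values) of the remaining conversions above: `std::time::Duration` converts via nanoseconds, and `pow10` builds exact decimal durations.

// Sketch only.
#[test]
fn duration_conversion_sketch() {
    use std::time::Duration;
    assert_eq!(
        SimDuration::from(Duration::from_millis(3)),
        SimDuration::from_millis(3)
    );
    assert_eq!(SimDuration::pow10(-9), SimDuration::from_nanos(1));
    assert!(SimDuration::pow10(-9).is_power_of_ten());
}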
crates/fayalite/src/sim/value.rs (new file, 1344 lines): diff suppressed because it is too large
crates/fayalite/src/sim/value/sim_only_value_unsafe.rs (new file, 304 lines):
@ -0,0 +1,304 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
//! `unsafe` parts of [`DynSimOnlyValue`]
|
||||
|
||||
use serde::{Serialize, de::DeserializeOwned};
|
||||
use std::{
|
||||
any::{self, TypeId},
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
mem::ManuallyDrop,
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
pub trait SimOnlyValueTrait:
|
||||
'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: 'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default>
|
||||
SimOnlyValueTrait for T
|
||||
{
|
||||
}
|
||||
|
||||
/// Safety: `type_id_dyn` must return `TypeId::of::<T>()` where `Self = SimOnly<T>`
|
||||
unsafe trait DynSimOnlyTrait: 'static + Send + Sync {
|
||||
fn type_id_dyn(&self) -> TypeId;
|
||||
fn type_name(&self) -> &'static str;
|
||||
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait>;
|
||||
fn deserialize_from_json_string(
|
||||
&self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>>;
|
||||
}
|
||||
|
||||
/// Safety: `type_id_dyn` is implemented correctly
|
||||
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyTrait for SimOnly<T> {
|
||||
fn type_id_dyn(&self) -> TypeId {
|
||||
TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn type_name(&self) -> &'static str {
|
||||
any::type_name::<T>()
|
||||
}
|
||||
|
||||
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait> {
|
||||
Rc::new(T::default())
|
||||
}
|
||||
|
||||
fn deserialize_from_json_string(
|
||||
&self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>> {
|
||||
Ok(Rc::<T>::new(serde_json::from_str(json_str)?))
|
||||
}
|
||||
}
|
||||
|
||||
/// Safety:
|
||||
/// * `type_id_dyn()` must return `TypeId::of::<Self>()`.
|
||||
/// * `ty().type_id()` must return `TypeId::of::<Self>()`.
|
||||
unsafe trait DynSimOnlyValueTrait: 'static + fmt::Debug {
|
||||
fn type_id_dyn(&self) -> TypeId;
|
||||
fn ty(&self) -> DynSimOnly;
|
||||
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool;
|
||||
fn serialize_to_json_string(&self) -> serde_json::Result<String>;
|
||||
fn hash_dyn(&self, state: &mut dyn Hasher);
|
||||
}
|
||||
|
||||
impl dyn DynSimOnlyValueTrait {
|
||||
fn is<T: SimOnlyValueTrait>(&self) -> bool {
|
||||
Self::type_id_dyn(self) == TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
|
||||
if Self::is::<T>(self) {
|
||||
// Safety: checked that `Self` is really `T`
|
||||
Some(unsafe { &*(self as *const Self as *const T) })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn downcast_rc<T: SimOnlyValueTrait>(self: Rc<Self>) -> Result<Rc<T>, Rc<Self>> {
|
||||
if Self::is::<T>(&*self) {
|
||||
// Safety: checked that `Self` is really `T`
|
||||
Ok(unsafe { Rc::from_raw(Rc::into_raw(self) as *const T) })
|
||||
} else {
|
||||
Err(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Safety:
|
||||
/// * `type_id_dyn()` returns `TypeId::of::<Self>()`.
|
||||
/// * `ty().type_id()` returns `TypeId::of::<Self>()`.
|
||||
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyValueTrait for T {
|
||||
fn type_id_dyn(&self) -> TypeId {
|
||||
TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn ty(&self) -> DynSimOnly {
|
||||
DynSimOnly::of::<T>()
|
||||
}
|
||||
|
||||
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool {
|
||||
other.downcast_ref::<T>().is_some_and(|other| self == other)
|
||||
}
|
||||
|
||||
fn serialize_to_json_string(&self) -> serde_json::Result<String> {
|
||||
serde_json::to_string(self)
|
||||
}
|
||||
|
||||
fn hash_dyn(&self, mut state: &mut dyn Hasher) {
|
||||
self.hash(&mut state);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct DynSimOnly {
|
||||
ty: &'static dyn DynSimOnlyTrait,
|
||||
}
|
||||
|
||||
impl DynSimOnly {
|
||||
pub const fn of<T: SimOnlyValueTrait>() -> Self {
|
||||
Self {
|
||||
ty: &const { SimOnly::<T>::new() },
|
||||
}
|
||||
}
|
||||
pub fn type_id(self) -> TypeId {
|
||||
self.ty.type_id_dyn()
|
||||
}
|
||||
pub fn type_name(self) -> &'static str {
|
||||
self.ty.type_name()
|
||||
}
|
||||
pub fn is<T: SimOnlyValueTrait>(self) -> bool {
|
||||
self.type_id() == TypeId::of::<T>()
|
||||
}
|
||||
pub fn downcast<T: SimOnlyValueTrait>(self) -> Option<SimOnly<T>> {
|
||||
self.is::<T>().then_some(SimOnly::default())
|
||||
}
|
||||
pub fn deserialize_from_json_string(
|
||||
self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<DynSimOnlyValue> {
|
||||
self.ty
|
||||
.deserialize_from_json_string(json_str)
|
||||
.map(DynSimOnlyValue)
|
||||
}
|
||||
pub fn default_value(self) -> DynSimOnlyValue {
|
||||
DynSimOnlyValue(self.ty.default_value())
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DynSimOnly {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
Self::type_id(*self) == Self::type_id(*other)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DynSimOnly {}
|
||||
|
||||
impl Hash for DynSimOnly {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
Self::type_id(*self).hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for DynSimOnly {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "SimOnly<{}>", self.ty.type_name())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> From<SimOnly<T>> for DynSimOnly {
|
||||
fn from(value: SimOnly<T>) -> Self {
|
||||
let SimOnly(PhantomData) = value;
|
||||
Self::of::<T>()
|
||||
}
|
||||
}
|
||||
|
||||
/// the [`Type`][Type] for a value that can only be used in a Fayalite simulation, it can't be converted to FIRRTL
|
||||
///
|
||||
/// [Type]: crate::ty::Type
|
||||
#[derive(Clone, Eq, PartialEq, Hash)]
|
||||
pub struct SimOnly<T: SimOnlyValueTrait>(PhantomData<fn(T) -> T>);
|
||||
|
||||
impl<T: SimOnlyValueTrait> fmt::Debug for SimOnly<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
DynSimOnly::of::<T>().fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> SimOnly<T> {
|
||||
pub const fn new() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> Copy for SimOnly<T> {}
|
||||
|
||||
impl<T: SimOnlyValueTrait> Default for SimOnly<T> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// a value that can only be used in a Fayalite simulation, it can't be converted to FIRRTL
|
||||
#[derive(Clone, Eq, PartialEq, Hash, Default, PartialOrd, Ord)]
|
||||
pub struct SimOnlyValue<T: SimOnlyValueTrait>(Rc<T>);
|
||||
|
||||
impl<T: SimOnlyValueTrait> SimOnlyValue<T> {
|
||||
pub fn with_dyn_ref<F: FnOnce(&DynSimOnlyValue) -> R, R>(&self, f: F) -> R {
|
||||
// Safety: creating a copied `Rc<T>` is safe as long as the copy isn't dropped and isn't changed
|
||||
// to point somewhere else, `f` can't change `dyn_ref` because it's only given a shared reference.
|
||||
let dyn_ref =
|
||||
unsafe { ManuallyDrop::new(DynSimOnlyValue(Rc::<T>::from_raw(Rc::as_ptr(&self.0)))) };
|
||||
f(&dyn_ref)
|
||||
}
|
||||
pub fn from_rc(v: Rc<T>) -> Self {
|
||||
Self(v)
|
||||
}
|
||||
pub fn new(v: T) -> Self {
|
||||
Self(Rc::new(v))
|
||||
}
|
||||
pub fn into_inner(this: Self) -> Rc<T> {
|
||||
this.0
|
||||
}
|
||||
pub fn inner_mut(this: &mut Self) -> &mut Rc<T> {
|
||||
&mut this.0
|
||||
}
|
||||
pub fn inner(this: &Self) -> &Rc<T> {
|
||||
&this.0
|
||||
}
|
||||
pub fn into_dyn(this: Self) -> DynSimOnlyValue {
|
||||
DynSimOnlyValue::from(this)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> std::ops::Deref for SimOnlyValue<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> std::ops::DerefMut for SimOnlyValue<T> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
Rc::make_mut(&mut self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DynSimOnlyValue(Rc<dyn DynSimOnlyValueTrait>);
|
||||
|
||||
impl fmt::Debug for DynSimOnlyValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
<dyn DynSimOnlyValueTrait as fmt::Debug>::fmt(&*self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DynSimOnlyValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
DynSimOnlyValueTrait::eq_dyn(&*self.0, &*other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DynSimOnlyValue {}
|
||||
|
||||
impl Hash for DynSimOnlyValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
DynSimOnlyValueTrait::hash_dyn(&*self.0, state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> From<SimOnlyValue<T>> for DynSimOnlyValue {
|
||||
fn from(value: SimOnlyValue<T>) -> Self {
|
||||
Self(value.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl DynSimOnlyValue {
|
||||
pub fn ty(&self) -> DynSimOnly {
|
||||
self.0.ty()
|
||||
}
|
||||
pub fn type_id(&self) -> TypeId {
|
||||
self.0.type_id_dyn()
|
||||
}
|
||||
pub fn is<T: SimOnlyValueTrait>(&self) -> bool {
|
||||
self.0.is::<T>()
|
||||
}
|
||||
pub fn downcast<T: SimOnlyValueTrait>(self) -> Result<SimOnlyValue<T>, DynSimOnlyValue> {
|
||||
match <dyn DynSimOnlyValueTrait>::downcast_rc(self.0) {
|
||||
Ok(v) => Ok(SimOnlyValue(v)),
|
||||
Err(v) => Err(Self(v)),
|
||||
}
|
||||
}
|
||||
pub fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
|
||||
<dyn DynSimOnlyValueTrait>::downcast_ref(&*self.0)
|
||||
}
|
||||
pub fn serialize_to_json_string(&self) -> serde_json::Result<String> {
|
||||
self.0.serialize_to_json_string()
|
||||
}
|
||||
}
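A sketch of the dynamic sim-only value API above. `Counter` is a hypothetical payload type; any `'static + Eq + Hash + Debug + Serialize + DeserializeOwned + Clone + Default` type satisfies the blanket `SimOnlyValueTrait` impl, so it can round-trip through JSON via `DynSimOnly`.

// Sketch only: `Counter` is hypothetical and not part of the crate.
#[derive(Clone, Default, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
struct Counter {
    count: u32,
}

fn sim_only_json_round_trip_sketch() -> serde_json::Result<()> {
    let value = DynSimOnlyValue::from(SimOnlyValue::new(Counter { count: 3 }));
    let json = value.serialize_to_json_string()?;
    // Deserializing through the matching DynSimOnly type recovers an equal value.
    let round_tripped = DynSimOnly::of::<Counter>().deserialize_from_json_string(&json)?;
    assert_eq!(value, round_tripped);
    assert_eq!(round_tripped.downcast_ref::<Counter>(), Some(&Counter { count: 3 }));
    Ok(())
}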
crates/fayalite/src/sim/vcd.rs (new file, 1162 lines): diff suppressed because it is too large
|
|
@ -2,9 +2,8 @@
|
|||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
intern::{Intern, Interned},
|
||||
util::DebugAsDisplay,
|
||||
util::{DebugAsDisplay, HashMap},
|
||||
};
|
||||
use hashbrown::HashMap;
|
||||
use std::{cell::RefCell, fmt, num::NonZeroUsize, panic, path::Path};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
|
|
@ -97,7 +96,7 @@ impl NormalizeFilesForTestsState {
|
|||
fn new() -> Self {
|
||||
Self {
|
||||
test_position: panic::Location::caller(),
|
||||
file_pattern_matches: HashMap::new(),
|
||||
file_pattern_matches: HashMap::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -143,7 +142,7 @@ impl From<&'_ panic::Location<'_>> for SourceLocation {
|
|||
map.entry_ref(file)
|
||||
.or_insert_with(|| NormalizedFileForTestState {
|
||||
file_name_id: NonZeroUsize::new(len + 1).unwrap(),
|
||||
positions_map: HashMap::new(),
|
||||
positions_map: HashMap::default(),
|
||||
});
|
||||
file_str = m.generate_file_name(file_state.file_name_id);
|
||||
file = &file_str;
|
||||
|
|
|
|||
|
|
@ -1,25 +1,54 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
cli::{FormalArgs, FormalMode, FormalOutput, RunPhase},
|
||||
build::{
|
||||
BaseJobArgs, BaseJobKind, GlobalParams, JobArgsAndDependencies, JobKindAndArgs, JobParams,
|
||||
NoArgs, RunBuild,
|
||||
external::{ExternalCommandArgs, ExternalCommandJobKind},
|
||||
firrtl::{FirrtlArgs, FirrtlJobKind},
|
||||
formal::{Formal, FormalAdditionalArgs, FormalArgs, WriteSbyFileJobKind},
|
||||
verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind},
|
||||
},
|
||||
bundle::BundleType,
|
||||
firrtl::ExportOptions,
|
||||
module::Module,
|
||||
util::HashMap,
|
||||
};
|
||||
use clap::Parser;
|
||||
use hashbrown::HashMap;
|
||||
use serde::Deserialize;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
fmt::{self, Write},
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::{Mutex, OnceLock},
|
||||
};
|
||||
|
||||
fn assert_formal_helper() -> FormalArgs {
|
||||
static FORMAL_ARGS: OnceLock<FormalArgs> = OnceLock::new();
|
||||
// ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads
|
||||
FORMAL_ARGS
|
||||
.get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"]))
|
||||
.clone()
|
||||
#[derive(
|
||||
clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize,
|
||||
)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalMode {
|
||||
#[default]
|
||||
BMC,
|
||||
Prove,
|
||||
Live,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalMode {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
FormalMode::BMC => "bmc",
|
||||
FormalMode::Prove => "prove",
|
||||
FormalMode::Live => "live",
|
||||
FormalMode::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FormalMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
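A tiny sketch of the `FormalMode` helpers above: `Display` simply forwards to `as_str`.

// Sketch only.
#[test]
fn formal_mode_names_sketch() {
    assert_eq!(FormalMode::BMC.as_str(), "bmc");
    assert_eq!(FormalMode::Prove.to_string(), "prove");
}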
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
|
@ -87,7 +116,7 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
|||
let index = *DIRS
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_or_insert_with(HashMap::new)
|
||||
.get_or_insert_with(HashMap::default)
|
||||
.entry_ref(&dir)
|
||||
.and_modify(|v| *v += 1)
|
||||
.or_insert(0);
|
||||
|
|
@ -97,26 +126,99 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
|||
.join(dir)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_formal<M>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
mode: FormalMode,
|
||||
depth: u64,
|
||||
fn make_assert_formal_args(
|
||||
test_name: &dyn std::fmt::Display,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) where
|
||||
FormalArgs: RunPhase<M, Output = FormalOutput>,
|
||||
{
|
||||
let mut args = assert_formal_helper();
|
||||
args.verilog.firrtl.base.redirect_output_for_rust_test = true;
|
||||
args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name));
|
||||
args.verilog.firrtl.export_options = export_options;
|
||||
args.verilog.debug = true;
|
||||
args.mode = mode;
|
||||
args.depth = depth;
|
||||
if let Some(solver) = solver {
|
||||
args.solver = solver.into();
|
||||
}
|
||||
args.run(module).expect("testing::assert_formal() failed");
|
||||
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
|
||||
let args = JobKindAndArgs {
|
||||
kind: BaseJobKind,
|
||||
args: BaseJobArgs::from_output_dir_and_env(get_assert_formal_target_path(&test_name), None),
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies {
|
||||
args,
|
||||
dependencies: (),
|
||||
};
|
||||
let args = JobKindAndArgs {
|
||||
kind: FirrtlJobKind,
|
||||
args: FirrtlArgs { export_options },
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: ExternalCommandJobKind::new(),
|
||||
args: ExternalCommandArgs::resolve_program_path(
|
||||
None,
|
||||
UnadjustedVerilogArgs {
|
||||
firtool_extra_args: vec![],
|
||||
verilog_dialect: None,
|
||||
verilog_debug: true,
|
||||
},
|
||||
)?,
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: VerilogJobKind,
|
||||
args: VerilogJobArgs {},
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: WriteSbyFileJobKind,
|
||||
args: FormalArgs {
|
||||
sby_extra_args: vec![],
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(),
|
||||
smtbmc_extra_args: vec![],
|
||||
},
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: ExternalCommandJobKind::new(),
|
||||
args: ExternalCommandArgs::resolve_program_path(None, FormalAdditionalArgs {})?,
|
||||
};
|
||||
Ok(JobArgsAndDependencies { args, dependencies })
|
||||
}
|
||||
|
||||
pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) -> eyre::Result<()> {
|
||||
const APP_NAME: &'static str = "fayalite::testing::assert_formal";
|
||||
make_assert_formal_args(
|
||||
&test_name,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
solver,
|
||||
export_options,
|
||||
)?
|
||||
.run_without_platform(
|
||||
|NoArgs {}| Ok(JobParams::new(module)),
|
||||
&GlobalParams::new(None, APP_NAME),
|
||||
)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) {
|
||||
try_assert_formal(
|
||||
test_name,
|
||||
module,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
solver,
|
||||
export_options,
|
||||
)
|
||||
.expect("testing::assert_formal() failed");
|
||||
}
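A sketch of calling the reworked `assert_formal` above. `my_module()` and the passed-in `ExportOptions` value are hypothetical; the point is only the new argument order and that a `None` solver falls back to `FormalArgs::DEFAULT_SOLVER`.

// Sketch only: `my_module` is a hypothetical #[hdl_module] constructor.
fn check_my_module_sketch(export_options: ExportOptions) {
    assert_formal(
        "check_my_module",
        my_module(),
        FormalMode::BMC,
        20,   // formal_depth
        None, // solver: falls back to FormalArgs::DEFAULT_SOLVER
        export_options,
    );
}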
|
||||
|
|
|
|||
|
|
@ -7,12 +7,27 @@ use crate::{
|
|||
clock::Clock,
|
||||
enum_::Enum,
|
||||
expr::Expr,
|
||||
int::{Bool, SInt, UInt},
|
||||
int::{Bool, SInt, UInt, UIntValue},
|
||||
intern::{Intern, Interned},
|
||||
phantom_const::PhantomConst,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
sim::value::{DynSimOnlyValue, DynSimOnly, SimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
util::{ConstUsize, slice_range, try_slice_range},
|
||||
};
|
||||
use std::{fmt, hash::Hash, iter::FusedIterator, ops::Index};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::DeserializeOwned};
|
||||
use std::{
|
||||
fmt,
|
||||
hash::Hash,
|
||||
iter::{FusedIterator, Sum},
|
||||
marker::PhantomData,
|
||||
mem,
|
||||
ops::{Add, AddAssign, Bound, Index, Mul, MulAssign, Range, Sub, SubAssign},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
pub(crate) mod serde_impls;
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
|
||||
#[non_exhaustive]
|
||||
|
|
@ -21,6 +36,23 @@ pub struct TypeProperties {
|
|||
pub is_storable: bool,
|
||||
pub is_castable_from_bits: bool,
|
||||
pub bit_width: usize,
|
||||
pub sim_only_values_len: usize,
|
||||
}
|
||||
|
||||
impl TypeProperties {
|
||||
pub const fn size(self) -> OpaqueSimValueSize {
|
||||
let Self {
|
||||
is_passive: _,
|
||||
is_storable: _,
|
||||
is_castable_from_bits: _,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
}
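A sketch of `TypeProperties::size` with hypothetical numbers; since `TypeProperties` is `#[non_exhaustive]`, this literal only compiles inside the crate.

// Sketch only: size() repackages the two length fields.
#[test]
fn type_properties_size_sketch() {
    let props = TypeProperties {
        is_passive: true,
        is_storable: true,
        is_castable_from_bits: true,
        bit_width: 8,
        sim_only_values_len: 1,
    };
    assert_eq!(
        props.size(),
        OpaqueSimValueSize::from_bit_width_and_sim_only_values_len(8, 1)
    );
}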
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
|
||||
|
|
@ -35,6 +67,8 @@ pub enum CanonicalType {
|
|||
SyncReset(SyncReset),
|
||||
Reset(Reset),
|
||||
Clock(Clock),
|
||||
PhantomConst(PhantomConst),
|
||||
DynSimOnly(DynSimOnly),
|
||||
}
|
||||
|
||||
impl fmt::Debug for CanonicalType {
|
||||
|
|
@ -50,10 +84,30 @@ impl fmt::Debug for CanonicalType {
|
|||
Self::SyncReset(v) => v.fmt(f),
|
||||
Self::Reset(v) => v.fmt(f),
|
||||
Self::Clock(v) => v.fmt(f),
|
||||
Self::PhantomConst(v) => v.fmt(f),
|
||||
Self::DynSimOnly(v) => v.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for CanonicalType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serde_impls::SerdeCanonicalType::from(*self).serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for CanonicalType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(serde_impls::SerdeCanonicalType::deserialize(deserializer)?.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl CanonicalType {
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
match self {
|
||||
|
|
@ -67,6 +121,8 @@ impl CanonicalType {
|
|||
CanonicalType::SyncReset(v) => v.type_properties(),
|
||||
CanonicalType::Reset(v) => v.type_properties(),
|
||||
CanonicalType::Clock(v) => v.type_properties(),
|
||||
CanonicalType::PhantomConst(v) => v.type_properties(),
|
||||
CanonicalType::DynSimOnly(v) => v.type_properties(),
|
||||
}
|
||||
}
|
||||
pub fn is_passive(self) -> bool {
|
||||
|
|
@ -81,6 +137,12 @@ impl CanonicalType {
|
|||
pub fn bit_width(self) -> usize {
|
||||
self.type_properties().bit_width
|
||||
}
|
||||
pub fn sim_only_values_len(self) -> usize {
|
||||
self.type_properties().sim_only_values_len
|
||||
}
|
||||
pub fn size(self) -> OpaqueSimValueSize {
|
||||
self.type_properties().size()
|
||||
}
|
||||
pub fn can_connect(self, rhs: Self) -> bool {
|
||||
match self {
|
||||
CanonicalType::UInt(lhs) => {
|
||||
|
|
@ -143,8 +205,23 @@ impl CanonicalType {
|
|||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
CanonicalType::PhantomConst(lhs) => {
|
||||
let CanonicalType::PhantomConst(rhs) = rhs else {
|
||||
return false;
|
||||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
CanonicalType::DynSimOnly(lhs) => {
|
||||
let CanonicalType::DynSimOnly(rhs) = rhs else {
|
||||
return false;
|
||||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn as_serde_unexpected_str(self) -> &'static str {
|
||||
serde_impls::SerdeCanonicalType::from(self).as_serde_unexpected_str()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MatchVariantAndInactiveScope: Sized {
|
||||
|
|
@ -166,7 +243,7 @@ impl<T: 'static + Send + Sync> MatchVariantAndInactiveScope for MatchVariantWith
|
|||
}
|
||||
|
||||
pub trait FillInDefaultedGenerics {
|
||||
type Type: Type;
|
||||
type Type;
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type;
|
||||
}
|
||||
|
||||
|
|
@ -178,6 +255,22 @@ impl<T: Type> FillInDefaultedGenerics for T {
|
|||
}
|
||||
}
|
||||
|
||||
impl FillInDefaultedGenerics for usize {
|
||||
type Type = usize;
|
||||
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<const V: usize> FillInDefaultedGenerics for ConstUsize<V> {
|
||||
type Type = ConstUsize<V>;
|
||||
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
mod sealed {
|
||||
pub trait TypeOrDefaultSealed {}
|
||||
pub trait BaseTypeSealed {}
|
||||
|
|
@ -195,6 +288,34 @@ macro_rules! impl_base_type {
|
|||
};
|
||||
}
|
||||
|
||||
macro_rules! impl_base_type_serde {
|
||||
($name:ident, $expected:literal) => {
|
||||
impl Serialize for $name {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.canonical().serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for $name {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
match CanonicalType::deserialize(deserializer)? {
|
||||
CanonicalType::$name(retval) => Ok(retval),
|
||||
ty => Err(serde::de::Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&$expected,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_base_type!(UInt);
|
||||
impl_base_type!(SInt);
|
||||
impl_base_type!(Bool);
|
||||
|
|
@ -205,6 +326,16 @@ impl_base_type!(AsyncReset);
|
|||
impl_base_type!(SyncReset);
|
||||
impl_base_type!(Reset);
|
||||
impl_base_type!(Clock);
|
||||
impl_base_type!(PhantomConst);
|
||||
impl_base_type!(DynSimOnly);
|
||||
|
||||
impl_base_type_serde!(Bool, "a Bool");
|
||||
impl_base_type_serde!(Enum, "an Enum");
|
||||
impl_base_type_serde!(Bundle, "a Bundle");
|
||||
impl_base_type_serde!(AsyncReset, "an AsyncReset");
|
||||
impl_base_type_serde!(SyncReset, "a SyncReset");
|
||||
impl_base_type_serde!(Reset, "a Reset");
|
||||
impl_base_type_serde!(Clock, "a Clock");
|
||||
|
||||
impl sealed::BaseTypeSealed for CanonicalType {}
|
||||
|
||||
|
|
@ -240,26 +371,48 @@ pub trait Type:
|
|||
{
|
||||
type BaseType: BaseType;
|
||||
type MaskType: Type<MaskType = Self::MaskType>;
|
||||
type SimValue: fmt::Debug + Clone + 'static + ToSimValueWithType<Self>;
|
||||
type MatchVariant: 'static + Send + Sync;
|
||||
type MatchActiveScope;
|
||||
type MatchVariantAndInactiveScope: MatchVariantAndInactiveScope<
|
||||
MatchVariant = Self::MatchVariant,
|
||||
MatchActiveScope = Self::MatchActiveScope,
|
||||
>;
|
||||
MatchVariant = Self::MatchVariant,
|
||||
MatchActiveScope = Self::MatchActiveScope,
|
||||
>;
|
||||
type MatchVariantsIter: Iterator<Item = Self::MatchVariantAndInactiveScope>
|
||||
+ ExactSizeIterator
|
||||
+ FusedIterator
|
||||
+ DoubleEndedIterator;
|
||||
#[track_caller]
|
||||
fn match_variants(this: Expr<Self>, source_location: SourceLocation)
|
||||
-> Self::MatchVariantsIter;
|
||||
-> Self::MatchVariantsIter;
|
||||
fn mask_type(&self) -> Self::MaskType;
|
||||
fn canonical(&self) -> CanonicalType;
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self;
|
||||
fn source_location() -> SourceLocation;
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue;
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
);
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w>;
|
||||
}
|
||||
|
||||
pub trait BaseType: Type<BaseType = Self> + sealed::BaseTypeSealed + Into<CanonicalType> {}
|
||||
pub trait BaseType:
|
||||
Type<
|
||||
BaseType = Self,
|
||||
MaskType: Serialize + DeserializeOwned,
|
||||
SimValue: Serialize + DeserializeOwned,
|
||||
> + sealed::BaseTypeSealed
|
||||
+ Into<CanonicalType>
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
{
|
||||
}
|
||||
|
||||
macro_rules! impl_match_variant_as_self {
|
||||
() => {
|
||||
|
|
@ -286,6 +439,7 @@ pub trait TypeWithDeref: Type {
|
|||
impl Type for CanonicalType {
|
||||
type BaseType = CanonicalType;
|
||||
type MaskType = CanonicalType;
|
||||
type SimValue = OpaqueSimValue;
|
||||
impl_match_variant_as_self!();
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
match self {
|
||||
|
|
@ -299,6 +453,8 @@ impl Type for CanonicalType {
|
|||
CanonicalType::SyncReset(v) => v.mask_type().canonical(),
|
||||
CanonicalType::Reset(v) => v.mask_type().canonical(),
|
||||
CanonicalType::Clock(v) => v.mask_type().canonical(),
|
||||
CanonicalType::PhantomConst(v) => v.mask_type().canonical(),
|
||||
CanonicalType::DynSimOnly(v) => v.mask_type().canonical(),
|
||||
}
|
||||
}
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
|
|
@ -310,9 +466,636 @@ impl Type for CanonicalType {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
pub trait StaticType: Type {
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Default)]
|
||||
#[non_exhaustive]
|
||||
pub struct OpaqueSimValueSizeRange {
|
||||
pub bit_width: Range<usize>,
|
||||
pub sim_only_values_len: Range<usize>,
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSizeRange {
|
||||
pub fn start(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width.start,
|
||||
sim_only_values_len: self.sim_only_values_len.start,
|
||||
}
|
||||
}
|
||||
pub fn end(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width.end,
|
||||
sim_only_values_len: self.sim_only_values_len.end,
|
||||
}
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
let Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
bit_width.is_empty() && sim_only_values_len.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Range<OpaqueSimValueSize>> for OpaqueSimValueSizeRange {
|
||||
fn from(value: Range<OpaqueSimValueSize>) -> Self {
|
||||
Self {
|
||||
bit_width: value.start.bit_width..value.end.bit_width,
|
||||
sim_only_values_len: value.start.sim_only_values_len..value.end.sim_only_values_len,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<OpaqueSimValueSizeRange> for Range<OpaqueSimValueSize> {
|
||||
fn from(value: OpaqueSimValueSizeRange) -> Self {
|
||||
value.start()..value.end()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait OpaqueSimValueSizeRangeBounds {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize>;
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize>;
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSizeRangeBounds for OpaqueSimValueSizeRange {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
Bound::Included(self.start())
|
||||
}
|
||||
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
Bound::Excluded(self.end())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + std::ops::RangeBounds<OpaqueSimValueSize>> OpaqueSimValueSizeRangeBounds for T {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
std::ops::RangeBounds::start_bound(self).cloned()
|
||||
}
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
std::ops::RangeBounds::end_bound(self).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Default)]
|
||||
#[non_exhaustive]
|
||||
pub struct OpaqueSimValueSize {
|
||||
pub bit_width: usize,
|
||||
pub sim_only_values_len: usize,
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSize {
|
||||
pub const fn from_bit_width(bit_width: usize) -> Self {
|
||||
Self::from_bit_width_and_sim_only_values_len(bit_width, 0)
|
||||
}
|
||||
pub const fn from_bit_width_and_sim_only_values_len(
|
||||
bit_width: usize,
|
||||
sim_only_values_len: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub const fn only_bit_width(self) -> Option<usize> {
|
||||
if let Self {
|
||||
bit_width,
|
||||
sim_only_values_len: 0,
|
||||
} = self
|
||||
{
|
||||
Some(bit_width)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
pub const fn empty() -> Self {
|
||||
Self {
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
}
|
||||
}
|
||||
pub const fn is_empty(self) -> bool {
|
||||
let Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
bit_width == 0 && sim_only_values_len == 0
|
||||
}
|
||||
pub const fn checked_mul(self, factor: usize) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_mul(factor) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self.sim_only_values_len.checked_mul(factor) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_add(rhs.bit_width) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self
|
||||
.sim_only_values_len
|
||||
.checked_add(rhs.sim_only_values_len)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_sub(rhs.bit_width) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self
|
||||
.sim_only_values_len
|
||||
.checked_sub(rhs.sim_only_values_len)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub fn try_slice_range<R: OpaqueSimValueSizeRangeBounds>(
|
||||
self,
|
||||
range: R,
|
||||
) -> Option<OpaqueSimValueSizeRange> {
|
||||
let start = range.start_bound();
|
||||
let end = range.end_bound();
|
||||
let bit_width = try_slice_range(
|
||||
(start.map(|v| v.bit_width), end.map(|v| v.bit_width)),
|
||||
self.bit_width,
|
||||
)?;
|
||||
let sim_only_values_len = try_slice_range(
|
||||
(
|
||||
start.map(|v| v.sim_only_values_len),
|
||||
end.map(|v| v.sim_only_values_len),
|
||||
),
|
||||
self.sim_only_values_len,
|
||||
)?;
|
||||
Some(OpaqueSimValueSizeRange {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub fn slice_range<R: OpaqueSimValueSizeRangeBounds>(
|
||||
self,
|
||||
range: R,
|
||||
) -> OpaqueSimValueSizeRange {
|
||||
self.try_slice_range(range).expect("range out of bounds")
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<usize> for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn mul(self, rhs: usize) -> Self::Output {
|
||||
self.checked_mul(rhs).expect("multiplication overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<OpaqueSimValueSize> for usize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn mul(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
rhs.checked_mul(self).expect("multiplication overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn add(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
rhs.checked_add(self).expect("addition overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn sub(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
self.checked_sub(rhs).expect("subtraction underflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl MulAssign<usize> for OpaqueSimValueSize {
|
||||
fn mul_assign(&mut self, rhs: usize) {
|
||||
*self = *self * rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for OpaqueSimValueSize {
|
||||
fn add_assign(&mut self, rhs: OpaqueSimValueSize) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign for OpaqueSimValueSize {
|
||||
fn sub_assign(&mut self, rhs: OpaqueSimValueSize) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Sum for OpaqueSimValueSize {
|
||||
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
|
||||
iter.fold(OpaqueSimValueSize::empty(), Add::add)
|
||||
}
|
||||
}
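A minimal usage sketch (not part of the diff; the test name is chosen for illustration) of composing sizes with the constructors and operator impls above:

#[test]
fn opaque_sim_value_size_sum_sketch() {
    // a 32-bit field, a 1-bit field, and one sim-only value
    let fields = [
        OpaqueSimValueSize::from_bit_width(32),
        OpaqueSimValueSize::from_bit_width(1),
        OpaqueSimValueSize::from_bit_width_and_sim_only_values_len(0, 1),
    ];
    let total: OpaqueSimValueSize = fields.into_iter().sum();
    assert_eq!(total.bit_width, 33);
    assert_eq!(total.sim_only_values_len, 1);
    // only_bit_width() is None because a sim-only value is present
    assert!(total.only_bit_width().is_none());
}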
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
pub struct OpaqueSimValue {
|
||||
bits: UIntValue,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
}
|
||||
|
||||
impl OpaqueSimValue {
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Default::default()),
|
||||
sim_only_values: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn with_capacity(capacity: OpaqueSimValueSize) -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Arc::new(BitVec::with_capacity(capacity.bit_width))),
|
||||
sim_only_values: Vec::with_capacity(capacity.sim_only_values_len),
|
||||
}
|
||||
}
|
||||
pub fn size(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bits.width(),
|
||||
sim_only_values_len: self.sim_only_values.len(),
|
||||
}
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bits.width()
|
||||
}
|
||||
pub fn bits(&self) -> &UIntValue {
|
||||
&self.bits
|
||||
}
|
||||
pub fn bits_mut(&mut self) -> &mut UIntValue {
|
||||
&mut self.bits
|
||||
}
|
||||
pub fn into_bits(self) -> UIntValue {
|
||||
self.bits
|
||||
}
|
||||
pub fn from_bits(bits: UIntValue) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn from_bitslice(v: &BitSlice) -> Self {
|
||||
Self::from_bitslice_and_sim_only_values(v, Vec::new())
|
||||
}
|
||||
pub fn from_bitslice_and_sim_only_values(
|
||||
bits: &BitSlice,
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
) -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Arc::new(bits.to_bitvec())),
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn from_bits_and_sim_only_values(
|
||||
bits: UIntValue,
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn into_parts(self) -> (UIntValue, Vec<DynSimOnlyValue>) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
(bits, sim_only_values)
|
||||
}
|
||||
pub fn parts_mut(&mut self) -> (&mut UIntValue, &mut Vec<DynSimOnlyValue>) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
(bits, sim_only_values)
|
||||
}
|
||||
pub fn sim_only_values(&self) -> &[DynSimOnlyValue] {
|
||||
&self.sim_only_values
|
||||
}
|
||||
pub fn sim_only_values_mut(&mut self) -> &mut Vec<DynSimOnlyValue> {
|
||||
&mut self.sim_only_values
|
||||
}
|
||||
pub fn as_slice(&self) -> OpaqueSimValueSlice<'_> {
|
||||
OpaqueSimValueSlice {
|
||||
bits: self.bits.bits(),
|
||||
sim_only_values: &self.sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn slice<R: OpaqueSimValueSizeRangeBounds>(&self, range: R) -> OpaqueSimValueSlice<'_> {
|
||||
self.as_slice().slice(range)
|
||||
}
|
||||
pub fn rewrite_with<F>(&mut self, target_size: OpaqueSimValueSize, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
OpaqueSimValueWriter::rewrite_with(target_size, self, f);
|
||||
}
|
||||
pub fn clone_from_slice(&mut self, slice: OpaqueSimValueSlice<'_>) {
|
||||
let OpaqueSimValueSlice {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = slice;
|
||||
self.bits.bits_mut().copy_from_bitslice(bits);
|
||||
self.sim_only_values.clone_from_slice(sim_only_values);
|
||||
}
|
||||
pub fn extend_from_slice(&mut self, slice: OpaqueSimValueSlice<'_>) {
|
||||
let OpaqueSimValueSlice {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = slice;
|
||||
self.bits.bitvec_mut().extend_from_bitslice(bits);
|
||||
self.sim_only_values.extend_from_slice(sim_only_values);
|
||||
}
|
||||
}
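A minimal sketch of the owned-value API above (not part of the diff; assumes bitvec's bits! macro is available for building a BitSlice):

#[test]
fn opaque_sim_value_sketch() {
    use bitvec::prelude::*;
    let mut value = OpaqueSimValue::from_bitslice(bits![0, 1, 1, 0]);
    assert_eq!(value.bit_width(), 4);
    // append one more bit through the slice-based extend
    value.extend_from_slice(OpaqueSimValueSlice::from_bitslice(bits![1]));
    assert_eq!(value.size(), OpaqueSimValueSize::from_bit_width(5));
}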
|
||||
|
||||
impl<'a> Extend<OpaqueSimValueSlice<'a>> for OpaqueSimValue {
|
||||
fn extend<T: IntoIterator<Item = OpaqueSimValueSlice<'a>>>(&mut self, iter: T) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
let bits = bits.bitvec_mut();
|
||||
for slice in iter {
|
||||
bits.extend_from_bitslice(slice.bits);
|
||||
sim_only_values.extend_from_slice(slice.sim_only_values);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<OpaqueSimValue> for OpaqueSimValue {
|
||||
fn extend<T: IntoIterator<Item = OpaqueSimValue>>(&mut self, iter: T) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
let bits = bits.bitvec_mut();
|
||||
for value in iter {
|
||||
bits.extend_from_bitslice(value.bits().bits());
|
||||
sim_only_values.extend_from_slice(value.sim_only_values());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type<SimValue = OpaqueSimValue>> ToSimValueWithType<T> for OpaqueSimValue {
|
||||
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
|
||||
SimValue::from_value(ty, self.clone())
|
||||
}
|
||||
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
|
||||
SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct OpaqueSimValueSlice<'a> {
|
||||
bits: &'a BitSlice,
|
||||
sim_only_values: &'a [DynSimOnlyValue],
|
||||
}
|
||||
|
||||
impl<'a> Default for OpaqueSimValueSlice<'a> {
|
||||
fn default() -> Self {
|
||||
Self::empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> OpaqueSimValueSlice<'a> {
|
||||
pub fn from_parts(bits: &'a BitSlice, sim_only_values: &'a [DynSimOnlyValue]) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn from_bitslice(bits: &'a BitSlice) -> Self {
|
||||
Self::from_parts(bits, &[])
|
||||
}
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
bits: BitSlice::empty(),
|
||||
sim_only_values: &[],
|
||||
}
|
||||
}
|
||||
pub fn size(self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width(),
|
||||
sim_only_values_len: self.sim_only_values_len(),
|
||||
}
|
||||
}
|
||||
pub fn is_empty(self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn bit_width(self) -> usize {
|
||||
self.bits.len()
|
||||
}
|
||||
pub fn bits(self) -> &'a BitSlice {
|
||||
self.bits
|
||||
}
|
||||
pub fn sim_only_values(self) -> &'a [DynSimOnlyValue] {
|
||||
self.sim_only_values
|
||||
}
|
||||
pub fn sim_only_values_len(self) -> usize {
|
||||
self.sim_only_values.len()
|
||||
}
|
||||
pub fn to_owned(self) -> OpaqueSimValue {
|
||||
OpaqueSimValue::from_bitslice_and_sim_only_values(self.bits, self.sim_only_values.to_vec())
|
||||
}
|
||||
pub fn slice<R: OpaqueSimValueSizeRangeBounds>(self, range: R) -> OpaqueSimValueSlice<'a> {
|
||||
let start = range.start_bound();
|
||||
let end = range.end_bound();
|
||||
let bits_range = slice_range(
|
||||
(start.map(|v| v.bit_width), end.map(|v| v.bit_width)),
|
||||
self.bit_width(),
|
||||
);
|
||||
let sim_only_values_range = slice_range(
|
||||
(
start.map(|v| v.sim_only_values_len),
end.map(|v| v.sim_only_values_len),
),
|
||||
self.sim_only_values_len(),
|
||||
);
|
||||
Self {
|
||||
bits: &self.bits[bits_range],
|
||||
sim_only_values: &self.sim_only_values[sim_only_values_range],
|
||||
}
|
||||
}
|
||||
pub fn split_at(self, index: OpaqueSimValueSize) -> (Self, Self) {
|
||||
let bits = self.bits.split_at(index.bit_width);
|
||||
let sim_only_values = self.sim_only_values.split_at(index.sim_only_values_len);
|
||||
(
|
||||
Self {
|
||||
bits: bits.0,
|
||||
sim_only_values: sim_only_values.0,
|
||||
},
|
||||
Self {
|
||||
bits: bits.1,
|
||||
sim_only_values: sim_only_values.1,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct OpaqueSimValueWriter<'a> {
|
||||
bits: &'a mut BitSlice,
|
||||
sim_only_values: &'a mut Vec<DynSimOnlyValue>,
|
||||
sim_only_values_range: std::ops::Range<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct OpaqueSimValueWritten<'a> {
|
||||
_phantom: PhantomData<&'a ()>,
|
||||
}
|
||||
|
||||
impl<'a> OpaqueSimValueWriter<'a> {
|
||||
pub fn sim_only_values_range(&self) -> std::ops::Range<usize> {
|
||||
self.sim_only_values_range.clone()
|
||||
}
|
||||
pub fn rewrite_with<F>(target_size: OpaqueSimValueSize, value: &mut OpaqueSimValue, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
let OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
} = f(OpaqueSimValueWriter::rewrite_helper(target_size, value));
|
||||
}
|
||||
pub(crate) fn rewrite_helper(
|
||||
target_size: OpaqueSimValueSize,
|
||||
value: &'a mut OpaqueSimValue,
|
||||
) -> Self {
|
||||
let (bits, sim_only_values) = value.parts_mut();
|
||||
let OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = target_size;
|
||||
let bits = bits.bitvec_mut();
|
||||
bits.resize(bit_width, false);
|
||||
sim_only_values.truncate(sim_only_values_len);
|
||||
sim_only_values.reserve_exact(sim_only_values_len - sim_only_values.len());
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range: 0..sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub fn size(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width(),
|
||||
sim_only_values_len: self.sim_only_values_len(),
|
||||
}
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bits.len()
|
||||
}
|
||||
pub fn sim_only_values_len(&self) -> usize {
|
||||
self.sim_only_values_range.len()
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn fill_cloned_from_slice(
|
||||
self,
|
||||
slice: OpaqueSimValueSlice<'_>,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
assert_eq!(self.size(), slice.size());
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range,
|
||||
} = self;
|
||||
bits.copy_from_bitslice(slice.bits);
|
||||
let (clone_from_src, clone_src) = slice.sim_only_values.split_at(
|
||||
(sim_only_values.len() - sim_only_values_range.start).min(slice.sim_only_values.len()),
|
||||
);
|
||||
sim_only_values[sim_only_values_range.start..][..clone_from_src.len()]
|
||||
.clone_from_slice(clone_from_src);
|
||||
sim_only_values.extend_from_slice(clone_src);
|
||||
OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn fill_with_bits_with<F: FnOnce(&mut BitSlice)>(self, f: F) -> OpaqueSimValueWritten<'a> {
|
||||
assert!(self.size().only_bit_width().is_some());
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range,
|
||||
} = self;
|
||||
f(bits);
|
||||
assert_eq!(sim_only_values.len(), sim_only_values_range.end);
|
||||
OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn fill_with_zeros(self) -> OpaqueSimValueWritten<'a> {
|
||||
assert!(
|
||||
self.size().only_bit_width().is_some(),
|
||||
"can't fill things other than bits with zeros",
|
||||
);
|
||||
self.fill_with_bits_with(|bits| bits.fill(false))
|
||||
}
|
||||
pub fn fill_prefix_with<F>(&mut self, prefix_size: OpaqueSimValueSize, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
let OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = prefix_size;
|
||||
assert!(bit_width <= self.bit_width());
|
||||
assert!(sim_only_values_len <= self.sim_only_values_len());
|
||||
let next_start = self.sim_only_values_range.start + sim_only_values_len;
|
||||
let OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
} = f(OpaqueSimValueWriter {
|
||||
bits: &mut self.bits[..bit_width],
|
||||
sim_only_values: self.sim_only_values,
|
||||
sim_only_values_range: self.sim_only_values_range.start..next_start,
|
||||
});
|
||||
assert!(self.sim_only_values.len() >= next_start);
|
||||
self.bits = &mut mem::take(&mut self.bits)[bit_width..];
|
||||
self.sim_only_values_range.start = next_start;
|
||||
}
|
||||
}
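A minimal sketch of the writer protocol (not part of the diff): resizing an OpaqueSimValue and filling it through rewrite_with.

#[test]
fn opaque_sim_value_writer_sketch() {
    let mut value = OpaqueSimValue::empty();
    value.rewrite_with(OpaqueSimValueSize::from_bit_width(8), |writer: OpaqueSimValueWriter<'_>| {
        // the writer is branded to this rewrite; fill_with_zeros consumes it
        writer.fill_with_zeros()
    });
    assert_eq!(value.bit_width(), 8);
}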
|
||||
|
||||
pub trait StaticType: Type + Default {
|
||||
const TYPE: Self;
|
||||
const MASK_TYPE: Self::MaskType;
|
||||
const TYPE_PROPERTIES: TypeProperties;
|
||||
|
|
|
|||
crates/fayalite/src/ty/serde_impls.rs (new file, 135 lines)
|
|
@@ -0,0 +1,135 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
array::Array,
|
||||
bundle::{Bundle, BundleType},
|
||||
clock::Clock,
|
||||
enum_::{Enum, EnumType},
|
||||
int::{Bool, SInt, UInt},
|
||||
intern::Interned,
|
||||
phantom_const::{PhantomConstCanonicalValue, PhantomConstValue},
|
||||
prelude::PhantomConst,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
sim::value::DynSimOnly,
|
||||
ty::{BaseType, CanonicalType},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
pub(crate) struct SerdePhantomConst<T>(pub T);
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Serialize for SerdePhantomConst<Interned<T>> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.0.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for SerdePhantomConst<Interned<T>> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
T::deserialize_value(deserializer).map(Self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[serde(rename = "CanonicalType")]
|
||||
pub(crate) enum SerdeCanonicalType<
|
||||
ArrayElement = CanonicalType,
|
||||
ThePhantomConst = SerdePhantomConst<Interned<PhantomConstCanonicalValue>>,
|
||||
> {
|
||||
UInt {
|
||||
width: usize,
|
||||
},
|
||||
SInt {
|
||||
width: usize,
|
||||
},
|
||||
Bool,
|
||||
Array {
|
||||
element: ArrayElement,
|
||||
len: usize,
|
||||
},
|
||||
Enum {
|
||||
variants: Interned<[crate::enum_::EnumVariant]>,
|
||||
},
|
||||
Bundle {
|
||||
fields: Interned<[crate::bundle::BundleField]>,
|
||||
},
|
||||
AsyncReset,
|
||||
SyncReset,
|
||||
Reset,
|
||||
Clock,
|
||||
PhantomConst(ThePhantomConst),
|
||||
DynSimOnly(DynSimOnly),
|
||||
}
|
||||
|
||||
impl<ArrayElement, PhantomConstInner> SerdeCanonicalType<ArrayElement, PhantomConstInner> {
|
||||
pub(crate) fn as_serde_unexpected_str(&self) -> &'static str {
|
||||
match self {
|
||||
Self::UInt { .. } => "a UInt",
|
||||
Self::SInt { .. } => "a SInt",
|
||||
Self::Bool => "a Bool",
|
||||
Self::Array { .. } => "an Array",
|
||||
Self::Enum { .. } => "an Enum",
|
||||
Self::Bundle { .. } => "a Bundle",
|
||||
Self::AsyncReset => "an AsyncReset",
|
||||
Self::SyncReset => "a SyncReset",
|
||||
Self::Reset => "a Reset",
|
||||
Self::Clock => "a Clock",
|
||||
Self::PhantomConst(_) => "a PhantomConst",
|
||||
Self::DynSimOnly(_) => "a SimOnlyValue",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BaseType> From<T> for SerdeCanonicalType {
|
||||
fn from(ty: T) -> Self {
|
||||
let ty: CanonicalType = ty.into();
|
||||
match ty {
|
||||
CanonicalType::UInt(ty) => Self::UInt { width: ty.width() },
|
||||
CanonicalType::SInt(ty) => Self::SInt { width: ty.width() },
|
||||
CanonicalType::Bool(Bool {}) => Self::Bool,
|
||||
CanonicalType::Array(ty) => Self::Array {
|
||||
element: ty.element(),
|
||||
len: ty.len(),
|
||||
},
|
||||
CanonicalType::Enum(ty) => Self::Enum {
|
||||
variants: ty.variants(),
|
||||
},
|
||||
CanonicalType::Bundle(ty) => Self::Bundle {
|
||||
fields: ty.fields(),
|
||||
},
|
||||
CanonicalType::AsyncReset(AsyncReset {}) => Self::AsyncReset,
|
||||
CanonicalType::SyncReset(SyncReset {}) => Self::SyncReset,
|
||||
CanonicalType::Reset(Reset {}) => Self::Reset,
|
||||
CanonicalType::Clock(Clock {}) => Self::Clock,
|
||||
CanonicalType::PhantomConst(ty) => Self::PhantomConst(SerdePhantomConst(ty.get())),
|
||||
CanonicalType::DynSimOnly(ty) => Self::DynSimOnly(ty),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SerdeCanonicalType> for CanonicalType {
|
||||
fn from(ty: SerdeCanonicalType) -> Self {
|
||||
match ty {
|
||||
SerdeCanonicalType::UInt { width } => Self::UInt(UInt::new(width)),
|
||||
SerdeCanonicalType::SInt { width } => Self::SInt(SInt::new(width)),
|
||||
SerdeCanonicalType::Bool => Self::Bool(Bool),
|
||||
SerdeCanonicalType::Array { element, len } => Self::Array(Array::new(element, len)),
|
||||
SerdeCanonicalType::Enum { variants } => Self::Enum(Enum::new(variants)),
|
||||
SerdeCanonicalType::Bundle { fields } => Self::Bundle(Bundle::new(fields)),
|
||||
SerdeCanonicalType::AsyncReset => Self::AsyncReset(AsyncReset),
|
||||
SerdeCanonicalType::SyncReset => Self::SyncReset(SyncReset),
|
||||
SerdeCanonicalType::Reset => Self::Reset(Reset),
|
||||
SerdeCanonicalType::Clock => Self::Clock(Clock),
|
||||
SerdeCanonicalType::PhantomConst(value) => {
|
||||
Self::PhantomConst(PhantomConst::new(value.0))
|
||||
}
|
||||
SerdeCanonicalType::DynSimOnly(value) => Self::DynSimOnly(value),
|
||||
}
|
||||
}
|
||||
}
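A crate-internal sketch (not part of the diff): converting a base type into the serde mirror enum. With serde_json's derived encoding this should come out as an externally tagged variant; the exact JSON noted in the comment is an assumption.

fn serde_canonical_type_sketch() -> serde_json::Result<String> {
    let mirror = SerdeCanonicalType::from(UInt::new(8));
    // expected shape (assumption): {"UInt":{"width":8}}
    serde_json::to_string(&mirror)
}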
|
||||
|
|
@@ -1,12 +1,23 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
pub(crate) mod alternating_cell;
|
||||
mod const_bool;
|
||||
mod const_cmp;
|
||||
mod const_usize;
|
||||
mod misc;
|
||||
mod scoped_ref;
|
||||
pub(crate) mod streaming_read_utf8;
|
||||
mod test_hasher;
|
||||
|
||||
// allow easily switching the hasher crate-wide for testing
|
||||
#[cfg(feature = "unstable-test-hasher")]
|
||||
pub type DefaultBuildHasher = test_hasher::DefaultBuildHasher;
|
||||
#[cfg(not(feature = "unstable-test-hasher"))]
|
||||
pub(crate) type DefaultBuildHasher = hashbrown::DefaultHashBuilder;
|
||||
|
||||
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, DefaultBuildHasher>;
|
||||
pub(crate) type HashSet<T> = hashbrown::HashSet<T, DefaultBuildHasher>;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use const_bool::{ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool};
|
||||
|
|
@@ -24,8 +35,14 @@ pub use scoped_ref::ScopedRef;
|
|||
|
||||
#[doc(inline)]
|
||||
pub use misc::{
|
||||
interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice,
|
||||
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
|
||||
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
|
||||
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, os_str_strip_prefix,
|
||||
os_str_strip_suffix, serialize_to_json_ascii, serialize_to_json_ascii_pretty,
|
||||
serialize_to_json_ascii_pretty_with_indent, slice_range, try_slice_range,
|
||||
};
|
||||
pub(crate) use misc::{InternedStrCompareAsStr, chain};
|
||||
|
||||
pub mod job_server;
|
||||
pub mod prefix_sum;
|
||||
pub mod ready_valid;
|
||||
|
|
|
|||
crates/fayalite/src/util/alternating_cell.rs (new file, 122 lines)
|
|
@@ -0,0 +1,122 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::util::DebugAsDisplay;
|
||||
use std::{
|
||||
cell::{Cell, UnsafeCell},
|
||||
fmt,
|
||||
};
|
||||
|
||||
pub(crate) trait AlternatingCellMethods {
|
||||
fn unique_to_shared(&mut self);
|
||||
fn shared_to_unique(&mut self);
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum State {
|
||||
Unique,
|
||||
Shared,
|
||||
Locked,
|
||||
}
|
||||
|
||||
pub(crate) struct AlternatingCell<T: ?Sized> {
|
||||
state: Cell<State>,
|
||||
value: UnsafeCell<T>,
|
||||
}
|
||||
|
||||
impl<T: ?Sized + fmt::Debug + AlternatingCellMethods> fmt::Debug for AlternatingCell<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("AlternatingCell")
|
||||
.field(
|
||||
self.try_share()
|
||||
.as_ref()
|
||||
.map(|v| -> &dyn fmt::Debug { v })
|
||||
.unwrap_or(&DebugAsDisplay("<...>")),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> AlternatingCell<T> {
|
||||
pub(crate) const fn new_shared(value: T) -> Self
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
Self {
|
||||
state: Cell::new(State::Shared),
|
||||
value: UnsafeCell::new(value),
|
||||
}
|
||||
}
|
||||
pub(crate) const fn new_unique(value: T) -> Self
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
Self {
|
||||
state: Cell::new(State::Unique),
|
||||
value: UnsafeCell::new(value),
|
||||
}
|
||||
}
|
||||
pub(crate) fn is_unique(&self) -> bool {
|
||||
matches!(self.state.get(), State::Unique)
|
||||
}
|
||||
pub(crate) fn is_shared(&self) -> bool {
|
||||
matches!(self.state.get(), State::Shared)
|
||||
}
|
||||
pub(crate) fn into_inner(self) -> T
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
self.value.into_inner()
|
||||
}
|
||||
pub(crate) fn try_share(&self) -> Option<&T>
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
match self.state.get() {
|
||||
State::Shared => {}
|
||||
State::Unique => {
|
||||
struct Locked<'a>(&'a Cell<State>);
|
||||
impl Drop for Locked<'_> {
|
||||
fn drop(&mut self) {
|
||||
self.0.set(State::Shared);
|
||||
}
|
||||
}
|
||||
self.state.set(State::Locked);
|
||||
let lock = Locked(&self.state);
|
||||
// Safety: state is Locked, so nothing else will
|
||||
// access value while calling unique_to_shared.
|
||||
unsafe { &mut *self.value.get() }.unique_to_shared();
|
||||
drop(lock);
|
||||
}
|
||||
State::Locked => return None,
|
||||
}
|
||||
|
||||
// Safety: state is Shared so nothing will create any mutable
|
||||
// references until the returned reference's lifetime expires.
|
||||
Some(unsafe { &*self.value.get() })
|
||||
}
|
||||
#[track_caller]
|
||||
pub(crate) fn share(&self) -> &T
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
let Some(retval) = self.try_share() else {
|
||||
panic!("`share` called recursively");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub(crate) fn unique(&mut self) -> &mut T
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
match self.state.get() {
|
||||
State::Shared => {
|
||||
self.state.set(State::Unique);
|
||||
self.value.get_mut().shared_to_unique();
|
||||
}
|
||||
State::Unique => {}
|
||||
State::Locked => unreachable!(),
|
||||
}
|
||||
self.value.get_mut()
|
||||
}
|
||||
}
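A minimal sketch of the intended usage (not part of the diff; SortedVec and its conversions are hypothetical): the cell re-establishes an invariant each time it flips from unique to shared access.

struct SortedVec(Vec<u32>);

impl AlternatingCellMethods for SortedVec {
    fn unique_to_shared(&mut self) {
        // make the shared-phase invariant hold before handing out & references
        self.0.sort_unstable();
    }
    fn shared_to_unique(&mut self) {
        // nothing to undo when switching back to unique access
    }
}

#[test]
fn alternating_cell_sketch() {
    let mut cell = AlternatingCell::new_unique(SortedVec(vec![3, 1, 2]));
    cell.unique().0.push(0);
    assert_eq!(cell.share().0, [0, 1, 2, 3]);
}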
|
||||
|
|
@@ -1,5 +1,9 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error, Unexpected},
|
||||
};
|
||||
use std::{fmt::Debug, hash::Hash, mem::ManuallyDrop, ptr};
|
||||
|
||||
mod sealed {
|
||||
|
|
@@ -9,7 +13,17 @@ mod sealed {
|
|||
/// # Safety
|
||||
/// the only implementation is `ConstBool<Self::VALUE>`
|
||||
pub unsafe trait GenericConstBool:
|
||||
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
|
||||
sealed::Sealed
|
||||
+ Copy
|
||||
+ Ord
|
||||
+ Hash
|
||||
+ Default
|
||||
+ Debug
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
{
|
||||
const VALUE: bool;
|
||||
}
|
||||
|
|
@@ -30,6 +44,32 @@ unsafe impl<const VALUE: bool> GenericConstBool for ConstBool<VALUE> {
|
|||
const VALUE: bool = VALUE;
|
||||
}
|
||||
|
||||
impl<const VALUE: bool> Serialize for ConstBool<VALUE> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
VALUE.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, const VALUE: bool> Deserialize<'de> for ConstBool<VALUE> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = bool::deserialize(deserializer)?;
|
||||
if value == VALUE {
|
||||
Ok(ConstBool)
|
||||
} else {
|
||||
Err(D::Error::invalid_value(
|
||||
Unexpected::Bool(value),
|
||||
&if VALUE { "true" } else { "false" },
|
||||
))
|
||||
}
|
||||
}
|
||||
}
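A minimal sketch (not part of the diff): ConstBool<true> round-trips through serde as the plain JSON literal true, and the wrong literal is rejected.

#[test]
fn const_bool_serde_sketch() {
    assert_eq!(serde_json::to_string(&ConstBool::<true>).unwrap(), "true");
    assert!(serde_json::from_str::<ConstBool<true>>("true").is_ok());
    assert!(serde_json::from_str::<ConstBool<true>>("false").is_err());
}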
|
||||
|
||||
pub trait ConstBoolDispatchTag {
|
||||
type Type<Select: GenericConstBool>;
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,5 +1,9 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error, Unexpected},
|
||||
};
|
||||
use std::{fmt::Debug, hash::Hash};
|
||||
|
||||
mod sealed {
|
||||
|
|
@@ -8,7 +12,17 @@ mod sealed {
|
|||
|
||||
/// the only implementation is `ConstUsize<Self::VALUE>`
|
||||
pub trait GenericConstUsize:
|
||||
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
|
||||
sealed::Sealed
|
||||
+ Copy
|
||||
+ Ord
|
||||
+ Hash
|
||||
+ Default
|
||||
+ Debug
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
{
|
||||
const VALUE: usize;
|
||||
}
|
||||
|
|
@@ -27,3 +41,29 @@ impl<const VALUE: usize> sealed::Sealed for ConstUsize<VALUE> {}
|
|||
impl<const VALUE: usize> GenericConstUsize for ConstUsize<VALUE> {
|
||||
const VALUE: usize = VALUE;
|
||||
}
|
||||
|
||||
impl<const VALUE: usize> Serialize for ConstUsize<VALUE> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
VALUE.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, const VALUE: usize> Deserialize<'de> for ConstUsize<VALUE> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = usize::deserialize(deserializer)?;
|
||||
if value == VALUE {
|
||||
Ok(ConstUsize)
|
||||
} else {
|
||||
Err(D::Error::invalid_value(
|
||||
Unexpected::Unsigned(value as u64),
|
||||
&&*VALUE.to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,192 +1,156 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use ctor::ctor;
|
||||
use jobslot::{Acquired, Client};
|
||||
use ctor::{ctor, dtor};
|
||||
use jobslot::Client;
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
mem,
|
||||
io, mem,
|
||||
num::NonZeroUsize,
|
||||
sync::{Condvar, Mutex, Once, OnceLock},
|
||||
thread::spawn,
|
||||
sync::{Mutex, MutexGuard},
|
||||
};
|
||||
|
||||
fn get_or_make_client() -> &'static Client {
|
||||
#[ctor]
|
||||
static CLIENT: OnceLock<Client> = unsafe {
|
||||
match Client::from_env() {
|
||||
Some(client) => OnceLock::from(client),
|
||||
None => OnceLock::new(),
|
||||
}
|
||||
};
|
||||
#[ctor]
|
||||
static CLIENT: Mutex<Option<Option<Client>>> = unsafe { Mutex::new(Some(Client::from_env())) };
|
||||
|
||||
CLIENT.get_or_init(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
|
||||
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
|
||||
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
|
||||
Some(b'=') => {
|
||||
let mut arg = arg.into_encoded_bytes();
|
||||
arg.drain(..=TEST_THREADS_OPTION.len());
|
||||
available_parallelism = Some(arg);
|
||||
break;
|
||||
#[dtor]
|
||||
fn drop_client() {
|
||||
drop(
|
||||
match CLIENT.lock() {
|
||||
Ok(v) => v,
|
||||
Err(e) => e.into_inner(),
|
||||
}
|
||||
.take(),
|
||||
);
|
||||
}
|
||||
|
||||
fn get_or_make_client() -> Client {
|
||||
CLIENT
|
||||
.lock()
|
||||
.expect("shouldn't have panicked")
|
||||
.as_mut()
|
||||
.expect("shutting down")
|
||||
.get_or_insert_with(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
|
||||
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
|
||||
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
|
||||
Some(b'=') => {
|
||||
let mut arg = arg.into_encoded_bytes();
|
||||
arg.drain(..=TEST_THREADS_OPTION.len());
|
||||
available_parallelism = Some(arg);
|
||||
break;
|
||||
}
|
||||
None => {
|
||||
available_parallelism = args.next().map(OsString::into_encoded_bytes);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
None => {
|
||||
available_parallelism = args.next().map(OsString::into_encoded_bytes);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
let available_parallelism = if let Some(available_parallelism) = available_parallelism
|
||||
.as_deref()
|
||||
.and_then(|v| std::str::from_utf8(v).ok())
|
||||
.and_then(|v| v.parse().ok())
|
||||
{
|
||||
available_parallelism
|
||||
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
|
||||
available_parallelism
|
||||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
|
||||
})
|
||||
let available_parallelism = if let Some(available_parallelism) = available_parallelism
|
||||
.as_deref()
|
||||
.and_then(|v| std::str::from_utf8(v).ok())
|
||||
.and_then(|v| v.parse().ok())
|
||||
{
|
||||
available_parallelism
|
||||
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
|
||||
available_parallelism
|
||||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1)
|
||||
.expect("failed to create job server")
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
struct State {
|
||||
obtained_count: usize,
|
||||
waiting_count: usize,
|
||||
available: Vec<Acquired>,
|
||||
implicit_available: bool,
|
||||
}
|
||||
|
||||
impl State {
|
||||
fn total_available(&self) -> usize {
|
||||
self.available.len() + self.implicit_available as usize
|
||||
}
|
||||
fn additional_waiting(&self) -> usize {
|
||||
self.waiting_count.saturating_sub(self.total_available())
|
||||
}
|
||||
}
|
||||
|
||||
static STATE: Mutex<State> = Mutex::new(State {
|
||||
obtained_count: 0,
|
||||
waiting_count: 0,
|
||||
available: Vec::new(),
|
||||
implicit_available: true,
|
||||
});
|
||||
static COND_VAR: Condvar = Condvar::new();
|
||||
|
||||
#[derive(Debug)]
|
||||
enum AcquiredJobInner {
|
||||
FromJobServer(Acquired),
|
||||
ImplicitJob,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AcquiredJob {
|
||||
job: AcquiredJobInner,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl AcquiredJob {
|
||||
fn start_acquire_thread() {
|
||||
static STARTED_THREAD: Once = Once::new();
|
||||
STARTED_THREAD.call_once(|| {
|
||||
spawn(|| {
|
||||
let mut acquired = None;
|
||||
let client = get_or_make_client();
|
||||
pub fn acquire() -> io::Result<Self> {
|
||||
let client = get_or_make_client();
|
||||
struct Waiting {}
|
||||
|
||||
impl Waiting {
|
||||
fn done(self) -> MutexGuard<'static, State> {
|
||||
mem::forget(self);
|
||||
let mut state = STATE.lock().unwrap();
|
||||
loop {
|
||||
state = if state.additional_waiting() == 0 {
|
||||
if acquired.is_some() {
|
||||
drop(state);
|
||||
drop(acquired.take()); // drop Acquired outside of lock
|
||||
STATE.lock().unwrap()
|
||||
} else {
|
||||
COND_VAR.wait(state).unwrap()
|
||||
}
|
||||
} else if acquired.is_some() {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
state.available.push(acquired.take().unwrap());
|
||||
COND_VAR.notify_all();
|
||||
state
|
||||
} else {
|
||||
drop(state);
|
||||
acquired = Some(
|
||||
client
|
||||
.acquire()
|
||||
.expect("can't acquire token from job server"),
|
||||
);
|
||||
STATE.lock().unwrap()
|
||||
};
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
fn acquire_inner(block: bool) -> Option<Self> {
|
||||
Self::start_acquire_thread();
|
||||
let mut state = STATE.lock().unwrap();
|
||||
loop {
|
||||
if let Some(acquired) = state.available.pop() {
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::FromJobServer(acquired),
|
||||
});
|
||||
state.waiting_count -= 1;
|
||||
state
|
||||
}
|
||||
if state.implicit_available {
|
||||
state.implicit_available = false;
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::ImplicitJob,
|
||||
});
|
||||
}
|
||||
if !block {
|
||||
return None;
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
state = COND_VAR.wait(state).unwrap();
|
||||
state.waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
pub fn try_acquire() -> Option<Self> {
|
||||
Self::acquire_inner(false)
|
||||
}
|
||||
pub fn acquire() -> Self {
|
||||
Self::acquire_inner(true).expect("failed to acquire token")
|
||||
impl Drop for Waiting {
|
||||
fn drop(&mut self) {
|
||||
STATE.lock().unwrap().waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
let mut state = STATE.lock().unwrap();
|
||||
if state.obtained_count == 0 && state.waiting_count == 0 {
|
||||
state.obtained_count = 1; // get implicit token
|
||||
return Ok(Self { client });
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
drop(state);
|
||||
let waiting = Waiting {};
|
||||
client.acquire_raw()?;
|
||||
state = waiting.done();
|
||||
state.obtained_count = state
|
||||
.obtained_count
|
||||
.checked_add(1)
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
|
||||
drop(state);
|
||||
Ok(Self { client })
|
||||
}
|
||||
pub fn run_command<R>(
|
||||
&mut self,
|
||||
cmd: std::process::Command,
|
||||
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
||||
) -> std::io::Result<R> {
|
||||
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
|
||||
self.client.configure_make_and_run_with_fifo(cmd, f)
|
||||
}
|
||||
}
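A minimal sketch (not part of the diff; the make invocation is hypothetical) of the new io::Result-returning acquire combined with run_command:

fn run_build_step() -> std::io::Result<std::process::ExitStatus> {
    // take a jobserver token before spawning the subprocess
    let mut job = AcquiredJob::acquire()?;
    job.run_command(std::process::Command::new("make"), |cmd| cmd.status())
}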
|
||||
|
||||
impl Drop for AcquiredJob {
|
||||
fn drop(&mut self) {
|
||||
let mut state = STATE.lock().unwrap();
|
||||
match &self.job {
|
||||
AcquiredJobInner::FromJobServer(_) => {
|
||||
if state.waiting_count > state.available.len() + state.implicit_available as usize {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
let AcquiredJobInner::FromJobServer(acquired) =
|
||||
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
state.available.push(acquired);
|
||||
COND_VAR.notify_all();
|
||||
match &mut *state {
|
||||
State {
|
||||
obtained_count: 0, ..
|
||||
} => unreachable!(),
|
||||
State {
|
||||
obtained_count: obtained_count @ 1,
|
||||
waiting_count,
|
||||
} => {
|
||||
*obtained_count = 0; // drop implicit token
|
||||
let any_waiting = *waiting_count != 0;
|
||||
drop(state);
|
||||
if any_waiting {
|
||||
// we have the implicit token, but some other thread is trying to acquire a token,
|
||||
// release the implicit token so they can acquire it.
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
}
|
||||
}
|
||||
AcquiredJobInner::ImplicitJob => {
|
||||
state.implicit_available = true;
|
||||
if state.waiting_count > state.available.len() {
|
||||
COND_VAR.notify_all();
|
||||
}
|
||||
State { obtained_count, .. } => {
|
||||
*obtained_count = obtained_count.saturating_sub(1);
|
||||
drop(state);
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -3,7 +3,11 @@
|
|||
use crate::intern::{Intern, Interned};
|
||||
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Debug, Write},
|
||||
io,
|
||||
ops::{Bound, Range, RangeBounds},
|
||||
rc::Rc,
|
||||
sync::{Arc, OnceLock},
|
||||
};
|
||||
|
|
@@ -94,9 +98,15 @@ pub fn interned_bit(v: bool) -> Interned<BitSlice> {
|
|||
RETVAL.get_or_init(|| [bits![0; 1].intern(), bits![1; 1].intern()])[v as usize]
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct BitSliceWriteWithBase<'a>(pub &'a BitSlice);
|
||||
|
||||
impl<'a> Debug for BitSliceWriteWithBase<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{self:#x}")
|
||||
}
|
||||
}
|
||||
|
||||
impl BitSliceWriteWithBase<'_> {
|
||||
fn fmt_with_base<const BITS_PER_DIGIT: usize, const UPPER_CASE: bool>(
|
||||
self,
|
||||
|
|
@@ -155,3 +165,450 @@ impl fmt::UpperHex for BitSliceWriteWithBase<'_> {
|
|||
self.fmt_with_base::<4, true>(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct RcWriter(Rc<Cell<Vec<u8>>>);
|
||||
|
||||
impl Debug for RcWriter {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.borrow_impl(|buf| {
|
||||
f.debug_tuple("RcWriter")
|
||||
.field(&DebugAsDisplay(format_args!("b\"{}\"", buf.escape_ascii())))
|
||||
.finish()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl RcWriter {
|
||||
fn borrow_impl<R>(&self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
|
||||
let buf = Cell::take(&self.0);
|
||||
struct PutBackOnDrop<'a> {
|
||||
buf: Vec<u8>,
|
||||
this: &'a RcWriter,
|
||||
}
|
||||
impl Drop for PutBackOnDrop<'_> {
|
||||
fn drop(&mut self) {
|
||||
self.this.0.set(std::mem::take(&mut self.buf));
|
||||
}
|
||||
}
|
||||
let mut buf = PutBackOnDrop { buf, this: self };
|
||||
f(&mut buf.buf)
|
||||
}
|
||||
pub fn borrow<R>(&mut self, f: impl FnOnce(&mut Vec<u8>) -> R) -> R {
|
||||
self.borrow_impl(f)
|
||||
}
|
||||
pub fn take(&mut self) -> Vec<u8> {
|
||||
Cell::take(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::io::Write for RcWriter {
|
||||
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
|
||||
self.borrow(|v| v.extend_from_slice(buf));
|
||||
Ok(buf.len())
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> std::io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
}
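A minimal sketch (not part of the diff): RcWriter clones share one buffer, so bytes written through either handle are visible from both.

#[test]
fn rc_writer_sketch() {
    use std::io::Write;
    let mut writer = RcWriter::default();
    let mut other = writer.clone();
    write!(other, "hello").unwrap();
    assert_eq!(writer.take(), b"hello");
}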
|
||||
|
||||
macro_rules! chain {
|
||||
() => {
|
||||
std::iter::empty()
|
||||
};
|
||||
($first:expr $(, $rest:expr)* $(,)?) => {
|
||||
{
|
||||
let retval = IntoIterator::into_iter($first);
|
||||
$(let retval = Iterator::chain(retval, $rest);)*
|
||||
retval
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use chain;
|
||||
|
||||
pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<Range<usize>> {
|
||||
let start = match range.start_bound() {
|
||||
Bound::Included(start) => *start,
|
||||
Bound::Excluded(start) => start.checked_add(1)?,
|
||||
Bound::Unbounded => 0,
|
||||
};
|
||||
let end = match range.end_bound() {
|
||||
Bound::Included(end) => end.checked_add(1)?,
|
||||
Bound::Excluded(end) => *end,
|
||||
Bound::Unbounded => size,
|
||||
};
|
||||
(start <= end && end <= size).then_some(start..end)
|
||||
}
|
||||
|
||||
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
|
||||
try_slice_range(range, size).expect("range out of bounds")
|
||||
}
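A minimal sketch (not part of the diff) of how the range bounds resolve against a size:

#[test]
fn slice_range_sketch() {
    assert_eq!(try_slice_range(2..5, 8), Some(2..5));
    assert_eq!(try_slice_range(..=7, 8), Some(0..8));
    assert_eq!(try_slice_range(3.., 8), Some(3..8));
    assert_eq!(try_slice_range(5..12, 8), None); // end past size
}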
|
||||
|
||||
pub trait SerdeJsonEscapeIfTest {
|
||||
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
|
||||
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfTestResult {
|
||||
fn to_result(self) -> serde_json::Result<bool>;
|
||||
}
|
||||
|
||||
impl SerdeJsonEscapeIfTestResult for bool {
|
||||
fn to_result(self) -> serde_json::Result<bool> {
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
|
||||
fn to_result(self) -> serde_json::Result<bool> {
|
||||
self.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
|
||||
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
|
||||
self(ch).to_result()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
|
||||
fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
for utf16 in ch.encode_utf16(&mut [0; 2]) {
|
||||
write!(writer, "\\u{utf16:04x}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
|
||||
impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
|
||||
|
||||
pub struct SerdeJsonEscapeIf<Test, Base = serde_json::ser::CompactFormatter> {
|
||||
pub base: Base,
|
||||
pub test: Test,
|
||||
}
|
||||
|
||||
impl<Test: SerdeJsonEscapeIfTest, Base: SerdeJsonEscapeIfFormatter> serde_json::ser::Formatter
|
||||
for SerdeJsonEscapeIf<Test, Base>
|
||||
{
|
||||
fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_null(writer)
|
||||
}
|
||||
|
||||
fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_bool(writer, value)
|
||||
}
|
||||
|
||||
fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i8(writer, value)
|
||||
}
|
||||
|
||||
fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i16(writer, value)
|
||||
}
|
||||
|
||||
fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i32(writer, value)
|
||||
}
|
||||
|
||||
fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i64(writer, value)
|
||||
}
|
||||
|
||||
fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i128(writer, value)
|
||||
}
|
||||
|
||||
fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u8(writer, value)
|
||||
}
|
||||
|
||||
fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u16(writer, value)
|
||||
}
|
||||
|
||||
fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u32(writer, value)
|
||||
}
|
||||
|
||||
fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u64(writer, value)
|
||||
}
|
||||
|
||||
fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u128(writer, value)
|
||||
}
|
||||
|
||||
fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_f32(writer, value)
|
||||
}
|
||||
|
||||
fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_f64(writer, value)
|
||||
}
|
||||
|
||||
fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_number_str(writer, value)
|
||||
}
|
||||
|
||||
fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_string(writer)
|
||||
}
|
||||
|
||||
fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_string(writer)
|
||||
}
|
||||
|
||||
fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
while let Some((next_escape_index, next_escape_char)) = fragment
|
||||
.char_indices()
|
||||
.find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
|
||||
Ok(false) => None,
|
||||
Ok(true) => Some(Ok((index, ch))),
|
||||
Err(e) => Some(Err(e)),
|
||||
})
|
||||
.transpose()?
|
||||
{
|
||||
let (no_escapes, rest) = fragment.split_at(next_escape_index);
|
||||
fragment = &rest[next_escape_char.len_utf8()..];
|
||||
self.base.write_string_fragment(writer, no_escapes)?;
|
||||
self.base.write_unicode_escape(writer, next_escape_char)?;
|
||||
}
|
||||
self.base.write_string_fragment(writer, fragment)
|
||||
}
|
||||
|
||||
fn write_char_escape<W>(
|
||||
&mut self,
|
||||
writer: &mut W,
|
||||
char_escape: serde_json::ser::CharEscape,
|
||||
) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_char_escape(writer, char_escape)
|
||||
}
|
||||
|
||||
fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_byte_array(writer, value)
|
||||
}
|
||||
|
||||
fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_array(writer)
|
||||
}
|
||||
|
||||
fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_array(writer)
|
||||
}
|
||||
|
||||
fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_array_value(writer, first)
|
||||
}
|
||||
|
||||
fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_array_value(writer)
|
||||
}
|
||||
|
||||
fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object(writer)
|
||||
}
|
||||
|
||||
fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object(writer)
|
||||
}
|
||||
|
||||
fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object_key(writer, first)
|
||||
}
|
||||
|
||||
fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object_key(writer)
|
||||
}
|
||||
|
||||
fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object_value(writer)
|
||||
}
|
||||
|
||||
fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object_value(writer)
|
||||
}
|
||||
|
||||
fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_raw_fragment(writer, fragment)
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_to_json_ascii_helper<F: SerdeJsonEscapeIfFormatter, S: serde::Serialize + ?Sized>(
|
||||
v: &S,
|
||||
base: F,
|
||||
) -> serde_json::Result<String> {
|
||||
let mut retval = Vec::new();
|
||||
v.serialize(&mut serde_json::ser::Serializer::with_formatter(
|
||||
&mut retval,
|
||||
SerdeJsonEscapeIf {
|
||||
base,
|
||||
test: |ch| ch < '\x20' || ch > '\x7F',
|
||||
},
|
||||
))?;
|
||||
String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii<T: serde::Serialize + ?Sized>(v: &T) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
indent: &str,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(
|
||||
v,
|
||||
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
|
||||
)
|
||||
}
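A minimal sketch (not part of the diff): every non-ASCII character is emitted as a \u escape, so the resulting JSON is plain ASCII; the exact escape noted in the comment is an assumption about the formatter above.

#[test]
fn serialize_to_json_ascii_sketch() {
    let json = serialize_to_json_ascii(&"héllo").unwrap();
    assert!(json.is_ascii());
    // expected output (assumption): "h\u00e9llo"
}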
|
||||
|
||||
pub fn os_str_strip_prefix<'a>(os_str: &'a OsStr, prefix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||
os_str
|
||||
.as_encoded_bytes()
|
||||
.strip_prefix(prefix.as_ref().as_bytes())
|
||||
.map(|bytes| {
|
||||
// Safety: we removed a UTF-8 prefix so bytes starts with a valid boundary
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
})
|
||||
}
|
||||
|
||||
pub fn os_str_strip_suffix<'a>(os_str: &'a OsStr, suffix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||
os_str
|
||||
.as_encoded_bytes()
|
||||
.strip_suffix(suffix.as_ref().as_bytes())
|
||||
.map(|bytes| {
|
||||
// Safety: we removed a UTF-8 suffix so bytes ends with a valid boundary
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
})
|
||||
}
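A minimal sketch (not part of the diff) of the OsStr prefix/suffix helpers:

#[test]
fn os_str_strip_sketch() {
    use std::ffi::OsStr;
    assert_eq!(
        os_str_strip_prefix(OsStr::new("prefix_sum.rs"), "prefix_"),
        Some(OsStr::new("sum.rs"))
    );
    assert_eq!(
        os_str_strip_suffix(OsStr::new("lib.rs"), ".rs"),
        Some(OsStr::new("lib"))
    );
}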
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct InternedStrCompareAsStr(pub(crate) Interned<str>);
|
||||
|
||||
impl fmt::Debug for InternedStrCompareAsStr {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for InternedStrCompareAsStr {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
str::cmp(&self.0, &other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for InternedStrCompareAsStr {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::borrow::Borrow<str> for InternedStrCompareAsStr {
|
||||
fn borrow(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
|
|
|||
crates/fayalite/src/util/prefix_sum.rs (new file, 839 lines)
|
|
@@ -0,0 +1,839 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
// code derived from:
|
||||
// https://web.archive.org/web/20250303054010/https://git.libre-soc.org/?p=nmutil.git;a=blob;f=src/nmutil/prefix_sum.py;hb=effeb28e5848392adddcdad1f6e7a098f2a44c9c
|
||||
|
||||
use crate::intern::{Intern, Interned, Memoize};
|
||||
use std::{borrow::Cow, num::NonZeroUsize};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
||||
pub struct PrefixSumOp {
|
||||
pub lhs_index: usize,
|
||||
pub rhs_and_dest_index: NonZeroUsize,
|
||||
pub row: u32,
|
||||
}
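A minimal sketch (not part of the diff; indices chosen for illustration): two independent adds in one row of a 4-item network, listed with strictly decreasing rhs_and_dest_index as the draw method below requires.

fn prefix_sum_diagram_sketch() -> String {
    use std::num::NonZeroUsize;
    let ops = [
        PrefixSumOp { lhs_index: 2, rhs_and_dest_index: NonZeroUsize::new(3).unwrap(), row: 0 },
        PrefixSumOp { lhs_index: 0, rhs_and_dest_index: NonZeroUsize::new(1).unwrap(), row: 0 },
    ];
    DiagramConfig::new().draw(ops, 4)
}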
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct DiagramConfig {
|
||||
pub space: Cow<'static, str>,
|
||||
pub vertical_bar: Cow<'static, str>,
|
||||
pub plus: Cow<'static, str>,
|
||||
pub slant: Cow<'static, str>,
|
||||
pub connect: Cow<'static, str>,
|
||||
pub no_connect: Cow<'static, str>,
|
||||
pub padding: usize,
|
||||
}
|
||||
|
||||
impl DiagramConfig {
|
||||
pub const fn new() -> Self {
|
||||
Self {
|
||||
space: Cow::Borrowed(" "),
|
||||
vertical_bar: Cow::Borrowed("|"),
|
||||
plus: Cow::Borrowed("\u{2295}"), // ⊕
|
||||
slant: Cow::Borrowed(r"\"),
|
||||
connect: Cow::Borrowed("\u{25CF}"), // ●
|
||||
no_connect: Cow::Borrowed("X"),
|
||||
padding: 1,
|
||||
}
|
||||
}
|
||||
pub fn draw(self, ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize) -> String {
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct DiagramCell {
|
||||
slant: bool,
|
||||
plus: bool,
|
||||
tee: bool,
|
||||
}
|
||||
let mut ops_by_row: Vec<Vec<PrefixSumOp>> = Vec::new();
|
||||
let mut last_row = 0;
|
||||
ops.into_iter().for_each(|op| {
|
||||
assert!(
|
||||
op.lhs_index < op.rhs_and_dest_index.get(),
|
||||
"invalid PrefixSumOp! lhs_index must be less \
|
||||
than rhs_and_dest_index: {op:?}",
|
||||
);
|
||||
assert!(
|
||||
op.row >= last_row,
|
||||
"invalid PrefixSumOp! row must \
|
||||
not decrease (row last was: {last_row}): {op:?}",
|
||||
);
|
||||
let ops = if op.row > last_row || ops_by_row.is_empty() {
|
||||
ops_by_row.push(vec![]);
|
||||
ops_by_row.last_mut().expect("just pushed")
|
||||
} else {
|
||||
ops_by_row
|
||||
.last_mut()
|
||||
.expect("just checked if ops_by_row is empty")
|
||||
};
|
||||
if let Some(last) = ops.last() {
|
||||
assert!(
|
||||
op.rhs_and_dest_index < last.rhs_and_dest_index,
|
||||
"invalid PrefixSumOp! rhs_and_dest_index must strictly \
|
||||
decrease in a row:\nthis op: {op:?}\nlast op: {last:?}",
|
||||
);
|
||||
}
|
||||
ops.push(op);
|
||||
last_row = op.row;
|
||||
});
|
||||
let blank_row = || {
|
||||
vec![
|
||||
DiagramCell {
|
||||
slant: false,
|
||||
plus: false,
|
||||
tee: false
|
||||
};
|
||||
item_count
|
||||
]
|
||||
};
|
||||
let mut cells = vec![blank_row()];
|
||||
for ops in ops_by_row {
|
||||
let max_distance = ops
|
||||
.iter()
|
||||
.map(
|
||||
|&PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
..
|
||||
}| { rhs_and_dest_index.get() - lhs_index },
|
||||
)
|
||||
.max()
|
||||
.expect("ops is known to be non-empty");
|
||||
cells.extend((0..max_distance).map(|_| blank_row()));
|
||||
for op in ops {
|
||||
let mut y = cells.len() - 1;
|
||||
assert!(
|
||||
op.rhs_and_dest_index.get() < item_count,
|
||||
"invalid PrefixSumOp! rhs_and_dest_index must be \
|
||||
less than item_count ({item_count}): {op:?}",
|
||||
);
|
||||
let mut x = op.rhs_and_dest_index.get();
|
||||
cells[y][x].plus = true;
|
||||
x -= 1;
|
||||
y -= 1;
|
||||
while op.lhs_index < x {
|
||||
cells[y][x].slant = true;
|
||||
x -= 1;
|
||||
y -= 1;
|
||||
}
|
||||
cells[y][x].tee = true;
|
||||
}
|
||||
}
|
||||
let mut retval = String::new();
|
||||
let mut row_text = vec![String::new(); 2 * self.padding + 1];
|
||||
for cells_row in cells {
|
||||
for cell in cells_row {
|
||||
// top padding
|
||||
for y in 0..self.padding {
|
||||
// top left padding
|
||||
for x in 0..self.padding {
|
||||
row_text[y] += if x == y && (cell.plus || cell.slant) {
|
||||
&self.slant
|
||||
} else {
|
||||
&self.space
|
||||
};
|
||||
}
|
||||
// top vertical bar
|
||||
row_text[y] += &self.vertical_bar;
|
||||
// top right padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[y] += &self.space;
|
||||
}
|
||||
}
|
||||
// center left padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[self.padding] += &self.space;
|
||||
}
|
||||
// center
|
||||
row_text[self.padding] += if cell.plus {
|
||||
&self.plus
|
||||
} else if cell.tee {
|
||||
&self.connect
|
||||
} else if cell.slant {
|
||||
&self.no_connect
|
||||
} else {
|
||||
&self.vertical_bar
|
||||
};
|
||||
// center right padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[self.padding] += &self.space;
|
||||
}
|
||||
let bottom_padding_start = self.padding + 1;
|
||||
let bottom_padding_last = self.padding * 2;
|
||||
// bottom padding
|
||||
for y in bottom_padding_start..=bottom_padding_last {
|
||||
// bottom left padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[y] += &self.space;
|
||||
}
|
||||
// bottom vertical bar
|
||||
row_text[y] += &self.vertical_bar;
|
||||
// bottom right padding
|
||||
for x in bottom_padding_start..=bottom_padding_last {
|
||||
row_text[y] += if x == y && (cell.tee || cell.slant) {
|
||||
&self.slant
|
||||
} else {
|
||||
&self.space
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
for line in &mut row_text {
|
||||
retval += line.trim_end();
|
||||
retval += "\n";
|
||||
line.clear();
|
||||
}
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DiagramConfig {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrefixSumOp {
|
||||
pub fn diagram(ops: impl IntoIterator<Item = Self>, item_count: usize) -> String {
|
||||
Self::diagram_with_config(ops, item_count, DiagramConfig::new())
|
||||
}
|
||||
pub fn diagram_with_config(
|
||||
ops: impl IntoIterator<Item = Self>,
|
||||
item_count: usize,
|
||||
config: DiagramConfig,
|
||||
) -> String {
|
||||
config.draw(ops, item_count)
|
||||
}
|
||||
}
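
// Editor's note: a small sketch, not part of this diff, showing how the
// diagram helpers above are meant to be driven; no exact output string is
// asserted because the rendering depends on DiagramConfig.
#[cfg(test)]
mod diagram_usage_example {
    use super::*;

    #[test]
    fn print_low_latency_network_for_8_items() {
        // operation list for an 8-input low-latency scan network
        let ops = PrefixSumAlgorithm::LowLatency.ops(8);
        // render with the default config: "\u{2295}" marks a combine at the
        // destination lane, "\u{25CF}" marks where the left operand is tapped
        let text = PrefixSumOp::diagram(ops, 8);
        println!("{text}");
        assert!(!text.is_empty());
    }
}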

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum PrefixSumAlgorithm {
    /// Uses the algorithm from:
    /// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_1:_Shorter_span,_more_parallel>
    LowLatency,
    /// Uses the algorithm from:
    /// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_2:_Work-efficient>
    WorkEfficient,
}

impl PrefixSumAlgorithm {
    fn ops_impl(self, item_count: usize) -> Vec<PrefixSumOp> {
        let mut retval = Vec::new();
        let mut distance = 1;
        let mut row = 0;
        while distance < item_count {
            let double_distance = distance
                .checked_mul(2)
                .expect("prefix-sum item_count is too big");
            let (start, step) = match self {
                Self::LowLatency => (distance, 1),
                Self::WorkEfficient => (double_distance - 1, double_distance),
            };
            for rhs_and_dest_index in (start..item_count).step_by(step).rev() {
                let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
                    unreachable!();
                };
                let lhs_index = rhs_and_dest_index.get() - distance;
                retval.push(PrefixSumOp {
                    lhs_index,
                    rhs_and_dest_index,
                    row,
                });
            }
            distance = double_distance;
            row += 1;
        }
        match self {
            Self::LowLatency => {}
            Self::WorkEfficient => {
                distance /= 2;
                while distance >= 1 {
                    let start = distance
                        .checked_mul(3)
                        .expect("prefix-sum item_count is too big")
                        - 1;
                    for rhs_and_dest_index in (start..item_count).step_by(distance * 2).rev() {
                        let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
                            unreachable!();
                        };
                        let lhs_index = rhs_and_dest_index.get() - distance;
                        retval.push(PrefixSumOp {
                            lhs_index,
                            rhs_and_dest_index,
                            row,
                        });
                    }
                    row += 1;
                    distance /= 2;
                }
            }
        }
        retval
    }
    pub fn ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
        #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
        struct MyMemoize(PrefixSumAlgorithm);
        impl Memoize for MyMemoize {
            type Input = usize;
            type InputOwned = usize;
            type Output = Interned<[PrefixSumOp]>;

            fn inner(self, item_count: &Self::Input) -> Self::Output {
                Intern::intern_owned(self.0.ops_impl(*item_count))
            }
        }
        MyMemoize(self).get_owned(item_count)
    }
    pub fn run<T>(self, items: impl IntoIterator<Item = T>, f: impl FnMut(&T, &T) -> T) -> Vec<T> {
        let mut items = Vec::from_iter(items);
        self.run_on_slice(&mut items, f);
        items
    }
    pub fn run_on_slice<T>(self, items: &mut [T], mut f: impl FnMut(&T, &T) -> T) -> &mut [T] {
        self.ops(items.len()).into_iter().for_each(
            |PrefixSumOp {
                 lhs_index,
                 rhs_and_dest_index,
                 row: _,
             }| {
                items[rhs_and_dest_index.get()] =
                    f(&items[lhs_index], &items[rhs_and_dest_index.get()]);
            },
        );
        items
    }
    pub fn filtered_ops(
        self,
        item_live_out_flags: impl IntoIterator<Item = bool>,
    ) -> Vec<PrefixSumOp> {
        let mut item_live_out_flags = Vec::from_iter(item_live_out_flags);
        let prefix_sum_ops = self.ops(item_live_out_flags.len());
        let mut ops_live_flags = vec![false; prefix_sum_ops.len()];
        for (
            op_index,
            &PrefixSumOp {
                lhs_index,
                rhs_and_dest_index,
                row: _,
            },
        ) in prefix_sum_ops.iter().enumerate().rev()
        {
            let live = item_live_out_flags[rhs_and_dest_index.get()];
            item_live_out_flags[lhs_index] |= live;
            ops_live_flags[op_index] = live;
        }
        prefix_sum_ops
            .into_iter()
            .zip(ops_live_flags)
            .filter_map(|(op, live)| live.then_some(op))
            .collect()
    }
    pub fn reduce_ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
        #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
        struct MyMemoize(PrefixSumAlgorithm);
        impl Memoize for MyMemoize {
            type Input = usize;
            type InputOwned = usize;
            type Output = Interned<[PrefixSumOp]>;

            fn inner(self, item_count: &Self::Input) -> Self::Output {
                let mut item_live_out_flags = vec![false; *item_count];
                let Some(last_item_live_out_flag) = item_live_out_flags.last_mut() else {
                    return Interned::default();
                };
                *last_item_live_out_flag = true;
                Intern::intern_owned(self.0.filtered_ops(item_live_out_flags))
            }
        }
        MyMemoize(self).get_owned(item_count)
    }
}

pub fn reduce_ops(item_count: usize) -> Interned<[PrefixSumOp]> {
    PrefixSumAlgorithm::LowLatency.reduce_ops(item_count)
}

pub fn reduce<T>(items: impl IntoIterator<Item = T>, mut f: impl FnMut(T, T) -> T) -> Option<T> {
    let mut items: Vec<_> = items.into_iter().map(Some).collect();
    for op in reduce_ops(items.len()) {
        let (Some(lhs), Some(rhs)) = (
            items[op.lhs_index].take(),
            items[op.rhs_and_dest_index.get()].take(),
        ) else {
            unreachable!();
        };
        items[op.rhs_and_dest_index.get()] = Some(f(lhs, rhs));
    }
    items.last_mut().and_then(Option::take)
}
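
// Editor's note: a quick sketch, not part of this diff, of the public entry
// points above using plain integers, so the expected values are easy to
// check by hand.
#[cfg(test)]
mod integer_usage_example {
    use super::*;

    #[test]
    fn integer_prefix_sum_and_reduction() {
        // inclusive prefix sum of 1..=5 is [1, 3, 6, 10, 15]
        let sums = PrefixSumAlgorithm::LowLatency.run(1u32..=5, |l, r| l + r);
        assert_eq!(sums, [1, 3, 6, 10, 15]);
        // the same combining function drives the tree reduction
        assert_eq!(reduce(1u32..=5, |l, r| l + r), Some(15));
        // reducing an empty iterator yields None
        assert_eq!(reduce(std::iter::empty::<u32>(), |l, r| l + r), None);
    }
}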

#[cfg(test)]
mod tests {
    use super::*;

    fn input_strings() -> [String; 9] {
        std::array::from_fn(|i| String::from_utf8(vec![b'a' + i as u8]).unwrap())
    }

    #[test]
    fn test_prefix_sum_strings() {
        let input = input_strings();
        let expected: Vec<String> = input
            .iter()
            .scan(String::new(), |l, r| {
                *l += r;
                Some(l.clone())
            })
            .collect();
        println!("expected: {expected:?}");
        assert_eq!(
            *PrefixSumAlgorithm::WorkEfficient
                .run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
            *expected
        );
        assert_eq!(
            *PrefixSumAlgorithm::LowLatency
                .run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
            *expected
        );
    }

    #[test]
    fn test_reduce_string() {
        let input = input_strings();
        let expected = input.clone().into_iter().reduce(|l, r| l + &r);
        assert_eq!(reduce(input, |l, r| l + &r), expected);
    }

    fn op(lhs_index: usize, rhs_and_dest_index: usize, row: u32) -> PrefixSumOp {
        PrefixSumOp {
            lhs_index,
            rhs_and_dest_index: NonZeroUsize::new(rhs_and_dest_index).expect("should be non-zero"),
            row,
        }
    }

    #[test]
    fn test_reduce_ops_9() {
        let expected = vec![
            op(7, 8, 0),
            op(5, 6, 0),
            op(3, 4, 0),
            op(1, 2, 0),
            op(6, 8, 1),
            op(2, 4, 1),
            op(4, 8, 2),
            op(0, 8, 3),
        ];
        println!("expected: {expected:#?}");
        let ops = reduce_ops(9);
        println!("ops: {ops:#?}");
        assert_eq!(*ops, *expected);
    }

    #[test]
    fn test_reduce_ops_8() {
        let expected = vec![
            op(6, 7, 0),
            op(4, 5, 0),
            op(2, 3, 0),
            op(0, 1, 0),
            op(5, 7, 1),
            op(1, 3, 1),
            op(3, 7, 2),
        ];
        println!("expected: {expected:#?}");
        let ops = reduce_ops(8);
        println!("ops: {ops:#?}");
        assert_eq!(*ops, *expected);
    }

    #[test]
    fn test_count_ones() {
        for width in 0..=10u32 {
            for v in 0..1u32 << width {
                let expected = v.count_ones();
                assert_eq!(
                    reduce((0..width).map(|i| (v >> i) & 1), |l, r| l + r).unwrap_or(0),
                    expected,
                    "v={v:#X}"
                );
            }
        }
    }

    #[track_caller]
    fn test_diagram(ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize, expected: &str) {
        let text = PrefixSumOp::diagram_with_config(
            ops,
            item_count,
            DiagramConfig {
                plus: Cow::Borrowed("@"),
                ..Default::default()
            },
        );
        println!("text:\n{text}\n");
        assert_eq!(text, expected);
    }
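
    // Editor's note: two extra sanity checks, not part of this diff, that
    // restate in executable form relationships that hold by construction:
    // filtering with every output live keeps every op, and the crate-level
    // reduce_ops() is LowLatency filtered down to just the last output.
    #[test]
    fn filtered_ops_with_all_outputs_live_keeps_every_op() {
        assert_eq!(
            PrefixSumAlgorithm::LowLatency.filtered_ops(vec![true; 9]),
            *PrefixSumAlgorithm::LowLatency.ops(9)
        );
    }

    #[test]
    fn reduce_ops_matches_filtering_to_last_output() {
        let mut live = vec![false; 9];
        *live.last_mut().unwrap() = true;
        assert_eq!(
            PrefixSumAlgorithm::LowLatency.filtered_ops(live),
            *reduce_ops(9)
        );
    }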
|
||||
#[test]
|
||||
fn test_work_efficient_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● | ● | ● | ● | ● | ● | ● | ● |
|
||||
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
|
||||
| \| | \| | \| | \| | \| | \| | \| | \|
|
||||
| @ | @ | @ | @ | @ | @ | @ | @
|
||||
| |\ | | | |\ | | | |\ | | | |\ | |
|
||||
| | \| | | | \| | | | \| | | | \| |
|
||||
| | X | | | X | | | X | | | X |
|
||||
| | |\ | | | |\ | | | |\ | | | |\ |
|
||||
| | | \| | | | \| | | | \| | | | \|
|
||||
| | | @ | | | @ | | | @ | | | @
|
||||
| | | |\ | | | | | | | |\ | | | |
|
||||
| | | | \| | | | | | | | \| | | |
|
||||
| | | | X | | | | | | | X | | |
|
||||
| | | | |\ | | | | | | | |\ | | |
|
||||
| | | | | \| | | | | | | | \| | |
|
||||
| | | | | X | | | | | | | X | |
|
||||
| | | | | |\ | | | | | | | |\ | |
|
||||
| | | | | | \| | | | | | | | \| |
|
||||
| | | | | | X | | | | | | | X |
|
||||
| | | | | | |\ | | | | | | | |\ |
|
||||
| | | | | | | \| | | | | | | | \|
|
||||
| | | | | | | @ | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | | | | | | | | | X | | | |
|
||||
| | | | | | | | | | | |\ | | | |
|
||||
| | | | | | | | | | | | \| | | |
|
||||
| | | | | | | | | | | | X | | |
|
||||
| | | | | | | | | | | | |\ | | |
|
||||
| | | | | | | | | | | | | \| | |
|
||||
| | | | | | | | | | | | | X | |
|
||||
| | | | | | | | | | | | | |\ | |
|
||||
| | | | | | | | | | | | | | \| |
|
||||
| | | | | | | | | | | | | | X |
|
||||
| | | | | | | | | | | | | | |\ |
|
||||
| | | | | | | | | | | | | | | \|
|
||||
| | | | | | | ● | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | ● | | | ● | | | @ | | | |
|
||||
| | | |\ | | | |\ | | | |\ | | | |
|
||||
| | | | \| | | | \| | | | \| | | |
|
||||
| | | | X | | | X | | | X | | |
|
||||
| | | | |\ | | | |\ | | | |\ | | |
|
||||
| | | | | \| | | | \| | | | \| | |
|
||||
| ● | ● | @ | ● | @ | ● | @ | |
|
||||
| |\ | |\ | |\ | |\ | |\ | |\ | |\ | |
|
||||
| | \| | \| | \| | \| | \| | \| | \| |
|
||||
| | @ | @ | @ | @ | @ | @ | @ |
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_low_latency_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::LowLatency.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● ● ● ● ● ● ● ● ● ● ● ● ● ● ● |
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● @ @ @ @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| |
|
||||
| X X X X X X X X X X X X X X |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | \| \| \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● ● @ @ @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | | |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| | | |
|
||||
| X X X X X X X X X X X X | | |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | |
|
||||
| | \| \| \| \| \| \| \| \| \| \| \| \| | |
|
||||
| | X X X X X X X X X X X X | |
|
||||
| | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| | | \| \| \| \| \| \| \| \| \| \| \| \| |
|
||||
| | | X X X X X X X X X X X X |
|
||||
| | | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | | | \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● ● ● ● @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ | | | | | | | |
|
||||
| \| \| \| \| \| \| \| \| | | | | | | |
|
||||
| X X X X X X X X | | | | | | |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ | | | | | | |
|
||||
| | \| \| \| \| \| \| \| \| | | | | | |
|
||||
| | X X X X X X X X | | | | | |
|
||||
| | |\ |\ |\ |\ |\ |\ |\ |\ | | | | | |
|
||||
| | | \| \| \| \| \| \| \| \| | | | | |
|
||||
| | | X X X X X X X X | | | | |
|
||||
| | | |\ |\ |\ |\ |\ |\ |\ |\ | | | | |
|
||||
| | | | \| \| \| \| \| \| \| \| | | | |
|
||||
| | | | X X X X X X X X | | | |
|
||||
| | | | |\ |\ |\ |\ |\ |\ |\ |\ | | | |
|
||||
| | | | | \| \| \| \| \| \| \| \| | | |
|
||||
| | | | | X X X X X X X X | | |
|
||||
| | | | | |\ |\ |\ |\ |\ |\ |\ |\ | | |
|
||||
| | | | | | \| \| \| \| \| \| \| \| | |
|
||||
| | | | | | X X X X X X X X | |
|
||||
| | | | | | |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| | | | | | | \| \| \| \| \| \| \| \| |
|
||||
| | | | | | | X X X X X X X X |
|
||||
| | | | | | | |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | | | | | | | \| \| \| \| \| \| \| \|
|
||||
| | | | | | | | @ @ @ @ @ @ @ @
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_work_efficient_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
● | ● | ● | ● | |
|
||||
|\ | |\ | |\ | |\ | |
|
||||
| \| | \| | \| | \| |
|
||||
| @ | @ | @ | @ |
|
||||
| |\ | | | |\ | | |
|
||||
| | \| | | | \| | |
|
||||
| | X | | | X | |
|
||||
| | |\ | | | |\ | |
|
||||
| | | \| | | | \| |
|
||||
| | | @ | | | @ |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | ● | | | @ |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| ● | ● | @ | ● |
|
||||
| |\ | |\ | |\ | |\ |
|
||||
| | \| | \| | \| | \|
|
||||
| | @ | @ | @ | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_low_latency_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::LowLatency.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
● ● ● ● ● ● ● ● |
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| \| \| \| \| \| \| \| \|
|
||||
● @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ | |
|
||||
| \| \| \| \| \| \| \| |
|
||||
| X X X X X X X |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | \| \| \| \| \| \| \|
|
||||
● ● @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ | | | |
|
||||
| \| \| \| \| \| | | |
|
||||
| X X X X X | | |
|
||||
| |\ |\ |\ |\ |\ | | |
|
||||
| | \| \| \| \| \| | |
|
||||
| | X X X X X | |
|
||||
| | |\ |\ |\ |\ |\ | |
|
||||
| | | \| \| \| \| \| |
|
||||
| | | X X X X X |
|
||||
| | | |\ |\ |\ |\ |\ |
|
||||
| | | | \| \| \| \| \|
|
||||
● | | | @ @ @ @ @
|
||||
|\ | | | | | | | |
|
||||
| \| | | | | | | |
|
||||
| X | | | | | | |
|
||||
| |\ | | | | | | |
|
||||
| | \| | | | | | |
|
||||
| | X | | | | | |
|
||||
| | |\ | | | | | |
|
||||
| | | \| | | | | |
|
||||
| | | X | | | | |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
| | | | | | | | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
reduce_ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● | ● | ● | ● | ● | ● | ● | ● |
|
||||
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
|
||||
| \| | \| | \| | \| | \| | \| | \| | \|
|
||||
| @ | @ | @ | @ | @ | @ | @ | @
|
||||
| |\ | | | |\ | | | |\ | | | |\ | |
|
||||
| | \| | | | \| | | | \| | | | \| |
|
||||
| | X | | | X | | | X | | | X |
|
||||
| | |\ | | | |\ | | | |\ | | | |\ |
|
||||
| | | \| | | | \| | | | \| | | | \|
|
||||
| | | @ | | | @ | | | @ | | | @
|
||||
| | | |\ | | | | | | | |\ | | | |
|
||||
| | | | \| | | | | | | | \| | | |
|
||||
| | | | X | | | | | | | X | | |
|
||||
| | | | |\ | | | | | | | |\ | | |
|
||||
| | | | | \| | | | | | | | \| | |
|
||||
| | | | | X | | | | | | | X | |
|
||||
| | | | | |\ | | | | | | | |\ | |
|
||||
| | | | | | \| | | | | | | | \| |
|
||||
| | | | | | X | | | | | | | X |
|
||||
| | | | | | |\ | | | | | | | |\ |
|
||||
| | | | | | | \| | | | | | | | \|
|
||||
| | | | | | | @ | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | | | | | | | | | X | | | |
|
||||
| | | | | | | | | | | |\ | | | |
|
||||
| | | | | | | | | | | | \| | | |
|
||||
| | | | | | | | | | | | X | | |
|
||||
| | | | | | | | | | | | |\ | | |
|
||||
| | | | | | | | | | | | | \| | |
|
||||
| | | | | | | | | | | | | X | |
|
||||
| | | | | | | | | | | | | |\ | |
|
||||
| | | | | | | | | | | | | | \| |
|
||||
| | | | | | | | | | | | | | X |
|
||||
| | | | | | | | | | | | | | |\ |
|
||||
| | | | | | | | | | | | | | | \|
|
||||
| | | | | | | | | | | | | | | @
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
reduce_ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
| ● | ● | ● | ● |
|
||||
| |\ | |\ | |\ | |\ |
|
||||
| | \| | \| | \| | \|
|
||||
| | @ | @ | @ | @
|
||||
| | |\ | | | |\ | |
|
||||
| | | \| | | | \| |
|
||||
| | | X | | | X |
|
||||
| | | |\ | | | |\ |
|
||||
| | | | \| | | | \|
|
||||
| | | | @ | | | @
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
● | | | | | | | @
|
||||
|\ | | | | | | | |
|
||||
| \| | | | | | | |
|
||||
| X | | | | | | |
|
||||
| |\ | | | | | | |
|
||||
| | \| | | | | | |
|
||||
| | X | | | | | |
|
||||
| | |\ | | | | | |
|
||||
| | | \| | | | | |
|
||||
| | | X | | | | |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
| | | | | | | | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
}

@@ -49,6 +49,18 @@ impl<T: Type> ReadyValid<T> {
        }
    }
}

/// This debug port is only meant to assist the formal proof of the queue.
#[cfg(test)]
#[doc(hidden)]
#[hdl]
pub struct QueueDebugPort<Element, Index> {
    #[hdl(flip)]
    index_to_check: Index,
    stored: Element,
    inp_index: Index,
    out_index: Index,
}

#[hdl_module]
pub fn queue<T: Type>(
    ty: T,

@@ -178,15 +190,29 @@ pub fn queue<T: Type>(
            }
        }
    }
    // These debug ports expose some internal state during the Induction phase
    // of Formal Verification. They are not present in normal use.
    #[cfg(test)]
    {
        #[hdl]
        let dbg: QueueDebugPort<T, UInt> = m.output(QueueDebugPort[ty][index_ty]);
        // read the memory word currently stored at some fixed index
        let debug_port = mem.new_read_port();
        connect(debug_port.addr, dbg.index_to_check);
        connect(debug_port.en, true);
        connect(debug_port.clk, cd.clk);
        connect(dbg.stored, debug_port.data);
        // also expose the current read and write indices
        connect(dbg.inp_index, inp_index_reg);
        connect(dbg.out_index, out_index_reg);
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        cli::FormalMode, firrtl::ExportOptions,
        module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
        ty::StaticType,
        firrtl::ExportOptions, module::transform::simplify_enums::SimplifyEnumsKind, ty::StaticType,
    };
    use std::num::NonZero;
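
    // Editor's note: a plain-Rust behavioural sketch, not part of this diff
    // and not HDL, of the one-entry-FIFO specification that the formal test
    // in this file compares the filtered queue against; the names and the u8
    // payload type are illustrative only.
    mod one_entry_fifo_model_sketch {
        /// One step of a one-entry FIFO: `stored` is the holding register,
        /// `inp` is data arriving this cycle (None when no input fires), and
        /// `out_ready` is whether the consumer can take data this cycle.
        /// Returns (new holding register, data transmitted this cycle).
        fn step(
            stored: Option<u8>,
            inp: Option<u8>,
            out_ready: bool,
        ) -> (Option<u8>, Option<u8>) {
            match (stored, inp, out_ready) {
                // empty and nothing received: nothing to transmit
                (None, None, _) => (None, None),
                // empty, receiving, consumer ready: pass the data straight through
                (None, Some(d), true) => (None, Some(d)),
                // empty, receiving, consumer not ready: hold the data
                (None, Some(d), false) => (Some(d), None),
                // full, consumer not ready: keep holding (backpressure means
                // no new input should fire in this case)
                (Some(s), _, false) => (Some(s), None),
                // full, consumer ready: transmit the stored value and accept
                // the new data, if any
                (Some(s), inp, true) => (inp, Some(s)),
            }
        }

        #[test]
        fn pass_through_hold_and_replace() {
            assert_eq!(step(None, Some(7), true), (None, Some(7)));
            assert_eq!(step(None, Some(7), false), (Some(7), None));
            assert_eq!(step(Some(7), Some(9), true), (Some(9), Some(7)));
        }
    }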
|
|
@ -196,13 +222,23 @@ mod tests {
|
|||
format_args!("test_queue_{capacity}_{inp_ready_is_comb}_{out_valid_is_comb}"),
|
||||
queue_test(capacity, inp_ready_is_comb, out_valid_is_comb),
|
||||
FormalMode::Prove,
|
||||
14,
|
||||
2,
|
||||
None,
|
||||
ExportOptions {
|
||||
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
|
||||
..ExportOptions::default()
|
||||
},
|
||||
);
|
||||
/// Formal verification of the FIFO queue
|
||||
///
|
||||
/// The strategy derives from the observation that, if we filter its
|
||||
/// input and output streams to consider just one in every N reads and
|
||||
/// writes (where N is the FIFO capacity), then the FIFO effectively
|
||||
/// behaves as a one-entry FIFO.
|
||||
///
|
||||
/// In particular, any counterexample of the full FIFO behaving badly
|
||||
/// will also be caught by one of the filtered versions (one which
|
||||
/// happens to be in phase with the offending input or output).
|
||||
#[hdl_module]
|
||||
fn queue_test(capacity: NonZeroUsize, inp_ready_is_comb: bool, out_valid_is_comb: bool) {
|
||||
#[hdl]
|
||||
|
|
@ -217,6 +253,8 @@ mod tests {
|
|||
rst: formal_reset().to_reset(),
|
||||
},
|
||||
);
|
||||
|
||||
// random input data
|
||||
#[hdl]
|
||||
let inp_data: HdlOption<UInt<8>> = wire();
|
||||
#[hdl]
|
||||
|
|
@ -225,16 +263,26 @@ mod tests {
|
|||
} else {
|
||||
connect(inp_data, HdlNone());
|
||||
}
|
||||
|
||||
// assert output ready at random
|
||||
#[hdl]
|
||||
let out_ready: Bool = wire();
|
||||
connect(out_ready, any_seq(Bool));
|
||||
let index_ty: UInt<32> = UInt::TYPE;
|
||||
|
||||
// The current number of elements in the FIFO ranges from zero to
|
||||
// maximum capacity, inclusive.
|
||||
let count_ty = UInt::range_inclusive(0..=capacity.get());
|
||||
// type for counters that wrap around at the FIFO capacity
|
||||
let index_ty = UInt::range(0..capacity.get());
|
||||
|
||||
// among all entries of the FIFO internal circular memory, choose
|
||||
// one at random to check
|
||||
#[hdl]
|
||||
let index_to_check = wire();
|
||||
let index_to_check = wire(index_ty);
|
||||
connect(index_to_check, any_const(index_ty));
|
||||
let index_max = !index_ty.zero();
|
||||
// we saturate at index_max, so only check indexes where we properly maintain position
|
||||
hdl_assume(clk, index_to_check.cmp_ne(index_max), "");
|
||||
hdl_assume(clk, index_to_check.cmp_lt(capacity.get()), "");
|
||||
|
||||
// instantiate and connect the queue
|
||||
#[hdl]
|
||||
let dut = instance(queue(
|
||||
UInt[ConstUsize::<8>],
|
||||
|
|
@ -245,109 +293,172 @@ mod tests {
|
|||
connect(dut.cd, cd);
|
||||
connect(dut.inp.data, inp_data);
|
||||
connect(dut.out.ready, out_ready);
|
||||
hdl_assume(
|
||||
clk,
|
||||
index_to_check.cmp_ne(!Expr::ty(index_to_check).zero()),
|
||||
"",
|
||||
);
|
||||
|
||||
// Keep an independent count of words in the FIFO. Ensure that
|
||||
// it's always correct, and never overflows.
|
||||
#[hdl]
|
||||
let expected_count_reg = reg_builder().clock_domain(cd).reset(0u32);
|
||||
#[hdl]
|
||||
let next_expected_count = wire();
|
||||
connect(next_expected_count, expected_count_reg);
|
||||
connect(expected_count_reg, next_expected_count);
|
||||
let expected_count_reg = reg_builder().clock_domain(cd).reset(count_ty.zero());
|
||||
#[hdl]
|
||||
if ReadyValid::firing(dut.inp) & !ReadyValid::firing(dut.out) {
|
||||
connect_any(next_expected_count, expected_count_reg + 1u8);
|
||||
hdl_assert(clk, expected_count_reg.cmp_ne(capacity.get()), "");
|
||||
connect_any(expected_count_reg, expected_count_reg + 1u8);
|
||||
} else if !ReadyValid::firing(dut.inp) & ReadyValid::firing(dut.out) {
|
||||
connect_any(next_expected_count, expected_count_reg - 1u8);
|
||||
hdl_assert(clk, expected_count_reg.cmp_ne(count_ty.zero()), "");
|
||||
connect_any(expected_count_reg, expected_count_reg - 1u8);
|
||||
}
|
||||
hdl_assert(cd.clk, expected_count_reg.cmp_eq(dut.count), "");
|
||||
|
||||
#[hdl]
|
||||
let prev_out_ready_reg = reg_builder().clock_domain(cd).reset(!0_hdl_u3);
|
||||
connect_any(
|
||||
prev_out_ready_reg,
|
||||
(prev_out_ready_reg << 1) | out_ready.cast_to(UInt[1]),
|
||||
);
|
||||
#[hdl]
|
||||
let prev_inp_valid_reg = reg_builder().clock_domain(cd).reset(!0_hdl_u3);
|
||||
connect_any(
|
||||
prev_inp_valid_reg,
|
||||
(prev_inp_valid_reg << 1) | HdlOption::is_some(inp_data).cast_to(UInt[1]),
|
||||
);
|
||||
hdl_assume(
|
||||
clk,
|
||||
(prev_out_ready_reg & prev_inp_valid_reg).cmp_ne(0u8),
|
||||
"",
|
||||
);
|
||||
hdl_assert(clk, expected_count_reg.cmp_eq(dut.count), "");
|
||||
|
||||
// keep an independent write index into the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let inp_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
|
||||
#[hdl]
|
||||
let stored_inp_data_reg = reg_builder().clock_domain(cd).reset(0u8);
|
||||
|
||||
#[hdl]
|
||||
if let HdlSome(data) = ReadyValid::firing_data(dut.inp) {
|
||||
if ReadyValid::firing(dut.inp) {
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_lt(index_max) {
|
||||
if inp_index_reg.cmp_ne(capacity.get() - 1) {
|
||||
connect_any(inp_index_reg, inp_index_reg + 1u8);
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_eq(index_to_check) {
|
||||
connect(stored_inp_data_reg, data);
|
||||
}
|
||||
} else {
|
||||
connect_any(inp_index_reg, 0_hdl_u0);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_lt(index_to_check) {
|
||||
hdl_assert(clk, stored_inp_data_reg.cmp_eq(0u8), "");
|
||||
}
|
||||
|
||||
// keep an independent read index into the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let out_index_reg = reg_builder().clock_domain(cd).reset(index_ty.zero());
|
||||
#[hdl]
|
||||
let stored_out_data_reg = reg_builder().clock_domain(cd).reset(0u8);
|
||||
|
||||
#[hdl]
|
||||
if let HdlSome(data) = ReadyValid::firing_data(dut.out) {
|
||||
if ReadyValid::firing(dut.out) {
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_lt(index_max) {
|
||||
if out_index_reg.cmp_ne(capacity.get() - 1) {
|
||||
connect_any(out_index_reg, out_index_reg + 1u8);
|
||||
} else {
|
||||
connect_any(out_index_reg, 0_hdl_u0);
|
||||
}
|
||||
}
|
||||
|
||||
// filter the input data stream, predicated by the read index
|
||||
// matching the chosen position in the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let inp_index_matches = wire();
|
||||
connect(inp_index_matches, inp_index_reg.cmp_eq(index_to_check));
|
||||
#[hdl]
|
||||
let inp_firing_data = wire();
|
||||
connect(inp_firing_data, HdlNone());
|
||||
#[hdl]
|
||||
if inp_index_matches {
|
||||
connect(inp_firing_data, ReadyValid::firing_data(dut.inp));
|
||||
}
|
||||
|
||||
// filter the output data stream, predicated by the write index
|
||||
// matching the chosen position in the FIFO's circular buffer
|
||||
#[hdl]
|
||||
let out_index_matches = wire();
|
||||
connect(out_index_matches, out_index_reg.cmp_eq(index_to_check));
|
||||
#[hdl]
|
||||
let out_firing_data = wire();
|
||||
connect(out_firing_data, HdlNone());
|
||||
#[hdl]
|
||||
if out_index_matches {
|
||||
connect(out_firing_data, ReadyValid::firing_data(dut.out));
|
||||
}
|
||||
|
||||
// Implement a one-entry FIFO and ensure its equivalence to the
|
||||
// filtered FIFO.
|
||||
//
|
||||
// the holding register for our one-entry FIFO
|
||||
#[hdl]
|
||||
let stored_reg = reg_builder().clock_domain(cd).reset(HdlNone());
|
||||
#[hdl]
|
||||
match stored_reg {
|
||||
// If the holding register is empty...
|
||||
HdlNone => {
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_eq(index_to_check) {
|
||||
connect(stored_out_data_reg, data);
|
||||
match inp_firing_data {
|
||||
// ... and we are not receiving data, then we must not
|
||||
// transmit any data.
|
||||
HdlNone => hdl_assert(clk, HdlOption::is_none(out_firing_data), ""),
|
||||
// If we are indeed receiving some data...
|
||||
HdlSome(data_in) => {
|
||||
#[hdl]
|
||||
match out_firing_data {
|
||||
// ... and transmitting at the same time, we
|
||||
// must be transmitting the input data itself,
|
||||
// since the holding register is empty.
|
||||
HdlSome(data_out) => hdl_assert(clk, data_out.cmp_eq(data_in), ""),
|
||||
// If we are receiving, but not transmitting,
|
||||
// store the received data in the holding
|
||||
// register.
|
||||
HdlNone => connect(stored_reg, HdlSome(data_in)),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// If there is some value stored in the holding register...
|
||||
HdlSome(stored) => {
|
||||
#[hdl]
|
||||
match out_firing_data {
|
||||
// ... and we are not transmitting it, we cannot
|
||||
// receive any more data.
|
||||
HdlNone => hdl_assert(clk, HdlOption::is_none(inp_firing_data), ""),
|
||||
// If we are transmitting a previously stored value...
|
||||
HdlSome(data_out) => {
|
||||
// ... it must be the same data we stored earlier.
|
||||
hdl_assert(clk, data_out.cmp_eq(stored), "");
|
||||
// Also, accept new data, if any. Otherwise,
|
||||
// let the holding register become empty.
|
||||
connect(stored_reg, inp_firing_data);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// from now on, some extra assertions in order to pass induction
|
||||
|
||||
// sync the holding register, when it's occupied, to the
|
||||
// corresponding entry in the FIFO's circular buffer
|
||||
connect(dut.dbg.index_to_check, index_to_check);
|
||||
#[hdl]
|
||||
if out_index_reg.cmp_lt(index_to_check) {
|
||||
hdl_assert(clk, stored_out_data_reg.cmp_eq(0u8), "");
|
||||
if let HdlSome(stored) = stored_reg {
|
||||
hdl_assert(clk, stored.cmp_eq(dut.dbg.stored), "");
|
||||
}
|
||||
|
||||
hdl_assert(clk, inp_index_reg.cmp_ge(out_index_reg), "");
|
||||
// sync the read and write indices
|
||||
hdl_assert(clk, inp_index_reg.cmp_eq(dut.dbg.inp_index), "");
|
||||
hdl_assert(clk, out_index_reg.cmp_eq(dut.dbg.out_index), "");
|
||||
|
||||
// the indices should never go past the capacity, but induction
|
||||
// doesn't know that...
|
||||
hdl_assert(clk, inp_index_reg.cmp_lt(capacity.get()), "");
|
||||
hdl_assert(clk, out_index_reg.cmp_lt(capacity.get()), "");
|
||||
|
||||
// strongly constrain the state of the holding register
|
||||
//
|
||||
// The holding register is full if and only if the corresponding
|
||||
// FIFO entry was written to and not yet read. In other words, if
|
||||
// the number of pending reads until the chosen entry is read out
|
||||
// is greater than the current FIFO count, then the entry couldn't
|
||||
// be in the FIFO in the first place.
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_lt(index_max) & out_index_reg.cmp_lt(index_max) {
|
||||
hdl_assert(
|
||||
clk,
|
||||
expected_count_reg.cmp_eq(inp_index_reg - out_index_reg),
|
||||
"",
|
||||
);
|
||||
let pending_reads: UInt = wire(index_ty);
|
||||
// take care of wrap-around when subtracting indices, add the
|
||||
// capacity amount to keep the result positive if necessary
|
||||
#[hdl]
|
||||
if index_to_check.cmp_ge(out_index_reg) {
|
||||
connect(pending_reads, index_to_check - out_index_reg);
|
||||
} else {
|
||||
hdl_assert(
|
||||
clk,
|
||||
expected_count_reg.cmp_ge(inp_index_reg - out_index_reg),
|
||||
"",
|
||||
connect(
|
||||
pending_reads,
|
||||
index_to_check + capacity.get() - out_index_reg,
|
||||
);
|
||||
}
|
||||
|
||||
// check whether the chosen entry is in the FIFO
|
||||
#[hdl]
|
||||
if inp_index_reg.cmp_gt(index_to_check) & out_index_reg.cmp_gt(index_to_check) {
|
||||
hdl_assert(clk, stored_inp_data_reg.cmp_eq(stored_out_data_reg), "");
|
||||
}
|
||||
let expected_stored: Bool = wire();
|
||||
connect(expected_stored, pending_reads.cmp_lt(dut.count));
|
||||
// sync with the state of the holding register
|
||||
hdl_assert(
|
||||
clk,
|
||||
expected_stored.cmp_eq(HdlOption::is_some(stored_reg)),
|
||||
"",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||

@@ -430,4 +541,24 @@
    fn test_4_true_true() {
        test_queue(NonZero::new(4).unwrap(), true, true);
    }

    #[test]
    fn test_many_false_false() {
        test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, false);
    }

    #[test]
    fn test_many_false_true() {
        test_queue(NonZero::new((2 << 16) - 5).unwrap(), false, true);
    }

    #[test]
    fn test_many_true_false() {
        test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, false);
    }

    #[test]
    fn test_many_true_true() {
        test_queue(NonZero::new((2 << 16) - 5).unwrap(), true, true);
    }
}

crates/fayalite/src/util/test_hasher.rs (new file, 240 lines)

@@ -0,0 +1,240 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![cfg(feature = "unstable-test-hasher")]
|
||||
|
||||
use std::{
|
||||
fmt::Write as _,
|
||||
hash::{BuildHasher, Hash, Hasher},
|
||||
io::Write as _,
|
||||
marker::PhantomData,
|
||||
sync::LazyLock,
|
||||
};
|
||||
|
||||
type BoxDynHasher = Box<dyn Hasher + Send + Sync>;
|
||||
type BoxDynBuildHasher = Box<dyn DynBuildHasherTrait + Send + Sync>;
|
||||
type BoxDynMakeBuildHasher = Box<dyn Fn() -> BoxDynBuildHasher + Send + Sync>;
|
||||
|
||||
trait TryGetDynBuildHasher: Copy {
|
||||
type Type;
|
||||
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher>;
|
||||
}
|
||||
|
||||
impl<T> TryGetDynBuildHasher for PhantomData<T> {
|
||||
type Type = T;
|
||||
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Default + BuildHasher<Hasher: Send + Sync + 'static> + Send + Sync + 'static + Clone>
|
||||
TryGetDynBuildHasher for &'_ PhantomData<T>
|
||||
{
|
||||
type Type = T;
|
||||
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher> {
|
||||
Some(Box::new(|| Box::<DynBuildHasher<T>>::default()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
struct DynBuildHasher<T>(T);
|
||||
|
||||
trait DynBuildHasherTrait: BuildHasher<Hasher = BoxDynHasher> {
|
||||
fn clone_dyn_build_hasher(&self) -> BoxDynBuildHasher;
|
||||
}
|
||||
|
||||
impl<BH: BuildHasher<Hasher: Send + Sync + 'static>> BuildHasher for DynBuildHasher<BH> {
|
||||
type Hasher = BoxDynHasher;
|
||||
|
||||
fn build_hasher(&self) -> Self::Hasher {
|
||||
Box::new(self.0.build_hasher())
|
||||
}
|
||||
|
||||
fn hash_one<T: Hash>(&self, x: T) -> u64 {
|
||||
self.0.hash_one(x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<BH> DynBuildHasherTrait for DynBuildHasher<BH>
|
||||
where
|
||||
Self: Clone + BuildHasher<Hasher = BoxDynHasher> + Send + Sync + 'static,
|
||||
{
|
||||
fn clone_dyn_build_hasher(&self) -> BoxDynBuildHasher {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DefaultBuildHasher(BoxDynBuildHasher);
|
||||
|
||||
impl Clone for DefaultBuildHasher {
|
||||
fn clone(&self) -> Self {
|
||||
DefaultBuildHasher(self.0.clone_dyn_build_hasher())
|
||||
}
|
||||
}
|
||||
|
||||
const ENV_VAR_NAME: &'static str = "FAYALITE_TEST_HASHER";
|
||||
|
||||
struct EnvVarValue {
|
||||
key: &'static str,
|
||||
try_get_make_build_hasher: fn() -> Option<BoxDynMakeBuildHasher>,
|
||||
description: &'static str,
|
||||
}
|
||||
|
||||
macro_rules! env_var_value {
|
||||
(
|
||||
key: $key:literal,
|
||||
build_hasher: $build_hasher:ty,
|
||||
description: $description:literal,
|
||||
) => {
|
||||
EnvVarValue {
|
||||
key: $key,
|
||||
try_get_make_build_hasher: || {
|
||||
// use rust method resolution to detect if $build_hasher is usable
|
||||
// (e.g. hashbrown's hasher won't be usable without the right feature enabled)
|
||||
(&PhantomData::<DynBuildHasher<$build_hasher>>).try_get_make_build_hasher()
|
||||
},
|
||||
description: $description,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct AlwaysZeroHasher;
|
||||
|
||||
impl Hasher for AlwaysZeroHasher {
|
||||
fn write(&mut self, _bytes: &[u8]) {}
|
||||
fn finish(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
const ENV_VAR_VALUES: &'static [EnvVarValue] = &[
|
||||
env_var_value! {
|
||||
key: "std",
|
||||
build_hasher: std::hash::RandomState,
|
||||
description: "use std::hash::RandomState",
|
||||
},
|
||||
env_var_value! {
|
||||
key: "hashbrown",
|
||||
build_hasher: hashbrown::DefaultHashBuilder,
|
||||
description: "use hashbrown's DefaultHashBuilder",
|
||||
},
|
||||
env_var_value! {
|
||||
key: "always_zero",
|
||||
build_hasher: std::hash::BuildHasherDefault<AlwaysZeroHasher>,
|
||||
description: "use a hasher that always returns 0 for all hashes,\n \
|
||||
this is useful for checking that PartialEq impls are correct",
|
||||
},
|
||||
];
|
||||
|
||||
fn report_bad_env_var(msg: impl std::fmt::Display) -> ! {
|
||||
let mut msg = format!("{ENV_VAR_NAME}: {msg}\n");
|
||||
for &EnvVarValue {
|
||||
key,
|
||||
try_get_make_build_hasher,
|
||||
description,
|
||||
} in ENV_VAR_VALUES
|
||||
{
|
||||
let availability = match try_get_make_build_hasher() {
|
||||
Some(_) => "available",
|
||||
None => "unavailable",
|
||||
};
|
||||
writeln!(msg, "{key}: ({availability})\n {description}").expect("can't fail");
|
||||
}
|
||||
std::io::stderr()
|
||||
.write_all(msg.as_bytes())
|
||||
.expect("should be able to write to stderr");
|
||||
std::process::abort();
|
||||
}
|
||||
|
||||
impl Default for DefaultBuildHasher {
|
||||
fn default() -> Self {
|
||||
static DEFAULT_FN: LazyLock<BoxDynMakeBuildHasher> = LazyLock::new(|| {
|
||||
let var = std::env::var_os(ENV_VAR_NAME);
|
||||
let var = var.as_deref().unwrap_or("std".as_ref());
|
||||
for &EnvVarValue {
|
||||
key,
|
||||
try_get_make_build_hasher,
|
||||
description: _,
|
||||
} in ENV_VAR_VALUES
|
||||
{
|
||||
if var.as_encoded_bytes().eq_ignore_ascii_case(key.as_bytes()) {
|
||||
return try_get_make_build_hasher().unwrap_or_else(|| {
|
||||
report_bad_env_var(format_args!(
|
||||
"unavailable hasher: {key} (is the appropriate feature enabled?)"
|
||||
));
|
||||
});
|
||||
}
|
||||
}
|
||||
report_bad_env_var(format_args!("unrecognized hasher: {var:?}"));
|
||||
});
|
||||
Self(DEFAULT_FN())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DefaultHasher(BoxDynHasher);
|
||||
|
||||
impl BuildHasher for DefaultBuildHasher {
|
||||
type Hasher = DefaultHasher;
|
||||
|
||||
fn build_hasher(&self) -> Self::Hasher {
|
||||
DefaultHasher(self.0.build_hasher())
|
||||
}
|
||||
}
|
||||
|
||||
impl Hasher for DefaultHasher {
|
||||
fn finish(&self) -> u64 {
|
||||
self.0.finish()
|
||||
}
|
||||
|
||||
fn write(&mut self, bytes: &[u8]) {
|
||||
self.0.write(bytes)
|
||||
}
|
||||
|
||||
fn write_u8(&mut self, i: u8) {
|
||||
self.0.write_u8(i)
|
||||
}
|
||||
|
||||
fn write_u16(&mut self, i: u16) {
|
||||
self.0.write_u16(i)
|
||||
}
|
||||
|
||||
fn write_u32(&mut self, i: u32) {
|
||||
self.0.write_u32(i)
|
||||
}
|
||||
|
||||
fn write_u64(&mut self, i: u64) {
|
||||
self.0.write_u64(i)
|
||||
}
|
||||
|
||||
fn write_u128(&mut self, i: u128) {
|
||||
self.0.write_u128(i)
|
||||
}
|
||||
|
||||
fn write_usize(&mut self, i: usize) {
|
||||
self.0.write_usize(i)
|
||||
}
|
||||
|
||||
fn write_i8(&mut self, i: i8) {
|
||||
self.0.write_i8(i)
|
||||
}
|
||||
|
||||
fn write_i16(&mut self, i: i16) {
|
||||
self.0.write_i16(i)
|
||||
}
|
||||
|
||||
fn write_i32(&mut self, i: i32) {
|
||||
self.0.write_i32(i)
|
||||
}
|
||||
|
||||
fn write_i64(&mut self, i: i64) {
|
||||
self.0.write_i64(i)
|
||||
}
|
||||
|
||||
fn write_i128(&mut self, i: i128) {
|
||||
self.0.write_i128(i)
|
||||
}
|
||||
|
||||
fn write_isize(&mut self, i: isize) {
|
||||
self.0.write_isize(i)
|
||||
}
|
||||
}

crates/fayalite/src/vendor.rs (new file, 12 lines)

@@ -0,0 +1,12 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

pub mod xilinx;

pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
    xilinx::built_in_job_kinds()
}

pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = crate::platform::DynPlatform> {
    xilinx::built_in_platforms()
}

crates/fayalite/src/vendor/xilinx.rs (vendored, new file, 207 lines)

@@ -0,0 +1,207 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
annotations::make_annotation_enum,
|
||||
build::{GlobalParams, ToArgs, WriteArgs},
|
||||
intern::Interned,
|
||||
prelude::{DynPlatform, Platform},
|
||||
};
|
||||
use clap::ValueEnum;
|
||||
use ordered_float::NotNan;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
|
||||
pub mod arty_a7;
|
||||
pub mod primitives;
|
||||
pub mod yosys_nextpnr_prjxray;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct XdcIOStandardAnnotation {
|
||||
pub value: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct XdcLocationAnnotation {
|
||||
pub location: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct XdcCreateClockAnnotation {
|
||||
/// clock period in nanoseconds
|
||||
pub period: NotNan<f64>,
|
||||
}
|
||||
|
||||
make_annotation_enum! {
|
||||
#[non_exhaustive]
|
||||
pub enum XilinxAnnotation {
|
||||
XdcIOStandard(XdcIOStandardAnnotation),
|
||||
XdcLocation(XdcLocationAnnotation),
|
||||
XdcCreateClock(XdcCreateClockAnnotation),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||
pub struct XilinxArgs {
|
||||
#[arg(long)]
|
||||
pub device: Option<Device>,
|
||||
}
|
||||
|
||||
impl XilinxArgs {
|
||||
pub fn require_device(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
global_params: &GlobalParams,
|
||||
) -> clap::error::Result<Device> {
|
||||
if let Some(device) = self.device {
|
||||
return Ok(device);
|
||||
}
|
||||
if let Some(device) =
|
||||
platform.and_then(|platform| platform.aspects().get_single_by_type::<Device>().copied())
|
||||
{
|
||||
return Ok(device);
|
||||
}
|
||||
Err(global_params.clap_error(
|
||||
clap::error::ErrorKind::MissingRequiredArgument,
|
||||
"missing --device option",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToArgs for XilinxArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
if let Some(device) = self.device {
|
||||
args.write_long_option_eq("device", device.as_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! make_device_enum {
|
||||
($vis:vis enum $Device:ident {
|
||||
$(
|
||||
#[
|
||||
name = $name:literal,
|
||||
xray_part = $xray_part:literal,
|
||||
xray_device = $xray_device:literal,
|
||||
xray_family = $xray_family:literal,
|
||||
]
|
||||
$variant:ident,
|
||||
)*
|
||||
}) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, ValueEnum)]
|
||||
$vis enum $Device {
|
||||
$(
|
||||
#[value(name = $name, alias = $xray_part)]
|
||||
$variant,
|
||||
)*
|
||||
}
|
||||
|
||||
impl $Device {
|
||||
$vis fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $name,)*
|
||||
}
|
||||
}
|
||||
$vis fn xray_part(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $xray_part,)*
|
||||
}
|
||||
}
|
||||
$vis fn xray_device(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $xray_device,)*
|
||||
}
|
||||
}
|
||||
$vis fn xray_family(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $xray_family,)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct DeviceVisitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for DeviceVisitor {
|
||||
type Value = $Device;
|
||||
|
||||
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("a Xilinx device string")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match $Device::from_str(v, false) {
|
||||
Ok(v) => Ok(v),
|
||||
Err(_) => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match str::from_utf8(v).ok().and_then(|v| $Device::from_str(v, false).ok()) {
|
||||
Some(v) => Ok(v),
|
||||
None => Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for $Device {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
deserializer.deserialize_string(DeviceVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for $Device {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.as_str().serialize(serializer)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
make_device_enum! {
|
||||
pub enum Device {
|
||||
#[
|
||||
name = "xc7a35ticsg324-1L",
|
||||
xray_part = "xc7a35tcsg324-1",
|
||||
xray_device = "xc7a35t",
|
||||
xray_family = "artix7",
|
||||
]
|
||||
Xc7a35ticsg324_1l,
|
||||
#[
|
||||
name = "xc7a100ticsg324-1L",
|
||||
xray_part = "xc7a100tcsg324-1",
|
||||
xray_device = "xc7a100t",
|
||||
xray_family = "artix7",
|
||||
]
|
||||
Xc7a100ticsg324_1l,
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Device {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
|
||||
arty_a7::built_in_job_kinds()
|
||||
.into_iter()
|
||||
.chain(yosys_nextpnr_prjxray::built_in_job_kinds())
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = crate::platform::DynPlatform> {
|
||||
arty_a7::built_in_platforms()
|
||||
.into_iter()
|
||||
.chain(yosys_nextpnr_prjxray::built_in_platforms())
|
||||
}

crates/fayalite/src/vendor/xilinx/arty_a7.rs (vendored, new file, 404 lines)

@@ -0,0 +1,404 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
intern::{Intern, Interned},
|
||||
module::{instance_with_loc, reg_builder_with_loc, wire_with_loc},
|
||||
platform::{
|
||||
DynPlatform, Peripheral, PeripheralRef, Peripherals, PeripheralsBuilderFactory,
|
||||
PeripheralsBuilderFinished, Platform, PlatformAspectSet,
|
||||
peripherals::{ClockInput, Led, RgbLed, Uart},
|
||||
},
|
||||
prelude::*,
|
||||
vendor::xilinx::{
|
||||
Device, XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation,
|
||||
primitives,
|
||||
},
|
||||
};
|
||||
use ordered_float::NotNan;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
macro_rules! arty_a7_platform {
|
||||
(
|
||||
$vis:vis enum $ArtyA7Platform:ident {
|
||||
$(#[name = $name:literal, device = $device:ident]
|
||||
$Variant:ident,)*
|
||||
}
|
||||
) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
$vis enum $ArtyA7Platform {
|
||||
$($Variant,)*
|
||||
}
|
||||
|
||||
impl $ArtyA7Platform {
|
||||
$vis const VARIANTS: &'static [Self] = &[$(Self::$Variant,)*];
|
||||
$vis fn device(self) -> Device {
|
||||
match self {
|
||||
$(Self::$Variant => Device::$device,)*
|
||||
}
|
||||
}
|
||||
$vis const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$Variant => $name,)*
|
||||
}
|
||||
}
|
||||
fn get_aspects(self) -> &'static PlatformAspectSet {
|
||||
match self {
|
||||
$(Self::$Variant => {
|
||||
static ASPECTS_SET: OnceLock<PlatformAspectSet> = OnceLock::new();
|
||||
ASPECTS_SET.get_or_init(|| self.make_aspects())
|
||||
})*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
arty_a7_platform! {
|
||||
pub enum ArtyA7Platform {
|
||||
#[name = "arty-a7-35t", device = Xc7a35ticsg324_1l]
|
||||
ArtyA7_35T,
|
||||
#[name = "arty-a7-100t", device = Xc7a100ticsg324_1l]
|
||||
ArtyA7_100T,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ArtyA7Peripherals {
|
||||
clk100_div_pow2: [Peripheral<ClockInput>; 4],
|
||||
rst: Peripheral<Reset>,
|
||||
rst_sync: Peripheral<SyncReset>,
|
||||
ld0: Peripheral<RgbLed>,
|
||||
ld1: Peripheral<RgbLed>,
|
||||
ld2: Peripheral<RgbLed>,
|
||||
ld3: Peripheral<RgbLed>,
|
||||
ld4: Peripheral<Led>,
|
||||
ld5: Peripheral<Led>,
|
||||
ld6: Peripheral<Led>,
|
||||
ld7: Peripheral<Led>,
|
||||
uart: Peripheral<Uart>,
|
||||
// TODO: add rest of peripherals when we need them
|
||||
}
|
||||
|
||||
impl Peripherals for ArtyA7Peripherals {
|
||||
fn append_peripherals<'a>(&'a self, peripherals: &mut Vec<PeripheralRef<'a, CanonicalType>>) {
|
||||
let Self {
|
||||
clk100_div_pow2,
|
||||
rst,
|
||||
rst_sync,
|
||||
ld0,
|
||||
ld1,
|
||||
ld2,
|
||||
ld3,
|
||||
ld4,
|
||||
ld5,
|
||||
ld6,
|
||||
ld7,
|
||||
uart,
|
||||
} = self;
|
||||
clk100_div_pow2.append_peripherals(peripherals);
|
||||
rst.append_peripherals(peripherals);
|
||||
rst_sync.append_peripherals(peripherals);
|
||||
ld0.append_peripherals(peripherals);
|
||||
ld1.append_peripherals(peripherals);
|
||||
ld2.append_peripherals(peripherals);
|
||||
ld3.append_peripherals(peripherals);
|
||||
ld4.append_peripherals(peripherals);
|
||||
ld5.append_peripherals(peripherals);
|
||||
ld6.append_peripherals(peripherals);
|
||||
ld7.append_peripherals(peripherals);
|
||||
uart.append_peripherals(peripherals);
|
||||
}
|
||||
}
|
||||
|
||||
impl ArtyA7Platform {
|
||||
fn make_aspects(self) -> PlatformAspectSet {
|
||||
let mut retval = PlatformAspectSet::new();
|
||||
retval.insert_new(self.device());
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl_module(extern)]
|
||||
fn reset_sync() {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let inp: Bool = m.input();
|
||||
#[hdl]
|
||||
let out: SyncReset = m.output();
|
||||
m.annotate_module(BlackBoxInlineAnnotation {
|
||||
path: "fayalite_arty_a7_reset_sync.v".intern(),
|
||||
text: r#"module __fayalite_arty_a7_reset_sync(input clk, input inp, output out);
|
||||
wire reset_0_out;
|
||||
(* ASYNC_REG = "TRUE" *)
|
||||
FDPE #(
|
||||
.INIT(1'b1)
|
||||
) reset_0 (
|
||||
.Q(reset_0_out),
|
||||
.C(clk),
|
||||
.CE(1'b1),
|
||||
.PRE(inp),
|
||||
.D(1'b0)
|
||||
);
|
||||
(* ASYNC_REG = "TRUE" *)
|
||||
FDPE #(
|
||||
.INIT(1'b1)
|
||||
) reset_1 (
|
||||
.Q(out),
|
||||
.C(clk),
|
||||
.CE(1'b1),
|
||||
.PRE(inp),
|
||||
.D(reset_0_out)
|
||||
);
|
||||
endmodule
|
||||
"#
|
||||
.intern(),
|
||||
});
|
||||
m.verilog_name("__fayalite_arty_a7_reset_sync");
|
||||
}
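// The black box above is a standard two-flop reset synchronizer: both FDPE flops preset to 1
// asynchronously while `inp` is high, and a 0 shifts through them once `inp` goes low, so
// `out` asserts immediately and deasserts synchronously two clock edges later. A minimal
// usage sketch (mirroring the instantiation further down; the signal names are illustrative
// assumptions, not part of this file):
//
//     let sync = instance(reset_sync());
//     connect(sync.clk, some_clock);         // clock of the target domain (assumed name)
//     connect(sync.inp, raw_async_reset);    // any asynchronous reset request (assumed name)
//     // `sync.out` is then a SyncReset that is safe to use in that clock domain.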
|
||||
|
||||
impl Platform for ArtyA7Platform {
|
||||
type Peripherals = ArtyA7Peripherals;
|
||||
|
||||
fn name(&self) -> Interned<str> {
|
||||
self.as_str().intern()
|
||||
}
|
||||
|
||||
fn new_peripherals<'builder>(
|
||||
&self,
|
||||
builder_factory: PeripheralsBuilderFactory<'builder>,
|
||||
) -> (Self::Peripherals, PeripheralsBuilderFinished<'builder>) {
|
||||
let mut builder = builder_factory.builder();
|
||||
|
||||
let clk100_div_pow2 = std::array::from_fn(|log2_divisor| {
|
||||
let divisor = 1u64 << log2_divisor;
|
||||
let name = if divisor != 1 {
|
||||
format!("clk100_div_{divisor}")
|
||||
} else {
|
||||
"clk100".into()
|
||||
};
|
||||
builder.input_peripheral(name, ClockInput::new(100e6 / divisor as f64))
|
||||
});
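// log2_divisor ranges over 0..4 here, so this yields four clock-input peripherals:
// clk100 (100 MHz), clk100_div_2 (50 MHz), clk100_div_4 (25 MHz) and clk100_div_8 (12.5 MHz);
// add_conflicts below marks them mutually exclusive so a design can use at most one of them.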
|
||||
builder.add_conflicts(Vec::from_iter(clk100_div_pow2.iter().map(|v| v.id())));
|
||||
(
|
||||
ArtyA7Peripherals {
|
||||
clk100_div_pow2,
|
||||
rst: builder.input_peripheral("rst", Reset),
|
||||
rst_sync: builder.input_peripheral("rst_sync", SyncReset),
|
||||
ld0: builder.output_peripheral("ld0", RgbLed),
|
||||
ld1: builder.output_peripheral("ld1", RgbLed),
|
||||
ld2: builder.output_peripheral("ld2", RgbLed),
|
||||
ld3: builder.output_peripheral("ld3", RgbLed),
|
||||
ld4: builder.output_peripheral("ld4", Led),
|
||||
ld5: builder.output_peripheral("ld5", Led),
|
||||
ld6: builder.output_peripheral("ld6", Led),
|
||||
ld7: builder.output_peripheral("ld7", Led),
|
||||
uart: builder.output_peripheral("uart", Uart),
|
||||
},
|
||||
builder.finish(),
|
||||
)
|
||||
}
|
||||
|
||||
fn source_location(&self) -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn add_peripherals_in_wrapper_module(&self, m: &ModuleBuilder, peripherals: Self::Peripherals) {
|
||||
let ArtyA7Peripherals {
|
||||
clk100_div_pow2,
|
||||
rst,
|
||||
rst_sync,
|
||||
ld0,
|
||||
ld1,
|
||||
ld2,
|
||||
ld3,
|
||||
ld4,
|
||||
ld5,
|
||||
ld6,
|
||||
ld7,
|
||||
uart,
|
||||
} = peripherals;
|
||||
let make_buffered_input = |name: &str, location: &str, io_standard: &str, invert: bool| {
|
||||
let pin = m.input_with_loc(name, SourceLocation::builtin(), Bool);
|
||||
annotate(
|
||||
pin,
|
||||
XdcLocationAnnotation {
|
||||
location: location.intern(),
|
||||
},
|
||||
);
|
||||
annotate(
|
||||
pin,
|
||||
XdcIOStandardAnnotation {
|
||||
value: io_standard.intern(),
|
||||
},
|
||||
);
|
||||
let buf = instance_with_loc(
|
||||
&format!("{name}_buf"),
|
||||
primitives::IBUF(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
connect(buf.I, pin);
|
||||
if invert { !buf.O } else { buf.O }
|
||||
};
|
||||
let make_buffered_output = |name: &str, location: &str, io_standard: &str| {
|
||||
let pin = m.output_with_loc(name, SourceLocation::builtin(), Bool);
|
||||
annotate(
|
||||
pin,
|
||||
XdcLocationAnnotation {
|
||||
location: location.intern(),
|
||||
},
|
||||
);
|
||||
annotate(
|
||||
pin,
|
||||
XdcIOStandardAnnotation {
|
||||
value: io_standard.intern(),
|
||||
},
|
||||
);
|
||||
let buf = instance_with_loc(
|
||||
&format!("{name}_buf"),
|
||||
primitives::OBUFT(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
connect(pin, buf.O);
|
||||
connect(buf.T, false);
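// OBUFT's T pin is the tri-state control: tying it low keeps the output permanently driven,
// so the buffer behaves like a plain output buffer here.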
|
||||
buf.I
|
||||
};
|
||||
let mut frequency = clk100_div_pow2[0].ty().frequency();
|
||||
let mut log2_divisor = 0;
|
||||
let mut clk = None;
|
||||
for (cur_log2_divisor, p) in clk100_div_pow2.into_iter().enumerate() {
|
||||
let Some(p) = p.into_used() else {
|
||||
continue;
|
||||
};
|
||||
debug_assert!(
|
||||
clk.is_none(),
|
||||
"conflict-handling logic should ensure at most one clock is used",
|
||||
);
|
||||
frequency = p.ty().frequency();
|
||||
clk = Some(p);
|
||||
log2_divisor = cur_log2_divisor;
|
||||
}
|
||||
let clk100_buf = make_buffered_input("clk100", "E3", "LVCMOS33", false);
|
||||
let startup = instance_with_loc(
|
||||
"startup",
|
||||
primitives::STARTUPE2_default_inputs(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
let clk_global_buf = instance_with_loc(
|
||||
"clk_global_buf",
|
||||
primitives::BUFGCE(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
connect(clk_global_buf.CE, startup.EOS);
|
||||
let mut clk_global_buf_in = clk100_buf.to_clock();
|
||||
for prev_log2_divisor in 0..log2_divisor {
|
||||
let prev_divisor = 1u64 << prev_log2_divisor;
|
||||
let clk_in = wire_with_loc(
|
||||
&format!("clk_div_{prev_divisor}"),
|
||||
SourceLocation::builtin(),
|
||||
Clock,
|
||||
);
|
||||
connect(clk_in, clk_global_buf_in);
|
||||
annotate(
|
||||
clk_in,
|
||||
XdcCreateClockAnnotation {
|
||||
period: NotNan::new(1e9 / (100e6 / prev_divisor as f64))
|
||||
.expect("known to be valid"),
|
||||
},
|
||||
);
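// e.g. prev_divisor == 2 gives a 50 MHz intermediate clock, so the XDC period constraint is
// 1e9 / 50e6 = 20 ns.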
|
||||
annotate(clk_in, DontTouchAnnotation);
|
||||
let cd = wire_with_loc(
|
||||
&format!("clk_div_{prev_divisor}_in"),
|
||||
SourceLocation::builtin(),
|
||||
ClockDomain[AsyncReset],
|
||||
);
|
||||
connect(cd.clk, clk_in);
|
||||
connect(cd.rst, (!startup.EOS).to_async_reset());
|
||||
let divider = reg_builder_with_loc("divider", SourceLocation::builtin())
|
||||
.clock_domain(cd)
|
||||
.reset(false)
|
||||
.build();
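// A register that toggles every cycle of its own clock domain produces a square wave at half
// that clock's frequency, so each loop iteration halves the clock before it reaches the BUFGCE.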
|
||||
connect(divider, !divider);
|
||||
clk_global_buf_in = divider.to_clock();
|
||||
}
|
||||
connect(clk_global_buf.I, clk_global_buf_in);
|
||||
let clk_out = wire_with_loc("clk_out", SourceLocation::builtin(), Clock);
|
||||
connect(clk_out, clk_global_buf.O);
|
||||
annotate(
|
||||
clk_out,
|
||||
XdcCreateClockAnnotation {
|
||||
period: NotNan::new(1e9 / frequency).expect("known to be valid"),
|
||||
},
|
||||
);
|
||||
annotate(clk_out, DontTouchAnnotation);
|
||||
if let Some(clk) = clk {
|
||||
connect(clk.instance_io_field().clk, clk_out);
|
||||
}
|
||||
let rst_value = {
|
||||
let rst_buf = make_buffered_input("rst", "C2", "LVCMOS33", true);
|
||||
let rst_sync = instance_with_loc("rst_sync", reset_sync(), SourceLocation::builtin());
|
||||
connect(rst_sync.clk, clk_out);
|
||||
connect(rst_sync.inp, rst_buf | !startup.EOS);
|
||||
rst_sync.out
|
||||
};
|
||||
if let Some(rst) = rst.into_used() {
|
||||
connect(rst.instance_io_field(), rst_value.to_reset());
|
||||
}
|
||||
if let Some(rst_sync) = rst_sync.into_used() {
|
||||
connect(rst_sync.instance_io_field(), rst_value);
|
||||
}
|
||||
let rgb_leds = [
|
||||
(ld0, ("G6", "F6", "E1")),
|
||||
(ld1, ("G3", "J4", "G4")),
|
||||
(ld2, ("J3", "J2", "H4")),
|
||||
(ld3, ("K1", "H6", "K2")),
|
||||
];
|
||||
for (rgb_led, (r_loc, g_loc, b_loc)) in rgb_leds {
|
||||
let r = make_buffered_output(&format!("{}_r", rgb_led.name()), r_loc, "LVCMOS33");
|
||||
let g = make_buffered_output(&format!("{}_g", rgb_led.name()), g_loc, "LVCMOS33");
|
||||
let b = make_buffered_output(&format!("{}_b", rgb_led.name()), b_loc, "LVCMOS33");
|
||||
if let Some(rgb_led) = rgb_led.into_used() {
|
||||
connect(r, rgb_led.instance_io_field().r);
|
||||
connect(g, rgb_led.instance_io_field().g);
|
||||
connect(b, rgb_led.instance_io_field().b);
|
||||
} else {
|
||||
connect(r, false);
|
||||
connect(g, false);
|
||||
connect(b, false);
|
||||
}
|
||||
}
|
||||
let leds = [(ld4, "H5"), (ld5, "J5"), (ld6, "T9"), (ld7, "T10")];
|
||||
for (led, loc) in leds {
|
||||
let o = make_buffered_output(&led.name(), loc, "LVCMOS33");
|
||||
if let Some(led) = led.into_used() {
|
||||
connect(o, led.instance_io_field().on);
|
||||
} else {
|
||||
connect(o, false);
|
||||
}
|
||||
}
|
||||
let uart_tx = make_buffered_output("uart_tx", "D10", "LVCMOS33");
|
||||
let uart_rx = make_buffered_input("uart_rx", "A9", "LVCMOS33", false);
|
||||
if let Some(uart) = uart.into_used() {
|
||||
connect(uart_tx, uart.instance_io_field().tx);
|
||||
connect(uart.instance_io_field().rx, uart_rx);
|
||||
} else {
|
||||
connect(uart_tx, true); // idle
|
||||
}
|
||||
}
|
||||
|
||||
fn aspects(&self) -> PlatformAspectSet {
|
||||
self.get_aspects().clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
|
||||
[]
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = DynPlatform> {
|
||||
ArtyA7Platform::VARIANTS
|
||||
.iter()
|
||||
.map(|&v| DynPlatform::new(v))
|
||||
}
|
||||
50 crates/fayalite/src/vendor/xilinx/primitives.rs vendored Normal file
@@ -0,0 +1,50 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
#[hdl_module(extern)]
|
||||
pub fn IBUF() {
|
||||
m.verilog_name("IBUF");
|
||||
#[hdl]
|
||||
let O: Bool = m.output();
|
||||
#[hdl]
|
||||
let I: Bool = m.input();
|
||||
}
|
||||
|
||||
#[hdl_module(extern)]
|
||||
pub fn OBUFT() {
|
||||
m.verilog_name("OBUFT");
|
||||
#[hdl]
|
||||
let O: Bool = m.output();
|
||||
#[hdl]
|
||||
let I: Bool = m.input();
|
||||
#[hdl]
|
||||
let T: Bool = m.input();
|
||||
}
|
||||
|
||||
#[hdl_module(extern)]
|
||||
pub fn BUFGCE() {
|
||||
m.verilog_name("BUFGCE");
|
||||
#[hdl]
|
||||
let O: Clock = m.output();
|
||||
#[hdl]
|
||||
let CE: Bool = m.input();
|
||||
#[hdl]
|
||||
let I: Clock = m.input();
|
||||
}
|
||||
|
||||
#[hdl_module(extern)]
|
||||
pub fn STARTUPE2_default_inputs() {
|
||||
m.verilog_name("STARTUPE2");
|
||||
#[hdl]
|
||||
let CFGCLK: Clock = m.output();
|
||||
#[hdl]
|
||||
let CFGMCLK: Clock = m.output();
|
||||
#[hdl]
|
||||
let EOS: Bool = m.output();
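// EOS is STARTUPE2's "end of startup" output; the platform code above uses it to gate the
// global clock buffer and to hold the design in reset until FPGA configuration has finished.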
|
||||
#[hdl]
|
||||
let PREQ: Bool = m.output();
|
||||
}
|
||||
1043 crates/fayalite/src/vendor/xilinx/yosys_nextpnr_prjxray.rs vendored Normal file
File diff suppressed because it is too large
@@ -37,6 +37,18 @@ impl<T: Type> Wire<T> {
|
|||
ty: ty.canonical(),
|
||||
}
|
||||
}
|
||||
pub fn from_canonical(v: Wire<CanonicalType>) -> Self {
|
||||
let Wire {
|
||||
name,
|
||||
source_location,
|
||||
ty,
|
||||
} = v;
|
||||
Self {
|
||||
name,
|
||||
source_location,
|
||||
ty: T::from_canonical(ty),
|
||||
}
|
||||
}
|
||||
pub fn ty(&self) -> T {
|
||||
self.ty
|
||||
}
|
||||
|
|
|
|||
283 crates/fayalite/tests/formal.rs Normal file
@@ -0,0 +1,283 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! Formal tests in Fayalite
|
||||
|
||||
use fayalite::prelude::*;
|
||||
|
||||
/// Test hidden state
|
||||
///
|
||||
/// Hidden state can cause problems for induction, since the formal engine
/// is free to assign arbitrary values to the state registers, making it
/// traverse states that satisfy the assertions but are unreachable in practice.
|
||||
///
|
||||
/// One solution is to go sufficiently far into the past that the engine is
/// forced to eventually pass through a reachable state. This may be hampered
/// by the existence of loops in the state space, in which case assumptions
/// may be added to break them.
|
||||
///
|
||||
/// Another solution is to "open the black box" and add additional
|
||||
/// assertions involving the hidden state, so that the unreachable states
|
||||
/// become invalid as well.
|
||||
///
|
||||
/// Both approaches are taken here.
|
||||
///
|
||||
/// See [Claire Wolf's presentation] and [Zipcpu blog article].
|
||||
///
|
||||
/// [Claire Wolf's presentation]: https://web.archive.org/web/20200115081517fw_/http://www.clifford.at/papers/2017/smtbmc-sby/
|
||||
/// [Zipcpu blog article]: https://zipcpu.com/blog/2018/03/10/induction-exercise.html
|
||||
mod hidden_state {
|
||||
use super::*;
|
||||
/// Test hidden state by shift registers
|
||||
///
|
||||
/// The code implements the ideas from an article in the [Zipcpu blog]. Two
/// shift registers are fed from the same input, so they should always hold
/// the same value. However, the only observable is a comparison of their
/// last bits; all the other bits are hidden. To complicate matters, an
/// enable signal creates a loop in the state space.
|
||||
///
|
||||
/// [Zipcpu blog]: https://zipcpu.com/blog/2018/03/10/induction-exercise.html
|
||||
#[test]
|
||||
fn shift_register() {
|
||||
enum ConstraintMode {
|
||||
WithExtraAssertions,
|
||||
WithExtraAssumptions,
|
||||
}
|
||||
use ConstraintMode::*;
|
||||
#[hdl_module]
|
||||
fn test_module(constraint_mode: ConstraintMode) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let cd = wire();
|
||||
connect(
|
||||
cd,
|
||||
#[hdl]
|
||||
ClockDomain {
|
||||
clk,
|
||||
rst: formal_reset().to_reset(),
|
||||
},
|
||||
);
|
||||
// input signal for the shift registers
|
||||
#[hdl]
|
||||
let i: Bool = wire();
|
||||
connect(i, any_seq(Bool));
|
||||
// shift enable signal
|
||||
#[hdl]
|
||||
let en: Bool = wire();
|
||||
connect(en, any_seq(Bool));
|
||||
// comparison output
|
||||
#[hdl]
|
||||
let o: Bool = wire();
|
||||
// shift registers, with enable
|
||||
#[hdl]
|
||||
let r1 = reg_builder().clock_domain(cd).reset(0u8);
|
||||
#[hdl]
|
||||
let r2 = reg_builder().clock_domain(cd).reset(0u8);
|
||||
#[hdl]
|
||||
if en {
|
||||
connect_any(r1, (r1 << 1) | i.cast_to(UInt[1]));
|
||||
connect_any(r2, (r2 << 1) | i.cast_to(UInt[1]));
|
||||
}
|
||||
// compare last bits of both shift registers
|
||||
connect(o, r1[7].cmp_eq(r2[7]));
|
||||
|
||||
// what we want to prove: last bits are always equal
|
||||
hdl_assert(clk, o, "");
|
||||
|
||||
// additional terms below are only needed to assist with the induction proof
|
||||
match constraint_mode {
|
||||
WithExtraAssertions => {
|
||||
// "Open the box": add assertions about hidden state.
|
||||
// In this case, the hidden bits are also always equal.
|
||||
hdl_assert(clk, r1.cmp_eq(r2), "");
|
||||
}
|
||||
WithExtraAssumptions => {
|
||||
// Break the loop, do not allow "en" to remain low forever
|
||||
#[hdl]
|
||||
let past_en_reg = reg_builder().clock_domain(cd).reset(false);
|
||||
connect(past_en_reg, en);
|
||||
hdl_assume(clk, past_en_reg | en, "");
|
||||
}
|
||||
}
|
||||
}
|
||||
// we need a minimum of 16 steps so we can constrain all eight shift register bits,
// given that the shift may be disabled at most every other cycle.
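// A quick check of the bound: 8 hidden bits, and the assumption above allows the shift to be
// disabled at most every other cycle, so 8 * 2 = 16 cycles of history are needed.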
|
||||
assert_formal(
|
||||
"shift_register_with_assumptions",
|
||||
test_module(WithExtraAssumptions),
|
||||
FormalMode::Prove,
|
||||
16,
|
||||
None,
|
||||
Default::default(),
|
||||
);
|
||||
// here a couple of cycles is enough
|
||||
assert_formal(
|
||||
"shift_register_with_assertions",
|
||||
test_module(WithExtraAssertions),
|
||||
FormalMode::Prove,
|
||||
2,
|
||||
None,
|
||||
Default::default(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Formal verification of designs containing memories
|
||||
///
|
||||
/// There is a trick for memories, described in the [Zipcpu blog].
|
||||
/// First, select a fixed but arbitrary memory address, monitoring all reads
|
||||
/// and writes made to it. Then, assert that anything read from that location
|
||||
/// matches the last stored value.
|
||||
///
|
||||
/// A difficulty for induction is that the memory represents [hidden_state]. A
/// solution is to add an extra read port to the memory and assert that the
/// selected memory location really does contain the last stored value.
/// This additional debug port exists only to assist the proof and is
/// unused (and optimized out) in normal use.
|
||||
///
|
||||
/// [Zipcpu blog]: <https://zipcpu.com/zipcpu/2018/07/13/memories.html>
|
||||
mod memory {
|
||||
use super::*;
|
||||
|
||||
/// Test a simple 8-bit SRAM model
|
||||
#[test]
|
||||
fn test_sram() {
|
||||
#[hdl]
|
||||
struct WritePort<AddrWidth: Size> {
|
||||
addr: UIntType<AddrWidth>,
|
||||
data: UInt<8>,
|
||||
en: Bool,
|
||||
}
|
||||
#[hdl]
|
||||
struct ReadPort<AddrWidth: Size> {
|
||||
addr: UIntType<AddrWidth>,
|
||||
#[hdl(flip)]
|
||||
data: UInt<8>,
|
||||
}
|
||||
/// This debug port is only meant to assist the proof.
|
||||
/// For normal use in a design, a wrapper could be provided,
|
||||
/// omitting this port.
|
||||
/// The implementation is forbidden to use any information
|
||||
/// provided on this port in its internal workings.
|
||||
#[hdl]
|
||||
struct DebugPort<AddrWidth: Size> {
|
||||
selected: UIntType<AddrWidth>,
|
||||
stored: UInt<8>,
|
||||
wrote: Bool,
|
||||
}
|
||||
/// simple 1R1W SRAM model (one asynchronous read port and one
|
||||
/// independent write port) with `n`-bit address width
|
||||
#[hdl_module]
|
||||
fn example_sram(n: usize) {
|
||||
#[hdl]
|
||||
let wr: WritePort<DynSize> = m.input(WritePort[n]);
|
||||
#[hdl]
|
||||
let rd: ReadPort<DynSize> = m.input(ReadPort[n]);
|
||||
#[hdl]
|
||||
let cd: ClockDomain = m.input();
|
||||
|
||||
// declare and connect the backing memory
|
||||
#[hdl]
|
||||
let mut mem = memory();
|
||||
mem.depth(1 << n);
|
||||
let read_port = mem.new_read_port();
|
||||
let write_port = mem.new_write_port();
|
||||
connect(write_port.clk, cd.clk);
|
||||
connect(write_port.addr, wr.addr);
|
||||
connect(write_port.en, wr.en);
|
||||
connect(write_port.data, wr.data);
|
||||
connect(write_port.mask, true);
|
||||
connect(read_port.clk, cd.clk);
|
||||
connect(read_port.addr, rd.addr);
|
||||
connect(read_port.en, true);
|
||||
connect(rd.data, read_port.data);
|
||||
|
||||
// To assist with induction, ensure that the chosen memory location
// always contains the last value written to it.
|
||||
#[hdl]
|
||||
let dbg: DebugPort<DynSize> = m.input(DebugPort[n]);
|
||||
let debug_port = mem.new_read_port();
|
||||
connect(debug_port.en, true);
|
||||
connect(debug_port.clk, cd.clk);
|
||||
connect(debug_port.addr, dbg.selected);
|
||||
#[hdl]
|
||||
if dbg.wrote {
|
||||
hdl_assert(cd.clk, debug_port.data.cmp_eq(dbg.stored), "");
|
||||
// Try commenting out the assert above: induction will fail.
// Opening the trace, it can be seen that the memory contents
// and the stored value don't match, which is an unreachable
// state. By adding the assertion above, that state becomes
// invalid as well, so induction will skip this kind of situation.
|
||||
}
|
||||
}
|
||||
|
||||
/// formal verification of the SRAM module, parametrized by the
|
||||
/// address bit-width
|
||||
#[hdl_module]
|
||||
fn test_module(n: usize) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk,
|
||||
rst: formal_reset().to_reset(),
|
||||
};
|
||||
|
||||
// instantiate the SRAM model, connecting its inputs to
|
||||
// a random sequence
|
||||
#[hdl]
|
||||
let rd: ReadPort<DynSize> = wire(ReadPort[n]);
|
||||
connect(rd.addr, any_seq(UInt[n]));
|
||||
#[hdl]
|
||||
let wr: WritePort<DynSize> = wire(WritePort[n]);
|
||||
connect(wr.addr, any_seq(UInt[n]));
|
||||
connect(wr.data, any_seq(UInt::<8>::new_static()));
|
||||
connect(wr.en, any_seq(Bool));
|
||||
#[hdl]
|
||||
let dut = instance(example_sram(n));
|
||||
connect(dut.cd, cd);
|
||||
connect(dut.rd, rd);
|
||||
connect(dut.wr, wr);
|
||||
|
||||
// select a fixed but arbitrary test address
|
||||
#[hdl]
|
||||
let selected = wire(UInt[n]);
|
||||
connect(selected, any_const(UInt[n]));
|
||||
// store the last value written to that address
|
||||
#[hdl]
|
||||
let stored: UInt<8> = reg_builder().clock_domain(cd).reset(0u8);
|
||||
// since memories are not initialized, track whether we wrote to the
|
||||
// memory at least once
|
||||
#[hdl]
|
||||
let wrote: Bool = reg_builder().clock_domain(cd).reset(false);
|
||||
// on a write, capture the last written value
|
||||
#[hdl]
|
||||
if wr.en & wr.addr.cmp_eq(selected) {
|
||||
connect(stored, wr.data);
|
||||
connect(wrote, true);
|
||||
}
|
||||
// on a read, assert that the read value matches the value we stored
|
||||
#[hdl]
|
||||
if rd.addr.cmp_eq(selected) & wrote {
|
||||
hdl_assert(clk, rd.data.cmp_eq(stored), "");
|
||||
}
|
||||
|
||||
// to assist induction, pass our state to the underlying instance
|
||||
let dbg = #[hdl]
|
||||
DebugPort {
|
||||
selected,
|
||||
stored,
|
||||
wrote,
|
||||
};
|
||||
connect(dut.dbg, dbg);
|
||||
}
|
||||
|
||||
assert_formal(
|
||||
"sram",
|
||||
test_module(8),
|
||||
FormalMode::Prove,
|
||||
2,
|
||||
None,
|
||||
Default::default(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@@ -9,6 +9,11 @@ use fayalite::{
|
|||
};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
#[hdl(outline_generated)]
|
||||
pub struct MyConstSize<V: Size> {
|
||||
pub size: PhantomConst<UIntType<V>>,
|
||||
}
|
||||
|
||||
#[hdl(outline_generated)]
|
||||
pub struct S<T, Len: Size, T2> {
|
||||
pub a: T,
|
||||
|
|
@ -191,3 +196,21 @@ check_bounds!(CheckBoundsTTT2<#[a, Type] A: BundleType +, #[b, Type] B: Type +,
|
|||
check_bounds!(CheckBoundsTTT3<#[a, Type] A: EnumType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
|
||||
check_bounds!(CheckBoundsTTT4<#[a, Type] A: IntType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
|
||||
check_bounds!(CheckBoundsTTT5<#[a, Type] A: StaticType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
pub struct MyPhantomConstInner {
|
||||
pub a: usize,
|
||||
pub b: UInt,
|
||||
}
|
||||
|
||||
#[hdl(outline_generated, get(|v| v.a))]
|
||||
pub type GetA<P: PhantomConstGet<MyPhantomConstInner>> = DynSize;
|
||||
|
||||
#[hdl(outline_generated, get(|v| v.b))]
|
||||
pub type GetB<P: PhantomConstGet<MyPhantomConstInner>> = UInt;
|
||||
|
||||
#[hdl(outline_generated, no_static)]
|
||||
pub struct MyTypeWithPhantomConstParameter<P: Type + PhantomConstGet<MyPhantomConstInner>> {
|
||||
pub a: ArrayType<Bool, GetA<P>>,
|
||||
pub b: HdlOption<GetB<P>>,
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -1,8 +1,15 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use fayalite::{
|
||||
assert_export_firrtl, firrtl::ExportOptions, intern::Intern,
|
||||
module::transform::simplify_enums::SimplifyEnumsKind, prelude::*, ty::StaticType,
|
||||
assert_export_firrtl,
|
||||
firrtl::ExportOptions,
|
||||
int::{UIntInRange, UIntInRangeInclusive},
|
||||
intern::Intern,
|
||||
module::transform::simplify_enums::SimplifyEnumsKind,
|
||||
platform::PlatformIOBuilder,
|
||||
prelude::*,
|
||||
reset::ResetType,
|
||||
ty::StaticType,
|
||||
};
|
||||
use serde_json::json;
|
||||
|
||||
|
|
@@ -190,10 +197,14 @@ circuit check_array_repeat:
|
|||
};
|
||||
}
|
||||
|
||||
pub trait UnknownTrait {}
|
||||
|
||||
impl<T: ?Sized> UnknownTrait for T {}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_skipped_generics<T, #[hdl(skip)] U, const N: usize, #[hdl(skip)] const M: usize>(v: U)
|
||||
where
|
||||
T: StaticType,
|
||||
T: StaticType + UnknownTrait,
|
||||
ConstUsize<N>: KnownSize,
|
||||
U: std::fmt::Display,
|
||||
{
|
||||
|
|
@@ -375,18 +386,18 @@ circuit check_written_inside_both_if_else:
|
|||
};
|
||||
}
|
||||
|
||||
#[hdl(outline_generated)]
|
||||
#[hdl(outline_generated, cmp_eq)]
|
||||
pub struct TestStruct<T> {
|
||||
pub a: T,
|
||||
pub b: UInt<8>,
|
||||
}
|
||||
|
||||
#[hdl(outline_generated)]
|
||||
#[hdl(outline_generated, cmp_eq)]
|
||||
pub struct TestStruct2 {
|
||||
pub v: UInt<8>,
|
||||
}
|
||||
|
||||
#[hdl(outline_generated)]
|
||||
#[hdl(outline_generated, cmp_eq)]
|
||||
pub struct TestStruct3 {}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
|
|
@@ -4026,3 +4037,650 @@ circuit check_enum_connect_any:
|
|||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_deduce_resets<T: ResetType>(ty: T) {
|
||||
#[hdl]
|
||||
let cd: ClockDomain<T> = m.input(ClockDomain[ty]);
|
||||
#[hdl]
|
||||
let my_reg = reg_builder().reset(0u8).clock_domain(cd);
|
||||
#[hdl]
|
||||
let u8_in: UInt<8> = m.input();
|
||||
connect(my_reg, u8_in);
|
||||
#[hdl]
|
||||
let u8_out: UInt<8> = m.output();
|
||||
connect(u8_out, my_reg);
|
||||
#[hdl]
|
||||
let enum_in: OneOfThree<Reset, AsyncReset, SyncReset> = m.input();
|
||||
#[hdl]
|
||||
let enum_out: OneOfThree<Reset, AsyncReset, SyncReset> = m.output();
|
||||
#[hdl]
|
||||
let reset_out: Reset = m.output();
|
||||
connect(reset_out, cd.rst.to_reset());
|
||||
#[hdl]
|
||||
match enum_in {
|
||||
OneOfThree::<_, _, _>::A(v) => {
|
||||
connect(
|
||||
enum_out,
|
||||
OneOfThree[Reset][AsyncReset][SyncReset].A(cd.rst.to_reset()),
|
||||
);
|
||||
connect(reset_out, v);
|
||||
}
|
||||
OneOfThree::<_, _, _>::B(v) => {
|
||||
connect(enum_out, OneOfThree[Reset][AsyncReset][SyncReset].B(v))
|
||||
}
|
||||
OneOfThree::<_, _, _>::C(v) => {
|
||||
connect(enum_out, OneOfThree[Reset][AsyncReset][SyncReset].C(v))
|
||||
}
|
||||
}
|
||||
}
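// The test below appears to exercise reset deduction for each concrete reset type: with the
// generic `Reset` the transform errors unless its boolean argument (fallback to a synchronous
// reset) is true, while with `SyncReset`/`AsyncReset` it succeeds outright and the remaining
// `Reset` ports are rewritten to the deduced concrete type (see the expected FIRRTL below).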
|
||||
|
||||
#[test]
|
||||
fn test_deduce_resets() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_deduce_resets(Reset);
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
options: ExportOptions {
|
||||
simplify_enums: None,
|
||||
..ExportOptions::default()
|
||||
},
|
||||
"/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_deduce_resets:
|
||||
type Ty0 = {clk: Clock, rst: Reset}
|
||||
type Ty1 = {|A: Reset, B: AsyncReset, C: UInt<1>|}
|
||||
module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output reset_out: Reset @[module-XXXXXXXXXX.rs 10:1]
|
||||
regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
|
||||
connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
|
||||
match enum_in: @[module-XXXXXXXXXX.rs 12:1]
|
||||
A(_match_arm_value):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
|
||||
B(_match_arm_value_1):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
|
||||
C(_match_arm_value_2):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
fayalite::module::transform::deduce_resets::deduce_resets(m.canonical().intern_sized(), false)
|
||||
.unwrap_err();
|
||||
let m = fayalite::module::transform::deduce_resets::deduce_resets(
|
||||
m.canonical().intern_sized(),
|
||||
true,
|
||||
)
|
||||
.unwrap();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
options: ExportOptions {
|
||||
simplify_enums: None,
|
||||
..ExportOptions::default()
|
||||
},
|
||||
"/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_deduce_resets:
|
||||
type Ty0 = {clk: Clock, rst: UInt<1>}
|
||||
type Ty1 = {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}
|
||||
module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output reset_out: UInt<1> @[module-XXXXXXXXXX.rs 10:1]
|
||||
regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
|
||||
connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
|
||||
match enum_in: @[module-XXXXXXXXXX.rs 12:1]
|
||||
A(_match_arm_value):
|
||||
connect enum_out, {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
|
||||
B(_match_arm_value_1):
|
||||
connect enum_out, {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
|
||||
C(_match_arm_value_2):
|
||||
connect enum_out, {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
let m = check_deduce_resets(SyncReset);
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
options: ExportOptions {
|
||||
simplify_enums: None,
|
||||
..ExportOptions::default()
|
||||
},
|
||||
"/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_deduce_resets:
|
||||
type Ty0 = {clk: Clock, rst: UInt<1>}
|
||||
type Ty1 = {|A: Reset, B: AsyncReset, C: UInt<1>|}
|
||||
module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output reset_out: Reset @[module-XXXXXXXXXX.rs 10:1]
|
||||
regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
|
||||
connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
|
||||
match enum_in: @[module-XXXXXXXXXX.rs 12:1]
|
||||
A(_match_arm_value):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
|
||||
B(_match_arm_value_1):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
|
||||
C(_match_arm_value_2):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
let m = fayalite::module::transform::deduce_resets::deduce_resets(
|
||||
m.canonical().intern_sized(),
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
options: ExportOptions {
|
||||
simplify_enums: None,
|
||||
..ExportOptions::default()
|
||||
},
|
||||
"/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_deduce_resets:
|
||||
type Ty0 = {clk: Clock, rst: UInt<1>}
|
||||
type Ty1 = {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}
|
||||
module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output reset_out: UInt<1> @[module-XXXXXXXXXX.rs 10:1]
|
||||
regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
|
||||
connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
|
||||
match enum_in: @[module-XXXXXXXXXX.rs 12:1]
|
||||
A(_match_arm_value):
|
||||
connect enum_out, {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
|
||||
B(_match_arm_value_1):
|
||||
connect enum_out, {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
|
||||
C(_match_arm_value_2):
|
||||
connect enum_out, {|A: UInt<1>, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
let m = check_deduce_resets(AsyncReset);
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
options: ExportOptions {
|
||||
simplify_enums: None,
|
||||
..ExportOptions::default()
|
||||
},
|
||||
"/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_deduce_resets:
|
||||
type Ty0 = {clk: Clock, rst: AsyncReset}
|
||||
type Ty1 = {|A: Reset, B: AsyncReset, C: UInt<1>|}
|
||||
module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output reset_out: Reset @[module-XXXXXXXXXX.rs 10:1]
|
||||
regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
|
||||
connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
|
||||
match enum_in: @[module-XXXXXXXXXX.rs 12:1]
|
||||
A(_match_arm_value):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
|
||||
B(_match_arm_value_1):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
|
||||
C(_match_arm_value_2):
|
||||
connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
let m = fayalite::module::transform::deduce_resets::deduce_resets(
|
||||
m.canonical().intern_sized(),
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
options: ExportOptions {
|
||||
simplify_enums: None,
|
||||
..ExportOptions::default()
|
||||
},
|
||||
"/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_deduce_resets:
|
||||
type Ty0 = {clk: Clock, rst: AsyncReset}
|
||||
type Ty1 = {|A: AsyncReset, B: AsyncReset, C: UInt<1>|}
|
||||
module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output reset_out: AsyncReset @[module-XXXXXXXXXX.rs 10:1]
|
||||
regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
|
||||
connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
|
||||
match enum_in: @[module-XXXXXXXXXX.rs 12:1]
|
||||
A(_match_arm_value):
|
||||
connect enum_out, {|A: AsyncReset, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
|
||||
B(_match_arm_value_1):
|
||||
connect enum_out, {|A: AsyncReset, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
|
||||
C(_match_arm_value_2):
|
||||
connect enum_out, {|A: AsyncReset, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
// intentionally not outline_generated to ensure we get correct macro hygiene
|
||||
#[hdl_module]
|
||||
pub fn check_cfgs<#[cfg(cfg_false_for_tests)] A: Type, #[cfg(cfg_true_for_tests)] B: Type>(
|
||||
#[cfg(cfg_false_for_tests)] a: A,
|
||||
#[cfg(cfg_true_for_tests)] b: B,
|
||||
) {
|
||||
#[hdl]
|
||||
struct S<#[cfg(cfg_false_for_tests)] A, #[cfg(cfg_true_for_tests)] B> {
|
||||
#[cfg(cfg_false_for_tests)]
|
||||
a: A,
|
||||
#[cfg(cfg_true_for_tests)]
|
||||
b: B,
|
||||
}
|
||||
#[hdl]
|
||||
#[cfg(cfg_false_for_tests)]
|
||||
let i_a: A = m.input(a);
|
||||
#[hdl]
|
||||
#[cfg(cfg_true_for_tests)]
|
||||
let i_b: B = m.input(b);
|
||||
#[hdl]
|
||||
let w: S<UInt<8>> = wire();
|
||||
#[cfg(cfg_false_for_tests)]
|
||||
{
|
||||
#[hdl]
|
||||
let o_a: A = m.output(a);
|
||||
connect(o_a, w.a.cast_bits_to(a));
|
||||
connect_any(w.a, i_a.cast_to_bits());
|
||||
}
|
||||
#[cfg(cfg_true_for_tests)]
|
||||
{
|
||||
#[hdl]
|
||||
let o_b: B = m.output(b);
|
||||
connect(o_b, w.b.cast_bits_to(b));
|
||||
connect_any(w.b, i_b.cast_to_bits());
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cfgs() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_cfgs(UInt[8]);
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_cfgs.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_cfgs:
|
||||
type Ty0 = {b: UInt<8>}
|
||||
module check_cfgs: @[the_test_file.rs 9962:1]
|
||||
input i_b: UInt<8> @[the_test_file.rs 9979:20]
|
||||
output o_b: UInt<8> @[the_test_file.rs 9992:24]
|
||||
wire w: Ty0 @[the_test_file.rs 9981:25]
|
||||
connect o_b, w.b @[the_test_file.rs 9993:9]
|
||||
connect w.b, i_b @[the_test_file.rs 9994:9]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_let_patterns() {
|
||||
#[hdl]
|
||||
let tuple_in: (UInt<1>, SInt<1>, Bool) = m.input();
|
||||
#[hdl]
|
||||
let (tuple_0, tuple_1, tuple_2) = tuple_in;
|
||||
#[hdl]
|
||||
let tuple_0_out: UInt<1> = m.output();
|
||||
connect(tuple_0_out, tuple_0);
|
||||
#[hdl]
|
||||
let tuple_1_out: SInt<1> = m.output();
|
||||
connect(tuple_1_out, tuple_1);
|
||||
#[hdl]
|
||||
let tuple_2_out: Bool = m.output();
|
||||
connect(tuple_2_out, tuple_2);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_in: TestStruct<SInt<8>> = m.input();
|
||||
#[hdl]
|
||||
let TestStruct::<_> { a, b } = test_struct_in;
|
||||
#[hdl]
|
||||
let test_struct_a_out: SInt<8> = m.output();
|
||||
connect(test_struct_a_out, a);
|
||||
#[hdl]
|
||||
let test_struct_b_out: UInt<8> = m.output();
|
||||
connect(test_struct_b_out, b);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_2_in: TestStruct2 = m.input();
|
||||
#[hdl]
|
||||
let TestStruct2 { v } = test_struct_2_in;
|
||||
#[hdl]
|
||||
let test_struct_2_v_out: UInt<8> = m.output();
|
||||
connect(test_struct_2_v_out, v);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_3_in: TestStruct3 = m.input();
|
||||
#[hdl]
|
||||
let TestStruct3 {} = test_struct_3_in;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_let_patterns() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_let_patterns();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_let_patterns.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_let_patterns:
|
||||
type Ty0 = {`0`: UInt<1>, `1`: SInt<1>, `2`: UInt<1>}
|
||||
type Ty1 = {a: SInt<8>, b: UInt<8>}
|
||||
type Ty2 = {v: UInt<8>}
|
||||
type Ty3 = {}
|
||||
module check_let_patterns: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input tuple_in: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
output tuple_0_out: UInt<1> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output tuple_1_out: SInt<1> @[module-XXXXXXXXXX.rs 6:1]
|
||||
output tuple_2_out: UInt<1> @[module-XXXXXXXXXX.rs 8:1]
|
||||
input test_struct_in: Ty1 @[module-XXXXXXXXXX.rs 10:1]
|
||||
output test_struct_a_out: SInt<8> @[module-XXXXXXXXXX.rs 12:1]
|
||||
output test_struct_b_out: UInt<8> @[module-XXXXXXXXXX.rs 14:1]
|
||||
input test_struct_2_in: Ty2 @[module-XXXXXXXXXX.rs 16:1]
|
||||
output test_struct_2_v_out: UInt<8> @[module-XXXXXXXXXX.rs 18:1]
|
||||
input test_struct_3_in: Ty3 @[module-XXXXXXXXXX.rs 20:1]
|
||||
connect tuple_0_out, tuple_in.`0` @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect tuple_1_out, tuple_in.`1` @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect tuple_2_out, tuple_in.`2` @[module-XXXXXXXXXX.rs 9:1]
|
||||
connect test_struct_a_out, test_struct_in.a @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect test_struct_b_out, test_struct_in.b @[module-XXXXXXXXXX.rs 15:1]
|
||||
connect test_struct_2_v_out, test_struct_2_in.v @[module-XXXXXXXXXX.rs 19:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_struct_cmp_eq() {
|
||||
#[hdl]
|
||||
let tuple_lhs: (UInt<1>, SInt<1>, Bool) = m.input();
|
||||
#[hdl]
|
||||
let tuple_rhs: (UInt<1>, SInt<1>, Bool) = m.input();
|
||||
#[hdl]
|
||||
let tuple_cmp_eq: Bool = m.output();
|
||||
connect(tuple_cmp_eq, tuple_lhs.cmp_eq(tuple_rhs));
|
||||
#[hdl]
|
||||
let tuple_cmp_ne: Bool = m.output();
|
||||
connect(tuple_cmp_ne, tuple_lhs.cmp_ne(tuple_rhs));
|
||||
|
||||
#[hdl]
|
||||
let test_struct_lhs: TestStruct<SInt<8>> = m.input();
|
||||
#[hdl]
|
||||
let test_struct_rhs: TestStruct<SInt<8>> = m.input();
|
||||
#[hdl]
|
||||
let test_struct_cmp_eq: Bool = m.output();
|
||||
connect(test_struct_cmp_eq, test_struct_lhs.cmp_eq(test_struct_rhs));
|
||||
#[hdl]
|
||||
let test_struct_cmp_ne: Bool = m.output();
|
||||
connect(test_struct_cmp_ne, test_struct_lhs.cmp_ne(test_struct_rhs));
|
||||
|
||||
#[hdl]
|
||||
let test_struct_2_lhs: TestStruct2 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_2_rhs: TestStruct2 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_2_cmp_eq: Bool = m.output();
|
||||
connect(
|
||||
test_struct_2_cmp_eq,
|
||||
test_struct_2_lhs.cmp_eq(test_struct_2_rhs),
|
||||
);
|
||||
#[hdl]
|
||||
let test_struct_2_cmp_ne: Bool = m.output();
|
||||
connect(
|
||||
test_struct_2_cmp_ne,
|
||||
test_struct_2_lhs.cmp_ne(test_struct_2_rhs),
|
||||
);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_3_lhs: TestStruct3 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_3_rhs: TestStruct3 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_3_cmp_eq: Bool = m.output();
|
||||
connect(
|
||||
test_struct_3_cmp_eq,
|
||||
test_struct_3_lhs.cmp_eq(test_struct_3_rhs),
|
||||
);
|
||||
#[hdl]
|
||||
let test_struct_3_cmp_ne: Bool = m.output();
|
||||
connect(
|
||||
test_struct_3_cmp_ne,
|
||||
test_struct_3_lhs.cmp_ne(test_struct_3_rhs),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_struct_cmp_eq() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_struct_cmp_eq();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_struct_cmp_eq.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_struct_cmp_eq:
|
||||
type Ty0 = {`0`: UInt<1>, `1`: SInt<1>, `2`: UInt<1>}
|
||||
type Ty1 = {a: SInt<8>, b: UInt<8>}
|
||||
type Ty2 = {v: UInt<8>}
|
||||
type Ty3 = {}
|
||||
module check_struct_cmp_eq: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input tuple_lhs: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input tuple_rhs: Ty0 @[module-XXXXXXXXXX.rs 3:1]
|
||||
output tuple_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output tuple_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input test_struct_lhs: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
input test_struct_rhs: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output test_struct_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 10:1]
|
||||
output test_struct_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 12:1]
|
||||
input test_struct_2_lhs: Ty2 @[module-XXXXXXXXXX.rs 14:1]
|
||||
input test_struct_2_rhs: Ty2 @[module-XXXXXXXXXX.rs 15:1]
|
||||
output test_struct_2_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 16:1]
|
||||
output test_struct_2_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 18:1]
|
||||
input test_struct_3_lhs: Ty3 @[module-XXXXXXXXXX.rs 20:1]
|
||||
input test_struct_3_rhs: Ty3 @[module-XXXXXXXXXX.rs 21:1]
|
||||
output test_struct_3_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 22:1]
|
||||
output test_struct_3_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 24:1]
|
||||
wire _array_literal_expr: UInt<1>[3]
|
||||
connect _array_literal_expr[0], eq(tuple_lhs.`0`, tuple_rhs.`0`)
|
||||
connect _array_literal_expr[1], eq(tuple_lhs.`1`, tuple_rhs.`1`)
|
||||
connect _array_literal_expr[2], eq(tuple_lhs.`2`, tuple_rhs.`2`)
|
||||
wire _cast_array_to_bits_expr: UInt<1>[3]
|
||||
connect _cast_array_to_bits_expr[0], _array_literal_expr[0]
|
||||
connect _cast_array_to_bits_expr[1], _array_literal_expr[1]
|
||||
connect _cast_array_to_bits_expr[2], _array_literal_expr[2]
|
||||
wire _cast_to_bits_expr: UInt<3>
|
||||
connect _cast_to_bits_expr, cat(_cast_array_to_bits_expr[2], cat(_cast_array_to_bits_expr[1], _cast_array_to_bits_expr[0]))
|
||||
connect tuple_cmp_eq, andr(_cast_to_bits_expr) @[module-XXXXXXXXXX.rs 5:1]
|
||||
wire _array_literal_expr_1: UInt<1>[3]
|
||||
connect _array_literal_expr_1[0], neq(tuple_lhs.`0`, tuple_rhs.`0`)
|
||||
connect _array_literal_expr_1[1], neq(tuple_lhs.`1`, tuple_rhs.`1`)
|
||||
connect _array_literal_expr_1[2], neq(tuple_lhs.`2`, tuple_rhs.`2`)
|
||||
wire _cast_array_to_bits_expr_1: UInt<1>[3]
|
||||
connect _cast_array_to_bits_expr_1[0], _array_literal_expr_1[0]
|
||||
connect _cast_array_to_bits_expr_1[1], _array_literal_expr_1[1]
|
||||
connect _cast_array_to_bits_expr_1[2], _array_literal_expr_1[2]
|
||||
wire _cast_to_bits_expr_1: UInt<3>
|
||||
connect _cast_to_bits_expr_1, cat(_cast_array_to_bits_expr_1[2], cat(_cast_array_to_bits_expr_1[1], _cast_array_to_bits_expr_1[0]))
|
||||
connect tuple_cmp_ne, orr(_cast_to_bits_expr_1) @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect test_struct_cmp_eq, and(eq(test_struct_lhs.a, test_struct_rhs.a), eq(test_struct_lhs.b, test_struct_rhs.b)) @[module-XXXXXXXXXX.rs 11:1]
|
||||
connect test_struct_cmp_ne, or(neq(test_struct_lhs.a, test_struct_rhs.a), neq(test_struct_lhs.b, test_struct_rhs.b)) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect test_struct_2_cmp_eq, eq(test_struct_2_lhs.v, test_struct_2_rhs.v) @[module-XXXXXXXXXX.rs 17:1]
|
||||
connect test_struct_2_cmp_ne, neq(test_struct_2_lhs.v, test_struct_2_rhs.v) @[module-XXXXXXXXXX.rs 19:1]
|
||||
connect test_struct_3_cmp_eq, UInt<1>(0h1) @[module-XXXXXXXXXX.rs 23:1]
|
||||
connect test_struct_3_cmp_ne, UInt<1>(0h0) @[module-XXXXXXXXXX.rs 25:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_uint_in_range() {
|
||||
#[hdl]
|
||||
let i_0_to_1: UIntInRange<0, 1> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_2: UIntInRange<0, 2> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_3: UIntInRange<0, 3> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_4: UIntInRange<0, 4> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_7: UIntInRange<0, 7> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_8: UIntInRange<0, 8> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_9: UIntInRange<0, 9> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_0: UIntInRangeInclusive<0, 0> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_1: UIntInRangeInclusive<0, 1> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_2: UIntInRangeInclusive<0, 2> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_3: UIntInRangeInclusive<0, 3> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_4: UIntInRangeInclusive<0, 4> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_7: UIntInRangeInclusive<0, 7> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_8: UIntInRangeInclusive<0, 8> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_9: UIntInRangeInclusive<0, 9> = m.input();
|
||||
}
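// For reference, the expected FIRRTL below implies the backing widths: an exclusive range
// 0..N is stored in ceil(log2(N)) bits and an inclusive range 0..=N in ceil(log2(N + 1)) bits,
// e.g. UIntInRange<0, 9> and UIntInRangeInclusive<0, 8> both lower to UInt<4>.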
|
||||
|
||||
#[test]
|
||||
fn test_uint_in_range() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_uint_in_range();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_uint_in_range.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_uint_in_range:
|
||||
type Ty0 = {value: UInt<0>, range: {}}
|
||||
type Ty1 = {value: UInt<1>, range: {}}
|
||||
type Ty2 = {value: UInt<2>, range: {}}
|
||||
type Ty3 = {value: UInt<2>, range: {}}
|
||||
type Ty4 = {value: UInt<3>, range: {}}
|
||||
type Ty5 = {value: UInt<3>, range: {}}
|
||||
type Ty6 = {value: UInt<4>, range: {}}
|
||||
type Ty7 = {value: UInt<0>, range: {}}
|
||||
type Ty8 = {value: UInt<1>, range: {}}
|
||||
type Ty9 = {value: UInt<2>, range: {}}
|
||||
type Ty10 = {value: UInt<2>, range: {}}
|
||||
type Ty11 = {value: UInt<3>, range: {}}
|
||||
type Ty12 = {value: UInt<3>, range: {}}
|
||||
type Ty13 = {value: UInt<4>, range: {}}
|
||||
type Ty14 = {value: UInt<4>, range: {}}
|
||||
module check_uint_in_range: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input i_0_to_1: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input i_0_to_2: Ty1 @[module-XXXXXXXXXX.rs 3:1]
|
||||
input i_0_to_3: Ty2 @[module-XXXXXXXXXX.rs 4:1]
|
||||
input i_0_to_4: Ty3 @[module-XXXXXXXXXX.rs 5:1]
|
||||
input i_0_to_7: Ty4 @[module-XXXXXXXXXX.rs 6:1]
|
||||
input i_0_to_8: Ty5 @[module-XXXXXXXXXX.rs 7:1]
|
||||
input i_0_to_9: Ty6 @[module-XXXXXXXXXX.rs 8:1]
|
||||
input i_0_through_0: Ty7 @[module-XXXXXXXXXX.rs 9:1]
|
||||
input i_0_through_1: Ty8 @[module-XXXXXXXXXX.rs 10:1]
|
||||
input i_0_through_2: Ty9 @[module-XXXXXXXXXX.rs 11:1]
|
||||
input i_0_through_3: Ty10 @[module-XXXXXXXXXX.rs 12:1]
|
||||
input i_0_through_4: Ty11 @[module-XXXXXXXXXX.rs 13:1]
|
||||
input i_0_through_7: Ty12 @[module-XXXXXXXXXX.rs 14:1]
|
||||
input i_0_through_8: Ty13 @[module-XXXXXXXXXX.rs 15:1]
|
||||
input i_0_through_9: Ty14 @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_platform_io(platform_io_builder: PlatformIOBuilder<'_>) {
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[test]
|
||||
fn test_platform_io() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_platform_io(todo!());
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_platform_io.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_platform_io:
|
||||
type Ty0 = {value: UInt<0>, range: {}}
|
||||
type Ty1 = {value: UInt<1>, range: {}}
|
||||
type Ty2 = {value: UInt<2>, range: {}}
|
||||
type Ty3 = {value: UInt<2>, range: {}}
|
||||
type Ty4 = {value: UInt<3>, range: {}}
|
||||
type Ty5 = {value: UInt<3>, range: {}}
|
||||
type Ty6 = {value: UInt<4>, range: {}}
|
||||
type Ty7 = {value: UInt<0>, range: {}}
|
||||
type Ty8 = {value: UInt<1>, range: {}}
|
||||
type Ty9 = {value: UInt<2>, range: {}}
|
||||
type Ty10 = {value: UInt<2>, range: {}}
|
||||
type Ty11 = {value: UInt<3>, range: {}}
|
||||
type Ty12 = {value: UInt<3>, range: {}}
|
||||
type Ty13 = {value: UInt<4>, range: {}}
|
||||
type Ty14 = {value: UInt<4>, range: {}}
|
||||
module check_platform_io: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input i_0_to_1: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input i_0_to_2: Ty1 @[module-XXXXXXXXXX.rs 3:1]
|
||||
input i_0_to_3: Ty2 @[module-XXXXXXXXXX.rs 4:1]
|
||||
input i_0_to_4: Ty3 @[module-XXXXXXXXXX.rs 5:1]
|
||||
input i_0_to_7: Ty4 @[module-XXXXXXXXXX.rs 6:1]
|
||||
input i_0_to_8: Ty5 @[module-XXXXXXXXXX.rs 7:1]
|
||||
input i_0_to_9: Ty6 @[module-XXXXXXXXXX.rs 8:1]
|
||||
input i_0_through_0: Ty7 @[module-XXXXXXXXXX.rs 9:1]
|
||||
input i_0_through_1: Ty8 @[module-XXXXXXXXXX.rs 10:1]
|
||||
input i_0_through_2: Ty9 @[module-XXXXXXXXXX.rs 11:1]
|
||||
input i_0_through_3: Ty10 @[module-XXXXXXXXXX.rs 12:1]
|
||||
input i_0_through_4: Ty11 @[module-XXXXXXXXXX.rs 13:1]
|
||||
input i_0_through_7: Ty12 @[module-XXXXXXXXXX.rs 14:1]
|
||||
input i_0_through_8: Ty13 @[module-XXXXXXXXXX.rs 15:1]
|
||||
input i_0_through_9: Ty14 @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
|
|
|||
2028 crates/fayalite/tests/sim.rs Normal file
File diff suppressed because it is too large
1705 crates/fayalite/tests/sim/expected/array_rw.txt Normal file
File diff suppressed because it is too large
283 crates/fayalite/tests/sim/expected/array_rw.vcd Normal file
@@ -0,0 +1,283 @@
|
|||
$timescale 1 ps $end
|
||||
$scope module array_rw $end
|
||||
$scope struct array_in $end
|
||||
$var wire 8 ! \[0] $end
|
||||
$var wire 8 " \[1] $end
|
||||
$var wire 8 # \[2] $end
|
||||
$var wire 8 $ \[3] $end
|
||||
$var wire 8 % \[4] $end
|
||||
$var wire 8 & \[5] $end
|
||||
$var wire 8 ' \[6] $end
|
||||
$var wire 8 ( \[7] $end
|
||||
$var wire 8 ) \[8] $end
|
||||
$var wire 8 * \[9] $end
|
||||
$var wire 8 + \[10] $end
|
||||
$var wire 8 , \[11] $end
|
||||
$var wire 8 - \[12] $end
|
||||
$var wire 8 . \[13] $end
|
||||
$var wire 8 / \[14] $end
|
||||
$var wire 8 0 \[15] $end
|
||||
$upscope $end
|
||||
$scope struct array_out $end
|
||||
$var wire 8 1 \[0] $end
|
||||
$var wire 8 2 \[1] $end
|
||||
$var wire 8 3 \[2] $end
|
||||
$var wire 8 4 \[3] $end
|
||||
$var wire 8 5 \[4] $end
|
||||
$var wire 8 6 \[5] $end
|
||||
$var wire 8 7 \[6] $end
|
||||
$var wire 8 8 \[7] $end
|
||||
$var wire 8 9 \[8] $end
|
||||
$var wire 8 : \[9] $end
|
||||
$var wire 8 ; \[10] $end
|
||||
$var wire 8 < \[11] $end
|
||||
$var wire 8 = \[12] $end
|
||||
$var wire 8 > \[13] $end
|
||||
$var wire 8 ? \[14] $end
|
||||
$var wire 8 @ \[15] $end
|
||||
$upscope $end
|
||||
$var wire 8 A read_index $end
|
||||
$var wire 8 B read_data $end
|
||||
$var wire 8 C write_index $end
|
||||
$var wire 8 D write_data $end
|
||||
$var wire 1 E write_en $end
|
||||
$scope struct array_wire $end
|
||||
$var wire 8 F \[0] $end
|
||||
$var wire 8 G \[1] $end
|
||||
$var wire 8 H \[2] $end
|
||||
$var wire 8 I \[3] $end
|
||||
$var wire 8 J \[4] $end
|
||||
$var wire 8 K \[5] $end
|
||||
$var wire 8 L \[6] $end
|
||||
$var wire 8 M \[7] $end
|
||||
$var wire 8 N \[8] $end
|
||||
$var wire 8 O \[9] $end
|
||||
$var wire 8 P \[10] $end
|
||||
$var wire 8 Q \[11] $end
|
||||
$var wire 8 R \[12] $end
|
||||
$var wire 8 S \[13] $end
|
||||
$var wire 8 T \[14] $end
|
||||
$var wire 8 U \[15] $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
b11111111 !
|
||||
b1111111 "
|
||||
b111111 #
|
||||
b11111 $
|
||||
b1111 %
|
||||
b111 &
|
||||
b11 '
|
||||
b1 (
|
||||
b0 )
|
||||
b10000000 *
|
||||
b11000000 +
|
||||
b11100000 ,
|
||||
b11110000 -
|
||||
b11111000 .
|
||||
b11111100 /
|
||||
b11111110 0
|
||||
b11111111 1
|
||||
b1111111 2
|
||||
b111111 3
|
||||
b11111 4
|
||||
b1111 5
|
||||
b111 6
|
||||
b11 7
|
||||
b1 8
|
||||
b0 9
|
||||
b10000000 :
|
||||
b11000000 ;
|
||||
b11100000 <
|
||||
b11110000 =
|
||||
b11111000 >
|
||||
b11111100 ?
|
||||
b11111110 @
|
||||
b0 A
|
||||
b11111111 B
|
||||
b0 C
|
||||
b0 D
|
||||
0E
|
||||
b11111111 F
|
||||
b1111111 G
|
||||
b111111 H
|
||||
b11111 I
|
||||
b1111 J
|
||||
b111 K
|
||||
b11 L
|
||||
b1 M
|
||||
b0 N
|
||||
b10000000 O
|
||||
b11000000 P
|
||||
b11100000 Q
|
||||
b11110000 R
|
||||
b11111000 S
|
||||
b11111100 T
|
||||
b11111110 U
|
||||
$end
|
||||
#1000000
|
||||
b1 A
|
||||
b1111111 B
|
||||
#2000000
|
||||
b10 A
|
||||
b111111 B
|
||||
#3000000
|
||||
b11 A
|
||||
b11111 B
|
||||
#4000000
|
||||
b100 A
|
||||
b1111 B
|
||||
#5000000
|
||||
b101 A
|
||||
b111 B
|
||||
#6000000
|
||||
b110 A
|
||||
b11 B
|
||||
#7000000
|
||||
b111 A
|
||||
b1 B
|
||||
#8000000
|
||||
b1000 A
|
||||
b0 B
|
||||
#9000000
|
||||
b1001 A
|
||||
b10000000 B
|
||||
#10000000
|
||||
b1010 A
|
||||
b11000000 B
|
||||
#11000000
|
||||
b1011 A
|
||||
b11100000 B
|
||||
#12000000
|
||||
b1100 A
|
||||
b11110000 B
|
||||
#13000000
|
||||
b1101 A
|
||||
b11111000 B
|
||||
#14000000
|
||||
b1110 A
|
||||
b11111100 B
|
||||
#15000000
|
||||
b1111 A
|
||||
b11111110 B
|
||||
#16000000
|
||||
b10000 A
|
||||
b0 B
|
||||
#17000000
|
||||
b0 1
|
||||
b0 A
|
||||
1E
|
||||
b0 F
|
||||
#18000000
|
||||
b11111111 1
|
||||
b1 2
|
||||
b11111111 B
|
||||
b1 C
|
||||
b1 D
|
||||
b11111111 F
|
||||
b1 G
|
||||
#19000000
|
||||
b1111111 2
|
||||
b100 3
|
||||
b10 C
|
||||
b100 D
|
||||
b1111111 G
|
||||
b100 H
|
||||
#20000000
|
||||
b111111 3
|
||||
b1001 4
|
||||
b11 C
|
||||
b1001 D
|
||||
b111111 H
|
||||
b1001 I
|
||||
#21000000
|
||||
b11111 4
|
||||
b10000 5
|
||||
b100 C
|
||||
b10000 D
|
||||
b11111 I
|
||||
b10000 J
|
||||
#22000000
|
||||
b1111 5
|
||||
b11001 6
|
||||
b101 C
|
||||
b11001 D
|
||||
b1111 J
|
||||
b11001 K
|
||||
#23000000
|
||||
b111 6
|
||||
b100100 7
|
||||
b110 C
|
||||
b100100 D
|
||||
b111 K
|
||||
b100100 L
|
||||
#24000000
|
||||
b11 7
|
||||
b110001 8
|
||||
b111 C
|
||||
b110001 D
|
||||
b11 L
|
||||
b110001 M
|
||||
#25000000
|
||||
b1 8
|
||||
b1000000 9
|
||||
b1000 C
|
||||
b1000000 D
|
||||
b1 M
|
||||
b1000000 N
|
||||
#26000000
|
||||
b0 9
|
||||
b1010001 :
|
||||
b1001 C
|
||||
b1010001 D
|
||||
b0 N
|
||||
b1010001 O
|
||||
#27000000
|
||||
b10000000 :
|
||||
b1100100 ;
|
||||
b1010 C
|
||||
b1100100 D
|
||||
b10000000 O
|
||||
b1100100 P
|
||||
#28000000
|
||||
b11000000 ;
|
||||
b1111001 <
|
||||
b1011 C
|
||||
b1111001 D
|
||||
b11000000 P
|
||||
b1111001 Q
|
||||
#29000000
|
||||
b11100000 <
|
||||
b10010000 =
|
||||
b1100 C
|
||||
b10010000 D
|
||||
b11100000 Q
|
||||
b10010000 R
|
||||
#30000000
|
||||
b11110000 =
|
||||
b10101001 >
|
||||
b1101 C
|
||||
b10101001 D
|
||||
b11110000 R
|
||||
b10101001 S
|
||||
#31000000
|
||||
b11111000 >
|
||||
b11000100 ?
|
||||
b1110 C
|
||||
b11000100 D
|
||||
b11111000 S
|
||||
b11000100 T
|
||||
#32000000
|
||||
b11111100 ?
|
||||
b11100001 @
|
||||
b1111 C
|
||||
b11100001 D
|
||||
b11111100 T
|
||||
b11100001 U
|
||||
#33000000
|
||||
b11111110 @
|
||||
b10000 C
|
||||
b0 D
|
||||
b11111110 U
|
||||
#34000000
|
||||
@@ -0,0 +1,183 @@
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x0,
|
||||
},
|
||||
1: Const {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
2: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
3: BranchIfZero {
|
||||
target: 5,
|
||||
value: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
5: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 5,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
}.i,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
}.i,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "conditional_assignment_last",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "i",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(0),
|
||||
name: "i",
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Source,
|
||||
},
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(1),
|
||||
name: "w",
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x1,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 2 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
@@ -0,0 +1,14 @@
$timescale 1 ps $end
$scope module conditional_assignment_last $end
$var wire 1 ! i $end
$var wire 1 " w $end
$upscope $end
$enddefinitions $end
$dumpvars
0!
1"
$end
#1000000
1!
0"
#2000000
136 crates/fayalite/tests/sim/expected/connect_const.txt Normal file
@@ -0,0 +1,136 @@
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const: connect_const).connect_const::o",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
value: 0x5,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(0), // (0x5) SlotDebugData { name: "InstantiatedModule(connect_const: connect_const).connect_const::o", ty: UInt<8> },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
2: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 2,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
5,
|
||||
5,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "connect_const",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "o",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(0),
|
||||
name: "o",
|
||||
ty: UInt<8>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
flow: Sink,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x05,
|
||||
last_state: 0x05,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [],
|
||||
instant: 0 s,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
203 crates/fayalite/tests/sim/expected/connect_const_reset.txt Normal file
@@ -0,0 +1,203 @@
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 5,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::bit_out",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x1,
|
||||
},
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x1) SlotDebugData { name: "", ty: AsyncReset },
|
||||
src: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
2: Copy {
|
||||
dest: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out", ty: AsyncReset },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x1) SlotDebugData { name: "", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
3: Copy {
|
||||
dest: StatePartIndex<BigSlots>(4), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x1) SlotDebugData { name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::bit_out", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(4), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
5: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 5,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.reset_out,
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.bit_out,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.bit_out,
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.reset_out,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "connect_const_reset",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "reset_out",
|
||||
child: TraceAsyncReset {
|
||||
location: TraceScalarId(0),
|
||||
name: "reset_out",
|
||||
flow: Sink,
|
||||
},
|
||||
ty: AsyncReset,
|
||||
flow: Sink,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "bit_out",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(1),
|
||||
name: "bit_out",
|
||||
flow: Sink,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Sink,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigAsyncReset {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 1 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
11 crates/fayalite/tests/sim/expected/connect_const_reset.vcd Normal file
@@ -0,0 +1,11 @@
$timescale 1 ps $end
$scope module connect_const_reset $end
$var wire 1 ! reset_out $end
$var wire 1 " bit_out $end
$upscope $end
$enddefinitions $end
$dumpvars
1!
1"
$end
#1000000
376 crates/fayalite/tests/sim/expected/counter_async.txt Normal file
@@ -0,0 +1,376 @@
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 10,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg$next",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<1>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<5>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(7), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
value: 0x1,
|
||||
},
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(6), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.rst", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
2: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.rst", ty: AsyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
3: Const {
|
||||
dest: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
value: 0x3,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
4: BranchIfZero {
|
||||
target: 6,
|
||||
value: StatePartIndex<BigSlots>(6), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
5: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
6: Add {
|
||||
dest: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
lhs: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
rhs: StatePartIndex<BigSlots>(7), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
},
|
||||
7: CastToUInt {
|
||||
dest: StatePartIndex<BigSlots>(9), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
dest_width: 4,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
8: Copy {
|
||||
dest: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(9), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
9: Copy {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
10: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.clk", ty: Clock },
|
||||
},
|
||||
11: AndSmall {
|
||||
dest: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
12: BranchIfSmallNonZero {
|
||||
target: 16,
|
||||
value: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
13: BranchIfSmallZero {
|
||||
target: 17,
|
||||
value: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
14: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
},
|
||||
15: Branch {
|
||||
target: 17,
|
||||
},
|
||||
16: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
17: XorSmallImmediate {
|
||||
dest: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
18: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 18,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
3,
|
||||
3,
|
||||
4,
|
||||
3,
|
||||
0,
|
||||
1,
|
||||
4,
|
||||
4,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "counter",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "cd",
|
||||
child: TraceBundle {
|
||||
name: "cd",
|
||||
fields: [
|
||||
TraceClock {
|
||||
location: TraceScalarId(0),
|
||||
name: "clk",
|
||||
flow: Source,
|
||||
},
|
||||
TraceAsyncReset {
|
||||
location: TraceScalarId(1),
|
||||
name: "rst",
|
||||
flow: Source,
|
||||
},
|
||||
],
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: AsyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: AsyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "count",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(2),
|
||||
name: "count",
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
TraceReg {
|
||||
name: "count_reg",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(3),
|
||||
name: "count_reg",
|
||||
ty: UInt<4>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigClock {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigAsyncReset {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(2),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x2,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(3),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(3),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x3,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 66 μs,
|
||||
clocks_triggered: [
|
||||
StatePartIndex<SmallSlots>(1),
|
||||
],
|
||||
..
|
||||
}
217 crates/fayalite/tests/sim/expected/counter_async.vcd Normal file
@@ -0,0 +1,217 @@
$timescale 1 ps $end
|
||||
$scope module counter $end
|
||||
$scope struct cd $end
|
||||
$var wire 1 ! clk $end
|
||||
$var wire 1 " rst $end
|
||||
$upscope $end
|
||||
$var wire 4 # count $end
|
||||
$var reg 4 $ count_reg $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
0!
|
||||
0"
|
||||
b0 #
|
||||
b0 $
|
||||
$end
|
||||
#500000
|
||||
1"
|
||||
b11 #
|
||||
b11 $
|
||||
#1000000
|
||||
1!
|
||||
#1500000
|
||||
0"
|
||||
#2000000
|
||||
0!
|
||||
#3000000
|
||||
1!
|
||||
b100 $
|
||||
b100 #
|
||||
#4000000
|
||||
0!
|
||||
#5000000
|
||||
1!
|
||||
b101 $
|
||||
b101 #
|
||||
#6000000
|
||||
0!
|
||||
#7000000
|
||||
1!
|
||||
b110 $
|
||||
b110 #
|
||||
#8000000
|
||||
0!
|
||||
#9000000
|
||||
1!
|
||||
b111 $
|
||||
b111 #
|
||||
#10000000
|
||||
0!
|
||||
#11000000
|
||||
1!
|
||||
b1000 $
|
||||
b1000 #
|
||||
#12000000
|
||||
0!
|
||||
#13000000
|
||||
1!
|
||||
b1001 $
|
||||
b1001 #
|
||||
#14000000
|
||||
0!
|
||||
#15000000
|
||||
1!
|
||||
b1010 $
|
||||
b1010 #
|
||||
#16000000
|
||||
0!
|
||||
#17000000
|
||||
1!
|
||||
b1011 $
|
||||
b1011 #
|
||||
#18000000
|
||||
0!
|
||||
#19000000
|
||||
1!
|
||||
b1100 $
|
||||
b1100 #
|
||||
#20000000
|
||||
0!
|
||||
#21000000
|
||||
1!
|
||||
b1101 $
|
||||
b1101 #
|
||||
#22000000
|
||||
0!
|
||||
#23000000
|
||||
1!
|
||||
b1110 $
|
||||
b1110 #
|
||||
#24000000
|
||||
0!
|
||||
#25000000
|
||||
1!
|
||||
b1111 $
|
||||
b1111 #
|
||||
#26000000
|
||||
0!
|
||||
#27000000
|
||||
1!
|
||||
b0 $
|
||||
b0 #
|
||||
#28000000
|
||||
0!
|
||||
#29000000
|
||||
1!
|
||||
b1 $
|
||||
b1 #
|
||||
#30000000
|
||||
0!
|
||||
#31000000
|
||||
1!
|
||||
b10 $
|
||||
b10 #
|
||||
#32000000
|
||||
0!
|
||||
#33000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
#34000000
|
||||
0!
|
||||
#35000000
|
||||
1!
|
||||
b100 $
|
||||
b100 #
|
||||
#36000000
|
||||
0!
|
||||
#37000000
|
||||
1!
|
||||
b101 $
|
||||
b101 #
|
||||
#38000000
|
||||
0!
|
||||
#39000000
|
||||
1!
|
||||
b110 $
|
||||
b110 #
|
||||
#40000000
|
||||
0!
|
||||
#41000000
|
||||
1!
|
||||
b111 $
|
||||
b111 #
|
||||
#42000000
|
||||
0!
|
||||
#43000000
|
||||
1!
|
||||
b1000 $
|
||||
b1000 #
|
||||
#44000000
|
||||
0!
|
||||
#45000000
|
||||
1!
|
||||
b1001 $
|
||||
b1001 #
|
||||
#46000000
|
||||
0!
|
||||
#47000000
|
||||
1!
|
||||
b1010 $
|
||||
b1010 #
|
||||
#48000000
|
||||
0!
|
||||
#49000000
|
||||
1!
|
||||
b1011 $
|
||||
b1011 #
|
||||
#50000000
|
||||
0!
|
||||
#51000000
|
||||
1!
|
||||
b1100 $
|
||||
b1100 #
|
||||
#52000000
|
||||
0!
|
||||
#53000000
|
||||
1!
|
||||
b1101 $
|
||||
b1101 #
|
||||
#54000000
|
||||
0!
|
||||
#55000000
|
||||
1!
|
||||
b1110 $
|
||||
b1110 #
|
||||
#56000000
|
||||
0!
|
||||
#57000000
|
||||
1!
|
||||
b1111 $
|
||||
b1111 #
|
||||
#58000000
|
||||
0!
|
||||
#59000000
|
||||
1!
|
||||
b0 $
|
||||
b0 #
|
||||
#60000000
|
||||
0!
|
||||
#61000000
|
||||
1!
|
||||
b1 $
|
||||
b1 #
|
||||
#62000000
|
||||
0!
|
||||
#63000000
|
||||
1!
|
||||
b10 $
|
||||
b10 #
|
||||
#64000000
|
||||
0!
|
||||
#65000000
|
||||
1!
|
||||
b11 $
|
||||
b11 #
|
||||
#66000000
357 crates/fayalite/tests/sim/expected/counter_sync.txt Normal file
@@ -0,0 +1,357 @@
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 9,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: SyncReset,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count_reg$next",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<1>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<5>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
0: Copy {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
1: Const {
|
||||
dest: StatePartIndex<BigSlots>(6), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
value: 0x1,
|
||||
},
|
||||
2: Add {
|
||||
dest: StatePartIndex<BigSlots>(7), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
lhs: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
rhs: StatePartIndex<BigSlots>(6), // (0x1) SlotDebugData { name: "", ty: UInt<1> },
|
||||
},
|
||||
3: CastToUInt {
|
||||
dest: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(7), // (0x4) SlotDebugData { name: "", ty: UInt<5> },
|
||||
dest_width: 4,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(8), // (0x4) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
5: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.rst", ty: SyncReset },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
6: Const {
|
||||
dest: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
value: 0x3,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
7: IsNonZeroDestIsSmall {
|
||||
dest: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::cd.clk", ty: Clock },
|
||||
},
|
||||
8: AndSmall {
|
||||
dest: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
9: BranchIfSmallZero {
|
||||
target: 14,
|
||||
value: StatePartIndex<SmallSlots>(1), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
10: BranchIfSmallNonZero {
|
||||
target: 13,
|
||||
value: StatePartIndex<SmallSlots>(3), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
11: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(4), // (0x4) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg$next", ty: UInt<4> },
|
||||
},
|
||||
12: Branch {
|
||||
target: 14,
|
||||
},
|
||||
13: Copy {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x3) SlotDebugData { name: "InstantiatedModule(counter: counter).counter::count_reg", ty: UInt<4> },
|
||||
src: StatePartIndex<BigSlots>(5), // (0x3) SlotDebugData { name: "", ty: UInt<4> },
|
||||
},
|
||||
14: XorSmallImmediate {
|
||||
dest: StatePartIndex<SmallSlots>(0), // (0x0 0) SlotDebugData { name: "", ty: Bool },
|
||||
lhs: StatePartIndex<SmallSlots>(2), // (0x1 1) SlotDebugData { name: "", ty: Bool },
|
||||
rhs: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
15: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 15,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
3,
|
||||
3,
|
||||
4,
|
||||
3,
|
||||
1,
|
||||
4,
|
||||
4,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "counter",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "cd",
|
||||
child: TraceBundle {
|
||||
name: "cd",
|
||||
fields: [
|
||||
TraceClock {
|
||||
location: TraceScalarId(0),
|
||||
name: "clk",
|
||||
flow: Source,
|
||||
},
|
||||
TraceSyncReset {
|
||||
location: TraceScalarId(1),
|
||||
name: "rst",
|
||||
flow: Source,
|
||||
},
|
||||
],
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: SyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: SyncReset,
|
||||
},
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "count",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(2),
|
||||
name: "count",
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
flow: Sink,
|
||||
},
|
||||
TraceReg {
|
||||
name: "count_reg",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(3),
|
||||
name: "count_reg",
|
||||
ty: UInt<4>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigClock {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigSyncReset {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(2),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x2,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(3),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(3),
|
||||
ty: UInt<4>,
|
||||
},
|
||||
state: 0x3,
|
||||
last_state: 0x3,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 66 μs,
|
||||
clocks_triggered: [
|
||||
StatePartIndex<SmallSlots>(1),
|
||||
],
|
||||
..
|
||||
}
Some files were not shown because too many files have changed in this diff