Compare commits: master...fifo-proof
No commits in common. "master" and "fifo-proof" have entirely different histories.
119 changed files with 15115 additions and 52445 deletions
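Because the two branches share no merge base, a three-dot `git diff master...fifo-proof` has no common ancestor to compare against; a plain two-dot diff between the branch tips still works. A minimal sketch for reproducing the summary above locally, assuming both branches exist on the `origin` remote:

```bash
# Fetch both branch tips; no shared history is needed for a plain diff.
git fetch origin master fifo-proof
# The two-dot form compares the tips directly; --stat reproduces the
# "files changed / insertions / deletions" summary shown above.
git diff --stat origin/master origin/fifo-proof
```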
.forgejo/workflows/deps.yml (normal file, 77 lines)
@@ -0,0 +1,77 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on:
  workflow_call:
    outputs:
      cache-primary-key:
        value: ${{ jobs.deps.outputs.cache-primary-key }}

jobs:
  deps:
    runs-on: debian-12
    outputs:
      cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }}
    steps:
      - uses: https://code.forgejo.org/actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: https://code.forgejo.org/actions/cache/restore@v3
        id: restore-deps
        with:
          path: deps
          key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }}
          lookup-only: true
      - name: Install Apt packages
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          apt-get update -qq
          apt-get install -qq \
            bison \
            build-essential \
            ccache \
            clang \
            cvc5 \
            flex \
            gawk \
            g++ \
            git \
            libboost-filesystem-dev \
            libboost-python-dev \
            libboost-system-dev \
            libffi-dev \
            libreadline-dev \
            lld \
            pkg-config \
            python3 \
            python3-click \
            tcl-dev \
            zlib1g-dev
      - name: Install Firtool
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          mkdir -p deps
          wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz
          sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz'
          tar -C deps -xvaf deps/firrtl.tar.gz
          rm -rf deps/firtool
          mv deps/firtool-1.86.0 deps/firtool
      - name: Get SymbiYosys
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --branch=yosys-0.45 https://github.com/YosysHQ/sby.git deps/sby
      - name: Build Z3
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --recursive --branch=z3-4.13.3 https://github.com/Z3Prover/z3.git deps/z3
          (cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local)
          make -C deps/z3/build -j"$(nproc)"
      - name: Build Yosys
        if: steps.restore-deps.outputs.cache-hit != 'true'
        run: |
          git clone --depth=1 --recursive --branch=0.45 https://github.com/YosysHQ/yosys.git deps/yosys
          make -C deps/yosys -j"$(nproc)"
      - uses: https://code.forgejo.org/actions/cache/save@v3
        if: steps.restore-deps.outputs.cache-hit != 'true'
        with:
          path: deps
          key: ${{ steps.restore-deps.outputs.cache-primary-key }}
@@ -3,23 +3,59 @@
on: [push, pull_request]

jobs:
  deps:
    uses: ./.forgejo/workflows/deps.yml
  test:
    runs-on: debian-12
    container:
      image: git.libre-chip.org/libre-chip/fayalite-deps:latest
    needs: deps
    steps:
      - uses: actions/checkout@v3
      - uses: https://code.forgejo.org/actions/checkout@v3
        with:
          fetch-depth: 0
      - run: |
          scripts/check-copyright.sh
      - uses: https://git.libre-chip.org/mirrors/rust-cache@v2
      - run: |
          apt-get update -qq
          apt-get install -qq \
            bison \
            build-essential \
            ccache \
            clang \
            cvc5 \
            flex \
            gawk \
            git \
            libboost-filesystem-dev \
            libboost-python-dev \
            libboost-system-dev \
            libffi-dev \
            libreadline-dev \
            lld \
            pkg-config \
            python3 \
            python3-click \
            tcl-dev \
            z3 \
            zlib1g-dev
      - run: |
          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.82.0
          source "$HOME/.cargo/env"
          echo "$PATH" >> "$GITHUB_PATH"
      - uses: https://code.forgejo.org/actions/cache/restore@v3
        with:
          path: deps
          key: ${{ needs.deps.outputs.cache-primary-key }}
          fail-on-cache-miss: true
      - run: |
          make -C deps/z3/build install
          make -C deps/sby install
          make -C deps/yosys install
          export PATH="$(realpath deps/firtool/bin):$PATH"
          echo "$PATH" >> "$GITHUB_PATH"
      - uses: https://github.com/Swatinem/rust-cache@v2
        with:
          save-if: ${{ github.ref == 'refs/heads/master' }}
      - run: cargo test
      - run: cargo build --tests --features=unstable-doc
      - run: cargo test --doc --features=unstable-doc
      - run: cargo doc --features=unstable-doc
      - run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
      - run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out
      - run: cargo run --example tx_only_uart yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/tx_only_uart-out
Cargo.lock (generated, 182 lines)
@@ -1,6 +1,18 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 4
version = 3

[[package]]
name = "ahash"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
dependencies = [
 "cfg-if",
 "once_cell",
 "version_check",
 "zerocopy",
]

[[package]]
name = "allocator-api2"
@ -25,9 +37,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.13"
|
||||
version = "1.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
|
||||
checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
|
|
@ -81,12 +93,6 @@ version = "0.2.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
|
||||
|
||||
[[package]]
|
||||
name = "basic-toml"
|
||||
version = "0.1.8"
|
||||
|
|
@ -155,9 +161,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
|
|||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.48"
|
||||
version = "4.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
|
||||
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
|
|
@ -165,9 +171,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.48"
|
||||
version = "4.5.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
|
||||
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
|
|
@ -175,20 +181,11 @@ dependencies = [
|
|||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_complete"
|
||||
version = "4.5.58"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a"
|
||||
dependencies = [
|
||||
"clap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.47"
|
||||
version = "4.5.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
|
||||
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
|
|
@ -198,9 +195,9 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "0.7.5"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
|
||||
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
|
|
@ -306,11 +303,9 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
|
|||
name = "fayalite"
|
||||
version = "0.3.0"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"bitvec",
|
||||
"blake3",
|
||||
"clap",
|
||||
"clap_complete",
|
||||
"ctor",
|
||||
"eyre",
|
||||
"fayalite-proc-macros",
|
||||
|
|
@ -319,7 +314,7 @@ dependencies = [
|
|||
"jobslot",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
"ordered-float",
|
||||
"os_pipe",
|
||||
"petgraph",
|
||||
"serde",
|
||||
"serde_json",
|
||||
|
|
@ -370,12 +365,6 @@ version = "0.5.7"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
|
||||
|
||||
[[package]]
|
||||
name = "foldhash"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
|
||||
|
||||
[[package]]
|
||||
name = "funty"
|
||||
version = "2.0.0"
|
||||
|
|
@ -394,13 +383,12 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "getrandom"
|
||||
version = "0.3.3"
|
||||
version = "0.2.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
|
||||
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"r-efi",
|
||||
"wasi",
|
||||
]
|
||||
|
||||
|
|
@ -412,13 +400,12 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
|
|||
|
||||
[[package]]
|
||||
name = "hashbrown"
|
||||
version = "0.15.2"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
|
||||
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
|
||||
dependencies = [
|
||||
"ahash",
|
||||
"allocator-api2",
|
||||
"equivalent",
|
||||
"foldhash",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -444,9 +431,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
|
|||
|
||||
[[package]]
|
||||
name = "indexmap"
|
||||
version = "2.9.0"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
|
||||
checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
|
||||
dependencies = [
|
||||
"equivalent",
|
||||
"hashbrown",
|
||||
|
|
@ -467,23 +454,23 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
|
|||
|
||||
[[package]]
|
||||
name = "jobslot"
|
||||
version = "0.2.23"
|
||||
version = "0.2.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c"
|
||||
checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"derive_destructure2",
|
||||
"getrandom",
|
||||
"libc",
|
||||
"scopeguard",
|
||||
"windows-sys 0.61.2",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.176"
|
||||
version = "0.2.153"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
|
||||
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
|
|
@ -526,26 +513,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
|
||||
|
||||
[[package]]
|
||||
name = "ordered-float"
|
||||
version = "5.1.0"
|
||||
name = "os_pipe"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d"
|
||||
checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
"rand",
|
||||
"serde",
|
||||
"libc",
|
||||
"windows-sys 0.59.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "petgraph"
|
||||
version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06"
|
||||
version = "0.6.5"
|
||||
source = "git+https://github.com/programmerjake/petgraph.git?rev=258ea8071209a924b73fe96f9f87a3b7b45cbc9f#258ea8071209a924b73fe96f9f87a3b7b45cbc9f"
|
||||
dependencies = [
|
||||
"fixedbitset",
|
||||
"hashbrown",
|
||||
"indexmap",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -576,37 +559,12 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "r-efi"
|
||||
version = "5.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
|
||||
|
||||
[[package]]
|
||||
name = "radium"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.8.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
|
||||
dependencies = [
|
||||
"rand_core",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_core"
|
||||
version = "0.6.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
|
||||
dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustix"
|
||||
version = "0.38.31"
|
||||
|
|
@ -792,21 +750,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
|
|||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.14.7+wasi-0.2.4"
|
||||
version = "0.11.0+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
|
||||
dependencies = [
|
||||
"wasip2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasip2"
|
||||
version = "1.0.1+wasi-0.2.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
|
||||
dependencies = [
|
||||
"wit-bindgen",
|
||||
]
|
||||
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
|
||||
|
||||
[[package]]
|
||||
name = "which"
|
||||
|
|
@ -851,12 +797,6 @@ version = "0.4.0"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
|
|
@ -868,11 +808,11 @@ dependencies = [
|
|||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.61.2"
|
||||
version = "0.59.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
|
||||
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
"windows-targets",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
@ -945,12 +885,6 @@ version = "0.0.19"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
|
||||
|
||||
[[package]]
|
||||
name = "wit-bindgen"
|
||||
version = "0.46.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
|
||||
|
||||
[[package]]
|
||||
name = "wyz"
|
||||
version = "0.5.1"
|
||||
|
|
@ -959,3 +893,23 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
|
|||
dependencies = [
|
||||
"tap",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.7.32"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
|
|
Cargo.toml (15 lines)
@@ -7,31 +7,30 @@ members = ["crates/*"]
[workspace.package]
version = "0.3.0"
license = "LGPL-3.0-or-later"
edition = "2024"
edition = "2021"
repository = "https://git.libre-chip.org/libre-chip/fayalite"
keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
categories = ["simulation", "development-tools", "compilers"]
rust-version = "1.89.0"
rust-version = "1.82.0"

[workspace.dependencies]
fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
base16ct = "0.2.0"
base64 = "0.22.1"
bitvec = { version = "1.0.1", features = ["serde"] }
blake3 = { version = "1.5.4", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
clap_complete = "4.5.58"
ctor = "0.2.8"
eyre = "0.6.12"
hashbrown = "0.15.2"
hashbrown = "0.14.3"
indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.23"
jobslot = "0.2.19"
num-bigint = "0.4.6"
num-traits = "0.2.16"
ordered-float = { version = "5.1.0", features = ["serde"] }
petgraph = "0.8.1"
os_pipe = "1.2.1"
# TODO: switch back to crates.io once petgraph accepts PR #684 and releases a new version
petgraph = { git = "https://github.com/programmerjake/petgraph.git", rev = "258ea8071209a924b73fe96f9f87a3b7b45cbc9f" }
prettyplease = "0.2.20"
proc-macro2 = "1.0.83"
quote = "1.0.36"
README.md (75 lines)
@@ -7,78 +7,3 @@ See Notices.txt for copyright information

Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).

[FIRRTL]: https://github.com/chipsalliance/firrtl-spec

# Building the [Blinky example] for the Arty A7 100T on Linux

[Blinky example]: crates/fayalite/examples/blinky.rs

This uses the container image containing all the external programs and files that Fayalite needs to build for FPGAs; the sources for the container image are in <https://git.libre-chip.org/libre-chip/fayalite-deps>.

Steps:

Install podman (or docker).

Run:
```bash
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out
```

To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS:

[openFPGALoader]: https://github.com/trabucayre/openFPGALoader

On Debian 12:
```bash
sudo apt update && sudo apt install openfpgaloader
```

Then program the FPGA:
```bash
sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit
```

This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert when the board is power-cycled.

To also program the Flash, so it stays programmed when power-cycling the board:

```bash
sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit
```

# Building the [Transmit-only UART example] for the Arty A7 100T on Linux

[Transmit-only UART example]: crates/fayalite/examples/tx_only_uart.rs

Follow the steps above for building the Blinky example, but replace `blinky` with `tx_only_uart`.
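As a concrete sketch, the container invocation from the Blinky steps becomes the command below; this is just the earlier `podman run` line with `blinky` replaced by `tx_only_uart` (and the output directory renamed to match), not a separately documented command:

```bash
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it \
    git.libre-chip.org/libre-chip/fayalite-deps:latest \
    cargo run --example tx_only_uart yosys-nextpnr-xray \
    --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx \
    --prjxray-db-dir /opt/fayalite-deps/prjxray-db \
    --platform arty-a7-100t -o target/tx_only_uart-out
```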
View the output using [tio](https://github.com/tio/tio), which you can install on Debian using `apt`.

Find the correct USB device:
```bash
sudo tio --list
```

You want the device with a name like the following (note the `if01`; `if00` is presumably the JTAG port):
`/dev/serial/by-id/usb-Digilent_Digilent_USB_Device_210319B4A51E-if01-port0`

Connect to the serial port:
```bash
sudo tio -b115200 /dev/serial/by-id/put-your-device-id-here
```

You'll see (repeating endlessly):
```text
Hello World from Fayalite!!!
Hello World from Fayalite!!!
Hello World from Fayalite!!!
```

Press Ctrl+T then `q` to exit tio.

# Funding

## NLnet Grants

* [Libre-Chip CPU with proof of No Spectre bugs](https://nlnet.nl/project/Libre-Chip-proof/) 2024-12-324 [(progress)](https://git.libre-chip.org/libre-chip/grant-tracking/src/branch/master/nlnet-2024-12-324/progress.md)

  This project was funded through the [NGI0 Commons Fund](https://nlnet.nl/commonsfund), a fund established by [NLnet](https://nlnet.nl/) with financial support from the European Commission's [Next Generation Internet](https://ngi.eu) programme, under the aegis of [DG Communications Networks, Content and Technology](https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en) under grant agreement № [101135429](https://cordis.europa.eu/project/id/101135429). Additional funding is made available by the [Swiss State Secretariat for Education, Research and Innovation](https://www.sbfi.admin.ch/sbfi/en/home.html) (SERI).
@@ -220,7 +220,6 @@ forward_fold!(syn::ExprArray => fold_expr_array);
forward_fold!(syn::ExprCall => fold_expr_call);
forward_fold!(syn::ExprIf => fold_expr_if);
forward_fold!(syn::ExprMatch => fold_expr_match);
forward_fold!(syn::ExprMethodCall => fold_expr_method_call);
forward_fold!(syn::ExprPath => fold_expr_path);
forward_fold!(syn::ExprRepeat => fold_expr_repeat);
forward_fold!(syn::ExprStruct => fold_expr_struct);
@@ -1,22 +1,21 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
    Errors, HdlAttr, PairsIterExt,
    hdl_type_common::{
        ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField, ParsedFieldsNamed, ParsedGenerics,
        SplitForImpl, TypesParser, WrappedInConst, common_derives, get_target,
        common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedField,
        ParsedFieldsNamed, ParsedGenerics, SplitForImpl, TypesParser, WrappedInConst,
    },
    kw,
    kw, Errors, HdlAttr, PairsIterExt,
};
use proc_macro2::TokenStream;
use quote::{ToTokens, format_ident, quote_spanned};
use quote::{format_ident, quote_spanned, ToTokens};
use syn::{
    AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed,
    GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility, parse_quote,
    parse_quote_spanned,
    parse_quote, parse_quote_spanned,
    punctuated::{Pair, Punctuated},
    spanned::Spanned,
    token::Brace,
    AngleBracketedGenericArguments, Attribute, Field, FieldMutability, Fields, FieldsNamed,
    GenericParam, Generics, Ident, ItemStruct, Path, Token, Type, Visibility,
};

#[derive(Clone, Debug)]
@ -31,9 +30,7 @@ pub(crate) struct ParsedBundle {
|
|||
pub(crate) field_flips: Vec<Option<HdlAttr<kw::flip, kw::hdl>>>,
|
||||
pub(crate) mask_type_ident: Ident,
|
||||
pub(crate) mask_type_match_variant_ident: Ident,
|
||||
pub(crate) mask_type_sim_value_ident: Ident,
|
||||
pub(crate) match_variant_ident: Ident,
|
||||
pub(crate) sim_value_ident: Ident,
|
||||
pub(crate) builder_ident: Ident,
|
||||
pub(crate) mask_type_builder_ident: Ident,
|
||||
}
|
||||
|
|
@ -86,12 +83,7 @@ impl ParsedBundle {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
ref get,
|
||||
} = options.body;
|
||||
if let Some((get, ..)) = get {
|
||||
errors.error(get, "#[hdl(get(...))] is not allowed on structs");
|
||||
}
|
||||
let mut fields = match fields {
|
||||
syn::Fields::Named(fields) => fields,
|
||||
syn::Fields::Unnamed(fields) => {
|
||||
|
|
@ -132,9 +124,7 @@ impl ParsedBundle {
|
|||
field_flips,
|
||||
mask_type_ident: format_ident!("__{}__MaskType", ident),
|
||||
mask_type_match_variant_ident: format_ident!("__{}__MaskType__MatchVariant", ident),
|
||||
mask_type_sim_value_ident: format_ident!("__{}__MaskType__SimValue", ident),
|
||||
match_variant_ident: format_ident!("__{}__MatchVariant", ident),
|
||||
sim_value_ident: format_ident!("__{}__SimValue", ident),
|
||||
mask_type_builder_ident: format_ident!("__{}__MaskType__Builder", ident),
|
||||
builder_ident: format_ident!("__{}__Builder", ident),
|
||||
ident,
|
||||
|
|
@ -349,6 +339,7 @@ impl ToTokens for Builder {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {self.ident.span()=>
|
||||
#[automatically_derived]
|
||||
#[allow(non_camel_case_types, non_snake_case, dead_code)]
|
||||
impl #impl_generics #unfilled_ty
|
||||
#where_clause
|
||||
|
|
@ -435,9 +426,7 @@ impl ToTokens for ParsedBundle {
|
|||
field_flips,
|
||||
mask_type_ident,
|
||||
mask_type_match_variant_ident,
|
||||
mask_type_sim_value_ident,
|
||||
match_variant_ident,
|
||||
sim_value_ident,
|
||||
builder_ident,
|
||||
mask_type_builder_ident,
|
||||
} = self;
|
||||
|
|
@ -448,8 +437,6 @@ impl ToTokens for ParsedBundle {
|
|||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut item_attrs = attrs.clone();
|
||||
|
|
@ -534,7 +521,7 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_match_variant_fields = mask_type_fields.clone();
|
||||
let mut mask_type_match_variant_fields = mask_type_fields;
|
||||
for Field { ty, .. } in &mut mask_type_match_variant_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
|
|
@ -576,58 +563,6 @@ impl ToTokens for ParsedBundle {
|
|||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut mask_type_sim_value_fields = mask_type_fields;
|
||||
for Field { ty, .. } in &mut mask_type_sim_value_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
},
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: mask_type_sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: Fields::Named(mask_type_sim_value_fields),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut sim_value_fields = FieldsNamed::from(fields.clone());
|
||||
for Field { ty, .. } in &mut sim_value_fields.named {
|
||||
*ty = parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
};
|
||||
}
|
||||
ItemStruct {
|
||||
attrs: vec![
|
||||
parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
},
|
||||
parse_quote_spanned! {span=>
|
||||
#[allow(non_camel_case_types, dead_code)]
|
||||
},
|
||||
],
|
||||
vis: vis.clone(),
|
||||
struct_token: *struct_token,
|
||||
ident: sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: Fields::Named(sim_value_fields),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let this_token = Ident::new("__this", span);
|
||||
let fields_token = Ident::new("__fields", span);
|
||||
let self_token = Token;
|
||||
|
|
@ -678,32 +613,6 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
},
|
||||
));
|
||||
let sim_value_from_opaque_fields =
|
||||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: v.field_from_opaque(),
|
||||
}
|
||||
}));
|
||||
let sim_value_clone_from_opaque_fields =
|
||||
Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
v.field_clone_from_opaque(&mut value.#ident);
|
||||
}
|
||||
}));
|
||||
let sim_value_to_opaque_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
v.field(&value.#ident);
|
||||
}
|
||||
}));
|
||||
let to_sim_value_fields = Vec::from_iter(fields.named().into_iter().map(|field| {
|
||||
let ident: &Ident = field.ident().as_ref().unwrap();
|
||||
quote_spanned! {span=>
|
||||
#ident: ::fayalite::sim::value::SimValue::ty(&self.#ident),
|
||||
}
|
||||
}));
|
||||
let fields_len = fields.named().into_iter().len();
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
|
|
@ -712,7 +621,6 @@ impl ToTokens for ParsedBundle {
|
|||
{
|
||||
type BaseType = ::fayalite::bundle::Bundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type SimValue = #mask_type_sim_value_ident #type_generics;
|
||||
type MatchVariant = #mask_type_match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
|
||||
|
|
@ -750,35 +658,6 @@ impl ToTokens for ParsedBundle {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#mask_type_sim_value_ident {
|
||||
#(#sim_value_from_opaque_fields)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#(#sim_value_clone_from_opaque_fields)*
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer);
|
||||
#(#sim_value_to_opaque_fields)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #mask_type_ident #type_generics
|
||||
|
|
@ -810,57 +689,11 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValue for #mask_type_sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #mask_type_ident #type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #mask_type_ident {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #mask_type_ident {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#mask_type_ident #type_generics>
|
||||
for #mask_type_sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #mask_type_ident #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #mask_type_ident #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#mask_type_ident #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::ty::Type for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type BaseType = ::fayalite::bundle::Bundle;
|
||||
type MaskType = #mask_type_ident #type_generics;
|
||||
type SimValue = #sim_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
|
||||
|
|
@ -900,35 +733,6 @@ impl ToTokens for ParsedBundle {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#sim_value_ident {
|
||||
#(#sim_value_from_opaque_fields)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueFromOpaque::new(*self, opaque);
|
||||
#(#sim_value_clone_from_opaque_fields)*
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = ::fayalite::bundle::BundleSimValueToOpaque::new(*self, writer);
|
||||
#(#sim_value_to_opaque_fields)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::bundle::BundleType for #target #type_generics
|
||||
|
|
@ -959,144 +763,8 @@ impl ToTokens for ParsedBundle {
|
|||
::fayalite::intern::Interned::into_inner(::fayalite::intern::Intern::intern_sized(__retval))
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValue for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type Type = #target #type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #target {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
let ty = #target {
|
||||
#(#to_sim_value_fields)*
|
||||
};
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics>
|
||||
for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
let mut expr_where_clause =
|
||||
Generics::from(generics)
|
||||
.where_clause
|
||||
.unwrap_or_else(|| syn::WhereClause {
|
||||
where_token: Token,
|
||||
predicates: Punctuated::new(),
|
||||
});
|
||||
let mut sim_value_where_clause = expr_where_clause.clone();
|
||||
let mut fields_sim_value_eq = vec![];
|
||||
let mut fields_cmp_eq = vec![];
|
||||
let mut fields_cmp_ne = vec![];
|
||||
for field in fields.named() {
|
||||
let field_ident = field.ident();
|
||||
let field_ty = field.ty();
|
||||
expr_where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::expr::ops::ExprPartialEq<#field_ty>
|
||||
});
|
||||
sim_value_where_clause
|
||||
.predicates
|
||||
.push(parse_quote_spanned! {cmp_eq.span=>
|
||||
#field_ty: ::fayalite::sim::value::SimValuePartialEq<#field_ty>
|
||||
});
|
||||
fields_sim_value_eq.push(quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValuePartialEq::sim_value_eq(&__lhs.#field_ident, &__rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_eq.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_eq(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
fields_cmp_ne.push(quote_spanned! {span=>
|
||||
::fayalite::expr::ops::ExprPartialEq::cmp_ne(__lhs.#field_ident, __rhs.#field_ident)
|
||||
});
|
||||
}
|
||||
let sim_value_eq_body;
|
||||
let cmp_eq_body;
|
||||
let cmp_ne_body;
|
||||
if fields_len == 0 {
|
||||
sim_value_eq_body = quote_spanned! {span=>
|
||||
true
|
||||
};
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&true)
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&false)
|
||||
};
|
||||
} else {
|
||||
sim_value_eq_body = quote_spanned! {span=>
|
||||
#(#fields_sim_value_eq)&&*
|
||||
};
|
||||
cmp_eq_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_eq)&*
|
||||
};
|
||||
cmp_ne_body = quote_spanned! {span=>
|
||||
#(#fields_cmp_ne)|*
|
||||
};
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::expr::ops::ExprPartialEq<Self> for #target #type_generics
|
||||
#expr_where_clause
|
||||
{
|
||||
fn cmp_eq(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_eq_body
|
||||
}
|
||||
fn cmp_ne(
|
||||
__lhs: ::fayalite::expr::Expr<Self>,
|
||||
__rhs: ::fayalite::expr::Expr<Self>,
|
||||
) -> ::fayalite::expr::Expr<::fayalite::int::Bool> {
|
||||
#cmp_ne_body
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::SimValuePartialEq<Self> for #target #type_generics
|
||||
#sim_value_where_clause
|
||||
{
|
||||
fn sim_value_eq(
|
||||
__lhs: &::fayalite::sim::value::SimValue<Self>,
|
||||
__rhs: &::fayalite::sim::value::SimValue<Self>,
|
||||
) -> bool {
|
||||
#sim_value_eq_body
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
let static_generics = generics.clone().for_static_type();
|
||||
let (static_impl_generics, static_type_generics, static_where_clause) =
|
||||
|
|
@ -1132,14 +800,6 @@ impl ToTokens for ParsedBundle {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #mask_type_ident #static_type_generics
|
||||
#static_where_clause
|
||||
|
|
@ -1162,15 +822,6 @@ impl ToTokens for ParsedBundle {
|
|||
};
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default
|
||||
for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
|
|
|
@@ -1,20 +1,20 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
    Errors, HdlAttr, PairsIterExt,
    hdl_type_common::{
        ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, SplitForImpl,
        TypesParser, WrappedInConst, common_derives, get_target,
        common_derives, get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics,
        ParsedType, SplitForImpl, TypesParser, WrappedInConst,
    },
    kw,
    kw, Errors, HdlAttr, PairsIterExt,
};
use proc_macro2::TokenStream;
use quote::{ToTokens, format_ident, quote_spanned};
use quote::{format_ident, quote_spanned, ToTokens};
use syn::{
    Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident,
    ItemEnum, ItemStruct, Token, Type, Variant, Visibility, parse_quote_spanned,
    parse_quote_spanned,
    punctuated::{Pair, Punctuated},
    token::{Brace, Paren},
    Attribute, Field, FieldMutability, Fields, FieldsNamed, FieldsUnnamed, Generics, Ident,
    ItemEnum, ItemStruct, Token, Type, Variant, Visibility,
};

crate::options! {
@ -129,9 +129,6 @@ pub(crate) struct ParsedEnum {
|
|||
pub(crate) brace_token: Brace,
|
||||
pub(crate) variants: Punctuated<ParsedVariant, Token![,]>,
|
||||
pub(crate) match_variant_ident: Ident,
|
||||
pub(crate) sim_value_ident: Ident,
|
||||
pub(crate) sim_builder_ident: Ident,
|
||||
pub(crate) sim_builder_ty_field_ident: Ident,
|
||||
}
|
||||
|
||||
impl ParsedEnum {
|
||||
|
|
@ -158,15 +155,7 @@ impl ParsedEnum {
|
|||
custom_bounds,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
ref get,
|
||||
} = options.body;
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "#[hdl(cmp_eq)] is not yet implemented for enums");
|
||||
}
|
||||
if let Some((get, ..)) = get {
|
||||
errors.error(get, "#[hdl(get(...))] is not allowed on enums");
|
||||
}
|
||||
attrs.retain(|attr| {
|
||||
if attr.path().is_ident("repr") {
|
||||
errors.error(attr, "#[repr] is not supported on #[hdl] enums");
|
||||
|
|
@ -197,9 +186,6 @@ impl ParsedEnum {
|
|||
brace_token,
|
||||
variants,
|
||||
match_variant_ident: format_ident!("__{}__MatchVariant", ident),
|
||||
sim_value_ident: format_ident!("__{}__SimValue", ident),
|
||||
sim_builder_ident: format_ident!("__{}__SimBuilder", ident),
|
||||
sim_builder_ty_field_ident: format_ident!("__ty", span = ident.span()),
|
||||
ident,
|
||||
})
|
||||
}
|
||||
|
|
@ -217,9 +203,6 @@ impl ToTokens for ParsedEnum {
|
|||
brace_token,
|
||||
variants,
|
||||
match_variant_ident,
|
||||
sim_value_ident,
|
||||
sim_builder_ident,
|
||||
sim_builder_ty_field_ident,
|
||||
} = self;
|
||||
let span = ident.span();
|
||||
let ItemOptions {
|
||||
|
|
@ -228,8 +211,6 @@ impl ToTokens for ParsedEnum {
|
|||
custom_bounds: _,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _, // TODO: implement cmp_eq for enums
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
|
|
@ -423,137 +404,11 @@ impl ToTokens for ParsedEnum {
|
|||
)),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut struct_attrs = attrs.clone();
|
||||
struct_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
ItemStruct {
|
||||
attrs: struct_attrs,
|
||||
vis: vis.clone(),
|
||||
struct_token: Token,
|
||||
ident: sim_builder_ident.clone(),
|
||||
generics: generics.into(),
|
||||
fields: FieldsNamed {
|
||||
brace_token: *brace_token,
|
||||
named: Punctuated::from_iter([Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: Some(sim_builder_ty_field_ident.clone()),
|
||||
colon_token: Some(Token),
|
||||
ty: parse_quote_spanned! {span=>
|
||||
#target #type_generics
|
||||
},
|
||||
}]),
|
||||
}
|
||||
.into(),
|
||||
semi_token: None,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut enum_attrs = attrs.clone();
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
#[::fayalite::__std::prelude::v1::derive(
|
||||
::fayalite::__std::fmt::Debug,
|
||||
::fayalite::__std::clone::Clone,
|
||||
)]
|
||||
});
|
||||
enum_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(dead_code, non_camel_case_types)]
|
||||
});
|
||||
let sim_value_has_unknown_variant = !variants.len().is_power_of_two();
|
||||
let sim_value_unknown_variant_name = sim_value_has_unknown_variant.then(|| {
|
||||
let mut name = String::new();
|
||||
let unknown = "Unknown";
|
||||
loop {
|
||||
let orig_len = name.len();
|
||||
name.push_str(unknown);
|
||||
if variants.iter().all(|v| v.ident != name) {
|
||||
break Ident::new(&name, span);
|
||||
}
|
||||
name.truncate(orig_len);
|
||||
name.push('_');
|
||||
}
|
||||
});
|
||||
let sim_value_unknown_variant =
|
||||
sim_value_unknown_variant_name
|
||||
.as_ref()
|
||||
.map(|unknown_variant_name| {
|
||||
Pair::End(parse_quote_spanned! {span=>
|
||||
#unknown_variant_name(::fayalite::enum_::UnknownVariantSimValue)
|
||||
})
|
||||
});
|
||||
ItemEnum {
|
||||
attrs: enum_attrs,
|
||||
vis: vis.clone(),
|
||||
enum_token: *enum_token,
|
||||
ident: sim_value_ident.clone(),
|
||||
generics: generics.into(),
|
||||
brace_token: *brace_token,
|
||||
variants: Punctuated::from_iter(
|
||||
variants
|
||||
.pairs()
|
||||
.map_pair_value_ref(
|
||||
|ParsedVariant {
|
||||
attrs,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
}| Variant {
|
||||
attrs: attrs.clone(),
|
||||
ident: ident.clone(),
|
||||
fields: match field {
|
||||
Some(ParsedVariantField {
|
||||
paren_token,
|
||||
attrs,
|
||||
options: _,
|
||||
ty,
|
||||
comma_token,
|
||||
}) => Fields::Unnamed(FieldsUnnamed {
|
||||
paren_token: *paren_token,
|
||||
unnamed: Punctuated::from_iter([
|
||||
Pair::new(
|
||||
Field {
|
||||
attrs: attrs.clone(),
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::SimValue<#ty>
|
||||
},
|
||||
},
|
||||
Some(comma_token.unwrap_or(Token))),
|
||||
),
|
||||
Pair::new(
|
||||
Field {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
mutability: FieldMutability::None,
|
||||
ident: None,
|
||||
colon_token: None,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::EnumPaddingSimValue
|
||||
},
|
||||
},
|
||||
None,
|
||||
),
|
||||
]),
|
||||
}),
|
||||
None => Fields::Unnamed(parse_quote_spanned! {span=>
|
||||
(::fayalite::enum_::EnumPaddingSimValue)
|
||||
}),
|
||||
},
|
||||
discriminant: None,
|
||||
},
|
||||
)
|
||||
.chain(sim_value_unknown_variant),
|
||||
),
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let self_token = Token;
|
||||
for (index, ParsedVariant { ident, field, .. }) in variants.iter().enumerate() {
|
||||
if let Some(ParsedVariantField { ty, .. }) = field {
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
|
|
@ -575,27 +430,10 @@ impl ToTokens for ParsedEnum {
|
|||
)
|
||||
}
|
||||
}
|
||||
impl #impl_generics #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident<__V: ::fayalite::sim::value::ToSimValueWithType<#ty>>(
|
||||
#self_token,
|
||||
v: __V,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
let v = ::fayalite::sim::value::ToSimValueWithType::into_sim_value_with_type(
|
||||
v,
|
||||
#self_token.#sim_builder_ty_field_ident.#ident,
|
||||
);
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
#self_token.#sim_builder_ty_field_ident,
|
||||
#sim_value_ident::#ident(v, ::fayalite::enum_::EnumPaddingSimValue::new()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #impl_generics #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
|
|
@ -610,17 +448,6 @@ impl ToTokens for ParsedEnum {
|
|||
)
|
||||
}
|
||||
}
|
||||
impl #impl_generics #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
#[allow(non_snake_case, dead_code)]
|
||||
#vis fn #ident(#self_token) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
#self_token.#sim_builder_ty_field_ident,
|
||||
#sim_value_ident::#ident(::fayalite::enum_::EnumPaddingSimValue::new()),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
|
|
@ -702,142 +529,6 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
},
|
||||
));
|
||||
let sim_value_from_opaque_unknown_match_arm = if let Some(sim_value_unknown_variant_name) =
|
||||
&sim_value_unknown_variant_name
|
||||
{
|
||||
quote_spanned! {span=>
|
||||
_ => #sim_value_ident::#sim_value_unknown_variant_name(v.unknown_variant_from_opaque()),
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
_ => ::fayalite::__std::unreachable!(),
|
||||
}
|
||||
};
|
||||
let sim_value_from_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#index => {
|
||||
let (field, padding) = v.variant_with_field_from_opaque();
|
||||
#sim_value_ident::#ident(field, padding)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#index => #sim_value_ident::#ident(
|
||||
v.variant_no_field_from_opaque(),
|
||||
),
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain([sim_value_from_opaque_unknown_match_arm]),
|
||||
);
|
||||
let sim_value_clone_from_opaque_unknown_match_arm =
|
||||
if let Some(sim_value_unknown_variant_name) = &sim_value_unknown_variant_name {
|
||||
quote_spanned! {span=>
|
||||
_ => if let #sim_value_ident::#sim_value_unknown_variant_name(value) = value {
|
||||
v.unknown_variant_clone_from_opaque(value);
|
||||
} else {
|
||||
*value = #sim_value_ident::#sim_value_unknown_variant_name(
|
||||
v.unknown_variant_from_opaque(),
|
||||
);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
_ => ::fayalite::__std::unreachable!(),
|
||||
}
|
||||
};
|
||||
let sim_value_clone_from_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#index => if let #sim_value_ident::#ident(field, padding) = value {
|
||||
v.variant_with_field_clone_from_opaque(field, padding);
|
||||
} else {
|
||||
let (field, padding) = v.variant_with_field_from_opaque();
|
||||
*value = #sim_value_ident::#ident(field, padding);
|
||||
},
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#index => if let #sim_value_ident::#ident(padding) = value {
|
||||
v.variant_no_field_clone_from_opaque(padding);
|
||||
} else {
|
||||
*value = #sim_value_ident::#ident(
|
||||
v.variant_no_field_from_opaque(),
|
||||
);
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain([sim_value_clone_from_opaque_unknown_match_arm]),
|
||||
);
|
||||
let sim_value_to_opaque_match_arms = Vec::from_iter(
|
||||
variants
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
index,
|
||||
ParsedVariant {
|
||||
attrs: _,
|
||||
options: _,
|
||||
ident,
|
||||
field,
|
||||
},
|
||||
)| {
|
||||
if let Some(_) = field {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#ident(field, padding) => {
|
||||
v.variant_with_field_to_opaque(#index, field, padding)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#ident(padding) => {
|
||||
v.variant_no_field_to_opaque(#index, padding)
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.chain(sim_value_unknown_variant_name.as_ref().map(
|
||||
|sim_value_unknown_variant_name| {
|
||||
quote_spanned! {span=>
|
||||
#sim_value_ident::#sim_value_unknown_variant_name(value) => {
|
||||
v.unknown_variant_to_opaque(value)
|
||||
}
|
||||
}
|
||||
},
|
||||
)),
|
||||
);
|
||||
let variants_len = variants.len();
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
|
|
@ -846,7 +537,6 @@ impl ToTokens for ParsedEnum {
|
|||
{
|
||||
type BaseType = ::fayalite::enum_::Enum;
|
||||
type MaskType = ::fayalite::int::Bool;
|
||||
type SimValue = #sim_value_ident #type_generics;
|
||||
type MatchVariant = #match_variant_ident #type_generics;
|
||||
type MatchActiveScope = ::fayalite::module::Scope;
|
||||
type MatchVariantAndInactiveScope = ::fayalite::enum_::EnumMatchVariantAndInactiveScope<Self>;
|
||||
|
|
@ -879,41 +569,11 @@ impl ToTokens for ParsedEnum {
|
|||
fn source_location() -> ::fayalite::source_location::SourceLocation {
|
||||
::fayalite::source_location::SourceLocation::caller()
|
||||
}
|
||||
fn sim_value_from_opaque(
|
||||
&self,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) -> <Self as ::fayalite::ty::Type>::SimValue {
|
||||
let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque);
|
||||
match v.discriminant() {
|
||||
#(#sim_value_from_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut <Self as ::fayalite::ty::Type>::SimValue,
|
||||
opaque: ::fayalite::ty::OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
let v = ::fayalite::enum_::EnumSimValueFromOpaque::new(*self, opaque);
|
||||
match v.discriminant() {
|
||||
#(#sim_value_clone_from_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
fn sim_value_to_opaque<'__w>(
|
||||
&self,
|
||||
value: &<Self as ::fayalite::ty::Type>::SimValue,
|
||||
writer: ::fayalite::ty::OpaqueSimValueWriter<'__w>,
|
||||
) -> ::fayalite::ty::OpaqueSimValueWritten<'__w> {
|
||||
let v = ::fayalite::enum_::EnumSimValueToOpaque::new(*self, writer);
|
||||
match value {
|
||||
#(#sim_value_to_opaque_match_arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::enum_::EnumType for #target #type_generics
|
||||
#where_clause
|
||||
{
|
||||
type SimBuilder = #sim_builder_ident #type_generics;
|
||||
fn match_activate_scope(
|
||||
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
|
||||
) -> (<Self as ::fayalite::ty::Type>::MatchVariant, <Self as ::fayalite::ty::Type>::MatchActiveScope) {
|
||||
|
|
@ -932,33 +592,6 @@ impl ToTokens for ParsedEnum {
|
|||
][..])
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::sim::value::ToSimValueWithType<#target #type_generics>
|
||||
for #sim_value_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, ::fayalite::__std::clone::Clone::clone(self))
|
||||
}
|
||||
fn into_sim_value_with_type(
|
||||
self,
|
||||
ty: #target #type_generics,
|
||||
) -> ::fayalite::sim::value::SimValue<#target #type_generics> {
|
||||
::fayalite::sim::value::SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::convert::From<#target #type_generics>
|
||||
for #sim_builder_ident #type_generics
|
||||
#where_clause
|
||||
{
|
||||
fn from(#sim_builder_ty_field_ident: #target #type_generics) -> Self {
|
||||
Self { #sim_builder_ty_field_ident }
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (None, MaybeParsed::Parsed(generics)) = (no_static, &self.generics) {
|
||||
|
|
@ -996,15 +629,6 @@ impl ToTokens for ParsedEnum {
|
|||
}
|
||||
}));
|
||||
quote_spanned! {span=>
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::__std::default::Default
|
||||
for #target #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
fn default() -> Self {
|
||||
<Self as ::fayalite::ty::StaticType>::TYPE
|
||||
}
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::ty::StaticType
|
||||
for #target #static_type_generics
|
||||
|
|
@ -1023,34 +647,6 @@ impl ToTokens for ParsedEnum {
|
|||
const MASK_TYPE_PROPERTIES: ::fayalite::ty::TypeProperties =
|
||||
<::fayalite::int::Bool as ::fayalite::ty::StaticType>::TYPE_PROPERTIES;
|
||||
}
|
||||
#[automatically_derived]
|
||||
impl #static_impl_generics ::fayalite::sim::value::ToSimValue
|
||||
for #sim_value_ident #static_type_generics
|
||||
#static_where_clause
|
||||
{
|
||||
type Type = #target #static_type_generics;
|
||||
|
||||
fn to_sim_value(
|
||||
&self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
::fayalite::ty::StaticType::TYPE,
|
||||
::fayalite::__std::clone::Clone::clone(self),
|
||||
)
|
||||
}
|
||||
fn into_sim_value(
|
||||
self,
|
||||
) -> ::fayalite::sim::value::SimValue<
|
||||
<Self as ::fayalite::sim::value::ToSimValue>::Type,
|
||||
> {
|
||||
::fayalite::sim::value::SimValue::from_value(
|
||||
::fayalite::ty::StaticType::TYPE,
|
||||
self,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
@ -1,266 +1,30 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr,
|
||||
hdl_type_common::{
|
||||
ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType,
|
||||
PhantomConstGetBound, TypesParser, WrappedInConst, common_derives, get_target, known_items,
|
||||
get_target, ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType,
|
||||
TypesParser,
|
||||
},
|
||||
kw,
|
||||
kw, Errors, HdlAttr,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, format_ident, quote_spanned};
|
||||
use syn::{
|
||||
Attribute, Expr, Fields, GenericParam, Generics, Ident, ItemStruct, ItemType, Token, Type,
|
||||
TypeGroup, TypeParam, TypeParen, Visibility, parse_quote_spanned, punctuated::Pair,
|
||||
token::Paren,
|
||||
};
|
||||
use quote::ToTokens;
|
||||
use syn::{parse_quote_spanned, Attribute, Generics, Ident, ItemType, Token, Type, Visibility};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstAccessorTypeParam {
|
||||
attrs: Vec<Attribute>,
|
||||
ident: Ident,
|
||||
colon_token: Token![:],
|
||||
phantom_const_get_bound: PhantomConstGetBound,
|
||||
plus_token: Option<Token![+]>,
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorTypeParam> for TypeParam {
|
||||
fn from(value: PhantomConstAccessorTypeParam) -> Self {
|
||||
let PhantomConstAccessorTypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token,
|
||||
phantom_const_get_bound,
|
||||
plus_token,
|
||||
} = value;
|
||||
TypeParam {
|
||||
attrs,
|
||||
ident,
|
||||
colon_token: Some(colon_token),
|
||||
bounds: FromIterator::from_iter([Pair::new(
|
||||
phantom_const_get_bound.into(),
|
||||
plus_token,
|
||||
)]),
|
||||
eq_token: None,
|
||||
default: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorTypeParam> for GenericParam {
|
||||
fn from(value: PhantomConstAccessorTypeParam) -> Self {
|
||||
TypeParam::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstAccessorTypeParam {
    fn parse_opt(generic_param: GenericParam) -> Option<Self> {
        let GenericParam::Type(TypeParam {
            attrs,
            ident,
            colon_token,
            bounds,
            eq_token: None,
            default: None,
        }) = generic_param
        else {
            return None;
        };
        let colon_token = colon_token.unwrap_or(Token![:](ident.span()));
        let mut bounds = bounds.into_pairs();
        let (bound, plus_token) = bounds.next()?.into_tuple();
        let phantom_const_get_bound = PhantomConstGetBound::parse_type_param_bound(bound)
            .ok()?
            .ok()?;
        let None = bounds.next() else {
            return None;
        };
        Some(Self {
            attrs,
            ident,
            colon_token,
            phantom_const_get_bound,
            plus_token,
        })
    }
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstAccessorGenerics {
|
||||
lt_token: Token![<],
|
||||
type_param: PhantomConstAccessorTypeParam,
|
||||
comma_token: Option<Token![,]>,
|
||||
gt_token: Token![>],
|
||||
}
|
||||
|
||||
impl From<PhantomConstAccessorGenerics> for Generics {
|
||||
fn from(value: PhantomConstAccessorGenerics) -> Self {
|
||||
let PhantomConstAccessorGenerics {
|
||||
lt_token,
|
||||
type_param,
|
||||
comma_token,
|
||||
gt_token,
|
||||
} = value;
|
||||
Generics {
|
||||
lt_token: Some(lt_token),
|
||||
params: FromIterator::from_iter([Pair::new(type_param.into(), comma_token)]),
|
||||
gt_token: Some(gt_token),
|
||||
where_clause: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<&'a PhantomConstAccessorGenerics> for Generics {
|
||||
fn from(value: &'a PhantomConstAccessorGenerics) -> Self {
|
||||
value.clone().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl PhantomConstAccessorGenerics {
    fn parse_opt(generics: Generics) -> Option<Self> {
        let Generics {
            lt_token,
            params,
            gt_token,
            where_clause: None,
        } = generics
        else {
            return None;
        };
        let mut params = params.into_pairs();
        let (generic_param, comma_token) = params.next()?.into_tuple();
        let type_param = PhantomConstAccessorTypeParam::parse_opt(generic_param)?;
        let span = type_param.ident.span();
        let lt_token = lt_token.unwrap_or(Token![<](span));
        let gt_token = gt_token.unwrap_or(Token![>](span));
        let None = params.next() else {
            return None;
        };
        Some(Self {
            lt_token,
            type_param,
            comma_token,
            gt_token,
        })
    }
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum ParsedTypeAlias {
|
||||
TypeAlias {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
vis: Visibility,
|
||||
type_token: Token![type],
|
||||
ident: Ident,
|
||||
generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
eq_token: Token![=],
|
||||
ty: MaybeParsed<ParsedType, Type>,
|
||||
semi_token: Token![;],
|
||||
},
|
||||
PhantomConstAccessor {
|
||||
attrs: Vec<Attribute>,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
get: (kw::get, Paren, Expr),
|
||||
vis: Visibility,
|
||||
type_token: Token![type],
|
||||
ident: Ident,
|
||||
generics: PhantomConstAccessorGenerics,
|
||||
eq_token: Token![=],
|
||||
ty: Type,
|
||||
ty_is_dyn_size: Option<known_items::DynSize>,
|
||||
semi_token: Token![;],
|
||||
},
|
||||
pub(crate) struct ParsedTypeAlias {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
pub(crate) vis: Visibility,
|
||||
pub(crate) type_token: Token![type],
|
||||
pub(crate) ident: Ident,
|
||||
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
|
||||
pub(crate) eq_token: Token![=],
|
||||
pub(crate) ty: MaybeParsed<ParsedType, Type>,
|
||||
pub(crate) semi_token: Token![;],
|
||||
}
|
||||
|
||||
impl ParsedTypeAlias {
|
||||
fn ty_is_dyn_size(ty: &Type) -> Option<known_items::DynSize> {
|
||||
match ty {
|
||||
Type::Group(TypeGroup {
|
||||
group_token: _,
|
||||
elem,
|
||||
}) => Self::ty_is_dyn_size(elem),
|
||||
Type::Paren(TypeParen {
|
||||
paren_token: _,
|
||||
elem,
|
||||
}) => Self::ty_is_dyn_size(elem),
|
||||
Type::Path(syn::TypePath { qself: None, path }) => {
|
||||
known_items::DynSize::parse_path(path.clone()).ok()
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
fn parse_phantom_const_accessor(
|
||||
item: ItemType,
|
||||
mut errors: Errors,
|
||||
options: HdlAttr<ItemOptions, kw::hdl>,
|
||||
get: (kw::get, Paren, Expr),
|
||||
) -> syn::Result<Self> {
|
||||
let ItemType {
|
||||
attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} = item;
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
ref target,
|
||||
custom_bounds,
|
||||
no_static,
|
||||
no_runtime_generics,
|
||||
cmp_eq,
|
||||
get: _,
|
||||
} = options.body;
|
||||
if let Some((no_static,)) = no_static {
|
||||
errors.error(no_static, "no_static is not valid on type aliases");
|
||||
}
|
||||
if let Some((target, ..)) = target {
|
||||
errors.error(
|
||||
target,
|
||||
"target is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
if let Some((no_runtime_generics,)) = no_runtime_generics {
|
||||
errors.error(
|
||||
no_runtime_generics,
|
||||
"no_runtime_generics is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
|
||||
}
|
||||
if let Some((custom_bounds,)) = custom_bounds {
|
||||
errors.error(
|
||||
custom_bounds,
|
||||
"custom_bounds is not implemented on PhantomConstGet type aliases",
|
||||
);
|
||||
}
|
||||
let Some(generics) = PhantomConstAccessorGenerics::parse_opt(generics) else {
|
||||
errors.error(ident, "#[hdl(get(...))] type alias must be of the form:\ntype MyTypeGetter<P: PhantomConstGet<MyType>> = RetType;");
|
||||
errors.finish()?;
|
||||
unreachable!();
|
||||
};
|
||||
errors.finish()?;
|
||||
let ty_is_dyn_size = Self::ty_is_dyn_size(&ty);
|
||||
Ok(Self::PhantomConstAccessor {
|
||||
attrs,
|
||||
options,
|
||||
get,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty: *ty,
|
||||
ty_is_dyn_size,
|
||||
semi_token,
|
||||
})
|
||||
}
|
||||
fn parse(item: ItemType) -> syn::Result<Self> {
|
||||
let ItemType {
|
||||
mut attrs,
|
||||
|
|
@ -285,32 +49,10 @@ impl ParsedTypeAlias {
|
|||
custom_bounds,
|
||||
no_static,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq,
|
||||
ref mut get,
|
||||
} = options.body;
|
||||
if let Some(get) = get.take() {
|
||||
return Self::parse_phantom_const_accessor(
|
||||
ItemType {
|
||||
attrs,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
},
|
||||
errors,
|
||||
options,
|
||||
get,
|
||||
);
|
||||
}
|
||||
if let Some((no_static,)) = no_static {
|
||||
errors.error(no_static, "no_static is not valid on type aliases");
|
||||
}
|
||||
if let Some((cmp_eq,)) = cmp_eq {
|
||||
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
|
||||
}
|
||||
let generics = if custom_bounds.is_some() {
|
||||
MaybeParsed::Unrecognized(generics)
|
||||
} else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) {
|
||||
|
|
@ -320,7 +62,7 @@ impl ParsedTypeAlias {
|
|||
};
|
||||
let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors);
|
||||
errors.finish()?;
|
||||
Ok(Self::TypeAlias {
|
||||
Ok(Self {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
|
|
@ -336,155 +78,53 @@ impl ParsedTypeAlias {
|
|||
|
||||
impl ToTokens for ParsedTypeAlias {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
Self::TypeAlias {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} => {
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics,
|
||||
cmp_eq: _,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: generics.into(),
|
||||
eq_token: *eq_token,
|
||||
ty: Box::new(ty.clone().into()),
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
|
||||
(generics, ty, no_runtime_generics)
|
||||
{
|
||||
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
|
||||
ty.make_hdl_type_expr(context)
|
||||
})
|
||||
}
|
||||
}
|
||||
Self::PhantomConstAccessor {
|
||||
attrs,
|
||||
options,
|
||||
get: (_get_kw, _get_paren, get_expr),
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
ty_is_dyn_size,
|
||||
semi_token,
|
||||
} => {
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target: _,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics: _,
|
||||
cmp_eq: _,
|
||||
get: _,
|
||||
} = &options.body;
|
||||
let span = ident.span();
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {span=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
let type_param_ident = &generics.type_param.ident;
|
||||
let syn_generics = Generics::from(generics);
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: syn_generics.clone(),
|
||||
eq_token: *eq_token,
|
||||
ty: parse_quote_spanned! {span=>
|
||||
<#ty as ::fayalite::phantom_const::ReturnSelfUnchanged<#type_param_ident>>::Type
|
||||
},
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let generics_accumulation_ident =
|
||||
format_ident!("__{}__GenericsAccumulation", ident);
|
||||
ItemStruct {
    attrs: vec![
        common_derives(span),
        parse_quote_spanned! {span=>
            #[allow(non_camel_case_types)]
        },
    ],
    vis: vis.clone(),
    struct_token: Token![struct](span),
    ident: generics_accumulation_ident.clone(),
    generics: Generics::default(),
    fields: Fields::Unnamed(parse_quote_spanned! {span=>
        (())
    }),
    semi_token: Some(Token![;](span)),
}
.to_tokens(tokens);
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_upper_case_globals, dead_code)]
|
||||
#vis const #ident: #generics_accumulation_ident = #generics_accumulation_ident(());
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
let mut wrapped_in_const = WrappedInConst::new(tokens, span);
|
||||
let tokens = wrapped_in_const.inner();
|
||||
let (impl_generics, _type_generics, where_clause) = syn_generics.split_for_impl();
|
||||
let phantom_const_get_ty = &generics.type_param.phantom_const_get_bound.ty;
|
||||
let index_output = if let Some(ty_is_dyn_size) = ty_is_dyn_size {
|
||||
known_items::usize(ty_is_dyn_size.span).to_token_stream()
|
||||
} else {
|
||||
ty.to_token_stream()
|
||||
};
|
||||
quote_spanned! {span=>
|
||||
#[allow(non_upper_case_globals)]
|
||||
#[automatically_derived]
|
||||
impl #impl_generics ::fayalite::__std::ops::Index<#type_param_ident>
|
||||
for #generics_accumulation_ident
|
||||
#where_clause
|
||||
{
|
||||
type Output = #index_output;
|
||||
|
||||
fn index(&self, __param: #type_param_ident) -> &Self::Output {
|
||||
::fayalite::phantom_const::type_alias_phantom_const_get_helper::<#phantom_const_get_ty, #index_output>(
|
||||
__param,
|
||||
#get_expr,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
let Self {
|
||||
attrs,
|
||||
options,
|
||||
vis,
|
||||
type_token,
|
||||
ident,
|
||||
generics,
|
||||
eq_token,
|
||||
ty,
|
||||
semi_token,
|
||||
} = self;
|
||||
let ItemOptions {
|
||||
outline_generated: _,
|
||||
target,
|
||||
custom_bounds: _,
|
||||
no_static: _,
|
||||
no_runtime_generics,
|
||||
} = &options.body;
|
||||
let target = get_target(target, ident);
|
||||
let mut type_attrs = attrs.clone();
|
||||
type_attrs.push(parse_quote_spanned! {ident.span()=>
|
||||
#[allow(type_alias_bounds)]
|
||||
});
|
||||
ItemType {
|
||||
attrs: type_attrs,
|
||||
vis: vis.clone(),
|
||||
type_token: *type_token,
|
||||
ident: ident.clone(),
|
||||
generics: generics.into(),
|
||||
eq_token: *eq_token,
|
||||
ty: Box::new(ty.clone().into()),
|
||||
semi_token: *semi_token,
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
|
||||
(generics, ty, no_runtime_generics)
|
||||
{
|
||||
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
|
||||
ty.make_hdl_type_expr(context)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result<TokenStream> {
|
||||
let item = ParsedTypeAlias::parse(item)?;
|
||||
let outline_generated = match &item {
|
||||
ParsedTypeAlias::TypeAlias { options, .. }
|
||||
| ParsedTypeAlias::PhantomConstAccessor { options, .. } => options.body.outline_generated,
|
||||
};
|
||||
let outline_generated = item.options.body.outline_generated;
|
||||
let mut contents = item.to_token_stream();
|
||||
if outline_generated.is_some() {
|
||||
contents = crate::outline_generated(contents, "hdl-type-alias-");
@ -1,21 +1,21 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{Errors, HdlAttr, PairsIterExt, fold::impl_fold, kw};
|
||||
use crate::{fold::impl_fold, kw, Errors, HdlAttr, PairsIterExt};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, format_ident, quote_spanned};
|
||||
use quote::{format_ident, quote_spanned, ToTokens};
|
||||
use std::{collections::HashMap, fmt, mem};
|
||||
use syn::{
|
||||
AngleBracketedGenericArguments, Attribute, Block, ConstParam, Expr, ExprBlock, ExprGroup,
|
||||
ExprIndex, ExprParen, ExprPath, ExprTuple, Field, FieldMutability, Fields, FieldsNamed,
|
||||
FieldsUnnamed, GenericArgument, GenericParam, Generics, Ident, ImplGenerics, Index, ItemStruct,
|
||||
Path, PathArguments, PathSegment, PredicateType, QSelf, Stmt, Token, TraitBound, Turbofish,
|
||||
Type, TypeGenerics, TypeGroup, TypeParam, TypeParamBound, TypeParen, TypePath, TypeTuple,
|
||||
Visibility, WhereClause, WherePredicate,
|
||||
parse::{Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::{Brace, Bracket, Paren},
|
||||
AngleBracketedGenericArguments, Attribute, Block, ConstParam, Expr, ExprBlock, ExprGroup,
|
||||
ExprIndex, ExprParen, ExprPath, ExprTuple, Field, FieldMutability, Fields, FieldsNamed,
|
||||
FieldsUnnamed, GenericArgument, GenericParam, Generics, Ident, ImplGenerics, Index, ItemStruct,
|
||||
Path, PathArguments, PathSegment, PredicateType, QSelf, Stmt, Token, Turbofish, Type,
|
||||
TypeGenerics, TypeGroup, TypeParam, TypeParen, TypePath, TypeTuple, Visibility, WhereClause,
|
||||
WherePredicate,
|
||||
};
|
||||
|
||||
crate::options! {
|
||||
|
|
@ -26,8 +26,6 @@ crate::options! {
|
|||
CustomBounds(custom_bounds),
|
||||
NoStatic(no_static),
|
||||
NoRuntimeGenerics(no_runtime_generics),
|
||||
CmpEq(cmp_eq),
|
||||
Get(get, Expr),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -300,7 +298,7 @@ impl ParseTypes<Expr> for ParsedExpr {
|
|||
return Ok(ParsedExpr::Delimited(ParsedExprDelimited {
|
||||
delim: ExprDelimiter::Group(*group_token),
|
||||
expr: parser.parse(expr)?,
|
||||
}));
|
||||
}))
|
||||
}
|
||||
Expr::Paren(ExprParen {
|
||||
attrs,
|
||||
|
|
@ -310,7 +308,7 @@ impl ParseTypes<Expr> for ParsedExpr {
|
|||
return Ok(ParsedExpr::Delimited(ParsedExprDelimited {
|
||||
delim: ExprDelimiter::Paren(*paren_token),
|
||||
expr: parser.parse(expr)?,
|
||||
}));
|
||||
}))
|
||||
}
|
||||
Expr::Path(ExprPath {
|
||||
attrs,
|
||||
|
|
@ -1903,8 +1901,8 @@ pub(crate) mod known_items {
|
|||
use proc_macro2::{Ident, Span, TokenStream};
|
||||
use quote::ToTokens;
|
||||
use syn::{
|
||||
Path, PathArguments, PathSegment, Token,
|
||||
parse::{Parse, ParseStream},
|
||||
Path, PathArguments, PathSegment, Token,
|
||||
};
|
||||
|
||||
macro_rules! impl_known_item_body {
|
||||
|
|
@ -2046,7 +2044,6 @@ pub(crate) mod known_items {
|
|||
impl_known_item!(::fayalite::int::Size);
|
||||
impl_known_item!(::fayalite::int::UInt);
|
||||
impl_known_item!(::fayalite::int::UIntType);
|
||||
impl_known_item!(::fayalite::phantom_const::PhantomConstGet);
|
||||
impl_known_item!(::fayalite::reset::ResetType);
|
||||
impl_known_item!(::fayalite::ty::CanonicalType);
|
||||
impl_known_item!(::fayalite::ty::StaticType);
|
||||
|
|
@ -2065,174 +2062,6 @@ pub(crate) mod known_items {
|
|||
);
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct PhantomConstGetBound {
|
||||
pub(crate) phantom_const_get: known_items::PhantomConstGet,
|
||||
pub(crate) colon2_token: Option<Token![::]>,
|
||||
pub(crate) lt_token: Token![<],
|
||||
pub(crate) ty: Type,
|
||||
pub(crate) comma_token: Option<Token![,]>,
|
||||
pub(crate) gt_token: Token![>],
|
||||
}
|
||||
|
||||
impl PhantomConstGetBound {
|
||||
pub(crate) fn parse_path_with_arguments(path: Path) -> syn::Result<Result<Self, Path>> {
|
||||
match known_items::PhantomConstGet::parse_path_with_arguments(path) {
|
||||
Ok((phantom_const_get, arguments)) => {
|
||||
Self::parse_path_and_arguments(phantom_const_get, arguments).map(Ok)
|
||||
}
|
||||
Err(path) => Ok(Err(path)),
|
||||
}
|
||||
}
|
||||
pub(crate) fn parse_path_and_arguments(
|
||||
phantom_const_get: known_items::PhantomConstGet,
|
||||
arguments: PathArguments,
|
||||
) -> syn::Result<Self> {
|
||||
let error = |arguments: PathArguments, message: &str| {
|
||||
let mut path = phantom_const_get.path.clone();
|
||||
path.segments.last_mut().expect("known to exist").arguments = arguments;
|
||||
syn::Error::new_spanned(path, message)
|
||||
};
|
||||
match arguments {
|
||||
PathArguments::None => Err(error(arguments, "missing generics for PhantomConstGet")),
|
||||
PathArguments::AngleBracketed(AngleBracketedGenericArguments {
|
||||
colon2_token,
|
||||
lt_token,
|
||||
args,
|
||||
gt_token,
|
||||
}) => {
|
||||
let error = |args: Punctuated<GenericArgument, Token![,]>, message| {
|
||||
error(
|
||||
PathArguments::AngleBracketed(AngleBracketedGenericArguments {
|
||||
colon2_token,
|
||||
lt_token,
|
||||
args,
|
||||
gt_token,
|
||||
}),
|
||||
message,
|
||||
)
|
||||
};
|
||||
let mut args = args.into_pairs().peekable();
|
||||
let Some((generic_argument, comma_token)) = args.next().map(Pair::into_tuple)
|
||||
else {
|
||||
return Err(error(
|
||||
Default::default(),
|
||||
"PhantomConstGet takes a type argument but no generic arguments were supplied",
|
||||
));
|
||||
};
|
||||
if args.peek().is_some() {
|
||||
return Err(error(
|
||||
[Pair::new(generic_argument, comma_token)]
|
||||
.into_iter()
|
||||
.chain(args)
|
||||
.collect(),
|
||||
"PhantomConstGet takes a single type argument but too many generic arguments were supplied",
|
||||
));
|
||||
};
|
||||
let GenericArgument::Type(ty) = generic_argument else {
|
||||
return Err(error(
|
||||
Punctuated::from_iter([Pair::new(generic_argument, comma_token)]),
|
||||
"PhantomConstGet requires a type argument",
|
||||
));
|
||||
};
|
||||
Ok(Self {
|
||||
phantom_const_get,
|
||||
colon2_token,
|
||||
lt_token,
|
||||
ty,
|
||||
comma_token,
|
||||
gt_token,
|
||||
})
|
||||
}
|
||||
PathArguments::Parenthesized(_) => Err(error(
|
||||
arguments,
|
||||
"parenthetical generics are not valid for PhantomConstGet",
|
||||
)),
|
||||
}
|
||||
}
|
||||
pub(crate) fn parse_type_param_bound(
|
||||
bound: TypeParamBound,
|
||||
) -> syn::Result<Result<Self, TypeParamBound>> {
|
||||
let TypeParamBound::Trait(TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}) = bound
|
||||
else {
|
||||
return Ok(Err(bound));
|
||||
};
|
||||
Ok(match Self::parse_path_with_arguments(path)? {
|
||||
Ok(v) => Ok(v),
|
||||
Err(path) => Err(TypeParamBound::Trait(TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
})),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for PhantomConstGetBound {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
phantom_const_get,
|
||||
colon2_token,
|
||||
lt_token,
|
||||
ty,
|
||||
comma_token,
|
||||
gt_token,
|
||||
} = self;
|
||||
phantom_const_get.to_tokens(tokens);
|
||||
colon2_token.to_tokens(tokens);
|
||||
lt_token.to_tokens(tokens);
|
||||
ty.to_tokens(tokens);
|
||||
comma_token.to_tokens(tokens);
|
||||
gt_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstGetBound> for Path {
|
||||
fn from(value: PhantomConstGetBound) -> Self {
|
||||
let PhantomConstGetBound {
|
||||
phantom_const_get,
|
||||
colon2_token,
|
||||
lt_token,
|
||||
ty,
|
||||
comma_token,
|
||||
gt_token,
|
||||
} = value;
|
||||
let mut path = phantom_const_get.path;
|
||||
path.segments.last_mut().expect("known to exist").arguments =
|
||||
PathArguments::AngleBracketed(AngleBracketedGenericArguments {
|
||||
colon2_token,
|
||||
lt_token,
|
||||
args: FromIterator::from_iter([Pair::new(GenericArgument::Type(ty), comma_token)]),
|
||||
gt_token,
|
||||
});
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstGetBound> for TraitBound {
|
||||
fn from(value: PhantomConstGetBound) -> Self {
|
||||
let path = Path::from(value);
|
||||
TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomConstGetBound> for TypeParamBound {
|
||||
fn from(value: PhantomConstGetBound) -> Self {
|
||||
TraitBound::from(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_bounds {
|
||||
(
|
||||
#[struct = $struct_type:ident]
|
||||
|
|
@ -2240,21 +2069,11 @@ macro_rules! impl_bounds {
|
|||
$(
|
||||
$Variant:ident,
|
||||
)*
|
||||
$(
|
||||
#[has_body]
|
||||
$VariantHasBody:ident($variant_has_body_ty:ty),
|
||||
)*
|
||||
$(
|
||||
#[unknown]
|
||||
$Unknown:ident,
|
||||
)?
|
||||
}
|
||||
) => {
|
||||
#[derive(Clone, Debug)]
|
||||
$vis enum $enum_type {
|
||||
$($Variant(known_items::$Variant),)*
|
||||
$($VariantHasBody($variant_has_body_ty),)*
|
||||
$($Unknown(syn::TypeParamBound),)?
|
||||
}
|
||||
|
||||
$(impl From<known_items::$Variant> for $enum_type {
|
||||
|
|
@ -2263,69 +2082,32 @@ macro_rules! impl_bounds {
|
|||
}
|
||||
})*
|
||||
|
||||
$(impl From<$variant_has_body_ty> for $enum_type {
|
||||
fn from(v: $variant_has_body_ty) -> Self {
|
||||
Self::$VariantHasBody(v)
|
||||
}
|
||||
})*
|
||||
|
||||
impl ToTokens for $enum_type {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
$(Self::$Variant(v) => v.to_tokens(tokens),)*
|
||||
$(Self::$VariantHasBody(v) => v.to_tokens(tokens),)*
|
||||
$(Self::$Unknown(v) => v.to_tokens(tokens),)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl $enum_type {
|
||||
$vis fn parse_path_with_arguments(path: Path) -> syn::Result<Result<Self, Path>> {
|
||||
#![allow(unreachable_code)]
|
||||
$vis fn parse_path(path: Path) -> Result<Self, Path> {
|
||||
$(let path = match known_items::$Variant::parse_path(path) {
|
||||
Ok(v) => return Ok(Ok(Self::$Variant(v))),
|
||||
Ok(v) => return Ok(Self::$Variant(v)),
|
||||
Err(path) => path,
|
||||
};)*
|
||||
$(let path = match <$variant_has_body_ty>::parse_path_with_arguments(path)? {
|
||||
Ok(v) => return Ok(Ok(Self::$VariantHasBody(v))),
|
||||
Err(path) => path,
|
||||
};)*
|
||||
$(return Ok(Ok(Self::$Unknown(syn::TraitBound {
|
||||
paren_token: None,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path,
|
||||
}.into())));)?
|
||||
Ok(Err(path))
|
||||
}
|
||||
$vis fn parse_type_param_bound(mut type_param_bound: syn::TypeParamBound) -> syn::Result<Result<Self, syn::TypeParamBound>> {
|
||||
#![allow(unreachable_code)]
|
||||
if let syn::TypeParamBound::Trait(mut trait_bound) = type_param_bound {
|
||||
if let syn::TraitBound {
|
||||
paren_token: _,
|
||||
modifier: syn::TraitBoundModifier::None,
|
||||
lifetimes: None,
|
||||
path: _,
|
||||
} = trait_bound {
|
||||
match Self::parse_path_with_arguments(trait_bound.path)? {
|
||||
Ok(retval) => return Ok(Ok(retval)),
|
||||
Err(path) => trait_bound.path = path,
|
||||
}
|
||||
}
|
||||
type_param_bound = trait_bound.into();
|
||||
}
|
||||
$(return Ok(Ok(Self::$Unknown(type_param_bound)));)?
|
||||
Ok(Err(type_param_bound))
|
||||
Err(path)
|
||||
}
|
||||
}
|
||||
|
||||
impl Parse for $enum_type {
|
||||
fn parse(input: ParseStream) -> syn::Result<Self> {
|
||||
Self::parse_type_param_bound(input.parse()?)?
|
||||
.map_err(|type_param_bound| syn::Error::new_spanned(
|
||||
type_param_bound,
|
||||
format_args!("expected one of: {}", [$(stringify!($Variant),)* $(stringify!($VariantHasBody)),*].join(", ")),
|
||||
))
|
||||
Self::parse_path(Path::parse_mod_style(input)?).map_err(|path| {
|
||||
syn::Error::new_spanned(
|
||||
path,
|
||||
format_args!("expected one of: {}", [$(stringify!($Variant)),*].join(", ")),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2333,8 +2115,6 @@ macro_rules! impl_bounds {
|
|||
#[allow(non_snake_case)]
|
||||
$vis struct $struct_type {
|
||||
$($vis $Variant: Option<known_items::$Variant>,)*
|
||||
$($vis $VariantHasBody: Option<$variant_has_body_ty>,)*
|
||||
$($vis $Unknown: Vec<syn::TypeParamBound>,)?
|
||||
}
|
||||
|
||||
impl ToTokens for $struct_type {
|
||||
|
|
@ -2346,80 +2126,42 @@ macro_rules! impl_bounds {
|
|||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
$(if let Some(v) = &self.$VariantHasBody {
|
||||
separator.to_tokens(tokens);
|
||||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
$(for v in &self.$Unknown {
|
||||
separator.to_tokens(tokens);
|
||||
separator = Some(<Token![+]>::default());
|
||||
v.to_tokens(tokens);
|
||||
})*
|
||||
}
|
||||
}
|
||||
|
||||
const _: () = {
|
||||
#[derive(Clone, Debug)]
|
||||
#[allow(non_snake_case)]
|
||||
$vis struct Iter {
|
||||
$($Variant: Option<known_items::$Variant>,)*
|
||||
$($VariantHasBody: Option<$variant_has_body_ty>,)*
|
||||
$($Unknown: std::vec::IntoIter<syn::TypeParamBound>,)?
|
||||
}
|
||||
$vis struct Iter($vis $struct_type);
|
||||
|
||||
impl IntoIterator for $struct_type {
|
||||
type Item = $enum_type;
|
||||
type IntoIter = Iter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
Iter {
|
||||
$($Variant: self.$Variant,)*
|
||||
$($VariantHasBody: self.$VariantHasBody,)*
|
||||
$($Unknown: self.$Unknown.into_iter(),)?
|
||||
}
|
||||
Iter(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Iter {
|
||||
type Item = $enum_type;
|
||||
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
$(
|
||||
if let Some(value) = self.$Variant.take() {
|
||||
if let Some(value) = self.0.$Variant.take() {
|
||||
return Some($enum_type::$Variant(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$VariantHasBody.take() {
|
||||
return Some($enum_type::$VariantHasBody(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$Unknown.next() {
|
||||
return Some($enum_type::$Unknown(value));
|
||||
}
|
||||
)?
|
||||
None
|
||||
}
|
||||
|
||||
#[allow(unused_mut, unused_variables)]
|
||||
fn fold<B, F: FnMut(B, Self::Item) -> B>(mut self, mut init: B, mut f: F) -> B {
|
||||
$(
|
||||
if let Some(value) = self.$Variant.take() {
|
||||
if let Some(value) = self.0.$Variant.take() {
|
||||
init = f(init, $enum_type::$Variant(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$VariantHasBody.take() {
|
||||
init = f(init, $enum_type::$VariantHasBody(value));
|
||||
}
|
||||
)*
|
||||
$(
|
||||
if let Some(value) = self.$Unknown.next() {
|
||||
init = f(init, $enum_type::$Unknown(value));
|
||||
}
|
||||
)?
|
||||
init
|
||||
}
|
||||
}
|
||||
|
|
@ -2431,12 +2173,6 @@ macro_rules! impl_bounds {
|
|||
$($enum_type::$Variant(v) => {
|
||||
self.$Variant = Some(v);
|
||||
})*
|
||||
$($enum_type::$VariantHasBody(v) => {
|
||||
self.$VariantHasBody = Some(v);
|
||||
})*
|
||||
$($enum_type::$Unknown(v) => {
|
||||
self.$Unknown.push(v);
|
||||
})?
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -2455,10 +2191,6 @@ macro_rules! impl_bounds {
|
|||
$(if let Some(v) = v.$Variant {
|
||||
self.$Variant = Some(v);
|
||||
})*
|
||||
$(if let Some(v) = v.$VariantHasBody {
|
||||
self.$VariantHasBody = Some(v);
|
||||
})*
|
||||
$(self.$Unknown.extend(v.$Unknown);)*
|
||||
});
|
||||
}
|
||||
}
|
||||
|
|
@ -2512,10 +2244,6 @@ impl_bounds! {
|
|||
Size,
|
||||
StaticType,
|
||||
Type,
|
||||
#[has_body]
|
||||
PhantomConstGet(PhantomConstGetBound),
|
||||
#[unknown]
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2529,10 +2257,6 @@ impl_bounds! {
|
|||
ResetType,
|
||||
StaticType,
|
||||
Type,
|
||||
#[has_body]
|
||||
PhantomConstGet(PhantomConstGetBound),
|
||||
#[unknown]
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2546,8 +2270,6 @@ impl From<ParsedTypeBound> for ParsedBound {
|
|||
ParsedTypeBound::ResetType(v) => ParsedBound::ResetType(v),
|
||||
ParsedTypeBound::StaticType(v) => ParsedBound::StaticType(v),
|
||||
ParsedTypeBound::Type(v) => ParsedBound::Type(v),
|
||||
ParsedTypeBound::PhantomConstGet(v) => ParsedBound::PhantomConstGet(v),
|
||||
ParsedTypeBound::Unknown(v) => ParsedBound::Unknown(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2562,8 +2284,6 @@ impl From<ParsedTypeBounds> for ParsedBounds {
|
|||
ResetType,
|
||||
StaticType,
|
||||
Type,
|
||||
PhantomConstGet,
|
||||
Unknown,
|
||||
} = value;
|
||||
Self {
|
||||
BoolOrIntType,
|
||||
|
|
@ -2575,8 +2295,6 @@ impl From<ParsedTypeBounds> for ParsedBounds {
|
|||
Size: None,
|
||||
StaticType,
|
||||
Type,
|
||||
PhantomConstGet,
|
||||
Unknown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2612,11 +2330,6 @@ impl ParsedTypeBound {
|
|||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::Type(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::from(v)]),
|
||||
Self::PhantomConstGet(v) => ParsedTypeBounds::from_iter([
|
||||
ParsedTypeBound::from(v),
|
||||
ParsedTypeBound::Type(known_items::Type(span)),
|
||||
]),
|
||||
Self::Unknown(v) => ParsedTypeBounds::from_iter([ParsedTypeBound::Unknown(v)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2651,8 +2364,6 @@ impl From<ParsedSizeTypeBounds> for ParsedBounds {
|
|||
Size,
|
||||
StaticType: None,
|
||||
Type: None,
|
||||
PhantomConstGet: None,
|
||||
Unknown: vec![],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -2680,7 +2391,6 @@ impl ParsedBounds {
|
|||
fn categorize(self, errors: &mut Errors, span: Span) -> ParsedBoundsCategory {
|
||||
let mut type_bounds = None;
|
||||
let mut size_type_bounds = None;
|
||||
let mut unknown_bounds = vec![];
|
||||
self.into_iter().for_each(|bound| match bound.categorize() {
|
||||
ParsedBoundCategory::Type(bound) => {
|
||||
type_bounds
|
||||
|
|
@ -2692,37 +2402,15 @@ impl ParsedBounds {
|
|||
.get_or_insert_with(ParsedSizeTypeBounds::default)
|
||||
.extend([bound]);
|
||||
}
|
||||
ParsedBoundCategory::Unknown(bound) => unknown_bounds.push(bound),
|
||||
});
|
||||
match (type_bounds, size_type_bounds, unknown_bounds.is_empty()) {
|
||||
(None, None, true) => ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
match (type_bounds, size_type_bounds) {
|
||||
(None, None) => ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
Type: Some(known_items::Type(span)),
|
||||
..Default::default()
|
||||
}),
|
||||
(None, None, false) => {
|
||||
errors.error(
|
||||
unknown_bounds.remove(0),
|
||||
"unknown bounds: must use at least one known bound (such as `Type`) with any unknown bounds",
|
||||
);
|
||||
ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
Unknown: unknown_bounds,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
(None, Some(bounds), true) => ParsedBoundsCategory::SizeType(bounds),
|
||||
(None, Some(bounds), false) => {
|
||||
// TODO: implement
|
||||
errors.error(
|
||||
unknown_bounds.remove(0),
|
||||
"unknown bounds with `Size` bounds are not implemented",
|
||||
);
|
||||
ParsedBoundsCategory::SizeType(bounds)
|
||||
}
|
||||
(Some(bounds), None, _) => ParsedBoundsCategory::Type(ParsedTypeBounds {
|
||||
Unknown: unknown_bounds,
|
||||
..bounds
|
||||
}),
|
||||
(Some(type_bounds), Some(size_type_bounds), _) => {
|
||||
(None, Some(bounds)) => ParsedBoundsCategory::SizeType(bounds),
|
||||
(Some(bounds), None) => ParsedBoundsCategory::Type(bounds),
|
||||
(Some(type_bounds), Some(size_type_bounds)) => {
|
||||
errors.error(
|
||||
size_type_bounds
|
||||
.Size
|
||||
|
|
@ -2739,7 +2427,6 @@ impl ParsedBounds {
|
|||
pub(crate) enum ParsedBoundCategory {
|
||||
Type(ParsedTypeBound),
|
||||
SizeType(ParsedSizeTypeBound),
|
||||
Unknown(syn::TypeParamBound),
|
||||
}
|
||||
|
||||
impl ParsedBound {
|
||||
|
|
@ -2754,17 +2441,12 @@ impl ParsedBound {
|
|||
Self::Size(v) => ParsedBoundCategory::SizeType(ParsedSizeTypeBound::Size(v)),
|
||||
Self::StaticType(v) => ParsedBoundCategory::Type(ParsedTypeBound::StaticType(v)),
|
||||
Self::Type(v) => ParsedBoundCategory::Type(ParsedTypeBound::Type(v)),
|
||||
Self::PhantomConstGet(v) => {
|
||||
ParsedBoundCategory::Type(ParsedTypeBound::PhantomConstGet(v))
|
||||
}
|
||||
Self::Unknown(v) => ParsedBoundCategory::Unknown(v),
|
||||
}
|
||||
}
|
||||
fn implied_bounds(self) -> ParsedBounds {
|
||||
match self.categorize() {
|
||||
ParsedBoundCategory::Type(v) => v.implied_bounds().into(),
|
||||
ParsedBoundCategory::SizeType(v) => v.implied_bounds().into(),
|
||||
ParsedBoundCategory::Unknown(v) => ParsedBounds::from_iter([ParsedBound::Unknown(v)]),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -3642,9 +3324,8 @@ impl ParsedGenerics {
|
|||
| ParsedTypeBound::BundleType(_)
|
||||
| ParsedTypeBound::EnumType(_)
|
||||
| ParsedTypeBound::IntType(_)
|
||||
| ParsedTypeBound::ResetType(_)
|
||||
| ParsedTypeBound::PhantomConstGet(_) => {
|
||||
errors.error(bound, "bounds on mask types are not implemented");
|
||||
| ParsedTypeBound::ResetType(_) => {
|
||||
errors.error(bound, "bound on mask type not implemented");
|
||||
}
|
||||
ParsedTypeBound::StaticType(bound) => {
|
||||
if bounds.StaticType.is_none() {
|
||||
|
|
@ -3656,12 +3337,6 @@ impl ParsedGenerics {
|
|||
}
|
||||
}
|
||||
ParsedTypeBound::Type(_) => {}
|
||||
ParsedTypeBound::Unknown(_) => {
|
||||
errors.error(
|
||||
bound,
|
||||
"unknown bounds on mask types are not implemented",
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
bounds.add_implied_bounds();
|
||||
|
|
@ -3987,10 +3662,7 @@ pub(crate) trait AsTurbofish {
|
|||
}
|
||||
|
||||
impl AsTurbofish for TypeGenerics<'_> {
|
||||
type Turbofish<'a>
|
||||
= Turbofish<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
type Turbofish<'a> = Turbofish<'a> where Self: 'a;
|
||||
|
||||
fn as_turbofish(&self) -> Self::Turbofish<'_> {
|
||||
TypeGenerics::as_turbofish(self)
|
||||
|
|
@ -3998,8 +3670,7 @@ impl AsTurbofish for TypeGenerics<'_> {
|
|||
}
|
||||
|
||||
impl AsTurbofish for ParsedGenericsTypeGenerics<'_> {
|
||||
type Turbofish<'a>
|
||||
= ParsedGenericsTurbofish<'a>
|
||||
type Turbofish<'a> = ParsedGenericsTurbofish<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
@ -4050,18 +3721,15 @@ impl SplitForImpl for Generics {
|
|||
}
|
||||
|
||||
impl SplitForImpl for ParsedGenerics {
|
||||
type ImplGenerics<'a>
|
||||
= ParsedGenericsImplGenerics<'a>
|
||||
type ImplGenerics<'a> = ParsedGenericsImplGenerics<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
type TypeGenerics<'a>
|
||||
= ParsedGenericsTypeGenerics<'a>
|
||||
type TypeGenerics<'a> = ParsedGenericsTypeGenerics<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
type WhereClause<'a>
|
||||
= ParsedGenericsWhereClause<'a>
|
||||
type WhereClause<'a> = ParsedGenericsWhereClause<'a>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
@ -4278,8 +3946,7 @@ impl<P: ToTokens, U: ToTokens> ToTokens for MaybeParsed<P, U> {
|
|||
}
|
||||
|
||||
impl<P: AsTurbofish, U: AsTurbofish> AsTurbofish for MaybeParsed<P, U> {
|
||||
type Turbofish<'a>
|
||||
= MaybeParsed<P::Turbofish<'a>, U::Turbofish<'a>>
|
||||
type Turbofish<'a> = MaybeParsed<P::Turbofish<'a>, U::Turbofish<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
@ -4292,16 +3959,13 @@ impl<P: AsTurbofish, U: AsTurbofish> AsTurbofish for MaybeParsed<P, U> {
|
|||
}
|
||||
|
||||
impl<P: SplitForImpl, U: SplitForImpl> SplitForImpl for MaybeParsed<P, U> {
|
||||
type ImplGenerics<'a>
|
||||
= MaybeParsed<P::ImplGenerics<'a>, U::ImplGenerics<'a>>
|
||||
type ImplGenerics<'a> = MaybeParsed<P::ImplGenerics<'a>, U::ImplGenerics<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
type TypeGenerics<'a>
|
||||
= MaybeParsed<P::TypeGenerics<'a>, U::TypeGenerics<'a>>
|
||||
type TypeGenerics<'a> = MaybeParsed<P::TypeGenerics<'a>, U::TypeGenerics<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
type WhereClause<'a>
|
||||
= MaybeParsed<P::WhereClause<'a>, U::WhereClause<'a>>
|
||||
type WhereClause<'a> = MaybeParsed<P::WhereClause<'a>, U::WhereClause<'a>>
|
||||
where
|
||||
Self: 'a;
|
||||
|
||||
|
|
|
|||
|
|
@ -2,13 +2,13 @@
|
|||
// See Notices.txt for copyright information
|
||||
#![cfg_attr(test, recursion_limit = "512")]
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, quote};
|
||||
use quote::{quote, ToTokens};
|
||||
use std::{
|
||||
collections::{HashMap, hash_map::Entry},
|
||||
collections::{hash_map::Entry, HashMap},
|
||||
io::{ErrorKind, Write},
|
||||
};
|
||||
use syn::{
|
||||
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token, bracketed,
|
||||
bracketed,
|
||||
ext::IdentExt,
|
||||
parenthesized,
|
||||
parse::{Parse, ParseStream, Parser},
|
||||
|
|
@ -16,6 +16,7 @@ use syn::{
|
|||
punctuated::{Pair, Punctuated},
|
||||
spanned::Spanned,
|
||||
token::{Bracket, Paren},
|
||||
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token,
|
||||
};
|
||||
|
||||
mod fold;
|
||||
|
|
@ -66,21 +67,18 @@ mod kw {
|
|||
}
|
||||
|
||||
custom_keyword!(__evaluated_cfgs);
|
||||
custom_keyword!(add_platform_io);
|
||||
custom_keyword!(all);
|
||||
custom_keyword!(any);
|
||||
custom_keyword!(cfg);
|
||||
custom_keyword!(cfg_attr);
|
||||
custom_keyword!(clock_domain);
|
||||
custom_keyword!(cmp_eq);
|
||||
custom_keyword!(connect_inexact);
|
||||
custom_keyword!(custom_bounds);
|
||||
custom_keyword!(flip);
|
||||
custom_keyword!(get);
|
||||
custom_keyword!(hdl);
|
||||
custom_keyword!(hdl_module);
|
||||
custom_keyword!(incomplete_wire);
|
||||
custom_keyword!(input);
|
||||
custom_keyword!(incomplete_wire);
|
||||
custom_keyword!(instance);
|
||||
custom_keyword!(m);
|
||||
custom_keyword!(memory);
|
||||
|
|
@ -94,7 +92,6 @@ mod kw {
|
|||
custom_keyword!(output);
|
||||
custom_keyword!(reg_builder);
|
||||
custom_keyword!(reset);
|
||||
custom_keyword!(sim);
|
||||
custom_keyword!(skip);
|
||||
custom_keyword!(target);
|
||||
custom_keyword!(wire);
|
||||
|
|
|
|||
|
|
@ -1,20 +1,19 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
hdl_type_common::{ParsedGenerics, SplitForImpl},
|
||||
kw,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO},
|
||||
options,
|
||||
module::transform_body::{HdlLet, HdlLetKindIO},
|
||||
options, Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
use proc_macro2::TokenStream;
|
||||
use quote::{ToTokens, format_ident, quote, quote_spanned};
|
||||
use quote::{format_ident, quote, quote_spanned, ToTokens};
|
||||
use std::collections::HashSet;
|
||||
use syn::{
|
||||
parse_quote,
|
||||
visit::{visit_pat, Visit},
|
||||
Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct,
|
||||
LifetimeParam, ReturnType, Signature, TypeParam, Visibility, WhereClause, WherePredicate,
|
||||
parse_quote,
|
||||
visit::{Visit, visit_pat},
|
||||
};
|
||||
|
||||
mod transform_body;
|
||||
|
|
@ -39,7 +38,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res
|
|||
if name == "m" {
|
||||
Err(Error::new_spanned(
|
||||
name,
|
||||
"name conflicts with implicit `m: &ModuleBuilder`",
|
||||
"name conflicts with implicit `m: &mut ModuleBuilder<_>`",
|
||||
))
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
@ -67,7 +66,7 @@ struct ModuleFnModule {
|
|||
vis: Visibility,
|
||||
sig: Signature,
|
||||
block: Box<Block>,
|
||||
struct_generics: Option<ParsedGenerics>,
|
||||
struct_generics: ParsedGenerics,
|
||||
the_struct: TokenStream,
|
||||
}
|
||||
|
||||
|
|
@ -290,7 +289,7 @@ impl ModuleFn {
|
|||
paren_token,
|
||||
body,
|
||||
} => {
|
||||
debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty()));
|
||||
debug_assert!(io.is_empty());
|
||||
return Ok(Self(ModuleFnImpl::Fn {
|
||||
attrs,
|
||||
config_options: HdlAttr {
|
||||
|
|
@ -322,21 +321,6 @@ impl ModuleFn {
|
|||
body,
|
||||
},
|
||||
};
|
||||
let io = match io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => io,
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
return Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
|
||||
attrs,
|
||||
config_options,
|
||||
module_kind: module_kind.unwrap(),
|
||||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics: None,
|
||||
the_struct: TokenStream::new(),
|
||||
})));
|
||||
}
|
||||
};
|
||||
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
|
||||
|
|
@ -379,7 +363,7 @@ impl ModuleFn {
|
|||
vis,
|
||||
sig,
|
||||
block,
|
||||
struct_generics: Some(struct_generics),
|
||||
struct_generics,
|
||||
the_struct,
|
||||
})))
|
||||
}
|
||||
|
|
@ -393,7 +377,7 @@ impl ModuleFn {
|
|||
module_kind,
|
||||
vis,
|
||||
sig,
|
||||
mut block,
|
||||
block,
|
||||
struct_generics,
|
||||
the_struct,
|
||||
} = match self.0 {
|
||||
|
|
@ -448,24 +432,13 @@ impl ModuleFn {
|
|||
ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal },
|
||||
};
|
||||
let fn_name = &outer_sig.ident;
|
||||
let struct_ty = match struct_generics {
|
||||
Some(struct_generics) => {
|
||||
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
quote! {#fn_name #struct_type_generics}
|
||||
}
|
||||
None => quote! {::fayalite::bundle::Bundle},
|
||||
};
|
||||
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
|
||||
struct_generics.split_for_impl();
|
||||
let struct_ty = quote! {#fn_name #struct_type_generics};
|
||||
body_sig.ident = parse_quote! {__body};
|
||||
body_sig
|
||||
.inputs
|
||||
.insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder });
|
||||
block.stmts.insert(
|
||||
0,
|
||||
parse_quote! {
|
||||
let _ = m;
|
||||
},
|
||||
);
|
||||
let body_fn = ItemFn {
|
||||
attrs: vec![],
|
||||
vis: Visibility::Inherited,
|
||||
|
|
|
|||
|
|
@ -1,45 +1,36 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr,
|
||||
fold::{DoFold, impl_fold},
|
||||
fold::{impl_fold, DoFold},
|
||||
hdl_type_common::{
|
||||
ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser, known_items,
|
||||
known_items, ParseFailed, ParseTypes, ParsedGenerics, ParsedType, TypesParser,
|
||||
},
|
||||
is_hdl_attr, kw,
|
||||
module::{ModuleIO, ModuleIOKind, ModuleKind, check_name_conflicts_with_module_builder},
|
||||
options,
|
||||
module::{check_name_conflicts_with_module_builder, ModuleIO, ModuleIOKind, ModuleKind},
|
||||
options, Errors, HdlAttr,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, quote, quote_spanned};
|
||||
use quote::{quote, quote_spanned, ToTokens};
|
||||
use std::{borrow::Borrow, convert::Infallible};
|
||||
use syn::{
|
||||
Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary,
|
||||
GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp,
|
||||
fold::{Fold, fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt},
|
||||
fold::{fold_expr, fold_expr_lit, fold_expr_unary, fold_local, fold_stmt, Fold},
|
||||
parenthesized,
|
||||
parse::{Parse, ParseStream},
|
||||
parse::{Nothing, Parse, ParseStream},
|
||||
parse_quote, parse_quote_spanned,
|
||||
spanned::Spanned,
|
||||
token::Paren,
|
||||
Attribute, Block, Error, Expr, ExprIf, ExprLet, ExprLit, ExprRepeat, ExprUnary,
|
||||
GenericArgument, Ident, Item, Lit, LitStr, Local, LocalInit, Pat, Token, Type, UnOp,
|
||||
};
|
||||
|
||||
mod expand_aggregate_literals;
|
||||
mod expand_match;
|
||||
|
||||
options! {
|
||||
#[options = ExprOptions]
|
||||
pub(crate) enum ExprOption {
|
||||
Sim(sim),
|
||||
}
|
||||
}
|
||||
|
||||
options! {
|
||||
pub(crate) enum LetFnKind {
|
||||
Input(input),
|
||||
Output(output),
|
||||
AddPlatformIO(add_platform_io),
|
||||
Instance(instance),
|
||||
RegBuilder(reg_builder),
|
||||
Wire(wire),
|
||||
|
|
@ -217,49 +208,6 @@ impl HdlLetKindToTokens for HdlLetKindInstance {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct HdlLetKindAddPlatformIO {
|
||||
pub(crate) m: kw::m,
|
||||
pub(crate) dot_token: Token![.],
|
||||
pub(crate) add_platform_io: kw::add_platform_io,
|
||||
pub(crate) paren: Paren,
|
||||
pub(crate) platform_io_builder: Box<Expr>,
|
||||
}
|
||||
|
||||
impl ParseTypes<Self> for HdlLetKindAddPlatformIO {
|
||||
fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result<Self, ParseFailed> {
|
||||
Ok(input.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
struct HdlLetKindAddPlatformIO<> {
|
||||
m: kw::m,
|
||||
dot_token: Token![.],
|
||||
add_platform_io: kw::add_platform_io,
|
||||
paren: Paren,
|
||||
platform_io_builder: Box<Expr>,
|
||||
}
|
||||
}
|
||||
|
||||
impl HdlLetKindToTokens for HdlLetKindAddPlatformIO {
|
||||
fn ty_to_tokens(&self, _tokens: &mut TokenStream) {}
|
||||
|
||||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren,
|
||||
platform_io_builder,
|
||||
} = self;
|
||||
m.to_tokens(tokens);
|
||||
dot_token.to_tokens(tokens);
|
||||
add_platform_io.to_tokens(tokens);
|
||||
paren.surround(tokens, |tokens| platform_io_builder.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub(crate) struct RegBuilderClockDomain {
|
||||
pub(crate) dot_token: Token![.],
|
||||
|
|
@ -755,7 +703,6 @@ impl HdlLetKindMemory {
|
|||
#[derive(Clone, Debug)]
|
||||
pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
AddPlatformIO(HdlLetKindAddPlatformIO),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
|
|
@ -766,7 +713,6 @@ pub(crate) enum HdlLetKind<IOType = ParsedType> {
|
|||
impl_fold! {
|
||||
enum HdlLetKind<IOType,> {
|
||||
IO(HdlLetKindIO<ModuleIOKind, IOType>),
|
||||
AddPlatformIO(HdlLetKindAddPlatformIO),
|
||||
Incomplete(HdlLetKindIncomplete),
|
||||
Instance(HdlLetKindInstance),
|
||||
RegBuilder(HdlLetKindRegBuilder),
|
||||
|
|
@ -782,9 +728,6 @@ impl<T: ParseTypes<I>, I> ParseTypes<HdlLetKind<I>> for HdlLetKind<T> {
|
|||
) -> Result<Self, ParseFailed> {
|
||||
match input {
|
||||
HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO),
|
||||
HdlLetKind::AddPlatformIO(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::AddPlatformIO)
|
||||
}
|
||||
HdlLetKind::Incomplete(input) => {
|
||||
ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete)
|
||||
}
|
||||
|
|
@ -910,23 +853,6 @@ impl HdlLetKindParse for HdlLetKind<Type> {
|
|||
ModuleIOKind::Output(output),
|
||||
)
|
||||
.map(Self::IO),
|
||||
LetFnKind::AddPlatformIO((add_platform_io,)) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
parsed_ty.1,
|
||||
"type annotation not allowed for instance",
|
||||
));
|
||||
}
|
||||
let (m, dot_token) = unwrap_m_dot(m_dot, kind)?;
|
||||
let paren_contents;
|
||||
Ok(Self::AddPlatformIO(HdlLetKindAddPlatformIO {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren: parenthesized!(paren_contents in input),
|
||||
platform_io_builder: paren_contents.call(parse_single_fn_arg)?,
|
||||
}))
|
||||
}
|
||||
LetFnKind::Instance((instance,)) => {
|
||||
if let Some(parsed_ty) = parsed_ty {
|
||||
return Err(Error::new_spanned(
|
||||
|
|
@ -1002,7 +928,6 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn ty_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::AddPlatformIO(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.ty_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens),
|
||||
|
|
@ -1014,7 +939,6 @@ impl HdlLetKindToTokens for HdlLetKind {
|
|||
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
|
||||
match self {
|
||||
HdlLetKind::IO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::AddPlatformIO(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::Instance(v) => v.expr_to_tokens(tokens),
|
||||
HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens),
|
||||
@@ -1028,7 +952,7 @@ with_debug_clone_and_fold! {
#[allow(dead_code)]
|
||||
pub(crate) struct HdlLet<Kind = HdlLetKind> {
|
||||
pub(crate) attrs: Vec<Attribute>,
|
||||
pub(crate) hdl_attr: HdlAttr<syn::parse::Nothing, kw::hdl>,
|
||||
pub(crate) hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
pub(crate) let_token: Token![let],
|
||||
pub(crate) mut_token: Option<Token![mut]>,
|
||||
pub(crate) name: Ident,
|
||||
@@ -1185,7 +1109,7 @@ fn parse_quote_let_pat<T, R: ToTokens, C: Borrow<Token![:]>>(
}
|
||||
}
|
||||
|
||||
pub(crate) fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
fn wrap_ty_with_expr(ty: impl ToTokens) -> Type {
|
||||
parse_quote_spanned! {ty.span()=>
|
||||
::fayalite::expr::Expr<#ty>
|
||||
}
|
||||
@@ -1217,7 +1141,7 @@ impl<T: ToString> ToTokens for ImplicitName<T> {
struct Visitor<'a> {
|
||||
module_kind: Option<ModuleKind>,
|
||||
errors: Errors,
|
||||
io: ModuleIOOrAddPlatformIO,
|
||||
io: Vec<ModuleIO>,
|
||||
block_depth: usize,
|
||||
parsed_generics: &'a ParsedGenerics,
|
||||
}
|
||||
@@ -1249,7 +1173,7 @@ impl Visitor<'_> {
Some(_) => {}
|
||||
}
|
||||
}
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<ExprOptions, kw::hdl>, expr_if: ExprIf) -> Expr {
|
||||
fn process_hdl_if(&mut self, hdl_attr: HdlAttr<Nothing, kw::hdl>, expr_if: ExprIf) -> Expr {
|
||||
let ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
@@ -1257,10 +1181,10 @@ impl Visitor<'_> {
then_branch,
|
||||
else_branch,
|
||||
} = expr_if;
|
||||
let (else_token, else_expr) = else_branch.unzip();
|
||||
let else_expr = else_expr.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => Box::new(self.process_hdl_if(hdl_attr.clone(), expr_if)),
|
||||
_ => else_expr,
|
||||
self.require_normal_module_or_fn(if_token);
|
||||
let else_expr = else_branch.unzip().1.map(|else_expr| match *else_expr {
|
||||
Expr::If(expr_if) => self.process_hdl_if(hdl_attr.clone(), expr_if),
|
||||
expr => expr,
|
||||
});
|
||||
if let Expr::Let(ExprLet {
|
||||
attrs: let_attrs,
|
||||
@@ -1282,19 +1206,7 @@ impl Visitor<'_> {
},
|
||||
);
|
||||
}
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
ExprIf {
|
||||
attrs,
|
||||
if_token,
|
||||
cond: parse_quote_spanned! {if_token.span=>
|
||||
*::fayalite::sim::value::SimValue::<::fayalite::int::Bool>::value(&::fayalite::sim::value::ToSimValue::into_sim_value(#cond))
|
||||
},
|
||||
then_branch,
|
||||
else_branch: else_token.zip(else_expr),
|
||||
}
|
||||
.into()
|
||||
} else if let Some(else_expr) = else_expr {
|
||||
if let Some(else_expr) = else_expr {
|
||||
parse_quote_spanned! {if_token.span=>
|
||||
#(#attrs)*
|
||||
{
|
||||
@@ -1357,81 +1269,7 @@ impl Visitor<'_> {
}),
|
||||
semi_token: hdl_let.semi_token,
|
||||
};
|
||||
match &mut self.io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => io.push(hdl_let),
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
self.errors.error(
|
||||
kind,
|
||||
"can't have other inputs/outputs in a module using m.add_platform_io()",
|
||||
);
|
||||
}
|
||||
}
|
||||
let_stmt
|
||||
}
|
||||
fn process_hdl_let_add_platform_io(
|
||||
&mut self,
|
||||
hdl_let: HdlLet<HdlLetKindAddPlatformIO>,
|
||||
) -> Local {
|
||||
let HdlLet {
|
||||
mut attrs,
|
||||
hdl_attr: _,
|
||||
let_token,
|
||||
mut_token,
|
||||
ref name,
|
||||
eq_token,
|
||||
kind:
|
||||
HdlLetKindAddPlatformIO {
|
||||
m,
|
||||
dot_token,
|
||||
add_platform_io,
|
||||
paren,
|
||||
platform_io_builder,
|
||||
},
|
||||
semi_token,
|
||||
} = hdl_let;
|
||||
let mut expr = quote! {#m #dot_token #add_platform_io};
|
||||
paren.surround(&mut expr, |expr| {
|
||||
let name_str = ImplicitName {
|
||||
name,
|
||||
span: name.span(),
|
||||
};
|
||||
quote_spanned! {name.span()=>
|
||||
#name_str, #platform_io_builder
|
||||
}
|
||||
.to_tokens(expr);
|
||||
});
|
||||
self.require_module(add_platform_io);
|
||||
attrs.push(parse_quote_spanned! {let_token.span=>
|
||||
#[allow(unused_variables)]
|
||||
});
|
||||
let let_stmt = Local {
|
||||
attrs,
|
||||
let_token,
|
||||
pat: parse_quote! { #mut_token #name },
|
||||
init: Some(LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote! { #expr },
|
||||
diverge: None,
|
||||
}),
|
||||
semi_token,
|
||||
};
|
||||
match &mut self.io {
|
||||
ModuleIOOrAddPlatformIO::ModuleIO(io) => {
|
||||
for io in io {
|
||||
self.errors.error(
|
||||
io.kind.kind,
|
||||
"can't have other inputs/outputs in a module using m.add_platform_io()",
|
||||
);
|
||||
}
|
||||
}
|
||||
ModuleIOOrAddPlatformIO::AddPlatformIO => {
|
||||
self.errors.error(
|
||||
add_platform_io,
|
||||
"can't use m.add_platform_io() more than once in a single module",
|
||||
);
|
||||
}
|
||||
}
|
||||
self.io = ModuleIOOrAddPlatformIO::AddPlatformIO;
|
||||
self.io.push(hdl_let);
|
||||
let_stmt
|
||||
}
|
||||
fn process_hdl_let_instance(&mut self, hdl_let: HdlLet<HdlLetKindInstance>) -> Local {
|
||||
@@ -1652,7 +1490,6 @@ impl Visitor<'_> {
}
|
||||
the_match! {
|
||||
IO => process_hdl_let_io,
|
||||
AddPlatformIO => process_hdl_let_add_platform_io,
|
||||
Incomplete => process_hdl_let_incomplete,
|
||||
Instance => process_hdl_let_instance,
|
||||
RegBuilder => process_hdl_let_reg_builder,
|
||||
@@ -1749,7 +1586,7 @@ impl Visitor<'_> {
}
|
||||
}
|
||||
|
||||
pub(crate) fn empty_let() -> Local {
|
||||
fn empty_let() -> Local {
|
||||
Local {
|
||||
attrs: vec![],
|
||||
let_token: Default::default(),
|
||||
@@ -1831,42 +1668,20 @@ impl Fold for Visitor<'_> {
Repeat => process_hdl_repeat,
|
||||
Struct => process_hdl_struct,
|
||||
Tuple => process_hdl_tuple,
|
||||
MethodCall => process_hdl_method_call,
|
||||
Call => process_hdl_call,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
fn fold_local(&mut self, let_stmt: Local) -> Local {
|
||||
match self
|
||||
.errors
|
||||
.ok(HdlAttr::<ExprOptions, kw::hdl>::parse_and_leave_attr(
|
||||
.ok(HdlAttr::<Nothing, kw::hdl>::parse_and_leave_attr(
|
||||
&let_stmt.attrs,
|
||||
)) {
|
||||
None => return empty_let(),
|
||||
Some(None) => return fold_local(self, let_stmt),
|
||||
Some(Some(HdlAttr { .. })) => {}
|
||||
};
|
||||
let mut pat = &let_stmt.pat;
|
||||
if let Pat::Type(pat_type) = pat {
|
||||
pat = &pat_type.pat;
|
||||
}
|
||||
let Pat::Ident(syn::PatIdent {
|
||||
attrs: _,
|
||||
by_ref: None,
|
||||
mutability: _,
|
||||
ident: _,
|
||||
subpat: None,
|
||||
}) = pat
|
||||
else {
|
||||
let hdl_attr =
|
||||
HdlAttr::<ExprOptions, kw::hdl>::parse_and_take_attr(&mut let_stmt.attrs)
|
||||
.ok()
|
||||
.flatten()
|
||||
.expect("already checked above");
|
||||
let let_stmt = fold_local(self, let_stmt);
|
||||
return self.process_hdl_let_pat(hdl_attr, let_stmt);
|
||||
};
|
||||
let hdl_let = syn::parse2::<HdlLet<HdlLetKind<Type>>>(let_stmt.into_token_stream());
|
||||
let Some(hdl_let) = self.errors.ok(hdl_let) else {
|
||||
return empty_let();
|
||||
@@ -1896,20 +1711,15 @@ impl Fold for Visitor<'_> {
}
|
||||
}
|
||||
|
||||
pub(crate) enum ModuleIOOrAddPlatformIO {
|
||||
ModuleIO(Vec<ModuleIO>),
|
||||
AddPlatformIO,
|
||||
}
|
||||
|
||||
pub(crate) fn transform_body(
|
||||
module_kind: Option<ModuleKind>,
|
||||
mut body: Box<Block>,
|
||||
parsed_generics: &ParsedGenerics,
|
||||
) -> syn::Result<(Box<Block>, ModuleIOOrAddPlatformIO)> {
|
||||
) -> syn::Result<(Box<Block>, Vec<ModuleIO>)> {
|
||||
let mut visitor = Visitor {
|
||||
module_kind,
|
||||
errors: Errors::new(),
|
||||
io: ModuleIOOrAddPlatformIO::ModuleIO(vec![]),
|
||||
io: vec![],
|
||||
block_depth: 0,
|
||||
parsed_generics,
|
||||
};
|
||||
@@ -1,102 +1,45 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
HdlAttr, kw,
|
||||
module::transform_body::{
|
||||
ExprOptions, Visitor,
|
||||
expand_match::{EnumPath, parse_enum_path},
|
||||
},
|
||||
};
|
||||
use crate::{kw, module::transform_body::Visitor, HdlAttr};
|
||||
use quote::{format_ident, quote_spanned};
|
||||
use std::mem;
|
||||
use syn::{
|
||||
Expr, ExprArray, ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat,
|
||||
ExprStruct, ExprTuple, FieldValue, Token, TypePath, parse_quote_spanned,
|
||||
punctuated::Punctuated, spanned::Spanned, token::Paren,
|
||||
parse::Nothing, parse_quote, parse_quote_spanned, spanned::Spanned, Expr, ExprArray, ExprPath,
|
||||
ExprRepeat, ExprStruct, ExprTuple, FieldValue, TypePath,
|
||||
};
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_array(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
mut expr_array: ExprArray,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
if sim.is_some() {
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array)
|
||||
}
|
||||
} else {
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_array)
|
||||
}
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
for elem in &mut expr_array.elems {
|
||||
*elem = parse_quote_spanned! {elem.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#elem))
|
||||
};
|
||||
}
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_array)}
|
||||
}
|
||||
pub(crate) fn process_hdl_repeat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
mut expr_repeat: ExprRepeat,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
let repeated_value = &expr_repeat.expr;
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
if sim.is_some() {
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value))
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat)
|
||||
}
|
||||
} else {
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
};
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_repeat)
|
||||
}
|
||||
}
|
||||
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
|
||||
};
|
||||
parse_quote! {::fayalite::expr::ToExpr::to_expr(&#expr_repeat)}
|
||||
}
|
||||
pub(crate) fn process_hdl_struct(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_struct: ExprStruct,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
expr_struct: ExprStruct,
|
||||
) -> Expr {
|
||||
self.require_normal_module_or_fn(&hdl_attr);
|
||||
let name_span = expr_struct.path.segments.last().unwrap().ident.span();
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
let ty_path = TypePath {
|
||||
qself: expr_struct.qself.take(),
|
||||
path: expr_struct.path,
|
||||
};
|
||||
expr_struct.path = parse_quote_spanned! {name_span=>
|
||||
__SimValue::<#ty_path>
|
||||
};
|
||||
for field in &mut expr_struct.fields {
|
||||
let expr = &field.expr;
|
||||
field.expr = parse_quote_spanned! {field.member.span()=>
|
||||
::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr))
|
||||
};
|
||||
}
|
||||
return parse_quote_spanned! {name_span=>
|
||||
{
|
||||
type __SimValue<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct);
|
||||
value
|
||||
}
|
||||
};
|
||||
}
|
||||
let builder_ident = format_ident!("__builder", span = name_span);
|
||||
let empty_builder = if expr_struct.qself.is_some()
|
||||
|| expr_struct
|
||||
@@ -148,126 +91,12 @@ impl Visitor<'_> {
}
|
||||
pub(crate) fn process_hdl_tuple(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_tuple: ExprTuple,
|
||||
hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
expr_tuple: ExprTuple,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if sim.is_some() {
|
||||
for element in &mut expr_tuple.elems {
|
||||
*element = parse_quote_spanned! {element.span()=>
|
||||
&(#element)
|
||||
};
|
||||
}
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple)
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
self.require_normal_module_or_fn(hdl_attr);
|
||||
parse_quote_spanned! {expr_tuple.span()=>
|
||||
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_call: ExprCall,
|
||||
) -> Expr {
|
||||
let span = hdl_attr.kw.span;
|
||||
let mut func = &mut *expr_call.func;
|
||||
let EnumPath {
|
||||
variant_path: _,
|
||||
enum_path,
|
||||
variant_name,
|
||||
} = loop {
|
||||
match func {
|
||||
Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. }) => {
|
||||
func = &mut **expr;
|
||||
}
|
||||
Expr::Path(_) => {
|
||||
let Expr::Path(ExprPath { attrs, qself, path }) =
|
||||
mem::replace(func, Expr::PLACEHOLDER)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
match parse_enum_path(TypePath { qself, path }) {
|
||||
Ok(path) => break path,
|
||||
Err(path) => {
|
||||
self.errors.error(&path, "unsupported enum variant path");
|
||||
let TypePath { qself, path } = path;
|
||||
*func = ExprPath { attrs, qself, path }.into();
|
||||
return expr_call.into();
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
self.errors.error(
|
||||
&expr_call.func,
|
||||
"#[hdl] function call -- function must be a possibly-parenthesized path",
|
||||
);
|
||||
return expr_call.into();
|
||||
}
|
||||
}
|
||||
};
|
||||
self.process_hdl_method_call(
|
||||
hdl_attr,
|
||||
ExprMethodCall {
|
||||
attrs: expr_call.attrs,
|
||||
receiver: parse_quote_spanned! {span=>
|
||||
<#enum_path as ::fayalite::ty::StaticType>::TYPE
|
||||
},
|
||||
dot_token: Token![.](span),
|
||||
method: variant_name,
|
||||
turbofish: None,
|
||||
paren_token: expr_call.paren_token,
|
||||
args: expr_call.args,
|
||||
},
|
||||
)
|
||||
}
|
||||
pub(crate) fn process_hdl_method_call(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut expr_method_call: ExprMethodCall,
|
||||
) -> Expr {
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
let span = hdl_attr.kw.span;
|
||||
// remove any number of groups and up to one paren
|
||||
let mut receiver = &mut *expr_method_call.receiver;
|
||||
let mut has_group = false;
|
||||
let receiver = loop {
|
||||
match receiver {
|
||||
Expr::Group(ExprGroup { expr, .. }) => {
|
||||
has_group = true;
|
||||
receiver = expr;
|
||||
}
|
||||
Expr::Paren(ExprParen { expr, .. }) => break &mut **expr,
|
||||
receiver @ Expr::Path(_) => break receiver,
|
||||
_ => {
|
||||
if !has_group {
|
||||
self.errors.error(
|
||||
&expr_method_call.receiver,
|
||||
"#[hdl] on a method call needs parenthesized receiver",
|
||||
);
|
||||
}
|
||||
break &mut *expr_method_call.receiver;
|
||||
}
|
||||
}
|
||||
};
|
||||
let func = if sim.is_some() {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::enum_type_to_sim_builder
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
::fayalite::enum_::assert_is_enum_type
|
||||
}
|
||||
};
|
||||
*expr_method_call.receiver = ExprCall {
|
||||
attrs: vec![],
|
||||
func,
|
||||
paren_token: Paren(span),
|
||||
args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]),
|
||||
}
|
||||
.into();
|
||||
expr_method_call.into()
|
||||
}
|
||||
}
|
||||
@@ -1,121 +1,24 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
fold::{DoFold, impl_fold},
|
||||
fold::{impl_fold, DoFold},
|
||||
kw,
|
||||
module::transform_body::{
|
||||
ExprOptions, Visitor, empty_let, with_debug_clone_and_fold, wrap_ty_with_expr,
|
||||
},
|
||||
module::transform_body::{with_debug_clone_and_fold, Visitor},
|
||||
Errors, HdlAttr, PairsIterExt,
|
||||
};
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, TokenStreamExt, format_ident, quote_spanned};
|
||||
use std::collections::BTreeSet;
|
||||
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
|
||||
use syn::{
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Local, Member, Pat, PatIdent, PatOr,
|
||||
PatParen, PatPath, PatRest, PatStruct, PatTuple, PatTupleStruct, PatWild, Path, PathSegment,
|
||||
Token, TypePath,
|
||||
fold::{Fold, fold_arm, fold_expr_match, fold_local, fold_pat},
|
||||
fold::{fold_arm, fold_expr_match, fold_pat, Fold},
|
||||
parse::Nothing,
|
||||
parse_quote_spanned,
|
||||
punctuated::Punctuated,
|
||||
spanned::Spanned,
|
||||
token::{Brace, Paren},
|
||||
Arm, Attribute, Expr, ExprMatch, FieldPat, Ident, Member, Pat, PatIdent, PatOr, PatParen,
|
||||
PatPath, PatRest, PatStruct, PatTupleStruct, PatWild, Path, PathSegment, Token, TypePath,
|
||||
};
|
||||
|
||||
macro_rules! visit_trait {
|
||||
(
|
||||
$($vis:vis fn $fn:ident($state:ident: _, $value:ident: &$Value:ty) $block:block)*
|
||||
) => {
|
||||
trait VisitMatchPat<'a> {
|
||||
$(fn $fn(&mut self, $value: &'a $Value) {
|
||||
$fn(self, $value);
|
||||
})*
|
||||
}
|
||||
|
||||
$($vis fn $fn<'a>($state: &mut (impl ?Sized + VisitMatchPat<'a>), $value: &'a $Value) $block)*
|
||||
};
|
||||
}
|
||||
|
||||
visit_trait! {
|
||||
fn visit_match_pat_binding(_state: _, v: &MatchPatBinding) {
|
||||
let MatchPatBinding { ident: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_wild(_state: _, v: &MatchPatWild) {
|
||||
let MatchPatWild { underscore_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_rest(_state: _, v: &MatchPatRest) {
|
||||
let MatchPatRest { dot2_token: _ } = v;
|
||||
}
|
||||
fn visit_match_pat_paren(state: _, v: &MatchPatParen<MatchPat>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat(pat);
|
||||
}
|
||||
fn visit_match_pat_paren_simple(state: _, v: &MatchPatParen<MatchPatSimple>) {
|
||||
let MatchPatParen { paren_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_or(state: _, v: &MatchPatOr<MatchPat>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_or_simple(state: _, v: &MatchPatOr<MatchPatSimple>) {
|
||||
let MatchPatOr { leading_vert: _, cases } = v;
|
||||
for v in cases {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_struct_field(state: _, v: &MatchPatStructField) {
|
||||
let MatchPatStructField { field_name: _, colon_token: _, pat } = v;
|
||||
state.visit_match_pat_simple(pat);
|
||||
}
|
||||
fn visit_match_pat_struct(state: _, v: &MatchPatStruct) {
|
||||
let MatchPatStruct { match_span: _, path: _, brace_token: _, fields, rest: _ } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_struct_field(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_tuple(state: _, v: &MatchPatTuple) {
|
||||
let MatchPatTuple { paren_token: _, fields } = v;
|
||||
for v in fields {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_enum_variant(state: _, v: &MatchPatEnumVariant) {
|
||||
let MatchPatEnumVariant {
|
||||
match_span:_,
|
||||
sim:_,
|
||||
variant_path: _,
|
||||
enum_path: _,
|
||||
variant_name: _,
|
||||
field,
|
||||
} = v;
|
||||
if let Some((_, v)) = field {
|
||||
state.visit_match_pat_simple(v);
|
||||
}
|
||||
}
|
||||
fn visit_match_pat_simple(state: _, v: &MatchPatSimple) {
|
||||
match v {
|
||||
MatchPatSimple::Paren(v) => state.visit_match_pat_paren_simple(v),
|
||||
MatchPatSimple::Or(v) => state.visit_match_pat_or_simple(v),
|
||||
MatchPatSimple::Binding(v) => state.visit_match_pat_binding(v),
|
||||
MatchPatSimple::Wild(v) => state.visit_match_pat_wild(v),
|
||||
MatchPatSimple::Rest(v) => state.visit_match_pat_rest(v),
|
||||
}
|
||||
}
|
||||
fn visit_match_pat(state: _, v: &MatchPat) {
|
||||
match v {
|
||||
MatchPat::Simple(v) => state.visit_match_pat_simple(v),
|
||||
MatchPat::Or(v) => state.visit_match_pat_or(v),
|
||||
MatchPat::Paren(v) => state.visit_match_pat_paren(v),
|
||||
MatchPat::Struct(v) => state.visit_match_pat_struct(v),
|
||||
MatchPat::Tuple(v) => state.visit_match_pat_tuple(v),
|
||||
MatchPat::EnumVariant(v) => state.visit_match_pat_enum_variant(v),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatBinding<> {
|
||||
ident: Ident,
|
||||
@@ -150,15 +53,6 @@ with_debug_clone_and_fold! {
}
|
||||
}
|
||||
|
||||
impl<P> MatchPatOr<P> {
|
||||
/// returns the first `|` between two patterns
|
||||
fn first_inner_vert(&self) -> Option<Token![|]> {
|
||||
let mut pairs = self.cases.pairs();
|
||||
pairs.next_back();
|
||||
pairs.next().and_then(|v| v.into_tuple().1.copied())
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: ToTokens> ToTokens for MatchPatOr<P> {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
@@ -183,19 +77,6 @@ impl ToTokens for MatchPatWild {
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatRest<> {
|
||||
dot2_token: Token![..],
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatRest {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self { dot2_token } = self;
|
||||
dot2_token.to_tokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatStructField<> {
|
||||
field_name: Ident,
|
||||
@@ -278,29 +159,9 @@ impl ToTokens for MatchPatStruct {
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatTuple<> {
|
||||
paren_token: Paren,
|
||||
fields: Punctuated<MatchPatSimple, Token![,]>,
|
||||
}
|
||||
}
|
||||
|
||||
impl ToTokens for MatchPatTuple {
|
||||
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
paren_token,
|
||||
fields,
|
||||
} = self;
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
fields.to_tokens(tokens);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
with_debug_clone_and_fold! {
|
||||
struct MatchPatEnumVariant<> {
|
||||
match_span: Span,
|
||||
sim: Option<(kw::sim,)>,
|
||||
variant_path: Path,
|
||||
enum_path: Path,
|
||||
variant_name: Ident,
|
||||
@@ -312,7 +173,6 @@ impl ToTokens for MatchPatEnumVariant {
fn to_tokens(&self, tokens: &mut TokenStream) {
|
||||
let Self {
|
||||
match_span,
|
||||
sim,
|
||||
variant_path: _,
|
||||
enum_path,
|
||||
variant_name,
|
||||
@@ -322,28 +182,7 @@ impl ToTokens for MatchPatEnumVariant {
__MatchTy::<#enum_path>::#variant_name
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
if sim.is_some() {
|
||||
if let Some((paren_token, field)) = field {
|
||||
paren_token.surround(tokens, |tokens| {
|
||||
field.to_tokens(tokens);
|
||||
match field {
|
||||
MatchPatSimple::Paren(_)
|
||||
| MatchPatSimple::Or(_)
|
||||
| MatchPatSimple::Binding(_)
|
||||
| MatchPatSimple::Wild(_) => quote_spanned! {*match_span=>
|
||||
, _
|
||||
}
|
||||
.to_tokens(tokens),
|
||||
MatchPatSimple::Rest(_) => {}
|
||||
}
|
||||
});
|
||||
} else {
|
||||
quote_spanned! {*match_span=>
|
||||
(_)
|
||||
}
|
||||
.to_tokens(tokens);
|
||||
}
|
||||
} else if let Some((paren_token, field)) = field {
|
||||
if let Some((paren_token, field)) = field {
|
||||
paren_token.surround(tokens, |tokens| field.to_tokens(tokens));
|
||||
}
|
||||
}
|
||||
@@ -355,7 +194,6 @@ enum MatchPatSimple {
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
|
||||
impl_fold! {
|
||||
@@ -364,7 +202,6 @@ impl_fold! {
Or(MatchPatOr<MatchPatSimple>),
|
||||
Binding(MatchPatBinding),
|
||||
Wild(MatchPatWild),
|
||||
Rest(MatchPatRest),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -375,18 +212,17 @@ impl ToTokens for MatchPatSimple {
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Binding(v) => v.to_tokens(tokens),
|
||||
Self::Wild(v) => v.to_tokens(tokens),
|
||||
Self::Rest(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct EnumPath {
|
||||
pub(crate) variant_path: Path,
|
||||
pub(crate) enum_path: Path,
|
||||
pub(crate) variant_name: Ident,
|
||||
struct EnumPath {
|
||||
variant_path: Path,
|
||||
enum_path: Path,
|
||||
variant_name: Ident,
|
||||
}
|
||||
|
||||
pub(crate) fn parse_enum_path(variant_path: TypePath) -> Result<EnumPath, TypePath> {
|
||||
fn parse_enum_path(variant_path: TypePath) -> Result<EnumPath, TypePath> {
|
||||
let TypePath {
|
||||
qself: None,
|
||||
path: variant_path,
|
||||
@@ -442,9 +278,8 @@ trait ParseMatchPat: Sized {
fn or(v: MatchPatOr<Self>) -> Self;
|
||||
fn paren(v: MatchPatParen<Self>) -> Self;
|
||||
fn struct_(state: &mut HdlMatchParseState<'_>, v: MatchPatStruct) -> Result<Self, ()>;
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()>;
|
||||
fn enum_variant(state: &mut HdlMatchParseState<'_>, v: MatchPatEnumVariant)
|
||||
-> Result<Self, ()>;
|
||||
-> Result<Self, ()>;
|
||||
fn parse(state: &mut HdlMatchParseState<'_>, pat: Pat) -> Result<Self, ()> {
|
||||
match pat {
|
||||
Pat::Ident(PatIdent {
|
||||
@@ -478,7 +313,6 @@ trait ParseMatchPat: Sized {
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
@@ -525,7 +359,6 @@ trait ParseMatchPat: Sized {
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
@@ -610,7 +443,6 @@ trait ParseMatchPat: Sized {
state,
|
||||
MatchPatEnumVariant {
|
||||
match_span: state.match_span,
|
||||
sim: state.sim,
|
||||
variant_path,
|
||||
enum_path,
|
||||
variant_name,
|
||||
@@ -630,34 +462,7 @@ trait ParseMatchPat: Sized {
}) => Ok(Self::simple(MatchPatSimple::Wild(MatchPatWild {
|
||||
underscore_token,
|
||||
}))),
|
||||
Pat::Tuple(PatTuple {
|
||||
attrs: _,
|
||||
paren_token,
|
||||
elems,
|
||||
}) => {
|
||||
let fields = elems
|
||||
.into_pairs()
|
||||
.filter_map_pair_value(|field_pat| {
|
||||
if let Pat::Rest(PatRest {
|
||||
attrs: _,
|
||||
dot2_token,
|
||||
}) = field_pat
|
||||
{
|
||||
Some(MatchPatSimple::Rest(MatchPatRest { dot2_token }))
|
||||
} else {
|
||||
MatchPatSimple::parse(state, field_pat).ok()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
Self::tuple(
|
||||
state,
|
||||
MatchPatTuple {
|
||||
paren_token,
|
||||
fields,
|
||||
},
|
||||
)
|
||||
}
|
||||
Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
Pat::Tuple(_) | Pat::Slice(_) | Pat::Const(_) | Pat::Lit(_) | Pat::Range(_) => {
|
||||
state
|
||||
.errors
|
||||
.error(pat, "not yet implemented in #[hdl] patterns");
|
||||
@@ -692,14 +497,6 @@ impl ParseMatchPat for MatchPatSimple {
Err(())
|
||||
}
|
||||
|
||||
fn tuple(state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
state.errors.push(syn::Error::new(
|
||||
v.paren_token.span.open(),
|
||||
"matching tuples is not yet implemented inside structs/enums in #[hdl] patterns",
|
||||
));
|
||||
Err(())
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
@@ -718,7 +515,6 @@ enum MatchPat {
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
|
||||
@@ -728,7 +524,6 @@ impl_fold! {
Or(MatchPatOr<MatchPat>),
|
||||
Paren(MatchPatParen<MatchPat>),
|
||||
Struct(MatchPatStruct),
|
||||
Tuple(MatchPatTuple),
|
||||
EnumVariant(MatchPatEnumVariant),
|
||||
}
|
||||
}
|
||||
@@ -750,10 +545,6 @@ impl ParseMatchPat for MatchPat {
Ok(Self::Struct(v))
|
||||
}
|
||||
|
||||
fn tuple(_state: &mut HdlMatchParseState<'_>, v: MatchPatTuple) -> Result<Self, ()> {
|
||||
Ok(Self::Tuple(v))
|
||||
}
|
||||
|
||||
fn enum_variant(
|
||||
_state: &mut HdlMatchParseState<'_>,
|
||||
v: MatchPatEnumVariant,
|
||||
@@ -769,7 +560,6 @@ impl ToTokens for MatchPat {
Self::Or(v) => v.to_tokens(tokens),
|
||||
Self::Paren(v) => v.to_tokens(tokens),
|
||||
Self::Struct(v) => v.to_tokens(tokens),
|
||||
Self::Tuple(v) => v.to_tokens(tokens),
|
||||
Self::EnumVariant(v) => v.to_tokens(tokens),
|
||||
}
|
||||
}
|
||||
@@ -832,6 +622,10 @@ struct RewriteAsCheckMatch {
}
|
||||
|
||||
impl Fold for RewriteAsCheckMatch {
|
||||
fn fold_field_pat(&mut self, mut i: FieldPat) -> FieldPat {
|
||||
i.colon_token = Some(Token![:](self.span));
|
||||
i
|
||||
}
|
||||
fn fold_pat(&mut self, pat: Pat) -> Pat {
|
||||
match pat {
|
||||
Pat::Ident(mut pat_ident) => match parse_enum_ident(pat_ident.ident) {
|
||||
@@ -946,177 +740,17 @@ impl Fold for RewriteAsCheckMatch {
// don't recurse into expressions
|
||||
i
|
||||
}
|
||||
fn fold_local(&mut self, mut let_stmt: Local) -> Local {
|
||||
if let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: _,
|
||||
diverge,
|
||||
}) = let_stmt.init.take()
|
||||
{
|
||||
let_stmt.init = Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr: parse_quote_spanned! {self.span=>
|
||||
__match_value
|
||||
},
|
||||
diverge: diverge.map(|(else_, _expr)| {
|
||||
(
|
||||
else_,
|
||||
parse_quote_spanned! {self.span=>
|
||||
match __infallible {}
|
||||
},
|
||||
)
|
||||
}),
|
||||
});
|
||||
}
|
||||
fold_local(self, let_stmt)
|
||||
}
|
||||
}
|
||||
|
||||
struct HdlMatchParseState<'a> {
|
||||
sim: Option<(kw::sim,)>,
|
||||
match_span: Span,
|
||||
errors: &'a mut Errors,
|
||||
}
|
||||
|
||||
struct HdlLetPatVisitState<'a> {
|
||||
errors: &'a mut Errors,
|
||||
bindings: BTreeSet<&'a Ident>,
|
||||
}
|
||||
|
||||
impl<'a> VisitMatchPat<'a> for HdlLetPatVisitState<'a> {
|
||||
fn visit_match_pat_binding(&mut self, v: &'a MatchPatBinding) {
|
||||
self.bindings.insert(&v.ident);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or(&mut self, v: &'a MatchPatOr<MatchPat>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_or_simple(&mut self, v: &'a MatchPatOr<MatchPatSimple>) {
|
||||
if let Some(first_inner_vert) = v.first_inner_vert() {
|
||||
self.errors.error(
|
||||
first_inner_vert,
|
||||
"or-patterns are not supported in let statements",
|
||||
);
|
||||
}
|
||||
visit_match_pat_or_simple(self, v);
|
||||
}
|
||||
|
||||
fn visit_match_pat_enum_variant(&mut self, v: &'a MatchPatEnumVariant) {
|
||||
self.errors.error(v, "refutable pattern in let statement");
|
||||
}
|
||||
}
|
||||
|
||||
impl Visitor<'_> {
|
||||
pub(crate) fn process_hdl_let_pat(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
mut let_stmt: Local,
|
||||
) -> Local {
|
||||
let span = let_stmt.let_token.span();
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
if let Pat::Type(pat) = &mut let_stmt.pat {
|
||||
*pat.ty = wrap_ty_with_expr((*pat.ty).clone());
|
||||
}
|
||||
let check_let_stmt = RewriteAsCheckMatch { span }.fold_local(let_stmt.clone());
|
||||
let Local {
|
||||
attrs: _,
|
||||
let_token,
|
||||
pat,
|
||||
init,
|
||||
semi_token,
|
||||
} = let_stmt;
|
||||
let Some(syn::LocalInit {
|
||||
eq_token,
|
||||
expr,
|
||||
diverge,
|
||||
}) = init
|
||||
else {
|
||||
self.errors
|
||||
.error(let_token, "#[hdl] let must be assigned a value");
|
||||
return empty_let();
|
||||
};
|
||||
if let Some((else_, _)) = diverge {
|
||||
// TODO: implement let-else
|
||||
self.errors
|
||||
.error(else_, "#[hdl] let ... else { ... } is not implemented");
|
||||
return empty_let();
|
||||
}
|
||||
let Ok(pat) = MatchPat::parse(
|
||||
&mut HdlMatchParseState {
|
||||
sim,
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
},
|
||||
pat,
|
||||
) else {
|
||||
return empty_let();
|
||||
};
|
||||
let mut state = HdlLetPatVisitState {
|
||||
errors: &mut self.errors,
|
||||
bindings: BTreeSet::new(),
|
||||
};
|
||||
state.visit_match_pat(&pat);
|
||||
let HdlLetPatVisitState {
|
||||
errors: _,
|
||||
bindings,
|
||||
} = state;
|
||||
let retval = if sim.is_some() {
|
||||
parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)*) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let __match_value = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr));
|
||||
#let_token #pat #eq_token ::fayalite::sim::value::SimValue::into_value(__match_value) #semi_token
|
||||
(#(#bindings,)*)
|
||||
};
|
||||
}
|
||||
} else {
|
||||
parse_quote_spanned! {span=>
|
||||
let (#(#bindings,)* __scope,) = {
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(
|
||||
__match_expr,
|
||||
|__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_let_stmt
|
||||
match __infallible {}
|
||||
},
|
||||
);
|
||||
let mut __match_iter = ::fayalite::module::match_(__match_expr);
|
||||
let ::fayalite::__std::option::Option::Some(__match_variant) =
|
||||
::fayalite::__std::iter::Iterator::next(&mut __match_iter)
|
||||
else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with uninhabited type");
|
||||
};
|
||||
let ::fayalite::__std::option::Option::None =
|
||||
::fayalite::__std::iter::Iterator::next(&mut __match_iter)
|
||||
else {
|
||||
::fayalite::__std::unreachable!("#[hdl] let with refutable pattern");
|
||||
};
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#let_token #pat #eq_token __match_variant #semi_token
|
||||
(#(#bindings,)* __scope,)
|
||||
};
|
||||
}
|
||||
};
|
||||
match retval {
|
||||
syn::Stmt::Local(retval) => retval,
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
pub(crate) fn process_hdl_match(
|
||||
&mut self,
|
||||
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
|
||||
_hdl_attr: HdlAttr<Nothing, kw::hdl>,
|
||||
expr_match: ExprMatch,
|
||||
) -> Expr {
|
||||
let span = expr_match.match_token.span();
|
||||
@@ -1128,9 +762,8 @@ impl Visitor<'_> {
brace_token: _,
|
||||
arms,
|
||||
} = expr_match;
|
||||
let ExprOptions { sim } = hdl_attr.body;
|
||||
self.require_normal_module_or_fn(match_token);
|
||||
let mut state = HdlMatchParseState {
|
||||
sim,
|
||||
match_span: span,
|
||||
errors: &mut self.errors,
|
||||
};
|
||||
@@ -1138,36 +771,24 @@ impl Visitor<'_> {
arms.into_iter()
|
||||
.filter_map(|arm| MatchArm::parse(&mut state, arm).ok()),
|
||||
);
|
||||
let expr = if sim.is_some() {
|
||||
quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::SimValue;
|
||||
let __match_expr = ::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr));
|
||||
#match_token ::fayalite::sim::value::SimValue::into_value(__match_expr) {
|
||||
let expr = quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in ::fayalite::module::match_(__match_expr) {
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#match_token __match_variant {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
quote_spanned! {span=>
|
||||
{
|
||||
type __MatchTy<T> = <T as ::fayalite::ty::Type>::MatchVariant;
|
||||
let __match_expr = ::fayalite::expr::ToExpr::to_expr(&(#expr));
|
||||
::fayalite::expr::check_match_expr(__match_expr, |__match_value, __infallible| {
|
||||
#[allow(unused_variables)]
|
||||
#check_match
|
||||
});
|
||||
for __match_variant in ::fayalite::module::match_(__match_expr) {
|
||||
let (__match_variant, __scope) =
|
||||
::fayalite::ty::MatchVariantAndInactiveScope::match_activate_scope(
|
||||
__match_variant,
|
||||
);
|
||||
#match_token __match_variant {
|
||||
#(#arms)*
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
syn::parse2(expr).unwrap()
|
||||
}
|
||||
@@ -5,8 +5,8 @@ use crate::{Cfg, CfgAttr, Cfgs, Errors};
use proc_macro2::Ident;
|
||||
use std::{collections::VecDeque, marker::PhantomData};
|
||||
use syn::{
|
||||
Token,
|
||||
punctuated::{Pair, Punctuated},
|
||||
Token,
|
||||
};
|
||||
|
||||
struct State<P: Phase> {
|
||||
@@ -131,9 +131,9 @@ trait PhaseDispatch {
type Args<P: Phase>;
|
||||
type Output<P: Phase>;
|
||||
fn dispatch_collect(self, args: Self::Args<CollectCfgsPhase>)
|
||||
-> Self::Output<CollectCfgsPhase>;
|
||||
-> Self::Output<CollectCfgsPhase>;
|
||||
fn dispatch_process(self, args: Self::Args<ProcessCfgsPhase>)
|
||||
-> Self::Output<ProcessCfgsPhase>;
|
||||
-> Self::Output<ProcessCfgsPhase>;
|
||||
}
|
||||
|
||||
trait Phase: Sized + 'static {
|
||||
@@ -2510,7 +2510,7 @@ pub(crate) fn process_cfgs(item: syn::Item, cfgs: Cfgs<bool>) -> syn::Result<Opt
errors: Errors::new(),
|
||||
_phantom: PhantomData,
|
||||
};
|
||||
let retval = TopItem(item).process(&mut state).map(|v| v.0.0);
|
||||
let retval = TopItem(item).process(&mut state).map(|v| v.0 .0);
|
||||
state.errors.finish()?;
|
||||
Ok(retval)
|
||||
}
|
||||
@@ -1,7 +1,7 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use proc_macro2::{Span, TokenStream};
|
||||
use quote::{ToTokens, format_ident, quote};
|
||||
use quote::{format_ident, quote, ToTokens};
|
||||
use std::{collections::BTreeMap, fs};
|
||||
use syn::{fold::Fold, parse_quote};
|
||||
@@ -14,11 +14,9 @@ rust-version.workspace = true
version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
base64.workspace = true
|
||||
bitvec.workspace = true
|
||||
blake3.workspace = true
|
||||
clap.workspace = true
|
||||
clap_complete.workspace = true
|
||||
ctor.workspace = true
|
||||
eyre.workspace = true
|
||||
fayalite-proc-macros.workspace = true
|
||||
@@ -26,7 +24,7 @@ hashbrown.workspace = true
jobslot.workspace = true
|
||||
num-bigint.workspace = true
|
||||
num-traits.workspace = true
|
||||
ordered-float.workspace = true
|
||||
os_pipe.workspace = true
|
||||
petgraph.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde.workspace = true
|
||||
@@ -36,14 +34,12 @@ which.workspace = true
|
||||
[dev-dependencies]
|
||||
trybuild.workspace = true
|
||||
serde = { workspace = true, features = ["rc"] }
|
||||
|
||||
[build-dependencies]
|
||||
fayalite-visit-gen.workspace = true
|
||||
|
||||
[features]
|
||||
unstable-doc = []
|
||||
unstable-test-hasher = []
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
features = ["unstable-doc"]
|
||||
@@ -1,64 +1,47 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use fayalite::prelude::*;
|
||||
use clap::Parser;
|
||||
use fayalite::{cli, prelude::*};
|
||||
|
||||
#[hdl_module]
|
||||
fn blinky(platform_io_builder: PlatformIOBuilder<'_>) {
|
||||
let clk_input =
|
||||
platform_io_builder.peripherals_with_type::<peripherals::ClockInput>()[0].use_peripheral();
|
||||
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
|
||||
fn blinky(clock_frequency: u64) {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let rst: SyncReset = m.input();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk: clk_input.clk,
|
||||
rst,
|
||||
clk,
|
||||
rst: rst.to_reset(),
|
||||
};
|
||||
let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1;
|
||||
let max_value = clock_frequency / 2 - 1;
|
||||
let int_ty = UInt::range_inclusive(0..=max_value);
|
||||
#[hdl]
|
||||
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
|
||||
#[hdl]
|
||||
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
|
||||
#[hdl]
|
||||
let rgb_output_reg = reg_builder().clock_domain(cd).reset(
|
||||
#[hdl]
|
||||
peripherals::RgbLed {
|
||||
r: false,
|
||||
g: false,
|
||||
b: false,
|
||||
},
|
||||
);
|
||||
#[hdl]
|
||||
if counter_reg.cmp_eq(max_value) {
|
||||
connect_any(counter_reg, 0u8);
|
||||
connect(output_reg, !output_reg);
|
||||
connect(rgb_output_reg.r, !rgb_output_reg.r);
|
||||
#[hdl]
|
||||
if rgb_output_reg.r {
|
||||
connect(rgb_output_reg.g, !rgb_output_reg.g);
|
||||
#[hdl]
|
||||
if rgb_output_reg.g {
|
||||
connect(rgb_output_reg.b, !rgb_output_reg.b);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
connect_any(counter_reg, counter_reg + 1_hdl_u1);
|
||||
}
|
||||
for led in platform_io_builder.peripherals_with_type::<peripherals::Led>() {
|
||||
if let Ok(led) = led.try_use_peripheral() {
|
||||
connect(led.on, output_reg);
|
||||
}
|
||||
}
|
||||
for rgb_led in platform_io_builder.peripherals_with_type::<peripherals::RgbLed>() {
|
||||
if let Ok(rgb_led) = rgb_led.try_use_peripheral() {
|
||||
connect(rgb_led, rgb_output_reg);
|
||||
}
|
||||
}
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
let led: Bool = m.output();
|
||||
connect(led, output_reg);
|
||||
}
|
||||
|
||||
fn main() {
|
||||
<BuildCli>::main("blinky", |_, platform, _| {
|
||||
Ok(JobParams::new(platform.wrap_main_module(blinky)))
|
||||
});
|
||||
#[derive(Parser)]
|
||||
struct Cli {
|
||||
/// clock frequency in hertz
|
||||
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
|
||||
clock_frequency: u64,
|
||||
#[command(subcommand)]
|
||||
cli: cli::Cli,
|
||||
}
|
||||
|
||||
fn main() -> cli::Result {
|
||||
let cli = Cli::parse();
|
||||
cli.cli.run(blinky(cli.clock_frequency))
|
||||
}
|
||||
@@ -1,188 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use clap::builder::TypedValueParser;
|
||||
use fayalite::{
|
||||
build::{ToArgs, WriteArgs},
|
||||
platform::PeripheralRef,
|
||||
prelude::*,
|
||||
};
|
||||
use ordered_float::NotNan;
|
||||
|
||||
fn pick_clock<'a>(
|
||||
platform_io_builder: &PlatformIOBuilder<'a>,
|
||||
) -> PeripheralRef<'a, peripherals::ClockInput> {
|
||||
let mut clks = platform_io_builder.peripherals_with_type::<peripherals::ClockInput>();
|
||||
clks.sort_by_key(|clk| {
|
||||
// sort clocks by preference, smaller return values mean higher preference
|
||||
let mut frequency = clk.ty().frequency();
|
||||
let priority;
|
||||
if frequency < 10e6 {
|
||||
frequency = -frequency; // prefer bigger frequencies
|
||||
priority = 1;
|
||||
} else if frequency > 50e6 {
|
||||
// prefer smaller frequencies
|
||||
priority = 2; // least preferred
|
||||
} else {
|
||||
priority = 0; // most preferred
|
||||
frequency = (frequency - 25e6).abs(); // prefer closer to 25MHz
|
||||
}
|
||||
(priority, NotNan::new(frequency).expect("should be valid"))
|
||||
});
|
||||
clks[0]
|
||||
}
|
||||
|
||||
#[hdl_module]
|
||||
fn tx_only_uart(
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
divisor: f64,
|
||||
message: impl AsRef<[u8]>,
|
||||
) {
|
||||
let message = message.as_ref();
|
||||
let clk_input = pick_clock(&platform_io_builder).use_peripheral();
|
||||
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
|
||||
let cd = #[hdl]
|
||||
ClockDomain {
|
||||
clk: clk_input.clk,
|
||||
rst,
|
||||
};
|
||||
let numerator = 1u128 << 16;
|
||||
let denominator = (divisor * numerator as f64).round() as u128;
|
||||
|
||||
#[hdl]
|
||||
let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128);
|
||||
|
||||
#[hdl]
|
||||
let sum: UInt<128> = wire();
|
||||
connect_any(sum, remainder_reg + numerator);
|
||||
|
||||
#[hdl]
|
||||
let tick_reg = reg_builder().clock_domain(cd).reset(false);
|
||||
connect(tick_reg, false);
|
||||
|
||||
#[hdl]
|
||||
let next_remainder: UInt<128> = wire();
|
||||
connect(remainder_reg, next_remainder);
|
||||
|
||||
#[hdl]
|
||||
if sum.cmp_ge(denominator) {
|
||||
connect_any(next_remainder, sum - denominator);
|
||||
connect(tick_reg, true);
|
||||
} else {
|
||||
connect(next_remainder, sum);
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4);
|
||||
#[hdl]
|
||||
let next_uart_state: UInt<4> = wire();
|
||||
|
||||
connect_any(next_uart_state, uart_state_reg + 1u8);
|
||||
|
||||
#[hdl]
|
||||
let message_mem: Array<UInt<8>> = wire(Array[UInt::new_static()][message.len()]);
|
||||
for (message, message_mem) in message.iter().zip(message_mem) {
|
||||
connect(message_mem, *message);
|
||||
}
|
||||
#[hdl]
|
||||
let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32);
|
||||
#[hdl]
|
||||
let next_addr: UInt<32> = wire();
|
||||
connect(next_addr, addr_reg);
|
||||
|
||||
#[hdl]
|
||||
let tx = reg_builder().clock_domain(cd).reset(true);
|
||||
|
||||
#[hdl]
|
||||
let tx_bits: Array<Bool, 10> = wire();
|
||||
|
||||
connect(tx_bits[0], false); // start bit
|
||||
connect(tx_bits[9], true); // stop bit
|
||||
|
||||
for i in 0..8 {
|
||||
connect(tx_bits[i + 1], message_mem[addr_reg][i]); // data bits
|
||||
}
|
||||
|
||||
connect(tx, tx_bits[uart_state_reg]);
|
||||
|
||||
#[hdl]
|
||||
if uart_state_reg.cmp_eq(Expr::ty(tx_bits).len() - 1) {
|
||||
connect(next_uart_state, 0_hdl_u4);
|
||||
let next_addr_val = addr_reg + 1u8;
|
||||
#[hdl]
|
||||
if next_addr_val.cmp_lt(message.len()) {
|
||||
connect_any(next_addr, next_addr_val);
|
||||
} else {
|
||||
connect(next_addr, 0u32);
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
if tick_reg {
|
||||
connect(uart_state_reg, next_uart_state);
|
||||
connect(addr_reg, next_addr);
|
||||
}
|
||||
|
||||
for uart in platform_io_builder.peripherals_with_type::<peripherals::Uart>() {
|
||||
connect(uart.use_peripheral().tx, tx);
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
}
|
||||
|
||||
fn parse_baud_rate(
|
||||
v: impl AsRef<str>,
|
||||
) -> Result<NotNan<f64>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let retval: NotNan<f64> = v
|
||||
.as_ref()
|
||||
.parse()
|
||||
.map_err(|_| "invalid baud rate, must be a finite positive floating-point value")?;
|
||||
if *retval > 0.0 && retval.is_finite() {
|
||||
Ok(retval)
|
||||
} else {
|
||||
Err("baud rate must be finite and positive".into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||
pub struct ExtraArgs {
|
||||
#[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")]
|
||||
pub baud_rate: NotNan<f64>,
|
||||
#[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())]
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
impl ToArgs for ExtraArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self { baud_rate, message } = self;
|
||||
args.write_display_arg(format_args!("--baud-rate={baud_rate}"));
|
||||
args.write_long_option_eq("message", message);
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
type Cli = BuildCli<ExtraArgs>;
|
||||
Cli::main(
|
||||
"tx_only_uart",
|
||||
|_, platform, ExtraArgs { baud_rate, message }| {
|
||||
Ok(JobParams::new(platform.try_wrap_main_module(|io| {
|
||||
let clk = pick_clock(&io).ty();
|
||||
let divisor = clk.frequency() / *baud_rate;
|
||||
let baud_rate_error = |msg| {
|
||||
<Cli as clap::CommandFactory>::command()
|
||||
.error(clap::error::ErrorKind::ValueValidation, msg)
|
||||
};
|
||||
const HUGE_DIVISOR: f64 = u64::MAX as f64;
|
||||
match divisor {
|
||||
divisor if !divisor.is_finite() => {
|
||||
return Err(baud_rate_error("bad baud rate"));
|
||||
}
|
||||
HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")),
|
||||
4.0.. => {}
|
||||
_ => return Err(baud_rate_error("baud rate is too large")),
|
||||
}
|
||||
Ok(tx_only_uart(io, divisor, message))
|
||||
})?))
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -2,7 +2,6 @@
// See Notices.txt for copyright information
|
||||
//! ## `#[hdl] let` statements
|
||||
|
||||
pub mod destructuring;
|
||||
pub mod inputs_outputs;
|
||||
pub mod instances;
|
||||
pub mod memories;
|
||||
@@ -1,33 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
//! ### Destructuring Let
|
||||
//!
|
||||
//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns.
|
||||
//!
|
||||
//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now,
|
||||
//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`.
|
||||
//!
|
||||
//! ```
|
||||
//! # use fayalite::prelude::*;
|
||||
//! #[hdl]
|
||||
//! struct MyStruct {
|
||||
//! a: UInt<8>,
|
||||
//! b: Bool,
|
||||
//! }
|
||||
//!
|
||||
//! #[hdl_module]
|
||||
//! fn my_module() {
|
||||
//! #[hdl]
|
||||
//! let my_input: MyStruct = m.input();
|
||||
//! #[hdl]
|
||||
//! let my_output: UInt<8> = m.input();
|
||||
//! #[hdl]
|
||||
//! let MyStruct { a, b } = my_input;
|
||||
//! #[hdl]
|
||||
//! if b {
|
||||
//! connect(my_output, a);
|
||||
//! } else {
|
||||
//! connect(my_output, 0_hdl_u8);
|
||||
//! }
|
||||
//! }
|
||||
//! ```
|
||||
@@ -7,5 +7,5 @@
//!
|
||||
//! `#[hdl] match` statements' bodies must evaluate to type `()` for now.
|
||||
//!
|
||||
//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now,
|
||||
//! `#[hdl] match` statements can only match one level of struct/enum pattern for now,
|
||||
//! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`.
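To make that one-level limitation concrete, here is a minimal sketch (not part of this diff) of the kind of `#[hdl] match` the doc describes. It assumes `HdlOption`, `HdlSome`, `HdlNone`, `connect`, and the `m.input()`/`m.output()` builders from `fayalite::prelude::*`, used the same way as in the crate's other doc examples.

```rust
use fayalite::prelude::*;

#[hdl_module]
fn match_example() {
    #[hdl]
    let data: HdlOption<UInt<8>> = m.input();
    #[hdl]
    let out: UInt<8> = m.output();
    // One level of enum pattern is allowed per arm...
    #[hdl]
    match data {
        HdlSome(v) => connect(out, v),
        HdlNone => connect(out, 0_hdl_u8),
    }
    // ...but a nested pattern such as `HdlSome(HdlSome(_))` is not,
    // and each arm's body must evaluate to `()`.
}
```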
@@ -12,7 +12,7 @@ use std::{
ops::Deref,
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone)]
|
||||
struct CustomFirrtlAnnotationFieldsImpl {
|
||||
value: serde_json::Map<String, serde_json::Value>,
|
||||
serialized: Interned<str>,
|
||||
@@ -145,73 +145,52 @@ pub struct DocStringAnnotation {
|
||||
macro_rules! make_annotation_enum {
|
||||
(
|
||||
#[$non_exhaustive:ident]
|
||||
$(#[$meta:meta])*
|
||||
$vis:vis enum $AnnotationEnum:ident {
|
||||
$($Variant:ident($T:ty),)*
|
||||
$vis:vis enum $Annotation:ident {
|
||||
$($Variant:ident($T:ident),)*
|
||||
}
|
||||
) => {
|
||||
crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive);
|
||||
|
||||
#[$non_exhaustive]
|
||||
$(#[$meta])*
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
$vis enum $AnnotationEnum {
|
||||
$vis enum $Annotation {
|
||||
$($Variant($T),)*
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for $AnnotationEnum {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
$(Self::$Variant(v) => v.fmt(f),)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
$(impl From<$T> for crate::annotations::Annotation {
|
||||
fn from(v: $T) -> Self {
|
||||
$AnnotationEnum::$Variant(v).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
$(impl IntoAnnotations for $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[self.into()]
|
||||
[$Annotation::$Variant(self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for &'_ $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
impl IntoAnnotations for &'_ $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(self.clone())]
|
||||
[$Annotation::$Variant(*self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for &'_ mut $T {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
impl IntoAnnotations for &'_ mut $T {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(self.clone())]
|
||||
[$Annotation::$Variant(*self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl crate::annotations::IntoAnnotations for Box<$T> {
|
||||
type IntoAnnotations = [crate::annotations::Annotation; 1];
|
||||
impl IntoAnnotations for Box<$T> {
|
||||
type IntoAnnotations = [$Annotation; 1];
|
||||
|
||||
fn into_annotations(self) -> Self::IntoAnnotations {
|
||||
[crate::annotations::Annotation::from(*self)]
|
||||
[$Annotation::$Variant(*self)]
|
||||
}
|
||||
})*
|
||||
};
|
||||
(@require_non_exhaustive non_exhaustive) => {};
|
||||
}
|
||||
|
||||
pub(crate) use make_annotation_enum;
|
||||
|
||||
make_annotation_enum! {
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum Annotation {
|
||||
DontTouch(DontTouchAnnotation),
|
||||
@@ -220,7 +199,6 @@ make_annotation_enum! {
BlackBoxPath(BlackBoxPathAnnotation),
|
||||
DocString(DocStringAnnotation),
|
||||
CustomFirrtl(CustomFirrtlAnnotation),
|
||||
Xilinx(crate::vendor::xilinx::XilinxAnnotation),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -336,8 +314,10 @@ impl<T: Iterator<Item: IntoAnnotations>> Iterator for IterIntoAnnotations<T> {
}
|
||||
|
||||
impl<
|
||||
T: FusedIterator<Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>>,
|
||||
> FusedIterator for IterIntoAnnotations<T>
|
||||
T: FusedIterator<
|
||||
Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>,
|
||||
>,
|
||||
> FusedIterator for IterIntoAnnotations<T>
|
||||
{
|
||||
}
|
||||
|
||||
@@ -2,24 +2,17 @@
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
CastToBits, Expr, HdlPartialEq, ReduceBits, ToExpr,
|
||||
ops::{ArrayLiteral, ExprFromIterator, ExprIntoIterator, ExprPartialEq},
|
||||
},
|
||||
int::{Bool, DYN_SIZE, DynSize, KnownSize, Size, SizeType},
|
||||
expr::{ops::ArrayIndex, Expr, ToExpr},
|
||||
int::{DynSize, KnownSize, Size, SizeType, DYN_SIZE},
|
||||
intern::{Intern, Interned, LazyInterned},
|
||||
module::transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
sim::value::{SimValue, SimValuePartialEq},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, MatchVariantWithoutScope, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, TypeWithDeref,
|
||||
serde_impls::SerdeCanonicalType,
|
||||
CanonicalType, MatchVariantWithoutScope, StaticType, Type, TypeProperties, TypeWithDeref,
|
||||
},
|
||||
util::ConstUsize,
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error};
|
||||
use std::{iter::FusedIterator, ops::Index};
|
||||
use std::ops::Index;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ArrayType<T: Type = CanonicalType, Len: Size = DynSize> {
|
||||
@@ -48,20 +41,15 @@ impl<T: Type, Len: Size> ArrayType<T, Len> {
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = element;
|
||||
let Some(bit_width) = bit_width.checked_mul(len) else {
|
||||
panic!("array too big");
|
||||
};
|
||||
let Some(sim_only_values_len) = sim_only_values_len.checked_mul(len) else {
|
||||
panic!("array too big");
|
||||
};
|
||||
TypeProperties {
|
||||
is_passive,
|
||||
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub fn new(element: T, len: Len::SizeType) -> Self {
|
||||
|
|
@@ -103,12 +91,6 @@ impl<T: Type, Len: KnownSize + Size<SizeType = Len>> ArrayType<T, Len> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: StaticType, Len: KnownSize> Default for ArrayType<T, Len> {
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: StaticType, Len: KnownSize> StaticType for ArrayType<T, Len> {
|
||||
const TYPE: Self = Self {
|
||||
element: LazyInterned::new_lazy(&|| T::TYPE.intern_sized()),
|
||||
|
|
@@ -157,7 +139,6 @@ impl<T: Type + Visit<State>, Len: Size, State: Visitor + ?Sized> Visit<State>
|
|||
impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
|
||||
type BaseType = Array;
|
||||
type MaskType = ArrayType<T::MaskType, Len>;
|
||||
type SimValue = Len::ArraySimValue<T>;
|
||||
type MatchVariant = Len::ArrayMatch<T>;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Len::ArrayMatch<T>>;
|
||||
|
|
@@ -167,8 +148,10 @@ impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
|
|||
this: Expr<Self>,
|
||||
source_location: SourceLocation,
|
||||
) -> Self::MatchVariantsIter {
|
||||
let base = Expr::as_dyn_array(this);
|
||||
let base_ty = Expr::ty(base);
|
||||
let _ = source_location;
|
||||
let retval = Vec::from_iter(this);
|
||||
let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
|
||||
std::iter::once(MatchVariantWithoutScope(
|
||||
Len::ArrayMatch::<T>::try_from(retval)
|
||||
.ok()
|
||||
|
|
@@ -194,106 +177,16 @@ impl<T: Type, Len: Size> Type for ArrayType<T, Len> {
|
|||
Len::from_usize(array.len()),
|
||||
)
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, mut opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
let element_ty = self.element();
|
||||
let element_size = element_ty.canonical().size();
|
||||
let mut value = Vec::with_capacity(self.len());
|
||||
for _ in 0..self.len() {
|
||||
let (element_opaque, rest) = opaque.split_at(element_size);
|
||||
value.push(SimValue::from_opaque(element_ty, element_opaque.to_owned()));
|
||||
opaque = rest;
|
||||
}
|
||||
value.try_into().ok().expect("used correct length")
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
mut opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
let element_ty = self.element();
|
||||
let element_size = element_ty.canonical().size();
|
||||
let value = AsMut::<[SimValue<T>]>::as_mut(value);
|
||||
assert_eq!(self.len(), value.len());
|
||||
for element_value in value {
|
||||
assert_eq!(SimValue::ty(element_value), element_ty);
|
||||
let (element_opaque, rest) = opaque.split_at(element_size);
|
||||
SimValue::opaque_mut(element_value).clone_from_slice(element_opaque);
|
||||
opaque = rest;
|
||||
}
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
mut writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
let element_ty = self.element();
|
||||
let element_size = element_ty.canonical().size();
|
||||
let value = AsRef::<[SimValue<T>]>::as_ref(value);
|
||||
assert_eq!(self.len(), value.len());
|
||||
for element_value in value {
|
||||
assert_eq!(SimValue::ty(element_value), element_ty);
|
||||
writer.fill_prefix_with(element_size, |writer| {
|
||||
writer.fill_cloned_from_slice(SimValue::opaque(element_value).as_slice())
|
||||
});
|
||||
}
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
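The `sim_value_from_opaque` and `sim_value_to_opaque` methods above walk a flat opaque slice in fixed `element_size` strides, splitting off one element at a time. The sketch below shows the same stride-splitting on a plain byte slice standing in for `OpaqueSimValueSlice`; it is purely illustrative.

```rust
// Illustrative only: the fixed-stride splitting that sim_value_from_opaque does,
// shown on a plain byte slice instead of an OpaqueSimValueSlice.
fn split_elements(mut opaque: &[u8], element_size: usize, len: usize) -> Vec<Vec<u8>> {
    let mut value = Vec::with_capacity(len);
    for _ in 0..len {
        // peel one element-sized chunk off the front, keep the rest for the next turn
        let (element, rest) = opaque.split_at(element_size);
        value.push(element.to_vec());
        opaque = rest;
    }
    value
}

fn main() {
    let flat = [1u8, 2, 3, 4, 5, 6];
    let elements = split_elements(&flat, 2, 3);
    assert_eq!(elements, vec![vec![1, 2], vec![3, 4], vec![5, 6]]);
}
```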
|
||||
|
||||
impl<T: Type + Serialize, Len: Size> Serialize for ArrayType<T, Len> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
SerdeCanonicalType::<T>::Array {
|
||||
element: self.element(),
|
||||
len: self.len(),
|
||||
}
|
||||
.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: Type + Deserialize<'de>, Len: Size> Deserialize<'de> for ArrayType<T, Len> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let name = |len| -> String {
|
||||
if let Some(len) = len {
|
||||
format!("an Array<_, {len}>")
|
||||
} else {
|
||||
"an Array<_>".to_string()
|
||||
}
|
||||
};
|
||||
match SerdeCanonicalType::<T>::deserialize(deserializer)? {
|
||||
SerdeCanonicalType::Array { element, len } => {
|
||||
if let Some(len) = Len::try_from_usize(len) {
|
||||
Ok(Self::new(element, len))
|
||||
} else {
|
||||
Err(Error::invalid_value(
|
||||
serde::de::Unexpected::Other(&name(Some(len))),
|
||||
&&*name(Len::KNOWN_VALUE),
|
||||
))
|
||||
}
|
||||
}
|
||||
ty => Err(Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&&*name(Len::KNOWN_VALUE),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> TypeWithDeref for ArrayType<T, Len> {
|
||||
fn expr_deref(this: &Expr<Self>) -> &Self::MatchVariant {
|
||||
let retval = Vec::from_iter(*this);
|
||||
let base = Expr::as_dyn_array(*this);
|
||||
let base_ty = Expr::ty(base);
|
||||
let retval = Vec::from_iter((0..base_ty.len()).map(|i| ArrayIndex::new(base, i).to_expr()));
|
||||
Interned::into_inner(Intern::intern_sized(
|
||||
Len::ArrayMatch::<T>::try_from(retval)
|
||||
.ok()
|
||||
|
|
@@ -325,143 +218,3 @@ impl<T: Type, L: SizeType> Index<L> for ArrayWithoutLen<T> {
|
|||
Interned::into_inner(Intern::intern_sized(ArrayType::new(self.element, len)))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: Type, Rhs: Type, Len: Size> ExprPartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
|
||||
where
|
||||
Lhs: ExprPartialEq<Rhs>,
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
|
||||
let lhs_ty = Expr::ty(lhs);
|
||||
let rhs_ty = Expr::ty(rhs);
|
||||
assert_eq!(lhs_ty.len(), rhs_ty.len());
|
||||
lhs.into_iter()
|
||||
.zip(rhs)
|
||||
.map(|(l, r)| l.cmp_eq(r))
|
||||
.collect::<Expr<Array<Bool>>>()
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<ArrayType<Rhs, Len>>) -> Expr<Bool> {
|
||||
let lhs_ty = Expr::ty(lhs);
|
||||
let rhs_ty = Expr::ty(rhs);
|
||||
assert_eq!(lhs_ty.len(), rhs_ty.len());
|
||||
lhs.into_iter()
|
||||
.zip(rhs)
|
||||
.map(|(l, r)| l.cmp_ne(r))
|
||||
.collect::<Expr<Array<Bool>>>()
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: Type, Rhs: Type, Len: Size> SimValuePartialEq<ArrayType<Rhs, Len>> for ArrayType<Lhs, Len>
|
||||
where
|
||||
Lhs: SimValuePartialEq<Rhs>,
|
||||
{
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<ArrayType<Rhs, Len>>) -> bool {
|
||||
AsRef::<[_]>::as_ref(&**this)
|
||||
.iter()
|
||||
.zip(AsRef::<[_]>::as_ref(&**other))
|
||||
.all(|(l, r)| SimValuePartialEq::sim_value_eq(l, r))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> ExprIntoIterator for ArrayType<T, Len> {
|
||||
type Item = T;
|
||||
type ExprIntoIter = ExprArrayIter<T, Len>;
|
||||
|
||||
fn expr_into_iter(e: Expr<Self>) -> Self::ExprIntoIter {
|
||||
ExprArrayIter {
|
||||
base: e,
|
||||
indexes: 0..Expr::ty(e).len(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ExprArrayIter<T: Type, Len: Size> {
|
||||
base: Expr<ArrayType<T, Len>>,
|
||||
indexes: std::ops::Range<usize>,
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> ExprArrayIter<T, Len> {
|
||||
pub fn base(&self) -> Expr<ArrayType<T, Len>> {
|
||||
self.base
|
||||
}
|
||||
pub fn indexes(&self) -> std::ops::Range<usize> {
|
||||
self.indexes.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> Iterator for ExprArrayIter<T, Len> {
|
||||
type Item = Expr<T>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.indexes.next().map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.indexes.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize {
|
||||
self.indexes.count()
|
||||
}
|
||||
|
||||
fn last(mut self) -> Option<Self::Item> {
|
||||
self.next_back()
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.indexes.nth(n).map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, mut f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.indexes.fold(init, |b, i| f(b, self.base[i]))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> DoubleEndedIterator for ExprArrayIter<T, Len> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.indexes.next_back().map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.indexes.nth_back(n).map(|i| self.base[i])
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, mut f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.indexes.rfold(init, |b, i| f(b, self.base[i]))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> ExactSizeIterator for ExprArrayIter<T, Len> {
|
||||
fn len(&self) -> usize {
|
||||
self.indexes.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type, Len: Size> FusedIterator for ExprArrayIter<T, Len> {}
|
||||
|
||||
impl<A: StaticType> ExprFromIterator<Expr<A>> for Array<A> {
|
||||
fn expr_from_iter<T: IntoIterator<Item = Expr<A>>>(iter: T) -> Expr<Self> {
|
||||
ArrayLiteral::new(
|
||||
A::TYPE,
|
||||
iter.into_iter().map(|v| Expr::canonical(v)).collect(),
|
||||
)
|
||||
.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, A: StaticType> ExprFromIterator<&'a Expr<A>> for Array<A> {
|
||||
fn expr_from_iter<T: IntoIterator<Item = &'a Expr<A>>>(iter: T) -> Expr<Self> {
|
||||
iter.into_iter().copied().collect()
|
||||
}
|
||||
}
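`ExprArrayIter` above stores the array expression plus a `Range<usize>` of indexes and forwards the iterator plumbing to that range. A reduced sketch of the same pattern follows, with a `Vec` standing in for the indexed `Expr`; all names are placeholders.

```rust
// Sketch of the ExprArrayIter approach: keep the base plus a Range<usize> of
// indexes, and delegate next/size_hint/next_back to the range.
#[derive(Clone, Debug)]
struct ArrayIter<Elem: Clone> {
    base: Vec<Elem>,
    indexes: std::ops::Range<usize>,
}

impl<Elem: Clone> Iterator for ArrayIter<Elem> {
    type Item = Elem;

    fn next(&mut self) -> Option<Elem> {
        self.indexes.next().map(|i| self.base[i].clone())
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.indexes.size_hint()
    }
}

impl<Elem: Clone> DoubleEndedIterator for ArrayIter<Elem> {
    fn next_back(&mut self) -> Option<Elem> {
        self.indexes.next_back().map(|i| self.base[i].clone())
    }
}

impl<Elem: Clone> ExactSizeIterator for ArrayIter<Elem> {}

fn main() {
    let iter = ArrayIter { base: vec![10, 20, 30], indexes: 0..3 };
    assert_eq!(iter.rev().collect::<Vec<_>>(), vec![30, 20, 10]);
}
```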
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,128 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies,
|
||||
JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
|
||||
ToArgs, WriteArgs,
|
||||
},
|
||||
firrtl::{ExportOptions, FileBackend},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
|
||||
pub struct FirrtlJobKind;
|
||||
|
||||
#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
|
||||
#[group(id = "Firrtl")]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl ToArgs for FirrtlArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self { export_options } = self;
|
||||
export_options.to_args(args);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct Firrtl {
|
||||
base: BaseJob,
|
||||
export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl Firrtl {
|
||||
fn make_firrtl_file_backend(&self) -> FileBackend {
|
||||
FileBackend {
|
||||
dir_path: PathBuf::from(&*self.base.output_dir()),
|
||||
top_fir_file_stem: Some(self.base.file_stem().into()),
|
||||
circuit_name: None,
|
||||
}
|
||||
}
|
||||
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||
self.base.file_with_ext("fir")
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for FirrtlJobKind {
|
||||
type Args = FirrtlArgs;
|
||||
type Job = Firrtl;
|
||||
type Dependencies = JobKindAndDependencies<BaseJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
JobKindAndDependencies::new(BaseJobKind)
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(
|
||||
params,
|
||||
global_params,
|
||||
|_kind, FirrtlArgs { export_options }, dependencies| {
|
||||
Ok(Firrtl {
|
||||
base: dependencies.get_job::<BaseJob, _>().clone(),
|
||||
export_options,
|
||||
})
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.base.output_dir(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.firrtl_file(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"firrtl".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
let [JobItem::Path { path: input_path }] = *inputs else {
|
||||
panic!("wrong inputs, expected a single `Path`");
|
||||
};
|
||||
assert_eq!(input_path, job.base.output_dir());
|
||||
crate::firrtl::export(
|
||||
job.make_firrtl_file_backend(),
|
||||
params.main_module(),
|
||||
job.export_options,
|
||||
)?;
|
||||
Ok(vec![JobItem::Path {
|
||||
path: job.firrtl_file(),
|
||||
}])
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[DynJobKind::new(FirrtlJobKind)]
|
||||
}
|
||||
|
|
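The deleted `FirrtlJobKind` above follows the general job shape used throughout this module: declare the input and output paths, and have `run()` turn the inputs into the declared outputs. A heavily reduced, hypothetical sketch of that shape (this is not the crate's real `JobKind` trait):

```rust
// Hypothetical, much-reduced version of the job shape above.
use std::path::PathBuf;

trait SimpleJob {
    fn inputs(&self) -> Vec<PathBuf>;
    fn outputs(&self) -> Vec<PathBuf>;
    fn run(&self) -> Result<(), String>;
}

struct ExportFirrtl {
    output_dir: PathBuf,
    file_stem: String,
}

impl SimpleJob for ExportFirrtl {
    fn inputs(&self) -> Vec<PathBuf> {
        vec![self.output_dir.clone()]
    }
    fn outputs(&self) -> Vec<PathBuf> {
        vec![self.output_dir.join(format!("{}.fir", self.file_stem))]
    }
    fn run(&self) -> Result<(), String> {
        // a real job would export the FIRRTL here; this stub only reports the target
        println!("would write {:?}", self.outputs());
        Ok(())
    }
}

fn main() {
    let job = ExportFirrtl { output_dir: PathBuf::from("build"), file_stem: "top".into() };
    job.run().unwrap();
}
```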
@@ -1,388 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams,
|
||||
JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
|
||||
JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||
external::{
|
||||
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||
},
|
||||
verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
|
||||
},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
module::NameId,
|
||||
testing::FormalMode,
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::Context;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
fmt::{self, Write},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalArgs {
|
||||
#[arg(long = "sby-extra-arg", value_name = "ARG")]
|
||||
pub sby_extra_args: Vec<OsString>,
|
||||
#[arg(long, default_value_t)]
|
||||
pub formal_mode: FormalMode,
|
||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
||||
pub formal_depth: u64,
|
||||
#[arg(long, default_value = Self::DEFAULT_SOLVER)]
|
||||
pub formal_solver: String,
|
||||
#[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
|
||||
pub smtbmc_extra_args: Vec<OsString>,
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
pub const DEFAULT_DEPTH: u64 = 20;
|
||||
pub const DEFAULT_SOLVER: &'static str = "z3";
|
||||
}
|
||||
|
||||
impl ToArgs for FormalArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
} = self;
|
||||
for arg in sby_extra_args {
|
||||
args.write_long_option_eq("sby-extra-arg", arg);
|
||||
}
|
||||
args.write_display_args([
|
||||
format_args!("--formal-mode={formal_mode}"),
|
||||
format_args!("--formal-depth={formal_depth}"),
|
||||
format_args!("--formal-solver={formal_solver}"),
|
||||
]);
|
||||
for arg in smtbmc_extra_args {
|
||||
args.write_long_option_eq("smtbmc-extra-arg", arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct WriteSbyFileJobKind;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
|
||||
pub struct WriteSbyFileJob {
|
||||
sby_extra_args: Interned<[Interned<OsStr>]>,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
formal_solver: Interned<str>,
|
||||
smtbmc_extra_args: Interned<[Interned<OsStr>]>,
|
||||
sby_file: Interned<Path>,
|
||||
output_dir: Interned<Path>,
|
||||
main_verilog_file: Interned<Path>,
|
||||
}
|
||||
|
||||
impl WriteSbyFileJob {
|
||||
pub fn sby_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.sby_extra_args
|
||||
}
|
||||
pub fn formal_mode(&self) -> FormalMode {
|
||||
self.formal_mode
|
||||
}
|
||||
pub fn formal_depth(&self) -> u64 {
|
||||
self.formal_depth
|
||||
}
|
||||
pub fn formal_solver(&self) -> Interned<str> {
|
||||
self.formal_solver
|
||||
}
|
||||
pub fn smtbmc_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.smtbmc_extra_args
|
||||
}
|
||||
pub fn sby_file(&self) -> Interned<Path> {
|
||||
self.sby_file
|
||||
}
|
||||
pub fn output_dir(&self) -> Interned<Path> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||
self.main_verilog_file
|
||||
}
|
||||
fn write_sby(
|
||||
&self,
|
||||
output: &mut OsString,
|
||||
additional_files: &[Interned<Path>],
|
||||
main_module_name_id: NameId,
|
||||
) -> eyre::Result<()> {
|
||||
let Self {
|
||||
sby_extra_args: _,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
sby_file: _,
|
||||
output_dir: _,
|
||||
main_verilog_file,
|
||||
} = self;
|
||||
write!(
|
||||
output,
|
||||
"[options]\n\
|
||||
mode {formal_mode}\n\
|
||||
depth {formal_depth}\n\
|
||||
wait on\n\
|
||||
\n\
|
||||
[engines]\n\
|
||||
smtbmc {formal_solver} -- --"
|
||||
)
|
||||
.expect("writing to OsString can't fail");
|
||||
for i in smtbmc_extra_args {
|
||||
output.push(" ");
|
||||
output.push(i);
|
||||
}
|
||||
output.push(
|
||||
"\n\
|
||||
\n\
|
||||
[script]\n",
|
||||
);
|
||||
for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
|
||||
output.push("read_verilog -sv -formal \"");
|
||||
output.push(verilog_file);
|
||||
output.push("\"\n");
|
||||
}
|
||||
let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
|
||||
// workaround for wires disappearing -- set `keep` on all wires
|
||||
writeln!(
|
||||
output,
|
||||
"hierarchy -top {circuit_name}\n\
|
||||
proc\n\
|
||||
setattr -set keep 1 w:\\*\n\
|
||||
prep",
|
||||
)
|
||||
.expect("writing to OsString can't fail");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKind for WriteSbyFileJobKind {
|
||||
type Args = FormalArgs;
|
||||
type Job = WriteSbyFileJob;
|
||||
type Dependencies = JobKindAndDependencies<VerilogJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
mut args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.dependencies
|
||||
.dependencies
|
||||
.args
|
||||
.args
|
||||
.additional_args
|
||||
.verilog_dialect
|
||||
.get_or_insert(VerilogDialect::Yosys);
|
||||
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
|
||||
let FormalArgs {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
} = args;
|
||||
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||
Ok(WriteSbyFileJob {
|
||||
sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(),
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver: formal_solver.intern_deref(),
|
||||
smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(),
|
||||
sby_file: base_job.file_with_ext("sby"),
|
||||
output_dir: base_job.output_dir(),
|
||||
main_verilog_file: dependencies.get_job::<VerilogJob, _>().main_verilog_file(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::DynamicPaths {
|
||||
source_job_name: VerilogJobKind.name(),
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path { path: job.sby_file }].intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"write-sby-file".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_global_params: &GlobalParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||
let [additional_files] = inputs else {
|
||||
unreachable!();
|
||||
};
|
||||
let additional_files = VerilogJob::unwrap_additional_files(additional_files);
|
||||
let mut contents = OsString::new();
|
||||
job.write_sby(
|
||||
&mut contents,
|
||||
additional_files,
|
||||
params.main_module().name_id(),
|
||||
)?;
|
||||
let path = job.sby_file;
|
||||
std::fs::write(path, contents.as_encoded_bytes())
|
||||
.wrap_err_with(|| format!("writing {path:?} failed"))?;
|
||||
Ok(vec![JobItem::Path { path }])
|
||||
}
|
||||
|
||||
fn subcommand_hidden(self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Formal {
|
||||
#[serde(flatten)]
|
||||
write_sby_file: WriteSbyFileJob,
|
||||
sby_file_name: Interned<OsStr>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for Formal {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
write_sby_file:
|
||||
WriteSbyFileJob {
|
||||
sby_extra_args,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver,
|
||||
smtbmc_extra_args,
|
||||
sby_file,
|
||||
output_dir: _,
|
||||
main_verilog_file,
|
||||
},
|
||||
sby_file_name,
|
||||
} = self;
|
||||
f.debug_struct("Formal")
|
||||
.field("sby_extra_args", sby_extra_args)
|
||||
.field("formal_mode", formal_mode)
|
||||
.field("formal_depth", formal_depth)
|
||||
.field("formal_solver", formal_solver)
|
||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
||||
.field("sby_file", sby_file)
|
||||
.field("sby_file_name", sby_file_name)
|
||||
.field("main_verilog_file", main_verilog_file)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct Symbiyosys;
|
||||
|
||||
impl ExternalProgramTrait for Symbiyosys {
|
||||
fn default_program_name() -> Interned<str> {
|
||||
"sby".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)]
|
||||
pub struct FormalAdditionalArgs {}
|
||||
|
||||
impl ToArgs for FormalAdditionalArgs {
|
||||
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {} = self;
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalCommand for Formal {
|
||||
type AdditionalArgs = FormalAdditionalArgs;
|
||||
type AdditionalJobData = Formal;
|
||||
type BaseJobPosition = GetJobPositionDependencies<
|
||||
GetJobPositionDependencies<
|
||||
GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
|
||||
>,
|
||||
>;
|
||||
type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
|
||||
type ExternalProgram = Symbiyosys;
|
||||
|
||||
fn dependencies() -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)> {
|
||||
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
|
||||
let FormalAdditionalArgs {} = args.additional_args;
|
||||
let write_sby_file = dependencies.get_job::<WriteSbyFileJob, _>().clone();
|
||||
Ok(Formal {
|
||||
sby_file_name: write_sby_file
|
||||
.sby_file()
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
write_sby_file,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||
[
|
||||
JobItemName::Path {
|
||||
path: job.additional_job_data().write_sby_file.sby_file(),
|
||||
},
|
||||
JobItemName::Path {
|
||||
path: job.additional_job_data().write_sby_file.main_verilog_file(),
|
||||
},
|
||||
JobItemName::DynamicPaths {
|
||||
source_job_name: VerilogJobKind.name(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||
Interned::default()
|
||||
}
|
||||
|
||||
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||
// args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode
|
||||
args.write_arg("-f");
|
||||
args.write_interned_arg(job.additional_job_data().sby_file_name);
|
||||
args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args());
|
||||
}
|
||||
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||
Some(job.output_dir())
|
||||
}
|
||||
|
||||
fn job_kind_name() -> Interned<str> {
|
||||
"formal".intern()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||
[
|
||||
DynJobKind::new(WriteSbyFileJobKind),
|
||||
DynJobKind::new(ExternalCommandJobKind::<Formal>::new()),
|
||||
]
|
||||
}
|
||||
|
|
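`write_sby` above emits a SymbiYosys `.sby` file with `[options]`, `[engines]`, and `[script]` sections, ending with the keep-attribute workaround for disappearing wires. The sketch below reproduces that layout with placeholder values; the helper function and its arguments are illustrative, not part of the crate.

```rust
// Sketch of the .sby layout that write_sby() above generates.
fn make_sby(mode: &str, depth: u64, solver: &str, verilog_files: &[&str], top: &str) -> String {
    let mut out = String::new();
    out.push_str("[options]\n");
    out.push_str(&format!("mode {mode}\ndepth {depth}\nwait on\n\n"));
    out.push_str("[engines]\n");
    out.push_str(&format!("smtbmc {solver} -- --\n\n"));
    out.push_str("[script]\n");
    for file in verilog_files {
        out.push_str(&format!("read_verilog -sv -formal \"{file}\"\n"));
    }
    // keep wires from being optimized away, as in the workaround above
    out.push_str(&format!(
        "hierarchy -top {top}\nproc\nsetattr -set keep 1 w:\\*\nprep\n"
    ));
    out
}

fn main() {
    println!("{}", make_sby("bmc", 20, "z3", &["fifo.v"], "fifo"));
}
```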
@@ -1,855 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs,
|
||||
},
|
||||
intern::Interned,
|
||||
platform::DynPlatform,
|
||||
util::{HashMap, HashSet, job_server::AcquiredJob},
|
||||
};
|
||||
use eyre::{ContextCompat, eyre};
|
||||
use petgraph::{
|
||||
algo::{DfsSpace, kosaraju_scc, toposort},
|
||||
graph::DiGraph,
|
||||
visit::{GraphBase, Visitable},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
|
||||
use std::{
|
||||
cell::OnceCell,
|
||||
collections::{BTreeMap, BTreeSet, VecDeque},
|
||||
convert::Infallible,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Write},
|
||||
panic,
|
||||
rc::Rc,
|
||||
str::Utf8Error,
|
||||
sync::mpsc,
|
||||
thread::{self, ScopedJoinHandle},
|
||||
};
|
||||
|
||||
macro_rules! write_str {
|
||||
($s:expr, $($rest:tt)*) => {
|
||||
write!($s, $($rest)*).expect("String::write_fmt can't fail")
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
enum JobGraphNode {
|
||||
Job(DynJob),
|
||||
Item {
|
||||
#[allow(dead_code, reason = "name used for debugging")]
|
||||
name: JobItemName,
|
||||
source_job: Option<DynJob>,
|
||||
},
|
||||
}
|
||||
|
||||
type JobGraphInner = DiGraph<JobGraphNode, ()>;
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct JobGraph {
|
||||
jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
|
||||
items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
|
||||
graph: JobGraphInner,
|
||||
topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
|
||||
space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for JobGraph {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
jobs: _,
|
||||
items: _,
|
||||
graph,
|
||||
topological_order,
|
||||
space: _,
|
||||
} = self;
|
||||
f.debug_struct("JobGraph")
|
||||
.field("graph", graph)
|
||||
.field("topological_order", topological_order)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum JobGraphError {
|
||||
CycleError {
|
||||
job: DynJob,
|
||||
output: JobItemName,
|
||||
},
|
||||
MultipleJobsCreateSameOutput {
|
||||
output_item: JobItemName,
|
||||
existing_job: DynJob,
|
||||
new_job: DynJob,
|
||||
},
|
||||
}
|
||||
|
||||
impl std::error::Error for JobGraphError {}
|
||||
|
||||
impl fmt::Display for JobGraphError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::CycleError { job, output } => write!(
|
||||
f,
|
||||
"job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
|
||||
{output:?}\n\
|
||||
job:\n{job:?}",
|
||||
),
|
||||
JobGraphError::MultipleJobsCreateSameOutput {
|
||||
output_item,
|
||||
existing_job,
|
||||
new_job,
|
||||
} => write!(
|
||||
f,
|
||||
"job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
|
||||
conflicting output:\n\
|
||||
{output_item:?}\n\
|
||||
existing job:\n\
|
||||
{existing_job:?}\n\
|
||||
new job:\n\
|
||||
{new_job:?}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum EscapeForUnixShellState {
|
||||
DollarSingleQuote,
|
||||
SingleQuote,
|
||||
Unquoted,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct EscapeForUnixShell<'a> {
|
||||
state: EscapeForUnixShellState,
|
||||
prefix: [u8; 3],
|
||||
bytes: &'a [u8],
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
for c in self.clone() {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> EscapeForUnixShell<'a> {
|
||||
pub fn new(s: &'a (impl ?Sized + AsRef<OsStr>)) -> Self {
|
||||
Self::from_bytes(s.as_ref().as_encoded_bytes())
|
||||
}
|
||||
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
|
||||
let mut prefix = [0; 3];
|
||||
prefix[..bytes.len()].copy_from_slice(bytes);
|
||||
prefix
|
||||
}
|
||||
pub fn from_bytes(bytes: &'a [u8]) -> Self {
|
||||
let mut needs_single_quote = bytes.is_empty();
|
||||
for &b in bytes {
|
||||
match b {
|
||||
b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
|
||||
0..0x20 | 0x7F.. => {
|
||||
return Self {
|
||||
state: EscapeForUnixShellState::DollarSingleQuote,
|
||||
prefix: Self::make_prefix(b"$'"),
|
||||
bytes,
|
||||
};
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
if needs_single_quote {
|
||||
Self {
|
||||
state: EscapeForUnixShellState::SingleQuote,
|
||||
prefix: Self::make_prefix(b"'"),
|
||||
bytes,
|
||||
}
|
||||
} else {
|
||||
Self {
|
||||
state: EscapeForUnixShellState::Unquoted,
|
||||
prefix: Self::make_prefix(b""),
|
||||
bytes,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for EscapeForUnixShell<'_> {
|
||||
type Item = char;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match &mut self.prefix {
|
||||
[0, 0, 0] => {}
|
||||
[0, 0, v] | // find first
|
||||
[0, v, _] | // non-zero byte
|
||||
[v, _, _] => {
|
||||
let retval = *v as char;
|
||||
*v = 0;
|
||||
return Some(retval);
|
||||
}
|
||||
}
|
||||
let Some(&next_byte) = self.bytes.split_off_first() else {
|
||||
return match self.state {
|
||||
EscapeForUnixShellState::DollarSingleQuote
|
||||
| EscapeForUnixShellState::SingleQuote => {
|
||||
self.state = EscapeForUnixShellState::Unquoted;
|
||||
Some('\'')
|
||||
}
|
||||
EscapeForUnixShellState::Unquoted => None,
|
||||
};
|
||||
};
|
||||
match self.state {
|
||||
EscapeForUnixShellState::DollarSingleQuote => match next_byte {
|
||||
b'\'' | b'\\' => {
|
||||
self.prefix = Self::make_prefix(&[next_byte]);
|
||||
Some('\\')
|
||||
}
|
||||
b'\t' => {
|
||||
self.prefix = Self::make_prefix(b"t");
|
||||
Some('\\')
|
||||
}
|
||||
b'\n' => {
|
||||
self.prefix = Self::make_prefix(b"n");
|
||||
Some('\\')
|
||||
}
|
||||
b'\r' => {
|
||||
self.prefix = Self::make_prefix(b"r");
|
||||
Some('\\')
|
||||
}
|
||||
0x20..=0x7E => Some(next_byte as char),
|
||||
_ => {
|
||||
self.prefix = [
|
||||
b'x',
|
||||
char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
|
||||
as u8,
|
||||
char::from_digit(next_byte as u32 & 0xF, 0x10)
|
||||
.expect("known to be in range") as u8,
|
||||
];
|
||||
Some('\\')
|
||||
}
|
||||
},
|
||||
EscapeForUnixShellState::SingleQuote => {
|
||||
if next_byte == b'\'' {
|
||||
self.prefix = Self::make_prefix(b"\\''");
|
||||
Some('\'')
|
||||
} else {
|
||||
Some(next_byte as char)
|
||||
}
|
||||
}
|
||||
EscapeForUnixShellState::Unquoted => match next_byte {
|
||||
b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
|
||||
| b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
|
||||
| b'}' | b'~' => {
|
||||
self.prefix = Self::make_prefix(&[next_byte]);
|
||||
Some('\\')
|
||||
}
|
||||
_ => Some(next_byte as char),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub enum UnixMakefileEscapeKind {
|
||||
NonRecipe,
|
||||
RecipeWithoutShellEscaping,
|
||||
RecipeWithShellEscaping,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct EscapeForUnixMakefile<'a> {
|
||||
s: &'a OsStr,
|
||||
kind: UnixMakefileEscapeKind,
|
||||
}
|
||||
|
||||
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Display::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.do_write(
|
||||
f,
|
||||
fmt::Write::write_str,
|
||||
fmt::Write::write_char,
|
||||
|_, _| Ok(()),
|
||||
|_| unreachable!("already checked that the input causes no UTF-8 errors"),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> EscapeForUnixMakefile<'a> {
|
||||
fn do_write<S: ?Sized, E>(
|
||||
&self,
|
||||
state: &mut S,
|
||||
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
|
||||
write_char: impl Fn(&mut S, char) -> Result<(), E>,
|
||||
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
|
||||
utf8_error: impl Fn(Utf8Error) -> E,
|
||||
) -> Result<(), E> {
|
||||
let escape_recipe_char = |c| match c {
|
||||
'$' => write_str(state, "$$"),
|
||||
'\0'..='\x1F' | '\x7F' => {
|
||||
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||
}
|
||||
_ => write_char(state, c),
|
||||
};
|
||||
match self.kind {
|
||||
UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes())
|
||||
.map_err(&utf8_error)?
|
||||
.chars()
|
||||
.try_for_each(|c| match c {
|
||||
'=' => {
|
||||
add_variable(state, "EQUALS = =")?;
|
||||
write_str(state, "$(EQUALS)")
|
||||
}
|
||||
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
|
||||
'$' => write_str(state, "$$"),
|
||||
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
|
||||
write_char(state, '\\')?;
|
||||
write_char(state, c)
|
||||
}
|
||||
'\0'..='\x1F' | '\x7F' => {
|
||||
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||
}
|
||||
_ => write_char(state, c),
|
||||
}),
|
||||
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
|
||||
str::from_utf8(self.s.as_encoded_bytes())
|
||||
.map_err(&utf8_error)?
|
||||
.chars()
|
||||
.try_for_each(escape_recipe_char)
|
||||
}
|
||||
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
|
||||
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn new(
|
||||
s: &'a (impl ?Sized + AsRef<OsStr>),
|
||||
kind: UnixMakefileEscapeKind,
|
||||
needed_variables: &mut BTreeSet<&'static str>,
|
||||
) -> Result<Self, Utf8Error> {
|
||||
let s = s.as_ref();
|
||||
let retval = Self { s, kind };
|
||||
retval.do_write(
|
||||
needed_variables,
|
||||
|_, _| Ok(()),
|
||||
|_, _| Ok(()),
|
||||
|needed_variables, variable| {
|
||||
needed_variables.insert(variable);
|
||||
Ok(())
|
||||
},
|
||||
|e| e,
|
||||
)?;
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
||||
impl JobGraph {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
fn try_add_item_node(
|
||||
&mut self,
|
||||
name: JobItemName,
|
||||
new_source_job: Option<DynJob>,
|
||||
new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||
) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
|
||||
use hashbrown::hash_map::Entry;
|
||||
match self.items.entry(name) {
|
||||
Entry::Occupied(item_entry) => {
|
||||
let node_id = *item_entry.get();
|
||||
let JobGraphNode::Item {
|
||||
name: _,
|
||||
source_job,
|
||||
} = &mut self.graph[node_id]
|
||||
else {
|
||||
unreachable!("known to be an item");
|
||||
};
|
||||
if let Some(new_source_job) = new_source_job {
|
||||
if let Some(source_job) = source_job {
|
||||
return Err(JobGraphError::MultipleJobsCreateSameOutput {
|
||||
output_item: item_entry.key().clone(),
|
||||
existing_job: source_job.clone(),
|
||||
new_job: new_source_job,
|
||||
});
|
||||
} else {
|
||||
*source_job = Some(new_source_job);
|
||||
}
|
||||
}
|
||||
Ok(node_id)
|
||||
}
|
||||
Entry::Vacant(item_entry) => {
|
||||
let node_id = self.graph.add_node(JobGraphNode::Item {
|
||||
name,
|
||||
source_job: new_source_job,
|
||||
});
|
||||
new_nodes.insert(node_id);
|
||||
item_entry.insert(node_id);
|
||||
Ok(node_id)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
|
||||
&mut self,
|
||||
jobs: I,
|
||||
) -> Result<(), JobGraphError> {
|
||||
use hashbrown::hash_map::Entry;
|
||||
let jobs = jobs.into_iter();
|
||||
struct RemoveNewNodesOnError<'a> {
|
||||
this: &'a mut JobGraph,
|
||||
new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||
}
|
||||
impl Drop for RemoveNewNodesOnError<'_> {
|
||||
fn drop(&mut self) {
|
||||
for node in self.new_nodes.drain() {
|
||||
self.this.graph.remove_node(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
|
||||
this: self,
|
||||
new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
|
||||
};
|
||||
let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
|
||||
let this = &mut *remove_new_nodes_on_error.this;
|
||||
for job in jobs {
|
||||
let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
|
||||
continue;
|
||||
};
|
||||
let job_node_id = this
|
||||
.graph
|
||||
.add_node(JobGraphNode::Job(job_entry.key().clone()));
|
||||
new_nodes.insert(job_node_id);
|
||||
job_entry.insert(job_node_id);
|
||||
for name in job.outputs() {
|
||||
let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
|
||||
this.graph.add_edge(job_node_id, item_node_id, ());
|
||||
}
|
||||
for name in job.inputs() {
|
||||
let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
|
||||
this.graph.add_edge(item_node_id, job_node_id, ());
|
||||
}
|
||||
}
|
||||
match toposort(&this.graph, Some(&mut this.space)) {
|
||||
Ok(v) => {
|
||||
this.topological_order = v;
|
||||
// no need to remove any of the new nodes on drop since we didn't encounter any errors
|
||||
remove_new_nodes_on_error.new_nodes.clear();
|
||||
Ok(())
|
||||
}
|
||||
Err(_) => {
|
||||
// there's at least one cycle, find one!
|
||||
let cycle = kosaraju_scc(&this.graph)
|
||||
.into_iter()
|
||||
.find_map(|scc| {
|
||||
if scc.len() <= 1 {
|
||||
// can't be a cycle since our graph is bipartite --
|
||||
// jobs only connect to items, never jobs to jobs or items to items
|
||||
None
|
||||
} else {
|
||||
Some(scc)
|
||||
}
|
||||
})
|
||||
.expect("we know there's a cycle");
|
||||
let cycle_set = HashSet::from_iter(cycle.iter().copied());
|
||||
let job = cycle
|
||||
.into_iter()
|
||||
.find_map(|node_id| {
|
||||
if let JobGraphNode::Job(job) = &this.graph[node_id] {
|
||||
Some(job.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.expect("a job must be part of the cycle");
|
||||
let output = job
|
||||
.outputs()
|
||||
.into_iter()
|
||||
.find(|output| cycle_set.contains(&this.items[output]))
|
||||
.expect("an output must be part of the cycle");
|
||||
Err(JobGraphError::CycleError { job, output })
|
||||
}
|
||||
}
|
||||
}
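`try_add_jobs` above relies on two petgraph algorithms: `toposort` to order the bipartite job/item graph, and `kosaraju_scc` to locate a concrete cycle when the sort fails, since any strongly connected component with more than one node must contain a cycle. A self-contained sketch of that strategy on a tiny graph, using the same petgraph calls imported above:

```rust
// Try a topological sort; on failure, pull a cycle out of the SCC decomposition.
use petgraph::algo::{kosaraju_scc, toposort};
use petgraph::graph::DiGraph;

fn main() {
    let mut graph = DiGraph::<&str, ()>::new();
    let job = graph.add_node("job");
    let item = graph.add_node("item");
    graph.add_edge(job, item, ());
    graph.add_edge(item, job, ()); // introduces a cycle

    match toposort(&graph, None) {
        Ok(order) => println!("run order: {order:?}"),
        Err(_) => {
            let cycle = kosaraju_scc(&graph)
                .into_iter()
                .find(|scc| scc.len() > 1)
                .expect("toposort failed, so some SCC must have more than one node");
            println!("cycle through: {cycle:?}");
        }
    }
}
```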
|
||||
#[track_caller]
|
||||
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
|
||||
match self.try_add_jobs(jobs) {
|
||||
Ok(()) => {}
|
||||
Err(e) => panic!("error: {e}"),
|
||||
}
|
||||
}
|
||||
pub fn to_unix_makefile(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> Result<String, Utf8Error> {
|
||||
self.to_unix_makefile_with_internal_program_prefix(
|
||||
&[program_name_for_internal_jobs()],
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
}
|
||||
pub fn to_unix_makefile_with_internal_program_prefix(
|
||||
&self,
|
||||
internal_program_prefix: &[Interned<OsStr>],
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> Result<String, Utf8Error> {
|
||||
let mut retval = String::new();
|
||||
let mut needed_variables = BTreeSet::new();
|
||||
let mut phony_targets = BTreeSet::new();
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let outputs = job.outputs();
|
||||
if outputs.is_empty() {
|
||||
retval.push_str(":");
|
||||
} else {
|
||||
for output in job.outputs() {
|
||||
match output {
|
||||
JobItemName::Path { path } => {
|
||||
write_str!(
|
||||
retval,
|
||||
"{} ",
|
||||
EscapeForUnixMakefile::new(
|
||||
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
JobItemName::DynamicPaths { source_job_name } => {
|
||||
write_str!(
|
||||
retval,
|
||||
"{} ",
|
||||
EscapeForUnixMakefile::new(
|
||||
&source_job_name,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
if outputs.len() == 1 {
|
||||
retval.push_str(":");
|
||||
} else {
|
||||
retval.push_str("&:");
|
||||
}
|
||||
}
|
||||
for input in job.inputs() {
|
||||
match input {
|
||||
JobItemName::Path { path } => {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
JobItemName::DynamicPaths { source_job_name } => {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
&source_job_name,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||
}
|
||||
}
|
||||
}
|
||||
retval.push_str("\n\t");
|
||||
job.command_params_with_internal_program_prefix(
|
||||
internal_program_prefix,
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
.to_unix_shell_line(&mut retval, |arg, output| {
|
||||
write_str!(
|
||||
output,
|
||||
"{}",
|
||||
EscapeForUnixMakefile::new(
|
||||
arg,
|
||||
UnixMakefileEscapeKind::RecipeWithShellEscaping,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
Ok(())
|
||||
})?;
|
||||
retval.push_str("\n\n");
|
||||
}
|
||||
if !phony_targets.is_empty() {
|
||||
retval.push_str("\n.PHONY:");
|
||||
for phony_target in phony_targets {
|
||||
write_str!(
|
||||
retval,
|
||||
" {}",
|
||||
EscapeForUnixMakefile::new(
|
||||
phony_target,
|
||||
UnixMakefileEscapeKind::NonRecipe,
|
||||
&mut needed_variables
|
||||
)?
|
||||
);
|
||||
}
|
||||
retval.push_str("\n");
|
||||
}
|
||||
if !needed_variables.is_empty() {
|
||||
retval.insert_str(
|
||||
0,
|
||||
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
|
||||
);
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
pub fn to_unix_shell_script(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> String {
|
||||
self.to_unix_shell_script_with_internal_program_prefix(
|
||||
&[program_name_for_internal_jobs()],
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
}
|
||||
pub fn to_unix_shell_script_with_internal_program_prefix(
|
||||
&self,
|
||||
internal_program_prefix: &[Interned<OsStr>],
|
||||
platform: Option<&DynPlatform>,
|
||||
extra_args: &[Interned<OsStr>],
|
||||
) -> String {
|
||||
let mut retval = String::from(
|
||||
"#!/bin/sh\n\
|
||||
set -ex\n",
|
||||
);
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let Ok(()) = job
|
||||
.command_params_with_internal_program_prefix(
|
||||
internal_program_prefix,
|
||||
platform,
|
||||
extra_args,
|
||||
)
|
||||
.to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> {
|
||||
write_str!(output, "{}", EscapeForUnixShell::new(&arg));
|
||||
Ok(())
|
||||
});
|
||||
retval.push_str("\n");
|
||||
}
|
||||
retval
|
||||
}
|
||||
pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> {
|
||||
// use scope to auto-join threads on errors
|
||||
thread::scope(|scope| {
|
||||
struct WaitingJobState {
|
||||
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||
job: DynJob,
|
||||
inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
|
||||
}
|
||||
let mut ready_jobs = VecDeque::new();
|
||||
let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
let waiting_job = WaitingJobState {
|
||||
job_node_id: node_id,
|
||||
job: job.clone(),
|
||||
inputs: job
|
||||
.inputs()
|
||||
.iter()
|
||||
.map(|&name| (name, OnceCell::new()))
|
||||
.collect(),
|
||||
};
|
||||
if waiting_job.inputs.is_empty() {
|
||||
ready_jobs.push_back(waiting_job);
|
||||
} else {
|
||||
let waiting_job = Rc::new(waiting_job);
|
||||
for &input_item in waiting_job.inputs.keys() {
|
||||
item_name_to_waiting_jobs_map
|
||||
.entry(input_item)
|
||||
.or_default()
|
||||
.push(waiting_job.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
struct RunningJob<'scope> {
|
||||
job: DynJob,
|
||||
thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
|
||||
}
|
||||
let mut running_jobs = HashMap::default();
|
||||
let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
|
||||
let mut next_finished_job = None;
|
||||
loop {
|
||||
if let Some(finished_job) = next_finished_job
|
||||
.take()
|
||||
.or_else(|| finished_jobs_receiver.try_recv().ok())
|
||||
{
|
||||
let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
let output_items = thread.join().map_err(panic::resume_unwind)??;
|
||||
assert!(
|
||||
output_items.iter().map(JobItem::name).eq(job.outputs()),
|
||||
"job's run() method returned the wrong output items:\n\
|
||||
output items:\n\
|
||||
{output_items:?}\n\
|
||||
expected outputs:\n\
|
||||
{:?}\n\
|
||||
job:\n\
|
||||
{job:?}",
|
||||
job.outputs(),
|
||||
);
|
||||
for output_item in output_items {
|
||||
for waiting_job in item_name_to_waiting_jobs_map
|
||||
.remove(&output_item.name())
|
||||
.unwrap_or_default()
|
||||
{
|
||||
let Ok(()) =
|
||||
waiting_job.inputs[&output_item.name()].set(output_item.clone())
|
||||
else {
|
||||
unreachable!();
|
||||
};
|
||||
if let Some(waiting_job) = Rc::into_inner(waiting_job) {
|
||||
ready_jobs.push_back(waiting_job);
|
||||
}
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if let Some(WaitingJobState {
|
||||
job_node_id,
|
||||
job,
|
||||
inputs,
|
||||
}) = ready_jobs.pop_front()
|
||||
{
|
||||
struct RunningJobInThread<'a> {
|
||||
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||
job: DynJob,
|
||||
inputs: Vec<JobItem>,
|
||||
params: &'a JobParams,
|
||||
global_params: &'a GlobalParams,
|
||||
acquired_job: AcquiredJob,
|
||||
finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
|
||||
}
|
||||
impl RunningJobInThread<'_> {
|
||||
fn run(mut self) -> eyre::Result<Vec<JobItem>> {
|
||||
self.job.run(
|
||||
&self.inputs,
|
||||
self.params,
|
||||
self.global_params,
|
||||
&mut self.acquired_job,
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Drop for RunningJobInThread<'_> {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.finished_jobs_sender.send(self.job_node_id);
|
||||
}
|
||||
}
|
||||
let name = job.kind().name();
|
||||
let running_job_in_thread = RunningJobInThread {
|
||||
job_node_id,
|
||||
job: job.clone(),
|
||||
inputs: Result::from_iter(job.inputs().iter().map(|input_name| {
|
||||
inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| {
|
||||
eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
|
||||
})
|
||||
}))?,
|
||||
params,
|
||||
global_params,
|
||||
acquired_job: AcquiredJob::acquire()?,
|
||||
finished_jobs_sender: finished_jobs_sender.clone(),
|
||||
};
|
||||
running_jobs.insert(
|
||||
job_node_id,
|
||||
RunningJob {
|
||||
job,
|
||||
thread: thread::Builder::new()
|
||||
.name(format!("job:{name}"))
|
||||
.spawn_scoped(scope, move || running_job_in_thread.run())
|
||||
.expect("failed to spawn thread for job"),
|
||||
},
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if running_jobs.is_empty() {
|
||||
assert!(item_name_to_waiting_jobs_map.is_empty());
|
||||
assert!(ready_jobs.is_empty());
|
||||
return Ok(());
|
||||
}
|
||||
// nothing to do yet, block to avoid busy waiting
|
||||
next_finished_job = finished_jobs_receiver.recv().ok();
|
||||
}
|
||||
})
|
||||
}
|
||||
}
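The `run` method above is a small scheduler: ready jobs are spawned onto scoped threads and completions come back over an mpsc channel, so the main loop blocks on the channel instead of busy-waiting. A much-reduced sketch of that loop structure follows, with no dependency tracking, just the spawn/report shape; the job list is a placeholder.

```rust
// Spawn each ready job on a scoped thread; collect completions over a channel.
use std::sync::mpsc;
use std::thread;

fn main() {
    let jobs = vec!["firrtl", "verilog", "formal"];
    thread::scope(|scope| {
        let (done_tx, done_rx) = mpsc::channel();
        for (id, name) in jobs.iter().enumerate() {
            let done_tx = done_tx.clone();
            scope.spawn(move || {
                // a real job would do its work here
                println!("running {name}");
                done_tx.send(id).expect("receiver lives until the scope ends");
            });
        }
        drop(done_tx);
        // blocks until every job has reported completion (all senders dropped)
        for finished in done_rx {
            println!("finished job #{finished}");
        }
    });
}
```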
|
||||
|
||||
impl Extend<DynJob> for JobGraph {
|
||||
#[track_caller]
|
||||
fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
|
||||
self.add_jobs(iter);
|
||||
}
|
||||
}
|
||||
|
||||
impl FromIterator<DynJob> for JobGraph {
|
||||
#[track_caller]
|
||||
fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
|
||||
let mut retval = Self::new();
|
||||
retval.add_jobs(iter);
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for JobGraph {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
|
||||
for &node_id in &self.topological_order {
|
||||
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||
continue;
|
||||
};
|
||||
serializer.serialize_element(job)?;
|
||||
}
|
||||
serializer.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for JobGraph {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let jobs = Vec::<DynJob>::deserialize(deserializer)?;
|
||||
let mut retval = JobGraph::new();
|
||||
retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
|
|
@@ -1,313 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{DynJobKind, JobKind, built_in_job_kinds},
|
||||
intern::Interned,
|
||||
util::InternedStrCompareAsStr,
|
||||
};
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
fmt,
|
||||
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
impl DynJobKind {
|
||||
pub fn registry() -> JobKindRegistrySnapshot {
|
||||
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn register(self) {
|
||||
JobKindRegistry::register(JobKindRegistry::lock(), self);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct JobKindRegistry {
|
||||
job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
|
||||
}
|
||||
|
||||
enum JobKindRegisterError {
|
||||
SameName {
|
||||
name: InternedStrCompareAsStr,
|
||||
old_job_kind: DynJobKind,
|
||||
new_job_kind: DynJobKind,
|
||||
},
|
||||
}
|
||||
|
||||
impl fmt::Display for JobKindRegisterError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::SameName {
|
||||
name,
|
||||
old_job_kind,
|
||||
new_job_kind,
|
||||
} => write!(
|
||||
f,
|
||||
"two different `JobKind` can't share the same name:\n\
|
||||
{name:?}\n\
|
||||
old job kind:\n\
|
||||
{old_job_kind:?}\n\
|
||||
new job kind:\n\
|
||||
{new_job_kind:?}",
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait JobKindRegistryRegisterLock {
|
||||
type Locked;
|
||||
fn lock(self) -> Self::Locked;
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
|
||||
}
|
||||
|
||||
impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
|
||||
type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
|
||||
fn lock(self) -> Self::Locked {
|
||||
self.write().expect("shouldn't be poisoned")
|
||||
}
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||
Arc::make_mut(locked)
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
|
||||
type Locked = Self;
|
||||
|
||||
fn lock(self) -> Self::Locked {
|
||||
self
|
||||
}
|
||||
|
||||
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||
locked
|
||||
}
|
||||
}
|
||||
|
||||
impl JobKindRegistry {
|
||||
fn lock() -> &'static RwLock<Arc<Self>> {
|
||||
static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
|
||||
REGISTRY.get_or_init(Default::default)
|
||||
}
|
||||
fn try_register<L: JobKindRegistryRegisterLock>(
|
||||
lock: L,
|
||||
job_kind: DynJobKind,
|
||||
) -> Result<(), JobKindRegisterError> {
|
||||
use std::collections::btree_map::Entry;
|
||||
let name = InternedStrCompareAsStr(job_kind.name());
|
||||
// run user code only outside of lock
|
||||
let mut locked = lock.lock();
|
||||
let this = L::make_mut(&mut locked);
|
||||
let result = match this.job_kinds.entry(name) {
|
||||
Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
|
||||
name,
|
||||
old_job_kind: entry.get().clone(),
|
||||
new_job_kind: job_kind,
|
||||
}),
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(job_kind);
|
||||
Ok(())
|
||||
}
|
||||
};
|
||||
drop(locked);
|
||||
// outside of lock now, so we can test if it's the same DynJobKind
|
||||
match result {
|
||||
Err(JobKindRegisterError::SameName {
|
||||
name: _,
|
||||
old_job_kind,
|
||||
new_job_kind,
|
||||
}) if old_job_kind == new_job_kind => Ok(()),
|
||||
result => result,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
|
||||
match Self::try_register(lock, job_kind) {
|
||||
Err(e) => panic!("{e}"),
|
||||
Ok(()) => {}
|
||||
}
|
||||
}
|
||||
fn get() -> Arc<Self> {
|
||||
Self::lock().read().expect("shouldn't be poisoned").clone()
|
||||
}
|
||||
}
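The registry above keeps a process-wide `OnceLock<RwLock<Arc<...>>>`: readers clone the `Arc` to get a cheap immutable snapshot, and writers go through `Arc::make_mut`, so snapshots taken earlier are never mutated. A standalone sketch of that copy-on-write pattern; the `Registry` type and entry values here are placeholders.

```rust
// Process-wide copy-on-write registry: readers clone an Arc snapshot,
// writers mutate via Arc::make_mut under the write lock.
use std::collections::BTreeMap;
use std::sync::{Arc, OnceLock, RwLock};

#[derive(Clone, Default, Debug)]
struct Registry {
    entries: BTreeMap<String, String>,
}

fn lock() -> &'static RwLock<Arc<Registry>> {
    static REGISTRY: OnceLock<RwLock<Arc<Registry>>> = OnceLock::new();
    REGISTRY.get_or_init(Default::default)
}

fn register(name: &str, value: &str) {
    let mut guard = lock().write().expect("shouldn't be poisoned");
    // existing snapshots keep the old data; only the registry's copy is touched
    Arc::make_mut(&mut *guard).entries.insert(name.into(), value.into());
}

fn snapshot() -> Arc<Registry> {
    lock().read().expect("shouldn't be poisoned").clone()
}

fn main() {
    register("firrtl", "export FIRRTL");
    let snap = snapshot();
    register("formal", "run sby");
    // the earlier snapshot is unaffected by later registrations
    assert!(snap.entries.contains_key("firrtl"));
    assert!(!snap.entries.contains_key("formal"));
}
```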
|
||||
|
||||
impl Default for JobKindRegistry {
|
||||
fn default() -> Self {
|
||||
let mut retval = Self {
|
||||
job_kinds: BTreeMap::new(),
|
||||
};
|
||||
for job_kind in built_in_job_kinds() {
|
||||
Self::register(&mut retval, job_kind);
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);
|
||||
|
||||
impl JobKindRegistrySnapshot {
|
||||
pub fn get() -> Self {
|
||||
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||
}
|
||||
pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
|
||||
self.0.job_kinds.get(name)
|
||||
}
|
||||
pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
|
||||
JobKindRegistryIterWithNames(self.0.job_kinds.iter())
|
||||
}
|
||||
pub fn iter(&self) -> JobKindRegistryIter<'_> {
|
||||
JobKindRegistryIter(self.0.job_kinds.values())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
|
||||
type Item = &'a DynJobKind;
|
||||
type IntoIter = JobKindRegistryIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
|
||||
type Item = &'a DynJobKind;
|
||||
type IntoIter = JobKindRegistryIter<'a>;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
self.iter()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistryIter<'a>(
|
||||
std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||
);
|
||||
|
||||
impl<'a> Iterator for JobKindRegistryIter<'a> {
|
||||
type Item = &'a DynJobKind;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next()
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.0.count()
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item> {
|
||||
self.0.last()
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth(n)
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}
|
||||
|
||||
impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}
|
||||
|
||||
impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.0.next_back()
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth_back(n)
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0.rfold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct JobKindRegistryIterWithNames<'a>(
|
||||
std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||
);
|
||||
|
||||
impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
|
||||
type Item = (Interned<str>, &'a DynJobKind);
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.0.next().map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.0.size_hint()
|
||||
}
|
||||
|
||||
fn count(self) -> usize
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.0.count()
|
||||
}
|
||||
|
||||
fn last(self) -> Option<Self::Item> {
|
||||
self.0.last().map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn fold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
.fold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}
|
||||
|
||||
impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}
|
||||
|
||||
impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.0
|
||||
.next_back()
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
self.0
|
||||
.nth_back(n)
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
}
|
||||
|
||||
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||
where
|
||||
F: FnMut(B, Self::Item) -> B,
|
||||
{
|
||||
self.0
|
||||
.map(|(name, job_kind)| (name.0, job_kind))
|
||||
.rfold(init, f)
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
pub fn register_job_kind<K: JobKind>(kind: K) {
    DynJobKind::new(kind).register();
}
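The registry above combines a process-wide OnceLock, an RwLock, and Arc::make_mut so that writers copy-on-write while readers keep cheap immutable snapshots. Below is a minimal, self-contained sketch of that same pattern using only the standard library; the Registry, register, and snapshot names are illustrative stand-ins, not fayalite's actual API.

use std::collections::BTreeMap;
use std::sync::{Arc, OnceLock, RwLock};

// stand-in for JobKindRegistry: any cloneable map of registered things
#[derive(Clone, Default, Debug)]
struct Registry {
    entries: BTreeMap<String, String>,
}

fn registry() -> &'static RwLock<Arc<Registry>> {
    static REGISTRY: OnceLock<RwLock<Arc<Registry>>> = OnceLock::new();
    REGISTRY.get_or_init(Default::default)
}

// writers copy-on-write via Arc::make_mut, so snapshots handed out earlier stay untouched
fn register(name: &str, description: &str) {
    let mut locked = registry().write().expect("shouldn't be poisoned");
    Arc::make_mut(&mut locked)
        .entries
        .insert(name.to_owned(), description.to_owned());
}

// readers hold the lock only long enough to clone the Arc
fn snapshot() -> Arc<Registry> {
    registry().read().expect("shouldn't be poisoned").clone()
}

fn main() {
    register("verilog", "converts FIRRTL to Verilog");
    let snap = snapshot();
    println!("{:?}", snap.entries.get("verilog"));
}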
|
||||
|
|
@ -1,418 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob,
|
||||
GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem,
|
||||
JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||
external::{
|
||||
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||
},
|
||||
firrtl::{Firrtl, FirrtlJobKind},
|
||||
},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::{Context, bail};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::{OsStr, OsString},
|
||||
fmt, mem,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
/// based on [LLVM Circt's recommended lowering options][lowering-options]
|
||||
///
|
||||
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
|
||||
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub enum VerilogDialect {
|
||||
Questa,
|
||||
Spyglass,
|
||||
Verilator,
|
||||
Vivado,
|
||||
Yosys,
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
VerilogDialect::Questa => "questa",
|
||||
VerilogDialect::Spyglass => "spyglass",
|
||||
VerilogDialect::Verilator => "verilator",
|
||||
VerilogDialect::Vivado => "vivado",
|
||||
VerilogDialect::Yosys => "yosys",
|
||||
}
|
||||
}
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
VerilogDialect::Spyglass => {
|
||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
||||
}
|
||||
VerilogDialect::Verilator => &[
|
||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
||||
],
|
||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
||||
VerilogDialect::Yosys => {
|
||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub struct UnadjustedVerilogArgs {
|
||||
#[arg(long = "firtool-extra-arg", value_name = "ARG")]
|
||||
pub firtool_extra_args: Vec<OsString>,
|
||||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
#[arg(long)]
|
||||
pub verilog_debug: bool,
|
||||
}
|
||||
|
||||
impl ToArgs for UnadjustedVerilogArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {
|
||||
ref firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *self;
|
||||
for arg in firtool_extra_args {
|
||||
args.write_long_option_eq("firtool-extra-arg", arg);
|
||||
}
|
||||
if let Some(verilog_dialect) = verilog_dialect {
|
||||
args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str());
|
||||
}
|
||||
if verilog_debug {
|
||||
args.write_arg("--verilog-debug");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||
pub struct Firtool;
|
||||
|
||||
impl ExternalProgramTrait for Firtool {
|
||||
fn default_program_name() -> Interned<str> {
|
||||
"firtool".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
|
||||
pub struct UnadjustedVerilog {
|
||||
firrtl_file: Interned<Path>,
|
||||
firrtl_file_name: Interned<OsStr>,
|
||||
unadjusted_verilog_file: Interned<Path>,
|
||||
unadjusted_verilog_file_name: Interned<OsStr>,
|
||||
firtool_extra_args: Interned<[Interned<OsStr>]>,
|
||||
verilog_dialect: Option<VerilogDialect>,
|
||||
verilog_debug: bool,
|
||||
}
|
||||
|
||||
impl UnadjustedVerilog {
|
||||
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||
self.firrtl_file
|
||||
}
|
||||
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||
self.unadjusted_verilog_file
|
||||
}
|
||||
pub fn firtool_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||
self.firtool_extra_args
|
||||
}
|
||||
pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
|
||||
self.verilog_dialect
|
||||
}
|
||||
pub fn verilog_debug(&self) -> bool {
|
||||
self.verilog_debug
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalCommand for UnadjustedVerilog {
|
||||
type AdditionalArgs = UnadjustedVerilogArgs;
|
||||
type AdditionalJobData = UnadjustedVerilog;
|
||||
type BaseJobPosition = GetJobPositionDependencies<GetJobPositionJob>;
|
||||
type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
|
||||
type ExternalProgram = Firtool;
|
||||
|
||||
fn dependencies() -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)> {
|
||||
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
|
||||
let UnadjustedVerilogArgs {
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = args.additional_args;
|
||||
let unadjusted_verilog_file = dependencies
|
||||
.dependencies
|
||||
.job
|
||||
.job
|
||||
.file_with_ext("unadjusted.v");
|
||||
let firrtl_job = dependencies.get_job::<Firrtl, _>();
|
||||
Ok(UnadjustedVerilog {
|
||||
firrtl_file: firrtl_job.firrtl_file(),
|
||||
firrtl_file_name: firrtl_job
|
||||
.firrtl_file()
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
unadjusted_verilog_file,
|
||||
unadjusted_verilog_file_name: unadjusted_verilog_file
|
||||
.interned_file_name()
|
||||
.expect("known to have file name"),
|
||||
firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(),
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.additional_job_data().firrtl_file,
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||
[job.additional_job_data().unadjusted_verilog_file].intern_slice()
|
||||
}
|
||||
|
||||
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||
let UnadjustedVerilog {
|
||||
firrtl_file: _,
|
||||
firrtl_file_name,
|
||||
unadjusted_verilog_file: _,
|
||||
unadjusted_verilog_file_name,
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *job.additional_job_data();
|
||||
args.write_interned_arg(firrtl_file_name);
|
||||
args.write_arg("-o");
|
||||
args.write_interned_arg(unadjusted_verilog_file_name);
|
||||
if verilog_debug {
|
||||
args.write_args(["-g", "--preserve-values=all"]);
|
||||
}
|
||||
if let Some(dialect) = verilog_dialect {
|
||||
args.write_args(dialect.firtool_extra_args().iter().copied());
|
||||
}
|
||||
args.write_interned_args(firtool_extra_args);
|
||||
}
|
||||
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||
Some(job.output_dir())
|
||||
}
|
||||
|
||||
fn job_kind_name() -> Interned<str> {
|
||||
"unadjusted-verilog".intern()
|
||||
}
|
||||
|
||||
fn subcommand_hidden() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run_even_if_cached_arg_name() -> Interned<str> {
|
||||
"firtool-run-even-if-cached".intern()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct VerilogJobKind;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogJobArgs {}
|
||||
|
||||
impl ToArgs for VerilogJobArgs {
|
||||
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||
let Self {} = self;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct VerilogJob {
|
||||
output_dir: Interned<Path>,
|
||||
unadjusted_verilog_file: Interned<Path>,
|
||||
main_verilog_file: Interned<Path>,
|
||||
}
|
||||
|
||||
impl VerilogJob {
|
||||
pub fn output_dir(&self) -> Interned<Path> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||
self.unadjusted_verilog_file
|
||||
}
|
||||
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||
self.main_verilog_file
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<Path>] {
|
||||
match additional_files {
|
||||
JobItem::DynamicPaths {
|
||||
paths,
|
||||
source_job_name,
|
||||
} if *source_job_name == VerilogJobKind.name() => paths,
|
||||
v => panic!("expected VerilogJob's additional files JobItem: {v:?}"),
|
||||
}
|
||||
}
|
||||
pub fn all_verilog_files(
|
||||
main_verilog_file: Interned<Path>,
|
||||
additional_files: &[Interned<Path>],
|
||||
) -> eyre::Result<Interned<[Interned<Path>]>> {
|
||||
let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1));
|
||||
for verilog_file in [main_verilog_file].iter().chain(additional_files) {
|
||||
if !["v", "sv"]
|
||||
.iter()
|
||||
.any(|extension| verilog_file.extension() == Some(extension.as_ref()))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| {
|
||||
format!("converting {verilog_file:?} to an absolute path failed")
|
||||
})?;
|
||||
if verilog_file
|
||||
.as_os_str()
|
||||
.as_encoded_bytes()
|
||||
.iter()
|
||||
.any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
|
||||
{
|
||||
bail!("verilog file path contains characters that aren't permitted");
|
||||
}
|
||||
retval.push(verilog_file.intern_deref());
|
||||
}
|
||||
Ok(retval.intern_slice())
|
||||
}
|
||||
}
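The whitespace check in all_verilog_files allows spaces and tabs but rejects other ASCII whitespace and double quotes, presumably because the accepted paths are later embedded inside quoted read_verilog "..." script lines (the cli.rs code further below does exactly that). A standalone sketch of that predicate; path_is_safe_for_script is a hypothetical name, not part of the crate:

use std::path::Path;

// Accept spaces and tabs, reject newlines/other ASCII whitespace plus '"',
// mirroring the byte-level check in all_verilog_files above.
fn path_is_safe_for_script(path: &Path) -> bool {
    !path.as_os_str().as_encoded_bytes().iter().any(|&ch| {
        (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"'
    })
}

fn main() {
    assert!(path_is_safe_for_script(Path::new("build/top module.v")));
    assert!(!path_is_safe_for_script(Path::new("build/top\nmodule.v")));
}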
|
||||
|
||||
impl JobKind for VerilogJobKind {
|
||||
type Args = VerilogJobArgs;
|
||||
type Job = VerilogJob;
|
||||
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
global_params: &GlobalParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
|
||||
let VerilogJobArgs {} = args;
|
||||
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||
Ok(VerilogJob {
|
||||
output_dir: base_job.output_dir(),
|
||||
unadjusted_verilog_file: dependencies
|
||||
.job
|
||||
.job
|
||||
.additional_job_data()
|
||||
.unadjusted_verilog_file(),
|
||||
main_verilog_file: base_job.file_with_ext("v"),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.unadjusted_verilog_file,
|
||||
}]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[
|
||||
JobItemName::Path {
|
||||
path: job.main_verilog_file,
|
||||
},
|
||||
JobItemName::DynamicPaths {
|
||||
source_job_name: self.name(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"verilog".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        _params: &JobParams,
        _global_params: &GlobalParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
        let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
        let file_separator_suffix = "\" ----- 8< -----\n\n";
        let mut input = &*input;
        let main_verilog_file = job.main_verilog_file();
        let mut file_name = Some(main_verilog_file);
        let mut additional_outputs = Vec::new();
        loop {
            let (chunk, next_file_name) = if let Some((chunk, rest)) =
                input.split_once(file_separator_prefix)
            {
                let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
                    bail!(
                        "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
                    );
                };
                input = rest;
                let next_file_name = job.output_dir.join(next_file_name).intern_deref();
                additional_outputs.push(next_file_name);
                (chunk, Some(next_file_name))
            } else {
                (mem::take(&mut input), None)
            };
            let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
                break;
            };
            std::fs::write(&file_name, chunk)?;
        }
        Ok(vec![
            JobItem::Path {
                path: main_verilog_file,
            },
            JobItem::DynamicPaths {
                paths: additional_outputs,
                source_job_name: self.name(),
            },
        ])
    }
}
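firtool emits everything as one stream with "----- 8< ----- FILE" separators, and the loop above slices that stream back into one file per marker, writing the leading chunk to the main Verilog file. A self-contained sketch of the same splitting logic; the function name and the sample input are made up, only the separator strings match the code above.

// Splits firtool-style concatenated output into (file name, contents) chunks.
// The first chunk (before any separator) belongs to the main output file.
fn split_firtool_output<'a>(main_name: &'a str, mut input: &'a str) -> Vec<(&'a str, &'a str)> {
    let prefix = "\n// ----- 8< ----- FILE \"";
    let suffix = "\" ----- 8< -----\n\n";
    let mut out = Vec::new();
    let mut name = Some(main_name);
    loop {
        let (chunk, next_name) = match input.split_once(prefix) {
            Some((chunk, rest)) => {
                let (next_name, rest) = rest
                    .split_once(suffix)
                    .expect("separator prefix without matching suffix");
                input = rest;
                (chunk, Some(next_name))
            }
            None => (std::mem::take(&mut input), None),
        };
        let Some(cur) = std::mem::replace(&mut name, next_name) else {
            break;
        };
        out.push((cur, chunk));
    }
    out
}

fn main() {
    let text = "module top;\nendmodule\n\
                \n// ----- 8< ----- FILE \"extra.sv\" ----- 8< -----\n\n\
                module extra;\nendmodule\n";
    for (name, contents) in split_firtool_output("top.v", text) {
        println!("{name}: {} bytes", contents.len());
    }
}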
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
    [
        DynJobKind::new(ExternalCommandJobKind::<UnadjustedVerilog>::new()),
        DynJobKind::new(VerilogJobKind),
    ]
}
|
||||
|
|
@ -2,25 +2,20 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
CastToBits, Expr, ReduceBits, ToExpr,
|
||||
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
|
||||
},
|
||||
int::{Bool, DynSize},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||
expr::{ops::BundleLiteral, Expr, ToExpr},
|
||||
intern::{Intern, Interned},
|
||||
sim::{SimValue, ToSimValue},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, MatchVariantWithoutScope, OpaqueSimValue, OpaqueSimValueSize,
|
||||
OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref, impl_match_variant_as_self,
|
||||
impl_match_variant_as_self, CanonicalType, MatchVariantWithoutScope, StaticType, Type,
|
||||
TypeProperties, TypeWithDeref,
|
||||
},
|
||||
util::HashMap,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use bitvec::vec::BitVec;
|
||||
use hashbrown::HashMap;
|
||||
use std::{fmt, marker::PhantomData};
|
||||
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
|
||||
pub struct BundleField {
|
||||
pub name: Interned<str>,
|
||||
pub flipped: bool,
|
||||
|
|
@ -69,7 +64,7 @@ impl fmt::Display for FmtDebugInStruct {
|
|||
struct BundleImpl {
|
||||
fields: Interned<[BundleField]>,
|
||||
name_indexes: HashMap<Interned<str>, usize>,
|
||||
field_offsets: Interned<[OpaqueSimValueSize]>,
|
||||
field_offsets: Interned<[usize]>,
|
||||
type_properties: TypeProperties,
|
||||
}
|
||||
|
||||
|
|
@ -89,9 +84,12 @@ impl std::fmt::Debug for BundleImpl {
|
|||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.write_str("Bundle ")?;
|
||||
f.debug_set()
|
||||
.entries(self.fields.iter().enumerate().map(|(index, field)| {
|
||||
field.fmt_debug_in_struct(self.field_offsets[index].bit_width)
|
||||
}))
|
||||
.entries(
|
||||
self.fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, field)| field.fmt_debug_in_struct(self.field_offsets[index])),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
|
@ -116,7 +114,6 @@ impl BundleTypePropertiesBuilder {
|
|||
is_storable: true,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
})
|
||||
}
|
||||
pub const fn clone(&self) -> Self {
|
||||
|
|
@ -124,12 +121,8 @@ impl BundleTypePropertiesBuilder {
|
|||
}
|
||||
#[must_use]
|
||||
pub const fn field(self, flipped: bool, field_props: TypeProperties) -> Self {
|
||||
let Some(OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}) = self.0.size().checked_add(field_props.size())
|
||||
else {
|
||||
panic!("bundle is too big: size overflowed");
|
||||
let Some(bit_width) = self.0.bit_width.checked_add(field_props.bit_width) else {
|
||||
panic!("bundle is too big: bit-width overflowed");
|
||||
};
|
||||
if flipped {
|
||||
Self(TypeProperties {
|
||||
|
|
@ -137,7 +130,6 @@ impl BundleTypePropertiesBuilder {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: false,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
} else {
|
||||
Self(TypeProperties {
|
||||
|
|
@ -146,7 +138,6 @@ impl BundleTypePropertiesBuilder {
|
|||
is_castable_from_bits: self.0.is_castable_from_bits
|
||||
& field_props.is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -164,14 +155,14 @@ impl Default for BundleTypePropertiesBuilder {
|
|||
impl Bundle {
|
||||
#[track_caller]
|
||||
pub fn new(fields: Interned<[BundleField]>) -> Self {
|
||||
let mut name_indexes = HashMap::with_capacity_and_hasher(fields.len(), Default::default());
|
||||
let mut name_indexes = HashMap::with_capacity(fields.len());
|
||||
let mut field_offsets = Vec::with_capacity(fields.len());
|
||||
let mut type_props_builder = BundleTypePropertiesBuilder::new();
|
||||
for (index, &BundleField { name, flipped, ty }) in fields.iter().enumerate() {
|
||||
if let Some(old_index) = name_indexes.insert(name, index) {
|
||||
panic!("duplicate field name {name:?}: at both index {old_index} and {index}");
|
||||
}
|
||||
field_offsets.push(type_props_builder.0.size());
|
||||
field_offsets.push(type_props_builder.0.bit_width);
|
||||
type_props_builder = type_props_builder.field(flipped, ty.type_properties());
|
||||
}
|
||||
Self(Intern::intern_sized(BundleImpl {
|
||||
|
|
@ -187,7 +178,7 @@ impl Bundle {
|
|||
pub fn field_by_name(&self, name: Interned<str>) -> Option<BundleField> {
|
||||
Some(self.0.fields[*self.0.name_indexes.get(&name)?])
|
||||
}
|
||||
pub fn field_offsets(self) -> Interned<[OpaqueSimValueSize]> {
|
||||
pub fn field_offsets(self) -> Interned<[usize]> {
|
||||
self.0.field_offsets
|
||||
}
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
|
|
@ -221,7 +212,6 @@ impl Bundle {
|
|||
impl Type for Bundle {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = Bundle;
|
||||
type SimValue = OpaqueSimValue;
|
||||
impl_match_variant_as_self!();
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
Self::new(Interned::from_iter(self.0.fields.into_iter().map(
|
||||
|
|
@ -245,28 +235,6 @@ impl Type for Bundle {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
pub trait BundleType: Type<BaseType = Bundle> {
|
||||
|
|
@ -275,102 +243,6 @@ pub trait BundleType: Type<BaseType = Bundle> {
|
|||
fn fields(&self) -> Interned<[BundleField]>;
|
||||
}
|
||||
|
||||
pub struct BundleSimValueFromOpaque<'a> {
|
||||
fields: std::slice::Iter<'static, BundleField>,
|
||||
opaque: OpaqueSimValueSlice<'a>,
|
||||
}
|
||||
|
||||
impl<'a> BundleSimValueFromOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: BundleType>(bundle_ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self {
|
||||
let fields = bundle_ty.fields();
|
||||
assert_eq!(
|
||||
opaque.size(),
|
||||
fields
|
||||
.iter()
|
||||
.map(|BundleField { ty, .. }| ty.size())
|
||||
.sum::<OpaqueSimValueSize>()
|
||||
);
|
||||
Self {
|
||||
fields: Interned::into_inner(fields).iter(),
|
||||
opaque,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn field_ty_and_opaque<T: Type>(&mut self) -> (T, OpaqueSimValueSlice<'a>) {
|
||||
let Some(&BundleField {
|
||||
name: _,
|
||||
flipped: _,
|
||||
ty,
|
||||
}) = self.fields.next()
|
||||
else {
|
||||
panic!("tried to read too many fields from BundleSimValueFromBits");
|
||||
};
|
||||
let (field_opaque, rest) = self.opaque.split_at(ty.size());
|
||||
self.opaque = rest;
|
||||
(T::from_canonical(ty), field_opaque)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field_from_opaque<T: Type>(&mut self) -> SimValue<T> {
|
||||
let (field_ty, field_opaque) = self.field_ty_and_opaque::<T>();
|
||||
SimValue::from_opaque(field_ty, field_opaque.to_owned())
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field_clone_from_opaque<T: Type>(&mut self, field_value: &mut SimValue<T>) {
|
||||
let (field_ty, field_opaque) = self.field_ty_and_opaque::<T>();
|
||||
assert_eq!(field_ty, SimValue::ty(field_value));
|
||||
SimValue::opaque_mut(field_value).clone_from_slice(field_opaque);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BundleSimValueToOpaque<'a> {
|
||||
fields: std::slice::Iter<'static, BundleField>,
|
||||
writer: OpaqueSimValueWriter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> BundleSimValueToOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: BundleType>(bundle_ty: T, writer: OpaqueSimValueWriter<'a>) -> Self {
|
||||
let fields = bundle_ty.fields();
|
||||
assert_eq!(
|
||||
writer.size(),
|
||||
fields
|
||||
.iter()
|
||||
.map(|BundleField { ty, .. }| ty.size())
|
||||
.sum::<OpaqueSimValueSize>()
|
||||
);
|
||||
Self {
|
||||
fields: Interned::into_inner(fields).iter(),
|
||||
writer,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn field<T: Type>(&mut self, field_value: &SimValue<T>) {
|
||||
let Some(&BundleField {
|
||||
name: _,
|
||||
flipped: _,
|
||||
ty,
|
||||
}) = self.fields.next()
|
||||
else {
|
||||
panic!("tried to write too many fields with BundleSimValueToOpaque");
|
||||
};
|
||||
assert_eq!(T::from_canonical(ty), SimValue::ty(field_value));
|
||||
self.writer.fill_prefix_with(ty.size(), |writer| {
|
||||
writer.fill_cloned_from_slice(SimValue::opaque(field_value).as_slice())
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn finish(mut self) -> OpaqueSimValueWritten<'a> {
|
||||
assert_eq!(
|
||||
self.fields.next(),
|
||||
None,
|
||||
"wrote too few fields with BundleSimValueToOpaque"
|
||||
);
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct NoBuilder;
|
||||
|
||||
|
|
@ -453,19 +325,7 @@ macro_rules! impl_tuple_builder_fields {
|
|||
}
|
||||
|
||||
macro_rules! impl_tuples {
|
||||
(
|
||||
[$({
|
||||
#[
|
||||
num = $num:tt,
|
||||
field = $field:ident,
|
||||
ty = $ty_var:ident: $Ty:ident,
|
||||
lhs = $lhs_var:ident: $Lhs:ident,
|
||||
rhs = $rhs_var:ident: $Rhs:ident
|
||||
]
|
||||
$var:ident: $T:ident
|
||||
})*]
|
||||
[]
|
||||
) => {
|
||||
([$({#[num = $num:literal, field = $field:ident, ty = $ty_var:ident: $Ty:ident] $var:ident: $T:ident})*] []) => {
|
||||
impl_tuple_builder_fields! {
|
||||
{}
|
||||
[$({
|
||||
|
|
@ -477,7 +337,6 @@ macro_rules! impl_tuples {
|
|||
impl<$($T: Type,)*> Type for ($($T,)*) {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ($($T::MaskType,)*);
|
||||
type SimValue = ($(SimValue<$T>,)*);
|
||||
type MatchVariant = ($(Expr<$T>,)*);
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
|
|
@ -516,40 +375,13 @@ macro_rules! impl_tuples {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueFromOpaque::new(*self, opaque);
|
||||
$(let $var = v.field_from_opaque();)*
|
||||
($($var,)*)
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueFromOpaque::new(*self, opaque);
|
||||
let ($($var,)*) = value;
|
||||
$(v.field_clone_from_opaque($var);)*
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
#![allow(unused_mut, unused_variables)]
|
||||
let mut v = BundleSimValueToOpaque::new(*self, writer);
|
||||
let ($($var,)*) = value;
|
||||
$(v.field($var);)*
|
||||
v.finish()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> BundleType for ($($T,)*) {
|
||||
type Builder = TupleBuilder<($(Unfilled<$T>,)*)>;
|
||||
type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>;
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let ($($var,)*) = self;
|
||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice()
|
||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*][..].intern()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
|
||||
|
|
@ -580,7 +412,7 @@ macro_rules! impl_tuples {
|
|||
$(let $var = $var.to_expr();)*
|
||||
let ty = ($(Expr::ty($var),)*);
|
||||
let field_values = [$(Expr::canonical($var)),*];
|
||||
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> {
|
||||
|
|
@ -590,107 +422,80 @@ macro_rules! impl_tuples {
|
|||
let ($($var,)*) = self.0;
|
||||
let ty = ($(Expr::ty($var),)*);
|
||||
let field_values = [$(Expr::canonical($var)),*];
|
||||
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<CanonicalType> for ($($T,)*) {
|
||||
impl<$($T: ToSimValue<CanonicalType>,)*> ToSimValue<CanonicalType> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
SimValue::into_canonical(ToSimValueWithType::<Bundle>::to_sim_value_with_type(self, Bundle::from_canonical(ty)))
|
||||
fn to_sim_value(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
ToSimValue::<Bundle>::to_sim_value(self, Bundle::from_canonical(ty)).into_canonical()
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: CanonicalType) -> SimValue<CanonicalType>
|
||||
fn into_sim_value(self, ty: CanonicalType) -> SimValue<CanonicalType>
|
||||
{
|
||||
SimValue::into_canonical(ToSimValueWithType::<Bundle>::into_sim_value_with_type(self, Bundle::from_canonical(ty)))
|
||||
ToSimValue::<Bundle>::into_sim_value(self, Bundle::from_canonical(ty)).into_canonical()
|
||||
}
|
||||
#[track_caller]
|
||||
fn box_into_sim_value(self: Box<Self>, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
ToSimValue::<Bundle>::box_into_sim_value(self, Bundle::from_canonical(ty)).into_canonical()
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<Bundle> for ($($T,)*) {
|
||||
impl<$($T: ToSimValue<CanonicalType>,)*> ToSimValue<Bundle> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
fn to_sim_value(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
$(let $var = $var.to_sim_value_with_type($ty_var.ty);)*
|
||||
ToSimValueWithType::into_sim_value_with_type(($($var,)*), ty)
|
||||
$(let $var = $var.to_sim_value($ty_var.ty);)*
|
||||
ToSimValue::into_sim_value(($($var,)*), ty)
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: Bundle) -> SimValue<Bundle> {
|
||||
fn into_sim_value(self, ty: Bundle) -> SimValue<Bundle> {
|
||||
#![allow(unused_mut)]
|
||||
#![allow(clippy::unused_unit)]
|
||||
let ($($var,)*) = self;
|
||||
let [$($ty_var,)*] = *ty.fields() else {
|
||||
panic!("bundle has wrong number of fields");
|
||||
};
|
||||
let mut opaque = OpaqueSimValue::empty();
|
||||
$(let $var = $var.into_sim_value_with_type($ty_var.ty);
|
||||
assert_eq!(SimValue::ty(&$var), $ty_var.ty);
|
||||
opaque.extend_from_slice(SimValue::opaque(&$var).as_slice());
|
||||
let mut bits: Option<BitVec> = None;
|
||||
$(let $var = $var.into_sim_value($ty_var.ty);
|
||||
assert_eq!($var.ty(), $ty_var.ty);
|
||||
if !$var.bits().is_empty() {
|
||||
if let Some(bits) = &mut bits {
|
||||
bits.extend_from_bitslice($var.bits());
|
||||
} else {
|
||||
let mut $var = $var.into_bits();
|
||||
$var.reserve(ty.type_properties().bit_width - $var.len());
|
||||
bits = Some($var);
|
||||
}
|
||||
}
|
||||
)*
|
||||
SimValue::from_opaque(ty, opaque)
|
||||
bits.unwrap_or_else(BitVec::new).into_sim_value(ty)
|
||||
}
|
||||
#[track_caller]
|
||||
fn box_into_sim_value(self: Box<Self>, ty: Bundle) -> SimValue<Bundle> {
|
||||
Self::into_sim_value(*self, ty)
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValueWithType<$Ty>, $Ty: Type,)*> ToSimValueWithType<($($Ty,)*)> for ($($T,)*) {
|
||||
impl<$($T: ToSimValue<$Ty>, $Ty: Type,)*> ToSimValue<($($Ty,)*)> for ($($T,)*) {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
fn to_sim_value(&self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.to_sim_value_with_type($ty_var);)*
|
||||
SimValue::from_value(ty, ($($var,)*))
|
||||
$(let $var = $var.to_sim_value($ty_var).into_canonical();)*
|
||||
SimValue::from_canonical(ToSimValue::into_sim_value(($($var,)*), ty.canonical()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value_with_type(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
fn into_sim_value(self, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
let ($($var,)*) = self;
|
||||
let ($($ty_var,)*) = ty;
|
||||
$(let $var = $var.into_sim_value_with_type($ty_var);)*
|
||||
SimValue::from_value(ty, ($($var,)*))
|
||||
}
|
||||
}
|
||||
impl<$($T: ToSimValue,)*> ToSimValue for ($($T,)*) {
|
||||
type Type = ($($T::Type,)*);
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self) -> SimValue<Self::Type> {
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.to_sim_value();)*
|
||||
SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*))
|
||||
$(let $var = $var.into_sim_value($ty_var).into_canonical();)*
|
||||
SimValue::from_canonical(ToSimValue::into_sim_value(($($var,)*), ty.canonical()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn into_sim_value(self) -> SimValue<Self::Type> {
|
||||
let ($($var,)*) = self;
|
||||
$(let $var = $var.to_sim_value();)*
|
||||
SimValue::from_value(($(SimValue::ty(&$var),)*), ($($var,)*))
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: Type + ExprPartialEq<$Rhs>, $Rhs: Type,)*> ExprPartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_eq($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.all_one_bits()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<($($Rhs,)*)>) -> Expr<Bool> {
|
||||
let ($($lhs_var,)*) = *lhs;
|
||||
let ($($rhs_var,)*) = *rhs;
|
||||
ArrayLiteral::<Bool, DynSize>::new(
|
||||
Bool,
|
||||
FromIterator::from_iter([$(Expr::canonical(ExprPartialEq::cmp_ne($lhs_var, $rhs_var)),)*]),
|
||||
)
|
||||
.cast_to_bits()
|
||||
.any_one_bits()
|
||||
}
|
||||
}
|
||||
impl<$($Lhs: SimValuePartialEq<$Rhs>, $Rhs: Type,)*> SimValuePartialEq<($($Rhs,)*)> for ($($Lhs,)*) {
|
||||
fn sim_value_eq(lhs: &SimValue<Self>, rhs: &SimValue<($($Rhs,)*)>) -> bool {
|
||||
let ($($lhs_var,)*) = &**lhs;
|
||||
let ($($rhs_var,)*) = &**rhs;
|
||||
let retval = true;
|
||||
$(let retval = retval && $lhs_var == $rhs_var;)*
|
||||
retval
|
||||
fn box_into_sim_value(self: Box<Self>, ty: ($($Ty,)*)) -> SimValue<($($Ty,)*)> {
|
||||
Self::into_sim_value(*self, ty)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
@ -702,25 +507,24 @@ macro_rules! impl_tuples {
|
|||
|
||||
impl_tuples! {
|
||||
[] [
|
||||
{#[num = 0, field = field_0, ty = ty0: Ty0, lhs = lhs0: Lhs0, rhs = rhs0: Rhs0] v0: T0}
|
||||
{#[num = 1, field = field_1, ty = ty1: Ty1, lhs = lhs1: Lhs1, rhs = rhs1: Rhs1] v1: T1}
|
||||
{#[num = 2, field = field_2, ty = ty2: Ty2, lhs = lhs2: Lhs2, rhs = rhs2: Rhs2] v2: T2}
|
||||
{#[num = 3, field = field_3, ty = ty3: Ty3, lhs = lhs3: Lhs3, rhs = rhs3: Rhs3] v3: T3}
|
||||
{#[num = 4, field = field_4, ty = ty4: Ty4, lhs = lhs4: Lhs4, rhs = rhs4: Rhs4] v4: T4}
|
||||
{#[num = 5, field = field_5, ty = ty5: Ty5, lhs = lhs5: Lhs5, rhs = rhs5: Rhs5] v5: T5}
|
||||
{#[num = 6, field = field_6, ty = ty6: Ty6, lhs = lhs6: Lhs6, rhs = rhs6: Rhs6] v6: T6}
|
||||
{#[num = 7, field = field_7, ty = ty7: Ty7, lhs = lhs7: Lhs7, rhs = rhs7: Rhs7] v7: T7}
|
||||
{#[num = 8, field = field_8, ty = ty8: Ty8, lhs = lhs8: Lhs8, rhs = rhs8: Rhs8] v8: T8}
|
||||
{#[num = 9, field = field_9, ty = ty9: Ty9, lhs = lhs9: Lhs9, rhs = rhs9: Rhs9] v9: T9}
|
||||
{#[num = 10, field = field_10, ty = ty10: Ty10, lhs = lhs10: Lhs10, rhs = rhs10: Rhs10] v10: T10}
|
||||
{#[num = 11, field = field_11, ty = ty11: Ty11, lhs = lhs11: Lhs11, rhs = rhs11: Rhs11] v11: T11}
|
||||
{#[num = 0, field = field_0, ty = ty0: Ty0] v0: T0}
|
||||
{#[num = 1, field = field_1, ty = ty1: Ty1] v1: T1}
|
||||
{#[num = 2, field = field_2, ty = ty2: Ty2] v2: T2}
|
||||
{#[num = 3, field = field_3, ty = ty3: Ty3] v3: T3}
|
||||
{#[num = 4, field = field_4, ty = ty4: Ty4] v4: T4}
|
||||
{#[num = 5, field = field_5, ty = ty5: Ty5] v5: T5}
|
||||
{#[num = 6, field = field_6, ty = ty6: Ty6] v6: T6}
|
||||
{#[num = 7, field = field_7, ty = ty7: Ty7] v7: T7}
|
||||
{#[num = 8, field = field_8, ty = ty8: Ty8] v8: T8}
|
||||
{#[num = 9, field = field_9, ty = ty9: Ty9] v9: T9}
|
||||
{#[num = 10, field = field_10, ty = ty10: Ty10] v10: T10}
|
||||
{#[num = 11, field = field_11, ty = ty11: Ty11] v11: T11}
|
||||
]
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> Type for PhantomData<T> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = ();
|
||||
type SimValue = PhantomData<T>;
|
||||
type MatchVariant = PhantomData<T>;
|
||||
type MatchActiveScope = ();
|
||||
type MatchVariantAndInactiveScope = MatchVariantWithoutScope<Self::MatchVariant>;
|
||||
|
|
@ -753,24 +557,6 @@ impl<T: ?Sized + Send + Sync + 'static> Type for PhantomData<T> {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert!(opaque.is_empty());
|
||||
*self
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
_value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert!(opaque.is_empty());
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
_value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PhantomDataBuilder<T: ?Sized + Send + Sync + 'static>(PhantomData<T>);
|
||||
|
|
@ -818,35 +604,26 @@ impl<T: ?Sized + Send + Sync + 'static> ToExpr for PhantomData<T> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValue for PhantomData<T> {
|
||||
type Type = PhantomData<T>;
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValue<Self> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value(&self) -> SimValue<Self> {
|
||||
SimValue::from_value(*self, *self)
|
||||
fn to_sim_value(&self, ty: Self) -> SimValue<Self> {
|
||||
ToSimValue::into_sim_value(BitVec::new(), ty)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Send + Sync + 'static> ToSimValueWithType<Self> for PhantomData<T> {
|
||||
impl<T: ?Sized> ToSimValue<Bundle> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Self) -> SimValue<Self> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValueWithType<Bundle> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
fn to_sim_value(&self, ty: Bundle) -> SimValue<Bundle> {
|
||||
assert!(ty.fields().is_empty());
|
||||
SimValue::from_opaque(ty, OpaqueSimValue::empty())
|
||||
ToSimValue::into_sim_value(BitVec::new(), ty)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> ToSimValueWithType<CanonicalType> for PhantomData<T> {
|
||||
impl<T: ?Sized> ToSimValue<CanonicalType> for PhantomData<T> {
|
||||
#[track_caller]
|
||||
fn to_sim_value_with_type(&self, canonical_ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
let ty = Bundle::from_canonical(canonical_ty);
|
||||
fn to_sim_value(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
let ty = Bundle::from_canonical(ty);
|
||||
assert!(ty.fields().is_empty());
|
||||
SimValue::from_opaque(canonical_ty, OpaqueSimValue::empty())
|
||||
ToSimValue::into_sim_value(BitVec::new(), ty).into_canonical()
|
||||
}
|
||||
}
797
crates/fayalite/src/cli.rs
Normal file
@ -0,0 +1,797 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
bundle::{Bundle, BundleType},
|
||||
firrtl::{self, ExportOptions},
|
||||
intern::Interned,
|
||||
module::Module,
|
||||
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
|
||||
};
|
||||
use clap::{
|
||||
builder::{OsStringValueParser, TypedValueParser},
|
||||
Parser, Subcommand, ValueEnum, ValueHint,
|
||||
};
|
||||
use eyre::{eyre, Report};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
error,
|
||||
ffi::OsString,
|
||||
fmt::{self, Write},
|
||||
fs, io, mem,
|
||||
path::{Path, PathBuf},
|
||||
process,
|
||||
};
|
||||
use tempfile::TempDir;
|
||||
|
||||
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
|
||||
|
||||
pub struct CliError(Report);
|
||||
|
||||
impl fmt::Debug for CliError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for CliError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl error::Error for CliError {}
|
||||
|
||||
impl From<io::Error> for CliError {
|
||||
fn from(value: io::Error) -> Self {
|
||||
CliError(Report::new(value))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait RunPhase<Arg> {
|
||||
type Output;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
||||
self.run_with_job(arg, &mut AcquiredJob::acquire())
|
||||
}
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct BaseArgs {
|
||||
/// the directory to put the generated main output file and associated files in
|
||||
#[arg(short, long, value_hint = ValueHint::DirPath, required = true)]
|
||||
pub output: Option<PathBuf>,
|
||||
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
|
||||
#[arg(long)]
|
||||
pub file_stem: Option<String>,
|
||||
#[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")]
|
||||
pub keep_temp_dir: bool,
|
||||
#[arg(skip = false)]
|
||||
pub redirect_output_for_rust_test: bool,
|
||||
}
|
||||
|
||||
impl BaseArgs {
|
||||
fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option<TempDir>)> {
|
||||
let (dir_path, temp_dir) = match &self.output {
|
||||
Some(output) => (output.clone(), None),
|
||||
None => {
|
||||
let temp_dir = TempDir::new()?;
|
||||
if self.keep_temp_dir {
|
||||
let temp_dir = temp_dir.into_path();
|
||||
println!("created temporary directory: {}", temp_dir.display());
|
||||
(temp_dir, None)
|
||||
} else {
|
||||
(temp_dir.path().to_path_buf(), Some(temp_dir))
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok((
|
||||
firrtl::FileBackend {
|
||||
dir_path,
|
||||
top_fir_file_stem: self.file_stem.clone(),
|
||||
circuit_name: None,
|
||||
},
|
||||
temp_dir,
|
||||
))
|
||||
}
|
||||
/// handles possibly redirecting the command's output for Rust tests
|
||||
pub fn run_external_command(
|
||||
&self,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
mut command: process::Command,
|
||||
mut captured_output: Option<&mut String>,
|
||||
) -> io::Result<process::ExitStatus> {
|
||||
if self.redirect_output_for_rust_test || captured_output.is_some() {
|
||||
let (reader, writer) = os_pipe::pipe()?;
|
||||
let mut reader = io::BufReader::new(reader);
|
||||
command.stderr(writer.try_clone()?);
|
||||
command.stdout(writer); // must not leave writer around after spawning child
|
||||
command.stdin(process::Stdio::null());
|
||||
let mut child = command.spawn()?;
|
||||
drop(command); // close writers
|
||||
Ok(loop {
|
||||
let status = child.try_wait()?;
|
||||
streaming_read_utf8(&mut reader, |s| {
|
||||
if let Some(captured_output) = captured_output.as_deref_mut() {
|
||||
captured_output.push_str(s);
|
||||
}
|
||||
// use print! so output goes to Rust test output capture
|
||||
print!("{s}");
|
||||
io::Result::Ok(())
|
||||
})?;
|
||||
if let Some(status) = status {
|
||||
break status;
|
||||
}
|
||||
})
|
||||
} else {
|
||||
command.status()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub base: BaseArgs,
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlOutput {
|
||||
pub file_stem: String,
|
||||
pub top_module: String,
|
||||
pub output_dir: PathBuf,
|
||||
pub temp_dir: Option<TempDir>,
|
||||
}
|
||||
|
||||
impl FirrtlOutput {
|
||||
pub fn file_with_ext(&self, ext: &str) -> PathBuf {
|
||||
let mut retval = self.output_dir.join(&self.file_stem);
|
||||
retval.set_extension(ext);
|
||||
retval
|
||||
}
|
||||
pub fn firrtl_file(&self) -> PathBuf {
|
||||
self.file_with_ext("fir")
|
||||
}
|
||||
}
|
||||
|
||||
impl FirrtlArgs {
|
||||
fn run_impl(
|
||||
&self,
|
||||
top_module: Module<Bundle>,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> Result<FirrtlOutput> {
|
||||
let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?;
|
||||
let firrtl::FileBackend {
|
||||
top_fir_file_stem,
|
||||
circuit_name,
|
||||
dir_path,
|
||||
} = firrtl::export(file_backend, &top_module, self.export_options)?;
|
||||
Ok(FirrtlOutput {
|
||||
file_stem: top_fir_file_stem.expect(
|
||||
"export is known to set the file stem from the circuit name if not provided",
|
||||
),
|
||||
top_module: circuit_name.expect("export is known to set the circuit name"),
|
||||
output_dir: dir_path,
|
||||
temp_dir,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run_with_job(
|
||||
&self,
|
||||
top_module: Module<T>,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<Self::Output> {
|
||||
self.run_impl(top_module.canonical(), acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
|
||||
type Output = FirrtlOutput;
|
||||
fn run_with_job(
|
||||
&self,
|
||||
top_module: Interned<Module<T>>,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<Self::Output> {
|
||||
self.run_with_job(*top_module, acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
/// based on [LLVM Circt's recommended lowering options
|
||||
/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target)
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
||||
#[non_exhaustive]
|
||||
pub enum VerilogDialect {
|
||||
Questa,
|
||||
Spyglass,
|
||||
Verilator,
|
||||
Vivado,
|
||||
Yosys,
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
VerilogDialect::Questa => "questa",
|
||||
VerilogDialect::Spyglass => "spyglass",
|
||||
VerilogDialect::Verilator => "verilator",
|
||||
VerilogDialect::Vivado => "vivado",
|
||||
VerilogDialect::Yosys => "yosys",
|
||||
}
|
||||
}
|
||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
||||
match self {
|
||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
||||
VerilogDialect::Spyglass => {
|
||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
||||
}
|
||||
VerilogDialect::Verilator => &[
|
||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
||||
],
|
||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
||||
VerilogDialect::Yosys => {
|
||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogArgs {
|
||||
#[command(flatten)]
|
||||
pub firrtl: FirrtlArgs,
|
||||
#[arg(
|
||||
long,
|
||||
default_value = "firtool",
|
||||
env = "FIRTOOL",
|
||||
value_hint = ValueHint::CommandName,
|
||||
value_parser = OsStringValueParser::new().try_map(which::which)
|
||||
)]
|
||||
pub firtool: PathBuf,
|
||||
#[arg(long)]
|
||||
pub firtool_extra_args: Vec<OsString>,
|
||||
/// adapt the generated Verilog for a particular toolchain
|
||||
#[arg(long)]
|
||||
pub verilog_dialect: Option<VerilogDialect>,
|
||||
#[arg(long, short = 'g')]
|
||||
pub debug: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct VerilogOutput {
|
||||
pub firrtl: FirrtlOutput,
|
||||
pub verilog_files: Vec<PathBuf>,
|
||||
pub contents_hash: Option<blake3::Hash>,
|
||||
}
|
||||
|
||||
impl VerilogOutput {
|
||||
pub fn main_verilog_file(&self) -> PathBuf {
|
||||
self.firrtl.file_with_ext("v")
|
||||
}
|
||||
fn unadjusted_verilog_file(&self) -> PathBuf {
|
||||
self.firrtl.file_with_ext("unadjusted.v")
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogArgs {
|
||||
fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result<VerilogOutput> {
|
||||
let input = fs::read_to_string(output.unadjusted_verilog_file())?;
|
||||
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
|
||||
let file_separator_suffix = "\" ----- 8< -----\n\n";
|
||||
let mut input = &*input;
|
||||
output.contents_hash = Some(blake3::hash(input.as_bytes()));
|
||||
let main_verilog_file = output.main_verilog_file();
|
||||
let mut file_name: Option<&Path> = Some(&main_verilog_file);
|
||||
loop {
|
||||
let (chunk, next_file_name) = if let Some((chunk, rest)) =
|
||||
input.split_once(file_separator_prefix)
|
||||
{
|
||||
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
|
||||
return Err(CliError(eyre!("parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}")));
|
||||
};
|
||||
input = rest;
|
||||
(chunk, Some(next_file_name.as_ref()))
|
||||
} else {
|
||||
(mem::take(&mut input), None)
|
||||
};
|
||||
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
|
||||
break;
|
||||
};
|
||||
let file_name = output.firrtl.output_dir.join(file_name);
|
||||
fs::write(&file_name, chunk)?;
|
||||
if let Some(extension) = file_name.extension() {
|
||||
if extension == "v" || extension == "sv" {
|
||||
output.verilog_files.push(file_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(output)
|
||||
}
|
||||
fn run_impl(
|
||||
&self,
|
||||
firrtl_output: FirrtlOutput,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<VerilogOutput> {
|
||||
let Self {
|
||||
firrtl,
|
||||
firtool,
|
||||
firtool_extra_args,
|
||||
verilog_dialect,
|
||||
debug,
|
||||
} = self;
|
||||
let output = VerilogOutput {
|
||||
firrtl: firrtl_output,
|
||||
verilog_files: vec![],
|
||||
contents_hash: None,
|
||||
};
|
||||
let mut cmd = process::Command::new(firtool);
|
||||
cmd.arg(output.firrtl.firrtl_file());
|
||||
cmd.arg("-o");
|
||||
cmd.arg(output.unadjusted_verilog_file());
|
||||
if *debug {
|
||||
cmd.arg("-g");
|
||||
cmd.arg("--preserve-values=all");
|
||||
}
|
||||
if let Some(dialect) = verilog_dialect {
|
||||
cmd.args(dialect.firtool_extra_args());
|
||||
}
|
||||
cmd.args(firtool_extra_args);
|
||||
cmd.current_dir(&output.firrtl.output_dir);
|
||||
let status = firrtl.base.run_external_command(acquired_job, cmd, None)?;
|
||||
if status.success() {
|
||||
self.process_unadjusted_verilog_file(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
self.firtool.display()
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Arg> RunPhase<Arg> for VerilogArgs
|
||||
where
|
||||
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = VerilogOutput;
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?;
|
||||
self.run_impl(firrtl_output, acquired_job)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalMode {
|
||||
#[default]
|
||||
BMC,
|
||||
Prove,
|
||||
Live,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalMode {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
FormalMode::BMC => "bmc",
|
||||
FormalMode::Prove => "prove",
|
||||
FormalMode::Live => "live",
|
||||
FormalMode::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FormalMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct FormalAdjustArgs;
|
||||
|
||||
impl clap::FromArgMatches for FormalAdjustArgs {
|
||||
fn from_arg_matches(_matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
|
||||
Ok(Self)
|
||||
}
|
||||
|
||||
fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl clap::Args for FormalAdjustArgs {
|
||||
fn augment_args(cmd: clap::Command) -> clap::Command {
|
||||
cmd.mut_arg("output", |arg| arg.required(false))
|
||||
.mut_arg("verilog_dialect", |arg| {
|
||||
arg.default_value(VerilogDialect::Yosys.to_string())
|
||||
.hide(true)
|
||||
})
|
||||
}
|
||||
|
||||
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
|
||||
Self::augment_args(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Parser, Clone)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalArgs {
|
||||
#[command(flatten)]
|
||||
pub verilog: VerilogArgs,
|
||||
#[arg(
|
||||
long,
|
||||
default_value = "sby",
|
||||
env = "SBY",
|
||||
value_hint = ValueHint::CommandName,
|
||||
value_parser = OsStringValueParser::new().try_map(which::which)
|
||||
)]
|
||||
pub sby: PathBuf,
|
||||
#[arg(long)]
|
||||
pub sby_extra_args: Vec<String>,
|
||||
#[arg(long, default_value_t)]
|
||||
pub mode: FormalMode,
|
||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
||||
pub depth: u64,
|
||||
#[arg(long, default_value = "z3")]
|
||||
pub solver: String,
|
||||
#[arg(long)]
|
||||
pub smtbmc_extra_args: Vec<String>,
|
||||
#[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")]
|
||||
pub cache_results: bool,
|
||||
#[command(flatten)]
|
||||
_formal_adjust_args: FormalAdjustArgs,
|
||||
}
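A small self-contained clap sketch (not the fayalite structs) showing how default_value_t and default_value behave for fields like depth and solver above; the struct name and flags here are illustrative assumptions derived from the field names, not a verified CLI surface:

use clap::Parser;

#[derive(Parser, Debug)]
struct DemoFormalArgs {
    // Mirrors `depth: u64` with `default_value_t = Self::DEFAULT_DEPTH` (20).
    #[arg(long, default_value_t = 20)]
    depth: u64,
    // Mirrors `solver: String` with `default_value = "z3"`.
    #[arg(long, default_value = "z3")]
    solver: String,
}

fn main() {
    // No flags supplied, so both defaults apply.
    let args = DemoFormalArgs::parse_from(["demo"]);
    assert_eq!(args.depth, 20);
    assert_eq!(args.solver, "z3");
}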
|
||||
|
||||
impl fmt::Debug for FormalArgs {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
verilog,
|
||||
sby,
|
||||
sby_extra_args,
|
||||
mode,
|
||||
depth,
|
||||
solver,
|
||||
smtbmc_extra_args,
|
||||
cache_results,
|
||||
_formal_adjust_args: _,
|
||||
} = self;
|
||||
f.debug_struct("FormalArgs")
|
||||
.field("verilog", verilog)
|
||||
.field("sby", sby)
|
||||
.field("sby_extra_args", sby_extra_args)
|
||||
.field("mode", mode)
|
||||
.field("depth", depth)
|
||||
.field("solver", solver)
|
||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
||||
.field("cache_results", cache_results)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl FormalArgs {
|
||||
pub const DEFAULT_DEPTH: u64 = 20;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalOutput {
|
||||
pub verilog: VerilogOutput,
|
||||
}
|
||||
|
||||
impl FormalOutput {
|
||||
pub fn sby_file(&self) -> PathBuf {
|
||||
self.verilog.firrtl.file_with_ext("sby")
|
||||
}
|
||||
pub fn cache_file(&self) -> PathBuf {
|
||||
self.verilog.firrtl.file_with_ext("cache.json")
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalCacheOutput {}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalCacheVersion {
|
||||
V1,
|
||||
}
|
||||
|
||||
impl FormalCacheVersion {
|
||||
pub const CURRENT: Self = Self::V1;
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct FormalCache {
|
||||
pub version: FormalCacheVersion,
|
||||
pub contents_hash: blake3::Hash,
|
||||
pub stdout_stderr: String,
|
||||
pub result: Result<FormalCacheOutput, String>,
|
||||
}
|
||||
|
||||
impl FormalCache {
|
||||
pub fn new(
|
||||
version: FormalCacheVersion,
|
||||
contents_hash: blake3::Hash,
|
||||
stdout_stderr: String,
|
||||
result: Result<FormalCacheOutput, String>,
|
||||
) -> Self {
|
||||
Self {
|
||||
version,
|
||||
contents_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}
|
||||
}
|
||||
}
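A rough sketch of the shape of the cached JSON, using a simplified stand-in struct: the real contents_hash is a blake3::Hash and the real version is the FormalCacheVersion enum, both replaced by strings here purely for illustration.

use serde::{Deserialize, Serialize};

// Simplified stand-in for FormalCache; field types are assumptions for illustration only.
#[derive(Serialize, Deserialize)]
struct DemoCache {
    version: String,
    contents_hash: String,
    stdout_stderr: String,
    result: Result<(), String>,
}

fn main() {
    let cache = DemoCache {
        version: "V1".into(),
        contents_hash: "<hex hash would go here>".into(),
        stdout_stderr: "<captured sby output would go here>".into(),
        result: Ok(()),
    };
    println!("{}", serde_json::to_string_pretty(&cache).unwrap());
}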
|
||||
|
||||
impl FormalArgs {
|
||||
fn sby_contents(&self, output: &FormalOutput) -> Result<String> {
|
||||
let Self {
|
||||
verilog: _,
|
||||
sby: _,
|
||||
sby_extra_args: _,
|
||||
mode,
|
||||
depth,
|
||||
smtbmc_extra_args,
|
||||
solver,
|
||||
cache_results: _,
|
||||
_formal_adjust_args: _,
|
||||
} = self;
|
||||
let smtbmc_options = smtbmc_extra_args.join(" ");
|
||||
let top_module = &output.verilog.firrtl.top_module;
|
||||
let mut retval = format!(
|
||||
"[options]\n\
|
||||
mode {mode}\n\
|
||||
depth {depth}\n\
|
||||
wait on\n\
|
||||
\n\
|
||||
[engines]\n\
|
||||
smtbmc {solver} -- -- {smtbmc_options}\n\
|
||||
\n\
|
||||
[script]\n"
|
||||
);
|
||||
for verilog_file in &output.verilog.verilog_files {
|
||||
let verilog_file = verilog_file
|
||||
.to_str()
|
||||
.ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?;
|
||||
if verilog_file.contains(|ch: char| {
|
||||
(ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
|
||||
}) {
|
||||
return Err(CliError(eyre!(
|
||||
"verilog file path contains characters that aren't permitted"
|
||||
)));
|
||||
}
|
||||
writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap();
|
||||
}
|
||||
// workaround for wires disappearing -- set `keep` on all wires
|
||||
writeln!(retval, "hierarchy -top {top_module}").unwrap();
|
||||
writeln!(retval, "proc").unwrap();
|
||||
writeln!(retval, "setattr -set keep 1 w:\\*").unwrap();
|
||||
writeln!(retval, "prep").unwrap();
|
||||
Ok(retval)
|
||||
}
|
||||
fn run_impl(
|
||||
&self,
|
||||
verilog_output: VerilogOutput,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> Result<FormalOutput> {
|
||||
let output = FormalOutput {
|
||||
verilog: verilog_output,
|
||||
};
|
||||
let sby_file = output.sby_file();
|
||||
let sby_contents = self.sby_contents(&output)?;
|
||||
let contents_hash = output.verilog.contents_hash.map(|verilog_hash| {
|
||||
let mut hasher = blake3::Hasher::new();
|
||||
hasher.update(verilog_hash.as_bytes());
|
||||
hasher.update(sby_contents.as_bytes());
|
||||
hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes());
|
||||
for sby_extra_arg in self.sby_extra_args.iter() {
|
||||
hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes());
|
||||
hasher.update(sby_extra_arg.as_bytes());
|
||||
}
|
||||
hasher.finalize()
|
||||
});
|
||||
std::fs::write(&sby_file, sby_contents)?;
|
||||
let mut cmd = process::Command::new(&self.sby);
|
||||
cmd.arg("-j1"); // sby seems not to respect job count in parallel mode
|
||||
cmd.arg("-f");
|
||||
cmd.arg(sby_file.file_name().unwrap());
|
||||
cmd.args(&self.sby_extra_args);
|
||||
cmd.current_dir(&output.verilog.firrtl.output_dir);
|
||||
let mut captured_output = String::new();
|
||||
let cache_file = output.cache_file();
|
||||
let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) {
|
||||
if let Some(FormalCache {
|
||||
version: FormalCacheVersion::CURRENT,
|
||||
contents_hash: cache_contents_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}) = fs::read(&cache_file)
|
||||
.ok()
|
||||
.and_then(|v| serde_json::from_slice(&v).ok())
|
||||
{
|
||||
if cache_contents_hash == contents_hash {
|
||||
println!("Using cached formal result:\n{stdout_stderr}");
|
||||
return match result {
|
||||
Ok(FormalCacheOutput {}) => Ok(output),
|
||||
Err(error) => Err(CliError(eyre::Report::msg(error))),
|
||||
};
|
||||
}
|
||||
}
|
||||
true
|
||||
} else {
|
||||
false
|
||||
};
|
||||
let _ = fs::remove_file(&cache_file);
|
||||
let status = self.verilog.firrtl.base.run_external_command(
|
||||
acquired_job,
|
||||
cmd,
|
||||
do_cache.then_some(&mut captured_output),
|
||||
)?;
|
||||
let result = if status.success() {
|
||||
Ok(output)
|
||||
} else {
|
||||
Err(CliError(eyre!(
|
||||
"running {} failed: {status}",
|
||||
self.sby.display()
|
||||
)))
|
||||
};
|
||||
fs::write(
|
||||
cache_file,
|
||||
serde_json::to_string_pretty(&FormalCache {
|
||||
version: FormalCacheVersion::CURRENT,
|
||||
contents_hash: contents_hash.unwrap(),
|
||||
stdout_stderr: captured_output,
|
||||
result: match &result {
|
||||
Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}),
|
||||
Err(error) => Err(error.to_string()),
|
||||
},
|
||||
})
|
||||
.expect("serialization shouldn't ever fail"),
|
||||
)?;
|
||||
result
|
||||
}
|
||||
}
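Putting sby_contents together: for the default configuration (BMC mode, depth 20, z3, no extra smtbmc args), the generated .sby file would look roughly like the listing below; the Verilog file name and top module are made-up placeholders.

[options]
mode bmc
depth 20
wait on

[engines]
smtbmc z3 -- --

[script]
read_verilog -sv -formal "my_module.v"
hierarchy -top my_module
proc
setattr -set keep 1 w:\*
prep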
|
||||
|
||||
impl<Arg> RunPhase<Arg> for FormalArgs
|
||||
where
|
||||
VerilogArgs: RunPhase<Arg, Output = VerilogOutput>,
|
||||
{
|
||||
type Output = FormalOutput;
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
let verilog_output = self.verilog.run_with_job(arg, acquired_job)?;
|
||||
self.run_impl(verilog_output, acquired_job)
|
||||
}
|
||||
}
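The cache key computed in run_impl above length-prefixes each extra argument before hashing, so that for example ["ab", "c"] and ["a", "bc"] cannot produce the same key. A standalone sketch of that framing using the same blake3 API (the input bytes are arbitrary stand-ins):

fn main() {
    let sby_extra_args = ["--prefix", "demo"]; // arbitrary example values
    let mut hasher = blake3::Hasher::new();
    hasher.update(b"<verilog contents hash bytes>"); // stand-in for the Verilog hash
    hasher.update(b"<generated .sby contents>"); // stand-in for the sby file text
    hasher.update(&(sby_extra_args.len() as u64).to_le_bytes());
    for arg in sby_extra_args {
        // Length-prefix each argument so concatenation boundaries stay unambiguous.
        hasher.update(&(arg.len() as u64).to_le_bytes());
        hasher.update(arg.as_bytes());
    }
    println!("cache key: {}", hasher.finalize());
}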
|
||||
|
||||
#[derive(Subcommand, Debug)]
enum CliCommand {
    /// Generate FIRRTL
    Firrtl(FirrtlArgs),
    /// Generate Verilog
    Verilog(VerilogArgs),
    /// Run a formal proof
    Formal(FormalArgs),
}
|
||||
|
||||
/// a simple CLI
|
||||
///
|
||||
/// Use like:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl_module]
|
||||
/// # fn my_module() {}
|
||||
/// use fayalite::cli;
|
||||
///
|
||||
/// fn main() -> cli::Result {
|
||||
/// cli::Cli::parse().run(my_module())
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// You can also use it with a larger [`clap`]-based CLI like so:
|
||||
///
|
||||
/// ```no_run
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl_module]
|
||||
/// # fn my_module() {}
|
||||
/// use clap::{Subcommand, Parser};
|
||||
/// use fayalite::cli;
|
||||
///
|
||||
/// #[derive(Subcommand)]
|
||||
/// pub enum Cmd {
|
||||
/// #[command(flatten)]
|
||||
/// Fayalite(cli::Cli),
|
||||
/// MySpecialCommand {
|
||||
/// #[arg(long)]
|
||||
/// foo: bool,
|
||||
/// },
|
||||
/// }
|
||||
///
|
||||
/// #[derive(Parser)]
|
||||
/// pub struct Cli {
|
||||
/// #[command(subcommand)]
|
||||
/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands
|
||||
/// }
|
||||
///
|
||||
/// fn main() -> cli::Result {
|
||||
/// match Cli::parse().cmd {
|
||||
/// Cmd::Fayalite(v) => v.run(my_module())?,
|
||||
/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"),
|
||||
/// }
|
||||
/// Ok(())
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Parser, Debug)]
|
||||
// clear things that would be crate-specific
|
||||
#[command(name = "Fayalite Simple CLI", about = None, long_about = None)]
|
||||
pub struct Cli {
|
||||
#[command(subcommand)]
|
||||
subcommand: CliCommand,
|
||||
}
|
||||
|
||||
impl clap::Subcommand for Cli {
|
||||
fn augment_subcommands(cmd: clap::Command) -> clap::Command {
|
||||
CliCommand::augment_subcommands(cmd)
|
||||
}
|
||||
|
||||
fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command {
|
||||
CliCommand::augment_subcommands_for_update(cmd)
|
||||
}
|
||||
|
||||
fn has_subcommand(name: &str) -> bool {
|
||||
CliCommand::has_subcommand(name)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> RunPhase<T> for Cli
|
||||
where
|
||||
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
|
||||
{
|
||||
type Output = ();
|
||||
fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
||||
match &self.subcommand {
|
||||
CliCommand::Firrtl(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
CliCommand::Verilog(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
CliCommand::Formal(c) => {
|
||||
c.run_with_job(arg, acquired_job)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Cli {
|
||||
/// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`]
|
||||
pub fn parse() -> Self {
|
||||
clap::Parser::parse()
|
||||
}
|
||||
/// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`]
|
||||
pub fn run<T>(&self, top_module: T) -> Result<()>
|
||||
where
|
||||
Self: RunPhase<T, Output = ()>,
|
||||
{
|
||||
RunPhase::run(self, top_module)
|
||||
}
|
||||
}
|
||||
|
|
@@ -6,12 +6,8 @@ use crate::{
|
|||
int::Bool,
|
||||
reset::{Reset, ResetType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
|
||||
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
|
||||
};
|
||||
use bitvec::{bits, order::Lsb0};
|
||||
|
||||
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
|
||||
pub struct Clock;
|
||||
|
|
@@ -19,7 +15,6 @@ pub struct Clock;
|
|||
impl Type for Clock {
|
||||
type BaseType = Clock;
|
||||
type MaskType = Bool;
|
||||
type SimValue = bool;
|
||||
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
|
|
@@ -41,31 +36,6 @@ impl Type for Clock {
|
|||
};
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
opaque.bits()[0]
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
*value = opaque.bits()[0];
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
[bits![0], bits![1]][*value as usize],
|
||||
))
|
||||
}
|
||||
}
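The bool-to-bits conversion above indexes a two-entry table of constant bit slices with the bool. A tiny standalone bitvec sketch of the same trick:

use bitvec::prelude::*;

fn main() {
    for value in [false, true] {
        // Index the constant table with the bool, as in the Clock impl above.
        let slice: &BitSlice = [bits![0], bits![1]][value as usize];
        assert_eq!(slice.len(), 1);
        assert_eq!(slice[0], value);
    }
}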
|
||||
|
||||
impl Clock {
|
||||
|
|
@@ -85,7 +55,6 @@ impl StaticType for Clock {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 1,
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -2,31 +2,21 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
Expr, ToExpr,
|
||||
ops::{ExprPartialEq, VariantAccess},
|
||||
},
|
||||
expr::{ops::VariantAccess, Expr, ToExpr},
|
||||
hdl,
|
||||
int::{Bool, UIntValue},
|
||||
int::Bool,
|
||||
intern::{Intern, Interned},
|
||||
module::{
|
||||
EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope, connect,
|
||||
enum_match_variants_helper, incomplete_wire, wire,
|
||||
connect, enum_match_variants_helper, incomplete_wire, wire,
|
||||
EnumMatchVariantAndInactiveScopeImpl, EnumMatchVariantsIterImpl, Scope,
|
||||
},
|
||||
sim::value::{SimValue, SimValuePartialEq},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, MatchVariantAndInactiveScope, OpaqueSimValue, OpaqueSimValueSize,
|
||||
OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten, StaticType, Type,
|
||||
TypeProperties,
|
||||
},
|
||||
util::HashMap,
|
||||
ty::{CanonicalType, MatchVariantAndInactiveScope, StaticType, Type, TypeProperties},
|
||||
};
|
||||
use bitvec::{order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{convert::Infallible, fmt, iter::FusedIterator, sync::Arc};
|
||||
use hashbrown::HashMap;
|
||||
use std::{convert::Infallible, fmt, iter::FusedIterator};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct EnumVariant {
|
||||
pub name: Interned<str>,
|
||||
pub ty: Option<CanonicalType>,
|
||||
|
|
@@ -121,7 +111,6 @@ impl EnumTypePropertiesBuilder {
|
|||
is_storable: true,
|
||||
is_castable_from_bits: true,
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
},
|
||||
variant_count: 0,
|
||||
}
|
||||
|
|
@@ -140,14 +129,9 @@ impl EnumTypePropertiesBuilder {
|
|||
is_storable,
|
||||
is_castable_from_bits,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}) = field_props
|
||||
{
|
||||
assert!(is_passive, "variant type must be a passive type");
|
||||
assert!(
|
||||
sim_only_values_len == 0,
|
||||
"can't have `SimOnlyValue`s in an Enum"
|
||||
);
|
||||
type_properties = TypeProperties {
|
||||
is_passive: true,
|
||||
is_storable: type_properties.is_storable & is_storable,
|
||||
|
|
@@ -158,7 +142,6 @@ impl EnumTypePropertiesBuilder {
|
|||
} else {
|
||||
type_properties.bit_width
|
||||
},
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
}
|
||||
Self {
|
||||
|
|
@@ -166,12 +149,6 @@ impl EnumTypePropertiesBuilder {
|
|||
variant_count: variant_count + 1,
|
||||
}
|
||||
}
|
||||
#[must_use]
|
||||
pub fn variants(self, variants: impl IntoIterator<Item = EnumVariant>) -> Self {
|
||||
variants.into_iter().fold(self, |this, variant| {
|
||||
this.variant(variant.ty.map(CanonicalType::type_properties))
|
||||
})
|
||||
}
|
||||
pub const fn finish(self) -> TypeProperties {
|
||||
assert!(
|
||||
self.variant_count != 0,
|
||||
|
|
@@ -201,8 +178,7 @@ impl Default for EnumTypePropertiesBuilder {
|
|||
impl Enum {
|
||||
#[track_caller]
|
||||
pub fn new(variants: Interned<[EnumVariant]>) -> Self {
|
||||
let mut name_indexes =
|
||||
HashMap::with_capacity_and_hasher(variants.len(), Default::default());
|
||||
let mut name_indexes = HashMap::with_capacity(variants.len());
|
||||
let mut type_props_builder = EnumTypePropertiesBuilder::new();
|
||||
for (index, EnumVariant { name, ty }) in variants.iter().enumerate() {
|
||||
if let Some(old_index) = name_indexes.insert(*name, index) {
|
||||
|
|
@@ -262,14 +238,13 @@ impl Enum {
|
|||
|
||||
pub trait EnumType:
|
||||
Type<
|
||||
BaseType = Enum,
|
||||
MaskType = Bool,
|
||||
MatchActiveScope = Scope,
|
||||
MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>,
|
||||
MatchVariantsIter = EnumMatchVariantsIter<Self>,
|
||||
>
|
||||
BaseType = Enum,
|
||||
MaskType = Bool,
|
||||
MatchActiveScope = Scope,
|
||||
MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>,
|
||||
MatchVariantsIter = EnumMatchVariantsIter<Self>,
|
||||
>
|
||||
{
|
||||
type SimBuilder: From<Self>;
|
||||
fn variants(&self) -> Interned<[EnumVariant]>;
|
||||
fn match_activate_scope(
|
||||
v: Self::MatchVariantAndInactiveScope,
|
||||
|
|
@@ -332,18 +307,7 @@ impl<T: EnumType> DoubleEndedIterator for EnumMatchVariantsIter<T> {
|
|||
}
|
||||
}
|
||||
|
||||
pub struct NoBuilder {
|
||||
_ty: Enum,
|
||||
}
|
||||
|
||||
impl From<Enum> for NoBuilder {
|
||||
fn from(_ty: Enum) -> Self {
|
||||
Self { _ty }
|
||||
}
|
||||
}
|
||||
|
||||
impl EnumType for Enum {
|
||||
type SimBuilder = NoBuilder;
|
||||
fn match_activate_scope(
|
||||
v: Self::MatchVariantAndInactiveScope,
|
||||
) -> (Self::MatchVariant, Self::MatchActiveScope) {
|
||||
|
|
@@ -358,7 +322,6 @@ impl EnumType for Enum {
|
|||
impl Type for Enum {
|
||||
type BaseType = Enum;
|
||||
type MaskType = Bool;
|
||||
type SimValue = OpaqueSimValue;
|
||||
type MatchVariant = Option<Expr<CanonicalType>>;
|
||||
type MatchActiveScope = Scope;
|
||||
type MatchVariantAndInactiveScope = EnumMatchVariantAndInactiveScope<Self>;
|
||||
|
|
@@ -389,341 +352,6 @@ impl Type for Enum {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct EnumPaddingSimValue {
|
||||
bits: Option<UIntValue>,
|
||||
}
|
||||
|
||||
impl EnumPaddingSimValue {
|
||||
pub const fn new() -> Self {
|
||||
Self { bits: None }
|
||||
}
|
||||
pub fn bit_width(&self) -> Option<usize> {
|
||||
self.bits.as_ref().map(UIntValue::width)
|
||||
}
|
||||
pub fn bits(&self) -> &Option<UIntValue> {
|
||||
&self.bits
|
||||
}
|
||||
pub fn bits_mut(&mut self) -> &mut Option<UIntValue> {
|
||||
&mut self.bits
|
||||
}
|
||||
pub fn into_bits(self) -> Option<UIntValue> {
|
||||
self.bits
|
||||
}
|
||||
pub fn from_bits(bits: Option<UIntValue>) -> Self {
|
||||
Self { bits }
|
||||
}
|
||||
pub fn from_bitslice(v: &BitSlice) -> Self {
|
||||
Self {
|
||||
bits: Some(UIntValue::new(Arc::new(v.to_bitvec()))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct UnknownVariantSimValue {
|
||||
discriminant: usize,
|
||||
body_bits: UIntValue,
|
||||
}
|
||||
|
||||
impl UnknownVariantSimValue {
|
||||
pub fn discriminant(&self) -> usize {
|
||||
self.discriminant
|
||||
}
|
||||
pub fn body_bits(&self) -> &UIntValue {
|
||||
&self.body_bits
|
||||
}
|
||||
pub fn body_bits_mut(&mut self) -> &mut UIntValue {
|
||||
&mut self.body_bits
|
||||
}
|
||||
pub fn into_body_bits(self) -> UIntValue {
|
||||
self.body_bits
|
||||
}
|
||||
pub fn into_parts(self) -> (usize, UIntValue) {
|
||||
(self.discriminant, self.body_bits)
|
||||
}
|
||||
pub fn new(discriminant: usize, body_bits: UIntValue) -> Self {
|
||||
Self {
|
||||
discriminant,
|
||||
body_bits,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EnumSimValueFromOpaque<'a> {
|
||||
variants: Interned<[EnumVariant]>,
|
||||
discriminant: usize,
|
||||
body_bits: &'a BitSlice,
|
||||
}
|
||||
|
||||
impl<'a> EnumSimValueFromOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: EnumType>(ty: T, opaque: OpaqueSimValueSlice<'a>) -> Self {
|
||||
let variants = ty.variants();
|
||||
let size = EnumTypePropertiesBuilder::new()
|
||||
.variants(variants)
|
||||
.finish()
|
||||
.size();
|
||||
assert!(size.only_bit_width().is_some());
|
||||
assert_eq!(size, opaque.size());
|
||||
let (discriminant_bits, body_bits) = opaque
|
||||
.bits()
|
||||
.split_at(discriminant_bit_width_impl(variants.len()));
|
||||
let mut discriminant = 0usize;
|
||||
discriminant.view_bits_mut::<Lsb0>()[..discriminant_bits.len()]
|
||||
.copy_from_bitslice(discriminant_bits);
|
||||
Self {
|
||||
variants,
|
||||
discriminant,
|
||||
body_bits,
|
||||
}
|
||||
}
|
||||
pub fn discriminant(&self) -> usize {
|
||||
self.discriminant
|
||||
}
|
||||
#[track_caller]
|
||||
#[cold]
|
||||
fn usage_error(&self, clone: bool) -> ! {
|
||||
let clone = if clone { "clone_" } else { "" };
|
||||
match self.variants.get(self.discriminant) {
|
||||
None => {
|
||||
panic!("should have called EnumSimValueFromBits::unknown_variant_{clone}from_bits");
|
||||
}
|
||||
Some(EnumVariant { ty: None, .. }) => {
|
||||
panic!(
|
||||
"should have called EnumSimValueFromBits::variant_no_field_{clone}from_bits"
|
||||
);
|
||||
}
|
||||
Some(EnumVariant { ty: Some(_), .. }) => {
|
||||
panic!(
|
||||
"should have called EnumSimValueFromBits::variant_with_field_{clone}from_bits"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn known_variant(&self, clone: bool) -> (Option<CanonicalType>, &'a BitSlice, &'a BitSlice) {
|
||||
let Some(EnumVariant { ty, .. }) = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(clone);
|
||||
};
|
||||
let variant_bit_width = ty.map_or(0, CanonicalType::bit_width);
|
||||
let (variant_bits, padding_bits) = self.body_bits.split_at(variant_bit_width);
|
||||
(*ty, variant_bits, padding_bits)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_from_opaque(self) -> UnknownVariantSimValue {
|
||||
let None = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
UnknownVariantSimValue::new(
|
||||
self.discriminant,
|
||||
UIntValue::new(Arc::new(self.body_bits.to_bitvec())),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_clone_from_opaque(self, value: &mut UnknownVariantSimValue) {
|
||||
let None = self.variants.get(self.discriminant) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
value.discriminant = self.discriminant;
|
||||
assert_eq!(value.body_bits.width(), self.body_bits.len());
|
||||
value
|
||||
.body_bits
|
||||
.bits_mut()
|
||||
.copy_from_bitslice(self.body_bits);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_from_opaque(self) -> EnumPaddingSimValue {
|
||||
let (None, _variant_bits, padding_bits) = self.known_variant(false) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
EnumPaddingSimValue::from_bitslice(padding_bits)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_from_opaque<T: Type>(self) -> (SimValue<T>, EnumPaddingSimValue) {
|
||||
let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(false) else {
|
||||
self.usage_error(false);
|
||||
};
|
||||
(
|
||||
SimValue::from_bitslice(T::from_canonical(variant_ty), variant_bits),
|
||||
EnumPaddingSimValue::from_bitslice(padding_bits),
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
fn clone_padding_from_bits(padding: &mut EnumPaddingSimValue, padding_bits: &BitSlice) {
|
||||
match padding.bits_mut() {
|
||||
None => *padding = EnumPaddingSimValue::from_bitslice(padding_bits),
|
||||
Some(padding) => {
|
||||
assert_eq!(padding.width(), padding_bits.len());
|
||||
padding.bits_mut().copy_from_bitslice(padding_bits);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_clone_from_opaque(self, padding: &mut EnumPaddingSimValue) {
|
||||
let (None, _variant_bits, padding_bits) = self.known_variant(true) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
Self::clone_padding_from_bits(padding, padding_bits);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_clone_from_opaque<T: Type>(
|
||||
self,
|
||||
value: &mut SimValue<T>,
|
||||
padding: &mut EnumPaddingSimValue,
|
||||
) {
|
||||
let (Some(variant_ty), variant_bits, padding_bits) = self.known_variant(true) else {
|
||||
self.usage_error(true);
|
||||
};
|
||||
assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty));
|
||||
SimValue::bits_mut(value)
|
||||
.bits_mut()
|
||||
.copy_from_bitslice(variant_bits);
|
||||
Self::clone_padding_from_bits(padding, padding_bits);
|
||||
}
|
||||
}
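To make the layout concrete: the discriminant occupies the low-order bits, followed by the body bits sized for the widest variant plus padding. A standalone arithmetic sketch, assuming discriminant_bit_width_impl (not shown here) is the usual ceil(log2(variant_count)) with a single-variant enum needing 0 bits:

// Assumed formula; the real discriminant_bit_width_impl is defined elsewhere in the crate.
fn discriminant_bit_width(variant_count: usize) -> usize {
    variant_count.next_power_of_two().trailing_zeros() as usize
}

fn main() {
    // Hypothetical enum: 3 variants, widest payload is 5 bits wide.
    let discriminant_bits = discriminant_bit_width(3);
    assert_eq!(discriminant_bits, 2);
    // Total width = discriminant bits + body bits (widest variant plus padding).
    assert_eq!(discriminant_bits + 5, 7);
}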
|
||||
|
||||
pub struct EnumSimValueToOpaque<'a> {
|
||||
variants: Interned<[EnumVariant]>,
|
||||
bit_width: usize,
|
||||
discriminant_bit_width: usize,
|
||||
writer: OpaqueSimValueWriter<'a>,
|
||||
}
|
||||
|
||||
impl<'a> EnumSimValueToOpaque<'a> {
|
||||
#[track_caller]
|
||||
pub fn new<T: EnumType>(ty: T, writer: OpaqueSimValueWriter<'a>) -> Self {
|
||||
let variants = ty.variants();
|
||||
let size = EnumTypePropertiesBuilder::new()
|
||||
.variants(variants)
|
||||
.finish()
|
||||
.size();
|
||||
assert_eq!(size, writer.size());
|
||||
Self {
|
||||
variants,
|
||||
bit_width: size
|
||||
.only_bit_width()
|
||||
.expect("enums should only contain bits"),
|
||||
discriminant_bit_width: discriminant_bit_width_impl(variants.len()),
|
||||
writer,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
fn write_discriminant(&mut self, mut discriminant: usize) {
|
||||
let orig_discriminant = discriminant;
|
||||
let discriminant_bits =
|
||||
&mut discriminant.view_bits_mut::<Lsb0>()[..self.discriminant_bit_width];
|
||||
self.writer.fill_prefix_with(
|
||||
OpaqueSimValueSize::from_bit_width(self.discriminant_bit_width),
|
||||
|writer| {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(discriminant_bits))
|
||||
},
|
||||
);
|
||||
discriminant_bits.fill(false);
|
||||
assert!(
|
||||
discriminant == 0,
|
||||
"{orig_discriminant:#x} is too big to fit in enum discriminant bits",
|
||||
);
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn unknown_variant_to_opaque(
|
||||
mut self,
|
||||
value: &UnknownVariantSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.write_discriminant(value.discriminant);
|
||||
let None = self.variants.get(value.discriminant) else {
|
||||
panic!("can't use UnknownVariantSimValue to set known discriminant");
|
||||
};
|
||||
assert_eq!(
|
||||
self.bit_width - self.discriminant_bit_width,
|
||||
value.body_bits.width()
|
||||
);
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(value.body_bits.bits()))
|
||||
}
|
||||
#[track_caller]
|
||||
fn known_variant(
|
||||
mut self,
|
||||
discriminant: usize,
|
||||
value: Option<&OpaqueSimValue>,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.write_discriminant(discriminant);
|
||||
let variant_ty = self.variants[discriminant].ty;
|
||||
let variant_size = variant_ty.map_or(OpaqueSimValueSize::empty(), CanonicalType::size);
|
||||
if let Some(value) = value {
|
||||
if variant_ty.is_none() {
|
||||
panic!("expected variant to have no field");
|
||||
}
|
||||
self.writer.fill_prefix_with(variant_size, |writer| {
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
});
|
||||
} else if variant_ty.is_some() {
|
||||
panic!("expected variant to have a field");
|
||||
}
|
||||
if let Some(padding) = padding.bits() {
|
||||
assert_eq!(padding.ty().type_properties().size(), self.writer.size());
|
||||
self.writer
|
||||
.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(padding.bits()))
|
||||
} else {
|
||||
self.writer.fill_with_zeros()
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_no_field_to_opaque(
|
||||
self,
|
||||
discriminant: usize,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
self.known_variant(discriminant, None, padding)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn variant_with_field_to_opaque<T: Type>(
|
||||
self,
|
||||
discriminant: usize,
|
||||
value: &SimValue<T>,
|
||||
padding: &EnumPaddingSimValue,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
let Some(variant_ty) = self.variants[discriminant].ty else {
|
||||
panic!("expected variant to have no field");
|
||||
};
|
||||
assert_eq!(SimValue::ty(value), T::from_canonical(variant_ty));
|
||||
self.known_variant(discriminant, Some(SimValue::opaque(value)), padding)
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn assert_is_enum_type<T: EnumType>(v: T) -> T {
|
||||
v
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn enum_type_to_sim_builder<T: EnumType>(v: T) -> T::SimBuilder {
|
||||
v.into()
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
|
|
@@ -732,79 +360,6 @@ pub enum HdlOption<T: Type> {
|
|||
HdlSome(T),
|
||||
}
|
||||
|
||||
impl<Lhs: Type + ExprPartialEq<Rhs>, Rhs: Type> ExprPartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
#[hdl]
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_eq = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_eq, ExprPartialEq::cmp_eq(lhs, rhs)),
|
||||
HdlNone => connect(cmp_eq, false),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_eq, false),
|
||||
HdlNone => connect(cmp_eq, true),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_eq
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<HdlOption<Rhs>>) -> Expr<Bool> {
|
||||
#[hdl]
|
||||
let cmp_ne = wire();
|
||||
#[hdl]
|
||||
match lhs {
|
||||
HdlSome(lhs) =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(rhs) => connect(cmp_ne, ExprPartialEq::cmp_ne(lhs, rhs)),
|
||||
HdlNone => connect(cmp_ne, true),
|
||||
}
|
||||
}
|
||||
HdlNone =>
|
||||
{
|
||||
#[hdl]
|
||||
match rhs {
|
||||
HdlSome(_) => connect(cmp_ne, true),
|
||||
HdlNone => connect(cmp_ne, false),
|
||||
}
|
||||
}
|
||||
}
|
||||
cmp_ne
|
||||
}
|
||||
}
|
||||
|
||||
impl<Lhs: SimValuePartialEq<Rhs>, Rhs: Type> SimValuePartialEq<HdlOption<Rhs>> for HdlOption<Lhs> {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<HdlOption<Rhs>>) -> bool {
|
||||
type SimValueMatch<T> = <T as Type>::SimValue;
|
||||
match (&**this, &**other) {
|
||||
(SimValueMatch::<Self>::HdlNone(_), SimValueMatch::<HdlOption<Rhs>>::HdlNone(_)) => {
|
||||
true
|
||||
}
|
||||
(SimValueMatch::<Self>::HdlSome(..), SimValueMatch::<HdlOption<Rhs>>::HdlNone(_))
|
||||
| (SimValueMatch::<Self>::HdlNone(_), SimValueMatch::<HdlOption<Rhs>>::HdlSome(..)) => {
|
||||
false
|
||||
}
|
||||
(
|
||||
SimValueMatch::<Self>::HdlSome(l, _),
|
||||
SimValueMatch::<HdlOption<Rhs>>::HdlSome(r, _),
|
||||
) => l == r,
|
||||
}
|
||||
}
|
||||
}
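The two impls above give HdlOption the same equality semantics as Option: equal only when both are HdlNone, or both are HdlSome with equal payloads. A plain-Rust analogue using ordinary Option (this is not the fayalite expression API):

fn hdl_option_like_eq<T: PartialEq>(lhs: &Option<T>, rhs: &Option<T>) -> bool {
    match (lhs, rhs) {
        (Some(l), Some(r)) => l == r, // both HdlSome: compare payloads
        (None, None) => true,         // both HdlNone: equal
        _ => false,                   // mixed: never equal
    }
}

fn main() {
    assert!(hdl_option_like_eq(&Some(3u8), &Some(3u8)));
    assert!(!hdl_option_like_eq(&Some(3u8), &None));
    assert!(hdl_option_like_eq::<u8>(&None, &None));
}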
|
||||
|
||||
#[allow(non_snake_case)]
|
||||
pub fn HdlNone<T: StaticType>() -> Expr<HdlOption<T>> {
|
||||
HdlOption[T::TYPE].HdlNone()
|
||||
|
|
|
|||
|
|
@@ -13,10 +13,9 @@ use crate::{
|
|||
intern::{Intern, Interned},
|
||||
memory::{DynPortType, MemPort, PortType},
|
||||
module::{
|
||||
Instance, ModuleIO,
|
||||
transform::visit::{Fold, Folder, Visit, Visitor},
|
||||
Instance, ModuleIO,
|
||||
},
|
||||
phantom_const::PhantomConst,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
|
||||
ty::{CanonicalType, StaticType, Type, TypeWithDeref},
|
||||
|
|
@@ -110,7 +109,6 @@ expr_enum! {
|
|||
UIntLiteral(Interned<UIntValue>),
|
||||
SIntLiteral(Interned<SIntValue>),
|
||||
BoolLiteral(bool),
|
||||
PhantomConst(PhantomConst),
|
||||
BundleLiteral(ops::BundleLiteral),
|
||||
ArrayLiteral(ops::ArrayLiteral<CanonicalType, DynSize>),
|
||||
EnumLiteral(ops::EnumLiteral),
|
||||
|
|
@@ -274,20 +272,6 @@ pub struct Expr<T: Type> {
|
|||
|
||||
impl<T: Type + fmt::Debug> fmt::Debug for Expr<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
let Self {
|
||||
__enum,
|
||||
__ty,
|
||||
__flow,
|
||||
} = self;
|
||||
let expr_ty = __ty.canonical();
|
||||
let enum_ty = __enum.to_expr().__ty;
|
||||
assert_eq!(
|
||||
expr_ty, enum_ty,
|
||||
"expr ty mismatch:\nExpr {{\n__enum: {__enum:?},\n__ty: {__ty:?},\n__flow: {__flow:?}\n}}"
|
||||
);
|
||||
}
|
||||
self.__enum.fmt(f)
|
||||
}
|
||||
}
|
||||
|
|
@@ -532,7 +516,11 @@ impl Flow {
|
|||
}
|
||||
}
|
||||
pub const fn flip_if(self, flipped: bool) -> Flow {
|
||||
if flipped { self.flip() } else { self }
|
||||
if flipped {
|
||||
self.flip()
|
||||
} else {
|
||||
self
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@@ -710,7 +698,6 @@ impl<T: ToExpr + ?Sized> CastToBits for T {
|
|||
}
|
||||
|
||||
pub trait CastBitsTo {
|
||||
#[track_caller]
|
||||
fn cast_bits_to<T: Type>(&self, ty: T) -> Expr<T>;
|
||||
}
|
||||
|
||||
|
|
@@ -768,27 +755,3 @@ pub fn repeat<T: Type, L: SizeType>(
|
|||
)
|
||||
.to_expr()
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> ToExpr for PhantomConst<T> {
|
||||
type Type = Self;
|
||||
|
||||
fn to_expr(&self) -> Expr<Self::Type> {
|
||||
Expr {
|
||||
__enum: ExprEnum::PhantomConst(self.canonical_phantom_const()).intern_sized(),
|
||||
__ty: *self,
|
||||
__flow: Flow::Source,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> GetTarget for PhantomConst<T> {
|
||||
fn target(&self) -> Option<Interned<Target>> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + crate::phantom_const::PhantomConstValue> ToLiteralBits for PhantomConst<T> {
|
||||
fn to_literal_bits(&self) -> Result<Interned<BitSlice>, NotALiteralExpr> {
|
||||
Ok(Interned::default())
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -7,19 +7,18 @@ use crate::{
|
|||
clock::{Clock, ToClock},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
CastBitsTo as _, CastTo, CastToBits as _, Expr, ExprEnum, Flow, HdlPartialEq,
|
||||
HdlPartialOrd, NotALiteralExpr, ReduceBits, ToExpr, ToLiteralBits,
|
||||
target::{
|
||||
GetTarget, Target, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
CastTo, Expr, ExprEnum, Flow, HdlPartialEq, HdlPartialOrd, NotALiteralExpr, ReduceBits,
|
||||
ToExpr, ToLiteralBits,
|
||||
},
|
||||
int::{
|
||||
Bool, BoolOrIntType, DynSize, IntType, KnownSize, SInt, SIntType, SIntValue, Size, UInt,
|
||||
UIntType, UIntValue,
|
||||
},
|
||||
intern::{Intern, Interned},
|
||||
phantom_const::{PhantomConst, PhantomConstValue},
|
||||
reset::{
|
||||
AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset, ToAsyncReset, ToReset,
|
||||
ToSyncReset,
|
||||
|
|
@@ -1893,26 +1892,6 @@ impl ExprCastTo<Clock> for Clock {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprCastTo<()> for PhantomConst<T> {
|
||||
fn cast_to(src: Expr<Self>, to_type: ()) -> Expr<()> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprCastTo<PhantomConst<T>> for () {
|
||||
fn cast_to(src: Expr<Self>, to_type: PhantomConst<T>) -> Expr<PhantomConst<T>> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, U: ?Sized + PhantomConstValue> ExprCastTo<PhantomConst<T>>
|
||||
for PhantomConst<U>
|
||||
{
|
||||
fn cast_to(src: Expr<Self>, to_type: PhantomConst<T>) -> Expr<PhantomConst<T>> {
|
||||
src.cast_to_bits().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct FieldAccess<FieldType: Type = CanonicalType> {
|
||||
base: Expr<Bundle>,
|
||||
|
|
@@ -1937,8 +1916,7 @@ impl<FieldType: Type> FieldAccess<FieldType> {
|
|||
let field = Expr::ty(base).fields()[field_index];
|
||||
let field_type = FieldType::from_canonical(field.ty);
|
||||
let literal_bits = base.to_literal_bits().map(|bits| {
|
||||
bits[Expr::ty(base).field_offsets()[field_index].bit_width..][..field.ty.bit_width()]
|
||||
.intern()
|
||||
bits[Expr::ty(base).field_offsets()[field_index]..][..field.ty.bit_width()].intern()
|
||||
});
|
||||
let target = base.target().map(|base| {
|
||||
Intern::intern_sized(base.join(TargetPathElement::intern_sized(
|
||||
|
|
@@ -2730,47 +2708,3 @@ impl<T: Type> ToExpr for Uninit<T> {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExprIntoIterator: Type {
|
||||
type Item: Type;
|
||||
type ExprIntoIter: Iterator<Item = Expr<Self::Item>>;
|
||||
|
||||
fn expr_into_iter(e: Expr<Self>) -> Self::ExprIntoIter;
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for &'_ Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExprIntoIterator> IntoIterator for &'_ mut Expr<T> {
|
||||
type Item = Expr<T::Item>;
|
||||
type IntoIter = T::ExprIntoIter;
|
||||
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
T::expr_into_iter(*self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExprFromIterator<A>: Type {
|
||||
fn expr_from_iter<T: IntoIterator<Item = A>>(iter: T) -> Expr<Self>;
|
||||
}
|
||||
|
||||
impl<This: ExprFromIterator<A>, A> FromIterator<A> for Expr<This> {
|
||||
fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
|
||||
This::expr_from_iter(iter)
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,656 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
bundle::{Bundle, BundleField, BundleType, BundleTypePropertiesBuilder, NoBuilder},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd,
|
||||
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
|
||||
},
|
||||
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
|
||||
intern::{Intern, InternSlice, Interned},
|
||||
phantom_const::PhantomConst,
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
|
||||
StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
},
|
||||
};
|
||||
use bitvec::{order::Lsb0, view::BitView};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{Error, Visitor, value::UsizeDeserializer},
|
||||
};
|
||||
use std::{fmt, marker::PhantomData, ops::Index};
|
||||
|
||||
const UINT_IN_RANGE_TYPE_FIELD_NAMES: [&'static str; 2] = ["value", "range"];
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct UIntInRangeMaskType {
|
||||
value: Bool,
|
||||
range: PhantomConstRangeMaskType,
|
||||
}
|
||||
|
||||
impl Type for UIntInRangeMaskType {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = Self;
|
||||
type SimValue = bool;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
*self
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::Bundle(Bundle::new(self.fields()))
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let fields = Bundle::from_canonical(canonical_type).fields();
|
||||
let [
|
||||
BundleField {
|
||||
name: value_name,
|
||||
flipped: false,
|
||||
ty: value,
|
||||
},
|
||||
BundleField {
|
||||
name: range_name,
|
||||
flipped: false,
|
||||
ty: range,
|
||||
},
|
||||
] = *fields
|
||||
else {
|
||||
panic!("expected UIntInRangeMaskType");
|
||||
};
|
||||
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
|
||||
let value = Bool::from_canonical(value);
|
||||
let range = PhantomConstRangeMaskType::from_canonical(range);
|
||||
Self { value, range }
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
Bool.sim_value_from_opaque(opaque)
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
Bool.sim_value_clone_from_opaque(value, opaque);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
Bool.sim_value_to_opaque(value, writer)
|
||||
}
|
||||
}
|
||||
|
||||
impl BundleType for UIntInRangeMaskType {
|
||||
type Builder = NoBuilder;
|
||||
type FilledBuilder = Expr<UIntInRangeMaskType>;
|
||||
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
|
||||
let Self { value, range } = self;
|
||||
[
|
||||
BundleField {
|
||||
name: value_name.intern(),
|
||||
flipped: false,
|
||||
ty: value.canonical(),
|
||||
},
|
||||
BundleField {
|
||||
name: range_name.intern(),
|
||||
flipped: false,
|
||||
ty: range.canonical(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl StaticType for UIntInRangeMaskType {
|
||||
const TYPE: Self = Self {
|
||||
value: Bool,
|
||||
range: PhantomConstRangeMaskType::TYPE,
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = Self::TYPE;
|
||||
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
|
||||
.field(false, Bool::TYPE_PROPERTIES)
|
||||
.field(false, PhantomConstRangeMaskType::TYPE_PROPERTIES)
|
||||
.finish();
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Self::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl ToSimValueWithType<UIntInRangeMaskType> for bool {
|
||||
fn to_sim_value_with_type(&self, ty: UIntInRangeMaskType) -> SimValue<UIntInRangeMaskType> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<Bool> for UIntInRangeMaskType {
|
||||
fn cast_to(src: Expr<Self>, to_type: Bool) -> Expr<Bool> {
|
||||
src.cast_to_bits().cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprCastTo<UIntInRangeMaskType> for Bool {
|
||||
fn cast_to(src: Expr<Self>, to_type: UIntInRangeMaskType) -> Expr<UIntInRangeMaskType> {
|
||||
src.cast_to_static::<UInt<1>>().cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExprPartialEq<Self> for UIntInRangeMaskType {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl SimValuePartialEq<Self> for UIntInRangeMaskType {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
|
||||
**this == **other
|
||||
}
|
||||
}
|
||||
|
||||
type PhantomConstRangeMaskType = <PhantomConst<SerdeRange<DynSize, DynSize>> as Type>::MaskType;
|
||||
|
||||
#[derive(Default, Copy, Clone, Debug)]
|
||||
struct RangeParseError;
|
||||
|
||||
macro_rules! define_uint_in_range_type {
|
||||
(
|
||||
$UIntInRange:ident,
|
||||
$UIntInRangeType:ident,
|
||||
$UIntInRangeTypeWithoutGenerics:ident,
|
||||
$UIntInRangeTypeWithStart:ident,
|
||||
$SerdeRange:ident,
|
||||
$range_operator_str:literal,
|
||||
|$uint_range_usize_start:ident, $uint_range_usize_end:ident| $uint_range_usize:expr,
|
||||
) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct $SerdeRange<Start: Size, End: Size> {
|
||||
start: Start::SizeType,
|
||||
end: End::SizeType,
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> Default for $SerdeRange<Start, End> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
start: Start::SIZE,
|
||||
end: End::SIZE,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for $SerdeRange<DynSize, DynSize> {
|
||||
type Err = RangeParseError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let Some((start, end)) = s.split_once($range_operator_str) else {
|
||||
return Err(RangeParseError);
|
||||
};
|
||||
if start.is_empty()
|
||||
|| start.bytes().any(|b| !b.is_ascii_digit())
|
||||
|| end.is_empty()
|
||||
|| end.bytes().any(|b| !b.is_ascii_digit())
|
||||
{
|
||||
return Err(RangeParseError);
|
||||
}
|
||||
let start = start.parse().map_err(|_| RangeParseError)?;
|
||||
let end = end.parse().map_err(|_| RangeParseError)?;
|
||||
let retval = Self { start, end };
|
||||
if retval.is_empty() {
|
||||
Err(RangeParseError)
|
||||
} else {
|
||||
Ok(retval)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> fmt::Display for $SerdeRange<Start, End> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self { start, end } = *self;
|
||||
write!(
|
||||
f,
|
||||
"{}{}{}",
|
||||
Start::as_usize(start),
|
||||
$range_operator_str,
|
||||
End::as_usize(end),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> Serialize for $SerdeRange<Start, End> {
|
||||
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
|
||||
serializer.collect_str(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, Start: Size, End: Size> Deserialize<'de> for $SerdeRange<Start, End> {
|
||||
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
|
||||
struct SerdeRangeVisitor<Start: Size, End: Size>(PhantomData<(Start, End)>);
|
||||
impl<'de, Start: Size, End: Size> Visitor<'de> for SerdeRangeVisitor<Start, End> {
|
||||
type Value = $SerdeRange<Start, End>;
|
||||
|
||||
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("a string with format \"")?;
|
||||
if let Some(start) = Start::KNOWN_VALUE {
|
||||
write!(f, "{start}")?;
|
||||
} else {
|
||||
f.write_str("<int>")?;
|
||||
};
|
||||
f.write_str($range_operator_str)?;
|
||||
if let Some(end) = End::KNOWN_VALUE {
|
||||
write!(f, "{end}")?;
|
||||
} else {
|
||||
f.write_str("<int>")?;
|
||||
};
|
||||
f.write_str("\" that is a non-empty range")
|
||||
}
|
||||
|
||||
fn visit_str<E: Error>(self, v: &str) -> Result<Self::Value, E> {
|
||||
let $SerdeRange::<DynSize, DynSize> { start, end } =
|
||||
v.parse().map_err(|_| {
|
||||
Error::invalid_value(serde::de::Unexpected::Str(v), &self)
|
||||
})?;
|
||||
let start =
|
||||
Start::SizeType::deserialize(UsizeDeserializer::<E>::new(start))?;
|
||||
let end = End::SizeType::deserialize(UsizeDeserializer::<E>::new(end))?;
|
||||
Ok($SerdeRange { start, end })
|
||||
}
|
||||
|
||||
fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
|
||||
match std::str::from_utf8(v) {
|
||||
Ok(v) => self.visit_str(v),
|
||||
Err(_) => {
|
||||
Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
deserializer.deserialize_str(SerdeRangeVisitor(PhantomData))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct $UIntInRangeType<Start: Size, End: Size> {
|
||||
value: UInt,
|
||||
range: PhantomConst<$SerdeRange<Start, End>>,
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> $UIntInRangeType<Start, End> {
|
||||
fn from_phantom_const_range(range: PhantomConst<$SerdeRange<Start, End>>) -> Self {
|
||||
let $SerdeRange { start, end } = *range.get();
|
||||
let $uint_range_usize_start = Start::as_usize(start);
|
||||
let $uint_range_usize_end = End::as_usize(end);
|
||||
Self {
|
||||
value: $uint_range_usize,
|
||||
range,
|
||||
}
|
||||
}
|
||||
pub fn new(start: Start::SizeType, end: End::SizeType) -> Self {
|
||||
Self::from_phantom_const_range(PhantomConst::new(
|
||||
$SerdeRange { start, end }.intern_sized(),
|
||||
))
|
||||
}
|
||||
pub fn bit_width(self) -> usize {
|
||||
self.value.width()
|
||||
}
|
||||
pub fn start(self) -> Start::SizeType {
|
||||
self.range.get().start
|
||||
}
|
||||
pub fn end(self) -> End::SizeType {
|
||||
self.range.get().end
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> fmt::Debug for $UIntInRangeType<Start, End> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self { value, range } = self;
|
||||
let $SerdeRange { start, end } = *range.get();
|
||||
f.debug_struct(&format!(
|
||||
"{}<{}, {}>",
|
||||
stringify!($UIntInRange),
|
||||
Start::as_usize(start),
|
||||
End::as_usize(end),
|
||||
))
|
||||
.field("value", value)
|
||||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> Type for $UIntInRangeType<Start, End> {
|
||||
type BaseType = Bundle;
|
||||
type MaskType = UIntInRangeMaskType;
|
||||
type SimValue = usize;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
UIntInRangeMaskType::TYPE
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::Bundle(Bundle::new(self.fields()))
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let fields = Bundle::from_canonical(canonical_type).fields();
|
||||
let [
|
||||
BundleField {
|
||||
name: value_name,
|
||||
flipped: false,
|
||||
ty: value,
|
||||
},
|
||||
BundleField {
|
||||
name: range_name,
|
||||
flipped: false,
|
||||
ty: range,
|
||||
},
|
||||
] = *fields
|
||||
else {
|
||||
panic!("expected {}", stringify!($UIntInRange));
|
||||
};
|
||||
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
|
||||
let value = UInt::from_canonical(value);
|
||||
let range = PhantomConst::<$SerdeRange<Start, End>>::from_canonical(range);
|
||||
let retval = Self::from_phantom_const_range(range);
|
||||
assert_eq!(retval, Self { value, range });
|
||||
retval
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), self.value.type_properties().size());
|
||||
let mut retval = 0usize;
|
||||
retval.view_bits_mut::<Lsb0>()[..opaque.bit_width()]
|
||||
.clone_from_bitslice(opaque.bits());
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
*value = self.sim_value_from_opaque(opaque);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
&value.view_bits::<Lsb0>()[..self.value.width()],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> BundleType for $UIntInRangeType<Start, End> {
|
||||
type Builder = NoBuilder;
|
||||
type FilledBuilder = Expr<Self>;
|
||||
|
||||
fn fields(&self) -> Interned<[BundleField]> {
|
||||
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
|
||||
let Self { value, range } = self;
|
||||
[
|
||||
BundleField {
|
||||
name: value_name.intern(),
|
||||
flipped: false,
|
||||
ty: value.canonical(),
|
||||
},
|
||||
BundleField {
|
||||
name: range_name.intern(),
|
||||
flipped: false,
|
||||
ty: range.canonical(),
|
||||
},
|
||||
]
|
||||
.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> Default for $UIntInRangeType<Start, End> {
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: KnownSize, End: KnownSize> StaticType for $UIntInRangeType<Start, End> {
|
||||
const TYPE: Self = {
|
||||
let $uint_range_usize_start = Start::VALUE;
|
||||
let $uint_range_usize_end = End::VALUE;
|
||||
Self {
|
||||
value: $uint_range_usize,
|
||||
range: PhantomConst::<$SerdeRange<Start, End>>::TYPE,
|
||||
}
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = UIntInRangeMaskType::TYPE;
|
||||
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
|
||||
.field(false, Self::TYPE.value.type_properties_dyn())
|
||||
.field(
|
||||
false,
|
||||
PhantomConst::<$SerdeRange<Start, End>>::TYPE_PROPERTIES,
|
||||
)
|
||||
.finish();
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = UIntInRangeMaskType::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size> ToSimValueWithType<$UIntInRangeType<Start, End>> for usize {
|
||||
fn to_sim_value_with_type(
|
||||
&self,
|
||||
ty: $UIntInRangeType<Start, End>,
|
||||
) -> SimValue<$UIntInRangeType<Start, End>> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct $UIntInRangeTypeWithoutGenerics;
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const $UIntInRangeType: $UIntInRangeTypeWithoutGenerics =
|
||||
$UIntInRangeTypeWithoutGenerics;
|
||||
|
||||
impl<StartSize: SizeType> Index<StartSize> for $UIntInRangeTypeWithoutGenerics {
|
||||
type Output = $UIntInRangeTypeWithStart<StartSize::Size>;
|
||||
|
||||
fn index(&self, start: StartSize) -> &Self::Output {
|
||||
Interned::into_inner($UIntInRangeTypeWithStart(start).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct $UIntInRangeTypeWithStart<Start: Size>(Start::SizeType);
|
||||
|
||||
impl<Start: Size, EndSize: SizeType<Size = End>, End: Size<SizeType = EndSize>>
|
||||
Index<EndSize> for $UIntInRangeTypeWithStart<Start>
|
||||
{
|
||||
type Output = $UIntInRangeType<Start, End>;
|
||||
|
||||
fn index(&self, end: EndSize) -> &Self::Output {
|
||||
Interned::into_inner($UIntInRangeType::new(self.0, end).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprCastTo<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cast_to(src: Expr<Self>, to_type: UIntType<Width>) -> Expr<UIntType<Width>> {
|
||||
src.cast_to_bits().cast_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprCastTo<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cast_to(
|
||||
src: Expr<Self>,
|
||||
to_type: $UIntInRangeType<Start, End>,
|
||||
) -> Expr<$UIntInRangeType<Start, End>> {
|
||||
src.cast_to(to_type.value).cast_bits_to(to_type)
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
ExprPartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn cmp_eq(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
ExprPartialOrd<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn cmp_lt(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_lt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_le(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_le(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_gt(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_gt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ge(
|
||||
lhs: Expr<Self>,
|
||||
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ge(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
|
||||
SimValuePartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
|
||||
for $UIntInRangeType<LhsStart, LhsEnd>
|
||||
{
|
||||
fn sim_value_eq(
|
||||
this: &SimValue<Self>,
|
||||
other: &SimValue<$UIntInRangeType<RhsStart, RhsEnd>>,
|
||||
) -> bool {
|
||||
**this == **other
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_eq(rhs)
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ne(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_eq(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_ne(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<UIntType<Width>>
|
||||
for $UIntInRangeType<Start, End>
|
||||
{
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_lt(rhs)
|
||||
}
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_le(rhs)
|
||||
}
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_gt(rhs)
|
||||
}
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
|
||||
lhs.cast_to_bits().cmp_ge(rhs)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<$UIntInRangeType<Start, End>>
|
||||
for UIntType<Width>
|
||||
{
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_lt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_le(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_gt(rhs.cast_to_bits())
|
||||
}
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
|
||||
lhs.cmp_ge(rhs.cast_to_bits())
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
define_uint_in_range_type! {
    UIntInRange,
    UIntInRangeType,
    UIntInRangeTypeWithoutGenerics,
    UIntInRangeTypeWithStart,
    SerdeRange,
    "..",
    |start, end| UInt::range_usize(start..end),
}

define_uint_in_range_type! {
    UIntInRangeInclusive,
    UIntInRangeInclusiveType,
    UIntInRangeInclusiveTypeWithoutGenerics,
    UIntInRangeInclusiveTypeWithStart,
    SerdeRangeInclusive,
    "..=",
    |start, end| UInt::range_inclusive_usize(start..=end),
}

impl SerdeRange<DynSize, DynSize> {
    fn is_empty(self) -> bool {
        self.start >= self.end
    }
}

impl SerdeRangeInclusive<DynSize, DynSize> {
    fn is_empty(self) -> bool {
        self.start > self.end
    }
}
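The pair of `Index` impls above is what makes bracket-style construction of these range types work: indexing the zero-sized constructor first with the start and then with the end interns the intermediate value, so each step can hand back the `&'static` reference that `Index::Output` requires. A minimal standalone sketch of that pattern (illustrative names only, with `Box::leak` standing in for fayalite's interner):

```
use std::ops::Index;

#[derive(Copy, Clone, Debug)]
struct RangeTy {
    start: usize,
    end: usize,
}

#[derive(Copy, Clone, Debug, Default)]
struct RangeTyBuilder;

#[derive(Copy, Clone, Debug)]
struct RangeTyWithStart(usize);

impl Index<usize> for RangeTyBuilder {
    type Output = RangeTyWithStart;
    fn index(&self, start: usize) -> &Self::Output {
        // A real interner would deduplicate; leaking is enough for a sketch.
        Box::leak(Box::new(RangeTyWithStart(start)))
    }
}

impl Index<usize> for RangeTyWithStart {
    type Output = RangeTy;
    fn index(&self, end: usize) -> &Self::Output {
        Box::leak(Box::new(RangeTy { start: self.0, end }))
    }
}

fn main() {
    let ty = RangeTyBuilder[3][7];
    assert_eq!((ty.start, ty.end), (3, 7));
}
```

The `is_empty` helpers directly above differ only in the comparison (`>=` for the half-open range, `>` for the inclusive one), matching the `..` and `..=` syntaxes the two macro invocations are instantiated with.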
@ -1,25 +1,23 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![allow(clippy::type_complexity)]
|
||||
use crate::{intern::type_map::TypeIdMap, util::DefaultBuildHasher};
|
||||
use crate::intern::type_map::TypeIdMap;
|
||||
use bitvec::{ptr::BitPtr, slice::BitSlice, vec::BitVec};
|
||||
use hashbrown::HashTable;
|
||||
use hashbrown::{hash_map::RawEntryMut, HashMap, HashTable};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
borrow::{Borrow, Cow},
|
||||
cmp::Ordering,
|
||||
ffi::{OsStr, OsString},
|
||||
fmt,
|
||||
hash::{BuildHasher, Hash, Hasher},
|
||||
iter::FusedIterator,
|
||||
marker::PhantomData,
|
||||
ops::Deref,
|
||||
path::{Path, PathBuf},
|
||||
sync::{Mutex, RwLock},
|
||||
};
|
||||
|
||||
mod type_map;
|
||||
pub mod type_map;
|
||||
|
||||
pub trait LazyInternedTrait<T: ?Sized + Send + Sync + 'static>: Send + Sync + Any {
|
||||
fn get(&self) -> Interned<T>;
|
||||
|
|
@ -289,266 +287,15 @@ impl InternedCompare for BitSlice {
|
|||
}
|
||||
}
|
||||
|
||||
/// Safety: `as_bytes` and `from_bytes_unchecked` must return the same pointer as the input.
/// All values returned by `as_bytes` must be valid to pass to `from_bytes_unchecked`.
/// `into_bytes` must return the exact same thing as `as_bytes`.
/// `Interned<Self>` must contain the exact same references as `Interned<[u8]>`,
/// so they can be safely interconverted without needing re-interning.
unsafe trait InternStrLike: ToOwned {
    fn as_bytes(this: &Self) -> &[u8];
    fn into_bytes(this: Self::Owned) -> Vec<u8>;
    /// Safety: `bytes` must be a valid sequence of bytes for this type. All UTF-8 sequences are valid.
    unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self;
}
|
||||
macro_rules! impl_intern_str_like {
|
||||
($ty:ty, owned = $Owned:ty) => {
|
||||
impl InternedCompare for $ty {
|
||||
type InternedCompareKey = PtrEqWithMetadata<[u8]>;
|
||||
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
|
||||
PtrEqWithMetadata(InternStrLike::as_bytes(this))
|
||||
}
|
||||
}
|
||||
impl Intern for $ty {
|
||||
fn intern(&self) -> Interned<Self> {
|
||||
Self::intern_cow(Cow::Borrowed(self))
|
||||
}
|
||||
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self> {
|
||||
Interned::cast_unchecked(
|
||||
<[u8]>::intern_cow(match this {
|
||||
Cow::Borrowed(v) => Cow::Borrowed(<Self as InternStrLike>::as_bytes(v)),
|
||||
Cow::Owned(v) => {
|
||||
// verify $Owned is correct
|
||||
let v: $Owned = v;
|
||||
Cow::Owned(<Self as InternStrLike>::into_bytes(v))
|
||||
}
|
||||
}),
|
||||
// Safety: guaranteed safe because we got the bytes from `as_bytes`/`into_bytes`
|
||||
|v| unsafe { <Self as InternStrLike>::from_bytes_unchecked(v) },
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Default for Interned<$ty> {
|
||||
fn default() -> Self {
|
||||
// Safety: safe because the empty sequence is valid UTF-8
|
||||
unsafe { <$ty as InternStrLike>::from_bytes_unchecked(&[]) }.intern()
|
||||
}
|
||||
}
|
||||
impl<'de> Deserialize<'de> for Interned<$ty> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Cow::<'de, $ty>::deserialize(deserializer).map(Intern::intern_cow)
|
||||
}
|
||||
}
|
||||
impl From<$Owned> for Interned<$ty> {
|
||||
fn from(v: $Owned) -> Self {
|
||||
v.intern_deref()
|
||||
}
|
||||
}
|
||||
impl From<Interned<$ty>> for $Owned {
|
||||
fn from(v: Interned<$ty>) -> Self {
|
||||
Interned::into_inner(v).into()
|
||||
}
|
||||
}
|
||||
impl From<Interned<$ty>> for Box<$ty> {
|
||||
fn from(v: Interned<$ty>) -> Self {
|
||||
Interned::into_inner(v).into()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `str`
|
||||
unsafe impl InternStrLike for str {
|
||||
fn as_bytes(this: &Self) -> &[u8] {
|
||||
this.as_bytes()
|
||||
}
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||
this.into_bytes()
|
||||
}
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
// Safety: `bytes` is guaranteed UTF-8 by the caller
|
||||
unsafe { str::from_utf8_unchecked(bytes) }
|
||||
}
|
||||
}
|
||||
|
||||
impl_intern_str_like!(str, owned = String);
|
||||
|
||||
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr`
|
||||
unsafe impl InternStrLike for OsStr {
|
||||
fn as_bytes(this: &Self) -> &[u8] {
|
||||
this.as_encoded_bytes()
|
||||
}
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||
this.into_encoded_bytes()
|
||||
}
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
// Safety: `bytes` is guaranteed valid for `OsStr` by the caller
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
}
|
||||
}
|
||||
|
||||
impl_intern_str_like!(OsStr, owned = OsString);
|
||||
|
||||
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr`
|
||||
unsafe impl InternStrLike for Path {
|
||||
fn as_bytes(this: &Self) -> &[u8] {
|
||||
this.as_os_str().as_encoded_bytes()
|
||||
}
|
||||
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||
this.into_os_string().into_encoded_bytes()
|
||||
}
|
||||
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||
// Safety: `bytes` is guaranteed valid for `OsStr` by the caller
|
||||
unsafe { Path::new(OsStr::from_encoded_bytes_unchecked(bytes)) }
|
||||
}
|
||||
}
|
||||
|
||||
impl_intern_str_like!(Path, owned = PathBuf);
|
||||
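A standalone sketch of the idea behind `InternStrLike` and the `impl_intern_str_like!` invocations above: `str`, `OsStr`, and `Path` all funnel through one byte interner and then reconstitute the typed reference from the interned bytes, so no per-type interner state is needed. The sketch below uses a checked `from_utf8` and a leak-backed set instead of fayalite's real interner; the names are illustrative only:

```
use std::collections::HashSet;
use std::sync::Mutex;

static BYTES: Mutex<Option<HashSet<&'static [u8]>>> = Mutex::new(None);

fn intern_bytes(v: &[u8]) -> &'static [u8] {
    let mut guard = BYTES.lock().unwrap();
    let set = guard.get_or_insert_with(HashSet::new);
    if let Some(found) = set.get(v) {
        return *found;
    }
    // A real interner would use a proper allocator; leaking keeps the sketch short.
    let leaked: &'static [u8] = Box::leak(v.to_vec().into_boxed_slice());
    set.insert(leaked);
    leaked
}

fn intern_str(v: &str) -> &'static str {
    // The real code can use `from_utf8_unchecked` because the bytes are known
    // to have come from a `str`; a checked conversion keeps the sketch safe.
    std::str::from_utf8(intern_bytes(v.as_bytes())).unwrap()
}

fn main() {
    let a = intern_str("hello");
    let b = intern_str(&String::from("hello"));
    // One shared allocation, so pointer identity can stand in for equality.
    assert!(std::ptr::eq(a, b));
}
```

Because `Interned<Self>` is guaranteed to hold the same reference as `Interned<[u8]>`, the real code can also convert between `Interned<str>`, `Interned<OsStr>`, and `Interned<Path>` without re-interning, which is what the `as_interned_*` helpers below rely on.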
|
||||
impl Interned<str> {
|
||||
pub fn from_utf8(v: Interned<[u8]>) -> Result<Self, std::str::Utf8Error> {
|
||||
Interned::try_cast_unchecked(v, str::from_utf8)
|
||||
}
|
||||
pub fn as_interned_bytes(self) -> Interned<[u8]> {
|
||||
Interned::cast_unchecked(self, str::as_bytes)
|
||||
}
|
||||
pub fn as_interned_os_str(self) -> Interned<OsStr> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
pub fn as_interned_path(self) -> Interned<Path> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for Interned<OsStr> {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
value.as_interned_os_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for Interned<Path> {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
value.as_interned_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl Interned<OsStr> {
|
||||
pub fn as_interned_encoded_bytes(self) -> Interned<[u8]> {
|
||||
Interned::cast_unchecked(self, OsStr::as_encoded_bytes)
|
||||
}
|
||||
pub fn to_interned_str(self) -> Option<Interned<str>> {
|
||||
Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
|
||||
}
|
||||
pub fn display(self) -> std::ffi::os_str::Display<'static> {
|
||||
Self::into_inner(self).display()
|
||||
}
|
||||
pub fn as_interned_path(self) -> Interned<Path> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<OsStr>> for Interned<Path> {
|
||||
fn from(value: Interned<OsStr>) -> Self {
|
||||
value.as_interned_path()
|
||||
}
|
||||
}
|
||||
|
||||
impl Interned<Path> {
|
||||
pub fn as_interned_os_str(self) -> Interned<OsStr> {
|
||||
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||
}
|
||||
pub fn to_interned_str(self) -> Option<Interned<str>> {
|
||||
Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
|
||||
}
|
||||
pub fn display(self) -> std::path::Display<'static> {
|
||||
Self::into_inner(self).display()
|
||||
}
|
||||
pub fn interned_file_name(self) -> Option<Interned<OsStr>> {
|
||||
Some(self.file_name()?.intern())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<Path>> for Interned<OsStr> {
|
||||
fn from(value: Interned<Path>) -> Self {
|
||||
value.as_interned_os_str()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait InternSlice: Sized {
|
||||
type Element: 'static + Send + Sync + Clone + Hash + Eq;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]>;
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Box<[T]> {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.into_vec().intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Vec<T> {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.intern_deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ [T] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ mut [T] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
self.intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for [T; N] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
(&self).intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for Box<[T; N]> {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
let this: Box<[T]> = self;
|
||||
this.intern_slice()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ [T; N] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
let this: &[T] = self;
|
||||
this.intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ mut [T; N] {
|
||||
type Element = T;
|
||||
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||
let this: &[T] = self;
|
||||
this.intern()
|
||||
impl InternedCompare for str {
|
||||
type InternedCompareKey = PtrEqWithMetadata<Self>;
|
||||
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
|
||||
PtrEqWithMetadata(this)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait Intern: Any + Send + Sync {
|
||||
fn intern(&self) -> Interned<Self>;
|
||||
fn intern_deref(self) -> Interned<Self::Target>
|
||||
where
|
||||
Self: Sized + Deref<Target: Intern + ToOwned<Owned = Self>>,
|
||||
{
|
||||
Self::Target::intern_owned(self)
|
||||
}
|
||||
fn intern_sized(self) -> Interned<Self>
|
||||
where
|
||||
Self: Clone,
|
||||
|
|
@ -569,37 +316,8 @@ pub trait Intern: Any + Send + Sync {
|
|||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Intern + ToOwned> From<Cow<'_, T>> for Interned<T> {
|
||||
fn from(value: Cow<'_, T>) -> Self {
|
||||
Intern::intern_cow(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + Intern> From<&'_ T> for Interned<T> {
|
||||
fn from(value: &'_ T) -> Self {
|
||||
Intern::intern(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Intern + Clone> From<T> for Interned<T> {
|
||||
fn from(value: T) -> Self {
|
||||
Intern::intern_sized(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + 'static + Send + Sync + ToOwned> From<Interned<T>> for Cow<'_, T> {
|
||||
fn from(value: Interned<T>) -> Self {
|
||||
Cow::Borrowed(Interned::into_inner(value))
|
||||
}
|
||||
}
|
||||
|
||||
struct InternerState<T: ?Sized + 'static + Send + Sync> {
|
||||
table: HashTable<&'static T>,
|
||||
hasher: DefaultBuildHasher,
|
||||
}
|
||||
|
||||
pub struct Interner<T: ?Sized + 'static + Send + Sync> {
|
||||
state: Mutex<InternerState<T>>,
|
||||
map: Mutex<HashMap<&'static T, ()>>,
|
||||
}
|
||||
|
||||
impl<T: ?Sized + 'static + Send + Sync> Interner<T> {
|
||||
|
|
@ -612,10 +330,7 @@ impl<T: ?Sized + 'static + Send + Sync> Interner<T> {
|
|||
impl<T: ?Sized + 'static + Send + Sync> Default for Interner<T> {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
state: Mutex::new(InternerState {
|
||||
table: HashTable::new(),
|
||||
hasher: Default::default(),
|
||||
}),
|
||||
map: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -626,16 +341,17 @@ impl<T: ?Sized + 'static + Send + Sync + Hash + Eq + ToOwned> Interner<T> {
|
|||
alloc: F,
|
||||
value: Cow<'_, T>,
|
||||
) -> Interned<T> {
|
||||
let mut state = self.state.lock().unwrap();
|
||||
let InternerState { table, hasher } = &mut *state;
|
||||
let inner = *table
|
||||
.entry(
|
||||
hasher.hash_one(&*value),
|
||||
|k| **k == *value,
|
||||
|k| hasher.hash_one(&**k),
|
||||
)
|
||||
.or_insert_with(|| alloc(value))
|
||||
.get();
|
||||
let mut map = self.map.lock().unwrap();
|
||||
let hasher = map.hasher().clone();
|
||||
let hash = hasher.hash_one(&*value);
|
||||
let inner = match map.raw_entry_mut().from_hash(hash, |k| **k == *value) {
|
||||
RawEntryMut::Occupied(entry) => *entry.key(),
|
||||
RawEntryMut::Vacant(entry) => {
|
||||
*entry
|
||||
.insert_with_hasher(hash, alloc(value), (), |k| hasher.hash_one(&**k))
|
||||
.0
|
||||
}
|
||||
};
|
||||
Interned { inner }
|
||||
}
|
||||
}
|
||||
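For reference, a cut-down version of the master-side `Interner::intern` flow shown above: lock the state, hash the value, and use `hashbrown::HashTable::entry` to either find the existing leaked reference or allocate one on a miss. This sketch assumes the `hashbrown` crate and simplifies the value type to `str`:

```
use hashbrown::HashTable;
use std::collections::hash_map::RandomState;
use std::hash::BuildHasher;
use std::sync::Mutex;

struct StrInterner {
    state: Mutex<(RandomState, HashTable<&'static str>)>,
}

impl StrInterner {
    fn intern(&self, value: &str) -> &'static str {
        let mut guard = self.state.lock().unwrap();
        let (hasher, table) = &mut *guard;
        *table
            .entry(
                hasher.hash_one(value),
                |k| *k == value,
                |k| hasher.hash_one(*k),
            )
            // Only allocate (here: leak) when the value isn't interned yet.
            .or_insert_with(|| Box::leak(value.to_owned().into_boxed_str()))
            .get()
    }
}

fn main() {
    let interner = StrInterner {
        state: Mutex::new((RandomState::new(), HashTable::new())),
    };
    let a = interner.intern("x");
    let b = interner.intern(&String::from("x"));
    assert!(std::ptr::eq(a, b));
}
```

The fifo-proof side replaces the `HashTable` with `HashMap::raw_entry_mut`, but the flow is the same: hash once, look up by that hash, and only allocate on a miss.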
|
|
@ -658,6 +374,12 @@ impl Interner<BitSlice> {
|
|||
}
|
||||
}
|
||||
|
||||
impl Interner<str> {
|
||||
fn intern_str(&self, value: Cow<'_, str>) -> Interned<str> {
|
||||
self.intern(|value| value.into_owned().leak(), value)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Interned<T: ?Sized + 'static + Send + Sync> {
|
||||
inner: &'static T,
|
||||
}
|
||||
|
|
@ -687,12 +409,6 @@ forward_fmt_trait!(Pointer);
|
|||
forward_fmt_trait!(UpperExp);
|
||||
forward_fmt_trait!(UpperHex);
|
||||
|
||||
impl<T: ?Sized + 'static + Send + Sync + AsRef<U>, U: ?Sized> AsRef<U> for Interned<T> {
|
||||
fn as_ref(&self) -> &U {
|
||||
T::as_ref(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct InternedSliceIter<T: Clone + 'static + Send + Sync> {
|
||||
slice: Interned<[T]>,
|
||||
|
|
@ -762,57 +478,6 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
impl<I> FromIterator<I> for Interned<str>
|
||||
where
|
||||
String: FromIterator<I>,
|
||||
{
|
||||
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||
String::from_iter(iter).intern_deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> FromIterator<I> for Interned<Path>
|
||||
where
|
||||
PathBuf: FromIterator<I>,
|
||||
{
|
||||
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||
PathBuf::from_iter(iter).intern_deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> FromIterator<I> for Interned<OsStr>
|
||||
where
|
||||
OsString: FromIterator<I>,
|
||||
{
|
||||
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||
OsString::from_iter(iter).intern_deref()
|
||||
}
|
||||
}
|
||||
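The `FromIterator` impls above all follow one pattern: collect into the owned type (`String`, `PathBuf`, `OsString`), then intern the result via `intern_deref`. A standalone sketch of the same pattern, with a leaked `&'static str` standing in for a real interned value:

```
#[derive(Debug, Clone, Copy)]
struct InternedStr(&'static str);

impl<I> FromIterator<I> for InternedStr
where
    String: FromIterator<I>,
{
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        // A real interner would deduplicate instead of just leaking.
        InternedStr(Box::leak(String::from_iter(iter).into_boxed_str()))
    }
}

fn main() {
    let s: InternedStr = ["a", "b", "c"].into_iter().collect();
    assert_eq!(s.0, "abc");
}
```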
|
||||
impl From<Interned<str>> for clap::builder::Str {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for clap::builder::OsStr {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for clap::builder::StyledStr {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for clap::Id {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
Interned::into_inner(value).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Vec<T> {
|
||||
fn from(value: Interned<[T]>) -> Self {
|
||||
Vec::from(&*value)
|
||||
|
|
@ -825,12 +490,24 @@ impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Box<[T]> {
|
|||
}
|
||||
}
|
||||
|
||||
impl From<Interned<str>> for String {
|
||||
fn from(value: Interned<str>) -> Self {
|
||||
String::from(&*value)
|
||||
}
|
||||
}
|
||||
|
||||
impl<I> Default for Interned<[I]>
|
||||
where
|
||||
[I]: Intern,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Intern::intern(&[])
|
||||
[][..].intern()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Interned<str> {
|
||||
fn default() -> Self {
|
||||
"".intern()
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -961,6 +638,15 @@ impl<'de> Deserialize<'de> for Interned<BitSlice> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Interned<str> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
String::deserialize(deserializer).map(Intern::intern_owned)
|
||||
}
|
||||
}
|
||||
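The `Deserialize` impl above uses the usual pattern for interned values: deserialize to the owned type, then intern it. A standalone sketch of that pattern (assumes the `serde` and `serde_json` crates; a leaked `&'static str` stands in for a real interned value):

```
use serde::{Deserialize, Deserializer};

#[derive(Debug, Clone, Copy)]
struct InternedStr(&'static str);

impl<'de> Deserialize<'de> for InternedStr {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        // Deserialize to the owned type first, then "intern" (leak) it.
        let s = String::deserialize(deserializer)?;
        Ok(InternedStr(Box::leak(s.into_boxed_str())))
    }
}

fn main() {
    let v: InternedStr = serde_json::from_str("\"hello\"").unwrap();
    assert_eq!(v.0, "hello");
}
```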
|
||||
impl<T: Clone + Send + Sync + 'static + Hash + Eq> Intern for T {
|
||||
fn intern(&self) -> Interned<Self> {
|
||||
Self::intern_cow(Cow::Borrowed(self))
|
||||
|
|
@ -1021,6 +707,26 @@ impl Intern for BitSlice {
|
|||
}
|
||||
}
|
||||
|
||||
impl Intern for str {
|
||||
fn intern(&self) -> Interned<Self> {
|
||||
Self::intern_cow(Cow::Borrowed(self))
|
||||
}
|
||||
|
||||
fn intern_owned(this: <Self as ToOwned>::Owned) -> Interned<Self>
|
||||
where
|
||||
Self: ToOwned,
|
||||
{
|
||||
Self::intern_cow(Cow::Owned(this))
|
||||
}
|
||||
|
||||
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self>
|
||||
where
|
||||
Self: ToOwned,
|
||||
{
|
||||
Interner::get().intern_str(this)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
|
||||
type InputRef<'a>: 'a + Send + Sync + Hash + Copy;
|
||||
type InputOwned: 'static + Send + Sync;
|
||||
|
|
@ -1036,7 +742,7 @@ pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
|
|||
fn get_cow(self, input: Self::InputCow<'_>) -> Self::Output {
|
||||
static TYPE_ID_MAP: TypeIdMap = TypeIdMap::new();
|
||||
let map: &RwLock<(
|
||||
DefaultBuildHasher,
|
||||
hashbrown::hash_map::DefaultHashBuilder,
|
||||
HashTable<(Self, Self::InputOwned, Self::Output)>,
|
||||
)> = TYPE_ID_MAP.get_or_insert_default();
|
||||
fn hash_eq_key<'a, 'b, T: MemoizeGeneric>(
|
||||
|
|
|
|||
|
|
@ -1,8 +1,10 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use hashbrown::HashMap;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
hash::{BuildHasher, Hasher},
|
||||
ptr::NonNull,
|
||||
sync::RwLock,
|
||||
};
|
||||
|
||||
|
|
@ -73,36 +75,59 @@ impl BuildHasher for TypeIdBuildHasher {
|
|||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TypeIdMap(
|
||||
RwLock<hashbrown::HashMap<TypeId, &'static (dyn Any + Send + Sync), TypeIdBuildHasher>>,
|
||||
);
|
||||
struct Value(NonNull<dyn Any + Send + Sync>);
|
||||
|
||||
impl Value {
|
||||
unsafe fn get_transmute_lifetime<'b>(&self) -> &'b (dyn Any + Send + Sync) {
|
||||
unsafe { &*self.0.as_ptr() }
|
||||
}
|
||||
fn new(v: Box<dyn Any + Send + Sync>) -> Self {
|
||||
unsafe { Self(NonNull::new_unchecked(Box::into_raw(v))) }
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl Send for Value {}
|
||||
unsafe impl Sync for Value {}
|
||||
|
||||
impl Drop for Value {
|
||||
fn drop(&mut self) {
|
||||
unsafe { std::ptr::drop_in_place(self.0.as_ptr()) }
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TypeIdMap(RwLock<HashMap<TypeId, Value, TypeIdBuildHasher>>);
|
||||
|
||||
impl TypeIdMap {
|
||||
pub(crate) const fn new() -> Self {
|
||||
Self(RwLock::new(hashbrown::HashMap::with_hasher(
|
||||
TypeIdBuildHasher,
|
||||
)))
|
||||
pub const fn new() -> Self {
|
||||
Self(RwLock::new(HashMap::with_hasher(TypeIdBuildHasher)))
|
||||
}
|
||||
#[cold]
|
||||
fn insert_slow(
|
||||
unsafe fn insert_slow(
|
||||
&self,
|
||||
type_id: TypeId,
|
||||
make: fn() -> Box<dyn Any + Sync + Send>,
|
||||
) -> &'static (dyn Any + Sync + Send) {
|
||||
let value = Box::leak(make());
|
||||
) -> &(dyn Any + Sync + Send) {
|
||||
let value = Value::new(make());
|
||||
let mut write_guard = self.0.write().unwrap();
|
||||
*write_guard.entry(type_id).or_insert(value)
|
||||
unsafe {
|
||||
write_guard
|
||||
.entry(type_id)
|
||||
.or_insert(value)
|
||||
.get_transmute_lifetime()
|
||||
}
|
||||
}
|
||||
pub(crate) fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
|
||||
pub fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
|
||||
let type_id = TypeId::of::<T>();
|
||||
let read_guard = self.0.read().unwrap();
|
||||
let retval = read_guard.get(&type_id).map(|v| *v);
|
||||
let retval = read_guard
|
||||
.get(&type_id)
|
||||
.map(|v| unsafe { Value::get_transmute_lifetime(v) });
|
||||
drop(read_guard);
|
||||
let retval = match retval {
|
||||
Some(retval) => retval,
|
||||
None => self.insert_slow(type_id, move || Box::new(T::default())),
|
||||
None => unsafe { self.insert_slow(type_id, move || Box::new(T::default())) },
|
||||
};
|
||||
retval.downcast_ref().expect("known to have correct TypeId")
|
||||
unsafe { &*(retval as *const dyn Any as *const T) }
|
||||
}
|
||||
}
|
||||
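A standalone sketch (std only, illustrative names) of what `TypeIdMap::get_or_insert_default` above provides: a map keyed by `TypeId` that lazily creates one leaked, default-initialized value per type and hands out a `&'static T` for it:

```
use std::any::{Any, TypeId};
use std::collections::HashMap;
use std::sync::{Mutex, OnceLock};

fn per_type_default<T: Any + Send + Sync + Default>() -> &'static T {
    static MAP: OnceLock<Mutex<HashMap<TypeId, &'static (dyn Any + Send + Sync)>>> =
        OnceLock::new();
    let map = MAP.get_or_init(Default::default);
    let mut guard = map.lock().unwrap();
    let value: &'static (dyn Any + Send + Sync) =
        *guard.entry(TypeId::of::<T>()).or_insert_with(|| {
            // One leaked instance per type, shared by every later call.
            let leaked: &'static (dyn Any + Send + Sync) = Box::leak(Box::new(T::default()));
            leaked
        });
    drop(guard);
    value
        .downcast_ref::<T>()
        .expect("keyed by TypeId, so the type matches")
}

fn main() {
    let a: &'static Vec<u32> = per_type_default();
    let b: &'static Vec<u32> = per_type_default();
    assert!(std::ptr::eq(a, b)); // same instance for the same type
}
```

The real `TypeIdMap` avoids taking the write lock on the hot path (read lock first, `insert_slow` only on a miss), and the fifo-proof side additionally wraps values in a `NonNull` so they can be dropped; the sketch keeps only the core idea.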
|
||||
|
|
|
|||
|
|
@ -4,18 +4,6 @@
|
|||
// TODO: enable:
|
||||
// #![warn(missing_docs)]
|
||||
|
||||
#![deny(
|
||||
rustdoc::bare_urls,
|
||||
rustdoc::broken_intra_doc_links,
|
||||
rustdoc::invalid_codeblock_attributes,
|
||||
rustdoc::invalid_html_tags,
|
||||
rustdoc::invalid_rust_codeblocks,
|
||||
rustdoc::private_doc_tests,
|
||||
rustdoc::private_intra_doc_links,
|
||||
rustdoc::redundant_explicit_links,
|
||||
rustdoc::unescaped_backticks
|
||||
)]
|
||||
|
||||
//! [Main Documentation][_docs]
|
||||
|
||||
extern crate self as fayalite;
|
||||
|
|
@ -86,139 +74,8 @@ macro_rules! __cfg_expansion_helper {
|
|||
pub use fayalite_proc_macros::hdl_module;
|
||||
|
||||
#[doc(inline)]
|
||||
/// The `#[hdl]` attribute is supported on several different kinds of [Rust Items](https://doc.rust-lang.org/reference/items.html):
|
||||
///
|
||||
/// # Functions and Methods
|
||||
/// Enables the stuff that you can use inside a [module's body](crate::_docs::modules::module_bodies),
|
||||
/// but without being a module or changing the function's signature.
|
||||
/// The only exception is that you can't use stuff that requires the automatically-provided `m` variable.
|
||||
///
|
||||
/// # Structs
|
||||
// TODO: expand on struct docs
|
||||
/// e.g.:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct OtherStruct {}
|
||||
/// #[hdl]
|
||||
/// pub struct MyStruct {
|
||||
/// #[hdl(flip)]
|
||||
/// pub a: UInt<5>,
|
||||
/// pub b: Bool,
|
||||
/// #[hdl(flip)]
|
||||
/// pub c: OtherStruct,
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Enums
|
||||
// TODO: expand on enum docs
|
||||
/// e.g.:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct MyStruct {}
|
||||
/// #[hdl]
|
||||
/// pub enum MyEnum {
|
||||
/// A(UInt<3>),
|
||||
/// B,
|
||||
/// C(MyStruct),
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// # Type Aliases
|
||||
///
|
||||
/// There are three different ways you can create a type alias:
|
||||
///
|
||||
/// # Normal Type Alias
|
||||
///
|
||||
/// This works exactly how you'd expect:
|
||||
/// ```
|
||||
/// # use fayalite::prelude::*;
|
||||
/// # #[hdl]
|
||||
/// # pub struct MyStruct<T: Type> {
|
||||
/// # v: T,
|
||||
/// # }
|
||||
/// #[hdl]
|
||||
/// pub type MyType<T: Type> = MyStruct<T>;
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
///
|
||||
/// let ty = MyType[UInt[3]];
|
||||
/// assert_eq!(ty, MyStruct[UInt[3]]);
|
||||
/// ```
|
||||
///
|
||||
/// # Type Alias that gets a [`Type`] from a [`PhantomConst`]
|
||||
///
|
||||
/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Type`] that you can use in other `#[hdl]` types.
|
||||
///
|
||||
/// ```
|
||||
/// # use fayalite::{intern::Intern, prelude::*};
|
||||
/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// pub struct Config {
|
||||
/// pub foo: usize,
|
||||
/// pub bar: Bundle,
|
||||
/// }
|
||||
///
|
||||
/// // the expression inside `get` is called with `Interned<Config>` and returns `Array<Bundle>`
|
||||
/// #[hdl(get(|config| Array[config.bar][config.foo]))]
|
||||
/// pub type GetMyArray<P: PhantomConstGet<Config>> = Array<Bundle>;
|
||||
///
|
||||
/// // you can then use it in other types:
|
||||
///
|
||||
/// #[hdl(no_static)]
|
||||
/// pub struct WrapMyArray<P: PhantomConstGet<Config>> {
|
||||
/// pub my_array: GetMyArray<P>,
|
||||
/// }
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
/// let bar = Bundle::new(Default::default());
|
||||
/// let config = PhantomConst::new(Config { foo: 12, bar }.intern_sized());
|
||||
/// let ty = WrapMyArray[config];
|
||||
/// assert_eq!(ty.my_array, Array[bar][12]);
|
||||
/// ```
|
||||
///
|
||||
/// # Type Alias that gets a [`Size`] from a [`PhantomConst`]
|
||||
///
|
||||
/// This allows you to use some computed property of a [`PhantomConst`] to get a [`Size`] that you can use in other `#[hdl]` types.
|
||||
///
|
||||
/// ```
|
||||
/// # use fayalite::{intern::Intern, prelude::*};
|
||||
/// # #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// # pub struct ConfigItem {}
|
||||
/// # impl ConfigItem {
|
||||
/// # pub fn new() -> Self {
|
||||
/// # Self {}
|
||||
/// # }
|
||||
/// # }
|
||||
/// #[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
|
||||
/// pub struct Config {
|
||||
/// pub items: Vec<ConfigItem>,
|
||||
/// }
|
||||
///
|
||||
/// // the expression inside `get` is called with `Interned<Config>` and returns `usize` (not DynSize)
|
||||
/// #[hdl(get(|config| config.items.len()))]
|
||||
/// pub type GetItemsLen<P: PhantomConstGet<Config>> = DynSize; // must be DynSize
|
||||
///
|
||||
/// // you can then use it in other types:
|
||||
///
|
||||
/// #[hdl(no_static)]
|
||||
/// pub struct FlagPerItem<P: PhantomConstGet<Config>> {
|
||||
/// pub flags: ArrayType<Bool, GetItemsLen<P>>,
|
||||
/// }
|
||||
///
|
||||
/// // you can then use Fayalite's standard syntax for creating dynamic types at runtime:
|
||||
/// let config = PhantomConst::new(Config { items: vec![ConfigItem::new(); 5] }.intern_sized());
|
||||
/// let ty = FlagPerItem[config];
|
||||
/// assert_eq!(ty.flags, Array[Bool][5]);
|
||||
/// ```
|
||||
///
|
||||
/// [`PhantomConst`]: crate::phantom_const::PhantomConst
|
||||
/// [`Size`]: crate::int::Size
|
||||
/// [`Type`]: crate::ty::Type
|
||||
pub use fayalite_proc_macros::hdl;
|
||||
|
||||
pub use bitvec;
|
||||
|
||||
/// struct used as a placeholder when applying defaults
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
|
||||
pub struct __;
|
||||
|
|
@ -228,8 +85,8 @@ pub mod _docs;
|
|||
|
||||
pub mod annotations;
|
||||
pub mod array;
|
||||
pub mod build;
|
||||
pub mod bundle;
|
||||
pub mod cli;
|
||||
pub mod clock;
|
||||
pub mod enum_;
|
||||
pub mod expr;
|
||||
|
|
@ -239,8 +96,6 @@ pub mod int;
|
|||
pub mod intern;
|
||||
pub mod memory;
|
||||
pub mod module;
|
||||
pub mod phantom_const;
|
||||
pub mod platform;
|
||||
pub mod prelude;
|
||||
pub mod reg;
|
||||
pub mod reset;
|
||||
|
|
@ -249,5 +104,4 @@ pub mod source_location;
|
|||
pub mod testing;
|
||||
pub mod ty;
|
||||
pub mod util;
|
||||
pub mod vendor;
|
||||
pub mod wire;
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ use crate::{
|
|||
array::{Array, ArrayType},
|
||||
bundle::{Bundle, BundleType},
|
||||
clock::Clock,
|
||||
expr::{Expr, Flow, ToExpr, ToLiteralBits, ops::BundleLiteral, repeat},
|
||||
expr::{ops::BundleLiteral, repeat, Expr, Flow, ToExpr, ToLiteralBits},
|
||||
hdl,
|
||||
int::{Bool, DynSize, Size, UInt, UIntType},
|
||||
intern::{Intern, Interned},
|
||||
|
|
@ -470,7 +470,7 @@ pub enum ReadUnderWrite {
|
|||
Undefined,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
struct MemImpl<Element: Type, Len: Size, P> {
|
||||
scoped_name: ScopedNameId,
|
||||
source_location: SourceLocation,
|
||||
|
|
@ -1066,8 +1066,7 @@ pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
|
|||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::Enum(_)
|
||||
| CanonicalType::DynSimOnly(_) => Expr::from_canonical(Expr::canonical(value)),
|
||||
| CanonicalType::Enum(_) => Expr::from_canonical(Expr::canonical(value)),
|
||||
CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat(
|
||||
splat_mask(array.element(), value),
|
||||
array.len(),
|
||||
|
|
@ -1083,7 +1082,6 @@ pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
|
|||
)
|
||||
.to_expr(),
|
||||
)),
|
||||
CanonicalType::PhantomConst(_) => Expr::from_canonical(Expr::canonical(().to_expr())),
|
||||
}
|
||||
}
|
||||
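An illustrative sketch (toy types, not fayalite's) of the recursion `splat_mask` performs above: a single boolean is replicated across the structure of a type so that the result carries one mask bit per leaf element:

```
#[derive(Debug, Clone)]
enum Ty {
    Bit,
    Array(Box<Ty>, usize),
    Bundle(Vec<Ty>),
}

#[derive(Debug, Clone, PartialEq)]
enum Mask {
    Bit(bool),
    Array(Vec<Mask>),
    Bundle(Vec<Mask>),
}

fn splat_mask(ty: &Ty, value: bool) -> Mask {
    match ty {
        // Leaf: the mask is just the splatted bit.
        Ty::Bit => Mask::Bit(value),
        // Containers: splat into each element/field recursively.
        Ty::Array(element, len) => Mask::Array(vec![splat_mask(element, value); *len]),
        Ty::Bundle(fields) => Mask::Bundle(fields.iter().map(|f| splat_mask(f, value)).collect()),
    }
}

fn main() {
    let ty = Ty::Bundle(vec![Ty::Bit, Ty::Array(Box::new(Ty::Bit), 2)]);
    assert_eq!(
        splat_mask(&ty, true),
        Mask::Bundle(vec![Mask::Bit(true), Mask::Array(vec![Mask::Bit(true); 2])])
    );
}
```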
|
||||
|
|
|
|||
|
|
@ -8,34 +8,31 @@ use crate::{
|
|||
clock::{Clock, ClockDomain},
|
||||
enum_::{Enum, EnumMatchVariantsIter, EnumType},
|
||||
expr::{
|
||||
Expr, Flow, ToExpr,
|
||||
ops::VariantAccess,
|
||||
target::{
|
||||
GetTarget, Target, TargetBase, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathElement,
|
||||
},
|
||||
Expr, Flow, ToExpr,
|
||||
},
|
||||
formal::FormalKind,
|
||||
int::{Bool, DynSize, Size},
|
||||
intern::{Intern, Interned},
|
||||
memory::{Mem, MemBuilder, MemBuilderTarget, PortName},
|
||||
platform::PlatformIOBuilder,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
|
||||
sim::{ExternModuleSimGenerator, ExternModuleSimulation},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::{HashMap, HashSet, ScopedRef},
|
||||
util::ScopedRef,
|
||||
wire::{IncompleteWire, Wire},
|
||||
};
|
||||
use hashbrown::hash_map::Entry;
|
||||
use hashbrown::{hash_map::Entry, HashMap, HashSet};
|
||||
use num_bigint::BigInt;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
collections::{BTreeMap, VecDeque},
|
||||
collections::VecDeque,
|
||||
convert::Infallible,
|
||||
fmt,
|
||||
future::IntoFuture,
|
||||
hash::{Hash, Hasher},
|
||||
iter::FusedIterator,
|
||||
marker::PhantomData,
|
||||
|
|
@ -834,8 +831,6 @@ pub struct AnnotatedModuleIO<S: ModuleBuildingStatus = ModuleBuilt> {
|
|||
pub module_io: ModuleIO<CanonicalType>,
|
||||
}
|
||||
|
||||
impl Copy for AnnotatedModuleIO {}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub enum ModuleKind {
|
||||
Extern,
|
||||
|
|
@ -1086,7 +1081,6 @@ pub struct ExternModuleBody<
|
|||
> {
|
||||
pub verilog_name: Interned<str>,
|
||||
pub parameters: P,
|
||||
pub simulation: Option<ExternModuleSimulation>,
|
||||
}
|
||||
|
||||
impl From<ExternModuleBody<Vec<ExternModuleParameter>>> for ExternModuleBody {
|
||||
|
|
@ -1094,13 +1088,11 @@ impl From<ExternModuleBody<Vec<ExternModuleParameter>>> for ExternModuleBody {
|
|||
let ExternModuleBody {
|
||||
verilog_name,
|
||||
parameters,
|
||||
simulation,
|
||||
} = value;
|
||||
let parameters = Intern::intern_owned(parameters);
|
||||
Self {
|
||||
verilog_name,
|
||||
parameters,
|
||||
simulation,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -1215,12 +1207,6 @@ pub struct Module<T: BundleType> {
|
|||
module_annotations: Interned<[Annotation]>,
|
||||
}
|
||||
|
||||
impl<T: BundleType> AsRef<Self> for Module<T> {
|
||||
fn as_ref(&self) -> &Self {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct DebugFmtModulesState {
|
||||
seen: HashSet<NameId>,
|
||||
|
|
@ -1297,12 +1283,10 @@ impl<T: BundleType> fmt::Debug for DebugModuleBody<T> {
|
|||
ModuleBody::Extern(ExternModuleBody {
|
||||
verilog_name,
|
||||
parameters,
|
||||
simulation,
|
||||
}) => {
|
||||
debug_struct
|
||||
.field("verilog_name", verilog_name)
|
||||
.field("parameters", parameters)
|
||||
.field("simulation", simulation);
|
||||
.field("parameters", parameters);
|
||||
}
|
||||
}
|
||||
debug_struct.finish_non_exhaustive()
|
||||
|
|
@ -1468,9 +1452,7 @@ impl TargetState {
|
|||
})
|
||||
.reduce(TargetWritten::conditional_merge_written)
|
||||
else {
|
||||
unreachable!(
|
||||
"merge_conditional_sub_blocks_into_block must be called with at least one sub-block"
|
||||
);
|
||||
unreachable!("merge_conditional_sub_blocks_into_block must be called with at least one sub-block");
|
||||
};
|
||||
let mut written_in_blocks = written_in_blocks.borrow_mut();
|
||||
if target_block >= written_in_blocks.len() {
|
||||
|
|
@ -1508,9 +1490,6 @@ impl TargetState {
|
|||
})
|
||||
.collect(),
|
||||
},
|
||||
CanonicalType::PhantomConst(_) => TargetStateInner::Decomposed {
|
||||
subtargets: HashMap::default(),
|
||||
},
|
||||
CanonicalType::Array(ty) => TargetStateInner::Decomposed {
|
||||
subtargets: (0..ty.len())
|
||||
.map(|index| {
|
||||
|
|
@ -1533,8 +1512,7 @@ impl TargetState {
|
|||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::DynSimOnly(_) => TargetStateInner::Single {
|
||||
| CanonicalType::Reset(_) => TargetStateInner::Single {
|
||||
declared_in_block,
|
||||
written_in_blocks: RefCell::default(),
|
||||
},
|
||||
|
|
@ -1780,7 +1758,6 @@ impl AssertValidityState {
|
|||
ModuleBody::Extern(ExternModuleBody {
|
||||
verilog_name: _,
|
||||
parameters: _,
|
||||
simulation: _,
|
||||
}) => {}
|
||||
ModuleBody::Normal(NormalModuleBody { body }) => {
|
||||
let body = self.make_block_index(body);
|
||||
|
|
@ -1802,49 +1779,12 @@ impl<T: BundleType> Module<T> {
|
|||
pub fn new_unchecked(
|
||||
name_id: NameId,
|
||||
source_location: SourceLocation,
|
||||
mut body: ModuleBody,
|
||||
body: ModuleBody,
|
||||
module_io: impl IntoIterator<Item = AnnotatedModuleIO>,
|
||||
module_annotations: impl IntoAnnotations,
|
||||
) -> Module<T> {
|
||||
let module_io: Interned<[_]> = module_io.into_iter().collect();
|
||||
let module_annotations = module_annotations.into_annotations().into_iter().collect();
|
||||
match &mut body {
|
||||
ModuleBody::Normal(_) => {}
|
||||
ModuleBody::Extern(ExternModuleBody {
|
||||
simulation: Some(simulation),
|
||||
..
|
||||
}) => {
|
||||
if module_io.iter().any(|io| {
|
||||
!simulation
|
||||
.sim_io_to_generator_map
|
||||
.contains_key(&io.module_io.intern())
|
||||
}) {
|
||||
let mut sim_io_to_generator_map =
|
||||
BTreeMap::clone(&simulation.sim_io_to_generator_map);
|
||||
for io in module_io.iter() {
|
||||
let io = io.module_io.intern();
|
||||
sim_io_to_generator_map.entry(io).or_insert(io);
|
||||
}
|
||||
simulation.sim_io_to_generator_map = sim_io_to_generator_map.intern_sized();
|
||||
}
|
||||
if simulation.sim_io_to_generator_map.len() > module_io.len() {
|
||||
// if sim_io_to_generator_map is bigger, then there must be a key that's not in module_io
|
||||
let module_io_set = HashSet::from_iter(module_io.iter().map(|v| v.module_io));
|
||||
for (sim_io, generator_io) in simulation.sim_io_to_generator_map.iter() {
|
||||
if !module_io_set.contains(&**sim_io) {
|
||||
panic!(
|
||||
"extern module has invalid `sim_io_to_generator_map`: key is not in containing module's `module_io`:\n\
|
||||
key={sim_io:?}\nvalue={generator_io:?}\nmodule location: {source_location}"
|
||||
);
|
||||
}
|
||||
}
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
ModuleBody::Extern(ExternModuleBody {
|
||||
simulation: None, ..
|
||||
}) => {}
|
||||
}
|
||||
let retval = Module {
|
||||
name: name_id,
|
||||
source_location,
|
||||
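A standalone sketch (plain strings instead of `ModuleIO`, illustrative names) of the map fix-up `new_unchecked` does above: default every IO to an identity mapping, then reject any key that does not name an IO of the containing module:

```
use std::collections::{BTreeMap, HashSet};

fn normalize_map(
    module_io: &[&'static str],
    mut map: BTreeMap<&'static str, &'static str>,
) -> BTreeMap<&'static str, &'static str> {
    // Every IO gets an entry; missing ones default to an identity mapping.
    for &io in module_io {
        map.entry(io).or_insert(io);
    }
    // If the map is now larger than module_io, some key names an unknown IO.
    if map.len() > module_io.len() {
        let io_set: HashSet<_> = module_io.iter().copied().collect();
        for (key, value) in &map {
            assert!(
                io_set.contains(key),
                "key {key:?} (mapped to {value:?}) is not in module_io"
            );
        }
        unreachable!("the map can only be larger if some key is not in module_io");
    }
    map
}

fn main() {
    let map = normalize_map(&["clk", "data"], BTreeMap::from([("data", "data_gen")]));
    assert_eq!(map["clk"], "clk");
    assert_eq!(map["data"], "data_gen");
}
```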
|
|
@ -1913,7 +1853,7 @@ impl<T: BundleType> Module<T> {
|
|||
AssertValidityState {
|
||||
module: self.canonical(),
|
||||
blocks: vec![],
|
||||
target_states: HashMap::with_capacity_and_hasher(64, Default::default()),
|
||||
target_states: HashMap::with_capacity(64),
|
||||
}
|
||||
.assert_validity();
|
||||
}
|
||||
|
|
@ -2120,27 +2060,6 @@ impl ModuleBuilder {
|
|||
self.output_with_loc(implicit_name.0, SourceLocation::caller(), ty)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn add_platform_io_with_loc(
|
||||
&self,
|
||||
name: &str,
|
||||
source_location: SourceLocation,
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
) -> Expr<Bundle> {
|
||||
platform_io_builder.add_platform_io(name, source_location, self)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn add_platform_io(
|
||||
&self,
|
||||
implicit_name: ImplicitName<'_>,
|
||||
platform_io_builder: PlatformIOBuilder<'_>,
|
||||
) -> Expr<Bundle> {
|
||||
self.add_platform_io_with_loc(
|
||||
implicit_name.0,
|
||||
SourceLocation::caller(),
|
||||
platform_io_builder,
|
||||
)
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn run<T: BundleType>(
|
||||
name: &str,
|
||||
module_kind: ModuleKind,
|
||||
|
|
@ -2186,7 +2105,6 @@ impl ModuleBuilder {
|
|||
ModuleKind::Extern => ModuleBody::Extern(ExternModuleBody {
|
||||
verilog_name: name.0,
|
||||
parameters: vec![],
|
||||
simulation: None,
|
||||
}),
|
||||
ModuleKind::Normal => ModuleBody::Normal(NormalModuleBody {
|
||||
body: BuilderModuleBody {
|
||||
|
|
@ -2195,8 +2113,8 @@ impl ModuleBuilder {
|
|||
incomplete_declarations: vec![],
|
||||
stmts: vec![],
|
||||
}],
|
||||
annotations_map: HashMap::default(),
|
||||
memory_map: HashMap::default(),
|
||||
annotations_map: HashMap::new(),
|
||||
memory_map: HashMap::new(),
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
|
@ -2206,7 +2124,7 @@ impl ModuleBuilder {
|
|||
impl_: RefCell::new(ModuleBuilderImpl {
|
||||
body,
|
||||
io: vec![],
|
||||
io_indexes: HashMap::default(),
|
||||
io_indexes: HashMap::new(),
|
||||
module_annotations: vec![],
|
||||
}),
|
||||
};
|
||||
|
|
@ -2253,7 +2171,6 @@ impl ModuleBuilder {
|
|||
.builder_extern_body()
|
||||
.verilog_name = name.intern();
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter(&self, name: impl AsRef<str>, value: ExternModuleParameterValue) {
|
||||
let name = name.as_ref();
|
||||
self.impl_
|
||||
|
|
@ -2266,7 +2183,6 @@ impl ModuleBuilder {
|
|||
value,
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter_int(&self, name: impl AsRef<str>, value: impl Into<BigInt>) {
|
||||
let name = name.as_ref();
|
||||
let value = value.into();
|
||||
|
|
@ -2280,7 +2196,6 @@ impl ModuleBuilder {
|
|||
value: ExternModuleParameterValue::Integer(value),
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter_str(&self, name: impl AsRef<str>, value: impl AsRef<str>) {
|
||||
let name = name.as_ref();
|
||||
let value = value.as_ref();
|
||||
|
|
@ -2294,7 +2209,6 @@ impl ModuleBuilder {
|
|||
value: ExternModuleParameterValue::String(value.intern()),
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn parameter_raw_verilog(&self, name: impl AsRef<str>, raw_verilog: impl AsRef<str>) {
|
||||
let name = name.as_ref();
|
||||
let raw_verilog = raw_verilog.as_ref();
|
||||
|
|
@ -2308,26 +2222,6 @@ impl ModuleBuilder {
|
|||
value: ExternModuleParameterValue::RawVerilog(raw_verilog.intern()),
|
||||
});
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn extern_module_simulation<G: ExternModuleSimGenerator>(&self, generator: G) {
|
||||
let mut impl_ = self.impl_.borrow_mut();
|
||||
let simulation = &mut impl_.body.builder_extern_body().simulation;
|
||||
if simulation.is_some() {
|
||||
panic!("already added an extern module simulation");
|
||||
}
|
||||
*simulation = Some(ExternModuleSimulation::new(generator));
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn extern_module_simulation_fn<
|
||||
Args: fmt::Debug + Clone + Hash + Eq + Send + Sync + 'static,
|
||||
Fut: IntoFuture<Output = ()> + 'static,
|
||||
>(
|
||||
&self,
|
||||
args: Args,
|
||||
f: fn(Args, crate::sim::ExternModuleSimulationState) -> Fut,
|
||||
) {
|
||||
self.extern_module_simulation(crate::sim::SimGeneratorFn { args, f });
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
|
|
@ -2360,12 +2254,14 @@ pub fn annotate<T: Type>(target: Expr<T>, annotations: impl IntoAnnotations) {
|
|||
}
|
||||
TargetBase::MemPort(v) => {
|
||||
ModuleBuilder::with(|m| {
|
||||
RefCell::borrow_mut(unwrap!(
|
||||
unwrap!(m.impl_.borrow_mut().body.builder_normal_body_opt())
|
||||
.body
|
||||
.memory_map
|
||||
.get_mut(&v.mem_name())
|
||||
))
|
||||
RefCell::borrow_mut(unwrap!(unwrap!(m
|
||||
.impl_
|
||||
.borrow_mut()
|
||||
.body
|
||||
.builder_normal_body_opt())
|
||||
.body
|
||||
.memory_map
|
||||
.get_mut(&v.mem_name())))
|
||||
.port_annotations
|
||||
.extend(annotations)
|
||||
});
|
||||
|
|
@ -2765,22 +2661,6 @@ impl<T: Type> ModuleIO<T> {
|
|||
source_location,
|
||||
}
|
||||
}
|
||||
pub fn from_canonical(canonical_module_io: ModuleIO<CanonicalType>) -> Self {
|
||||
let ModuleIO {
|
||||
containing_module_name,
|
||||
bundle_field,
|
||||
id,
|
||||
ty,
|
||||
source_location,
|
||||
} = canonical_module_io;
|
||||
Self {
|
||||
containing_module_name,
|
||||
bundle_field,
|
||||
id,
|
||||
ty: T::from_canonical(ty),
|
||||
source_location,
|
||||
}
|
||||
}
|
||||
pub fn bundle_field(&self) -> BundleField {
|
||||
self.bundle_field
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,32 +6,29 @@ use crate::{
|
|||
bundle::{BundleField, BundleType},
|
||||
enum_::{EnumType, EnumVariant},
|
||||
expr::{
|
||||
ExprEnum,
|
||||
ops::{self, ArrayLiteral},
|
||||
target::{
|
||||
Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
ExprEnum,
|
||||
},
|
||||
formal::FormalKind,
|
||||
int::{SIntValue, UIntValue},
|
||||
intern::{Intern, Interned, Memoize},
|
||||
memory::{DynPortType, MemPort},
|
||||
module::{
|
||||
AnnotatedModuleIO, Block, ExprInInstantiatedModule, ExternModuleBody,
|
||||
ExternModuleParameter, InstantiatedModule, ModuleBody, ModuleIO, NameId, NormalModuleBody,
|
||||
Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg,
|
||||
StmtWire,
|
||||
AnnotatedModuleIO, Block, ExprInInstantiatedModule, ExternModuleBody, InstantiatedModule,
|
||||
ModuleBody, ModuleIO, NameId, NormalModuleBody, Stmt, StmtConnect, StmtDeclaration,
|
||||
StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg, StmtWire,
|
||||
},
|
||||
prelude::*,
|
||||
reset::{ResetType, ResetTypeDispatch},
|
||||
sim::ExternModuleSimulation,
|
||||
util::{HashMap, HashSet},
|
||||
};
|
||||
use hashbrown::hash_map::Entry;
|
||||
use hashbrown::{hash_map::Entry, HashMap, HashSet};
|
||||
use num_bigint::BigInt;
|
||||
use petgraph::unionfind::UnionFind;
|
||||
use std::{collections::BTreeMap, fmt, marker::PhantomData};
|
||||
use std::{fmt, marker::PhantomData};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum DeduceResetsError {
|
||||
|
|
@ -43,16 +40,10 @@ impl fmt::Display for DeduceResetsError {
|
|||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
DeduceResetsError::ResetIsNotDrivenByAsyncOrSync { source_location } => {
|
||||
write!(
|
||||
f,
|
||||
"deduce_reset failed: Reset signal is not driven by any AsyncReset or SyncReset signals: {source_location}"
|
||||
)
|
||||
write!(f, "deduce_reset failed: Reset signal is not driven by any AsyncReset or SyncReset signals: {source_location}")
|
||||
}
|
||||
DeduceResetsError::ResetIsDrivenByBothAsyncAndSync { source_location } => {
|
||||
write!(
|
||||
f,
|
||||
"deduce_reset failed: Reset signal is driven by both AsyncReset and SyncReset signals: {source_location}"
|
||||
)
|
||||
write!(f, "deduce_reset failed: Reset signal is driven by both AsyncReset and SyncReset signals: {source_location}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -164,8 +155,6 @@ impl ResetsLayout {
|
|||
CanonicalType::SyncReset(_) => ResetsLayout::SyncReset,
|
||||
CanonicalType::Reset(_) => ResetsLayout::Reset,
|
||||
CanonicalType::Clock(_) => ResetsLayout::NoResets,
|
||||
CanonicalType::PhantomConst(_) => ResetsLayout::NoResets,
|
||||
CanonicalType::DynSimOnly(_) => ResetsLayout::NoResets,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -418,9 +407,7 @@ impl Resets {
|
|||
| CanonicalType::Bool(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => Ok(self.ty),
|
||||
| CanonicalType::Clock(_) => Ok(self.ty),
|
||||
CanonicalType::Array(ty) => Ok(CanonicalType::Array(Array::new_dyn(
|
||||
self.array_elements().substituted_type(
|
||||
reset_graph,
|
||||
|
|
@ -1011,9 +998,7 @@ fn cast_bit_op<P: Pass, T: Type, A: Type>(
|
|||
CanonicalType::Array(_)
|
||||
| CanonicalType::Enum(_)
|
||||
| CanonicalType::Bundle(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => unreachable!(),
|
||||
| CanonicalType::Reset(_) => unreachable!(),
|
||||
$(CanonicalType::$Variant(ty) => Expr::expr_enum($arg.cast_to(ty)),)*
|
||||
}
|
||||
};
|
||||
|
|
@ -1025,8 +1010,6 @@ fn cast_bit_op<P: Pass, T: Type, A: Type>(
|
|||
| CanonicalType::Enum(_)
|
||||
| CanonicalType::Bundle(_)
|
||||
| CanonicalType::Reset(_) => unreachable!(),
|
||||
CanonicalType::PhantomConst(_) |
|
||||
CanonicalType::DynSimOnly(_) => Expr::expr_enum(arg),
|
||||
$(CanonicalType::$Variant(_) => {
|
||||
let arg = Expr::<$Variant>::from_canonical(arg);
|
||||
match_expr_ty!(arg, UInt, SInt, Bool, AsyncReset, SyncReset, Clock)
|
||||
|
|
@ -1057,7 +1040,6 @@ impl<P: Pass> RunPass<P> for ExprEnum {
|
|||
ExprEnum::UIntLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
ExprEnum::SIntLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
ExprEnum::BoolLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
ExprEnum::PhantomConst(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
ExprEnum::BundleLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
ExprEnum::ArrayLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
ExprEnum::EnumLiteral(expr) => Ok(expr.run_pass(pass_args)?.map(ExprEnum::from)),
|
||||
|
|
@ -1688,9 +1670,7 @@ impl RunPassDispatch for AnyReg {
|
|||
| CanonicalType::Enum(_)
|
||||
| CanonicalType::Bundle(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => unreachable!(),
|
||||
| CanonicalType::Clock(_) => unreachable!(),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
@ -1744,33 +1724,6 @@ impl RunPassDispatch for Instance<Bundle> {
|
|||
}
|
||||
}
|
||||
|
||||
impl<P: Pass> RunPass<P> for ExternModuleSimulation {
|
||||
fn run_pass(
|
||||
&self,
|
||||
mut pass_args: PassArgs<'_, P>,
|
||||
) -> Result<PassOutput<Self, P>, DeduceResetsError> {
|
||||
let Self {
|
||||
generator,
|
||||
sim_io_to_generator_map,
|
||||
source_location,
|
||||
} = *self;
|
||||
let sim_io_to_generator_map = Result::<PassOutput<BTreeMap<_, _>, P>, _>::from_iter(
|
||||
sim_io_to_generator_map
|
||||
.iter()
|
||||
.map(|(sim_io, generator_io)| {
|
||||
Ok(sim_io
|
||||
.run_pass(pass_args.as_mut())?
|
||||
.map(|v| (v, *generator_io)))
|
||||
}),
|
||||
)?;
|
||||
Ok(sim_io_to_generator_map.map(|sim_io_to_generator_map| Self {
|
||||
generator,
|
||||
sim_io_to_generator_map: sim_io_to_generator_map.intern_sized(),
|
||||
source_location,
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_run_pass_copy {
|
||||
([$($generics:tt)*] $ty:ty) => {
|
||||
impl<P: Pass, $($generics)*> RunPass<P> for $ty {
|
||||
|
|
@ -1798,17 +1751,16 @@ macro_rules! impl_run_pass_clone {
|
|||
}
|
||||
|
||||
impl_run_pass_clone!([] BigInt);
|
||||
impl_run_pass_clone!([] ExternModuleParameter);
|
||||
impl_run_pass_clone!([] SIntValue);
|
||||
impl_run_pass_clone!([] std::ops::Range<usize>);
|
||||
impl_run_pass_clone!([] UIntValue);
|
||||
impl_run_pass_clone!([] crate::vendor::xilinx::XilinxAnnotation);
|
||||
impl_run_pass_copy!([] BlackBoxInlineAnnotation);
|
||||
impl_run_pass_copy!([] BlackBoxPathAnnotation);
|
||||
impl_run_pass_copy!([] bool);
|
||||
impl_run_pass_copy!([] CustomFirrtlAnnotation);
|
||||
impl_run_pass_copy!([] DocStringAnnotation);
|
||||
impl_run_pass_copy!([] DontTouchAnnotation);
|
||||
impl_run_pass_copy!([] ExternModuleBody);
|
||||
impl_run_pass_copy!([] Interned<str>);
|
||||
impl_run_pass_copy!([] NameId);
|
||||
impl_run_pass_copy!([] SInt);
|
||||
|
|
@ -1817,7 +1769,6 @@ impl_run_pass_copy!([] SVAttributeAnnotation);
|
|||
impl_run_pass_copy!([] UInt);
|
||||
impl_run_pass_copy!([] usize);
|
||||
impl_run_pass_copy!([] FormalKind);
|
||||
impl_run_pass_copy!([] PhantomConst);
|
||||
|
||||
macro_rules! impl_run_pass_for_struct {
|
||||
(
|
||||
|
|
@ -2069,14 +2020,6 @@ impl_run_pass_for_struct! {
|
|||
}
|
||||
}
|
||||
|
||||
impl_run_pass_for_struct! {
|
||||
impl[] RunPass for ExternModuleBody {
|
||||
verilog_name: _,
|
||||
parameters: _,
|
||||
simulation: _,
|
||||
}
|
||||
}
|
||||
|
||||
impl_run_pass_copy!([] MemPort<DynPortType>); // Mem can't contain any `Reset` types
|
||||
impl_run_pass_copy!([] Mem); // Mem can't contain any `Reset` types
|
||||
|
||||
|
|
@ -2148,7 +2091,7 @@ impl<P: Pass> RunPass<P> for StmtDeclaration {
|
|||
) -> Result<PassOutput<Self, P>, DeduceResetsError> {
|
||||
let (annotations, reg) = match self {
|
||||
StmtDeclaration::Wire(v) => {
|
||||
return Ok(v.run_pass(pass_args)?.map(StmtDeclaration::Wire));
|
||||
return Ok(v.run_pass(pass_args)?.map(StmtDeclaration::Wire))
|
||||
}
|
||||
&StmtDeclaration::Reg(StmtReg { annotations, reg }) => (annotations, AnyReg::from(reg)),
|
||||
&StmtDeclaration::RegSync(StmtReg { annotations, reg }) => {
|
||||
|
|
@ -2158,7 +2101,7 @@ impl<P: Pass> RunPass<P> for StmtDeclaration {
|
|||
(annotations, AnyReg::from(reg))
|
||||
}
|
||||
StmtDeclaration::Instance(v) => {
|
||||
return Ok(v.run_pass(pass_args)?.map(StmtDeclaration::Instance));
|
||||
return Ok(v.run_pass(pass_args)?.map(StmtDeclaration::Instance))
|
||||
}
|
||||
};
|
||||
let annotations = annotations.run_pass(pass_args.as_mut())?;
|
||||
|
|
@ -2218,7 +2161,6 @@ impl_run_pass_for_enum! {
|
|||
BlackBoxPath(v),
|
||||
DocString(v),
|
||||
CustomFirrtl(v),
|
||||
Xilinx(v),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2302,9 +2244,9 @@ pub fn deduce_resets(
|
|||
fallback_to_sync_reset: bool,
|
||||
) -> Result<Interned<Module<Bundle>>, DeduceResetsError> {
|
||||
let mut state = State {
|
||||
modules_added_to_graph: HashSet::default(),
|
||||
substituted_modules: HashMap::default(),
|
||||
expr_resets: HashMap::default(),
|
||||
modules_added_to_graph: HashSet::new(),
|
||||
substituted_modules: HashMap::new(),
|
||||
expr_resets: HashMap::new(),
|
||||
reset_graph: ResetGraph::default(),
|
||||
fallback_to_sync_reset,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -5,24 +5,23 @@ use crate::{
|
|||
bundle::{Bundle, BundleField, BundleType},
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, ExprEnum, HdlPartialEq, ToExpr,
|
||||
ops::{self, EnumLiteral},
|
||||
CastBitsTo, CastTo, CastToBits, Expr, ExprEnum, HdlPartialEq, ToExpr,
|
||||
},
|
||||
hdl,
|
||||
int::UInt,
|
||||
intern::{Intern, InternSlice, Interned, Memoize},
|
||||
intern::{Intern, Interned, Memoize},
|
||||
memory::{DynPortType, Mem, MemPort},
|
||||
module::{
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::HashMap,
|
||||
wire::Wire,
|
||||
};
|
||||
use core::fmt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use hashbrown::HashMap;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SimplifyEnumsError {
|
||||
|
|
@ -70,9 +69,7 @@ fn contains_any_enum_types(ty: CanonicalType) -> bool {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => false,
|
||||
| CanonicalType::Clock(_) => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -515,9 +512,7 @@ impl State {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => unreachable!(),
|
||||
| CanonicalType::Clock(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -582,9 +577,7 @@ fn connect_port(
|
|||
| (CanonicalType::Clock(_), _)
|
||||
| (CanonicalType::AsyncReset(_), _)
|
||||
| (CanonicalType::SyncReset(_), _)
|
||||
| (CanonicalType::Reset(_), _)
|
||||
| (CanonicalType::PhantomConst(_), _)
|
||||
| (CanonicalType::DynSimOnly(_), _) => unreachable!(
|
||||
| (CanonicalType::Reset(_), _) => unreachable!(
|
||||
"trying to connect memory ports:\n{:?}\n{:?}",
|
||||
Expr::ty(lhs),
|
||||
Expr::ty(rhs),
|
||||
|
|
@ -620,7 +613,7 @@ fn match_int_tag(
|
|||
block,
|
||||
Block {
|
||||
memories: Default::default(),
|
||||
stmts: [Stmt::from(retval)].intern_slice(),
|
||||
stmts: [Stmt::from(retval)][..].intern(),
|
||||
},
|
||||
],
|
||||
};
|
||||
|
|
@ -672,7 +665,6 @@ impl Folder for State {
|
|||
ExprEnum::UIntLiteral(_)
|
||||
| ExprEnum::SIntLiteral(_)
|
||||
| ExprEnum::BoolLiteral(_)
|
||||
| ExprEnum::PhantomConst(_)
|
||||
| ExprEnum::BundleLiteral(_)
|
||||
| ExprEnum::ArrayLiteral(_)
|
||||
| ExprEnum::Uninit(_)
|
||||
|
|
@ -814,7 +806,7 @@ impl Folder for State {
|
|||
.unwrap()
|
||||
.gen_name(&format!(
|
||||
"{}_{}",
|
||||
memory.scoped_name().1.0,
|
||||
memory.scoped_name().1 .0,
|
||||
port.port_name()
|
||||
)),
|
||||
port.source_location(),
|
||||
|
|
@ -931,9 +923,7 @@ impl Folder for State {
|
|||
| CanonicalType::Clock(_)
|
||||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_)
|
||||
| CanonicalType::PhantomConst(_)
|
||||
| CanonicalType::DynSimOnly(_) => canonical_type.default_fold(self),
|
||||
| CanonicalType::Reset(_) => canonical_type.default_fold(self),
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -956,15 +946,12 @@ impl Folder for State {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)]
|
||||
pub enum SimplifyEnumsKind {
|
||||
SimplifyToEnumsWithNoBody,
|
||||
#[clap(name = "replace-with-bundle-of-uints")]
|
||||
#[serde(rename = "replace-with-bundle-of-uints")]
|
||||
ReplaceWithBundleOfUInts,
|
||||
#[clap(name = "replace-with-uint")]
|
||||
#[serde(rename = "replace-with-uint")]
|
||||
ReplaceWithUInt,
|
||||
}
|
||||
|
||||
|
|
@ -973,8 +960,8 @@ pub fn simplify_enums(
|
|||
kind: SimplifyEnumsKind,
|
||||
) -> Result<Interned<Module<Bundle>>, SimplifyEnumsError> {
|
||||
module.fold(&mut State {
|
||||
enum_types: HashMap::default(),
|
||||
replacement_mem_ports: HashMap::default(),
|
||||
enum_types: HashMap::new(),
|
||||
replacement_mem_ports: HashMap::new(),
|
||||
kind,
|
||||
module_state_stack: vec![],
|
||||
})
|
||||
|
|
|
|||
|
|
@ -9,15 +9,16 @@ use crate::{
|
|||
intern::{Intern, Interned},
|
||||
memory::{Mem, MemPort, PortType},
|
||||
module::{
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
|
||||
transform::visit::{Fold, Folder},
|
||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
util::{HashMap, MakeMutSlice},
|
||||
util::MakeMutSlice,
|
||||
wire::Wire,
|
||||
};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use hashbrown::HashMap;
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
fmt::Write,
|
||||
|
|
@ -61,7 +62,6 @@ enum MemSplit {
|
|||
Bundle {
|
||||
fields: Rc<[MemSplit]>,
|
||||
},
|
||||
PhantomConst,
|
||||
Single {
|
||||
output_mem: Option<Mem>,
|
||||
element_type: SingleType,
|
||||
|
|
@ -76,7 +76,6 @@ impl MemSplit {
|
|||
fn mark_changed_element_type(self) -> Self {
|
||||
match self {
|
||||
MemSplit::Bundle { fields: _ } => self,
|
||||
MemSplit::PhantomConst => self,
|
||||
MemSplit::Single {
|
||||
output_mem,
|
||||
element_type,
|
||||
|
|
@ -98,7 +97,6 @@ impl MemSplit {
|
|||
.map(|field| Self::new(field.ty).mark_changed_element_type())
|
||||
.collect(),
|
||||
},
|
||||
CanonicalType::PhantomConst(_) => MemSplit::PhantomConst,
|
||||
CanonicalType::Array(ty) => {
|
||||
let element = MemSplit::new(ty.element());
|
||||
if let Self::Single {
|
||||
|
|
@ -194,7 +192,6 @@ impl MemSplit {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
|
||||
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -324,9 +321,6 @@ impl SplitMemState<'_, '_> {
|
|||
Expr::field(Expr::<Bundle>::from_canonical(e), &field.name)
|
||||
},
|
||||
|initial_value_element| {
|
||||
let Some(field_offset) = field_offset.only_bit_width() else {
|
||||
todo!("memory containing sim-only values");
|
||||
};
|
||||
&initial_value_element[field_offset..][..field_ty_bit_width]
|
||||
},
|
||||
);
|
||||
|
|
@ -345,7 +339,6 @@ impl SplitMemState<'_, '_> {
|
|||
self.split_state_stack.pop();
|
||||
}
|
||||
}
|
||||
MemSplit::PhantomConst => {}
|
||||
MemSplit::Single {
|
||||
output_mem,
|
||||
element_type: single_type,
|
||||
|
|
@ -545,12 +538,7 @@ impl ModuleState {
|
|||
};
|
||||
loop {
|
||||
match input_element_type {
|
||||
CanonicalType::Bundle(_) => {
|
||||
unreachable!("bundle types are always split")
|
||||
}
|
||||
CanonicalType::PhantomConst(_) => {
|
||||
unreachable!("PhantomConst are always removed")
|
||||
}
|
||||
CanonicalType::Bundle(_) => unreachable!("bundle types are always split"),
|
||||
CanonicalType::Enum(_)
|
||||
if input_array_types
|
||||
.first()
|
||||
|
|
@ -624,7 +612,6 @@ impl ModuleState {
|
|||
| CanonicalType::AsyncReset(_)
|
||||
| CanonicalType::SyncReset(_)
|
||||
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
|
||||
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
|
@ -639,7 +626,7 @@ impl ModuleState {
|
|||
split_state: &SplitState<'_>,
|
||||
) -> Mem {
|
||||
let mem_name = NameId(
|
||||
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1.0)),
|
||||
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1 .0)),
|
||||
Id::new(),
|
||||
);
|
||||
let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name);
|
||||
|
|
@ -756,8 +743,7 @@ impl ModuleState {
|
|||
..
|
||||
}
|
||||
| MemSplit::Bundle { .. }
|
||||
| MemSplit::Array { .. }
|
||||
| MemSplit::PhantomConst => {
|
||||
| MemSplit::Array { .. } => {
|
||||
let mut replacement_ports = Vec::with_capacity(input_mem.ports().len());
|
||||
let mut wire_port_rdata = Vec::with_capacity(input_mem.ports().len());
|
||||
let mut wire_port_wdata = Vec::with_capacity(input_mem.ports().len());
|
||||
|
|
@ -901,7 +887,7 @@ impl Folder for State {
|
|||
module,
|
||||
ModuleState {
|
||||
output_module: None,
|
||||
memories: HashMap::default(),
|
||||
memories: HashMap::new(),
|
||||
},
|
||||
);
|
||||
let mut this = PushedState::push_module(self, module);
|
||||
|
|
|
|||
|
|
@ -11,11 +11,12 @@ use crate::{
|
|||
clock::Clock,
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
expr::{
|
||||
Expr, ExprEnum, ops,
|
||||
ops,
|
||||
target::{
|
||||
Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField,
|
||||
TargetPathDynArrayElement, TargetPathElement,
|
||||
},
|
||||
Expr, ExprEnum,
|
||||
},
|
||||
formal::FormalKind,
|
||||
int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue},
|
||||
|
|
@ -27,15 +28,10 @@ use crate::{
|
|||
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf,
|
||||
StmtInstance, StmtMatch, StmtReg, StmtWire,
|
||||
},
|
||||
phantom_const::PhantomConst,
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, ResetType, SyncReset},
|
||||
sim::{ExternModuleSimulation, value::DynSimOnly},
|
||||
source_location::SourceLocation,
|
||||
ty::{CanonicalType, Type},
|
||||
vendor::xilinx::{
|
||||
XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation,
|
||||
},
|
||||
wire::Wire,
|
||||
};
|
||||
use num_bigint::{BigInt, BigUint};
|
||||
|
|
|
|||
|
|
@ -1,485 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
expr::{
|
||||
Expr, ToExpr,
|
||||
ops::{ExprPartialEq, ExprPartialOrd},
|
||||
},
|
||||
int::Bool,
|
||||
intern::{Intern, Interned, InternedCompare, LazyInterned, LazyInternedTrait, Memoize},
|
||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
ty::{
|
||||
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
|
||||
StaticType, Type, TypeProperties, impl_match_variant_as_self,
|
||||
serde_impls::{SerdeCanonicalType, SerdePhantomConst},
|
||||
},
|
||||
};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error},
|
||||
};
|
||||
use std::{
|
||||
any::Any,
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
ops::Index,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct PhantomConstCanonicalValue {
|
||||
parsed: serde_json::Value,
|
||||
serialized: Interned<str>,
|
||||
}
|
||||
|
||||
impl PhantomConstCanonicalValue {
|
||||
pub fn from_json_value(parsed: serde_json::Value) -> Self {
|
||||
let serialized = Intern::intern_owned(
|
||||
serde_json::to_string(&parsed)
|
||||
.expect("conversion from json value to text shouldn't fail"),
|
||||
);
|
||||
Self { parsed, serialized }
|
||||
}
|
||||
pub fn as_json_value(&self) -> &serde_json::Value {
|
||||
&self.parsed
|
||||
}
|
||||
pub fn as_str(&self) -> Interned<str> {
|
||||
self.serialized
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for PhantomConstCanonicalValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.serialized)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for PhantomConstCanonicalValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.serialized)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for PhantomConstCanonicalValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.serialized == other.serialized
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for PhantomConstCanonicalValue {}
|
||||
|
||||
impl Hash for PhantomConstCanonicalValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.serialized.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for PhantomConstCanonicalValue {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.parsed.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for PhantomConstCanonicalValue {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(Self::from_json_value(serde_json::Value::deserialize(
|
||||
deserializer,
|
||||
)?))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PhantomConstValue: Intern + InternedCompare + Serialize + fmt::Debug {
|
||||
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>;
|
||||
}
|
||||
|
||||
impl<T> PhantomConstValue for T
|
||||
where
|
||||
T: ?Sized + Intern + InternedCompare + Serialize + fmt::Debug,
|
||||
Interned<T>: DeserializeOwned,
|
||||
{
|
||||
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
<Interned<T> as Deserialize<'de>>::deserialize(deserializer)
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrapper type that allows any Rust value to be smuggled as a HDL [`Type`].
|
||||
/// This only works for values that can be [serialized][Serialize] to and [deserialized][Deserialize] from [JSON][serde_json].
|
||||
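///
/// A minimal usage sketch (illustrative only: `MyParams` is a hypothetical user type, not part of
/// this crate, and is assumed to satisfy the serde and interning bounds required by
/// [`PhantomConstValue`]): wrapping it as `PhantomConst::new(MyParams { depth: 4 }.intern_sized())`
/// lets the parameters travel with the HDL type while contributing zero bits to the generated
/// hardware, since this type's [`TypeProperties`] are those of `()`.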
pub struct PhantomConst<T: ?Sized + PhantomConstValue = PhantomConstCanonicalValue> {
|
||||
value: LazyInterned<T>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
|
||||
pub struct PhantomConstWithoutGenerics;
|
||||
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const PhantomConst: PhantomConstWithoutGenerics = PhantomConstWithoutGenerics;
|
||||
|
||||
impl<T: Type + PhantomConstValue> Index<T> for PhantomConstWithoutGenerics {
|
||||
type Output = PhantomConst<T>;
|
||||
|
||||
fn index(&self, value: T) -> &Self::Output {
|
||||
Interned::into_inner(PhantomConst::new(value.intern()).intern_sized())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> fmt::Debug for PhantomConst<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("PhantomConst").field(&self.get()).finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Clone for PhantomConst<T> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Copy for PhantomConst<T> {}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> PartialEq for PhantomConst<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.get() == other.get()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Eq for PhantomConst<T> {}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Hash for PhantomConst<T> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.get().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
struct PhantomConstCanonicalMemoize<T: ?Sized, const IS_FROM_CANONICAL: bool>(PhantomData<T>);
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Copy
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Clone
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Eq
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> PartialEq
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn eq(&self, _other: &Self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Hash
|
||||
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
|
||||
{
|
||||
fn hash<H: Hasher>(&self, _state: &mut H) {}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, false> {
|
||||
type Input = Interned<T>;
|
||||
type InputOwned = Interned<T>;
|
||||
type Output = Interned<PhantomConstCanonicalValue>;
|
||||
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
Intern::intern_sized(PhantomConstCanonicalValue::from_json_value(
|
||||
serde_json::to_value(input)
|
||||
.expect("serialization failed when constructing a canonical PhantomConst"),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, true> {
|
||||
type Input = Interned<PhantomConstCanonicalValue>;
|
||||
type InputOwned = Interned<PhantomConstCanonicalValue>;
|
||||
type Output = Interned<T>;
|
||||
|
||||
fn inner(self, input: &Self::Input) -> Self::Output {
|
||||
PhantomConstValue::deserialize_value(input.as_json_value())
|
||||
.expect("deserialization failed ")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> PhantomConst<T> {
|
||||
pub fn new(value: Interned<T>) -> Self {
|
||||
Self {
|
||||
value: LazyInterned::Interned(value),
|
||||
}
|
||||
}
|
||||
pub const fn new_lazy(v: &'static dyn LazyInternedTrait<T>) -> Self {
|
||||
Self {
|
||||
value: LazyInterned::new_lazy(v),
|
||||
}
|
||||
}
|
||||
pub fn get(self) -> Interned<T> {
|
||||
self.value.interned()
|
||||
}
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
<()>::TYPE_PROPERTIES
|
||||
}
|
||||
pub fn can_connect(self, other: Self) -> bool {
|
||||
self == other
|
||||
}
|
||||
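// Note on the two conversions below: they round-trip the value through its JSON form using the
// `PhantomConstCanonicalMemoize` helpers above; the `Memoize` machinery is assumed to cache the
// result per interned input, so canonicalizing the same value repeatedly should not re-serialize
// it each time.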
pub fn canonical_phantom_const(self) -> PhantomConst {
|
||||
if let Some(&retval) = <dyn Any>::downcast_ref::<PhantomConst>(&self) {
|
||||
return retval;
|
||||
}
|
||||
<PhantomConst>::new(
|
||||
PhantomConstCanonicalMemoize::<T, false>(PhantomData).get_owned(self.get()),
|
||||
)
|
||||
}
|
||||
pub fn from_canonical_phantom_const(canonical_type: PhantomConst) -> Self {
|
||||
if let Some(&retval) = <dyn Any>::downcast_ref::<Self>(&canonical_type) {
|
||||
return retval;
|
||||
}
|
||||
Self::new(
|
||||
PhantomConstCanonicalMemoize::<T, true>(PhantomData).get_owned(canonical_type.get()),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Type for PhantomConst<T> {
|
||||
type BaseType = PhantomConst;
|
||||
type MaskType = ();
|
||||
type SimValue = PhantomConst<T>;
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
()
|
||||
}
|
||||
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
CanonicalType::PhantomConst(self.canonical_phantom_const())
|
||||
}
|
||||
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self {
|
||||
let CanonicalType::PhantomConst(phantom_const) = canonical_type else {
|
||||
panic!("expected PhantomConst");
|
||||
};
|
||||
Self::from_canonical_phantom_const(phantom_const)
|
||||
}
|
||||
|
||||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert!(opaque.is_empty());
|
||||
*self
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert!(opaque.is_empty());
|
||||
assert_eq!(*value, *self);
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(*value, *self);
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Default for PhantomConst<T>
|
||||
where
|
||||
Interned<T>: Default,
|
||||
{
|
||||
fn default() -> Self {
|
||||
Self::TYPE
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> StaticType for PhantomConst<T>
|
||||
where
|
||||
Interned<T>: Default,
|
||||
{
|
||||
const TYPE: Self = PhantomConst {
|
||||
value: LazyInterned::new_lazy(&Interned::<T>::default),
|
||||
};
|
||||
const MASK_TYPE: Self::MaskType = ();
|
||||
const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
|
||||
}
|
||||
|
||||
type SerdeType<T> = SerdeCanonicalType<CanonicalType, SerdePhantomConst<Interned<T>>>;
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Serialize for PhantomConst<T> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
SerdeType::<T>::PhantomConst(SerdePhantomConst(self.get())).serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for PhantomConst<T> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
match SerdeType::<T>::deserialize(deserializer)? {
|
||||
SerdeCanonicalType::PhantomConst(SerdePhantomConst(value)) => Ok(Self::new(value)),
|
||||
ty => Err(Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&"a PhantomConst",
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
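// A `PhantomConst` carries no runtime bits, so once the type-equality assertions below pass there
// is nothing left to compare: equality is the constant `true`, inequality `false`, and the
// ordering comparisons reduce to `<=`/`>=` being true and `<`/`>` being false.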
impl<T: ?Sized + PhantomConstValue> ExprPartialEq<Self> for PhantomConst<T> {
|
||||
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
true.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
false.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ExprPartialOrd<Self> for PhantomConst<T> {
|
||||
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
false.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_le(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
true.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
false.to_expr()
|
||||
}
|
||||
|
||||
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
|
||||
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
|
||||
true.to_expr()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> SimValuePartialEq<Self> for PhantomConst<T> {
|
||||
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
|
||||
assert_eq!(SimValue::ty(this), SimValue::ty(other));
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ToSimValue for PhantomConst<T> {
|
||||
type Type = PhantomConst<T>;
|
||||
|
||||
fn to_sim_value(&self) -> SimValue<Self::Type> {
|
||||
SimValue::from_value(*self, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<PhantomConst<T>> for PhantomConst<T> {
|
||||
fn to_sim_value_with_type(&self, ty: PhantomConst<T>) -> SimValue<PhantomConst<T>> {
|
||||
SimValue::from_value(ty, *self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<CanonicalType> for PhantomConst<T> {
|
||||
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
|
||||
SimValue::into_canonical(SimValue::from_value(Self::from_canonical(ty), *self))
|
||||
}
|
||||
}
|
||||
|
||||
mod sealed {
|
||||
pub trait Sealed<T: ?Sized> {}
|
||||
}
|
||||
|
||||
pub trait PhantomConstGet<T: ?Sized + PhantomConstValue>: sealed::Sealed<T> {
|
||||
fn get(&self) -> Interned<T>;
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, This: ?Sized + std::ops::Deref<Target: PhantomConstGet<T>>>
|
||||
sealed::Sealed<T> for This
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue, This: ?Sized + std::ops::Deref<Target: PhantomConstGet<T>>>
|
||||
PhantomConstGet<T> for This
|
||||
{
|
||||
fn get(&self) -> Interned<T> {
|
||||
This::Target::get(&**self)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! impl_phantom_const_get {
|
||||
(
|
||||
impl PhantomConstGet<$T:ident> for $ty:ty {
|
||||
fn $get:ident(&$get_self:ident) -> _ $get_body:block
|
||||
}
|
||||
) => {
|
||||
impl<$T: ?Sized + PhantomConstValue> sealed::Sealed<$T> for $ty {}
|
||||
|
||||
impl<$T: ?Sized + PhantomConstValue> PhantomConstGet<$T> for $ty {
|
||||
fn $get(&$get_self) -> Interned<$T> $get_body
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_phantom_const_get! {
|
||||
impl PhantomConstGet<T> for PhantomConst<T> {
|
||||
fn get(&self) -> _ {
|
||||
PhantomConst::get(*self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_phantom_const_get! {
|
||||
impl PhantomConstGet<T> for Expr<PhantomConst<T>> {
|
||||
fn get(&self) -> _ {
|
||||
PhantomConst::get(Expr::ty(*self))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub trait ReturnSelfUnchanged<T: ?Sized> {
|
||||
type Type: ?Sized;
|
||||
}
|
||||
|
||||
impl<This: ?Sized, T: ?Sized> ReturnSelfUnchanged<T> for This {
|
||||
type Type = This;
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
pub fn type_alias_phantom_const_get_helper<T: ?Sized + PhantomConstValue, R: Intern + Clone>(
|
||||
param: impl PhantomConstGet<T>,
|
||||
get: impl FnOnce(Interned<T>) -> R,
|
||||
) -> &'static R {
|
||||
Interned::into_inner(get(param.get()).intern_sized())
|
||||
}
|
||||
File diff suppressed because it is too large
@ -1,62 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{intern::Intern, prelude::*};
|
||||
use ordered_float::NotNan;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
#[non_exhaustive]
|
||||
pub struct ClockInputProperties {
|
||||
pub frequency: NotNan<f64>,
|
||||
}
|
||||
|
||||
#[hdl(no_runtime_generics, no_static)]
|
||||
pub struct ClockInput {
|
||||
pub clk: Clock,
|
||||
pub properties: PhantomConst<ClockInputProperties>,
|
||||
}
|
||||
|
||||
impl ClockInput {
|
||||
#[track_caller]
|
||||
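/// Illustrative example (assuming `frequency` is given in hertz, which is not stated here):
/// `ClockInput::new(100_000_000.0)` would describe a 100 MHz clock input.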
pub fn new(frequency: f64) -> Self {
|
||||
assert!(
|
||||
frequency > 0.0 && frequency.is_finite(),
|
||||
"invalid clock frequency: {frequency}"
|
||||
);
|
||||
Self {
|
||||
clk: Clock,
|
||||
properties: PhantomConst::new(
|
||||
ClockInputProperties {
|
||||
frequency: NotNan::new(frequency).expect("just checked"),
|
||||
}
|
||||
.intern_sized(),
|
||||
),
|
||||
}
|
||||
}
|
||||
pub fn frequency(self) -> f64 {
|
||||
self.properties.get().frequency.into_inner()
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
pub struct Led {
|
||||
pub on: Bool,
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
pub struct RgbLed {
|
||||
pub r: Bool,
|
||||
pub g: Bool,
|
||||
pub b: Bool,
|
||||
}
|
||||
|
||||
#[hdl]
|
||||
/// UART, used as an output from the FPGA
|
||||
pub struct Uart {
|
||||
/// transmit from the FPGA's perspective
|
||||
pub tx: Bool,
|
||||
/// receive from the FPGA's perspective
|
||||
#[hdl(flip)]
|
||||
pub rx: Bool,
|
||||
}
|
||||
|
|
@ -1,45 +1,36 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
pub use crate::{
|
||||
__,
|
||||
annotations::{
|
||||
BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
||||
},
|
||||
array::{Array, ArrayType},
|
||||
build::{BuildCli, JobParams, RunBuild},
|
||||
bundle::Bundle,
|
||||
cli::Cli,
|
||||
clock::{Clock, ClockDomain, ToClock},
|
||||
enum_::{Enum, HdlNone, HdlOption, HdlSome},
|
||||
expr::{
|
||||
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
|
||||
ReduceBits, ToExpr, repeat,
|
||||
repeat, CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
|
||||
ReduceBits, ToExpr,
|
||||
},
|
||||
formal::{
|
||||
MakeFormalExpr, all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset,
|
||||
hdl_assert, hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
|
||||
hdl_cover_with_enable,
|
||||
all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset, hdl_assert,
|
||||
hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
|
||||
hdl_cover_with_enable, MakeFormalExpr,
|
||||
},
|
||||
hdl, hdl_module,
|
||||
int::{Bool, DynSize, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
|
||||
int::{Bool, DynSize, KnownSize, SInt, SIntType, Size, UInt, UIntType},
|
||||
memory::{Mem, MemBuilder, ReadUnderWrite},
|
||||
module::{
|
||||
Instance, Module, ModuleBuilder, annotate, connect, connect_any, incomplete_wire, instance,
|
||||
memory, memory_array, memory_with_init, reg_builder, wire,
|
||||
annotate, connect, connect_any, incomplete_wire, instance, memory, memory_array,
|
||||
memory_with_init, reg_builder, wire, Instance, Module, ModuleBuilder,
|
||||
},
|
||||
phantom_const::{PhantomConst, PhantomConstGet},
|
||||
platform::{DynPlatform, Platform, PlatformIOBuilder, peripherals},
|
||||
reg::Reg,
|
||||
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
|
||||
sim::{
|
||||
ExternModuleSimulationState, Simulation,
|
||||
time::{SimDuration, SimInstant},
|
||||
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
|
||||
},
|
||||
source_location::SourceLocation,
|
||||
testing::{FormalMode, assert_formal},
|
||||
ty::{AsMask, CanonicalType, Type},
|
||||
util::{ConstUsize, GenericConstUsize},
|
||||
wire::Wire,
|
||||
__,
|
||||
};
|
||||
pub use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
|
|
|
|||
|
|
@ -2,15 +2,11 @@
// See Notices.txt for copyright information
use crate::{
clock::Clock,
expr::{Expr, ToExpr, ops},
expr::{ops, Expr, ToExpr},
int::{Bool, SInt, UInt},
source_location::SourceLocation,
ty::{
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
},
ty::{impl_match_variant_as_self, CanonicalType, StaticType, Type, TypeProperties},
};
use bitvec::{bits, order::Lsb0};

mod sealed {
pub trait ResetTypeSealed {}

@ -49,7 +45,6 @@ macro_rules! reset_type {
|
|||
impl Type for $name {
|
||||
type BaseType = $name;
|
||||
type MaskType = Bool;
|
||||
type SimValue = bool;
|
||||
|
||||
impl_match_variant_as_self!();
|
||||
|
||||
|
|
@ -71,31 +66,6 @@ macro_rules! reset_type {
|
|||
};
|
||||
retval
|
||||
}
|
||||
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
opaque.bits()[0]
|
||||
}
|
||||
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
*value = opaque.bits()[0];
|
||||
}
|
||||
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
|
||||
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
|
||||
[bits![0], bits![1]][*value as usize],
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl $name {
|
||||
|
|
@ -115,7 +85,6 @@ macro_rules! reset_type {
|
|||
is_storable: false,
|
||||
is_castable_from_bits: $is_castable_from_bits,
|
||||
bit_width: 1,
|
||||
sim_only_values_len: 0,
|
||||
};
|
||||
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
|
|
@ -1,304 +0,0 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
//! `unsafe` parts of [`DynSimOnlyValue`]
|
||||
|
||||
use serde::{Serialize, de::DeserializeOwned};
|
||||
use std::{
|
||||
any::{self, TypeId},
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
marker::PhantomData,
|
||||
mem::ManuallyDrop,
|
||||
rc::Rc,
|
||||
};
|
||||
|
||||
pub trait SimOnlyValueTrait:
|
||||
'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default
|
||||
{
|
||||
}
|
||||
|
||||
impl<T: 'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default>
|
||||
SimOnlyValueTrait for T
|
||||
{
|
||||
}
|
||||
|
||||
/// Safety: `type_id_dyn` must return `TypeId::of::<T>()` where `Self = SimOnly<T>`
|
||||
unsafe trait DynSimOnlyTrait: 'static + Send + Sync {
|
||||
fn type_id_dyn(&self) -> TypeId;
|
||||
fn type_name(&self) -> &'static str;
|
||||
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait>;
|
||||
fn deserialize_from_json_string(
|
||||
&self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>>;
|
||||
}
|
||||
|
||||
/// Safety: `type_id_dyn` is implemented correctly
|
||||
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyTrait for SimOnly<T> {
|
||||
fn type_id_dyn(&self) -> TypeId {
|
||||
TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn type_name(&self) -> &'static str {
|
||||
any::type_name::<T>()
|
||||
}
|
||||
|
||||
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait> {
|
||||
Rc::new(T::default())
|
||||
}
|
||||
|
||||
fn deserialize_from_json_string(
|
||||
&self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>> {
|
||||
Ok(Rc::<T>::new(serde_json::from_str(json_str)?))
|
||||
}
|
||||
}
|
||||
|
||||
/// Safety:
|
||||
/// * `type_id_dyn()` must return `TypeId::of::<Self>()`.
|
||||
/// * `ty().type_id()` must return `TypeId::of::<Self>()`.
|
||||
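/// (These invariants are what make the raw-pointer casts in `downcast_ref`/`downcast_rc` below
/// sound: both check `type_id_dyn()` against `TypeId::of::<T>()` before reinterpreting the
/// reference or the `Rc`.)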
unsafe trait DynSimOnlyValueTrait: 'static + fmt::Debug {
|
||||
fn type_id_dyn(&self) -> TypeId;
|
||||
fn ty(&self) -> DynSimOnly;
|
||||
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool;
|
||||
fn serialize_to_json_string(&self) -> serde_json::Result<String>;
|
||||
fn hash_dyn(&self, state: &mut dyn Hasher);
|
||||
}
|
||||
|
||||
impl dyn DynSimOnlyValueTrait {
|
||||
fn is<T: SimOnlyValueTrait>(&self) -> bool {
|
||||
Self::type_id_dyn(self) == TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
|
||||
if Self::is::<T>(self) {
|
||||
// Safety: checked that `Self` is really `T`
|
||||
Some(unsafe { &*(self as *const Self as *const T) })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn downcast_rc<T: SimOnlyValueTrait>(self: Rc<Self>) -> Result<Rc<T>, Rc<Self>> {
|
||||
if Self::is::<T>(&*self) {
|
||||
// Safety: checked that `Self` is really `T`
|
||||
Ok(unsafe { Rc::from_raw(Rc::into_raw(self) as *const T) })
|
||||
} else {
|
||||
Err(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Safety:
|
||||
/// * `type_id_dyn()` returns `TypeId::of::<Self>()`.
|
||||
/// * `ty().type_id()` returns `TypeId::of::<Self>()`.
|
||||
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyValueTrait for T {
|
||||
fn type_id_dyn(&self) -> TypeId {
|
||||
TypeId::of::<T>()
|
||||
}
|
||||
|
||||
fn ty(&self) -> DynSimOnly {
|
||||
DynSimOnly::of::<T>()
|
||||
}
|
||||
|
||||
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool {
|
||||
other.downcast_ref::<T>().is_some_and(|other| self == other)
|
||||
}
|
||||
|
||||
fn serialize_to_json_string(&self) -> serde_json::Result<String> {
|
||||
serde_json::to_string(self)
|
||||
}
|
||||
|
||||
fn hash_dyn(&self, mut state: &mut dyn Hasher) {
|
||||
self.hash(&mut state);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct DynSimOnly {
|
||||
ty: &'static dyn DynSimOnlyTrait,
|
||||
}
|
||||
|
||||
impl DynSimOnly {
|
||||
pub const fn of<T: SimOnlyValueTrait>() -> Self {
|
||||
Self {
|
||||
ty: &const { SimOnly::<T>::new() },
|
||||
}
|
||||
}
|
||||
pub fn type_id(self) -> TypeId {
|
||||
self.ty.type_id_dyn()
|
||||
}
|
||||
pub fn type_name(self) -> &'static str {
|
||||
self.ty.type_name()
|
||||
}
|
||||
pub fn is<T: SimOnlyValueTrait>(self) -> bool {
|
||||
self.type_id() == TypeId::of::<T>()
|
||||
}
|
||||
pub fn downcast<T: SimOnlyValueTrait>(self) -> Option<SimOnly<T>> {
|
||||
self.is::<T>().then_some(SimOnly::default())
|
||||
}
|
||||
pub fn deserialize_from_json_string(
|
||||
self,
|
||||
json_str: &str,
|
||||
) -> serde_json::Result<DynSimOnlyValue> {
|
||||
self.ty
|
||||
.deserialize_from_json_string(json_str)
|
||||
.map(DynSimOnlyValue)
|
||||
}
|
||||
pub fn default_value(self) -> DynSimOnlyValue {
|
||||
DynSimOnlyValue(self.ty.default_value())
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DynSimOnly {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
Self::type_id(*self) == Self::type_id(*other)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DynSimOnly {}
|
||||
|
||||
impl Hash for DynSimOnly {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
Self::type_id(*self).hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for DynSimOnly {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "SimOnly<{}>", self.ty.type_name())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> From<SimOnly<T>> for DynSimOnly {
|
||||
fn from(value: SimOnly<T>) -> Self {
|
||||
let SimOnly(PhantomData) = value;
|
||||
Self::of::<T>()
|
||||
}
|
||||
}
|
||||
|
||||
/// the [`Type`][Type] for a value that can only be used in a Fayalite simulation; it can't be converted to FIRRTL
|
||||
///
|
||||
/// [Type]: crate::ty::Type
|
||||
#[derive(Clone, Eq, PartialEq, Hash)]
|
||||
pub struct SimOnly<T: SimOnlyValueTrait>(PhantomData<fn(T) -> T>);
|
||||
|
||||
impl<T: SimOnlyValueTrait> fmt::Debug for SimOnly<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
DynSimOnly::of::<T>().fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> SimOnly<T> {
|
||||
pub const fn new() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> Copy for SimOnly<T> {}
|
||||
|
||||
impl<T: SimOnlyValueTrait> Default for SimOnly<T> {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
/// a value that can only be used in a Fayalite simulation; it can't be converted to FIRRTL
|
||||
#[derive(Clone, Eq, PartialEq, Hash, Default, PartialOrd, Ord)]
|
||||
pub struct SimOnlyValue<T: SimOnlyValueTrait>(Rc<T>);
|
||||
|
||||
impl<T: SimOnlyValueTrait> SimOnlyValue<T> {
|
||||
pub fn with_dyn_ref<F: FnOnce(&DynSimOnlyValue) -> R, R>(&self, f: F) -> R {
|
||||
// Safety: creating a copied `Rc<T>` is safe as long as the copy isn't dropped and isn't changed
|
||||
// to point somewhere else, `f` can't change `dyn_ref` because it's only given a shared reference.
|
||||
let dyn_ref =
|
||||
unsafe { ManuallyDrop::new(DynSimOnlyValue(Rc::<T>::from_raw(Rc::as_ptr(&self.0)))) };
|
||||
f(&dyn_ref)
|
||||
}
|
||||
pub fn from_rc(v: Rc<T>) -> Self {
|
||||
Self(v)
|
||||
}
|
||||
pub fn new(v: T) -> Self {
|
||||
Self(Rc::new(v))
|
||||
}
|
||||
pub fn into_inner(this: Self) -> Rc<T> {
|
||||
this.0
|
||||
}
|
||||
pub fn inner_mut(this: &mut Self) -> &mut Rc<T> {
|
||||
&mut this.0
|
||||
}
|
||||
pub fn inner(this: &Self) -> &Rc<T> {
|
||||
&this.0
|
||||
}
|
||||
pub fn into_dyn(this: Self) -> DynSimOnlyValue {
|
||||
DynSimOnlyValue::from(this)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> std::ops::Deref for SimOnlyValue<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
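// `deref_mut` below goes through `Rc::make_mut`, so mutating a shared `SimOnlyValue` first clones
// the inner value (copy-on-write) instead of mutating it through other outstanding handles.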
impl<T: SimOnlyValueTrait> std::ops::DerefMut for SimOnlyValue<T> {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
Rc::make_mut(&mut self.0)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DynSimOnlyValue(Rc<dyn DynSimOnlyValueTrait>);
|
||||
|
||||
impl fmt::Debug for DynSimOnlyValue {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
<dyn DynSimOnlyValueTrait as fmt::Debug>::fmt(&*self.0, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for DynSimOnlyValue {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
DynSimOnlyValueTrait::eq_dyn(&*self.0, &*other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for DynSimOnlyValue {}
|
||||
|
||||
impl Hash for DynSimOnlyValue {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
DynSimOnlyValueTrait::hash_dyn(&*self.0, state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SimOnlyValueTrait> From<SimOnlyValue<T>> for DynSimOnlyValue {
|
||||
fn from(value: SimOnlyValue<T>) -> Self {
|
||||
Self(value.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl DynSimOnlyValue {
|
||||
pub fn ty(&self) -> DynSimOnly {
|
||||
self.0.ty()
|
||||
}
|
||||
pub fn type_id(&self) -> TypeId {
|
||||
self.0.type_id_dyn()
|
||||
}
|
||||
pub fn is<T: SimOnlyValueTrait>(&self) -> bool {
|
||||
self.0.is::<T>()
|
||||
}
|
||||
pub fn downcast<T: SimOnlyValueTrait>(self) -> Result<SimOnlyValue<T>, DynSimOnlyValue> {
|
||||
match <dyn DynSimOnlyValueTrait>::downcast_rc(self.0) {
|
||||
Ok(v) => Ok(SimOnlyValue(v)),
|
||||
Err(v) => Err(Self(v)),
|
||||
}
|
||||
}
|
||||
pub fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
|
||||
<dyn DynSimOnlyValueTrait>::downcast_ref(&*self.0)
|
||||
}
|
||||
pub fn serialize_to_json_string(&self) -> serde_json::Result<String> {
|
||||
self.0.serialize_to_json_string()
|
||||
}
|
||||
}
|
||||
|
|
@ -5,86 +5,22 @@ use crate::{
|
|||
enum_::{Enum, EnumType},
|
||||
expr::Flow,
|
||||
int::UInt,
|
||||
intern::{Intern, Interned},
|
||||
sim::{
|
||||
time::{SimDuration, SimInstant},
|
||||
TraceArray, TraceAsyncReset, TraceBool, TraceBundle, TraceClock, TraceDecl,
|
||||
TraceEnumDiscriminant, TraceEnumWithFields, TraceFieldlessEnum, TraceInstance,
|
||||
TraceLocation, TraceMem, TraceMemPort, TraceMemoryId, TraceMemoryLocation, TraceModule,
|
||||
TraceModuleIO, TraceReg, TraceSInt, TraceScalar, TraceScalarId, TraceScope, TraceSimOnly,
|
||||
TraceSyncReset, TraceUInt, TraceWire, TraceWriter, TraceWriterDecls,
|
||||
time::{SimDuration, SimInstant},
|
||||
value::DynSimOnlyValue,
|
||||
TraceModuleIO, TraceReg, TraceSInt, TraceScalar, TraceScalarId, TraceScope, TraceSyncReset,
|
||||
TraceUInt, TraceWire, TraceWriter, TraceWriterDecls,
|
||||
},
|
||||
util::HashMap,
|
||||
};
|
||||
use bitvec::{order::Lsb0, slice::BitSlice};
|
||||
use hashbrown::hash_map::Entry;
|
||||
use std::{
|
||||
fmt::{self, Write as _},
|
||||
io, mem,
|
||||
fmt,
|
||||
io::{self, Write},
|
||||
mem,
|
||||
};
|
||||
|
||||
#[derive(Default)]
|
||||
struct Scope {
|
||||
last_inserted: HashMap<Interned<str>, usize>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
struct VerilogIdentifier {
|
||||
unescaped_name: Interned<str>,
|
||||
}
|
||||
|
||||
impl VerilogIdentifier {
|
||||
fn needs_escape(self) -> bool {
|
||||
// we only allow ascii, so we can just check bytes
|
||||
let Some((&first, rest)) = self.unescaped_name.as_bytes().split_first() else {
|
||||
unreachable!("Scope::new_identifier guarantees a non-empty name");
|
||||
};
|
||||
if !first.is_ascii_alphabetic() && first != b'_' {
|
||||
true
|
||||
} else {
|
||||
rest.iter()
|
||||
.any(|&ch| !ch.is_ascii_alphanumeric() && ch != b'_' && ch != b'$')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for VerilogIdentifier {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
if self.needs_escape() {
|
||||
f.write_str("\\")?;
|
||||
}
|
||||
write!(f, "{}", Escaped(self.unescaped_name))
|
||||
}
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
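// Illustrative behavior of the disambiguation below: the first identifier registered as `clk`
// keeps its name, a second `clk` becomes `clk_2`, then `clk_3`, and so on; the chosen
// disambiguator is also recorded under the original name to speed up later lookups.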
fn new_identifier(&mut self, unescaped_name: Interned<str>) -> VerilogIdentifier {
|
||||
let next_disambiguator = match self.last_inserted.entry(unescaped_name) {
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(1);
|
||||
return VerilogIdentifier { unescaped_name };
|
||||
}
|
||||
Entry::Occupied(entry) => entry.get() + 1,
|
||||
};
|
||||
let mut disambiguated_name = String::from(&*unescaped_name);
|
||||
for disambiguator in next_disambiguator.. {
|
||||
disambiguated_name.truncate(unescaped_name.len());
|
||||
write!(disambiguated_name, "_{disambiguator}").expect("can't fail");
|
||||
if let Entry::Vacant(entry) = self.last_inserted.entry((*disambiguated_name).intern()) {
|
||||
let retval = VerilogIdentifier {
|
||||
unescaped_name: *entry.key(),
|
||||
};
|
||||
entry.insert(1);
|
||||
// speed up future searches
|
||||
self.last_inserted.insert(unescaped_name, disambiguator);
|
||||
return retval;
|
||||
}
|
||||
}
|
||||
panic!("too many names");
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VcdWriterDecls<W: io::Write + 'static> {
|
||||
writer: W,
|
||||
timescale: SimDuration,
|
||||
|
|
@ -161,20 +97,14 @@ impl<W: io::Write> fmt::Debug for VcdWriterDecls<W> {
|
|||
}
|
||||
}
|
||||
|
||||
/// pass in scope to ensure it's not available in child scope
|
||||
fn write_vcd_scope<W: io::Write, R>(
|
||||
writer: &mut W,
|
||||
scope_type: &str,
|
||||
scope_name: Interned<str>,
|
||||
scope: &mut Scope,
|
||||
f: impl FnOnce(&mut W, &mut Scope) -> io::Result<R>,
|
||||
scope_name: &str,
|
||||
f: impl FnOnce(&mut W) -> io::Result<R>,
|
||||
) -> io::Result<R> {
|
||||
writeln!(
|
||||
writer,
|
||||
"$scope {scope_type} {} $end",
|
||||
scope.new_identifier(scope_name),
|
||||
)?;
|
||||
let retval = f(writer, &mut Scope::default())?;
|
||||
writeln!(writer, "$scope {scope_type} {scope_name} $end")?;
|
||||
let retval = f(writer)?;
|
||||
writeln!(writer, "$upscope $end")?;
|
||||
Ok(retval)
|
||||
}
|
||||
|
|
@ -213,28 +143,24 @@ trait_arg! {
|
|||
|
||||
struct ArgModule<'a> {
|
||||
properties: &'a mut VcdWriterProperties,
|
||||
scope: &'a mut Scope,
|
||||
}
|
||||
|
||||
impl<'a> ArgModule<'a> {
|
||||
fn reborrow(&mut self) -> ArgModule<'_> {
|
||||
ArgModule {
|
||||
properties: self.properties,
|
||||
scope: self.scope,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct ArgModuleBody<'a> {
|
||||
properties: &'a mut VcdWriterProperties,
|
||||
scope: &'a mut Scope,
|
||||
}
|
||||
|
||||
impl<'a> ArgModuleBody<'a> {
|
||||
fn reborrow(&mut self) -> ArgModuleBody<'_> {
|
||||
ArgModuleBody {
|
||||
properties: self.properties,
|
||||
scope: self.scope,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -244,7 +170,6 @@ struct ArgInType<'a> {
|
|||
sink_var_type: &'static str,
|
||||
duplex_var_type: &'static str,
|
||||
properties: &'a mut VcdWriterProperties,
|
||||
scope: &'a mut Scope,
|
||||
}
|
||||
|
||||
impl<'a> ArgInType<'a> {
|
||||
|
|
@ -254,7 +179,6 @@ impl<'a> ArgInType<'a> {
|
|||
sink_var_type: self.sink_var_type,
|
||||
duplex_var_type: self.duplex_var_type,
|
||||
properties: self.properties,
|
||||
scope: self.scope,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -283,7 +207,6 @@ impl WriteTrace for TraceScalar {
|
|||
Self::Clock(v) => v.write_trace(writer, arg),
|
||||
Self::SyncReset(v) => v.write_trace(writer, arg),
|
||||
Self::AsyncReset(v) => v.write_trace(writer, arg),
|
||||
Self::SimOnly(v) => v.write_trace(writer, arg),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -303,42 +226,55 @@ fn write_vcd_id<W: io::Write>(writer: &mut W, mut id: usize) -> io::Result<()> {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
struct Escaped<T: fmt::Display>(T);
|
||||
|
||||
impl<T: fmt::Display> fmt::Display for Escaped<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
// escaping rules from function GTKWave uses to decode VCD strings:
|
||||
// https://github.com/gtkwave/gtkwave/blob/491f24d7e8619cfc1fcc65704ee5c967d1083c18/lib/libfst/fstapi.c#L7090
|
||||
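// For example: a space is emitted as `\x20` (it is not ASCII-graphic), a backslash as `\\`,
// and a newline as `\n`, matching the decoding table linked above.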
struct Wrapper<W>(W);
|
||||
impl<W: fmt::Write> fmt::Write for Wrapper<W> {
|
||||
fn write_str(&mut self, s: &str) -> fmt::Result {
|
||||
for byte in s.bytes() {
|
||||
match byte {
|
||||
b'\\' | b'\'' | b'"' | b'?' => {
|
||||
self.0.write_str("\\")?;
|
||||
self.0.write_char(byte as char)?;
|
||||
}
|
||||
b'\n' => self.0.write_str(r"\n")?,
|
||||
b'\r' => self.0.write_str(r"\r")?,
|
||||
b'\t' => self.0.write_str(r"\t")?,
|
||||
0x7 => self.0.write_str(r"\a")?,
|
||||
0x8 => self.0.write_str(r"\b")?,
|
||||
0xC => self.0.write_str(r"\f")?,
|
||||
0xB => self.0.write_str(r"\v")?,
|
||||
_ => {
|
||||
if byte.is_ascii_graphic() {
|
||||
self.0.write_char(byte as char)?;
|
||||
} else {
|
||||
write!(self.0, r"\x{byte:02x}")?;
|
||||
}
|
||||
fn write_escaped<W: io::Write>(writer: &mut W, value: impl fmt::Display) -> io::Result<()> {
|
||||
// escaping rules from function GTKWave uses to decode VCD strings:
|
||||
// https://github.com/gtkwave/gtkwave/blob/491f24d7e8619cfc1fcc65704ee5c967d1083c18/lib/libfst/fstapi.c#L7090
|
||||
struct Wrapper<W>(W);
|
||||
impl<W: io::Write> io::Write for Wrapper<W> {
|
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
|
||||
if buf.is_empty() {
|
||||
return self.0.write(buf);
|
||||
}
|
||||
let mut retval = 0;
|
||||
for &byte in buf {
|
||||
match byte {
|
||||
b'\\' | b'\'' | b'"' | b'?' => self.0.write_all(&[b'\\', byte])?,
|
||||
b'\n' => self.0.write_all(br"\n")?,
|
||||
b'\r' => self.0.write_all(br"\r")?,
|
||||
b'\t' => self.0.write_all(br"\t")?,
|
||||
0x7 => self.0.write_all(br"\a")?,
|
||||
0x8 => self.0.write_all(br"\b")?,
|
||||
0xC => self.0.write_all(br"\f")?,
|
||||
0xB => self.0.write_all(br"\v")?,
|
||||
_ => {
|
||||
if byte.is_ascii_graphic() {
|
||||
self.0.write_all(&[byte])?;
|
||||
} else {
|
||||
write!(self.0, r"\x{byte:02x}")?;
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
retval += 1;
|
||||
}
|
||||
Ok(retval)
|
||||
}
|
||||
|
||||
fn flush(&mut self) -> io::Result<()> {
|
||||
self.0.flush()
|
||||
}
|
||||
write!(Wrapper(f), "{}", self.0)
|
||||
}
|
||||
write!(Wrapper(writer), "{value}")
|
||||
}
|
||||
|
||||
fn is_unescaped_verilog_identifier(ident: &str) -> bool {
|
||||
// we only allow ascii, so we can just check bytes
|
||||
let Some((&first, rest)) = ident.as_bytes().split_first() else {
|
||||
return false; // empty string is not an identifier
|
||||
};
|
||||
(first.is_ascii_alphabetic() || first == b'_')
|
||||
&& rest
|
||||
.iter()
|
||||
.all(|&ch| ch.is_ascii_alphanumeric() || ch == b'_' || ch == b'$')
|
||||
}
|
||||
|
||||
fn write_vcd_var<W: io::Write>(
|
||||
|
|
@ -348,7 +284,7 @@ fn write_vcd_var<W: io::Write>(
|
|||
var_type: &str,
|
||||
size: usize,
|
||||
location: TraceLocation,
|
||||
name: VerilogIdentifier,
|
||||
name: &str,
|
||||
) -> io::Result<()> {
|
||||
let id = match location {
|
||||
TraceLocation::Scalar(id) => id.as_usize(),
|
||||
|
|
@ -383,7 +319,12 @@ fn write_vcd_var<W: io::Write>(
|
|||
};
|
||||
write!(writer, "$var {var_type} {size} ")?;
|
||||
write_vcd_id(writer, id)?;
|
||||
writeln!(writer, " {name} $end")
|
||||
writer.write_all(b" ")?;
|
||||
if !is_unescaped_verilog_identifier(name) {
|
||||
writer.write_all(b"\\")?;
|
||||
}
|
||||
write_escaped(writer, name)?;
|
||||
writer.write_all(b" $end\n")
|
||||
}
|
||||
|
||||
impl WriteTrace for TraceUInt {
|
||||
|
|
@ -393,7 +334,6 @@ impl WriteTrace for TraceUInt {
|
|||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
} = arg.in_type();
|
||||
let Self {
|
||||
location,
|
||||
|
|
@ -416,7 +356,7 @@ impl WriteTrace for TraceUInt {
|
|||
var_type,
|
||||
ty.width(),
|
||||
location,
|
||||
scope.new_identifier(name),
|
||||
&name,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -481,7 +421,6 @@ impl WriteTrace for TraceEnumDiscriminant {
|
|||
sink_var_type: _,
|
||||
duplex_var_type: _,
|
||||
properties,
|
||||
scope,
|
||||
} = arg.in_type();
|
||||
let Self {
|
||||
location,
|
||||
|
|
@ -496,7 +435,7 @@ impl WriteTrace for TraceEnumDiscriminant {
|
|||
"string",
|
||||
1,
|
||||
location,
|
||||
scope.new_identifier(name),
|
||||
&name,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
@ -549,33 +488,6 @@ impl WriteTrace for TraceAsyncReset {
|
|||
}
|
||||
}
|
||||
|
||||
impl WriteTrace for TraceSimOnly {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgInType {
|
||||
source_var_type: _,
|
||||
sink_var_type: _,
|
||||
duplex_var_type: _,
|
||||
properties,
|
||||
scope,
|
||||
} = arg.in_type();
|
||||
let Self {
|
||||
location,
|
||||
name,
|
||||
ty: _,
|
||||
flow: _,
|
||||
} = self;
|
||||
write_vcd_var(
|
||||
properties,
|
||||
MemoryElementPartBody::Scalar,
|
||||
writer,
|
||||
"string",
|
||||
1,
|
||||
location,
|
||||
scope.new_identifier(name),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl WriteTrace for TraceScope {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, arg: A) -> io::Result<()> {
|
||||
match self {
|
||||
|
|
@ -595,11 +507,11 @@ impl WriteTrace for TraceScope {
|
|||
|
||||
impl WriteTrace for TraceModule {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModule { properties, scope } = arg.module();
|
||||
let ArgModule { properties } = arg.module();
|
||||
let Self { name, children } = self;
|
||||
write_vcd_scope(writer, "module", name, scope, |writer, scope| {
|
||||
write_vcd_scope(writer, "module", &name, |writer| {
|
||||
for child in children {
|
||||
child.write_trace(writer, ArgModuleBody { properties, scope })?;
|
||||
child.write_trace(writer, ArgModuleBody { properties })?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
|
|
@ -608,7 +520,7 @@ impl WriteTrace for TraceModule {
|
|||
|
||||
impl WriteTrace for TraceInstance {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModuleBody { properties, scope } = arg.module_body();
|
||||
let ArgModuleBody { properties } = arg.module_body();
|
||||
let Self {
|
||||
name: _,
|
||||
instance_io,
|
||||
|
|
@ -622,16 +534,15 @@ impl WriteTrace for TraceInstance {
|
|||
sink_var_type: "wire",
|
||||
duplex_var_type: "wire",
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)?;
|
||||
module.write_trace(writer, ArgModule { properties, scope })
|
||||
module.write_trace(writer, ArgModule { properties })
|
||||
}
|
||||
}
|
||||
|
||||
impl WriteTrace for TraceMem {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModuleBody { properties, scope } = arg.module_body();
|
||||
let ArgModuleBody { properties } = arg.module_body();
|
||||
let Self {
|
||||
id,
|
||||
name,
|
||||
|
|
@ -640,41 +551,27 @@ impl WriteTrace for TraceMem {
|
|||
ports,
|
||||
array_type,
|
||||
} = self;
|
||||
write_vcd_scope(writer, "struct", name, scope, |writer, scope| {
|
||||
write_vcd_scope(
|
||||
writer,
|
||||
"struct",
|
||||
"contents".intern(),
|
||||
scope,
|
||||
|writer, scope| {
|
||||
for element_index in 0..array_type.len() {
|
||||
write_vcd_scope(
|
||||
write_vcd_scope(writer, "struct", &*name, |writer| {
|
||||
write_vcd_scope(writer, "struct", "contents", |writer| {
|
||||
for element_index in 0..array_type.len() {
|
||||
write_vcd_scope(writer, "struct", &format!("[{element_index}]"), |writer| {
|
||||
properties.memory_properties[id.as_usize()].element_index = element_index;
|
||||
properties.memory_properties[id.as_usize()].element_part_index = 0;
|
||||
element_type.write_trace(
|
||||
writer,
|
||||
"struct",
|
||||
Intern::intern_owned(format!("[{element_index}]")),
|
||||
scope,
|
||||
|writer, scope| {
|
||||
properties.memory_properties[id.as_usize()].element_index =
|
||||
element_index;
|
||||
properties.memory_properties[id.as_usize()].element_part_index = 0;
|
||||
element_type.write_trace(
|
||||
writer,
|
||||
ArgInType {
|
||||
source_var_type: "reg",
|
||||
sink_var_type: "reg",
|
||||
duplex_var_type: "reg",
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)
|
||||
ArgInType {
|
||||
source_var_type: "reg",
|
||||
sink_var_type: "reg",
|
||||
duplex_var_type: "reg",
|
||||
properties,
|
||||
},
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
)
|
||||
})?;
|
||||
}
|
||||
Ok(())
|
||||
})?;
|
||||
for port in ports {
|
||||
port.write_trace(writer, ArgModuleBody { properties, scope })?;
|
||||
port.write_trace(writer, ArgModuleBody { properties })?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
|
|
@ -683,7 +580,7 @@ impl WriteTrace for TraceMem {
|
|||
|
||||
impl WriteTrace for TraceMemPort {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModuleBody { properties, scope } = arg.module_body();
|
||||
let ArgModuleBody { properties } = arg.module_body();
|
||||
let Self {
|
||||
name: _,
|
||||
bundle,
|
||||
|
|
@ -696,7 +593,6 @@ impl WriteTrace for TraceMemPort {
|
|||
sink_var_type: "wire",
|
||||
duplex_var_type: "wire",
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
@ -704,7 +600,7 @@ impl WriteTrace for TraceMemPort {
|
|||
|
||||
impl WriteTrace for TraceWire {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModuleBody { properties, scope } = arg.module_body();
|
||||
let ArgModuleBody { properties } = arg.module_body();
|
||||
let Self {
|
||||
name: _,
|
||||
child,
|
||||
|
|
@ -717,7 +613,6 @@ impl WriteTrace for TraceWire {
|
|||
sink_var_type: "wire",
|
||||
duplex_var_type: "wire",
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
@ -725,7 +620,7 @@ impl WriteTrace for TraceWire {
|
|||
|
||||
impl WriteTrace for TraceReg {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModuleBody { properties, scope } = arg.module_body();
|
||||
let ArgModuleBody { properties } = arg.module_body();
|
||||
let Self {
|
||||
name: _,
|
||||
child,
|
||||
|
|
@ -738,7 +633,6 @@ impl WriteTrace for TraceReg {
|
|||
sink_var_type: "reg",
|
||||
duplex_var_type: "reg",
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
@ -746,7 +640,7 @@ impl WriteTrace for TraceReg {
|
|||
|
||||
impl WriteTrace for TraceModuleIO {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgModuleBody { properties, scope } = arg.module_body();
|
||||
let ArgModuleBody { properties } = arg.module_body();
|
||||
let Self {
|
||||
name: _,
|
||||
child,
|
||||
|
|
@ -760,7 +654,6 @@ impl WriteTrace for TraceModuleIO {
|
|||
sink_var_type: "wire",
|
||||
duplex_var_type: "wire",
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
|
@ -768,31 +661,16 @@ impl WriteTrace for TraceModuleIO {
|
|||
|
||||
impl WriteTrace for TraceBundle {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
} = arg.in_type();
|
||||
let mut arg = arg.in_type();
|
||||
let Self {
|
||||
name,
|
||||
fields,
|
||||
ty: _,
|
||||
flow: _,
|
||||
} = self;
|
||||
write_vcd_scope(writer, "struct", name, scope, |writer, scope| {
|
||||
write_vcd_scope(writer, "struct", &name, |writer| {
|
||||
for field in fields {
|
||||
field.write_trace(
|
||||
writer,
|
||||
ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)?;
|
||||
field.write_trace(writer, arg.reborrow())?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
|
|
@ -801,31 +679,16 @@ impl WriteTrace for TraceBundle {
|
|||
|
||||
impl WriteTrace for TraceArray {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
} = arg.in_type();
|
||||
let mut arg = arg.in_type();
|
||||
let Self {
|
||||
name,
|
||||
elements,
|
||||
ty: _,
|
||||
flow: _,
|
||||
} = self;
|
||||
write_vcd_scope(writer, "struct", name, scope, |writer, scope| {
|
||||
write_vcd_scope(writer, "struct", &name, |writer| {
|
||||
for element in elements {
|
||||
element.write_trace(
|
||||
writer,
|
||||
ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)?;
|
||||
element.write_trace(writer, arg.reborrow())?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
|
|
@ -834,13 +697,7 @@ impl WriteTrace for TraceArray {
|
|||
|
||||
impl WriteTrace for TraceEnumWithFields {
|
||||
fn write_trace<W: io::Write, A: Arg>(self, writer: &mut W, mut arg: A) -> io::Result<()> {
|
||||
let ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
} = arg.in_type();
|
||||
let mut arg = arg.in_type();
|
||||
let Self {
|
||||
name,
|
||||
discriminant,
|
||||
|
|
@ -848,28 +705,10 @@ impl WriteTrace for TraceEnumWithFields {
|
|||
ty: _,
|
||||
flow: _,
|
||||
} = self;
|
||||
write_vcd_scope(writer, "struct", name, scope, |writer, scope| {
|
||||
discriminant.write_trace(
|
||||
writer,
|
||||
ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)?;
|
||||
write_vcd_scope(writer, "struct", &name, |writer| {
|
||||
discriminant.write_trace(writer, arg.reborrow())?;
|
||||
for field in non_empty_fields {
|
||||
field.write_trace(
|
||||
writer,
|
||||
ArgInType {
|
||||
source_var_type,
|
||||
sink_var_type,
|
||||
duplex_var_type,
|
||||
properties,
|
||||
scope,
|
||||
},
|
||||
)?;
|
||||
field.write_trace(writer, arg.reborrow())?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
|
|
@ -905,7 +744,6 @@ impl<W: io::Write> TraceWriterDecls for VcdWriterDecls<W> {
|
|||
&mut writer,
|
||||
ArgModule {
|
||||
properties: &mut properties,
|
||||
scope: &mut Scope::default(),
|
||||
},
|
||||
)?;
|
||||
writeln!(writer, "$enddefinitions $end")?;
|
||||
|
|
@ -960,7 +798,9 @@ fn write_string_value_change(
|
|||
value: impl fmt::Display,
|
||||
id: usize,
|
||||
) -> io::Result<()> {
|
||||
write!(writer, "s{} ", Escaped(value))?;
|
||||
writer.write_all(b"s")?;
|
||||
write_escaped(writer, value)?;
|
||||
writer.write_all(b" ")?;
|
||||
write_vcd_id(writer, id)?;
|
||||
writer.write_all(b"\n")
|
||||
}
|
||||
|
|
@ -1090,14 +930,6 @@ impl<W: io::Write> TraceWriter for VcdWriter<W> {
|
|||
) -> Result<(), Self::Error> {
|
||||
write_enum_discriminant_value_change(&mut self.writer, variant_index, ty, id.as_usize())
|
||||
}
|
||||
|
||||
fn set_signal_sim_only_value(
|
||||
&mut self,
|
||||
id: TraceScalarId,
|
||||
value: &DynSimOnlyValue,
|
||||
) -> Result<(), Self::Error> {
|
||||
write_string_value_change(&mut self.writer, format_args!("{value:?}"), id.as_usize())
|
||||
}
|
||||
}
|
||||
|
||||
impl<W: io::Write> fmt::Debug for VcdWriter<W> {
|
||||
|
|
@ -1114,49 +946,3 @@ impl<W: io::Write> fmt::Debug for VcdWriter<W> {
|
|||
.finish_non_exhaustive()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_scope() {
|
||||
let mut scope = Scope::default();
|
||||
assert_eq!(&*scope.new_identifier("foo".intern()).unescaped_name, "foo");
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo_0".intern()).unescaped_name,
|
||||
"foo_0"
|
||||
);
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo_1".intern()).unescaped_name,
|
||||
"foo_1"
|
||||
);
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo_3".intern()).unescaped_name,
|
||||
"foo_3"
|
||||
);
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo".intern()).unescaped_name,
|
||||
"foo_2"
|
||||
);
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo".intern()).unescaped_name,
|
||||
"foo_4"
|
||||
);
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo_0".intern()).unescaped_name,
|
||||
"foo_0_2"
|
||||
);
|
||||
assert_eq!(
|
||||
&*scope.new_identifier("foo_1".intern()).unescaped_name,
|
||||
"foo_1_2"
|
||||
);
|
||||
for i in 5..1000u64 {
|
||||
// verify it actually picks the next available identifier with no skips or duplicates
|
||||
assert_eq!(
|
||||
*scope.new_identifier("foo".intern()).unescaped_name,
|
||||
format!("foo_{i}"),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
@ -2,8 +2,9 @@
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
intern::{Intern, Interned},
|
||||
util::{DebugAsDisplay, HashMap},
|
||||
util::DebugAsDisplay,
|
||||
};
|
||||
use hashbrown::HashMap;
|
||||
use std::{cell::RefCell, fmt, num::NonZeroUsize, panic, path::Path};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
|
|
@ -96,7 +97,7 @@ impl NormalizeFilesForTestsState {
|
|||
fn new() -> Self {
|
||||
Self {
|
||||
test_position: panic::Location::caller(),
|
||||
file_pattern_matches: HashMap::default(),
|
||||
file_pattern_matches: HashMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -142,7 +143,7 @@ impl From<&'_ panic::Location<'_>> for SourceLocation {
|
|||
map.entry_ref(file)
|
||||
.or_insert_with(|| NormalizedFileForTestState {
|
||||
file_name_id: NonZeroUsize::new(len + 1).unwrap(),
|
||||
positions_map: HashMap::default(),
|
||||
positions_map: HashMap::new(),
|
||||
});
|
||||
file_str = m.generate_file_name(file_state.file_name_id);
|
||||
file = &file_str;
@ -1,54 +1,25 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
build::{
|
||||
BaseJobArgs, BaseJobKind, GlobalParams, JobArgsAndDependencies, JobKindAndArgs, JobParams,
|
||||
NoArgs, RunBuild,
|
||||
external::{ExternalCommandArgs, ExternalCommandJobKind},
|
||||
firrtl::{FirrtlArgs, FirrtlJobKind},
|
||||
formal::{Formal, FormalAdditionalArgs, FormalArgs, WriteSbyFileJobKind},
|
||||
verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind},
|
||||
},
|
||||
bundle::BundleType,
|
||||
cli::{FormalArgs, FormalMode, FormalOutput, RunPhase},
|
||||
firrtl::ExportOptions,
|
||||
module::Module,
|
||||
util::HashMap,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use clap::Parser;
|
||||
use hashbrown::HashMap;
|
||||
use serde::Deserialize;
|
||||
use std::{
|
||||
fmt::{self, Write},
|
||||
fmt::Write,
|
||||
path::{Path, PathBuf},
|
||||
process::Command,
|
||||
sync::{Mutex, OnceLock},
|
||||
};
|
||||
|
||||
#[derive(
|
||||
clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize,
|
||||
)]
|
||||
#[non_exhaustive]
|
||||
pub enum FormalMode {
|
||||
#[default]
|
||||
BMC,
|
||||
Prove,
|
||||
Live,
|
||||
Cover,
|
||||
}
|
||||
|
||||
impl FormalMode {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
FormalMode::BMC => "bmc",
|
||||
FormalMode::Prove => "prove",
|
||||
FormalMode::Live => "live",
|
||||
FormalMode::Cover => "cover",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FormalMode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
fn assert_formal_helper() -> FormalArgs {
|
||||
static FORMAL_ARGS: OnceLock<FormalArgs> = OnceLock::new();
|
||||
// ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads
|
||||
FORMAL_ARGS
|
||||
.get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"]))
|
||||
.clone()
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
|
|
@ -116,7 +87,7 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
|||
let index = *DIRS
|
||||
.lock()
|
||||
.unwrap()
|
||||
.get_or_insert_with(HashMap::default)
|
||||
.get_or_insert_with(HashMap::new)
|
||||
.entry_ref(&dir)
|
||||
.and_modify(|v| *v += 1)
|
||||
.or_insert(0);
|
||||
|
|
@ -126,99 +97,26 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
|||
.join(dir)
|
||||
}
|
||||
|
||||
fn make_assert_formal_args(
|
||||
test_name: &dyn std::fmt::Display,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
|
||||
let args = JobKindAndArgs {
|
||||
kind: BaseJobKind,
|
||||
args: BaseJobArgs::from_output_dir_and_env(get_assert_formal_target_path(&test_name), None),
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies {
|
||||
args,
|
||||
dependencies: (),
|
||||
};
|
||||
let args = JobKindAndArgs {
|
||||
kind: FirrtlJobKind,
|
||||
args: FirrtlArgs { export_options },
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: ExternalCommandJobKind::new(),
|
||||
args: ExternalCommandArgs::resolve_program_path(
|
||||
None,
|
||||
UnadjustedVerilogArgs {
|
||||
firtool_extra_args: vec![],
|
||||
verilog_dialect: None,
|
||||
verilog_debug: true,
|
||||
},
|
||||
)?,
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: VerilogJobKind,
|
||||
args: VerilogJobArgs {},
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: WriteSbyFileJobKind,
|
||||
args: FormalArgs {
|
||||
sby_extra_args: vec![],
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(),
|
||||
smtbmc_extra_args: vec![],
|
||||
},
|
||||
};
|
||||
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||
let args = JobKindAndArgs {
|
||||
kind: ExternalCommandJobKind::new(),
|
||||
args: ExternalCommandArgs::resolve_program_path(None, FormalAdditionalArgs {})?,
|
||||
};
|
||||
Ok(JobArgsAndDependencies { args, dependencies })
|
||||
}
|
||||
|
||||
pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) -> eyre::Result<()> {
|
||||
const APP_NAME: &'static str = "fayalite::testing::assert_formal";
|
||||
make_assert_formal_args(
|
||||
&test_name,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
solver,
|
||||
export_options,
|
||||
)?
|
||||
.run_without_platform(
|
||||
|NoArgs {}| Ok(JobParams::new(module)),
|
||||
&GlobalParams::new(None, APP_NAME),
|
||||
)
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||
pub fn assert_formal<M>(
|
||||
test_name: impl std::fmt::Display,
|
||||
module: M,
|
||||
formal_mode: FormalMode,
|
||||
formal_depth: u64,
|
||||
mode: FormalMode,
|
||||
depth: u64,
|
||||
solver: Option<&str>,
|
||||
export_options: ExportOptions,
|
||||
) {
|
||||
try_assert_formal(
|
||||
test_name,
|
||||
module,
|
||||
formal_mode,
|
||||
formal_depth,
|
||||
solver,
|
||||
export_options,
|
||||
)
|
||||
.expect("testing::assert_formal() failed");
|
||||
) where
|
||||
FormalArgs: RunPhase<M, Output = FormalOutput>,
|
||||
{
|
||||
let mut args = assert_formal_helper();
|
||||
args.verilog.firrtl.base.redirect_output_for_rust_test = true;
|
||||
args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name));
|
||||
args.verilog.firrtl.export_options = export_options;
|
||||
args.verilog.debug = true;
|
||||
args.mode = mode;
|
||||
args.depth = depth;
|
||||
if let Some(solver) = solver {
|
||||
args.solver = solver.into();
|
||||
}
|
||||
args.run(module).expect("testing::assert_formal() failed");
|
||||
}
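A hedged usage sketch, not part of this diff: with either assert_formal signature shown above, a proof test reads roughly as follows. my_fifo_module() is a hypothetical module constructor, and ExportOptions::default() assumes a Default impl exists.

    #[test]
    fn fifo_never_drops_data() {
        assert_formal(
            "fifo_never_drops_data",
            my_fifo_module(),         // hypothetical constructor for the module under proof
            FormalMode::BMC,          // bounded model check; the other modes are Prove, Live, Cover
            20,                       // formal depth (number of steps to unroll)
            None,                     // None selects the default solver
            ExportOptions::default(), // assumed Default impl; otherwise build the options explicitly
        );
    }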
@ -7,27 +7,12 @@ use crate::{
clock::Clock,
|
||||
enum_::Enum,
|
||||
expr::Expr,
|
||||
int::{Bool, SInt, UInt, UIntValue},
|
||||
int::{Bool, SInt, UInt},
|
||||
intern::{Intern, Interned},
|
||||
phantom_const::PhantomConst,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
sim::value::{DynSimOnlyValue, DynSimOnly, SimValue, ToSimValueWithType},
|
||||
source_location::SourceLocation,
|
||||
util::{ConstUsize, slice_range, try_slice_range},
|
||||
};
|
||||
use bitvec::{slice::BitSlice, vec::BitVec};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::DeserializeOwned};
|
||||
use std::{
|
||||
fmt,
|
||||
hash::Hash,
|
||||
iter::{FusedIterator, Sum},
|
||||
marker::PhantomData,
|
||||
mem,
|
||||
ops::{Add, AddAssign, Bound, Index, Mul, MulAssign, Range, Sub, SubAssign},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
pub(crate) mod serde_impls;
|
||||
use std::{fmt, hash::Hash, iter::FusedIterator, ops::Index};
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
|
||||
#[non_exhaustive]
|
||||
|
|
@ -36,23 +21,6 @@ pub struct TypeProperties {
|
|||
pub is_storable: bool,
|
||||
pub is_castable_from_bits: bool,
|
||||
pub bit_width: usize,
|
||||
pub sim_only_values_len: usize,
|
||||
}
|
||||
|
||||
impl TypeProperties {
|
||||
pub const fn size(self) -> OpaqueSimValueSize {
|
||||
let Self {
|
||||
is_passive: _,
|
||||
is_storable: _,
|
||||
is_castable_from_bits: _,
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
|
||||
|
|
@ -67,8 +35,6 @@ pub enum CanonicalType {
|
|||
SyncReset(SyncReset),
|
||||
Reset(Reset),
|
||||
Clock(Clock),
|
||||
PhantomConst(PhantomConst),
|
||||
DynSimOnly(DynSimOnly),
|
||||
}
|
||||
|
||||
impl fmt::Debug for CanonicalType {
|
||||
|
|
@ -84,30 +50,10 @@ impl fmt::Debug for CanonicalType {
|
|||
Self::SyncReset(v) => v.fmt(f),
|
||||
Self::Reset(v) => v.fmt(f),
|
||||
Self::Clock(v) => v.fmt(f),
|
||||
Self::PhantomConst(v) => v.fmt(f),
|
||||
Self::DynSimOnly(v) => v.fmt(f),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for CanonicalType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serde_impls::SerdeCanonicalType::from(*self).serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for CanonicalType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
Ok(serde_impls::SerdeCanonicalType::deserialize(deserializer)?.into())
|
||||
}
|
||||
}
|
||||
|
||||
impl CanonicalType {
|
||||
pub fn type_properties(self) -> TypeProperties {
|
||||
match self {
|
||||
|
|
@ -121,8 +67,6 @@ impl CanonicalType {
|
|||
CanonicalType::SyncReset(v) => v.type_properties(),
|
||||
CanonicalType::Reset(v) => v.type_properties(),
|
||||
CanonicalType::Clock(v) => v.type_properties(),
|
||||
CanonicalType::PhantomConst(v) => v.type_properties(),
|
||||
CanonicalType::DynSimOnly(v) => v.type_properties(),
|
||||
}
|
||||
}
|
||||
pub fn is_passive(self) -> bool {
|
||||
|
|
@ -137,12 +81,6 @@ impl CanonicalType {
|
|||
pub fn bit_width(self) -> usize {
|
||||
self.type_properties().bit_width
|
||||
}
|
||||
pub fn sim_only_values_len(self) -> usize {
|
||||
self.type_properties().sim_only_values_len
|
||||
}
|
||||
pub fn size(self) -> OpaqueSimValueSize {
|
||||
self.type_properties().size()
|
||||
}
|
||||
pub fn can_connect(self, rhs: Self) -> bool {
|
||||
match self {
|
||||
CanonicalType::UInt(lhs) => {
|
||||
|
|
@ -205,23 +143,8 @@ impl CanonicalType {
|
|||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
CanonicalType::PhantomConst(lhs) => {
|
||||
let CanonicalType::PhantomConst(rhs) = rhs else {
|
||||
return false;
|
||||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
CanonicalType::DynSimOnly(lhs) => {
|
||||
let CanonicalType::DynSimOnly(rhs) = rhs else {
|
||||
return false;
|
||||
};
|
||||
lhs.can_connect(rhs)
|
||||
}
|
||||
}
|
||||
}
|
||||
pub(crate) fn as_serde_unexpected_str(self) -> &'static str {
|
||||
serde_impls::SerdeCanonicalType::from(self).as_serde_unexpected_str()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait MatchVariantAndInactiveScope: Sized {
|
||||
|
|
@ -243,7 +166,7 @@ impl<T: 'static + Send + Sync> MatchVariantAndInactiveScope for MatchVariantWith
|
|||
}
|
||||
|
||||
pub trait FillInDefaultedGenerics {
|
||||
type Type;
|
||||
type Type: Type;
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type;
|
||||
}
|
||||
|
||||
|
|
@ -255,22 +178,6 @@ impl<T: Type> FillInDefaultedGenerics for T {
|
|||
}
|
||||
}
|
||||
|
||||
impl FillInDefaultedGenerics for usize {
|
||||
type Type = usize;
|
||||
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<const V: usize> FillInDefaultedGenerics for ConstUsize<V> {
|
||||
type Type = ConstUsize<V>;
|
||||
|
||||
fn fill_in_defaulted_generics(self) -> Self::Type {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
mod sealed {
|
||||
pub trait TypeOrDefaultSealed {}
|
||||
pub trait BaseTypeSealed {}
|
||||
|
|
@ -288,34 +195,6 @@ macro_rules! impl_base_type {
|
|||
};
|
||||
}
|
||||
|
||||
macro_rules! impl_base_type_serde {
|
||||
($name:ident, $expected:literal) => {
|
||||
impl Serialize for $name {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.canonical().serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for $name {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
match CanonicalType::deserialize(deserializer)? {
|
||||
CanonicalType::$name(retval) => Ok(retval),
|
||||
ty => Err(serde::de::Error::invalid_value(
|
||||
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
|
||||
&$expected,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl_base_type!(UInt);
|
||||
impl_base_type!(SInt);
|
||||
impl_base_type!(Bool);
|
||||
|
|
@ -326,16 +205,6 @@ impl_base_type!(AsyncReset);
|
|||
impl_base_type!(SyncReset);
|
||||
impl_base_type!(Reset);
|
||||
impl_base_type!(Clock);
|
||||
impl_base_type!(PhantomConst);
|
||||
impl_base_type!(DynSimOnly);
|
||||
|
||||
impl_base_type_serde!(Bool, "a Bool");
|
||||
impl_base_type_serde!(Enum, "an Enum");
|
||||
impl_base_type_serde!(Bundle, "a Bundle");
|
||||
impl_base_type_serde!(AsyncReset, "an AsyncReset");
|
||||
impl_base_type_serde!(SyncReset, "a SyncReset");
|
||||
impl_base_type_serde!(Reset, "a Reset");
|
||||
impl_base_type_serde!(Clock, "a Clock");
|
||||
|
||||
impl sealed::BaseTypeSealed for CanonicalType {}
|
||||
|
||||
|
|
@ -371,48 +240,26 @@ pub trait Type:
|
|||
{
|
||||
type BaseType: BaseType;
|
||||
type MaskType: Type<MaskType = Self::MaskType>;
|
||||
type SimValue: fmt::Debug + Clone + 'static + ToSimValueWithType<Self>;
|
||||
type MatchVariant: 'static + Send + Sync;
|
||||
type MatchActiveScope;
|
||||
type MatchVariantAndInactiveScope: MatchVariantAndInactiveScope<
|
||||
MatchVariant = Self::MatchVariant,
|
||||
MatchActiveScope = Self::MatchActiveScope,
|
||||
>;
|
||||
MatchVariant = Self::MatchVariant,
|
||||
MatchActiveScope = Self::MatchActiveScope,
|
||||
>;
|
||||
type MatchVariantsIter: Iterator<Item = Self::MatchVariantAndInactiveScope>
|
||||
+ ExactSizeIterator
|
||||
+ FusedIterator
|
||||
+ DoubleEndedIterator;
|
||||
#[track_caller]
|
||||
fn match_variants(this: Expr<Self>, source_location: SourceLocation)
|
||||
-> Self::MatchVariantsIter;
|
||||
-> Self::MatchVariantsIter;
|
||||
fn mask_type(&self) -> Self::MaskType;
|
||||
fn canonical(&self) -> CanonicalType;
|
||||
fn from_canonical(canonical_type: CanonicalType) -> Self;
|
||||
fn source_location() -> SourceLocation;
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue;
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
);
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w>;
|
||||
}
|
||||
|
||||
pub trait BaseType:
|
||||
Type<
|
||||
BaseType = Self,
|
||||
MaskType: Serialize + DeserializeOwned,
|
||||
SimValue: Serialize + DeserializeOwned,
|
||||
> + sealed::BaseTypeSealed
|
||||
+ Into<CanonicalType>
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
{
|
||||
}
|
||||
pub trait BaseType: Type<BaseType = Self> + sealed::BaseTypeSealed + Into<CanonicalType> {}
|
||||
|
||||
macro_rules! impl_match_variant_as_self {
|
||||
() => {
|
||||
|
|
@ -439,7 +286,6 @@ pub trait TypeWithDeref: Type {
|
|||
impl Type for CanonicalType {
|
||||
type BaseType = CanonicalType;
|
||||
type MaskType = CanonicalType;
|
||||
type SimValue = OpaqueSimValue;
|
||||
impl_match_variant_as_self!();
|
||||
fn mask_type(&self) -> Self::MaskType {
|
||||
match self {
|
||||
|
|
@ -453,8 +299,6 @@ impl Type for CanonicalType {
|
|||
CanonicalType::SyncReset(v) => v.mask_type().canonical(),
|
||||
CanonicalType::Reset(v) => v.mask_type().canonical(),
|
||||
CanonicalType::Clock(v) => v.mask_type().canonical(),
|
||||
CanonicalType::PhantomConst(v) => v.mask_type().canonical(),
|
||||
CanonicalType::DynSimOnly(v) => v.mask_type().canonical(),
|
||||
}
|
||||
}
|
||||
fn canonical(&self) -> CanonicalType {
|
||||
|
|
@ -466,636 +310,9 @@ impl Type for CanonicalType {
|
|||
fn source_location() -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
opaque.to_owned()
|
||||
}
|
||||
fn sim_value_clone_from_opaque(
|
||||
&self,
|
||||
value: &mut Self::SimValue,
|
||||
opaque: OpaqueSimValueSlice<'_>,
|
||||
) {
|
||||
assert_eq!(self.type_properties().size(), opaque.size());
|
||||
assert_eq!(value.size(), opaque.size());
|
||||
value.clone_from_slice(opaque);
|
||||
}
|
||||
fn sim_value_to_opaque<'w>(
|
||||
&self,
|
||||
value: &Self::SimValue,
|
||||
writer: OpaqueSimValueWriter<'w>,
|
||||
) -> OpaqueSimValueWritten<'w> {
|
||||
assert_eq!(self.type_properties().size(), writer.size());
|
||||
assert_eq!(value.size(), writer.size());
|
||||
writer.fill_cloned_from_slice(value.as_slice())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Default)]
|
||||
#[non_exhaustive]
|
||||
pub struct OpaqueSimValueSizeRange {
|
||||
pub bit_width: Range<usize>,
|
||||
pub sim_only_values_len: Range<usize>,
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSizeRange {
|
||||
pub fn start(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width.start,
|
||||
sim_only_values_len: self.sim_only_values_len.start,
|
||||
}
|
||||
}
|
||||
pub fn end(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width.end,
|
||||
sim_only_values_len: self.sim_only_values_len.end,
|
||||
}
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
let Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
bit_width.is_empty() && sim_only_values_len.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Range<OpaqueSimValueSize>> for OpaqueSimValueSizeRange {
|
||||
fn from(value: Range<OpaqueSimValueSize>) -> Self {
|
||||
Self {
|
||||
bit_width: value.start.bit_width..value.end.bit_width,
|
||||
sim_only_values_len: value.start.sim_only_values_len..value.end.sim_only_values_len,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<OpaqueSimValueSizeRange> for Range<OpaqueSimValueSize> {
|
||||
fn from(value: OpaqueSimValueSizeRange) -> Self {
|
||||
value.start()..value.end()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait OpaqueSimValueSizeRangeBounds {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize>;
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize>;
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSizeRangeBounds for OpaqueSimValueSizeRange {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
Bound::Included(self.start())
|
||||
}
|
||||
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
Bound::Excluded(self.end())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + std::ops::RangeBounds<OpaqueSimValueSize>> OpaqueSimValueSizeRangeBounds for T {
|
||||
fn start_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
std::ops::RangeBounds::start_bound(self).cloned()
|
||||
}
|
||||
fn end_bound(&self) -> Bound<OpaqueSimValueSize> {
|
||||
std::ops::RangeBounds::end_bound(self).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Default)]
|
||||
#[non_exhaustive]
|
||||
pub struct OpaqueSimValueSize {
|
||||
pub bit_width: usize,
|
||||
pub sim_only_values_len: usize,
|
||||
}
|
||||
|
||||
impl OpaqueSimValueSize {
|
||||
pub const fn from_bit_width(bit_width: usize) -> Self {
|
||||
Self::from_bit_width_and_sim_only_values_len(bit_width, 0)
|
||||
}
|
||||
pub const fn from_bit_width_and_sim_only_values_len(
|
||||
bit_width: usize,
|
||||
sim_only_values_len: usize,
|
||||
) -> Self {
|
||||
Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub const fn only_bit_width(self) -> Option<usize> {
|
||||
if let Self {
|
||||
bit_width,
|
||||
sim_only_values_len: 0,
|
||||
} = self
|
||||
{
|
||||
Some(bit_width)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
pub const fn empty() -> Self {
|
||||
Self {
|
||||
bit_width: 0,
|
||||
sim_only_values_len: 0,
|
||||
}
|
||||
}
|
||||
pub const fn is_empty(self) -> bool {
|
||||
let Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = self;
|
||||
bit_width == 0 && sim_only_values_len == 0
|
||||
}
|
||||
pub const fn checked_mul(self, factor: usize) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_mul(factor) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self.sim_only_values_len.checked_mul(factor) else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_add(rhs.bit_width) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self
|
||||
.sim_only_values_len
|
||||
.checked_add(rhs.sim_only_values_len)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
|
||||
let Some(bit_width) = self.bit_width.checked_sub(rhs.bit_width) else {
|
||||
return None;
|
||||
};
|
||||
let Some(sim_only_values_len) = self
|
||||
.sim_only_values_len
|
||||
.checked_sub(rhs.sim_only_values_len)
|
||||
else {
|
||||
return None;
|
||||
};
|
||||
Some(Self {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub fn try_slice_range<R: OpaqueSimValueSizeRangeBounds>(
|
||||
self,
|
||||
range: R,
|
||||
) -> Option<OpaqueSimValueSizeRange> {
|
||||
let start = range.start_bound();
|
||||
let end = range.end_bound();
|
||||
let bit_width = try_slice_range(
|
||||
(start.map(|v| v.bit_width), end.map(|v| v.bit_width)),
|
||||
self.bit_width,
|
||||
)?;
|
||||
let sim_only_values_len = try_slice_range(
|
||||
(
|
||||
start.map(|v| v.sim_only_values_len),
|
||||
end.map(|v| v.sim_only_values_len),
|
||||
),
|
||||
self.sim_only_values_len,
|
||||
)?;
|
||||
Some(OpaqueSimValueSizeRange {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
})
|
||||
}
|
||||
pub fn slice_range<R: OpaqueSimValueSizeRangeBounds>(
|
||||
self,
|
||||
range: R,
|
||||
) -> OpaqueSimValueSizeRange {
|
||||
self.try_slice_range(range).expect("range out of bounds")
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<usize> for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn mul(self, rhs: usize) -> Self::Output {
|
||||
self.checked_mul(rhs).expect("multiplication overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Mul<OpaqueSimValueSize> for usize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn mul(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
rhs.checked_mul(self).expect("multiplication overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn add(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
rhs.checked_add(self).expect("addition overflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for OpaqueSimValueSize {
|
||||
type Output = OpaqueSimValueSize;
|
||||
|
||||
fn sub(self, rhs: OpaqueSimValueSize) -> Self::Output {
|
||||
self.checked_sub(rhs).expect("subtraction underflowed")
|
||||
}
|
||||
}
|
||||
|
||||
impl MulAssign<usize> for OpaqueSimValueSize {
|
||||
fn mul_assign(&mut self, rhs: usize) {
|
||||
*self = *self * rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for OpaqueSimValueSize {
|
||||
fn add_assign(&mut self, rhs: OpaqueSimValueSize) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign for OpaqueSimValueSize {
|
||||
fn sub_assign(&mut self, rhs: OpaqueSimValueSize) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Sum for OpaqueSimValueSize {
|
||||
fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
|
||||
iter.fold(OpaqueSimValueSize::empty(), Add::add)
|
||||
}
|
||||
}
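A minimal sketch, not from the diff, of how the componentwise size arithmetic above composes (the fayalite::ty::OpaqueSimValueSize path is assumed):

    // bit widths and sim-only-value counts are tracked and added independently
    let a = OpaqueSimValueSize::from_bit_width(8);
    let b = OpaqueSimValueSize::from_bit_width_and_sim_only_values_len(4, 1);
    let total: OpaqueSimValueSize = [a, b].into_iter().sum();
    assert_eq!(
        total,
        OpaqueSimValueSize::from_bit_width_and_sim_only_values_len(12, 1)
    );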
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||
pub struct OpaqueSimValue {
|
||||
bits: UIntValue,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
}
|
||||
|
||||
impl OpaqueSimValue {
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Default::default()),
|
||||
sim_only_values: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn with_capacity(capacity: OpaqueSimValueSize) -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Arc::new(BitVec::with_capacity(capacity.bit_width))),
|
||||
sim_only_values: Vec::with_capacity(capacity.sim_only_values_len),
|
||||
}
|
||||
}
|
||||
pub fn size(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bits.width(),
|
||||
sim_only_values_len: self.sim_only_values.len(),
|
||||
}
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bits.width()
|
||||
}
|
||||
pub fn bits(&self) -> &UIntValue {
|
||||
&self.bits
|
||||
}
|
||||
pub fn bits_mut(&mut self) -> &mut UIntValue {
|
||||
&mut self.bits
|
||||
}
|
||||
pub fn into_bits(self) -> UIntValue {
|
||||
self.bits
|
||||
}
|
||||
pub fn from_bits(bits: UIntValue) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values: Vec::new(),
|
||||
}
|
||||
}
|
||||
pub fn from_bitslice(v: &BitSlice) -> Self {
|
||||
Self::from_bitslice_and_sim_only_values(v, Vec::new())
|
||||
}
|
||||
pub fn from_bitslice_and_sim_only_values(
|
||||
bits: &BitSlice,
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
) -> Self {
|
||||
Self {
|
||||
bits: UIntValue::new(Arc::new(bits.to_bitvec())),
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn from_bits_and_sim_only_values(
|
||||
bits: UIntValue,
|
||||
sim_only_values: Vec<DynSimOnlyValue>,
|
||||
) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn into_parts(self) -> (UIntValue, Vec<DynSimOnlyValue>) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
(bits, sim_only_values)
|
||||
}
|
||||
pub fn parts_mut(&mut self) -> (&mut UIntValue, &mut Vec<DynSimOnlyValue>) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
(bits, sim_only_values)
|
||||
}
|
||||
pub fn sim_only_values(&self) -> &[DynSimOnlyValue] {
|
||||
&self.sim_only_values
|
||||
}
|
||||
pub fn sim_only_values_mut(&mut self) -> &mut Vec<DynSimOnlyValue> {
|
||||
&mut self.sim_only_values
|
||||
}
|
||||
pub fn as_slice(&self) -> OpaqueSimValueSlice<'_> {
|
||||
OpaqueSimValueSlice {
|
||||
bits: self.bits.bits(),
|
||||
sim_only_values: &self.sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn slice<R: OpaqueSimValueSizeRangeBounds>(&self, range: R) -> OpaqueSimValueSlice<'_> {
|
||||
self.as_slice().slice(range)
|
||||
}
|
||||
pub fn rewrite_with<F>(&mut self, target_size: OpaqueSimValueSize, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
OpaqueSimValueWriter::rewrite_with(target_size, self, f);
|
||||
}
|
||||
pub fn clone_from_slice(&mut self, slice: OpaqueSimValueSlice<'_>) {
|
||||
let OpaqueSimValueSlice {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = slice;
|
||||
self.bits.bits_mut().copy_from_bitslice(bits);
|
||||
self.sim_only_values.clone_from_slice(sim_only_values);
|
||||
}
|
||||
pub fn extend_from_slice(&mut self, slice: OpaqueSimValueSlice<'_>) {
|
||||
let OpaqueSimValueSlice {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = slice;
|
||||
self.bits.bitvec_mut().extend_from_bitslice(bits);
|
||||
self.sim_only_values.extend_from_slice(sim_only_values);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Extend<OpaqueSimValueSlice<'a>> for OpaqueSimValue {
|
||||
fn extend<T: IntoIterator<Item = OpaqueSimValueSlice<'a>>>(&mut self, iter: T) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
let bits = bits.bitvec_mut();
|
||||
for slice in iter {
|
||||
bits.extend_from_bitslice(slice.bits);
|
||||
sim_only_values.extend_from_slice(slice.sim_only_values);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Extend<OpaqueSimValue> for OpaqueSimValue {
|
||||
fn extend<T: IntoIterator<Item = OpaqueSimValue>>(&mut self, iter: T) {
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
} = self;
|
||||
let bits = bits.bitvec_mut();
|
||||
for value in iter {
|
||||
bits.extend_from_bitslice(value.bits().bits());
|
||||
sim_only_values.extend_from_slice(value.sim_only_values());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Type<SimValue = OpaqueSimValue>> ToSimValueWithType<T> for OpaqueSimValue {
|
||||
fn to_sim_value_with_type(&self, ty: T) -> SimValue<T> {
|
||||
SimValue::from_value(ty, self.clone())
|
||||
}
|
||||
fn into_sim_value_with_type(self, ty: T) -> SimValue<T> {
|
||||
SimValue::from_value(ty, self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub struct OpaqueSimValueSlice<'a> {
|
||||
bits: &'a BitSlice,
|
||||
sim_only_values: &'a [DynSimOnlyValue],
|
||||
}
|
||||
|
||||
impl<'a> Default for OpaqueSimValueSlice<'a> {
|
||||
fn default() -> Self {
|
||||
Self::empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> OpaqueSimValueSlice<'a> {
|
||||
pub fn from_parts(bits: &'a BitSlice, sim_only_values: &'a [DynSimOnlyValue]) -> Self {
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
}
|
||||
}
|
||||
pub fn from_bitslice(bits: &'a BitSlice) -> Self {
|
||||
Self::from_parts(bits, &[])
|
||||
}
|
||||
pub fn empty() -> Self {
|
||||
Self {
|
||||
bits: BitSlice::empty(),
|
||||
sim_only_values: &[],
|
||||
}
|
||||
}
|
||||
pub fn size(self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width(),
|
||||
sim_only_values_len: self.sim_only_values_len(),
|
||||
}
|
||||
}
|
||||
pub fn is_empty(self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn bit_width(self) -> usize {
|
||||
self.bits.len()
|
||||
}
|
||||
pub fn bits(self) -> &'a BitSlice {
|
||||
self.bits
|
||||
}
|
||||
pub fn sim_only_values(self) -> &'a [DynSimOnlyValue] {
|
||||
self.sim_only_values
|
||||
}
|
||||
pub fn sim_only_values_len(self) -> usize {
|
||||
self.sim_only_values.len()
|
||||
}
|
||||
pub fn to_owned(self) -> OpaqueSimValue {
|
||||
OpaqueSimValue::from_bitslice_and_sim_only_values(self.bits, self.sim_only_values.to_vec())
|
||||
}
|
||||
pub fn slice<R: OpaqueSimValueSizeRangeBounds>(self, range: R) -> OpaqueSimValueSlice<'a> {
|
||||
let start = range.start_bound();
|
||||
let end = range.end_bound();
|
||||
let bits_range = slice_range(
|
||||
(start.map(|v| v.bit_width), end.map(|v| v.bit_width)),
|
||||
self.bit_width(),
|
||||
);
|
||||
let sim_only_values_range = slice_range(
|
||||
(start.map(|v| v.sim_only_values_len), end.map(|v| v.sim_only_values_len)),
|
||||
self.sim_only_values_len(),
|
||||
);
|
||||
Self {
|
||||
bits: &self.bits[bits_range],
|
||||
sim_only_values: &self.sim_only_values[sim_only_values_range],
|
||||
}
|
||||
}
|
||||
pub fn split_at(self, index: OpaqueSimValueSize) -> (Self, Self) {
|
||||
let bits = self.bits.split_at(index.bit_width);
|
||||
let sim_only_values = self.sim_only_values.split_at(index.sim_only_values_len);
|
||||
(
|
||||
Self {
|
||||
bits: bits.0,
|
||||
sim_only_values: sim_only_values.0,
|
||||
},
|
||||
Self {
|
||||
bits: bits.1,
|
||||
sim_only_values: sim_only_values.1,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct OpaqueSimValueWriter<'a> {
|
||||
bits: &'a mut BitSlice,
|
||||
sim_only_values: &'a mut Vec<DynSimOnlyValue>,
|
||||
sim_only_values_range: std::ops::Range<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct OpaqueSimValueWritten<'a> {
|
||||
_phantom: PhantomData<&'a ()>,
|
||||
}
|
||||
|
||||
impl<'a> OpaqueSimValueWriter<'a> {
|
||||
pub fn sim_only_values_range(&self) -> std::ops::Range<usize> {
|
||||
self.sim_only_values_range.clone()
|
||||
}
|
||||
pub fn rewrite_with<F>(target_size: OpaqueSimValueSize, value: &mut OpaqueSimValue, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
let OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
} = f(OpaqueSimValueWriter::rewrite_helper(target_size, value));
|
||||
}
|
||||
pub(crate) fn rewrite_helper(
|
||||
target_size: OpaqueSimValueSize,
|
||||
value: &'a mut OpaqueSimValue,
|
||||
) -> Self {
|
||||
let (bits, sim_only_values) = value.parts_mut();
|
||||
let OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = target_size;
|
||||
let bits = bits.bitvec_mut();
|
||||
bits.resize(bit_width, false);
|
||||
sim_only_values.truncate(sim_only_values_len);
|
||||
sim_only_values.reserve_exact(sim_only_values_len - sim_only_values.len());
|
||||
Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range: 0..sim_only_values_len,
|
||||
}
|
||||
}
|
||||
pub fn size(&self) -> OpaqueSimValueSize {
|
||||
OpaqueSimValueSize {
|
||||
bit_width: self.bit_width(),
|
||||
sim_only_values_len: self.sim_only_values_len(),
|
||||
}
|
||||
}
|
||||
pub fn bit_width(&self) -> usize {
|
||||
self.bits.len()
|
||||
}
|
||||
pub fn sim_only_values_len(&self) -> usize {
|
||||
self.sim_only_values_range.len()
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.size().is_empty()
|
||||
}
|
||||
pub fn fill_cloned_from_slice(
|
||||
self,
|
||||
slice: OpaqueSimValueSlice<'_>,
|
||||
) -> OpaqueSimValueWritten<'a> {
|
||||
assert_eq!(self.size(), slice.size());
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range,
|
||||
} = self;
|
||||
bits.copy_from_bitslice(slice.bits);
|
||||
let (clone_from_src, clone_src) = slice.sim_only_values.split_at(
|
||||
(sim_only_values.len() - sim_only_values_range.start).min(slice.sim_only_values.len()),
|
||||
);
|
||||
sim_only_values[sim_only_values_range.start..][..clone_from_src.len()]
|
||||
.clone_from_slice(clone_from_src);
|
||||
sim_only_values.extend_from_slice(clone_src);
|
||||
OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn fill_with_bits_with<F: FnOnce(&mut BitSlice)>(self, f: F) -> OpaqueSimValueWritten<'a> {
|
||||
assert!(self.size().only_bit_width().is_some());
|
||||
let Self {
|
||||
bits,
|
||||
sim_only_values,
|
||||
sim_only_values_range,
|
||||
} = self;
|
||||
f(bits);
|
||||
assert_eq!(sim_only_values.len(), sim_only_values_range.end);
|
||||
OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
}
|
||||
}
|
||||
pub fn fill_with_zeros(self) -> OpaqueSimValueWritten<'a> {
|
||||
assert!(
|
||||
self.size().only_bit_width().is_some(),
|
||||
"can't fill things other than bits with zeros",
|
||||
);
|
||||
self.fill_with_bits_with(|bits| bits.fill(false))
|
||||
}
|
||||
pub fn fill_prefix_with<F>(&mut self, prefix_size: OpaqueSimValueSize, f: F)
|
||||
where
|
||||
F: for<'b> FnOnce(OpaqueSimValueWriter<'b>) -> OpaqueSimValueWritten<'b>, // 'b is used as a brand
|
||||
{
|
||||
let OpaqueSimValueSize {
|
||||
bit_width,
|
||||
sim_only_values_len,
|
||||
} = prefix_size;
|
||||
assert!(bit_width <= self.bit_width());
|
||||
assert!(sim_only_values_len <= self.sim_only_values_len());
|
||||
let next_start = self.sim_only_values_range.start + sim_only_values_len;
|
||||
let OpaqueSimValueWritten {
|
||||
_phantom: PhantomData,
|
||||
} = f(OpaqueSimValueWriter {
|
||||
bits: &mut self.bits[..bit_width],
|
||||
sim_only_values: self.sim_only_values,
|
||||
sim_only_values_range: self.sim_only_values_range.start..next_start,
|
||||
});
|
||||
assert!(self.sim_only_values.len() >= next_start);
|
||||
self.bits = &mut mem::take(&mut self.bits)[bit_width..];
|
||||
self.sim_only_values_range.start = next_start;
|
||||
}
|
||||
}
|
||||
|
||||
pub trait StaticType: Type + Default {
|
||||
pub trait StaticType: Type {
|
||||
const TYPE: Self;
|
||||
const MASK_TYPE: Self::MaskType;
|
||||
const TYPE_PROPERTIES: TypeProperties;
@ -1,135 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
array::Array,
|
||||
bundle::{Bundle, BundleType},
|
||||
clock::Clock,
|
||||
enum_::{Enum, EnumType},
|
||||
int::{Bool, SInt, UInt},
|
||||
intern::Interned,
|
||||
phantom_const::{PhantomConstCanonicalValue, PhantomConstValue},
|
||||
prelude::PhantomConst,
|
||||
reset::{AsyncReset, Reset, SyncReset},
|
||||
sim::value::DynSimOnly,
|
||||
ty::{BaseType, CanonicalType},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
pub(crate) struct SerdePhantomConst<T>(pub T);
|
||||
|
||||
impl<T: ?Sized + PhantomConstValue> Serialize for SerdePhantomConst<Interned<T>> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
self.0.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for SerdePhantomConst<Interned<T>> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
T::deserialize_value(deserializer).map(Self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[serde(rename = "CanonicalType")]
|
||||
pub(crate) enum SerdeCanonicalType<
|
||||
ArrayElement = CanonicalType,
|
||||
ThePhantomConst = SerdePhantomConst<Interned<PhantomConstCanonicalValue>>,
|
||||
> {
|
||||
UInt {
|
||||
width: usize,
|
||||
},
|
||||
SInt {
|
||||
width: usize,
|
||||
},
|
||||
Bool,
|
||||
Array {
|
||||
element: ArrayElement,
|
||||
len: usize,
|
||||
},
|
||||
Enum {
|
||||
variants: Interned<[crate::enum_::EnumVariant]>,
|
||||
},
|
||||
Bundle {
|
||||
fields: Interned<[crate::bundle::BundleField]>,
|
||||
},
|
||||
AsyncReset,
|
||||
SyncReset,
|
||||
Reset,
|
||||
Clock,
|
||||
PhantomConst(ThePhantomConst),
|
||||
DynSimOnly(DynSimOnly),
|
||||
}
|
||||
|
||||
impl<ArrayElement, PhantomConstInner> SerdeCanonicalType<ArrayElement, PhantomConstInner> {
|
||||
pub(crate) fn as_serde_unexpected_str(&self) -> &'static str {
|
||||
match self {
|
||||
Self::UInt { .. } => "a UInt",
|
||||
Self::SInt { .. } => "a SInt",
|
||||
Self::Bool => "a Bool",
|
||||
Self::Array { .. } => "an Array",
|
||||
Self::Enum { .. } => "an Enum",
|
||||
Self::Bundle { .. } => "a Bundle",
|
||||
Self::AsyncReset => "an AsyncReset",
|
||||
Self::SyncReset => "a SyncReset",
|
||||
Self::Reset => "a Reset",
|
||||
Self::Clock => "a Clock",
|
||||
Self::PhantomConst(_) => "a PhantomConst",
|
||||
Self::DynSimOnly(_) => "a SimOnlyValue",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: BaseType> From<T> for SerdeCanonicalType {
|
||||
fn from(ty: T) -> Self {
|
||||
let ty: CanonicalType = ty.into();
|
||||
match ty {
|
||||
CanonicalType::UInt(ty) => Self::UInt { width: ty.width() },
|
||||
CanonicalType::SInt(ty) => Self::SInt { width: ty.width() },
|
||||
CanonicalType::Bool(Bool {}) => Self::Bool,
|
||||
CanonicalType::Array(ty) => Self::Array {
|
||||
element: ty.element(),
|
||||
len: ty.len(),
|
||||
},
|
||||
CanonicalType::Enum(ty) => Self::Enum {
|
||||
variants: ty.variants(),
|
||||
},
|
||||
CanonicalType::Bundle(ty) => Self::Bundle {
|
||||
fields: ty.fields(),
|
||||
},
|
||||
CanonicalType::AsyncReset(AsyncReset {}) => Self::AsyncReset,
|
||||
CanonicalType::SyncReset(SyncReset {}) => Self::SyncReset,
|
||||
CanonicalType::Reset(Reset {}) => Self::Reset,
|
||||
CanonicalType::Clock(Clock {}) => Self::Clock,
|
||||
CanonicalType::PhantomConst(ty) => Self::PhantomConst(SerdePhantomConst(ty.get())),
|
||||
CanonicalType::DynSimOnly(ty) => Self::DynSimOnly(ty),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SerdeCanonicalType> for CanonicalType {
|
||||
fn from(ty: SerdeCanonicalType) -> Self {
|
||||
match ty {
|
||||
SerdeCanonicalType::UInt { width } => Self::UInt(UInt::new(width)),
|
||||
SerdeCanonicalType::SInt { width } => Self::SInt(SInt::new(width)),
|
||||
SerdeCanonicalType::Bool => Self::Bool(Bool),
|
||||
SerdeCanonicalType::Array { element, len } => Self::Array(Array::new(element, len)),
|
||||
SerdeCanonicalType::Enum { variants } => Self::Enum(Enum::new(variants)),
|
||||
SerdeCanonicalType::Bundle { fields } => Self::Bundle(Bundle::new(fields)),
|
||||
SerdeCanonicalType::AsyncReset => Self::AsyncReset(AsyncReset),
|
||||
SerdeCanonicalType::SyncReset => Self::SyncReset(SyncReset),
|
||||
SerdeCanonicalType::Reset => Self::Reset(Reset),
|
||||
SerdeCanonicalType::Clock => Self::Clock(Clock),
|
||||
SerdeCanonicalType::PhantomConst(value) => {
|
||||
Self::PhantomConst(PhantomConst::new(value.0))
|
||||
}
|
||||
SerdeCanonicalType::DynSimOnly(value) => Self::DynSimOnly(value),
|
||||
}
|
||||
}
|
||||
}
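A small round-trip sketch, not part of the diff, assuming serde_json is available and that CanonicalType's serde impls delegate to SerdeCanonicalType as shown in the ty.rs hunk above (the fayalite::{int::UInt, ty::CanonicalType} paths are assumed):

    // externally tagged enum: a UInt serializes as {"UInt":{"width":8}}
    let ty = CanonicalType::UInt(UInt::new(8));
    let json = serde_json::to_string(&ty).unwrap();
    let back: CanonicalType = serde_json::from_str(&json).unwrap();
    assert_eq!(ty, back);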
@ -1,23 +1,12 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
pub(crate) mod alternating_cell;
|
||||
mod const_bool;
|
||||
mod const_cmp;
|
||||
mod const_usize;
|
||||
mod misc;
|
||||
mod scoped_ref;
|
||||
pub(crate) mod streaming_read_utf8;
|
||||
mod test_hasher;
|
||||
|
||||
// allow easily switching the hasher crate-wide for testing
|
||||
#[cfg(feature = "unstable-test-hasher")]
|
||||
pub type DefaultBuildHasher = test_hasher::DefaultBuildHasher;
|
||||
#[cfg(not(feature = "unstable-test-hasher"))]
|
||||
pub(crate) type DefaultBuildHasher = hashbrown::DefaultHashBuilder;
|
||||
|
||||
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, DefaultBuildHasher>;
|
||||
pub(crate) type HashSet<T> = hashbrown::HashSet<T, DefaultBuildHasher>;
|
||||
|
||||
#[doc(inline)]
|
||||
pub use const_bool::{ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool};
@ -35,14 +24,9 @@ pub use scoped_ref::ScopedRef;
#[doc(inline)]
|
||||
pub use misc::{
|
||||
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
|
||||
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
|
||||
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, os_str_strip_prefix,
|
||||
os_str_strip_suffix, serialize_to_json_ascii, serialize_to_json_ascii_pretty,
|
||||
serialize_to_json_ascii_pretty_with_indent, slice_range, try_slice_range,
|
||||
get_many_mut, interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay,
|
||||
DebugAsRawString, MakeMutSlice, RcWriter,
|
||||
};
|
||||
pub(crate) use misc::{InternedStrCompareAsStr, chain};
|
||||
|
||||
pub mod job_server;
|
||||
pub mod prefix_sum;
|
||||
pub mod ready_valid;
@ -1,122 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::util::DebugAsDisplay;
|
||||
use std::{
|
||||
cell::{Cell, UnsafeCell},
|
||||
fmt,
|
||||
};
|
||||
|
||||
pub(crate) trait AlternatingCellMethods {
|
||||
fn unique_to_shared(&mut self);
|
||||
fn shared_to_unique(&mut self);
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum State {
|
||||
Unique,
|
||||
Shared,
|
||||
Locked,
|
||||
}
|
||||
|
||||
pub(crate) struct AlternatingCell<T: ?Sized> {
|
||||
state: Cell<State>,
|
||||
value: UnsafeCell<T>,
|
||||
}
|
||||
|
||||
impl<T: ?Sized + fmt::Debug + AlternatingCellMethods> fmt::Debug for AlternatingCell<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_tuple("AlternatingCell")
|
||||
.field(
|
||||
self.try_share()
|
||||
.as_ref()
|
||||
.map(|v| -> &dyn fmt::Debug { v })
|
||||
.unwrap_or(&DebugAsDisplay("<...>")),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> AlternatingCell<T> {
|
||||
pub(crate) const fn new_shared(value: T) -> Self
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
Self {
|
||||
state: Cell::new(State::Shared),
|
||||
value: UnsafeCell::new(value),
|
||||
}
|
||||
}
|
||||
pub(crate) const fn new_unique(value: T) -> Self
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
Self {
|
||||
state: Cell::new(State::Unique),
|
||||
value: UnsafeCell::new(value),
|
||||
}
|
||||
}
|
||||
pub(crate) fn is_unique(&self) -> bool {
|
||||
matches!(self.state.get(), State::Unique)
|
||||
}
|
||||
pub(crate) fn is_shared(&self) -> bool {
|
||||
matches!(self.state.get(), State::Shared)
|
||||
}
|
||||
pub(crate) fn into_inner(self) -> T
|
||||
where
|
||||
T: Sized,
|
||||
{
|
||||
self.value.into_inner()
|
||||
}
|
||||
pub(crate) fn try_share(&self) -> Option<&T>
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
match self.state.get() {
|
||||
State::Shared => {}
|
||||
State::Unique => {
|
||||
struct Locked<'a>(&'a Cell<State>);
|
||||
impl Drop for Locked<'_> {
|
||||
fn drop(&mut self) {
|
||||
self.0.set(State::Shared);
|
||||
}
|
||||
}
|
||||
self.state.set(State::Locked);
|
||||
let lock = Locked(&self.state);
|
||||
// Safety: state is Locked, so nothing else will
|
||||
// access value while calling unique_to_shared.
|
||||
unsafe { &mut *self.value.get() }.unique_to_shared();
|
||||
drop(lock);
|
||||
}
|
||||
State::Locked => return None,
|
||||
}
|
||||
|
||||
// Safety: state is Shared so nothing will create any mutable
|
||||
// references until the returned reference's lifetime expires.
|
||||
Some(unsafe { &*self.value.get() })
|
||||
}
|
||||
#[track_caller]
|
||||
pub(crate) fn share(&self) -> &T
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
let Some(retval) = self.try_share() else {
|
||||
panic!("`share` called recursively");
|
||||
};
|
||||
retval
|
||||
}
|
||||
pub(crate) fn unique(&mut self) -> &mut T
|
||||
where
|
||||
T: AlternatingCellMethods,
|
||||
{
|
||||
match self.state.get() {
|
||||
State::Shared => {
|
||||
self.state.set(State::Unique);
|
||||
self.value.get_mut().shared_to_unique();
|
||||
}
|
||||
State::Unique => {}
|
||||
State::Locked => unreachable!(),
|
||||
}
|
||||
self.value.get_mut()
|
||||
}
|
||||
}
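A crate-internal sketch, with a hypothetical type not taken from this diff, of the unique/shared alternation above: unique_to_shared runs once when switching from mutation to shared reads, and shared_to_unique runs when mutable access resumes.

    // hypothetical cache that must be sorted before it may be shared
    struct SortedCache {
        items: Vec<u32>,
        sorted: bool,
    }

    impl AlternatingCellMethods for SortedCache {
        fn unique_to_shared(&mut self) {
            // called right before the first shared borrow after mutation
            if !self.sorted {
                self.items.sort();
                self.sorted = true;
            }
        }
        fn shared_to_unique(&mut self) {
            // called when switching back to unique (mutable) access
            self.sorted = false;
        }
    }

    fn demo() {
        let mut cell = AlternatingCell::new_unique(SortedCache {
            items: vec![3, 1, 2],
            sorted: false,
        });
        cell.unique().items.push(0);
        // share() invokes unique_to_shared exactly once before handing out &SortedCache
        assert_eq!(cell.share().items, [0, 1, 2, 3]);
    }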
@ -1,9 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error, Unexpected},
|
||||
};
|
||||
use std::{fmt::Debug, hash::Hash, mem::ManuallyDrop, ptr};
|
||||
|
||||
mod sealed {
|
||||
|
|
@ -13,17 +9,7 @@ mod sealed {
|
|||
/// # Safety
|
||||
/// the only implementation is `ConstBool<Self::VALUE>`
|
||||
pub unsafe trait GenericConstBool:
|
||||
sealed::Sealed
|
||||
+ Copy
|
||||
+ Ord
|
||||
+ Hash
|
||||
+ Default
|
||||
+ Debug
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
|
||||
{
|
||||
const VALUE: bool;
|
||||
}
|
||||
|
|
@ -44,32 +30,6 @@ unsafe impl<const VALUE: bool> GenericConstBool for ConstBool<VALUE> {
|
|||
const VALUE: bool = VALUE;
|
||||
}
|
||||
|
||||
impl<const VALUE: bool> Serialize for ConstBool<VALUE> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
VALUE.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, const VALUE: bool> Deserialize<'de> for ConstBool<VALUE> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = bool::deserialize(deserializer)?;
|
||||
if value == VALUE {
|
||||
Ok(ConstBool)
|
||||
} else {
|
||||
Err(D::Error::invalid_value(
|
||||
Unexpected::Bool(value),
|
||||
&if VALUE { "true" } else { "false" },
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ConstBoolDispatchTag {
|
||||
type Type<Select: GenericConstBool>;
|
||||
}
@ -1,9 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error, Unexpected},
|
||||
};
|
||||
use std::{fmt::Debug, hash::Hash};
|
||||
|
||||
mod sealed {
|
||||
|
|
@ -12,17 +8,7 @@ mod sealed {
|
|||
|
||||
/// the only implementation is `ConstUsize<Self::VALUE>`
|
||||
pub trait GenericConstUsize:
|
||||
sealed::Sealed
|
||||
+ Copy
|
||||
+ Ord
|
||||
+ Hash
|
||||
+ Default
|
||||
+ Debug
|
||||
+ 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Serialize
|
||||
+ DeserializeOwned
|
||||
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
|
||||
{
|
||||
const VALUE: usize;
|
||||
}
|
||||
|
|
@ -41,29 +27,3 @@ impl<const VALUE: usize> sealed::Sealed for ConstUsize<VALUE> {}
|
|||
impl<const VALUE: usize> GenericConstUsize for ConstUsize<VALUE> {
|
||||
const VALUE: usize = VALUE;
|
||||
}
|
||||
|
||||
impl<const VALUE: usize> Serialize for ConstUsize<VALUE> {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
VALUE.serialize(serializer)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de, const VALUE: usize> Deserialize<'de> for ConstUsize<VALUE> {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let value = usize::deserialize(deserializer)?;
|
||||
if value == VALUE {
|
||||
Ok(ConstUsize)
|
||||
} else {
|
||||
Err(D::Error::invalid_value(
|
||||
Unexpected::Unsigned(value as u64),
|
||||
&&*VALUE.to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
@ -1,156 +1,192 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use ctor::{ctor, dtor};
|
||||
use jobslot::Client;
|
||||
use ctor::ctor;
|
||||
use jobslot::{Acquired, Client};
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
io, mem,
|
||||
mem,
|
||||
num::NonZeroUsize,
|
||||
sync::{Mutex, MutexGuard},
|
||||
sync::{Condvar, Mutex, Once, OnceLock},
|
||||
thread::spawn,
|
||||
};
|
||||
|
||||
#[ctor]
|
||||
static CLIENT: Mutex<Option<Option<Client>>> = unsafe { Mutex::new(Some(Client::from_env())) };
|
||||
|
||||
#[dtor]
|
||||
fn drop_client() {
|
||||
drop(
|
||||
match CLIENT.lock() {
|
||||
Ok(v) => v,
|
||||
Err(e) => e.into_inner(),
|
||||
fn get_or_make_client() -> &'static Client {
|
||||
#[ctor]
|
||||
static CLIENT: OnceLock<Client> = unsafe {
|
||||
match Client::from_env() {
|
||||
Some(client) => OnceLock::from(client),
|
||||
None => OnceLock::new(),
|
||||
}
|
||||
.take(),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
fn get_or_make_client() -> Client {
|
||||
CLIENT
|
||||
.lock()
|
||||
.expect("shouldn't have panicked")
|
||||
.as_mut()
|
||||
.expect("shutting down")
|
||||
.get_or_insert_with(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
|
||||
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
|
||||
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
|
||||
Some(b'=') => {
|
||||
let mut arg = arg.into_encoded_bytes();
|
||||
arg.drain(..=TEST_THREADS_OPTION.len());
|
||||
available_parallelism = Some(arg);
|
||||
break;
|
||||
}
|
||||
None => {
|
||||
available_parallelism = args.next().map(OsString::into_encoded_bytes);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
CLIENT.get_or_init(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
|
||||
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
|
||||
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
|
||||
Some(b'=') => {
|
||||
let mut arg = arg.into_encoded_bytes();
|
||||
arg.drain(..=TEST_THREADS_OPTION.len());
|
||||
available_parallelism = Some(arg);
|
||||
break;
|
||||
}
|
||||
None => {
|
||||
available_parallelism = args.next().map(OsString::into_encoded_bytes);
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
let available_parallelism = if let Some(available_parallelism) = available_parallelism
|
||||
.as_deref()
|
||||
.and_then(|v| std::str::from_utf8(v).ok())
|
||||
.and_then(|v| v.parse().ok())
|
||||
{
|
||||
available_parallelism
|
||||
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
|
||||
available_parallelism
|
||||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1)
|
||||
.expect("failed to create job server")
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
let available_parallelism = if let Some(available_parallelism) = available_parallelism
|
||||
.as_deref()
|
||||
.and_then(|v| std::str::from_utf8(v).ok())
|
||||
.and_then(|v| v.parse().ok())
|
||||
{
|
||||
available_parallelism
|
||||
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
|
||||
available_parallelism
|
||||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
|
||||
})
|
||||
}
|
||||
|
||||
struct State {
|
||||
obtained_count: usize,
|
||||
waiting_count: usize,
|
||||
available: Vec<Acquired>,
|
||||
implicit_available: bool,
|
||||
}
|
||||
|
||||
impl State {
|
||||
fn total_available(&self) -> usize {
|
||||
self.available.len() + self.implicit_available as usize
|
||||
}
|
||||
fn additional_waiting(&self) -> usize {
|
||||
self.waiting_count.saturating_sub(self.total_available())
|
||||
}
|
||||
}
|
||||
|
||||
static STATE: Mutex<State> = Mutex::new(State {
|
||||
obtained_count: 0,
|
||||
waiting_count: 0,
|
||||
available: Vec::new(),
|
||||
implicit_available: true,
|
||||
});
|
||||
static COND_VAR: Condvar = Condvar::new();
|
||||
|
||||
#[derive(Debug)]
|
||||
enum AcquiredJobInner {
|
||||
FromJobServer(Acquired),
|
||||
ImplicitJob,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AcquiredJob {
|
||||
client: Client,
|
||||
job: AcquiredJobInner,
|
||||
}
|
||||
|
||||
impl AcquiredJob {
|
||||
pub fn acquire() -> io::Result<Self> {
|
||||
let client = get_or_make_client();
|
||||
struct Waiting {}
|
||||
|
||||
impl Waiting {
|
||||
fn done(self) -> MutexGuard<'static, State> {
|
||||
mem::forget(self);
|
||||
fn start_acquire_thread() {
|
||||
static STARTED_THREAD: Once = Once::new();
|
||||
STARTED_THREAD.call_once(|| {
|
||||
spawn(|| {
|
||||
let mut acquired = None;
|
||||
let client = get_or_make_client();
|
||||
let mut state = STATE.lock().unwrap();
|
||||
state.waiting_count -= 1;
|
||||
state
|
||||
}
|
||||
}
|
||||
impl Drop for Waiting {
|
||||
fn drop(&mut self) {
|
||||
STATE.lock().unwrap().waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
loop {
|
||||
state = if state.additional_waiting() == 0 {
|
||||
if acquired.is_some() {
|
||||
drop(state);
|
||||
drop(acquired.take()); // drop Acquired outside of lock
|
||||
STATE.lock().unwrap()
|
||||
} else {
|
||||
COND_VAR.wait(state).unwrap()
|
||||
}
|
||||
} else if acquired.is_some() {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
state.available.push(acquired.take().unwrap());
|
||||
COND_VAR.notify_all();
|
||||
state
|
||||
} else {
|
||||
drop(state);
|
||||
acquired = Some(
|
||||
client
|
||||
.acquire()
|
||||
.expect("can't acquire token from job server"),
|
||||
);
|
||||
STATE.lock().unwrap()
|
||||
};
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
fn acquire_inner(block: bool) -> Option<Self> {
|
||||
Self::start_acquire_thread();
|
||||
let mut state = STATE.lock().unwrap();
|
||||
if state.obtained_count == 0 && state.waiting_count == 0 {
|
||||
state.obtained_count = 1; // get implicit token
|
||||
return Ok(Self { client });
|
||||
loop {
|
||||
if let Some(acquired) = state.available.pop() {
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::FromJobServer(acquired),
|
||||
});
|
||||
}
|
||||
if state.implicit_available {
|
||||
state.implicit_available = false;
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::ImplicitJob,
|
||||
});
|
||||
}
|
||||
if !block {
|
||||
return None;
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
state = COND_VAR.wait(state).unwrap();
|
||||
state.waiting_count -= 1;
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
drop(state);
|
||||
let waiting = Waiting {};
|
||||
client.acquire_raw()?;
|
||||
state = waiting.done();
|
||||
state.obtained_count = state
|
||||
.obtained_count
|
||||
.checked_add(1)
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
|
||||
drop(state);
|
||||
Ok(Self { client })
|
||||
}
|
||||
pub fn try_acquire() -> Option<Self> {
|
||||
Self::acquire_inner(false)
|
||||
}
|
||||
pub fn acquire() -> Self {
|
||||
Self::acquire_inner(true).expect("failed to acquire token")
|
||||
}
|
||||
pub fn run_command<R>(
|
||||
&mut self,
|
||||
cmd: std::process::Command,
|
||||
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
||||
) -> std::io::Result<R> {
|
||||
self.client.configure_make_and_run_with_fifo(cmd, f)
|
||||
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AcquiredJob {
|
||||
fn drop(&mut self) {
|
||||
let mut state = STATE.lock().unwrap();
|
||||
match &mut *state {
|
||||
State {
|
||||
obtained_count: 0, ..
|
||||
} => unreachable!(),
|
||||
State {
|
||||
obtained_count: obtained_count @ 1,
|
||||
waiting_count,
|
||||
} => {
|
||||
*obtained_count = 0; // drop implicit token
|
||||
let any_waiting = *waiting_count != 0;
|
||||
drop(state);
|
||||
if any_waiting {
|
||||
// we have the implicit token, but some other thread is trying to acquire a token,
|
||||
// release the implicit token so they can acquire it.
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
match &self.job {
|
||||
AcquiredJobInner::FromJobServer(_) => {
|
||||
if state.waiting_count > state.available.len() + state.implicit_available as usize {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
let AcquiredJobInner::FromJobServer(acquired) =
|
||||
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
state.available.push(acquired);
|
||||
COND_VAR.notify_all();
|
||||
}
|
||||
}
|
||||
State { obtained_count, .. } => {
|
||||
*obtained_count = obtained_count.saturating_sub(1);
|
||||
drop(state);
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
AcquiredJobInner::ImplicitJob => {
|
||||
state.implicit_available = true;
|
||||
if state.waiting_count > state.available.len() {
|
||||
COND_VAR.notify_all();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
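A brief usage sketch of the job-token API in this hunk (caller-side details are assumptions; `acquire` blocks until the implicit token or a jobserver token is free, and dropping the `AcquiredJob` returns the token to the pool):

// the command here is arbitrary; any std::process::Command works
let mut job = AcquiredJob::acquire(); // try_acquire() is the non-blocking variant
let status = job
    .run_command(std::process::Command::new("firtool"), |cmd| {
        cmd.arg("--version").status()
    })
    .expect("failed to run command");
// the token is released for other waiting threads/processes when `job` is dropped
drop(job);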
@@ -4,10 +4,7 @@ use crate::intern::{Intern, Interned};
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
||||
use std::{
|
||||
cell::Cell,
|
||||
ffi::OsStr,
|
||||
fmt::{self, Debug, Write},
|
||||
io,
|
||||
ops::{Bound, Range, RangeBounds},
|
||||
rc::Rc,
|
||||
sync::{Arc, OnceLock},
|
||||
};
|
||||
@@ -166,6 +163,22 @@ impl fmt::UpperHex for BitSliceWriteWithBase<'_> {
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[track_caller]
|
||||
pub fn get_many_mut<T, const N: usize>(slice: &mut [T], indexes: [usize; N]) -> [&mut T; N] {
|
||||
for i in 0..N {
|
||||
for j in 0..i {
|
||||
assert!(indexes[i] != indexes[j], "duplicate index");
|
||||
}
|
||||
assert!(indexes[i] < slice.len(), "index out of bounds");
|
||||
}
|
||||
// Safety: checked that no indexes are duplicates and no indexes are out of bounds
|
||||
unsafe {
|
||||
let base = slice.as_mut_ptr(); // convert to a raw pointer before loop to avoid aliasing with &mut [T]
|
||||
std::array::from_fn(|i| &mut *base.add(indexes[i]))
|
||||
}
|
||||
}
|
||||
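What `get_many_mut` enables, in one small sketch: several disjoint mutable borrows into the same slice, with the asserts above keeping the unsafe block sound by rejecting duplicate or out-of-range indexes.

let mut data = [10, 20, 30, 40];
let [a, b] = get_many_mut(&mut data, [3, 1]); // two distinct, in-bounds indexes
std::mem::swap(a, b);
assert_eq!(data, [10, 40, 30, 20]);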
|
||||
#[derive(Clone, Default)]
|
||||
pub struct RcWriter(Rc<Cell<Vec<u8>>>);
|
||||
|
||||
@@ -212,403 +225,3 @@ impl std::io::Write for RcWriter {
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! chain {
|
||||
() => {
|
||||
std::iter::empty()
|
||||
};
|
||||
($first:expr $(, $rest:expr)* $(,)?) => {
|
||||
{
|
||||
let retval = IntoIterator::into_iter($first);
|
||||
$(let retval = Iterator::chain(retval, $rest);)*
|
||||
retval
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub(crate) use chain;
|
||||
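The `chain!` macro is just nested `Iterator::chain` calls over any mix of iterables, for example:

// expands to IntoIterator::into_iter([1, 2]).chain([3]).chain(4..=5)
let v: Vec<i32> = chain!([1, 2], [3], 4..=5).collect();
assert_eq!(v, [1, 2, 3, 4, 5]);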
|
||||
pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<Range<usize>> {
|
||||
let start = match range.start_bound() {
|
||||
Bound::Included(start) => *start,
|
||||
Bound::Excluded(start) => start.checked_add(1)?,
|
||||
Bound::Unbounded => 0,
|
||||
};
|
||||
let end = match range.end_bound() {
|
||||
Bound::Included(end) => end.checked_add(1)?,
|
||||
Bound::Excluded(end) => *end,
|
||||
Bound::Unbounded => size,
|
||||
};
|
||||
(start <= end && end <= size).then_some(start..end)
|
||||
}
|
||||
|
||||
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
|
||||
try_slice_range(range, size).expect("range out of bounds")
|
||||
}
|
||||
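These helpers resolve any `RangeBounds` against a length the way slice indexing would, without needing an actual slice; `try_slice_range` reports an out-of-bounds or inverted range as `None`, while `slice_range` panics instead:

assert_eq!(try_slice_range(1..3, 5), Some(1..3));
assert_eq!(try_slice_range(.., 5), Some(0..5));
assert_eq!(try_slice_range(4..10, 5), None); // end past the length
assert_eq!(slice_range(..=2, 5), 0..3);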
|
||||
pub trait SerdeJsonEscapeIfTest {
|
||||
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
|
||||
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfTestResult {
|
||||
fn to_result(self) -> serde_json::Result<bool>;
|
||||
}
|
||||
|
||||
impl SerdeJsonEscapeIfTestResult for bool {
|
||||
fn to_result(self) -> serde_json::Result<bool> {
|
||||
Ok(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
|
||||
fn to_result(self) -> serde_json::Result<bool> {
|
||||
self.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized + FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
|
||||
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
|
||||
self(ch).to_result()
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
|
||||
fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
for utf16 in ch.encode_utf16(&mut [0; 2]) {
|
||||
write!(writer, "\\u{utf16:04x}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
|
||||
impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
|
||||
|
||||
pub struct SerdeJsonEscapeIf<Test, Base = serde_json::ser::CompactFormatter> {
|
||||
pub base: Base,
|
||||
pub test: Test,
|
||||
}
|
||||
|
||||
impl<Test: SerdeJsonEscapeIfTest, Base: SerdeJsonEscapeIfFormatter> serde_json::ser::Formatter
|
||||
for SerdeJsonEscapeIf<Test, Base>
|
||||
{
|
||||
fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_null(writer)
|
||||
}
|
||||
|
||||
fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_bool(writer, value)
|
||||
}
|
||||
|
||||
fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i8(writer, value)
|
||||
}
|
||||
|
||||
fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i16(writer, value)
|
||||
}
|
||||
|
||||
fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i32(writer, value)
|
||||
}
|
||||
|
||||
fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i64(writer, value)
|
||||
}
|
||||
|
||||
fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_i128(writer, value)
|
||||
}
|
||||
|
||||
fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u8(writer, value)
|
||||
}
|
||||
|
||||
fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u16(writer, value)
|
||||
}
|
||||
|
||||
fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u32(writer, value)
|
||||
}
|
||||
|
||||
fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u64(writer, value)
|
||||
}
|
||||
|
||||
fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_u128(writer, value)
|
||||
}
|
||||
|
||||
fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_f32(writer, value)
|
||||
}
|
||||
|
||||
fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_f64(writer, value)
|
||||
}
|
||||
|
||||
fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_number_str(writer, value)
|
||||
}
|
||||
|
||||
fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_string(writer)
|
||||
}
|
||||
|
||||
fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_string(writer)
|
||||
}
|
||||
|
||||
fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
while let Some((next_escape_index, next_escape_char)) = fragment
|
||||
.char_indices()
|
||||
.find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
|
||||
Ok(false) => None,
|
||||
Ok(true) => Some(Ok((index, ch))),
|
||||
Err(e) => Some(Err(e)),
|
||||
})
|
||||
.transpose()?
|
||||
{
|
||||
let (no_escapes, rest) = fragment.split_at(next_escape_index);
|
||||
fragment = &rest[next_escape_char.len_utf8()..];
|
||||
self.base.write_string_fragment(writer, no_escapes)?;
|
||||
self.base.write_unicode_escape(writer, next_escape_char)?;
|
||||
}
|
||||
self.base.write_string_fragment(writer, fragment)
|
||||
}
|
||||
|
||||
fn write_char_escape<W>(
|
||||
&mut self,
|
||||
writer: &mut W,
|
||||
char_escape: serde_json::ser::CharEscape,
|
||||
) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_char_escape(writer, char_escape)
|
||||
}
|
||||
|
||||
fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_byte_array(writer, value)
|
||||
}
|
||||
|
||||
fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_array(writer)
|
||||
}
|
||||
|
||||
fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_array(writer)
|
||||
}
|
||||
|
||||
fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_array_value(writer, first)
|
||||
}
|
||||
|
||||
fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_array_value(writer)
|
||||
}
|
||||
|
||||
fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object(writer)
|
||||
}
|
||||
|
||||
fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object(writer)
|
||||
}
|
||||
|
||||
fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object_key(writer, first)
|
||||
}
|
||||
|
||||
fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object_key(writer)
|
||||
}
|
||||
|
||||
fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.begin_object_value(writer)
|
||||
}
|
||||
|
||||
fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.end_object_value(writer)
|
||||
}
|
||||
|
||||
fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
self.base.write_raw_fragment(writer, fragment)
|
||||
}
|
||||
}
|
||||
|
||||
fn serialize_to_json_ascii_helper<F: SerdeJsonEscapeIfFormatter, S: serde::Serialize + ?Sized>(
|
||||
v: &S,
|
||||
base: F,
|
||||
) -> serde_json::Result<String> {
|
||||
let mut retval = Vec::new();
|
||||
v.serialize(&mut serde_json::ser::Serializer::with_formatter(
|
||||
&mut retval,
|
||||
SerdeJsonEscapeIf {
|
||||
base,
|
||||
test: |ch| ch < '\x20' || ch > '\x7F',
|
||||
},
|
||||
))?;
|
||||
String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii<T: serde::Serialize + ?Sized>(v: &T) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
|
||||
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
indent: &str,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(
|
||||
v,
|
||||
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
|
||||
)
|
||||
}
|
||||
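The escape test used here (`ch < '\x20' || ch > '\x7F'`) routes every character outside printable ASCII through `write_unicode_escape`, so the resulting JSON is pure ASCII. A small sketch of the expected output (the input strings are arbitrary):

assert_eq!(serialize_to_json_ascii("héllo").unwrap(), r#""h\u00e9llo""#);
// characters above the BMP become UTF-16 surrogate pairs
assert_eq!(serialize_to_json_ascii("🦀").unwrap(), r#""\ud83e\udd80""#);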
|
||||
pub fn os_str_strip_prefix<'a>(os_str: &'a OsStr, prefix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||
os_str
|
||||
.as_encoded_bytes()
|
||||
.strip_prefix(prefix.as_ref().as_bytes())
|
||||
.map(|bytes| {
|
||||
// Safety: we removed a UTF-8 prefix so bytes starts with a valid boundary
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
})
|
||||
}
|
||||
|
||||
pub fn os_str_strip_suffix<'a>(os_str: &'a OsStr, suffix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||
os_str
|
||||
.as_encoded_bytes()
|
||||
.strip_suffix(suffix.as_ref().as_bytes())
|
||||
.map(|bytes| {
|
||||
// Safety: we removed a UTF-8 suffix so bytes ends with a valid boundary
|
||||
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||
})
|
||||
}
|
||||
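These are `OsStr` analogues of `str::strip_prefix`/`str::strip_suffix`; because the stripped part is valid UTF-8, the remainder is still a valid `OsStr`:

use std::ffi::OsStr;

let name = OsStr::new("fayalite.rs");
assert_eq!(os_str_strip_suffix(name, ".rs"), Some(OsStr::new("fayalite")));
assert_eq!(os_str_strip_prefix(name, "lib"), None);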
|
||||
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||
pub(crate) struct InternedStrCompareAsStr(pub(crate) Interned<str>);
|
||||
|
||||
impl fmt::Debug for InternedStrCompareAsStr {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for InternedStrCompareAsStr {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
str::cmp(&self.0, &other.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for InternedStrCompareAsStr {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::borrow::Borrow<str> for InternedStrCompareAsStr {
|
||||
fn borrow(&self) -> &str {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
@@ -1,839 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
// code derived from:
|
||||
// https://web.archive.org/web/20250303054010/https://git.libre-soc.org/?p=nmutil.git;a=blob;f=src/nmutil/prefix_sum.py;hb=effeb28e5848392adddcdad1f6e7a098f2a44c9c
|
||||
|
||||
use crate::intern::{Intern, Interned, Memoize};
|
||||
use std::{borrow::Cow, num::NonZeroUsize};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
|
||||
pub struct PrefixSumOp {
|
||||
pub lhs_index: usize,
|
||||
pub rhs_and_dest_index: NonZeroUsize,
|
||||
pub row: u32,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
pub struct DiagramConfig {
|
||||
pub space: Cow<'static, str>,
|
||||
pub vertical_bar: Cow<'static, str>,
|
||||
pub plus: Cow<'static, str>,
|
||||
pub slant: Cow<'static, str>,
|
||||
pub connect: Cow<'static, str>,
|
||||
pub no_connect: Cow<'static, str>,
|
||||
pub padding: usize,
|
||||
}
|
||||
|
||||
impl DiagramConfig {
|
||||
pub const fn new() -> Self {
|
||||
Self {
|
||||
space: Cow::Borrowed(" "),
|
||||
vertical_bar: Cow::Borrowed("|"),
|
||||
plus: Cow::Borrowed("\u{2295}"), // ⊕
|
||||
slant: Cow::Borrowed(r"\"),
|
||||
connect: Cow::Borrowed("\u{25CF}"), // ●
|
||||
no_connect: Cow::Borrowed("X"),
|
||||
padding: 1,
|
||||
}
|
||||
}
|
||||
pub fn draw(self, ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize) -> String {
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
struct DiagramCell {
|
||||
slant: bool,
|
||||
plus: bool,
|
||||
tee: bool,
|
||||
}
|
||||
let mut ops_by_row: Vec<Vec<PrefixSumOp>> = Vec::new();
|
||||
let mut last_row = 0;
|
||||
ops.into_iter().for_each(|op| {
|
||||
assert!(
|
||||
op.lhs_index < op.rhs_and_dest_index.get(),
|
||||
"invalid PrefixSumOp! lhs_index must be less \
|
||||
than rhs_and_dest_index: {op:?}",
|
||||
);
|
||||
assert!(
|
||||
op.row >= last_row,
|
||||
"invalid PrefixSumOp! row must \
|
||||
not decrease (row last was: {last_row}): {op:?}",
|
||||
);
|
||||
let ops = if op.row > last_row || ops_by_row.is_empty() {
|
||||
ops_by_row.push(vec![]);
|
||||
ops_by_row.last_mut().expect("just pushed")
|
||||
} else {
|
||||
ops_by_row
|
||||
.last_mut()
|
||||
.expect("just checked if ops_by_row is empty")
|
||||
};
|
||||
if let Some(last) = ops.last() {
|
||||
assert!(
|
||||
op.rhs_and_dest_index < last.rhs_and_dest_index,
|
||||
"invalid PrefixSumOp! rhs_and_dest_index must strictly \
|
||||
decrease in a row:\nthis op: {op:?}\nlast op: {last:?}",
|
||||
);
|
||||
}
|
||||
ops.push(op);
|
||||
last_row = op.row;
|
||||
});
|
||||
let blank_row = || {
|
||||
vec![
|
||||
DiagramCell {
|
||||
slant: false,
|
||||
plus: false,
|
||||
tee: false
|
||||
};
|
||||
item_count
|
||||
]
|
||||
};
|
||||
let mut cells = vec![blank_row()];
|
||||
for ops in ops_by_row {
|
||||
let max_distance = ops
|
||||
.iter()
|
||||
.map(
|
||||
|&PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
..
|
||||
}| { rhs_and_dest_index.get() - lhs_index },
|
||||
)
|
||||
.max()
|
||||
.expect("ops is known to be non-empty");
|
||||
cells.extend((0..max_distance).map(|_| blank_row()));
|
||||
for op in ops {
|
||||
let mut y = cells.len() - 1;
|
||||
assert!(
|
||||
op.rhs_and_dest_index.get() < item_count,
|
||||
"invalid PrefixSumOp! rhs_and_dest_index must be \
|
||||
less than item_count ({item_count}): {op:?}",
|
||||
);
|
||||
let mut x = op.rhs_and_dest_index.get();
|
||||
cells[y][x].plus = true;
|
||||
x -= 1;
|
||||
y -= 1;
|
||||
while op.lhs_index < x {
|
||||
cells[y][x].slant = true;
|
||||
x -= 1;
|
||||
y -= 1;
|
||||
}
|
||||
cells[y][x].tee = true;
|
||||
}
|
||||
}
|
||||
let mut retval = String::new();
|
||||
let mut row_text = vec![String::new(); 2 * self.padding + 1];
|
||||
for cells_row in cells {
|
||||
for cell in cells_row {
|
||||
// top padding
|
||||
for y in 0..self.padding {
|
||||
// top left padding
|
||||
for x in 0..self.padding {
|
||||
row_text[y] += if x == y && (cell.plus || cell.slant) {
|
||||
&self.slant
|
||||
} else {
|
||||
&self.space
|
||||
};
|
||||
}
|
||||
// top vertical bar
|
||||
row_text[y] += &self.vertical_bar;
|
||||
// top right padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[y] += &self.space;
|
||||
}
|
||||
}
|
||||
// center left padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[self.padding] += &self.space;
|
||||
}
|
||||
// center
|
||||
row_text[self.padding] += if cell.plus {
|
||||
&self.plus
|
||||
} else if cell.tee {
|
||||
&self.connect
|
||||
} else if cell.slant {
|
||||
&self.no_connect
|
||||
} else {
|
||||
&self.vertical_bar
|
||||
};
|
||||
// center right padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[self.padding] += &self.space;
|
||||
}
|
||||
let bottom_padding_start = self.padding + 1;
|
||||
let bottom_padding_last = self.padding * 2;
|
||||
// bottom padding
|
||||
for y in bottom_padding_start..=bottom_padding_last {
|
||||
// bottom left padding
|
||||
for _ in 0..self.padding {
|
||||
row_text[y] += &self.space;
|
||||
}
|
||||
// bottom vertical bar
|
||||
row_text[y] += &self.vertical_bar;
|
||||
// bottom right padding
|
||||
for x in bottom_padding_start..=bottom_padding_last {
|
||||
row_text[y] += if x == y && (cell.tee || cell.slant) {
|
||||
&self.slant
|
||||
} else {
|
||||
&self.space
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
for line in &mut row_text {
|
||||
retval += line.trim_end();
|
||||
retval += "\n";
|
||||
line.clear();
|
||||
}
|
||||
}
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for DiagramConfig {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrefixSumOp {
|
||||
pub fn diagram(ops: impl IntoIterator<Item = Self>, item_count: usize) -> String {
|
||||
Self::diagram_with_config(ops, item_count, DiagramConfig::new())
|
||||
}
|
||||
pub fn diagram_with_config(
|
||||
ops: impl IntoIterator<Item = Self>,
|
||||
item_count: usize,
|
||||
config: DiagramConfig,
|
||||
) -> String {
|
||||
config.draw(ops, item_count)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub enum PrefixSumAlgorithm {
|
||||
/// Uses the algorithm from:
|
||||
/// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_1:_Shorter_span,_more_parallel>
|
||||
LowLatency,
|
||||
/// Uses the algorithm from:
|
||||
/// <https://en.wikipedia.org/wiki/Prefix_sum#Algorithm_2:_Work-efficient>
|
||||
WorkEfficient,
|
||||
}
|
||||
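A worked trace of the two variants for four items, as a sanity check rather than a spec: with `LowLatency`, row 0 rewrites indexes 1..=3 to `a+b`, `b+c`, `c+d`, and row 1 then combines across a distance of two, giving `[a, a+b, a+b+c, a+b+c+d]`; `WorkEfficient` reaches the same result via an up-sweep followed by a down-sweep. Using the `run` method defined below:

let sums = PrefixSumAlgorithm::LowLatency.run([1, 2, 3, 4], |l, r| l + r);
assert_eq!(sums, [1, 3, 6, 10]);
let sums = PrefixSumAlgorithm::WorkEfficient.run([1, 2, 3, 4], |l, r| l + r);
assert_eq!(sums, [1, 3, 6, 10]);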
|
||||
impl PrefixSumAlgorithm {
|
||||
fn ops_impl(self, item_count: usize) -> Vec<PrefixSumOp> {
|
||||
let mut retval = Vec::new();
|
||||
let mut distance = 1;
|
||||
let mut row = 0;
|
||||
while distance < item_count {
|
||||
let double_distance = distance
|
||||
.checked_mul(2)
|
||||
.expect("prefix-sum item_count is too big");
|
||||
let (start, step) = match self {
|
||||
Self::LowLatency => (distance, 1),
|
||||
Self::WorkEfficient => (double_distance - 1, double_distance),
|
||||
};
|
||||
for rhs_and_dest_index in (start..item_count).step_by(step).rev() {
|
||||
let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
|
||||
unreachable!();
|
||||
};
|
||||
let lhs_index = rhs_and_dest_index.get() - distance;
|
||||
retval.push(PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row,
|
||||
});
|
||||
}
|
||||
distance = double_distance;
|
||||
row += 1;
|
||||
}
|
||||
match self {
|
||||
Self::LowLatency => {}
|
||||
Self::WorkEfficient => {
|
||||
distance /= 2;
|
||||
while distance >= 1 {
|
||||
let start = distance
|
||||
.checked_mul(3)
|
||||
.expect("prefix-sum item_count is too big")
|
||||
- 1;
|
||||
for rhs_and_dest_index in (start..item_count).step_by(distance * 2).rev() {
|
||||
let Some(rhs_and_dest_index) = NonZeroUsize::new(rhs_and_dest_index) else {
|
||||
unreachable!();
|
||||
};
|
||||
let lhs_index = rhs_and_dest_index.get() - distance;
|
||||
retval.push(PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row,
|
||||
});
|
||||
}
|
||||
row += 1;
|
||||
distance /= 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
retval
|
||||
}
|
||||
pub fn ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct MyMemoize(PrefixSumAlgorithm);
|
||||
impl Memoize for MyMemoize {
|
||||
type Input = usize;
|
||||
type InputOwned = usize;
|
||||
type Output = Interned<[PrefixSumOp]>;
|
||||
|
||||
fn inner(self, item_count: &Self::Input) -> Self::Output {
|
||||
Intern::intern_owned(self.0.ops_impl(*item_count))
|
||||
}
|
||||
}
|
||||
MyMemoize(self).get_owned(item_count)
|
||||
}
|
||||
pub fn run<T>(self, items: impl IntoIterator<Item = T>, f: impl FnMut(&T, &T) -> T) -> Vec<T> {
|
||||
let mut items = Vec::from_iter(items);
|
||||
self.run_on_slice(&mut items, f);
|
||||
items
|
||||
}
|
||||
pub fn run_on_slice<T>(self, items: &mut [T], mut f: impl FnMut(&T, &T) -> T) -> &mut [T] {
|
||||
self.ops(items.len()).into_iter().for_each(
|
||||
|PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row: _,
|
||||
}| {
|
||||
items[rhs_and_dest_index.get()] =
|
||||
f(&items[lhs_index], &items[rhs_and_dest_index.get()]);
|
||||
},
|
||||
);
|
||||
items
|
||||
}
|
||||
pub fn filtered_ops(
|
||||
self,
|
||||
item_live_out_flags: impl IntoIterator<Item = bool>,
|
||||
) -> Vec<PrefixSumOp> {
|
||||
let mut item_live_out_flags = Vec::from_iter(item_live_out_flags);
|
||||
let prefix_sum_ops = self.ops(item_live_out_flags.len());
|
||||
let mut ops_live_flags = vec![false; prefix_sum_ops.len()];
|
||||
for (
|
||||
op_index,
|
||||
&PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index,
|
||||
row: _,
|
||||
},
|
||||
) in prefix_sum_ops.iter().enumerate().rev()
|
||||
{
|
||||
let live = item_live_out_flags[rhs_and_dest_index.get()];
|
||||
item_live_out_flags[lhs_index] |= live;
|
||||
ops_live_flags[op_index] = live;
|
||||
}
|
||||
prefix_sum_ops
|
||||
.into_iter()
|
||||
.zip(ops_live_flags)
|
||||
.filter_map(|(op, live)| live.then_some(op))
|
||||
.collect()
|
||||
}
|
||||
pub fn reduce_ops(self, item_count: usize) -> Interned<[PrefixSumOp]> {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
struct MyMemoize(PrefixSumAlgorithm);
|
||||
impl Memoize for MyMemoize {
|
||||
type Input = usize;
|
||||
type InputOwned = usize;
|
||||
type Output = Interned<[PrefixSumOp]>;
|
||||
|
||||
fn inner(self, item_count: &Self::Input) -> Self::Output {
|
||||
let mut item_live_out_flags = vec![false; *item_count];
|
||||
let Some(last_item_live_out_flag) = item_live_out_flags.last_mut() else {
|
||||
return Interned::default();
|
||||
};
|
||||
*last_item_live_out_flag = true;
|
||||
Intern::intern_owned(self.0.filtered_ops(item_live_out_flags))
|
||||
}
|
||||
}
|
||||
MyMemoize(self).get_owned(item_count)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reduce_ops(item_count: usize) -> Interned<[PrefixSumOp]> {
|
||||
PrefixSumAlgorithm::LowLatency.reduce_ops(item_count)
|
||||
}
|
||||
|
||||
pub fn reduce<T>(items: impl IntoIterator<Item = T>, mut f: impl FnMut(T, T) -> T) -> Option<T> {
|
||||
let mut items: Vec<_> = items.into_iter().map(Some).collect();
|
||||
for op in reduce_ops(items.len()) {
|
||||
let (Some(lhs), Some(rhs)) = (
|
||||
items[op.lhs_index].take(),
|
||||
items[op.rhs_and_dest_index.get()].take(),
|
||||
) else {
|
||||
unreachable!();
|
||||
};
|
||||
items[op.rhs_and_dest_index.get()] = Some(f(lhs, rhs));
|
||||
}
|
||||
items.last_mut().and_then(Option::take)
|
||||
}
|
||||
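`reduce` runs the same operator tree that `reduce_ops` describes, so for an associative operator it matches `Iterator::reduce` while keeping the combination depth logarithmic:

assert_eq!(reduce([1u32, 2, 3, 4, 5], |l, r| l + r), Some(15));
assert_eq!(reduce(std::iter::empty::<u32>(), |l, r| l + r), None);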
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn input_strings() -> [String; 9] {
|
||||
std::array::from_fn(|i| String::from_utf8(vec![b'a' + i as u8]).unwrap())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_prefix_sum_strings() {
|
||||
let input = input_strings();
|
||||
let expected: Vec<String> = input
|
||||
.iter()
|
||||
.scan(String::new(), |l, r| {
|
||||
*l += r;
|
||||
Some(l.clone())
|
||||
})
|
||||
.collect();
|
||||
println!("expected: {expected:?}");
|
||||
assert_eq!(
|
||||
*PrefixSumAlgorithm::WorkEfficient
|
||||
.run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
|
||||
*expected
|
||||
);
|
||||
assert_eq!(
|
||||
*PrefixSumAlgorithm::LowLatency
|
||||
.run_on_slice(&mut input.clone(), |l, r| l.to_string() + r),
|
||||
*expected
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_string() {
|
||||
let input = input_strings();
|
||||
let expected = input.clone().into_iter().reduce(|l, r| l + &r);
|
||||
assert_eq!(reduce(input, |l, r| l + &r), expected);
|
||||
}
|
||||
|
||||
fn op(lhs_index: usize, rhs_and_dest_index: usize, row: u32) -> PrefixSumOp {
|
||||
PrefixSumOp {
|
||||
lhs_index,
|
||||
rhs_and_dest_index: NonZeroUsize::new(rhs_and_dest_index).expect("should be non-zero"),
|
||||
row,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_ops_9() {
|
||||
let expected = vec![
|
||||
op(7, 8, 0),
|
||||
op(5, 6, 0),
|
||||
op(3, 4, 0),
|
||||
op(1, 2, 0),
|
||||
op(6, 8, 1),
|
||||
op(2, 4, 1),
|
||||
op(4, 8, 2),
|
||||
op(0, 8, 3),
|
||||
];
|
||||
println!("expected: {expected:#?}");
|
||||
let ops = reduce_ops(9);
|
||||
println!("ops: {ops:#?}");
|
||||
assert_eq!(*ops, *expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_ops_8() {
|
||||
let expected = vec![
|
||||
op(6, 7, 0),
|
||||
op(4, 5, 0),
|
||||
op(2, 3, 0),
|
||||
op(0, 1, 0),
|
||||
op(5, 7, 1),
|
||||
op(1, 3, 1),
|
||||
op(3, 7, 2),
|
||||
];
|
||||
println!("expected: {expected:#?}");
|
||||
let ops = reduce_ops(8);
|
||||
println!("ops: {ops:#?}");
|
||||
assert_eq!(*ops, *expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_count_ones() {
|
||||
for width in 0..=10u32 {
|
||||
for v in 0..1u32 << width {
|
||||
let expected = v.count_ones();
|
||||
assert_eq!(
|
||||
reduce((0..width).map(|i| (v >> i) & 1), |l, r| l + r).unwrap_or(0),
|
||||
expected,
|
||||
"v={v:#X}"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn test_diagram(ops: impl IntoIterator<Item = PrefixSumOp>, item_count: usize, expected: &str) {
|
||||
let text = PrefixSumOp::diagram_with_config(
|
||||
ops,
|
||||
item_count,
|
||||
DiagramConfig {
|
||||
plus: Cow::Borrowed("@"),
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
println!("text:\n{text}\n");
|
||||
assert_eq!(text, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_work_efficient_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● | ● | ● | ● | ● | ● | ● | ● |
|
||||
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
|
||||
| \| | \| | \| | \| | \| | \| | \| | \|
|
||||
| @ | @ | @ | @ | @ | @ | @ | @
|
||||
| |\ | | | |\ | | | |\ | | | |\ | |
|
||||
| | \| | | | \| | | | \| | | | \| |
|
||||
| | X | | | X | | | X | | | X |
|
||||
| | |\ | | | |\ | | | |\ | | | |\ |
|
||||
| | | \| | | | \| | | | \| | | | \|
|
||||
| | | @ | | | @ | | | @ | | | @
|
||||
| | | |\ | | | | | | | |\ | | | |
|
||||
| | | | \| | | | | | | | \| | | |
|
||||
| | | | X | | | | | | | X | | |
|
||||
| | | | |\ | | | | | | | |\ | | |
|
||||
| | | | | \| | | | | | | | \| | |
|
||||
| | | | | X | | | | | | | X | |
|
||||
| | | | | |\ | | | | | | | |\ | |
|
||||
| | | | | | \| | | | | | | | \| |
|
||||
| | | | | | X | | | | | | | X |
|
||||
| | | | | | |\ | | | | | | | |\ |
|
||||
| | | | | | | \| | | | | | | | \|
|
||||
| | | | | | | @ | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | | | | | | | | | X | | | |
|
||||
| | | | | | | | | | | |\ | | | |
|
||||
| | | | | | | | | | | | \| | | |
|
||||
| | | | | | | | | | | | X | | |
|
||||
| | | | | | | | | | | | |\ | | |
|
||||
| | | | | | | | | | | | | \| | |
|
||||
| | | | | | | | | | | | | X | |
|
||||
| | | | | | | | | | | | | |\ | |
|
||||
| | | | | | | | | | | | | | \| |
|
||||
| | | | | | | | | | | | | | X |
|
||||
| | | | | | | | | | | | | | |\ |
|
||||
| | | | | | | | | | | | | | | \|
|
||||
| | | | | | | ● | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | ● | | | ● | | | @ | | | |
|
||||
| | | |\ | | | |\ | | | |\ | | | |
|
||||
| | | | \| | | | \| | | | \| | | |
|
||||
| | | | X | | | X | | | X | | |
|
||||
| | | | |\ | | | |\ | | | |\ | | |
|
||||
| | | | | \| | | | \| | | | \| | |
|
||||
| ● | ● | @ | ● | @ | ● | @ | |
|
||||
| |\ | |\ | |\ | |\ | |\ | |\ | |\ | |
|
||||
| | \| | \| | \| | \| | \| | \| | \| |
|
||||
| | @ | @ | @ | @ | @ | @ | @ |
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_low_latency_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::LowLatency.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● ● ● ● ● ● ● ● ● ● ● ● ● ● ● |
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● @ @ @ @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| \| \| |
|
||||
| X X X X X X X X X X X X X X |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | \| \| \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● ● @ @ @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | | |
|
||||
| \| \| \| \| \| \| \| \| \| \| \| \| | | |
|
||||
| X X X X X X X X X X X X | | |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | | |
|
||||
| | \| \| \| \| \| \| \| \| \| \| \| \| | |
|
||||
| | X X X X X X X X X X X X | |
|
||||
| | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| | | \| \| \| \| \| \| \| \| \| \| \| \| |
|
||||
| | | X X X X X X X X X X X X |
|
||||
| | | |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | | | \| \| \| \| \| \| \| \| \| \| \| \|
|
||||
● ● ● ● @ @ @ @ @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ | | | | | | | |
|
||||
| \| \| \| \| \| \| \| \| | | | | | | |
|
||||
| X X X X X X X X | | | | | | |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |\ | | | | | | |
|
||||
| | \| \| \| \| \| \| \| \| | | | | | |
|
||||
| | X X X X X X X X | | | | | |
|
||||
| | |\ |\ |\ |\ |\ |\ |\ |\ | | | | | |
|
||||
| | | \| \| \| \| \| \| \| \| | | | | |
|
||||
| | | X X X X X X X X | | | | |
|
||||
| | | |\ |\ |\ |\ |\ |\ |\ |\ | | | | |
|
||||
| | | | \| \| \| \| \| \| \| \| | | | |
|
||||
| | | | X X X X X X X X | | | |
|
||||
| | | | |\ |\ |\ |\ |\ |\ |\ |\ | | | |
|
||||
| | | | | \| \| \| \| \| \| \| \| | | |
|
||||
| | | | | X X X X X X X X | | |
|
||||
| | | | | |\ |\ |\ |\ |\ |\ |\ |\ | | |
|
||||
| | | | | | \| \| \| \| \| \| \| \| | |
|
||||
| | | | | | X X X X X X X X | |
|
||||
| | | | | | |\ |\ |\ |\ |\ |\ |\ |\ | |
|
||||
| | | | | | | \| \| \| \| \| \| \| \| |
|
||||
| | | | | | | X X X X X X X X |
|
||||
| | | | | | | |\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | | | | | | | \| \| \| \| \| \| \| \|
|
||||
| | | | | | | | @ @ @ @ @ @ @ @
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_work_efficient_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::WorkEfficient.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
● | ● | ● | ● | |
|
||||
|\ | |\ | |\ | |\ | |
|
||||
| \| | \| | \| | \| |
|
||||
| @ | @ | @ | @ |
|
||||
| |\ | | | |\ | | |
|
||||
| | \| | | | \| | |
|
||||
| | X | | | X | |
|
||||
| | |\ | | | |\ | |
|
||||
| | | \| | | | \| |
|
||||
| | | @ | | | @ |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | ● | | | @ |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| ● | ● | @ | ● |
|
||||
| |\ | |\ | |\ | |\ |
|
||||
| | \| | \| | \| | \|
|
||||
| | @ | @ | @ | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_low_latency_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
PrefixSumAlgorithm::LowLatency.ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
● ● ● ● ● ● ● ● |
|
||||
|\ |\ |\ |\ |\ |\ |\ |\ |
|
||||
| \| \| \| \| \| \| \| \|
|
||||
● @ @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ |\ |\ | |
|
||||
| \| \| \| \| \| \| \| |
|
||||
| X X X X X X X |
|
||||
| |\ |\ |\ |\ |\ |\ |\ |
|
||||
| | \| \| \| \| \| \| \|
|
||||
● ● @ @ @ @ @ @ @
|
||||
|\ |\ |\ |\ |\ | | | |
|
||||
| \| \| \| \| \| | | |
|
||||
| X X X X X | | |
|
||||
| |\ |\ |\ |\ |\ | | |
|
||||
| | \| \| \| \| \| | |
|
||||
| | X X X X X | |
|
||||
| | |\ |\ |\ |\ |\ | |
|
||||
| | | \| \| \| \| \| |
|
||||
| | | X X X X X |
|
||||
| | | |\ |\ |\ |\ |\ |
|
||||
| | | | \| \| \| \| \|
|
||||
● | | | @ @ @ @ @
|
||||
|\ | | | | | | | |
|
||||
| \| | | | | | | |
|
||||
| X | | | | | | |
|
||||
| |\ | | | | | | |
|
||||
| | \| | | | | | |
|
||||
| | X | | | | | |
|
||||
| | |\ | | | | | |
|
||||
| | | \| | | | | |
|
||||
| | | X | | | | |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
| | | | | | | | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_diagram_16() {
|
||||
let item_count = 16;
|
||||
test_diagram(
|
||||
reduce_ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | | | | | | | | |
|
||||
● | ● | ● | ● | ● | ● | ● | ● |
|
||||
|\ | |\ | |\ | |\ | |\ | |\ | |\ | |\ |
|
||||
| \| | \| | \| | \| | \| | \| | \| | \|
|
||||
| @ | @ | @ | @ | @ | @ | @ | @
|
||||
| |\ | | | |\ | | | |\ | | | |\ | |
|
||||
| | \| | | | \| | | | \| | | | \| |
|
||||
| | X | | | X | | | X | | | X |
|
||||
| | |\ | | | |\ | | | |\ | | | |\ |
|
||||
| | | \| | | | \| | | | \| | | | \|
|
||||
| | | @ | | | @ | | | @ | | | @
|
||||
| | | |\ | | | | | | | |\ | | | |
|
||||
| | | | \| | | | | | | | \| | | |
|
||||
| | | | X | | | | | | | X | | |
|
||||
| | | | |\ | | | | | | | |\ | | |
|
||||
| | | | | \| | | | | | | | \| | |
|
||||
| | | | | X | | | | | | | X | |
|
||||
| | | | | |\ | | | | | | | |\ | |
|
||||
| | | | | | \| | | | | | | | \| |
|
||||
| | | | | | X | | | | | | | X |
|
||||
| | | | | | |\ | | | | | | | |\ |
|
||||
| | | | | | | \| | | | | | | | \|
|
||||
| | | | | | | @ | | | | | | | @
|
||||
| | | | | | | |\ | | | | | | | |
|
||||
| | | | | | | | \| | | | | | | |
|
||||
| | | | | | | | X | | | | | | |
|
||||
| | | | | | | | |\ | | | | | | |
|
||||
| | | | | | | | | \| | | | | | |
|
||||
| | | | | | | | | X | | | | | |
|
||||
| | | | | | | | | |\ | | | | | |
|
||||
| | | | | | | | | | \| | | | | |
|
||||
| | | | | | | | | | X | | | | |
|
||||
| | | | | | | | | | |\ | | | | |
|
||||
| | | | | | | | | | | \| | | | |
|
||||
| | | | | | | | | | | X | | | |
|
||||
| | | | | | | | | | | |\ | | | |
|
||||
| | | | | | | | | | | | \| | | |
|
||||
| | | | | | | | | | | | X | | |
|
||||
| | | | | | | | | | | | |\ | | |
|
||||
| | | | | | | | | | | | | \| | |
|
||||
| | | | | | | | | | | | | X | |
|
||||
| | | | | | | | | | | | | |\ | |
|
||||
| | | | | | | | | | | | | | \| |
|
||||
| | | | | | | | | | | | | | X |
|
||||
| | | | | | | | | | | | | | |\ |
|
||||
| | | | | | | | | | | | | | | \|
|
||||
| | | | | | | | | | | | | | | @
|
||||
| | | | | | | | | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_reduce_diagram_9() {
|
||||
let item_count = 9;
|
||||
test_diagram(
|
||||
reduce_ops(item_count),
|
||||
item_count,
|
||||
&r"
|
||||
| | | | | | | | |
|
||||
| ● | ● | ● | ● |
|
||||
| |\ | |\ | |\ | |\ |
|
||||
| | \| | \| | \| | \|
|
||||
| | @ | @ | @ | @
|
||||
| | |\ | | | |\ | |
|
||||
| | | \| | | | \| |
|
||||
| | | X | | | X |
|
||||
| | | |\ | | | |\ |
|
||||
| | | | \| | | | \|
|
||||
| | | | @ | | | @
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
● | | | | | | | @
|
||||
|\ | | | | | | | |
|
||||
| \| | | | | | | |
|
||||
| X | | | | | | |
|
||||
| |\ | | | | | | |
|
||||
| | \| | | | | | |
|
||||
| | X | | | | | |
|
||||
| | |\ | | | | | |
|
||||
| | | \| | | | | |
|
||||
| | | X | | | | |
|
||||
| | | |\ | | | | |
|
||||
| | | | \| | | | |
|
||||
| | | | X | | | |
|
||||
| | | | |\ | | | |
|
||||
| | | | | \| | | |
|
||||
| | | | | X | | |
|
||||
| | | | | |\ | | |
|
||||
| | | | | | \| | |
|
||||
| | | | | | X | |
|
||||
| | | | | | |\ | |
|
||||
| | | | | | | \| |
|
||||
| | | | | | | X |
|
||||
| | | | | | | |\ |
|
||||
| | | | | | | | \|
|
||||
| | | | | | | | @
|
||||
| | | | | | | | |
|
||||
"[1..], // trim newline at start
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -212,7 +212,9 @@ pub fn queue<T: Type>(
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
firrtl::ExportOptions, module::transform::simplify_enums::SimplifyEnumsKind, ty::StaticType,
|
||||
cli::FormalMode, firrtl::ExportOptions,
|
||||
module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
|
||||
ty::StaticType,
|
||||
};
|
||||
use std::num::NonZero;
|
||||
|
||||
@@ -1,240 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
#![cfg(feature = "unstable-test-hasher")]
|
||||
|
||||
use std::{
|
||||
fmt::Write as _,
|
||||
hash::{BuildHasher, Hash, Hasher},
|
||||
io::Write as _,
|
||||
marker::PhantomData,
|
||||
sync::LazyLock,
|
||||
};
|
||||
|
||||
type BoxDynHasher = Box<dyn Hasher + Send + Sync>;
|
||||
type BoxDynBuildHasher = Box<dyn DynBuildHasherTrait + Send + Sync>;
|
||||
type BoxDynMakeBuildHasher = Box<dyn Fn() -> BoxDynBuildHasher + Send + Sync>;
|
||||
|
||||
trait TryGetDynBuildHasher: Copy {
|
||||
type Type;
|
||||
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher>;
|
||||
}
|
||||
|
||||
impl<T> TryGetDynBuildHasher for PhantomData<T> {
|
||||
type Type = T;
|
||||
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Default + BuildHasher<Hasher: Send + Sync + 'static> + Send + Sync + 'static + Clone>
|
||||
TryGetDynBuildHasher for &'_ PhantomData<T>
|
||||
{
|
||||
type Type = T;
|
||||
fn try_get_make_build_hasher(self) -> Option<BoxDynMakeBuildHasher> {
|
||||
Some(Box::new(|| Box::<DynBuildHasher<T>>::default()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
struct DynBuildHasher<T>(T);
|
||||
|
||||
trait DynBuildHasherTrait: BuildHasher<Hasher = BoxDynHasher> {
|
||||
fn clone_dyn_build_hasher(&self) -> BoxDynBuildHasher;
|
||||
}
|
||||
|
||||
impl<BH: BuildHasher<Hasher: Send + Sync + 'static>> BuildHasher for DynBuildHasher<BH> {
|
||||
type Hasher = BoxDynHasher;
|
||||
|
||||
fn build_hasher(&self) -> Self::Hasher {
|
||||
Box::new(self.0.build_hasher())
|
||||
}
|
||||
|
||||
fn hash_one<T: Hash>(&self, x: T) -> u64 {
|
||||
self.0.hash_one(x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<BH> DynBuildHasherTrait for DynBuildHasher<BH>
|
||||
where
|
||||
Self: Clone + BuildHasher<Hasher = BoxDynHasher> + Send + Sync + 'static,
|
||||
{
|
||||
fn clone_dyn_build_hasher(&self) -> BoxDynBuildHasher {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DefaultBuildHasher(BoxDynBuildHasher);
|
||||
|
||||
impl Clone for DefaultBuildHasher {
|
||||
fn clone(&self) -> Self {
|
||||
DefaultBuildHasher(self.0.clone_dyn_build_hasher())
|
||||
}
|
||||
}
|
||||
|
||||
const ENV_VAR_NAME: &'static str = "FAYALITE_TEST_HASHER";
|
||||
|
||||
struct EnvVarValue {
|
||||
key: &'static str,
|
||||
try_get_make_build_hasher: fn() -> Option<BoxDynMakeBuildHasher>,
|
||||
description: &'static str,
|
||||
}
|
||||
|
||||
macro_rules! env_var_value {
|
||||
(
|
||||
key: $key:literal,
|
||||
build_hasher: $build_hasher:ty,
|
||||
description: $description:literal,
|
||||
) => {
|
||||
EnvVarValue {
|
||||
key: $key,
|
||||
try_get_make_build_hasher: || {
|
||||
// use rust method resolution to detect if $build_hasher is usable
|
||||
// (e.g. hashbrown's hasher won't be usable without the right feature enabled)
|
||||
(&PhantomData::<DynBuildHasher<$build_hasher>>).try_get_make_build_hasher()
|
||||
},
|
||||
description: $description,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct AlwaysZeroHasher;
|
||||
|
||||
impl Hasher for AlwaysZeroHasher {
|
||||
fn write(&mut self, _bytes: &[u8]) {}
|
||||
fn finish(&self) -> u64 {
|
||||
0
|
||||
}
|
||||
}
|
||||
|
||||
const ENV_VAR_VALUES: &'static [EnvVarValue] = &[
|
||||
env_var_value! {
|
||||
key: "std",
|
||||
build_hasher: std::hash::RandomState,
|
||||
description: "use std::hash::RandomState",
|
||||
},
|
||||
env_var_value! {
|
||||
key: "hashbrown",
|
||||
build_hasher: hashbrown::DefaultHashBuilder,
|
||||
description: "use hashbrown's DefaultHashBuilder",
|
||||
},
|
||||
env_var_value! {
|
||||
key: "always_zero",
|
||||
build_hasher: std::hash::BuildHasherDefault<AlwaysZeroHasher>,
|
||||
description: "use a hasher that always returns 0 for all hashes,\n \
|
||||
this is useful for checking that PartialEq impls are correct",
|
||||
},
|
||||
];
|
||||
|
||||
fn report_bad_env_var(msg: impl std::fmt::Display) -> ! {
|
||||
let mut msg = format!("{ENV_VAR_NAME}: {msg}\n");
|
||||
for &EnvVarValue {
|
||||
key,
|
||||
try_get_make_build_hasher,
|
||||
description,
|
||||
} in ENV_VAR_VALUES
|
||||
{
|
||||
let availability = match try_get_make_build_hasher() {
|
||||
Some(_) => "available",
|
||||
None => "unavailable",
|
||||
};
|
||||
writeln!(msg, "{key}: ({availability})\n {description}").expect("can't fail");
|
||||
}
|
||||
std::io::stderr()
|
||||
.write_all(msg.as_bytes())
|
||||
.expect("should be able to write to stderr");
|
||||
std::process::abort();
|
||||
}
|
||||
|
||||
impl Default for DefaultBuildHasher {
|
||||
fn default() -> Self {
|
||||
static DEFAULT_FN: LazyLock<BoxDynMakeBuildHasher> = LazyLock::new(|| {
|
||||
let var = std::env::var_os(ENV_VAR_NAME);
|
||||
let var = var.as_deref().unwrap_or("std".as_ref());
|
||||
for &EnvVarValue {
|
||||
key,
|
||||
try_get_make_build_hasher,
|
||||
description: _,
|
||||
} in ENV_VAR_VALUES
|
||||
{
|
||||
if var.as_encoded_bytes().eq_ignore_ascii_case(key.as_bytes()) {
|
||||
return try_get_make_build_hasher().unwrap_or_else(|| {
|
||||
report_bad_env_var(format_args!(
|
||||
"unavailable hasher: {key} (is the appropriate feature enabled?)"
|
||||
));
|
||||
});
|
||||
}
|
||||
}
|
||||
report_bad_env_var(format_args!("unrecognized hasher: {var:?}"));
|
||||
});
|
||||
Self(DEFAULT_FN())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DefaultHasher(BoxDynHasher);
|
||||
|
||||
impl BuildHasher for DefaultBuildHasher {
|
||||
type Hasher = DefaultHasher;
|
||||
|
||||
fn build_hasher(&self) -> Self::Hasher {
|
||||
DefaultHasher(self.0.build_hasher())
|
||||
}
|
||||
}
|
||||
|
||||
impl Hasher for DefaultHasher {
|
||||
fn finish(&self) -> u64 {
|
||||
self.0.finish()
|
||||
}
|
||||
|
||||
fn write(&mut self, bytes: &[u8]) {
|
||||
self.0.write(bytes)
|
||||
}
|
||||
|
||||
fn write_u8(&mut self, i: u8) {
|
||||
self.0.write_u8(i)
|
||||
}
|
||||
|
||||
fn write_u16(&mut self, i: u16) {
|
||||
self.0.write_u16(i)
|
||||
}
|
||||
|
||||
fn write_u32(&mut self, i: u32) {
|
||||
self.0.write_u32(i)
|
||||
}
|
||||
|
||||
fn write_u64(&mut self, i: u64) {
|
||||
self.0.write_u64(i)
|
||||
}
|
||||
|
||||
fn write_u128(&mut self, i: u128) {
|
||||
self.0.write_u128(i)
|
||||
}
|
||||
|
||||
fn write_usize(&mut self, i: usize) {
|
||||
self.0.write_usize(i)
|
||||
}
|
||||
|
||||
fn write_i8(&mut self, i: i8) {
|
||||
self.0.write_i8(i)
|
||||
}
|
||||
|
||||
fn write_i16(&mut self, i: i16) {
|
||||
self.0.write_i16(i)
|
||||
}
|
||||
|
||||
fn write_i32(&mut self, i: i32) {
|
||||
self.0.write_i32(i)
|
||||
}
|
||||
|
||||
fn write_i64(&mut self, i: i64) {
|
||||
self.0.write_i64(i)
|
||||
}
|
||||
|
||||
fn write_i128(&mut self, i: i128) {
|
||||
self.0.write_i128(i)
|
||||
}
|
||||
|
||||
fn write_isize(&mut self, i: isize) {
|
||||
self.0.write_isize(i)
|
||||
}
|
||||
}
|
||||
@@ -1,12 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
pub mod xilinx;
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
|
||||
xilinx::built_in_job_kinds()
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = crate::platform::DynPlatform> {
|
||||
xilinx::built_in_platforms()
|
||||
}
|
||||
207
crates/fayalite/src/vendor/xilinx.rs
vendored
@@ -1,207 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
annotations::make_annotation_enum,
|
||||
build::{GlobalParams, ToArgs, WriteArgs},
|
||||
intern::Interned,
|
||||
prelude::{DynPlatform, Platform},
|
||||
};
|
||||
use clap::ValueEnum;
|
||||
use ordered_float::NotNan;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
|
||||
pub mod arty_a7;
|
||||
pub mod primitives;
|
||||
pub mod yosys_nextpnr_prjxray;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct XdcIOStandardAnnotation {
|
||||
pub value: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct XdcLocationAnnotation {
|
||||
pub location: Interned<str>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct XdcCreateClockAnnotation {
|
||||
/// clock period in nanoseconds
|
||||
pub period: NotNan<f64>,
|
||||
}
|
||||
|
||||
make_annotation_enum! {
|
||||
#[non_exhaustive]
|
||||
pub enum XilinxAnnotation {
|
||||
XdcIOStandard(XdcIOStandardAnnotation),
|
||||
XdcLocation(XdcLocationAnnotation),
|
||||
XdcCreateClock(XdcCreateClockAnnotation),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||
pub struct XilinxArgs {
|
||||
#[arg(long)]
|
||||
pub device: Option<Device>,
|
||||
}
|
||||
|
||||
impl XilinxArgs {
|
||||
pub fn require_device(
|
||||
&self,
|
||||
platform: Option<&DynPlatform>,
|
||||
global_params: &GlobalParams,
|
||||
) -> clap::error::Result<Device> {
|
||||
if let Some(device) = self.device {
|
||||
return Ok(device);
|
||||
}
|
||||
if let Some(device) =
|
||||
platform.and_then(|platform| platform.aspects().get_single_by_type::<Device>().copied())
|
||||
{
|
||||
return Ok(device);
|
||||
}
|
||||
Err(global_params.clap_error(
|
||||
clap::error::ErrorKind::MissingRequiredArgument,
|
||||
"missing --device option",
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl ToArgs for XilinxArgs {
|
||||
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||
if let Some(device) = self.device {
|
||||
args.write_long_option_eq("device", device.as_str());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! make_device_enum {
|
||||
($vis:vis enum $Device:ident {
|
||||
$(
|
||||
#[
|
||||
name = $name:literal,
|
||||
xray_part = $xray_part:literal,
|
||||
xray_device = $xray_device:literal,
|
||||
xray_family = $xray_family:literal,
|
||||
]
|
||||
$variant:ident,
|
||||
)*
|
||||
}) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, ValueEnum)]
|
||||
$vis enum $Device {
|
||||
$(
|
||||
#[value(name = $name, alias = $xray_part)]
|
||||
$variant,
|
||||
)*
|
||||
}
|
||||
|
||||
impl $Device {
|
||||
$vis fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $name,)*
|
||||
}
|
||||
}
|
||||
$vis fn xray_part(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $xray_part,)*
|
||||
}
|
||||
}
|
||||
$vis fn xray_device(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $xray_device,)*
|
||||
}
|
||||
}
|
||||
$vis fn xray_family(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$variant => $xray_family,)*
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct DeviceVisitor;
|
||||
|
||||
impl<'de> serde::de::Visitor<'de> for DeviceVisitor {
|
||||
type Value = $Device;
|
||||
|
||||
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str("a Xilinx device string")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match $Device::from_str(v, false) {
|
||||
Ok(v) => Ok(v),
|
||||
Err(_) => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match str::from_utf8(v).ok().and_then(|v| $Device::from_str(v, false).ok()) {
|
||||
Some(v) => Ok(v),
|
||||
None => Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for $Device {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
deserializer.deserialize_string(DeviceVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for $Device {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
self.as_str().serialize(serializer)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
make_device_enum! {
|
||||
pub enum Device {
|
||||
#[
|
||||
name = "xc7a35ticsg324-1L",
|
||||
xray_part = "xc7a35tcsg324-1",
|
||||
xray_device = "xc7a35t",
|
||||
xray_family = "artix7",
|
||||
]
|
||||
Xc7a35ticsg324_1l,
|
||||
#[
|
||||
name = "xc7a100ticsg324-1L",
|
||||
xray_part = "xc7a100tcsg324-1",
|
||||
xray_device = "xc7a100t",
|
||||
xray_family = "artix7",
|
||||
]
|
||||
Xc7a100ticsg324_1l,
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Device {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
|
||||
arty_a7::built_in_job_kinds()
|
||||
.into_iter()
|
||||
.chain(yosys_nextpnr_prjxray::built_in_job_kinds())
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = crate::platform::DynPlatform> {
|
||||
arty_a7::built_in_platforms()
|
||||
.into_iter()
|
||||
.chain(yosys_nextpnr_prjxray::built_in_platforms())
|
||||
}
404
crates/fayalite/src/vendor/xilinx/arty_a7.rs
vendored
@@ -1,404 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
intern::{Intern, Interned},
|
||||
module::{instance_with_loc, reg_builder_with_loc, wire_with_loc},
|
||||
platform::{
|
||||
DynPlatform, Peripheral, PeripheralRef, Peripherals, PeripheralsBuilderFactory,
|
||||
PeripheralsBuilderFinished, Platform, PlatformAspectSet,
|
||||
peripherals::{ClockInput, Led, RgbLed, Uart},
|
||||
},
|
||||
prelude::*,
|
||||
vendor::xilinx::{
|
||||
Device, XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation,
|
||||
primitives,
|
||||
},
|
||||
};
|
||||
use ordered_float::NotNan;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
macro_rules! arty_a7_platform {
|
||||
(
|
||||
$vis:vis enum $ArtyA7Platform:ident {
|
||||
$(#[name = $name:literal, device = $device:ident]
|
||||
$Variant:ident,)*
|
||||
}
|
||||
) => {
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[non_exhaustive]
|
||||
$vis enum $ArtyA7Platform {
|
||||
$($Variant,)*
|
||||
}
|
||||
|
||||
impl $ArtyA7Platform {
|
||||
$vis const VARIANTS: &'static [Self] = &[$(Self::$Variant,)*];
|
||||
$vis fn device(self) -> Device {
|
||||
match self {
|
||||
$(Self::$Variant => Device::$device,)*
|
||||
}
|
||||
}
|
||||
$vis const fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
$(Self::$Variant => $name,)*
|
||||
}
|
||||
}
|
||||
fn get_aspects(self) -> &'static PlatformAspectSet {
|
||||
match self {
|
||||
$(Self::$Variant => {
|
||||
static ASPECTS_SET: OnceLock<PlatformAspectSet> = OnceLock::new();
|
||||
ASPECTS_SET.get_or_init(|| self.make_aspects())
|
||||
})*
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
arty_a7_platform! {
|
||||
pub enum ArtyA7Platform {
|
||||
#[name = "arty-a7-35t", device = Xc7a35ticsg324_1l]
|
||||
ArtyA7_35T,
|
||||
#[name = "arty-a7-100t", device = Xc7a100ticsg324_1l]
|
||||
ArtyA7_100T,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ArtyA7Peripherals {
|
||||
clk100_div_pow2: [Peripheral<ClockInput>; 4],
|
||||
rst: Peripheral<Reset>,
|
||||
rst_sync: Peripheral<SyncReset>,
|
||||
ld0: Peripheral<RgbLed>,
|
||||
ld1: Peripheral<RgbLed>,
|
||||
ld2: Peripheral<RgbLed>,
|
||||
ld3: Peripheral<RgbLed>,
|
||||
ld4: Peripheral<Led>,
|
||||
ld5: Peripheral<Led>,
|
||||
ld6: Peripheral<Led>,
|
||||
ld7: Peripheral<Led>,
|
||||
uart: Peripheral<Uart>,
|
||||
// TODO: add rest of peripherals when we need them
|
||||
}
|
||||
|
||||
impl Peripherals for ArtyA7Peripherals {
|
||||
fn append_peripherals<'a>(&'a self, peripherals: &mut Vec<PeripheralRef<'a, CanonicalType>>) {
|
||||
let Self {
|
||||
clk100_div_pow2,
|
||||
rst,
|
||||
rst_sync,
|
||||
ld0,
|
||||
ld1,
|
||||
ld2,
|
||||
ld3,
|
||||
ld4,
|
||||
ld5,
|
||||
ld6,
|
||||
ld7,
|
||||
uart,
|
||||
} = self;
|
||||
clk100_div_pow2.append_peripherals(peripherals);
|
||||
rst.append_peripherals(peripherals);
|
||||
rst_sync.append_peripherals(peripherals);
|
||||
ld0.append_peripherals(peripherals);
|
||||
ld1.append_peripherals(peripherals);
|
||||
ld2.append_peripherals(peripherals);
|
||||
ld3.append_peripherals(peripherals);
|
||||
ld4.append_peripherals(peripherals);
|
||||
ld5.append_peripherals(peripherals);
|
||||
ld6.append_peripherals(peripherals);
|
||||
ld7.append_peripherals(peripherals);
|
||||
uart.append_peripherals(peripherals);
|
||||
}
|
||||
}
|
||||
|
||||
impl ArtyA7Platform {
|
||||
fn make_aspects(self) -> PlatformAspectSet {
|
||||
let mut retval = PlatformAspectSet::new();
|
||||
retval.insert_new(self.device());
|
||||
retval
|
||||
}
|
||||
}
|
||||
|
||||
#[hdl_module(extern)]
|
||||
fn reset_sync() {
|
||||
#[hdl]
|
||||
let clk: Clock = m.input();
|
||||
#[hdl]
|
||||
let inp: Bool = m.input();
|
||||
#[hdl]
|
||||
let out: SyncReset = m.output();
|
||||
m.annotate_module(BlackBoxInlineAnnotation {
|
||||
path: "fayalite_arty_a7_reset_sync.v".intern(),
|
||||
text: r#"module __fayalite_arty_a7_reset_sync(input clk, input inp, output out);
|
||||
wire reset_0_out;
|
||||
(* ASYNC_REG = "TRUE" *)
|
||||
FDPE #(
|
||||
.INIT(1'b1)
|
||||
) reset_0 (
|
||||
.Q(reset_0_out),
|
||||
.C(clk),
|
||||
.CE(1'b1),
|
||||
.PRE(inp),
|
||||
.D(1'b0)
|
||||
);
|
||||
(* ASYNC_REG = "TRUE" *)
|
||||
FDPE #(
|
||||
.INIT(1'b1)
|
||||
) reset_1 (
|
||||
.Q(out),
|
||||
.C(clk),
|
||||
.CE(1'b1),
|
||||
.PRE(inp),
|
||||
.D(reset_0_out)
|
||||
);
|
||||
endmodule
|
||||
"#
|
||||
.intern(),
|
||||
});
|
||||
m.verilog_name("__fayalite_arty_a7_reset_sync");
|
||||
}
|
||||
|
||||
impl Platform for ArtyA7Platform {
|
||||
type Peripherals = ArtyA7Peripherals;
|
||||
|
||||
fn name(&self) -> Interned<str> {
|
||||
self.as_str().intern()
|
||||
}
|
||||
|
||||
fn new_peripherals<'builder>(
|
||||
&self,
|
||||
builder_factory: PeripheralsBuilderFactory<'builder>,
|
||||
) -> (Self::Peripherals, PeripheralsBuilderFinished<'builder>) {
|
||||
let mut builder = builder_factory.builder();
|
||||
|
||||
let clk100_div_pow2 = std::array::from_fn(|log2_divisor| {
|
||||
let divisor = 1u64 << log2_divisor;
|
||||
let name = if divisor != 1 {
|
||||
format!("clk100_div_{divisor}")
|
||||
} else {
|
||||
"clk100".into()
|
||||
};
|
||||
builder.input_peripheral(name, ClockInput::new(100e6 / divisor as f64))
|
||||
});
|
||||
builder.add_conflicts(Vec::from_iter(clk100_div_pow2.iter().map(|v| v.id())));
|
||||
(
|
||||
ArtyA7Peripherals {
|
||||
clk100_div_pow2,
|
||||
rst: builder.input_peripheral("rst", Reset),
|
||||
rst_sync: builder.input_peripheral("rst_sync", SyncReset),
|
||||
ld0: builder.output_peripheral("ld0", RgbLed),
|
||||
ld1: builder.output_peripheral("ld1", RgbLed),
|
||||
ld2: builder.output_peripheral("ld2", RgbLed),
|
||||
ld3: builder.output_peripheral("ld3", RgbLed),
|
||||
ld4: builder.output_peripheral("ld4", Led),
|
||||
ld5: builder.output_peripheral("ld5", Led),
|
||||
ld6: builder.output_peripheral("ld6", Led),
|
||||
ld7: builder.output_peripheral("ld7", Led),
|
||||
uart: builder.output_peripheral("uart", Uart),
|
||||
},
|
||||
builder.finish(),
|
||||
)
|
||||
}
|
||||
|
||||
fn source_location(&self) -> SourceLocation {
|
||||
SourceLocation::builtin()
|
||||
}
|
||||
|
||||
fn add_peripherals_in_wrapper_module(&self, m: &ModuleBuilder, peripherals: Self::Peripherals) {
|
||||
let ArtyA7Peripherals {
|
||||
clk100_div_pow2,
|
||||
rst,
|
||||
rst_sync,
|
||||
ld0,
|
||||
ld1,
|
||||
ld2,
|
||||
ld3,
|
||||
ld4,
|
||||
ld5,
|
||||
ld6,
|
||||
ld7,
|
||||
uart,
|
||||
} = peripherals;
|
||||
let make_buffered_input = |name: &str, location: &str, io_standard: &str, invert: bool| {
|
||||
let pin = m.input_with_loc(name, SourceLocation::builtin(), Bool);
|
||||
annotate(
|
||||
pin,
|
||||
XdcLocationAnnotation {
|
||||
location: location.intern(),
|
||||
},
|
||||
);
|
||||
annotate(
|
||||
pin,
|
||||
XdcIOStandardAnnotation {
|
||||
value: io_standard.intern(),
|
||||
},
|
||||
);
|
||||
let buf = instance_with_loc(
|
||||
&format!("{name}_buf"),
|
||||
primitives::IBUF(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
connect(buf.I, pin);
|
||||
if invert { !buf.O } else { buf.O }
|
||||
};
|
||||
let make_buffered_output = |name: &str, location: &str, io_standard: &str| {
|
||||
let pin = m.output_with_loc(name, SourceLocation::builtin(), Bool);
|
||||
annotate(
|
||||
pin,
|
||||
XdcLocationAnnotation {
|
||||
location: location.intern(),
|
||||
},
|
||||
);
|
||||
annotate(
|
||||
pin,
|
||||
XdcIOStandardAnnotation {
|
||||
value: io_standard.intern(),
|
||||
},
|
||||
);
|
||||
let buf = instance_with_loc(
|
||||
&format!("{name}_buf"),
|
||||
primitives::OBUFT(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
connect(pin, buf.O);
|
||||
connect(buf.T, false);
|
||||
buf.I
|
||||
};
|
||||
let mut frequency = clk100_div_pow2[0].ty().frequency();
|
||||
let mut log2_divisor = 0;
|
||||
let mut clk = None;
|
||||
for (cur_log2_divisor, p) in clk100_div_pow2.into_iter().enumerate() {
|
||||
let Some(p) = p.into_used() else {
|
||||
continue;
|
||||
};
|
||||
debug_assert!(
|
||||
clk.is_none(),
|
||||
"conflict-handling logic should ensure at most one clock is used",
|
||||
);
|
||||
frequency = p.ty().frequency();
|
||||
clk = Some(p);
|
||||
log2_divisor = cur_log2_divisor;
|
||||
}
|
||||
let clk100_buf = make_buffered_input("clk100", "E3", "LVCMOS33", false);
|
||||
let startup = instance_with_loc(
|
||||
"startup",
|
||||
primitives::STARTUPE2_default_inputs(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
let clk_global_buf = instance_with_loc(
|
||||
"clk_global_buf",
|
||||
primitives::BUFGCE(),
|
||||
SourceLocation::builtin(),
|
||||
);
|
||||
connect(clk_global_buf.CE, startup.EOS);
|
||||
let mut clk_global_buf_in = clk100_buf.to_clock();
|
||||
for prev_log2_divisor in 0..log2_divisor {
|
||||
let prev_divisor = 1u64 << prev_log2_divisor;
|
||||
let clk_in = wire_with_loc(
|
||||
&format!("clk_div_{prev_divisor}"),
|
||||
SourceLocation::builtin(),
|
||||
Clock,
|
||||
);
|
||||
connect(clk_in, clk_global_buf_in);
|
||||
annotate(
|
||||
clk_in,
|
||||
XdcCreateClockAnnotation {
|
||||
period: NotNan::new(1e9 / (100e6 / prev_divisor as f64))
|
||||
.expect("known to be valid"),
|
||||
},
|
||||
);
|
||||
annotate(clk_in, DontTouchAnnotation);
|
||||
let cd = wire_with_loc(
|
||||
&format!("clk_div_{prev_divisor}_in"),
|
||||
SourceLocation::builtin(),
|
||||
ClockDomain[AsyncReset],
|
||||
);
|
||||
connect(cd.clk, clk_in);
|
||||
connect(cd.rst, (!startup.EOS).to_async_reset());
|
||||
let divider = reg_builder_with_loc("divider", SourceLocation::builtin())
|
||||
.clock_domain(cd)
|
||||
.reset(false)
|
||||
.build();
|
||||
connect(divider, !divider);
|
||||
clk_global_buf_in = divider.to_clock();
|
||||
}
|
||||
connect(clk_global_buf.I, clk_global_buf_in);
|
||||
let clk_out = wire_with_loc("clk_out", SourceLocation::builtin(), Clock);
|
||||
connect(clk_out, clk_global_buf.O);
|
||||
annotate(
|
||||
clk_out,
|
||||
XdcCreateClockAnnotation {
|
||||
period: NotNan::new(1e9 / frequency).expect("known to be valid"),
|
||||
},
|
||||
);
|
||||
annotate(clk_out, DontTouchAnnotation);
|
||||
if let Some(clk) = clk {
|
||||
connect(clk.instance_io_field().clk, clk_out);
|
||||
}
|
||||
let rst_value = {
|
||||
let rst_buf = make_buffered_input("rst", "C2", "LVCMOS33", true);
|
||||
let rst_sync = instance_with_loc("rst_sync", reset_sync(), SourceLocation::builtin());
|
||||
connect(rst_sync.clk, clk_out);
|
||||
connect(rst_sync.inp, rst_buf | !startup.EOS);
|
||||
rst_sync.out
|
||||
};
|
||||
if let Some(rst) = rst.into_used() {
|
||||
connect(rst.instance_io_field(), rst_value.to_reset());
|
||||
}
|
||||
if let Some(rst_sync) = rst_sync.into_used() {
|
||||
connect(rst_sync.instance_io_field(), rst_value);
|
||||
}
|
||||
let rgb_leds = [
|
||||
(ld0, ("G6", "F6", "E1")),
|
||||
(ld1, ("G3", "J4", "G4")),
|
||||
(ld2, ("J3", "J2", "H4")),
|
||||
(ld3, ("K1", "H6", "K2")),
|
||||
];
|
||||
for (rgb_led, (r_loc, g_loc, b_loc)) in rgb_leds {
|
||||
let r = make_buffered_output(&format!("{}_r", rgb_led.name()), r_loc, "LVCMOS33");
|
||||
let g = make_buffered_output(&format!("{}_g", rgb_led.name()), g_loc, "LVCMOS33");
|
||||
let b = make_buffered_output(&format!("{}_b", rgb_led.name()), b_loc, "LVCMOS33");
|
||||
if let Some(rgb_led) = rgb_led.into_used() {
|
||||
connect(r, rgb_led.instance_io_field().r);
|
||||
connect(g, rgb_led.instance_io_field().g);
|
||||
connect(b, rgb_led.instance_io_field().b);
|
||||
} else {
|
||||
connect(r, false);
|
||||
connect(g, false);
|
||||
connect(b, false);
|
||||
}
|
||||
}
|
||||
let leds = [(ld4, "H5"), (ld5, "J5"), (ld6, "T9"), (ld7, "T10")];
|
||||
for (led, loc) in leds {
|
||||
let o = make_buffered_output(&led.name(), loc, "LVCMOS33");
|
||||
if let Some(led) = led.into_used() {
|
||||
connect(o, led.instance_io_field().on);
|
||||
} else {
|
||||
connect(o, false);
|
||||
}
|
||||
}
|
||||
let uart_tx = make_buffered_output("uart_tx", "D10", "LVCMOS33");
|
||||
let uart_rx = make_buffered_input("uart_rx", "A9", "LVCMOS33", false);
|
||||
if let Some(uart) = uart.into_used() {
|
||||
connect(uart_tx, uart.instance_io_field().tx);
|
||||
connect(uart.instance_io_field().rx, uart_rx);
|
||||
} else {
|
||||
connect(uart_tx, true); // idle
|
||||
}
|
||||
}
|
||||
|
||||
fn aspects(&self) -> PlatformAspectSet {
|
||||
self.get_aspects().clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
|
||||
[]
|
||||
}
|
||||
|
||||
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = DynPlatform> {
|
||||
ArtyA7Platform::VARIANTS
|
||||
.iter()
|
||||
.map(|&v| DynPlatform::new(v))
|
||||
}
50
crates/fayalite/src/vendor/xilinx/primitives.rs
vendored
@@ -1,50 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

#![allow(non_snake_case)]

use crate::prelude::*;

#[hdl_module(extern)]
pub fn IBUF() {
    m.verilog_name("IBUF");
    #[hdl]
    let O: Bool = m.output();
    #[hdl]
    let I: Bool = m.input();
}

#[hdl_module(extern)]
pub fn OBUFT() {
    m.verilog_name("OBUFT");
    #[hdl]
    let O: Bool = m.output();
    #[hdl]
    let I: Bool = m.input();
    #[hdl]
    let T: Bool = m.input();
}

#[hdl_module(extern)]
pub fn BUFGCE() {
    m.verilog_name("BUFGCE");
    #[hdl]
    let O: Clock = m.output();
    #[hdl]
    let CE: Bool = m.input();
    #[hdl]
    let I: Clock = m.input();
}

#[hdl_module(extern)]
pub fn STARTUPE2_default_inputs() {
    m.verilog_name("STARTUPE2");
    #[hdl]
    let CFGCLK: Clock = m.output();
    #[hdl]
    let CFGMCLK: Clock = m.output();
    #[hdl]
    let EOS: Bool = m.output();
    #[hdl]
    let PREQ: Bool = m.output();
}
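These extern modules only declare the port lists of the underlying Xilinx primitives; `arty_a7.rs` above wires them up with `instance_with_loc` and `connect`. A minimal sketch of the same idea using the plain `instance` helper that appears elsewhere in this diff (module name hypothetical):

#[hdl_module]
fn ibuf_example() {
    #[hdl]
    let pad: Bool = m.input();
    #[hdl]
    let buffered: Bool = m.output();
    // Instantiate the IBUF extern module declared above and pass the pad through it.
    #[hdl]
    let buf = instance(IBUF());
    connect(buf.I, pad);
    connect(buffered, buf.O);
}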
File diff suppressed because it is too large
@@ -2,7 +2,19 @@
// See Notices.txt for copyright information
//! Formal tests in Fayalite

use fayalite::prelude::*;
use fayalite::{
    cli::FormalMode,
    clock::{Clock, ClockDomain},
    expr::{CastTo, HdlPartialEq},
    firrtl::ExportOptions,
    formal::{any_const, any_seq, formal_reset, hdl_assert, hdl_assume},
    hdl, hdl_module,
    int::{Bool, DynSize, Size, UInt, UIntType},
    module::{connect, connect_any, instance, memory, reg_builder, wire},
    reset::ToReset,
    testing::assert_formal,
    ty::StaticType,
};

/// Test hidden state
///
@@ -107,7 +119,7 @@ mod hidden_state {
        FormalMode::Prove,
        16,
        None,
        Default::default(),
        ExportOptions::default(),
    );
    // here a couple of cycles is enough
    assert_formal(
@@ -116,7 +128,7 @@ mod hidden_state {
        FormalMode::Prove,
        2,
        None,
        Default::default(),
        ExportOptions::default(),
    );
    }
}
@@ -230,7 +242,7 @@ mod memory {
        #[hdl]
        let wr: WritePort<DynSize> = wire(WritePort[n]);
        connect(wr.addr, any_seq(UInt[n]));
        connect(wr.data, any_seq(UInt::<8>::new_static()));
        connect(wr.data, any_seq(UInt::<8>::TYPE));
        connect(wr.en, any_seq(Bool));
        #[hdl]
        let dut = instance(example_sram(n));
@@ -277,7 +289,7 @@ mod memory {
        FormalMode::Prove,
        2,
        None,
        Default::default(),
        ExportOptions::default(),
    );
    }
}
@@ -9,11 +9,6 @@ use fayalite::{
};
use std::marker::PhantomData;

#[hdl(outline_generated)]
pub struct MyConstSize<V: Size> {
    pub size: PhantomConst<UIntType<V>>,
}

#[hdl(outline_generated)]
pub struct S<T, Len: Size, T2> {
    pub a: T,
@@ -196,21 +191,3 @@ check_bounds!(CheckBoundsTTT2<#[a, Type] A: BundleType +, #[b, Type] B: Type +,
check_bounds!(CheckBoundsTTT3<#[a, Type] A: EnumType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT4<#[a, Type] A: IntType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);
check_bounds!(CheckBoundsTTT5<#[a, Type] A: StaticType +, #[b, Type] B: Type +, #[c, Type] C: Type +>);

#[derive(Clone, PartialEq, Eq, Hash, Debug, serde::Serialize, serde::Deserialize)]
pub struct MyPhantomConstInner {
    pub a: usize,
    pub b: UInt,
}

#[hdl(outline_generated, get(|v| v.a))]
pub type GetA<P: PhantomConstGet<MyPhantomConstInner>> = DynSize;

#[hdl(outline_generated, get(|v| v.b))]
pub type GetB<P: PhantomConstGet<MyPhantomConstInner>> = UInt;

#[hdl(outline_generated, no_static)]
pub struct MyTypeWithPhantomConstParameter<P: PhantomConstGet<MyPhantomConstInner>> {
    pub a: ArrayType<Bool, GetA<P>>,
    pub b: HdlOption<GetB<P>>,
}
@@ -1,14 +1,8 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use fayalite::{
    assert_export_firrtl,
    firrtl::ExportOptions,
    int::{UIntInRange, UIntInRangeInclusive},
    intern::Intern,
    module::transform::simplify_enums::SimplifyEnumsKind,
    platform::PlatformIOBuilder,
    prelude::*,
    reset::ResetType,
    assert_export_firrtl, firrtl::ExportOptions, intern::Intern,
    module::transform::simplify_enums::SimplifyEnumsKind, prelude::*, reset::ResetType,
    ty::StaticType,
};
use serde_json::json;
@@ -197,14 +191,10 @@ circuit check_array_repeat:
    };
}

pub trait UnknownTrait {}

impl<T: ?Sized> UnknownTrait for T {}

#[hdl_module(outline_generated)]
pub fn check_skipped_generics<T, #[hdl(skip)] U, const N: usize, #[hdl(skip)] const M: usize>(v: U)
where
    T: StaticType + UnknownTrait,
    T: StaticType,
    ConstUsize<N>: KnownSize,
    U: std::fmt::Display,
{
@@ -386,18 +376,18 @@ circuit check_written_inside_both_if_else:
    };
}

#[hdl(outline_generated, cmp_eq)]
#[hdl(outline_generated)]
pub struct TestStruct<T> {
    pub a: T,
    pub b: UInt<8>,
}

#[hdl(outline_generated, cmp_eq)]
#[hdl(outline_generated)]
pub struct TestStruct2 {
    pub v: UInt<8>,
}

#[hdl(outline_generated, cmp_eq)]
#[hdl(outline_generated)]
pub struct TestStruct3 {}

#[hdl_module(outline_generated)]
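The substantive difference in this hunk is the `cmp_eq` option on the `#[hdl(outline_generated)]` attribute of these test structs; the side that carries it gets generated `.cmp_eq()` / `.cmp_ne()` support, which the `check_struct_cmp_eq` test further down in this file exercises. Trimmed from that test for reference:

#[hdl]
let test_struct_lhs: TestStruct<SInt<8>> = m.input();
#[hdl]
let test_struct_rhs: TestStruct<SInt<8>> = m.input();
#[hdl]
let test_struct_cmp_eq: Bool = m.output();
connect(test_struct_cmp_eq, test_struct_lhs.cmp_eq(test_struct_rhs));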
@@ -4355,332 +4345,3 @@ circuit check_cfgs:
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_let_patterns() {
|
||||
#[hdl]
|
||||
let tuple_in: (UInt<1>, SInt<1>, Bool) = m.input();
|
||||
#[hdl]
|
||||
let (tuple_0, tuple_1, tuple_2) = tuple_in;
|
||||
#[hdl]
|
||||
let tuple_0_out: UInt<1> = m.output();
|
||||
connect(tuple_0_out, tuple_0);
|
||||
#[hdl]
|
||||
let tuple_1_out: SInt<1> = m.output();
|
||||
connect(tuple_1_out, tuple_1);
|
||||
#[hdl]
|
||||
let tuple_2_out: Bool = m.output();
|
||||
connect(tuple_2_out, tuple_2);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_in: TestStruct<SInt<8>> = m.input();
|
||||
#[hdl]
|
||||
let TestStruct::<_> { a, b } = test_struct_in;
|
||||
#[hdl]
|
||||
let test_struct_a_out: SInt<8> = m.output();
|
||||
connect(test_struct_a_out, a);
|
||||
#[hdl]
|
||||
let test_struct_b_out: UInt<8> = m.output();
|
||||
connect(test_struct_b_out, b);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_2_in: TestStruct2 = m.input();
|
||||
#[hdl]
|
||||
let TestStruct2 { v } = test_struct_2_in;
|
||||
#[hdl]
|
||||
let test_struct_2_v_out: UInt<8> = m.output();
|
||||
connect(test_struct_2_v_out, v);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_3_in: TestStruct3 = m.input();
|
||||
#[hdl]
|
||||
let TestStruct3 {} = test_struct_3_in;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_let_patterns() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_let_patterns();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_let_patterns.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_let_patterns:
|
||||
type Ty0 = {`0`: UInt<1>, `1`: SInt<1>, `2`: UInt<1>}
|
||||
type Ty1 = {a: SInt<8>, b: UInt<8>}
|
||||
type Ty2 = {v: UInt<8>}
|
||||
type Ty3 = {}
|
||||
module check_let_patterns: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input tuple_in: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
output tuple_0_out: UInt<1> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output tuple_1_out: SInt<1> @[module-XXXXXXXXXX.rs 6:1]
|
||||
output tuple_2_out: UInt<1> @[module-XXXXXXXXXX.rs 8:1]
|
||||
input test_struct_in: Ty1 @[module-XXXXXXXXXX.rs 10:1]
|
||||
output test_struct_a_out: SInt<8> @[module-XXXXXXXXXX.rs 12:1]
|
||||
output test_struct_b_out: UInt<8> @[module-XXXXXXXXXX.rs 14:1]
|
||||
input test_struct_2_in: Ty2 @[module-XXXXXXXXXX.rs 16:1]
|
||||
output test_struct_2_v_out: UInt<8> @[module-XXXXXXXXXX.rs 18:1]
|
||||
input test_struct_3_in: Ty3 @[module-XXXXXXXXXX.rs 20:1]
|
||||
connect tuple_0_out, tuple_in.`0` @[module-XXXXXXXXXX.rs 5:1]
|
||||
connect tuple_1_out, tuple_in.`1` @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect tuple_2_out, tuple_in.`2` @[module-XXXXXXXXXX.rs 9:1]
|
||||
connect test_struct_a_out, test_struct_in.a @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect test_struct_b_out, test_struct_in.b @[module-XXXXXXXXXX.rs 15:1]
|
||||
connect test_struct_2_v_out, test_struct_2_in.v @[module-XXXXXXXXXX.rs 19:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_struct_cmp_eq() {
|
||||
#[hdl]
|
||||
let tuple_lhs: (UInt<1>, SInt<1>, Bool) = m.input();
|
||||
#[hdl]
|
||||
let tuple_rhs: (UInt<1>, SInt<1>, Bool) = m.input();
|
||||
#[hdl]
|
||||
let tuple_cmp_eq: Bool = m.output();
|
||||
connect(tuple_cmp_eq, tuple_lhs.cmp_eq(tuple_rhs));
|
||||
#[hdl]
|
||||
let tuple_cmp_ne: Bool = m.output();
|
||||
connect(tuple_cmp_ne, tuple_lhs.cmp_ne(tuple_rhs));
|
||||
|
||||
#[hdl]
|
||||
let test_struct_lhs: TestStruct<SInt<8>> = m.input();
|
||||
#[hdl]
|
||||
let test_struct_rhs: TestStruct<SInt<8>> = m.input();
|
||||
#[hdl]
|
||||
let test_struct_cmp_eq: Bool = m.output();
|
||||
connect(test_struct_cmp_eq, test_struct_lhs.cmp_eq(test_struct_rhs));
|
||||
#[hdl]
|
||||
let test_struct_cmp_ne: Bool = m.output();
|
||||
connect(test_struct_cmp_ne, test_struct_lhs.cmp_ne(test_struct_rhs));
|
||||
|
||||
#[hdl]
|
||||
let test_struct_2_lhs: TestStruct2 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_2_rhs: TestStruct2 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_2_cmp_eq: Bool = m.output();
|
||||
connect(
|
||||
test_struct_2_cmp_eq,
|
||||
test_struct_2_lhs.cmp_eq(test_struct_2_rhs),
|
||||
);
|
||||
#[hdl]
|
||||
let test_struct_2_cmp_ne: Bool = m.output();
|
||||
connect(
|
||||
test_struct_2_cmp_ne,
|
||||
test_struct_2_lhs.cmp_ne(test_struct_2_rhs),
|
||||
);
|
||||
|
||||
#[hdl]
|
||||
let test_struct_3_lhs: TestStruct3 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_3_rhs: TestStruct3 = m.input();
|
||||
#[hdl]
|
||||
let test_struct_3_cmp_eq: Bool = m.output();
|
||||
connect(
|
||||
test_struct_3_cmp_eq,
|
||||
test_struct_3_lhs.cmp_eq(test_struct_3_rhs),
|
||||
);
|
||||
#[hdl]
|
||||
let test_struct_3_cmp_ne: Bool = m.output();
|
||||
connect(
|
||||
test_struct_3_cmp_ne,
|
||||
test_struct_3_lhs.cmp_ne(test_struct_3_rhs),
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_struct_cmp_eq() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_struct_cmp_eq();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_struct_cmp_eq.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_struct_cmp_eq:
|
||||
type Ty0 = {`0`: UInt<1>, `1`: SInt<1>, `2`: UInt<1>}
|
||||
type Ty1 = {a: SInt<8>, b: UInt<8>}
|
||||
type Ty2 = {v: UInt<8>}
|
||||
type Ty3 = {}
|
||||
module check_struct_cmp_eq: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input tuple_lhs: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input tuple_rhs: Ty0 @[module-XXXXXXXXXX.rs 3:1]
|
||||
output tuple_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 4:1]
|
||||
output tuple_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 6:1]
|
||||
input test_struct_lhs: Ty1 @[module-XXXXXXXXXX.rs 8:1]
|
||||
input test_struct_rhs: Ty1 @[module-XXXXXXXXXX.rs 9:1]
|
||||
output test_struct_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 10:1]
|
||||
output test_struct_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 12:1]
|
||||
input test_struct_2_lhs: Ty2 @[module-XXXXXXXXXX.rs 14:1]
|
||||
input test_struct_2_rhs: Ty2 @[module-XXXXXXXXXX.rs 15:1]
|
||||
output test_struct_2_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 16:1]
|
||||
output test_struct_2_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 18:1]
|
||||
input test_struct_3_lhs: Ty3 @[module-XXXXXXXXXX.rs 20:1]
|
||||
input test_struct_3_rhs: Ty3 @[module-XXXXXXXXXX.rs 21:1]
|
||||
output test_struct_3_cmp_eq: UInt<1> @[module-XXXXXXXXXX.rs 22:1]
|
||||
output test_struct_3_cmp_ne: UInt<1> @[module-XXXXXXXXXX.rs 24:1]
|
||||
wire _array_literal_expr: UInt<1>[3]
|
||||
connect _array_literal_expr[0], eq(tuple_lhs.`0`, tuple_rhs.`0`)
|
||||
connect _array_literal_expr[1], eq(tuple_lhs.`1`, tuple_rhs.`1`)
|
||||
connect _array_literal_expr[2], eq(tuple_lhs.`2`, tuple_rhs.`2`)
|
||||
wire _cast_array_to_bits_expr: UInt<1>[3]
|
||||
connect _cast_array_to_bits_expr[0], _array_literal_expr[0]
|
||||
connect _cast_array_to_bits_expr[1], _array_literal_expr[1]
|
||||
connect _cast_array_to_bits_expr[2], _array_literal_expr[2]
|
||||
wire _cast_to_bits_expr: UInt<3>
|
||||
connect _cast_to_bits_expr, cat(_cast_array_to_bits_expr[2], cat(_cast_array_to_bits_expr[1], _cast_array_to_bits_expr[0]))
|
||||
connect tuple_cmp_eq, andr(_cast_to_bits_expr) @[module-XXXXXXXXXX.rs 5:1]
|
||||
wire _array_literal_expr_1: UInt<1>[3]
|
||||
connect _array_literal_expr_1[0], neq(tuple_lhs.`0`, tuple_rhs.`0`)
|
||||
connect _array_literal_expr_1[1], neq(tuple_lhs.`1`, tuple_rhs.`1`)
|
||||
connect _array_literal_expr_1[2], neq(tuple_lhs.`2`, tuple_rhs.`2`)
|
||||
wire _cast_array_to_bits_expr_1: UInt<1>[3]
|
||||
connect _cast_array_to_bits_expr_1[0], _array_literal_expr_1[0]
|
||||
connect _cast_array_to_bits_expr_1[1], _array_literal_expr_1[1]
|
||||
connect _cast_array_to_bits_expr_1[2], _array_literal_expr_1[2]
|
||||
wire _cast_to_bits_expr_1: UInt<3>
|
||||
connect _cast_to_bits_expr_1, cat(_cast_array_to_bits_expr_1[2], cat(_cast_array_to_bits_expr_1[1], _cast_array_to_bits_expr_1[0]))
|
||||
connect tuple_cmp_ne, orr(_cast_to_bits_expr_1) @[module-XXXXXXXXXX.rs 7:1]
|
||||
connect test_struct_cmp_eq, and(eq(test_struct_lhs.a, test_struct_rhs.a), eq(test_struct_lhs.b, test_struct_rhs.b)) @[module-XXXXXXXXXX.rs 11:1]
|
||||
connect test_struct_cmp_ne, or(neq(test_struct_lhs.a, test_struct_rhs.a), neq(test_struct_lhs.b, test_struct_rhs.b)) @[module-XXXXXXXXXX.rs 13:1]
|
||||
connect test_struct_2_cmp_eq, eq(test_struct_2_lhs.v, test_struct_2_rhs.v) @[module-XXXXXXXXXX.rs 17:1]
|
||||
connect test_struct_2_cmp_ne, neq(test_struct_2_lhs.v, test_struct_2_rhs.v) @[module-XXXXXXXXXX.rs 19:1]
|
||||
connect test_struct_3_cmp_eq, UInt<1>(0h1) @[module-XXXXXXXXXX.rs 23:1]
|
||||
connect test_struct_3_cmp_ne, UInt<1>(0h0) @[module-XXXXXXXXXX.rs 25:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_uint_in_range() {
|
||||
#[hdl]
|
||||
let i_0_to_1: UIntInRange<0, 1> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_2: UIntInRange<0, 2> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_3: UIntInRange<0, 3> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_4: UIntInRange<0, 4> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_7: UIntInRange<0, 7> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_8: UIntInRange<0, 8> = m.input();
|
||||
#[hdl]
|
||||
let i_0_to_9: UIntInRange<0, 9> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_0: UIntInRangeInclusive<0, 0> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_1: UIntInRangeInclusive<0, 1> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_2: UIntInRangeInclusive<0, 2> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_3: UIntInRangeInclusive<0, 3> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_4: UIntInRangeInclusive<0, 4> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_7: UIntInRangeInclusive<0, 7> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_8: UIntInRangeInclusive<0, 8> = m.input();
|
||||
#[hdl]
|
||||
let i_0_through_9: UIntInRangeInclusive<0, 9> = m.input();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_uint_in_range() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_uint_in_range();
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_uint_in_range.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_uint_in_range:
|
||||
type Ty0 = {value: UInt<0>, range: {}}
|
||||
type Ty1 = {value: UInt<1>, range: {}}
|
||||
type Ty2 = {value: UInt<2>, range: {}}
|
||||
type Ty3 = {value: UInt<2>, range: {}}
|
||||
type Ty4 = {value: UInt<3>, range: {}}
|
||||
type Ty5 = {value: UInt<3>, range: {}}
|
||||
type Ty6 = {value: UInt<4>, range: {}}
|
||||
type Ty7 = {value: UInt<0>, range: {}}
|
||||
type Ty8 = {value: UInt<1>, range: {}}
|
||||
type Ty9 = {value: UInt<2>, range: {}}
|
||||
type Ty10 = {value: UInt<2>, range: {}}
|
||||
type Ty11 = {value: UInt<3>, range: {}}
|
||||
type Ty12 = {value: UInt<3>, range: {}}
|
||||
type Ty13 = {value: UInt<4>, range: {}}
|
||||
type Ty14 = {value: UInt<4>, range: {}}
|
||||
module check_uint_in_range: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input i_0_to_1: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input i_0_to_2: Ty1 @[module-XXXXXXXXXX.rs 3:1]
|
||||
input i_0_to_3: Ty2 @[module-XXXXXXXXXX.rs 4:1]
|
||||
input i_0_to_4: Ty3 @[module-XXXXXXXXXX.rs 5:1]
|
||||
input i_0_to_7: Ty4 @[module-XXXXXXXXXX.rs 6:1]
|
||||
input i_0_to_8: Ty5 @[module-XXXXXXXXXX.rs 7:1]
|
||||
input i_0_to_9: Ty6 @[module-XXXXXXXXXX.rs 8:1]
|
||||
input i_0_through_0: Ty7 @[module-XXXXXXXXXX.rs 9:1]
|
||||
input i_0_through_1: Ty8 @[module-XXXXXXXXXX.rs 10:1]
|
||||
input i_0_through_2: Ty9 @[module-XXXXXXXXXX.rs 11:1]
|
||||
input i_0_through_3: Ty10 @[module-XXXXXXXXXX.rs 12:1]
|
||||
input i_0_through_4: Ty11 @[module-XXXXXXXXXX.rs 13:1]
|
||||
input i_0_through_7: Ty12 @[module-XXXXXXXXXX.rs 14:1]
|
||||
input i_0_through_8: Ty13 @[module-XXXXXXXXXX.rs 15:1]
|
||||
input i_0_through_9: Ty14 @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
|
||||
#[hdl_module(outline_generated)]
|
||||
pub fn check_platform_io(platform_io_builder: PlatformIOBuilder<'_>) {
|
||||
#[hdl]
|
||||
let io = m.add_platform_io(platform_io_builder);
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[test]
|
||||
fn test_platform_io() {
|
||||
let _n = SourceLocation::normalize_files_for_tests();
|
||||
let m = check_platform_io(todo!());
|
||||
dbg!(m);
|
||||
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
|
||||
assert_export_firrtl! {
|
||||
m =>
|
||||
"/test/check_platform_io.fir": r"FIRRTL version 3.2.0
|
||||
circuit check_platform_io:
|
||||
type Ty0 = {value: UInt<0>, range: {}}
|
||||
type Ty1 = {value: UInt<1>, range: {}}
|
||||
type Ty2 = {value: UInt<2>, range: {}}
|
||||
type Ty3 = {value: UInt<2>, range: {}}
|
||||
type Ty4 = {value: UInt<3>, range: {}}
|
||||
type Ty5 = {value: UInt<3>, range: {}}
|
||||
type Ty6 = {value: UInt<4>, range: {}}
|
||||
type Ty7 = {value: UInt<0>, range: {}}
|
||||
type Ty8 = {value: UInt<1>, range: {}}
|
||||
type Ty9 = {value: UInt<2>, range: {}}
|
||||
type Ty10 = {value: UInt<2>, range: {}}
|
||||
type Ty11 = {value: UInt<3>, range: {}}
|
||||
type Ty12 = {value: UInt<3>, range: {}}
|
||||
type Ty13 = {value: UInt<4>, range: {}}
|
||||
type Ty14 = {value: UInt<4>, range: {}}
|
||||
module check_platform_io: @[module-XXXXXXXXXX.rs 1:1]
|
||||
input i_0_to_1: Ty0 @[module-XXXXXXXXXX.rs 2:1]
|
||||
input i_0_to_2: Ty1 @[module-XXXXXXXXXX.rs 3:1]
|
||||
input i_0_to_3: Ty2 @[module-XXXXXXXXXX.rs 4:1]
|
||||
input i_0_to_4: Ty3 @[module-XXXXXXXXXX.rs 5:1]
|
||||
input i_0_to_7: Ty4 @[module-XXXXXXXXXX.rs 6:1]
|
||||
input i_0_to_8: Ty5 @[module-XXXXXXXXXX.rs 7:1]
|
||||
input i_0_to_9: Ty6 @[module-XXXXXXXXXX.rs 8:1]
|
||||
input i_0_through_0: Ty7 @[module-XXXXXXXXXX.rs 9:1]
|
||||
input i_0_through_1: Ty8 @[module-XXXXXXXXXX.rs 10:1]
|
||||
input i_0_through_2: Ty9 @[module-XXXXXXXXXX.rs 11:1]
|
||||
input i_0_through_3: Ty10 @[module-XXXXXXXXXX.rs 12:1]
|
||||
input i_0_through_4: Ty11 @[module-XXXXXXXXXX.rs 13:1]
|
||||
input i_0_through_7: Ty12 @[module-XXXXXXXXXX.rs 14:1]
|
||||
input i_0_through_8: Ty13 @[module-XXXXXXXXXX.rs 15:1]
|
||||
input i_0_through_9: Ty14 @[module-XXXXXXXXXX.rs 16:1]
|
||||
",
|
||||
};
|
||||
}
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,283 +0,0 @@
$timescale 1 ps $end
|
||||
$scope module array_rw $end
|
||||
$scope struct array_in $end
|
||||
$var wire 8 ! \[0] $end
|
||||
$var wire 8 " \[1] $end
|
||||
$var wire 8 # \[2] $end
|
||||
$var wire 8 $ \[3] $end
|
||||
$var wire 8 % \[4] $end
|
||||
$var wire 8 & \[5] $end
|
||||
$var wire 8 ' \[6] $end
|
||||
$var wire 8 ( \[7] $end
|
||||
$var wire 8 ) \[8] $end
|
||||
$var wire 8 * \[9] $end
|
||||
$var wire 8 + \[10] $end
|
||||
$var wire 8 , \[11] $end
|
||||
$var wire 8 - \[12] $end
|
||||
$var wire 8 . \[13] $end
|
||||
$var wire 8 / \[14] $end
|
||||
$var wire 8 0 \[15] $end
|
||||
$upscope $end
|
||||
$scope struct array_out $end
|
||||
$var wire 8 1 \[0] $end
|
||||
$var wire 8 2 \[1] $end
|
||||
$var wire 8 3 \[2] $end
|
||||
$var wire 8 4 \[3] $end
|
||||
$var wire 8 5 \[4] $end
|
||||
$var wire 8 6 \[5] $end
|
||||
$var wire 8 7 \[6] $end
|
||||
$var wire 8 8 \[7] $end
|
||||
$var wire 8 9 \[8] $end
|
||||
$var wire 8 : \[9] $end
|
||||
$var wire 8 ; \[10] $end
|
||||
$var wire 8 < \[11] $end
|
||||
$var wire 8 = \[12] $end
|
||||
$var wire 8 > \[13] $end
|
||||
$var wire 8 ? \[14] $end
|
||||
$var wire 8 @ \[15] $end
|
||||
$upscope $end
|
||||
$var wire 8 A read_index $end
|
||||
$var wire 8 B read_data $end
|
||||
$var wire 8 C write_index $end
|
||||
$var wire 8 D write_data $end
|
||||
$var wire 1 E write_en $end
|
||||
$scope struct array_wire $end
|
||||
$var wire 8 F \[0] $end
|
||||
$var wire 8 G \[1] $end
|
||||
$var wire 8 H \[2] $end
|
||||
$var wire 8 I \[3] $end
|
||||
$var wire 8 J \[4] $end
|
||||
$var wire 8 K \[5] $end
|
||||
$var wire 8 L \[6] $end
|
||||
$var wire 8 M \[7] $end
|
||||
$var wire 8 N \[8] $end
|
||||
$var wire 8 O \[9] $end
|
||||
$var wire 8 P \[10] $end
|
||||
$var wire 8 Q \[11] $end
|
||||
$var wire 8 R \[12] $end
|
||||
$var wire 8 S \[13] $end
|
||||
$var wire 8 T \[14] $end
|
||||
$var wire 8 U \[15] $end
|
||||
$upscope $end
|
||||
$upscope $end
|
||||
$enddefinitions $end
|
||||
$dumpvars
|
||||
b11111111 !
|
||||
b1111111 "
|
||||
b111111 #
|
||||
b11111 $
|
||||
b1111 %
|
||||
b111 &
|
||||
b11 '
|
||||
b1 (
|
||||
b0 )
|
||||
b10000000 *
|
||||
b11000000 +
|
||||
b11100000 ,
|
||||
b11110000 -
|
||||
b11111000 .
|
||||
b11111100 /
|
||||
b11111110 0
|
||||
b11111111 1
|
||||
b1111111 2
|
||||
b111111 3
|
||||
b11111 4
|
||||
b1111 5
|
||||
b111 6
|
||||
b11 7
|
||||
b1 8
|
||||
b0 9
|
||||
b10000000 :
|
||||
b11000000 ;
|
||||
b11100000 <
|
||||
b11110000 =
|
||||
b11111000 >
|
||||
b11111100 ?
|
||||
b11111110 @
|
||||
b0 A
|
||||
b11111111 B
|
||||
b0 C
|
||||
b0 D
|
||||
0E
|
||||
b11111111 F
|
||||
b1111111 G
|
||||
b111111 H
|
||||
b11111 I
|
||||
b1111 J
|
||||
b111 K
|
||||
b11 L
|
||||
b1 M
|
||||
b0 N
|
||||
b10000000 O
|
||||
b11000000 P
|
||||
b11100000 Q
|
||||
b11110000 R
|
||||
b11111000 S
|
||||
b11111100 T
|
||||
b11111110 U
|
||||
$end
|
||||
#1000000
|
||||
b1 A
|
||||
b1111111 B
|
||||
#2000000
|
||||
b10 A
|
||||
b111111 B
|
||||
#3000000
|
||||
b11 A
|
||||
b11111 B
|
||||
#4000000
|
||||
b100 A
|
||||
b1111 B
|
||||
#5000000
|
||||
b101 A
|
||||
b111 B
|
||||
#6000000
|
||||
b110 A
|
||||
b11 B
|
||||
#7000000
|
||||
b111 A
|
||||
b1 B
|
||||
#8000000
|
||||
b1000 A
|
||||
b0 B
|
||||
#9000000
|
||||
b1001 A
|
||||
b10000000 B
|
||||
#10000000
|
||||
b1010 A
|
||||
b11000000 B
|
||||
#11000000
|
||||
b1011 A
|
||||
b11100000 B
|
||||
#12000000
|
||||
b1100 A
|
||||
b11110000 B
|
||||
#13000000
|
||||
b1101 A
|
||||
b11111000 B
|
||||
#14000000
|
||||
b1110 A
|
||||
b11111100 B
|
||||
#15000000
|
||||
b1111 A
|
||||
b11111110 B
|
||||
#16000000
|
||||
b10000 A
|
||||
b0 B
|
||||
#17000000
|
||||
b0 1
|
||||
b0 A
|
||||
1E
|
||||
b0 F
|
||||
#18000000
|
||||
b11111111 1
|
||||
b1 2
|
||||
b11111111 B
|
||||
b1 C
|
||||
b1 D
|
||||
b11111111 F
|
||||
b1 G
|
||||
#19000000
|
||||
b1111111 2
|
||||
b100 3
|
||||
b10 C
|
||||
b100 D
|
||||
b1111111 G
|
||||
b100 H
|
||||
#20000000
|
||||
b111111 3
|
||||
b1001 4
|
||||
b11 C
|
||||
b1001 D
|
||||
b111111 H
|
||||
b1001 I
|
||||
#21000000
|
||||
b11111 4
|
||||
b10000 5
|
||||
b100 C
|
||||
b10000 D
|
||||
b11111 I
|
||||
b10000 J
|
||||
#22000000
|
||||
b1111 5
|
||||
b11001 6
|
||||
b101 C
|
||||
b11001 D
|
||||
b1111 J
|
||||
b11001 K
|
||||
#23000000
|
||||
b111 6
|
||||
b100100 7
|
||||
b110 C
|
||||
b100100 D
|
||||
b111 K
|
||||
b100100 L
|
||||
#24000000
|
||||
b11 7
|
||||
b110001 8
|
||||
b111 C
|
||||
b110001 D
|
||||
b11 L
|
||||
b110001 M
|
||||
#25000000
|
||||
b1 8
|
||||
b1000000 9
|
||||
b1000 C
|
||||
b1000000 D
|
||||
b1 M
|
||||
b1000000 N
|
||||
#26000000
|
||||
b0 9
|
||||
b1010001 :
|
||||
b1001 C
|
||||
b1010001 D
|
||||
b0 N
|
||||
b1010001 O
|
||||
#27000000
|
||||
b10000000 :
|
||||
b1100100 ;
|
||||
b1010 C
|
||||
b1100100 D
|
||||
b10000000 O
|
||||
b1100100 P
|
||||
#28000000
|
||||
b11000000 ;
|
||||
b1111001 <
|
||||
b1011 C
|
||||
b1111001 D
|
||||
b11000000 P
|
||||
b1111001 Q
|
||||
#29000000
|
||||
b11100000 <
|
||||
b10010000 =
|
||||
b1100 C
|
||||
b10010000 D
|
||||
b11100000 Q
|
||||
b10010000 R
|
||||
#30000000
|
||||
b11110000 =
|
||||
b10101001 >
|
||||
b1101 C
|
||||
b10101001 D
|
||||
b11110000 R
|
||||
b10101001 S
|
||||
#31000000
|
||||
b11111000 >
|
||||
b11000100 ?
|
||||
b1110 C
|
||||
b11000100 D
|
||||
b11111000 S
|
||||
b11000100 T
|
||||
#32000000
|
||||
b11111100 ?
|
||||
b11100001 @
|
||||
b1111 C
|
||||
b11100001 D
|
||||
b11111100 T
|
||||
b11100001 U
|
||||
#33000000
|
||||
b11111110 @
|
||||
b10000 C
|
||||
b0 D
|
||||
b11111110 U
|
||||
#34000000
|
||||
|
|
@@ -1,183 +0,0 @@
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x0,
|
||||
},
|
||||
1: Const {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
value: 0x1,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:4:1
|
||||
2: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(2), // (0x1) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
3: BranchIfZero {
|
||||
target: 5,
|
||||
value: StatePartIndex<BigSlots>(0), // (0x1) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::i", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:6:1
|
||||
4: Copy {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x0) SlotDebugData { name: "InstantiatedModule(conditional_assignment_last: conditional_assignment_last).conditional_assignment_last::w", ty: Bool },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x0) SlotDebugData { name: "", ty: Bool },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
5: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 5,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
}.i,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::conditional_assignment_last,
|
||||
instantiated: Module {
|
||||
name: conditional_assignment_last,
|
||||
..
|
||||
},
|
||||
}.i,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "conditional_assignment_last",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "i",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(0),
|
||||
name: "i",
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Source,
|
||||
},
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(1),
|
||||
name: "w",
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x1,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 2 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
|
||||
|
|
@@ -1,14 +0,0 @@
$timescale 1 ps $end
$scope module conditional_assignment_last $end
$var wire 1 ! i $end
$var wire 1 " w $end
$upscope $end
$enddefinitions $end
$dumpvars
0!
1"
$end
#1000000
1!
0"
#2000000
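This short VCD and the simulator state dump before it both come from the `conditional_assignment_last` test: `w` is first connected to constant 1 and a later conditional connect overrides it with 0 whenever `i` is high, so the last connect wins (the trace shows i=0, w=1 at time 0 and i=1, w=0 at 1 us). A reconstructed sketch of that module, inferred from the instruction dump above, so details are approximate and not taken verbatim from the source:

#[hdl_module(outline_generated)]
fn conditional_assignment_last() {
    #[hdl]
    let i: Bool = m.input();
    #[hdl]
    let w: Bool = wire();
    // Unconditional connect first, then a conditional one that takes priority when i is set.
    connect(w, true);
    #[hdl]
    if i {
        connect(w, false);
    }
}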
@@ -22,12 +22,6 @@ Simulation {
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
|
|
@@ -66,9 +60,6 @@ Simulation {
5,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::connect_const,
|
||||
|
|
@@ -77,30 +68,45 @@ Simulation {
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
}.o: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: UInt<8>,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const: connect_const).connect_const::o",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
}.o,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const,
|
||||
instantiated: Module {
|
||||
name: connect_const,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "connect_const",
|
||||
children: [
|
||||
|
|
|
|||
|
|
@@ -34,12 +34,6 @@ Simulation {
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
|
|
@@ -95,9 +89,6 @@ Simulation {
1,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
|
|
@@ -106,44 +97,79 @@ Simulation {
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.bit_out: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bool,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::bit_out",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
}.reset_out,
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.bit_out,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.bit_out,
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.reset_out,
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::connect_const_reset,
|
||||
instantiated: Module {
|
||||
name: connect_const_reset,
|
||||
..
|
||||
},
|
||||
}.reset_out: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: AsyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(connect_const_reset: connect_const_reset).connect_const_reset::reset_out",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "connect_const_reset",
|
||||
children: [
|
||||
|
|
|
|||
|
|
@@ -71,12 +71,6 @@ Simulation {
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
|
|
@@ -201,9 +195,6 @@ Simulation {
4,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::counter,
|
||||
|
|
@@ -212,58 +203,213 @@ Simulation {
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: AsyncReset,
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
}.count,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
body: Bundle {
|
||||
fields: [
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: AsyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 2 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: AsyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: AsyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: UInt<4>,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 2, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "counter",
|
||||
children: [
|
||||
|
|
|
|||
|
|
@@ -67,12 +67,6 @@ Simulation {
|
|||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
|
|
@@ -182,9 +176,6 @@ Simulation {
|
|||
4,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::counter,
|
||||
|
|
@@ -193,58 +184,213 @@ Simulation {
|
|||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
uninitialized_inputs: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Bundle {
|
||||
/* offset = 0 */
|
||||
clk: Clock,
|
||||
/* offset = 1 */
|
||||
rst: SyncReset,
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::cd.rst",
|
||||
ty: SyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
}.count,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
body: Bundle {
|
||||
fields: [
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
CompiledBundleField {
|
||||
offset: TypeIndex {
|
||||
small_slots: StatePartIndex<SmallSlots>(0),
|
||||
big_slots: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
ty: CompiledTypeLayout {
|
||||
ty: SyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: SyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}.cd,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst,
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 2 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.clk: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 0, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.cd.rst: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: SyncReset,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: SyncReset,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
Instance {
|
||||
name: <simulator>::counter,
|
||||
instantiated: Module {
|
||||
name: counter,
|
||||
..
|
||||
},
|
||||
}.count: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: UInt<4>,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(counter: counter).counter::count",
|
||||
ty: UInt<4>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 2, len: 1 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
made_initial_step: true,
|
||||
needs_settle: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "counter",
|
||||
children: [
|
||||
|
|
|
|||
|
|
@@ -1,166 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 4,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Const {
|
||||
dest: StatePartIndex<BigSlots>(3), // (0x6) SlotDebugData { name: "", ty: UInt<8> },
|
||||
value: 0x6,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:5:1
|
||||
1: Copy {
|
||||
dest: StatePartIndex<BigSlots>(2), // (0x6) SlotDebugData { name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w", ty: UInt<8> },
|
||||
src: StatePartIndex<BigSlots>(3), // (0x6) SlotDebugData { name: "", ty: UInt<8> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
2: Const {
|
||||
dest: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
value: 0x5,
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:3:1
|
||||
3: Copy {
|
||||
dest: StatePartIndex<BigSlots>(0), // (0x5) SlotDebugData { name: "InstantiatedModule(duplicate_names: duplicate_names).duplicate_names::w", ty: UInt<8> },
|
||||
src: StatePartIndex<BigSlots>(1), // (0x5) SlotDebugData { name: "", ty: UInt<8> },
|
||||
},
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
4: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 4,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
5,
|
||||
5,
|
||||
6,
|
||||
6,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::duplicate_names,
|
||||
instantiated: Module {
|
||||
name: duplicate_names,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "duplicate_names",
|
||||
children: [
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(0),
|
||||
name: "w",
|
||||
ty: UInt<8>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
},
|
||||
TraceWire {
|
||||
name: "w",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(1),
|
||||
name: "w",
|
||||
ty: UInt<8>,
|
||||
flow: Duplex,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x05,
|
||||
last_state: 0x05,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x06,
|
||||
last_state: 0x06,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 1 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
@@ -1,11 +0,0 @@
$timescale 1 ps $end
$scope module duplicate_names $end
$var wire 8 ! w $end
$var wire 8 " w_2 $end
$upscope $end
$enddefinitions $end
$dumpvars
b101 !
b110 "
$end
#1000000
File diff suppressed because it is too large
@@ -16,25 +16,18 @@ $var wire 1 ) \0 $end
$var wire 1 * \1 $end
$upscope $end
$upscope $end
$scope struct b2_out $end
$var string 1 + \$tag $end
$scope struct HdlSome $end
$var wire 1 , \0 $end
$var wire 1 - \1 $end
$upscope $end
$upscope $end
$scope struct the_reg $end
$var string 1 . \$tag $end
$var string 1 + \$tag $end
$scope struct B $end
$var reg 1 / \0 $end
$var reg 1 0 \1 $end
$var reg 1 , \0 $end
$var reg 1 - \1 $end
$upscope $end
$scope struct C $end
$scope struct a $end
$var reg 1 1 \[0] $end
$var reg 1 2 \[1] $end
$var reg 1 . \[0] $end
$var reg 1 / \[1] $end
$upscope $end
$var reg 2 3 b $end
$var reg 2 0 b $end
$upscope $end
$upscope $end
$upscope $end
@@ -50,15 +43,12 @@ b0 '
sHdlNone\x20(0) (
0)
0*
sHdlNone\x20(0) +
sA\x20(0) +
0,
0-
sA\x20(0) .
0.
0/
00
01
02
b0 3
b0 0
$end
#1000000
1!
@@ -76,8 +66,7 @@ b1 $
1!
b1 &
sHdlSome\x20(1) (
sHdlSome\x20(1) +
sB\x20(1) .
sB\x20(1) +
#6000000
0#
b0 $
@@ -96,10 +85,8 @@ b11 '
1*
1,
1-
1.
1/
10
11
12
#10000000
0!
#11000000
@@ -114,11 +101,8 @@ b1111 '
sHdlNone\x20(0) (
0)
0*
sHdlNone\x20(0) +
0,
0-
sC\x20(2) .
b11 3
sC\x20(2) +
b11 0
#14000000
0!
#15000000
@@ -1,253 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 2,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(extern_module: extern_module).extern_module::i",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(extern_module: extern_module).extern_module::o",
|
||||
ty: Bool,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 0,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
1,
|
||||
1,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::extern_module,
|
||||
instantiated: Module {
|
||||
name: extern_module,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::extern_module,
|
||||
instantiated: Module {
|
||||
name: extern_module,
|
||||
..
|
||||
},
|
||||
}.i,
|
||||
Instance {
|
||||
name: <simulator>::extern_module,
|
||||
instantiated: Module {
|
||||
name: extern_module,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::extern_module,
|
||||
instantiated: Module {
|
||||
name: extern_module,
|
||||
..
|
||||
},
|
||||
}.i,
|
||||
Instance {
|
||||
name: <simulator>::extern_module,
|
||||
instantiated: Module {
|
||||
name: extern_module,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [
|
||||
SimulationExternModuleState {
|
||||
module_state: SimulationModuleState {
|
||||
base_targets: [
|
||||
ModuleIO {
|
||||
name: extern_module::i,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module::o,
|
||||
is_input: false,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
ModuleIO {
|
||||
name: extern_module::i,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module::o,
|
||||
is_input: false,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
sim: ExternModuleSimulation {
|
||||
generator: SimGeneratorFn {
|
||||
args: (
|
||||
ModuleIO {
|
||||
name: extern_module::i,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module::o,
|
||||
is_input: false,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
),
|
||||
f: ...,
|
||||
},
|
||||
sim_io_to_generator_map: {
|
||||
ModuleIO {
|
||||
name: extern_module::i,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
}: ModuleIO {
|
||||
name: extern_module::i,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module::o,
|
||||
is_input: false,
|
||||
ty: Bool,
|
||||
..
|
||||
}: ModuleIO {
|
||||
name: extern_module::o,
|
||||
is_input: false,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
},
|
||||
source_location: SourceLocation(
|
||||
module-XXXXXXXXXX.rs:4:1,
|
||||
),
|
||||
},
|
||||
running_generator: Some(
|
||||
...,
|
||||
),
|
||||
wait_targets: {
|
||||
Instant(
|
||||
20.500000000000 μs,
|
||||
),
|
||||
},
|
||||
},
|
||||
],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "extern_module",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "i",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(0),
|
||||
name: "i",
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "o",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(1),
|
||||
name: "o",
|
||||
flow: Sink,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Sink,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 20 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
@@ -1,51 +0,0 @@
$timescale 1 ps $end
$scope module extern_module $end
$var wire 1 ! i $end
$var wire 1 " o $end
$upscope $end
$enddefinitions $end
$dumpvars
0!
1"
$end
#500000
#1500000
0"
#2500000
1"
#3500000
0"
#4500000
1"
#5500000
0"
#6500000
1"
#7500000
0"
#8500000
1"
#9500000
0"
#10000000
1!
#10500000
#11500000
1"
#12500000
0"
#13500000
1"
#14500000
0"
#15500000
1"
#16500000
0"
#17500000
1"
#18500000
0"
#19500000
1"
#20000000
@@ -1,362 +0,0 @@
|
|||
Simulation {
|
||||
state: State {
|
||||
insns: Insns {
|
||||
state_layout: StateLayout {
|
||||
ty: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 3,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(extern_module2: extern_module2).extern_module2::en",
|
||||
ty: Bool,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(extern_module2: extern_module2).extern_module2::clk",
|
||||
ty: Clock,
|
||||
},
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(extern_module2: extern_module2).extern_module2::o",
|
||||
ty: UInt<8>,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
memories: StatePartLayout<Memories> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
insns: [
|
||||
// at: module-XXXXXXXXXX.rs:1:1
|
||||
0: Return,
|
||||
],
|
||||
..
|
||||
},
|
||||
pc: 0,
|
||||
memory_write_log: [],
|
||||
memories: StatePart {
|
||||
value: [],
|
||||
},
|
||||
small_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
big_slots: StatePart {
|
||||
value: [
|
||||
0,
|
||||
1,
|
||||
101,
|
||||
],
|
||||
},
|
||||
sim_only_slots: StatePart {
|
||||
value: [],
|
||||
},
|
||||
},
|
||||
io: Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
},
|
||||
main_module: SimulationModuleState {
|
||||
base_targets: [
|
||||
Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
}.en,
|
||||
Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
}.clk,
|
||||
Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
}.clk,
|
||||
Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
}.en,
|
||||
Instance {
|
||||
name: <simulator>::extern_module2,
|
||||
instantiated: Module {
|
||||
name: extern_module2,
|
||||
..
|
||||
},
|
||||
}.o,
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
extern_modules: [
|
||||
SimulationExternModuleState {
|
||||
module_state: SimulationModuleState {
|
||||
base_targets: [
|
||||
ModuleIO {
|
||||
name: extern_module2::en,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::clk,
|
||||
is_input: true,
|
||||
ty: Clock,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::o,
|
||||
is_input: false,
|
||||
ty: UInt<8>,
|
||||
..
|
||||
},
|
||||
],
|
||||
uninitialized_ios: {},
|
||||
io_targets: {
|
||||
ModuleIO {
|
||||
name: extern_module2::clk,
|
||||
is_input: true,
|
||||
ty: Clock,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::en,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::o,
|
||||
is_input: false,
|
||||
ty: UInt<8>,
|
||||
..
|
||||
},
|
||||
},
|
||||
did_initial_settle: true,
|
||||
},
|
||||
sim: ExternModuleSimulation {
|
||||
generator: SimGeneratorFn {
|
||||
args: (
|
||||
ModuleIO {
|
||||
name: extern_module2::en,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::clk,
|
||||
is_input: true,
|
||||
ty: Clock,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::o,
|
||||
is_input: false,
|
||||
ty: UInt<8>,
|
||||
..
|
||||
},
|
||||
),
|
||||
f: ...,
|
||||
},
|
||||
sim_io_to_generator_map: {
|
||||
ModuleIO {
|
||||
name: extern_module2::clk,
|
||||
is_input: true,
|
||||
ty: Clock,
|
||||
..
|
||||
}: ModuleIO {
|
||||
name: extern_module2::clk,
|
||||
is_input: true,
|
||||
ty: Clock,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::en,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
}: ModuleIO {
|
||||
name: extern_module2::en,
|
||||
is_input: true,
|
||||
ty: Bool,
|
||||
..
|
||||
},
|
||||
ModuleIO {
|
||||
name: extern_module2::o,
|
||||
is_input: false,
|
||||
ty: UInt<8>,
|
||||
..
|
||||
}: ModuleIO {
|
||||
name: extern_module2::o,
|
||||
is_input: false,
|
||||
ty: UInt<8>,
|
||||
..
|
||||
},
|
||||
},
|
||||
source_location: SourceLocation(
|
||||
module-XXXXXXXXXX.rs:5:1,
|
||||
),
|
||||
},
|
||||
running_generator: Some(
|
||||
...,
|
||||
),
|
||||
wait_targets: {
|
||||
Change {
|
||||
key: CompiledValue {
|
||||
layout: CompiledTypeLayout {
|
||||
ty: Clock,
|
||||
layout: TypeLayout {
|
||||
small_slots: StatePartLayout<SmallSlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
..
|
||||
},
|
||||
big_slots: StatePartLayout<BigSlots> {
|
||||
len: 1,
|
||||
debug_data: [
|
||||
SlotDebugData {
|
||||
name: "InstantiatedModule(extern_module2: extern_module2).extern_module2::clk",
|
||||
ty: Clock,
|
||||
},
|
||||
],
|
||||
..
|
||||
},
|
||||
sim_only_slots: StatePartLayout<SimOnlySlots> {
|
||||
len: 0,
|
||||
debug_data: [],
|
||||
layout_data: [],
|
||||
..
|
||||
},
|
||||
},
|
||||
body: Scalar,
|
||||
},
|
||||
range: TypeIndexRange {
|
||||
small_slots: StatePartIndexRange<SmallSlots> { start: 0, len: 0 },
|
||||
big_slots: StatePartIndexRange<BigSlots> { start: 1, len: 1 },
|
||||
sim_only_slots: StatePartIndexRange<SimOnlySlots> { start: 0, len: 0 },
|
||||
},
|
||||
write: None,
|
||||
},
|
||||
value: SimValue {
|
||||
ty: Clock,
|
||||
value: OpaqueSimValue {
|
||||
bits: 0x1_u1,
|
||||
sim_only_values: [],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
state_ready_to_run: false,
|
||||
trace_decls: TraceModule {
|
||||
name: "extern_module2",
|
||||
children: [
|
||||
TraceModuleIO {
|
||||
name: "en",
|
||||
child: TraceBool {
|
||||
location: TraceScalarId(0),
|
||||
name: "en",
|
||||
flow: Source,
|
||||
},
|
||||
ty: Bool,
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "clk",
|
||||
child: TraceClock {
|
||||
location: TraceScalarId(1),
|
||||
name: "clk",
|
||||
flow: Source,
|
||||
},
|
||||
ty: Clock,
|
||||
flow: Source,
|
||||
},
|
||||
TraceModuleIO {
|
||||
name: "o",
|
||||
child: TraceUInt {
|
||||
location: TraceScalarId(2),
|
||||
name: "o",
|
||||
ty: UInt<8>,
|
||||
flow: Sink,
|
||||
},
|
||||
ty: UInt<8>,
|
||||
flow: Sink,
|
||||
},
|
||||
],
|
||||
},
|
||||
traces: [
|
||||
SimTrace {
|
||||
id: TraceScalarId(0),
|
||||
kind: BigBool {
|
||||
index: StatePartIndex<BigSlots>(0),
|
||||
},
|
||||
state: 0x0,
|
||||
last_state: 0x0,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(1),
|
||||
kind: BigClock {
|
||||
index: StatePartIndex<BigSlots>(1),
|
||||
},
|
||||
state: 0x1,
|
||||
last_state: 0x1,
|
||||
},
|
||||
SimTrace {
|
||||
id: TraceScalarId(2),
|
||||
kind: BigUInt {
|
||||
index: StatePartIndex<BigSlots>(2),
|
||||
ty: UInt<8>,
|
||||
},
|
||||
state: 0x65,
|
||||
last_state: 0x65,
|
||||
},
|
||||
],
|
||||
trace_memories: {},
|
||||
trace_writers: [
|
||||
Running(
|
||||
VcdWriter {
|
||||
finished_init: true,
|
||||
timescale: 1 ps,
|
||||
..
|
||||
},
|
||||
),
|
||||
],
|
||||
instant: 60 μs,
|
||||
clocks_triggered: [],
|
||||
..
|
||||
}
@@ -1,150 +0,0 @@
$timescale 1 ps $end
$scope module extern_module2 $end
$var wire 1 ! en $end
$var wire 1 " clk $end
$var wire 8 # o $end
$upscope $end
$enddefinitions $end
$dumpvars
1!
0"
b1001000 #
$end
#1000000
1"
b1100101 #
#2000000
0"
#3000000
1"
b1101100 #
#4000000
0"
#5000000
1"
#6000000
0"
#7000000
1"
b1101111 #
#8000000
0"
#9000000
1"
b101100 #
#10000000
0!
0"
#11000000
1"
#12000000
0"
#13000000
1"
#14000000
0"
#15000000
1"
#16000000
0"
#17000000
1"
#18000000
0"
#19000000
1"
#20000000
1!
0"
#21000000
1"
b100000 #
#22000000
0"
#23000000
1"
b1010111 #
#24000000
0"
#25000000
1"
b1101111 #
#26000000
0"
#27000000
1"
b1110010 #
#28000000
0"
#29000000
1"
b1101100 #
#30000000
0!
0"
#31000000
1"
#32000000
0"
#33000000
1"
#34000000
0"
#35000000
1"
#36000000
0"
#37000000
1"
#38000000
0"
#39000000
1"
#40000000
1!
0"
#41000000
1"
b1100100 #
#42000000
0"
#43000000
1"
b100001 #
#44000000
0"
#45000000
1"
b1010 #
#46000000
0"
#47000000
1"
b1001000 #
#48000000
0"
#49000000
1"
b1100101 #
#50000000
0!
0"
#51000000
1"
#52000000
0"
#53000000
1"
#54000000
0"
#55000000
1"
#56000000
0"
#57000000
1"
#58000000
0"
#59000000
1"
#60000000
Some files were not shown because too many files have changed in this diff