WIP: FPGA support and Arty A7 100T #38
39 changed files with 8480 additions and 1291 deletions
.forgejo/workflows/deps.yml — deleted (77 lines)
@ -1,77 +0,0 @@
-# SPDX-License-Identifier: LGPL-3.0-or-later
-# See Notices.txt for copyright information
-on:
-  workflow_call:
-    outputs:
-      cache-primary-key:
-        value: ${{ jobs.deps.outputs.cache-primary-key }}
-
-jobs:
-  deps:
-    runs-on: debian-12
-    outputs:
-      cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }}
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-      - uses: actions/cache/restore@v3
-        id: restore-deps
-        with:
-          path: deps
-          key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }}
-          lookup-only: true
-      - name: Install Apt packages
-        if: steps.restore-deps.outputs.cache-hit != 'true'
-        run: |
-          apt-get update -qq
-          apt-get install -qq \
-            bison \
-            build-essential \
-            ccache \
-            clang \
-            cvc5 \
-            flex \
-            gawk \
-            g++ \
-            git \
-            libboost-filesystem-dev \
-            libboost-python-dev \
-            libboost-system-dev \
-            libffi-dev \
-            libreadline-dev \
-            lld \
-            pkg-config \
-            python3 \
-            python3-click \
-            tcl-dev \
-            zlib1g-dev
-      - name: Install Firtool
-        if: steps.restore-deps.outputs.cache-hit != 'true'
-        run: |
-          mkdir -p deps
-          wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz
-          sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz'
-          tar -C deps -xvaf deps/firrtl.tar.gz
-          rm -rf deps/firtool
-          mv deps/firtool-1.86.0 deps/firtool
-      - name: Get SymbiYosys
-        if: steps.restore-deps.outputs.cache-hit != 'true'
-        run: |
-          git clone --depth=1 --branch=yosys-0.45 https://git.libre-chip.org/mirrors/sby deps/sby
-      - name: Build Z3
-        if: steps.restore-deps.outputs.cache-hit != 'true'
-        run: |
-          git clone --depth=1 --recursive --branch=z3-4.13.3 https://git.libre-chip.org/mirrors/z3 deps/z3
-          (cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local)
-          make -C deps/z3/build -j"$(nproc)"
-      - name: Build Yosys
-        if: steps.restore-deps.outputs.cache-hit != 'true'
-        run: |
-          git clone --depth=1 --recursive --branch=0.45 https://git.libre-chip.org/mirrors/yosys deps/yosys
-          make -C deps/yosys -j"$(nproc)"
-      - uses: actions/cache/save@v3
-        if: steps.restore-deps.outputs.cache-hit != 'true'
-        with:
-          path: deps
-          key: ${{ steps.restore-deps.outputs.cache-primary-key }}
CI workflow (test job)
@ -3,57 +3,16 @@
 on: [push, pull_request]
 
 jobs:
-  deps:
-    runs-on: debian-12
-    uses: ./.forgejo/workflows/deps.yml
   test:
     runs-on: debian-12
-    needs: deps
+    container:
+      image: git.libre-chip.org/libre-chip/fayalite-deps:latest
     steps:
       - uses: actions/checkout@v3
         with:
          fetch-depth: 0
       - run: |
           scripts/check-copyright.sh
-      - run: |
-          apt-get update -qq
-          apt-get install -qq \
-            bison \
-            build-essential \
-            ccache \
-            clang \
-            cvc5 \
-            flex \
-            gawk \
-            git \
-            libboost-filesystem-dev \
-            libboost-python-dev \
-            libboost-system-dev \
-            libffi-dev \
-            libreadline-dev \
-            lld \
-            pkg-config \
-            python3 \
-            python3-click \
-            tcl-dev \
-            z3 \
-            zlib1g-dev
-      - run: |
-          curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.89.0
-          source "$HOME/.cargo/env"
-          rustup component add rust-src
-          echo "$PATH" >> "$GITHUB_PATH"
-      - uses: actions/cache/restore@v3
-        with:
-          path: deps
-          key: ${{ needs.deps.outputs.cache-primary-key }}
-          fail-on-cache-miss: true
-      - run: |
-          make -C deps/z3/build install
-          make -C deps/sby install
-          make -C deps/yosys install
-          export PATH="$(realpath deps/firtool/bin):$PATH"
-          echo "$PATH" >> "$GITHUB_PATH"
       - uses: https://git.libre-chip.org/mirrors/rust-cache@v2
         with:
           save-if: ${{ github.ref == 'refs/heads/master' }}

@ -62,3 +21,4 @@ jobs:
       - run: cargo test --doc --features=unstable-doc
       - run: cargo doc --features=unstable-doc
       - run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
+      - run: cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --device xc7a100ticsg324-1L -o target/blinky-out --clock-frequency=$((1000*1000*100))
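The added step exercises the new yosys-nextpnr-xray FPGA flow end to end on the blinky example: $((1000*1000*100)) works out to 100,000,000 Hz, i.e. a 100 MHz clock (the Arty A7's on-board oscillator frequency), and xc7a100ticsg324-1L selects an Artix-7 100T in the CSG324 package, the FPGA family and size used on the Arty A7 100T board named in the PR title.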
Cargo.lock (generated) — 107 lines changed

The lock-file format moves from version = 3 to version = 4. Dependency changes:

Updated:
  anstyle       1.0.7   -> 1.0.13
  clap          4.5.9   -> 4.5.48
  clap_builder  4.5.9   -> 4.5.48
  clap_derive   4.5.8   -> 4.5.47
  clap_lex      0.7.1   -> 0.7.5
  getrandom     0.2.14  -> 0.3.3    (now also depends on r-efi)
  jobslot       0.2.19  -> 0.2.23   (windows-sys 0.59.0 -> 0.61.2)
  libc          0.2.153 -> 0.2.176
  wasi          0.11.0+wasi-snapshot-preview1 -> 0.14.7+wasi-0.2.4  (now depends on wasip2)
  windows-sys   0.59.0  -> 0.61.2   (windows-targets -> windows-link)

Added: base64 0.22.1, clap_complete 4.5.58, r-efi 5.3.0, wasip2 1.0.1+wasi-0.2.4 (depends on wit-bindgen), windows-link 0.2.1, wit-bindgen 0.46.0

Removed: os_pipe 1.2.1

fayalite 0.3.0 dependencies: + base64, + clap_complete, - os_pipe
Workspace Cargo.toml
@ -18,17 +18,18 @@ fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
 fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
 fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
 base16ct = "0.2.0"
+base64 = "0.22.1"
 bitvec = { version = "1.0.1", features = ["serde"] }
 blake3 = { version = "1.5.4", features = ["serde"] }
 clap = { version = "4.5.9", features = ["derive", "env", "string"] }
+clap_complete = "4.5.58"
 ctor = "0.2.8"
 eyre = "0.6.12"
 hashbrown = "0.15.2"
 indexmap = { version = "2.5.0", features = ["serde"] }
-jobslot = "0.2.19"
+jobslot = "0.2.23"
 num-bigint = "0.4.6"
 num-traits = "0.2.16"
-os_pipe = "1.2.1"
 petgraph = "0.8.1"
 prettyplease = "0.2.20"
 proc-macro2 = "1.0.83"
fayalite crate Cargo.toml
@ -14,9 +14,11 @@ rust-version.workspace = true
 version.workspace = true
 
 [dependencies]
+base64.workspace = true
 bitvec.workspace = true
 blake3.workspace = true
 clap.workspace = true
+clap_complete.workspace = true
 ctor.workspace = true
 eyre.workspace = true
 fayalite-proc-macros.workspace = true
@ -24,7 +26,6 @@ hashbrown.workspace = true
 jobslot.workspace = true
 num-bigint.workspace = true
 num-traits.workspace = true
-os_pipe.workspace = true
 petgraph.workspace = true
 serde_json.workspace = true
 serde.workspace = true
The blinky example
@ -1,7 +1,9 @@
 // SPDX-License-Identifier: LGPL-3.0-or-later
 // See Notices.txt for copyright information
-use clap::Parser;
-use fayalite::{cli, prelude::*};
+use fayalite::{
+    build::{ToArgs, WriteArgs},
+    prelude::*,
+};
 
 #[hdl_module]
 fn blinky(clock_frequency: u64) {
@ -32,16 +34,22 @@ fn blinky(clock_frequency: u64) {
     connect(led, output_reg);
 }
 
-#[derive(Parser)]
-struct Cli {
+#[derive(clap::Args, Clone, PartialEq, Eq, Hash, Debug)]
+struct ExtraArgs {
     /// clock frequency in hertz
     #[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
     clock_frequency: u64,
-    #[command(subcommand)]
-    cli: cli::Cli,
 }
 
-fn main() -> cli::Result {
-    let cli = Cli::parse();
-    cli.cli.run(blinky(cli.clock_frequency))
+impl ToArgs for ExtraArgs {
+    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
+        let Self { clock_frequency } = self;
+        args.write_arg(format!("--clock-frequency={clock_frequency}"));
+    }
+}
+
+fn main() {
+    BuildCli::main(|_cli, ExtraArgs { clock_frequency }| {
+        Ok(JobParams::new(blinky(clock_frequency), "blinky"))
+    });
 }
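A rough idea of what ToArgs/WriteArgs buy the example: the extra arguments both parse from the command line (via clap::Args) and can be re-serialized so the build system can record or reproduce the equivalent command line. A minimal self-contained sketch of that round trip, using local stand-ins rather than fayalite's actual traits:

// Simplified stand-ins for fayalite's ToArgs/WriteArgs, for illustration only.
trait WriteArgs {
    fn write_arg(&mut self, arg: String);
}

impl WriteArgs for Vec<String> {
    fn write_arg(&mut self, arg: String) {
        self.push(arg);
    }
}

struct ExtraArgs {
    clock_frequency: u64,
}

impl ExtraArgs {
    // Serialize the parsed arguments back into command-line form.
    fn to_args(&self, args: &mut impl WriteArgs) {
        args.write_arg(format!("--clock-frequency={}", self.clock_frequency));
    }
}

fn main() {
    let mut argv = Vec::new();
    ExtraArgs { clock_frequency: 100_000_000 }.to_args(&mut argv);
    assert_eq!(argv, ["--clock-frequency=100000000"]);
}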
Annotations module (crate::annotations)
@ -145,52 +145,73 @@ pub struct DocStringAnnotation {
 
 macro_rules! make_annotation_enum {
     (
+        #[$non_exhaustive:ident]
         $(#[$meta:meta])*
-        $vis:vis enum $Annotation:ident {
-            $($Variant:ident($T:ident),)*
+        $vis:vis enum $AnnotationEnum:ident {
+            $($Variant:ident($T:ty),)*
         }
     ) => {
+        crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive);
+
+        #[$non_exhaustive]
         $(#[$meta])*
-        $vis enum $Annotation {
+        #[derive(Clone, PartialEq, Eq, Hash)]
+        $vis enum $AnnotationEnum {
             $($Variant($T),)*
         }
 
-        $(impl IntoAnnotations for $T {
-            type IntoAnnotations = [$Annotation; 1];
+        impl std::fmt::Debug for $AnnotationEnum {
+            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+                match self {
+                    $(Self::$Variant(v) => v.fmt(f),)*
+                }
+            }
+        }
+
+        $(impl From<$T> for crate::annotations::Annotation {
+            fn from(v: $T) -> Self {
+                $AnnotationEnum::$Variant(v).into()
+            }
+        }
+
+        impl crate::annotations::IntoAnnotations for $T {
+            type IntoAnnotations = [crate::annotations::Annotation; 1];
 
             fn into_annotations(self) -> Self::IntoAnnotations {
-                [$Annotation::$Variant(self)]
+                [self.into()]
             }
         }
 
-        impl IntoAnnotations for &'_ $T {
-            type IntoAnnotations = [$Annotation; 1];
+        impl crate::annotations::IntoAnnotations for &'_ $T {
+            type IntoAnnotations = [crate::annotations::Annotation; 1];
 
             fn into_annotations(self) -> Self::IntoAnnotations {
-                [$Annotation::$Variant(*self)]
+                [crate::annotations::Annotation::from(self.clone())]
             }
         }
 
-        impl IntoAnnotations for &'_ mut $T {
-            type IntoAnnotations = [$Annotation; 1];
+        impl crate::annotations::IntoAnnotations for &'_ mut $T {
+            type IntoAnnotations = [crate::annotations::Annotation; 1];
 
             fn into_annotations(self) -> Self::IntoAnnotations {
-                [$Annotation::$Variant(*self)]
+                [crate::annotations::Annotation::from(self.clone())]
             }
         }
 
-        impl IntoAnnotations for Box<$T> {
-            type IntoAnnotations = [$Annotation; 1];
+        impl crate::annotations::IntoAnnotations for Box<$T> {
+            type IntoAnnotations = [crate::annotations::Annotation; 1];
 
             fn into_annotations(self) -> Self::IntoAnnotations {
-                [$Annotation::$Variant(*self)]
+                [crate::annotations::Annotation::from(*self)]
             }
         })*
     };
+    (@require_non_exhaustive non_exhaustive) => {};
 }
 
+pub(crate) use make_annotation_enum;
+
 make_annotation_enum! {
-    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
     #[non_exhaustive]
     pub enum Annotation {
         DontTouch(DontTouchAnnotation),
@ -199,6 +220,7 @@ make_annotation_enum! {
         BlackBoxPath(BlackBoxPathAnnotation),
         DocString(DocStringAnnotation),
         CustomFirrtl(CustomFirrtlAnnotation),
+        Xilinx(crate::build::vendor::xilinx::XilinxAnnotation),
     }
 }
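The net effect of the reworked macro is that each annotation type converts into the top-level Annotation enum through a plain From impl, and Debug on the enum forwards to the wrapped type. A self-contained sketch of that expansion shape, with toy types rather than the real fayalite annotation types:

use std::fmt;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct DontTouch;

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
struct DocString(String);

// Shape of what make_annotation_enum! expands to for a two-variant enum.
#[non_exhaustive]
#[derive(Clone, PartialEq, Eq, Hash)]
enum Annotation {
    DontTouch(DontTouch),
    DocString(DocString),
}

impl fmt::Debug for Annotation {
    // Debug forwards to the wrapped annotation so output stays compact.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::DontTouch(v) => v.fmt(f),
            Self::DocString(v) => v.fmt(f),
        }
    }
}

impl From<DontTouch> for Annotation {
    fn from(v: DontTouch) -> Self {
        Annotation::DontTouch(v)
    }
}

fn main() {
    let a: Annotation = DontTouch.into();
    assert_eq!(format!("{a:?}"), "DontTouch");
}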
crates/fayalite/src/build.rs — new file, 2384 lines (diff not shown: too large)
crates/fayalite/src/build/external.rs — new file, 1138 lines (diff not shown: too large)
crates/fayalite/src/build/firrtl.rs — new file, 125 lines
@ -0,0 +1,125 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

use crate::{
    build::{
        BaseJob, BaseJobKind, CommandParams, DynJobKind, JobAndDependencies,
        JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
        ToArgs, WriteArgs,
    },
    firrtl::{ExportOptions, FileBackend},
    intern::{Intern, InternSlice, Interned},
    util::job_server::AcquiredJob,
};
use clap::Args;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};

#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
pub struct FirrtlJobKind;

#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
#[group(id = "Firrtl")]
#[non_exhaustive]
pub struct FirrtlArgs {
    #[command(flatten)]
    pub export_options: ExportOptions,
}

impl ToArgs for FirrtlArgs {
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        let Self { export_options } = self;
        export_options.to_args(args);
    }
}

#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Firrtl {
    base: BaseJob,
    export_options: ExportOptions,
}

impl Firrtl {
    fn make_firrtl_file_backend(&self) -> FileBackend {
        FileBackend {
            dir_path: PathBuf::from(&*self.base.output_dir()),
            top_fir_file_stem: Some(self.base.file_stem().into()),
            circuit_name: None,
        }
    }
    pub fn firrtl_file(&self) -> Interned<Path> {
        self.base.file_with_ext("fir")
    }
}

impl JobKind for FirrtlJobKind {
    type Args = FirrtlArgs;
    type Job = Firrtl;
    type Dependencies = JobKindAndDependencies<BaseJobKind>;

    fn dependencies(self) -> Self::Dependencies {
        JobKindAndDependencies::new(BaseJobKind)
    }

    fn args_to_jobs(
        args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        args.args_to_jobs_simple(
            params,
            |_kind, FirrtlArgs { export_options }, dependencies| {
                Ok(Firrtl {
                    base: dependencies.get_job::<BaseJob, _>().clone(),
                    export_options,
                })
            },
        )
    }

    fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.base.output_dir(),
        }]
        .intern_slice()
    }

    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.firrtl_file(),
        }]
        .intern_slice()
    }

    fn name(self) -> Interned<str> {
        "firrtl".intern()
    }

    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        None
    }

    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        let [JobItem::Path { path: input_path }] = *inputs else {
            panic!("wrong inputs, expected a single `Path`");
        };
        assert_eq!(input_path, job.base.output_dir());
        crate::firrtl::export(
            job.make_firrtl_file_backend(),
            params.main_module(),
            job.export_options,
        )?;
        Ok(vec![JobItem::Path {
            path: job.firrtl_file(),
        }])
    }
}

pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
    [DynJobKind::new(FirrtlJobKind)]
}
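The job's single output path comes from the base job's output directory and file stem via file_with_ext("fir"). BaseJob's exact API isn't shown in this diff, so the following is only a hypothetical sketch of the naming convention it implies (the "target/blinky-out"/"blinky" values mirror the CI invocation above):

use std::path::{Path, PathBuf};

// Hypothetical helper mirroring what BaseJob::file_with_ext("fir") appears to do:
// join the output directory with the file stem and set the requested extension.
fn file_with_ext(output_dir: &Path, file_stem: &str, ext: &str) -> PathBuf {
    output_dir.join(file_stem).with_extension(ext)
}

fn main() {
    let fir = file_with_ext(Path::new("target/blinky-out"), "blinky", "fir");
    assert_eq!(fir, PathBuf::from("target/blinky-out/blinky.fir"));
}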
crates/fayalite/src/build/formal.rs — new file, 411 lines
@ -0,0 +1,411 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

use crate::{
    build::{
        BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, JobAndDependencies,
        JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
        JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
        external::{
            ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
        },
        verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
    },
    intern::{Intern, InternSlice, Interned},
    module::NameId,
    util::job_server::AcquiredJob,
};
use clap::{Args, ValueEnum};
use eyre::Context;
use serde::{Deserialize, Serialize};
use std::{
    ffi::{OsStr, OsString},
    fmt::{self, Write},
    path::Path,
};

#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize)]
#[non_exhaustive]
pub enum FormalMode {
    #[default]
    BMC,
    Prove,
    Live,
    Cover,
}

impl FormalMode {
    pub fn as_str(self) -> &'static str {
        match self {
            FormalMode::BMC => "bmc",
            FormalMode::Prove => "prove",
            FormalMode::Live => "live",
            FormalMode::Cover => "cover",
        }
    }
}

impl fmt::Display for FormalMode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct FormalArgs {
    #[arg(long = "sby-extra-arg", value_name = "ARG")]
    pub sby_extra_args: Vec<OsString>,
    #[arg(long, default_value_t)]
    pub formal_mode: FormalMode,
    #[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
    pub formal_depth: u64,
    #[arg(long, default_value = Self::DEFAULT_SOLVER)]
    pub formal_solver: String,
    #[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
    pub smtbmc_extra_args: Vec<OsString>,
}

impl FormalArgs {
    pub const DEFAULT_DEPTH: u64 = 20;
    pub const DEFAULT_SOLVER: &'static str = "z3";
}

impl ToArgs for FormalArgs {
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        let Self {
            sby_extra_args,
            formal_mode,
            formal_depth,
            formal_solver,
            smtbmc_extra_args,
        } = self;
        for arg in sby_extra_args {
            args.write_long_option_eq("sby-extra-arg", arg);
        }
        args.write_display_args([
            format_args!("--formal-mode={formal_mode}"),
            format_args!("--formal-depth={formal_depth}"),
            format_args!("--formal-solver={formal_solver}"),
        ]);
        for arg in smtbmc_extra_args {
            args.write_long_option_eq("smtbmc-extra-arg", arg);
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct WriteSbyFileJobKind;

#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub struct WriteSbyFileJob {
    sby_extra_args: Interned<[Interned<OsStr>]>,
    formal_mode: FormalMode,
    formal_depth: u64,
    formal_solver: Interned<str>,
    smtbmc_extra_args: Interned<[Interned<OsStr>]>,
    sby_file: Interned<Path>,
    output_dir: Interned<Path>,
    main_verilog_file: Interned<Path>,
}

impl WriteSbyFileJob {
    pub fn sby_extra_args(&self) -> Interned<[Interned<OsStr>]> {
        self.sby_extra_args
    }
    pub fn formal_mode(&self) -> FormalMode {
        self.formal_mode
    }
    pub fn formal_depth(&self) -> u64 {
        self.formal_depth
    }
    pub fn formal_solver(&self) -> Interned<str> {
        self.formal_solver
    }
    pub fn smtbmc_extra_args(&self) -> Interned<[Interned<OsStr>]> {
        self.smtbmc_extra_args
    }
    pub fn sby_file(&self) -> Interned<Path> {
        self.sby_file
    }
    pub fn output_dir(&self) -> Interned<Path> {
        self.output_dir
    }
    pub fn main_verilog_file(&self) -> Interned<Path> {
        self.main_verilog_file
    }
    fn write_sby(
        &self,
        output: &mut OsString,
        additional_files: &[Interned<Path>],
        main_module_name_id: NameId,
    ) -> eyre::Result<()> {
        let Self {
            sby_extra_args: _,
            formal_mode,
            formal_depth,
            formal_solver,
            smtbmc_extra_args,
            sby_file: _,
            output_dir: _,
            main_verilog_file,
        } = self;
        write!(
            output,
            "[options]\n\
            mode {formal_mode}\n\
            depth {formal_depth}\n\
            wait on\n\
            \n\
            [engines]\n\
            smtbmc {formal_solver} -- --"
        )
        .expect("writing to OsString can't fail");
        for i in smtbmc_extra_args {
            output.push(" ");
            output.push(i);
        }
        output.push(
            "\n\
            \n\
            [script]\n",
        );
        for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
            output.push("read_verilog -sv -formal \"");
            output.push(verilog_file);
            output.push("\"\n");
        }
        let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
        // workaround for wires disappearing -- set `keep` on all wires
        writeln!(
            output,
            "hierarchy -top {circuit_name}\n\
            proc\n\
            setattr -set keep 1 w:\\*\n\
            prep",
        )
        .expect("writing to OsString can't fail");
        Ok(())
    }
}
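With the default arguments (mode bmc, depth 20, solver z3, no extra args), write_sby produces a SymbiYosys script along these lines; the [script] section lists whatever Verilog files the Verilog job reported, and the circuit name comes from the main module:

[options]
mode bmc
depth 20
wait on

[engines]
smtbmc z3 -- --

[script]
read_verilog -sv -formal "<main .v file and any additional files>"
hierarchy -top <circuit name>
proc
setattr -set keep 1 w:\*
prep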
impl JobKind for WriteSbyFileJobKind {
    type Args = FormalArgs;
    type Job = WriteSbyFileJob;
    type Dependencies = JobKindAndDependencies<VerilogJobKind>;

    fn dependencies(self) -> Self::Dependencies {
        Default::default()
    }

    fn args_to_jobs(
        mut args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        args.dependencies
            .dependencies
            .args
            .args
            .additional_args
            .verilog_dialect
            .get_or_insert(VerilogDialect::Yosys);
        args.args_to_jobs_simple(params, |_kind, args, dependencies| {
            let FormalArgs {
                sby_extra_args,
                formal_mode,
                formal_depth,
                formal_solver,
                smtbmc_extra_args,
            } = args;
            let base_job = dependencies.get_job::<BaseJob, _>();
            Ok(WriteSbyFileJob {
                sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(),
                formal_mode,
                formal_depth,
                formal_solver: formal_solver.intern_deref(),
                smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(),
                sby_file: base_job.file_with_ext("sby"),
                output_dir: base_job.output_dir(),
                main_verilog_file: dependencies.get_job::<VerilogJob, _>().main_verilog_file(),
            })
        })
    }

    fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::DynamicPaths {
            source_job_name: VerilogJobKind.name(),
        }]
        .intern_slice()
    }

    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path { path: job.sby_file }].intern_slice()
    }

    fn name(self) -> Interned<str> {
        "write-sby-file".intern()
    }

    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        None
    }

    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let [additional_files] = inputs else {
            unreachable!();
        };
        let additional_files = VerilogJob::unwrap_additional_files(additional_files);
        let mut contents = OsString::new();
        job.write_sby(
            &mut contents,
            additional_files,
            params.main_module().name_id(),
        )?;
        let path = job.sby_file;
        std::fs::write(path, contents.as_encoded_bytes())
            .wrap_err_with(|| format!("writing {path:?} failed"))?;
        Ok(vec![JobItem::Path { path }])
    }

    fn subcommand_hidden(self) -> bool {
        true
    }
}

#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub struct Formal {
    #[serde(flatten)]
    write_sby_file: WriteSbyFileJob,
    sby_file_name: Interned<OsStr>,
}

impl fmt::Debug for Formal {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self {
            write_sby_file:
                WriteSbyFileJob {
                    sby_extra_args,
                    formal_mode,
                    formal_depth,
                    formal_solver,
                    smtbmc_extra_args,
                    sby_file,
                    output_dir: _,
                    main_verilog_file,
                },
            sby_file_name,
        } = self;
        f.debug_struct("Formal")
            .field("sby_extra_args", sby_extra_args)
            .field("formal_mode", formal_mode)
            .field("formal_depth", formal_depth)
            .field("formal_solver", formal_solver)
            .field("smtbmc_extra_args", smtbmc_extra_args)
            .field("sby_file", sby_file)
            .field("sby_file_name", sby_file_name)
            .field("main_verilog_file", main_verilog_file)
            .finish_non_exhaustive()
    }
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
pub struct Symbiyosys;

impl ExternalProgramTrait for Symbiyosys {
    fn default_program_name() -> Interned<str> {
        "sby".intern()
    }
}

#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)]
pub struct FormalAdditionalArgs {}

impl ToArgs for FormalAdditionalArgs {
    fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
        let Self {} = self;
    }
}

impl ExternalCommand for Formal {
    type AdditionalArgs = FormalAdditionalArgs;
    type AdditionalJobData = Formal;
    type BaseJobPosition = GetJobPositionDependencies<
        GetJobPositionDependencies<
            GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
        >,
    >;
    type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
    type ExternalProgram = Symbiyosys;

    fn dependencies() -> Self::Dependencies {
        Default::default()
    }

    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let FormalAdditionalArgs {} = args.additional_args;
            let write_sby_file = dependencies.get_job::<WriteSbyFileJob, _>().clone();
            Ok(Formal {
                sby_file_name: write_sby_file
                    .sby_file()
                    .interned_file_name()
                    .expect("known to have file name"),
                write_sby_file,
            })
        })
    }

    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        [
            JobItemName::Path {
                path: job.additional_job_data().write_sby_file.sby_file(),
            },
            JobItemName::Path {
                path: job.additional_job_data().write_sby_file.main_verilog_file(),
            },
            JobItemName::DynamicPaths {
                source_job_name: VerilogJobKind.name(),
            },
        ]
        .intern_slice()
    }

    fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
        Interned::default()
    }

    fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
        // args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode
        args.write_arg("-f");
        args.write_interned_arg(job.additional_job_data().sby_file_name);
        args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args());
    }

    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
        Some(job.output_dir())
    }

    fn job_kind_name() -> Interned<str> {
        "formal".intern()
    }
}

pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
    [
        DynJobKind::new(WriteSbyFileJobKind),
        DynJobKind::new(ExternalCommandJobKind::<Formal>::new()),
    ]
}
crates/fayalite/src/build/graph.rs — new file, 818 lines (listing truncated below)
@ -0,0 +1,818 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

use crate::{
    build::{DynJob, JobItem, JobItemName, JobParams, program_name_for_internal_jobs},
    intern::Interned,
    util::{HashMap, HashSet, job_server::AcquiredJob},
};
use eyre::{ContextCompat, eyre};
use petgraph::{
    algo::{DfsSpace, kosaraju_scc, toposort},
    graph::DiGraph,
    visit::{GraphBase, Visitable},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
use std::{
    cell::OnceCell,
    collections::{BTreeMap, BTreeSet, VecDeque},
    convert::Infallible,
    ffi::OsStr,
    fmt::{self, Write},
    panic,
    rc::Rc,
    str::Utf8Error,
    sync::mpsc,
    thread::{self, ScopedJoinHandle},
};

macro_rules! write_str {
    ($s:expr, $($rest:tt)*) => {
        write!($s, $($rest)*).expect("String::write_fmt can't fail")
    };
}

#[derive(Clone, Debug)]
enum JobGraphNode {
    Job(DynJob),
    Item {
        #[allow(dead_code, reason = "name used for debugging")]
        name: JobItemName,
        source_job: Option<DynJob>,
    },
}

type JobGraphInner = DiGraph<JobGraphNode, ()>;

#[derive(Clone, Default)]
pub struct JobGraph {
    jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
    items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
    graph: JobGraphInner,
    topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
    space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
}

impl fmt::Debug for JobGraph {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self {
            jobs: _,
            items: _,
            graph,
            topological_order,
            space: _,
        } = self;
        f.debug_struct("JobGraph")
            .field("graph", graph)
            .field("topological_order", topological_order)
            .finish_non_exhaustive()
    }
}

#[derive(Clone, Debug)]
pub enum JobGraphError {
    CycleError {
        job: DynJob,
        output: JobItemName,
    },
    MultipleJobsCreateSameOutput {
        output_item: JobItemName,
        existing_job: DynJob,
        new_job: DynJob,
    },
}

impl std::error::Error for JobGraphError {}

impl fmt::Display for JobGraphError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::CycleError { job, output } => write!(
                f,
                "job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
                {output:?}\n\
                job:\n{job:?}",
            ),
            JobGraphError::MultipleJobsCreateSameOutput {
                output_item,
                existing_job,
                new_job,
            } => write!(
                f,
                "job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
                conflicting output:\n\
                {output_item:?}\n\
                existing job:\n\
                {existing_job:?}\n\
                new job:\n\
                {new_job:?}",
            ),
        }
    }
}
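JobGraph keeps a petgraph DiGraph plus a cached topological order and a reusable DfsSpace, and CycleError covers the case where adding a job's edges would make the topological sort fail. A minimal illustration of that cycle check using petgraph directly, with a toy two-node graph rather than fayalite's node types:

use petgraph::algo::{DfsSpace, toposort};
use petgraph::graph::DiGraph;

fn main() {
    let mut graph = DiGraph::<&str, ()>::new();
    let a = graph.add_node("firrtl");
    let b = graph.add_node("verilog");
    graph.add_edge(a, b, ());

    // Reusable DFS workspace, like the `space` field in JobGraph.
    let mut space = DfsSpace::new(&graph);
    assert!(toposort(&graph, Some(&mut space)).is_ok());

    // Adding a back edge creates a cycle, so the topological sort now fails.
    graph.add_edge(b, a, ());
    assert!(toposort(&graph, Some(&mut space)).is_err());
}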
|
#[derive(Copy, Clone, Debug)]
|
||||||
|
enum EscapeForUnixShellState {
|
||||||
|
DollarSingleQuote,
|
||||||
|
SingleQuote,
|
||||||
|
Unquoted,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct EscapeForUnixShell<'a> {
|
||||||
|
state: EscapeForUnixShellState,
|
||||||
|
prefix: [u8; 3],
|
||||||
|
bytes: &'a [u8],
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
fmt::Display::fmt(self, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
for c in self.clone() {
|
||||||
|
f.write_char(c)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> EscapeForUnixShell<'a> {
|
||||||
|
pub fn new(s: &'a (impl ?Sized + AsRef<OsStr>)) -> Self {
|
||||||
|
Self::from_bytes(s.as_ref().as_encoded_bytes())
|
||||||
|
}
|
||||||
|
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
|
||||||
|
let mut prefix = [0; 3];
|
||||||
|
prefix[..bytes.len()].copy_from_slice(bytes);
|
||||||
|
prefix
|
||||||
|
}
|
||||||
|
pub fn from_bytes(bytes: &'a [u8]) -> Self {
|
||||||
|
let mut needs_single_quote = bytes.is_empty();
|
||||||
|
for &b in bytes {
|
||||||
|
match b {
|
||||||
|
b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
|
||||||
|
0..0x20 | 0x7F.. => {
|
||||||
|
return Self {
|
||||||
|
state: EscapeForUnixShellState::DollarSingleQuote,
|
||||||
|
prefix: Self::make_prefix(b"$'"),
|
||||||
|
bytes,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if needs_single_quote {
|
||||||
|
Self {
|
||||||
|
state: EscapeForUnixShellState::SingleQuote,
|
||||||
|
prefix: Self::make_prefix(b"'"),
|
||||||
|
bytes,
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Self {
|
||||||
|
state: EscapeForUnixShellState::Unquoted,
|
||||||
|
prefix: Self::make_prefix(b""),
|
||||||
|
bytes,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Iterator for EscapeForUnixShell<'_> {
|
||||||
|
type Item = char;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
match &mut self.prefix {
|
||||||
|
[0, 0, 0] => {}
|
||||||
|
[0, 0, v] | // find first
|
||||||
|
[0, v, _] | // non-zero byte
|
||||||
|
[v, _, _] => {
|
||||||
|
let retval = *v as char;
|
||||||
|
*v = 0;
|
||||||
|
return Some(retval);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let Some(&next_byte) = self.bytes.split_off_first() else {
|
||||||
|
return match self.state {
|
||||||
|
EscapeForUnixShellState::DollarSingleQuote
|
||||||
|
| EscapeForUnixShellState::SingleQuote => {
|
||||||
|
self.state = EscapeForUnixShellState::Unquoted;
|
||||||
|
Some('\'')
|
||||||
|
}
|
||||||
|
EscapeForUnixShellState::Unquoted => None,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
match self.state {
|
||||||
|
EscapeForUnixShellState::DollarSingleQuote => match next_byte {
|
||||||
|
b'\'' | b'\\' => {
|
||||||
|
self.prefix = Self::make_prefix(&[next_byte]);
|
||||||
|
Some('\\')
|
||||||
|
}
|
||||||
|
b'\t' => {
|
||||||
|
self.prefix = Self::make_prefix(b"t");
|
||||||
|
Some('\\')
|
||||||
|
}
|
||||||
|
b'\n' => {
|
||||||
|
self.prefix = Self::make_prefix(b"n");
|
||||||
|
Some('\\')
|
||||||
|
}
|
||||||
|
b'\r' => {
|
||||||
|
self.prefix = Self::make_prefix(b"r");
|
||||||
|
Some('\\')
|
||||||
|
}
|
||||||
|
0x20..=0x7E => Some(next_byte as char),
|
||||||
|
_ => {
|
||||||
|
self.prefix = [
|
||||||
|
b'x',
|
||||||
|
char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
|
||||||
|
as u8,
|
||||||
|
char::from_digit(next_byte as u32 & 0xF, 0x10)
|
||||||
|
.expect("known to be in range") as u8,
|
||||||
|
];
|
||||||
|
Some('\\')
|
||||||
|
}
|
||||||
|
},
|
||||||
|
EscapeForUnixShellState::SingleQuote => {
|
||||||
|
if next_byte == b'\'' {
|
||||||
|
self.prefix = Self::make_prefix(b"\\''");
|
||||||
|
Some('\'')
|
||||||
|
} else {
|
||||||
|
Some(next_byte as char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
EscapeForUnixShellState::Unquoted => match next_byte {
|
||||||
|
b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
|
||||||
|
| b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
|
||||||
|
| b'}' | b'~' => {
|
||||||
|
self.prefix = Self::make_prefix(&[next_byte]);
|
||||||
|
Some('\\')
|
||||||
|
}
|
||||||
|
_ => Some(next_byte as char),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
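Since EscapeForUnixShell implements Display, its behaviour is easiest to see on a few inputs. These expected outputs follow directly from the three states above: plain words pass through unquoted, anything containing spaces or quotes gets single-quoted, and control characters force the $'…' form:

// Illustrative expectations for the escaper shown above.
assert_eq!(EscapeForUnixShell::new("blinky").to_string(), "blinky");
assert_eq!(EscapeForUnixShell::new("hello world").to_string(), "'hello world'");
assert_eq!(EscapeForUnixShell::new("it's").to_string(), r"'it'\''s'");
assert_eq!(EscapeForUnixShell::new("a\tb").to_string(), r"$'a\tb'");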
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub enum UnixMakefileEscapeKind {
|
||||||
|
NonRecipe,
|
||||||
|
RecipeWithoutShellEscaping,
|
||||||
|
RecipeWithShellEscaping,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone)]
|
||||||
|
pub struct EscapeForUnixMakefile<'a> {
|
||||||
|
s: &'a OsStr,
|
||||||
|
kind: UnixMakefileEscapeKind,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
fmt::Display::fmt(self, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
self.do_write(
|
||||||
|
f,
|
||||||
|
fmt::Write::write_str,
|
||||||
|
fmt::Write::write_char,
|
||||||
|
|_, _| Ok(()),
|
||||||
|
|_| unreachable!("already checked that the input causes no UTF-8 errors"),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> EscapeForUnixMakefile<'a> {
|
||||||
|
fn do_write<S: ?Sized, E>(
|
||||||
|
&self,
|
||||||
|
state: &mut S,
|
||||||
|
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
|
||||||
|
write_char: impl Fn(&mut S, char) -> Result<(), E>,
|
||||||
|
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
|
||||||
|
utf8_error: impl Fn(Utf8Error) -> E,
|
||||||
|
) -> Result<(), E> {
|
||||||
|
let escape_recipe_char = |c| match c {
|
||||||
|
'$' => write_str(state, "$$"),
|
||||||
|
'\0'..='\x1F' | '\x7F' => {
|
||||||
|
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||||
|
}
|
||||||
|
_ => write_char(state, c),
|
||||||
|
};
|
||||||
|
match self.kind {
|
||||||
|
UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes())
|
||||||
|
.map_err(&utf8_error)?
|
||||||
|
.chars()
|
||||||
|
.try_for_each(|c| match c {
|
||||||
|
'=' => {
|
||||||
|
add_variable(state, "EQUALS = =")?;
|
||||||
|
write_str(state, "$(EQUALS)")
|
||||||
|
}
|
||||||
|
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
|
||||||
|
'$' => write_str(state, "$$"),
|
||||||
|
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
|
||||||
|
write_char(state, '\\')?;
|
||||||
|
write_char(state, c)
|
||||||
|
}
|
||||||
|
'\0'..='\x1F' | '\x7F' => {
|
||||||
|
panic!("can't escape a control character for Unix Makefile: {c:?}");
|
||||||
|
}
|
||||||
|
_ => write_char(state, c),
|
||||||
|
}),
|
||||||
|
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
|
||||||
|
str::from_utf8(self.s.as_encoded_bytes())
|
||||||
|
.map_err(&utf8_error)?
|
||||||
|
.chars()
|
||||||
|
.try_for_each(escape_recipe_char)
|
||||||
|
}
|
||||||
|
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
|
||||||
|
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn new(
|
||||||
|
s: &'a (impl ?Sized + AsRef<OsStr>),
|
||||||
|
kind: UnixMakefileEscapeKind,
|
||||||
|
needed_variables: &mut BTreeSet<&'static str>,
|
||||||
|
) -> Result<Self, Utf8Error> {
|
||||||
|
let s = s.as_ref();
|
||||||
|
let retval = Self { s, kind };
|
||||||
|
retval.do_write(
|
||||||
|
needed_variables,
|
||||||
|
|_, _| Ok(()),
|
||||||
|
|_, _| Ok(()),
|
||||||
|
|needed_variables, variable| {
|
||||||
|
needed_variables.insert(variable);
|
||||||
|
Ok(())
|
||||||
|
},
|
||||||
|
|e| e,
|
||||||
|
)?;
|
||||||
|
Ok(retval)
|
||||||
|
}
|
||||||
|
}
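// Illustrative sketch (not part of this diff): escaping a path for use as a
// makefile target. `new` dry-runs the escaping (via `do_write` with no-op writers)
// so it can report UTF-8 errors up front and collect any helper variables, such as
// `EQUALS = =`, that the generated makefile must define; the `Display` impl then
// performs the real write.
fn example_makefile_escape() -> Result<(), std::str::Utf8Error> {
    use std::collections::BTreeSet;
    let mut needed_variables = BTreeSet::new();
    let escaped = EscapeForUnixMakefile::new(
        "out dir/top=v1.json", // the space is backslash-escaped, `=` becomes `$(EQUALS)`
        UnixMakefileEscapeKind::NonRecipe,
        &mut needed_variables,
    )?;
    let rule_head = format!("{escaped}:");
    // `needed_variables` now contains "EQUALS = =", which `to_unix_makefile`
    // prepends to the generated makefile.
    let _ = (rule_head, needed_variables);
    Ok(())
}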
|
||||||
|
|
||||||
|
impl JobGraph {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Self::default()
|
||||||
|
}
|
||||||
|
fn try_add_item_node(
|
||||||
|
&mut self,
|
||||||
|
name: JobItemName,
|
||||||
|
new_source_job: Option<DynJob>,
|
||||||
|
new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||||
|
) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
|
||||||
|
use hashbrown::hash_map::Entry;
|
||||||
|
match self.items.entry(name) {
|
||||||
|
Entry::Occupied(item_entry) => {
|
||||||
|
let node_id = *item_entry.get();
|
||||||
|
let JobGraphNode::Item {
|
||||||
|
name: _,
|
||||||
|
source_job,
|
||||||
|
} = &mut self.graph[node_id]
|
||||||
|
else {
|
||||||
|
unreachable!("known to be an item");
|
||||||
|
};
|
||||||
|
if let Some(new_source_job) = new_source_job {
|
||||||
|
if let Some(source_job) = source_job {
|
||||||
|
return Err(JobGraphError::MultipleJobsCreateSameOutput {
|
||||||
|
output_item: item_entry.key().clone(),
|
||||||
|
existing_job: source_job.clone(),
|
||||||
|
new_job: new_source_job,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
*source_job = Some(new_source_job);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(node_id)
|
||||||
|
}
|
||||||
|
Entry::Vacant(item_entry) => {
|
||||||
|
let node_id = self.graph.add_node(JobGraphNode::Item {
|
||||||
|
name,
|
||||||
|
source_job: new_source_job,
|
||||||
|
});
|
||||||
|
new_nodes.insert(node_id);
|
||||||
|
item_entry.insert(node_id);
|
||||||
|
Ok(node_id)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
|
||||||
|
&mut self,
|
||||||
|
jobs: I,
|
||||||
|
) -> Result<(), JobGraphError> {
|
||||||
|
use hashbrown::hash_map::Entry;
|
||||||
|
let jobs = jobs.into_iter();
|
||||||
|
struct RemoveNewNodesOnError<'a> {
|
||||||
|
this: &'a mut JobGraph,
|
||||||
|
new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
|
||||||
|
}
|
||||||
|
impl Drop for RemoveNewNodesOnError<'_> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
for node in self.new_nodes.drain() {
|
||||||
|
self.this.graph.remove_node(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
|
||||||
|
this: self,
|
||||||
|
new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
|
||||||
|
};
|
||||||
|
let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
|
||||||
|
let this = &mut *remove_new_nodes_on_error.this;
|
||||||
|
for job in jobs {
|
||||||
|
let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let job_node_id = this
|
||||||
|
.graph
|
||||||
|
.add_node(JobGraphNode::Job(job_entry.key().clone()));
|
||||||
|
new_nodes.insert(job_node_id);
|
||||||
|
job_entry.insert(job_node_id);
|
||||||
|
for name in job.outputs() {
|
||||||
|
let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
|
||||||
|
this.graph.add_edge(job_node_id, item_node_id, ());
|
||||||
|
}
|
||||||
|
for name in job.inputs() {
|
||||||
|
let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
|
||||||
|
this.graph.add_edge(item_node_id, job_node_id, ());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
match toposort(&this.graph, Some(&mut this.space)) {
|
||||||
|
Ok(v) => {
|
||||||
|
this.topological_order = v;
|
||||||
|
// no need to remove any of the new nodes on drop since we didn't encounter any errors
|
||||||
|
remove_new_nodes_on_error.new_nodes.clear();
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
Err(_) => {
|
||||||
|
// there's at least one cycle, find one!
|
||||||
|
let cycle = kosaraju_scc(&this.graph)
|
||||||
|
.into_iter()
|
||||||
|
.find_map(|scc| {
|
||||||
|
if scc.len() <= 1 {
|
||||||
|
// can't be a cycle since our graph is bipartite --
|
||||||
|
// jobs only connect to items, never jobs to jobs or items to items
|
||||||
|
None
|
||||||
|
} else {
|
||||||
|
Some(scc)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.expect("we know there's a cycle");
|
||||||
|
let cycle_set = HashSet::from_iter(cycle.iter().copied());
|
||||||
|
let job = cycle
|
||||||
|
.into_iter()
|
||||||
|
.find_map(|node_id| {
|
||||||
|
if let JobGraphNode::Job(job) = &this.graph[node_id] {
|
||||||
|
Some(job.clone())
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.expect("a job must be part of the cycle");
|
||||||
|
let output = job
|
||||||
|
.outputs()
|
||||||
|
.into_iter()
|
||||||
|
.find(|output| cycle_set.contains(&this.items[output]))
|
||||||
|
.expect("an output must be part of the cycle");
|
||||||
|
Err(JobGraphError::CycleError { job, output })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[track_caller]
|
||||||
|
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
|
||||||
|
match self.try_add_jobs(jobs) {
|
||||||
|
Ok(()) => {}
|
||||||
|
Err(e) => panic!("error: {e}"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn to_unix_makefile(&self, extra_args: &[Interned<OsStr>]) -> Result<String, Utf8Error> {
|
||||||
|
self.to_unix_makefile_with_internal_program_prefix(
|
||||||
|
&[program_name_for_internal_jobs()],
|
||||||
|
extra_args,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
pub fn to_unix_makefile_with_internal_program_prefix(
|
||||||
|
&self,
|
||||||
|
internal_program_prefix: &[Interned<OsStr>],
|
||||||
|
extra_args: &[Interned<OsStr>],
|
||||||
|
) -> Result<String, Utf8Error> {
|
||||||
|
let mut retval = String::new();
|
||||||
|
let mut needed_variables = BTreeSet::new();
|
||||||
|
let mut phony_targets = BTreeSet::new();
|
||||||
|
for &node_id in &self.topological_order {
|
||||||
|
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let outputs = job.outputs();
|
||||||
|
if outputs.is_empty() {
|
||||||
|
retval.push_str(":");
|
||||||
|
} else {
|
||||||
|
for output in job.outputs() {
|
||||||
|
match output {
|
||||||
|
JobItemName::Path { path } => {
|
||||||
|
write_str!(
|
||||||
|
retval,
|
||||||
|
"{} ",
|
||||||
|
EscapeForUnixMakefile::new(
|
||||||
|
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||||
|
UnixMakefileEscapeKind::NonRecipe,
|
||||||
|
&mut needed_variables
|
||||||
|
)?
|
||||||
|
);
|
||||||
|
}
|
||||||
|
JobItemName::DynamicPaths { source_job_name } => {
|
||||||
|
write_str!(
|
||||||
|
retval,
|
||||||
|
"{} ",
|
||||||
|
EscapeForUnixMakefile::new(
|
||||||
|
&source_job_name,
|
||||||
|
UnixMakefileEscapeKind::NonRecipe,
|
||||||
|
&mut needed_variables
|
||||||
|
)?
|
||||||
|
);
|
||||||
|
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if outputs.len() == 1 {
|
||||||
|
retval.push_str(":");
|
||||||
|
} else {
|
||||||
|
retval.push_str("&:");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for input in job.inputs() {
|
||||||
|
match input {
|
||||||
|
JobItemName::Path { path } => {
|
||||||
|
write_str!(
|
||||||
|
retval,
|
||||||
|
" {}",
|
||||||
|
EscapeForUnixMakefile::new(
|
||||||
|
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
|
||||||
|
UnixMakefileEscapeKind::NonRecipe,
|
||||||
|
&mut needed_variables
|
||||||
|
)?
|
||||||
|
);
|
||||||
|
}
|
||||||
|
JobItemName::DynamicPaths { source_job_name } => {
|
||||||
|
write_str!(
|
||||||
|
retval,
|
||||||
|
" {}",
|
||||||
|
EscapeForUnixMakefile::new(
|
||||||
|
&source_job_name,
|
||||||
|
UnixMakefileEscapeKind::NonRecipe,
|
||||||
|
&mut needed_variables
|
||||||
|
)?
|
||||||
|
);
|
||||||
|
phony_targets.insert(Interned::into_inner(source_job_name));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
retval.push_str("\n\t");
|
||||||
|
job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args)
|
||||||
|
.to_unix_shell_line(&mut retval, |arg, output| {
|
||||||
|
write_str!(
|
||||||
|
output,
|
||||||
|
"{}",
|
||||||
|
EscapeForUnixMakefile::new(
|
||||||
|
arg,
|
||||||
|
UnixMakefileEscapeKind::RecipeWithShellEscaping,
|
||||||
|
&mut needed_variables
|
||||||
|
)?
|
||||||
|
);
|
||||||
|
Ok(())
|
||||||
|
})?;
|
||||||
|
retval.push_str("\n\n");
|
||||||
|
}
|
||||||
|
if !phony_targets.is_empty() {
|
||||||
|
retval.push_str("\n.PHONY:");
|
||||||
|
for phony_target in phony_targets {
|
||||||
|
write_str!(
|
||||||
|
retval,
|
||||||
|
" {}",
|
||||||
|
EscapeForUnixMakefile::new(
|
||||||
|
phony_target,
|
||||||
|
UnixMakefileEscapeKind::NonRecipe,
|
||||||
|
&mut needed_variables
|
||||||
|
)?
|
||||||
|
);
|
||||||
|
}
|
||||||
|
retval.push_str("\n");
|
||||||
|
}
|
||||||
|
if !needed_variables.is_empty() {
|
||||||
|
retval.insert_str(
|
||||||
|
0,
|
||||||
|
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
Ok(retval)
|
||||||
|
}
|
||||||
|
pub fn to_unix_shell_script(&self, extra_args: &[Interned<OsStr>]) -> String {
|
||||||
|
self.to_unix_shell_script_with_internal_program_prefix(
|
||||||
|
&[program_name_for_internal_jobs()],
|
||||||
|
extra_args,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
pub fn to_unix_shell_script_with_internal_program_prefix(
|
||||||
|
&self,
|
||||||
|
internal_program_prefix: &[Interned<OsStr>],
|
||||||
|
extra_args: &[Interned<OsStr>],
|
||||||
|
) -> String {
|
||||||
|
let mut retval = String::from(
|
||||||
|
"#!/bin/sh\n\
|
||||||
|
set -ex\n",
|
||||||
|
);
|
||||||
|
for &node_id in &self.topological_order {
|
||||||
|
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let Ok(()) = job
|
||||||
|
.command_params_with_internal_program_prefix(internal_program_prefix, extra_args)
|
||||||
|
.to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> {
|
||||||
|
write_str!(output, "{}", EscapeForUnixShell::new(&arg));
|
||||||
|
Ok(())
|
||||||
|
});
|
||||||
|
retval.push_str("\n");
|
||||||
|
}
|
||||||
|
retval
|
||||||
|
}
|
||||||
|
pub fn run(&self, params: &JobParams) -> eyre::Result<()> {
|
||||||
|
// use scope to auto-join threads on errors
|
||||||
|
thread::scope(|scope| {
|
||||||
|
struct WaitingJobState {
|
||||||
|
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||||
|
job: DynJob,
|
||||||
|
inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
|
||||||
|
}
|
||||||
|
let mut ready_jobs = VecDeque::new();
|
||||||
|
let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
|
||||||
|
for &node_id in &self.topological_order {
|
||||||
|
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
let waiting_job = WaitingJobState {
|
||||||
|
job_node_id: node_id,
|
||||||
|
job: job.clone(),
|
||||||
|
inputs: job
|
||||||
|
.inputs()
|
||||||
|
.iter()
|
||||||
|
.map(|&name| (name, OnceCell::new()))
|
||||||
|
.collect(),
|
||||||
|
};
|
||||||
|
if waiting_job.inputs.is_empty() {
|
||||||
|
ready_jobs.push_back(waiting_job);
|
||||||
|
} else {
|
||||||
|
let waiting_job = Rc::new(waiting_job);
|
||||||
|
for &input_item in waiting_job.inputs.keys() {
|
||||||
|
item_name_to_waiting_jobs_map
|
||||||
|
.entry(input_item)
|
||||||
|
.or_default()
|
||||||
|
.push(waiting_job.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
struct RunningJob<'scope> {
|
||||||
|
job: DynJob,
|
||||||
|
thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
|
||||||
|
}
|
||||||
|
let mut running_jobs = HashMap::default();
|
||||||
|
let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
|
||||||
|
loop {
|
||||||
|
while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() {
|
||||||
|
let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
|
||||||
|
else {
|
||||||
|
unreachable!();
|
||||||
|
};
|
||||||
|
let output_items = thread.join().map_err(panic::resume_unwind)??;
|
||||||
|
assert!(
|
||||||
|
output_items.iter().map(JobItem::name).eq(job.outputs()),
|
||||||
|
"job's run() method returned the wrong output items:\n\
|
||||||
|
output items:\n\
|
||||||
|
{output_items:?}\n\
|
||||||
|
expected outputs:\n\
|
||||||
|
{:?}\n\
|
||||||
|
job:\n\
|
||||||
|
{job:?}",
|
||||||
|
job.outputs(),
|
||||||
|
);
|
||||||
|
for output_item in output_items {
|
||||||
|
for waiting_job in item_name_to_waiting_jobs_map
|
||||||
|
.remove(&output_item.name())
|
||||||
|
.unwrap_or_default()
|
||||||
|
{
|
||||||
|
let Ok(()) =
|
||||||
|
waiting_job.inputs[&output_item.name()].set(output_item.clone())
|
||||||
|
else {
|
||||||
|
unreachable!();
|
||||||
|
};
|
||||||
|
if let Some(waiting_job) = Rc::into_inner(waiting_job) {
|
||||||
|
ready_jobs.push_back(waiting_job);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if let Some(WaitingJobState {
|
||||||
|
job_node_id,
|
||||||
|
job,
|
||||||
|
inputs,
|
||||||
|
}) = ready_jobs.pop_front()
|
||||||
|
{
|
||||||
|
struct RunningJobInThread<'a> {
|
||||||
|
job_node_id: <JobGraphInner as GraphBase>::NodeId,
|
||||||
|
job: DynJob,
|
||||||
|
inputs: Vec<JobItem>,
|
||||||
|
params: &'a JobParams,
|
||||||
|
acquired_job: AcquiredJob,
|
||||||
|
finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
|
||||||
|
}
|
||||||
|
impl RunningJobInThread<'_> {
|
||||||
|
fn run(mut self) -> eyre::Result<Vec<JobItem>> {
|
||||||
|
self.job
|
||||||
|
.run(&self.inputs, self.params, &mut self.acquired_job)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Drop for RunningJobInThread<'_> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
let _ = self.finished_jobs_sender.send(self.job_node_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
let name = job.kind().name();
|
||||||
|
let running_job_in_thread = RunningJobInThread {
|
||||||
|
job_node_id,
|
||||||
|
job: job.clone(),
|
||||||
|
inputs: Result::from_iter(job.inputs().iter().map(|input_name| {
|
||||||
|
inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| {
|
||||||
|
eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
|
||||||
|
})
|
||||||
|
}))?,
|
||||||
|
params,
|
||||||
|
acquired_job: AcquiredJob::acquire()?,
|
||||||
|
finished_jobs_sender: finished_jobs_sender.clone(),
|
||||||
|
};
|
||||||
|
running_jobs.insert(
|
||||||
|
job_node_id,
|
||||||
|
RunningJob {
|
||||||
|
job,
|
||||||
|
thread: thread::Builder::new()
|
||||||
|
.name(format!("job:{name}"))
|
||||||
|
.spawn_scoped(scope, move || running_job_in_thread.run())
|
||||||
|
.expect("failed to spawn thread for job"),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if running_jobs.is_empty() {
|
||||||
|
assert!(item_name_to_waiting_jobs_map.is_empty());
|
||||||
|
assert!(ready_jobs.is_empty());
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
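// Illustrative sketch (not part of this diff): typical flow through the graph API
// above. Jobs are inserted in any order; insertion topologically sorts the
// bipartite job/item graph and rejects cycles or duplicate outputs, after which
// the graph can be exported as a makefile or shell script, or executed directly.
fn example_job_graph(jobs: Vec<DynJob>, params: &JobParams) -> eyre::Result<()> {
    // `add_jobs` panics on error; `try_add_jobs` is the fallible equivalent used
    // by the `Deserialize` impl below.
    let mut graph = JobGraph::new();
    graph.add_jobs(jobs);
    // Export: both emit one command line per job, in topological order.
    let makefile = graph.to_unix_makefile(&[])?;
    let script = graph.to_unix_shell_script(&[]);
    let _ = (makefile, script);
    // Or run in-process: a job is spawned on a worker thread as soon as all of
    // its input items have been produced, gated by `AcquiredJob::acquire()`.
    graph.run(params)
}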
|
||||||
|
|
||||||
|
impl Extend<DynJob> for JobGraph {
|
||||||
|
#[track_caller]
|
||||||
|
fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
|
||||||
|
self.add_jobs(iter);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromIterator<DynJob> for JobGraph {
|
||||||
|
#[track_caller]
|
||||||
|
fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
|
||||||
|
let mut retval = Self::new();
|
||||||
|
retval.add_jobs(iter);
|
||||||
|
retval
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for JobGraph {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: Serializer,
|
||||||
|
{
|
||||||
|
let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
|
||||||
|
for &node_id in &self.topological_order {
|
||||||
|
let JobGraphNode::Job(job) = &self.graph[node_id] else {
|
||||||
|
continue;
|
||||||
|
};
|
||||||
|
serializer.serialize_element(job)?;
|
||||||
|
}
|
||||||
|
serializer.end()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'de> Deserialize<'de> for JobGraph {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: Deserializer<'de>,
|
||||||
|
{
|
||||||
|
let jobs = Vec::<DynJob>::deserialize(deserializer)?;
|
||||||
|
let mut retval = JobGraph::new();
|
||||||
|
retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
|
||||||
|
Ok(retval)
|
||||||
|
}
|
||||||
|
}
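// Illustrative sketch (not part of this diff, and it assumes a `serde_json`
// dependency is available): the `Serialize`/`Deserialize` pair above round-trips
// a graph as a plain sequence of jobs in topological order; the item nodes and
// edges are reconstructed from each job's declared inputs and outputs.
fn example_roundtrip(graph: &JobGraph) -> eyre::Result<JobGraph> {
    let json = serde_json::to_string(graph)?;
    Ok(serde_json::from_str(&json)?)
}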
|
crates/fayalite/src/build/registry.rs (new file, 341 lines)
@@ -0,0 +1,341 @@
|
||||||
|
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||||
|
// See Notices.txt for copyright information
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
build::{DynJobKind, JobKind, built_in_job_kinds},
|
||||||
|
intern::Interned,
|
||||||
|
};
|
||||||
|
use std::{
|
||||||
|
borrow::Borrow,
|
||||||
|
cmp::Ordering,
|
||||||
|
collections::BTreeMap,
|
||||||
|
fmt,
|
||||||
|
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
|
||||||
|
};
|
||||||
|
|
||||||
|
impl DynJobKind {
|
||||||
|
pub fn registry() -> JobKindRegistrySnapshot {
|
||||||
|
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||||
|
}
|
||||||
|
#[track_caller]
|
||||||
|
pub fn register(self) {
|
||||||
|
JobKindRegistry::register(JobKindRegistry::lock(), self);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||||
|
struct InternedStrCompareAsStr(Interned<str>);
|
||||||
|
|
||||||
|
impl fmt::Debug for InternedStrCompareAsStr {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
self.0.fmt(f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for InternedStrCompareAsStr {
|
||||||
|
fn cmp(&self, other: &Self) -> Ordering {
|
||||||
|
str::cmp(&self.0, &other.0)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for InternedStrCompareAsStr {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||||
|
Some(self.cmp(other))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Borrow<str> for InternedStrCompareAsStr {
|
||||||
|
fn borrow(&self) -> &str {
|
||||||
|
&self.0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
struct JobKindRegistry {
|
||||||
|
job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
|
||||||
|
}
|
||||||
|
|
||||||
|
enum JobKindRegisterError {
|
||||||
|
SameName {
|
||||||
|
name: InternedStrCompareAsStr,
|
||||||
|
old_job_kind: DynJobKind,
|
||||||
|
new_job_kind: DynJobKind,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for JobKindRegisterError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::SameName {
|
||||||
|
name,
|
||||||
|
old_job_kind,
|
||||||
|
new_job_kind,
|
||||||
|
} => write!(
|
||||||
|
f,
|
||||||
|
"two different `JobKind` can't share the same name:\n\
|
||||||
|
{name:?}\n\
|
||||||
|
old job kind:\n\
|
||||||
|
{old_job_kind:?}\n\
|
||||||
|
new job kind:\n\
|
||||||
|
{new_job_kind:?}",
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
trait JobKindRegistryRegisterLock {
|
||||||
|
type Locked;
|
||||||
|
fn lock(self) -> Self::Locked;
|
||||||
|
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
|
||||||
|
type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
|
||||||
|
fn lock(self) -> Self::Locked {
|
||||||
|
self.write().expect("shouldn't be poisoned")
|
||||||
|
}
|
||||||
|
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||||
|
Arc::make_mut(locked)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
|
||||||
|
type Locked = Self;
|
||||||
|
|
||||||
|
fn lock(self) -> Self::Locked {
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
|
||||||
|
locked
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl JobKindRegistry {
|
||||||
|
fn lock() -> &'static RwLock<Arc<Self>> {
|
||||||
|
static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
|
||||||
|
REGISTRY.get_or_init(Default::default)
|
||||||
|
}
|
||||||
|
fn try_register<L: JobKindRegistryRegisterLock>(
|
||||||
|
lock: L,
|
||||||
|
job_kind: DynJobKind,
|
||||||
|
) -> Result<(), JobKindRegisterError> {
|
||||||
|
use std::collections::btree_map::Entry;
|
||||||
|
let name = InternedStrCompareAsStr(job_kind.name());
|
||||||
|
// run user code only outside of lock
|
||||||
|
let mut locked = lock.lock();
|
||||||
|
let this = L::make_mut(&mut locked);
|
||||||
|
let result = match this.job_kinds.entry(name) {
|
||||||
|
Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
|
||||||
|
name,
|
||||||
|
old_job_kind: entry.get().clone(),
|
||||||
|
new_job_kind: job_kind,
|
||||||
|
}),
|
||||||
|
Entry::Vacant(entry) => {
|
||||||
|
entry.insert(job_kind);
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
};
|
||||||
|
drop(locked);
|
||||||
|
// outside of lock now, so we can test if it's the same DynJobKind
|
||||||
|
match result {
|
||||||
|
Err(JobKindRegisterError::SameName {
|
||||||
|
name: _,
|
||||||
|
old_job_kind,
|
||||||
|
new_job_kind,
|
||||||
|
}) if old_job_kind == new_job_kind => Ok(()),
|
||||||
|
result => result,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#[track_caller]
|
||||||
|
fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
|
||||||
|
match Self::try_register(lock, job_kind) {
|
||||||
|
Err(e) => panic!("{e}"),
|
||||||
|
Ok(()) => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn get() -> Arc<Self> {
|
||||||
|
Self::lock().read().expect("shouldn't be poisoned").clone()
|
||||||
|
}
|
||||||
|
}
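// Design note (not part of this diff): the registry is a global
// `RwLock<Arc<JobKindRegistry>>`. Readers only clone the `Arc` (see `get`), so a
// snapshot never holds the lock while it is used; writers mutate through
// `Arc::make_mut`, which copies the map only while older snapshots are still
// alive, leaving those snapshots unchanged. The duplicate-name comparison is
// re-done outside the lock so user-provided equality code never runs under it.
fn example_registry_snapshot() {
    // A snapshot is just an `Arc` clone; registrations made after this point do
    // not affect it.
    let snapshot = DynJobKind::registry();
    for (name, kind) in snapshot.iter_with_names() {
        let _ = (name, kind);
    }
}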
|
||||||
|
|
||||||
|
impl Default for JobKindRegistry {
|
||||||
|
fn default() -> Self {
|
||||||
|
let mut retval = Self {
|
||||||
|
job_kinds: BTreeMap::new(),
|
||||||
|
};
|
||||||
|
for job_kind in built_in_job_kinds() {
|
||||||
|
Self::register(&mut retval, job_kind);
|
||||||
|
}
|
||||||
|
retval
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);
|
||||||
|
|
||||||
|
impl JobKindRegistrySnapshot {
|
||||||
|
pub fn get() -> Self {
|
||||||
|
JobKindRegistrySnapshot(JobKindRegistry::get())
|
||||||
|
}
|
||||||
|
pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
|
||||||
|
self.0.job_kinds.get(name)
|
||||||
|
}
|
||||||
|
pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
|
||||||
|
JobKindRegistryIterWithNames(self.0.job_kinds.iter())
|
||||||
|
}
|
||||||
|
pub fn iter(&self) -> JobKindRegistryIter<'_> {
|
||||||
|
JobKindRegistryIter(self.0.job_kinds.values())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
|
||||||
|
type Item = &'a DynJobKind;
|
||||||
|
type IntoIter = JobKindRegistryIter<'a>;
|
||||||
|
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
self.iter()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
|
||||||
|
type Item = &'a DynJobKind;
|
||||||
|
type IntoIter = JobKindRegistryIter<'a>;
|
||||||
|
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
self.iter()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct JobKindRegistryIter<'a>(
|
||||||
|
std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||||
|
);
|
||||||
|
|
||||||
|
impl<'a> Iterator for JobKindRegistryIter<'a> {
|
||||||
|
type Item = &'a DynJobKind;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.0.next()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.0.size_hint()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count(self) -> usize
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
self.0.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn last(self) -> Option<Self::Item> {
|
||||||
|
self.0.last()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
self.0.nth(n)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fold<B, F>(self, init: B, f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
self.0.fold(init, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}
|
||||||
|
|
||||||
|
impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}
|
||||||
|
|
||||||
|
impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
self.0.next_back()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
self.0.nth_back(n)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
self.0.rfold(init, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct JobKindRegistryIterWithNames<'a>(
|
||||||
|
std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
|
||||||
|
);
|
||||||
|
|
||||||
|
impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
|
||||||
|
type Item = (Interned<str>, &'a DynJobKind);
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
self.0.next().map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.0.size_hint()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count(self) -> usize
|
||||||
|
where
|
||||||
|
Self: Sized,
|
||||||
|
{
|
||||||
|
self.0.count()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn last(self) -> Option<Self::Item> {
|
||||||
|
self.0.last().map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fold<B, F>(self, init: B, f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
self.0
|
||||||
|
.map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
.fold(init, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}
|
||||||
|
|
||||||
|
impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}
|
||||||
|
|
||||||
|
impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
self.0
|
||||||
|
.next_back()
|
||||||
|
.map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
self.0
|
||||||
|
.nth_back(n)
|
||||||
|
.map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rfold<B, F>(self, init: B, f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
self.0
|
||||||
|
.map(|(name, job_kind)| (name.0, job_kind))
|
||||||
|
.rfold(init, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
|
pub fn register_job_kind<K: JobKind>(kind: K) {
|
||||||
|
DynJobKind::new(kind).register();
|
||||||
|
}
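// Illustrative sketch (not part of this diff): registering a custom job kind and
// enumerating the registry afterwards. The argument stands in for any type
// implementing `JobKind`; registering the same kind twice is accepted, but two
// different kinds sharing a name panic (see `try_register` above).
fn example_register(my_job_kind: impl JobKind) {
    register_job_kind(my_job_kind);
    let registry = DynJobKind::registry();
    for (name, kind) in registry.iter_with_names() {
        let _ = (name, kind);
    }
}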
|
crates/fayalite/src/build/vendor.rs (new file, 8 lines)
@@ -0,0 +1,8 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

pub mod xilinx;

pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
    xilinx::built_in_job_kinds()
}
crates/fayalite/src/build/vendor/xilinx.rs (new file, vendored, 28 lines)
@@ -0,0 +1,28 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information

use crate::{annotations::make_annotation_enum, intern::Interned};

pub mod yosys_nextpnr_prjxray;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcIOStandardAnnotation {
    pub value: Interned<str>,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcLocationAnnotation {
    pub location: Interned<str>,
}

make_annotation_enum! {
    #[non_exhaustive]
    pub enum XilinxAnnotation {
        XdcIOStandard(XdcIOStandardAnnotation),
        XdcLocation(XdcLocationAnnotation),
    }
}

pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
    yosys_nextpnr_prjxray::built_in_job_kinds()
}
crates/fayalite/src/build/vendor/xilinx/yosys_nextpnr_prjxray.rs (new file, vendored, 920 lines)
@@ -0,0 +1,920 @@
|
||||||
|
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||||
|
// See Notices.txt for copyright information
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
annotations::Annotation,
|
||||||
|
build::{
|
||||||
|
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, JobAndDependencies,
|
||||||
|
JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
|
||||||
|
JobKindAndDependencies, ToArgs, WriteArgs,
|
||||||
|
external::{
|
||||||
|
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||||
|
},
|
||||||
|
vendor::xilinx::{XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation},
|
||||||
|
verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
|
||||||
|
},
|
||||||
|
bundle::Bundle,
|
||||||
|
firrtl::{ScalarizedModuleABI, ScalarizedModuleABIAnnotations, ScalarizedModuleABIPort},
|
||||||
|
intern::{Intern, InternSlice, Interned},
|
||||||
|
module::{Module, NameId},
|
||||||
|
prelude::JobParams,
|
||||||
|
util::job_server::AcquiredJob,
|
||||||
|
};
|
||||||
|
use clap::ValueEnum;
|
||||||
|
use eyre::Context;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::{
|
||||||
|
ffi::{OsStr, OsString},
|
||||||
|
fmt::{self, Write},
|
||||||
|
ops::ControlFlow,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)]
|
||||||
|
pub struct YosysNextpnrXrayWriteYsFileJobKind;
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||||
|
pub struct YosysNextpnrXrayWriteYsFileArgs {}
|
||||||
|
|
||||||
|
impl ToArgs for YosysNextpnrXrayWriteYsFileArgs {
|
||||||
|
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {} = self;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||||
|
pub struct YosysNextpnrXrayWriteYsFile {
|
||||||
|
main_verilog_file: Interned<Path>,
|
||||||
|
ys_file: Interned<Path>,
|
||||||
|
json_file: Interned<Path>,
|
||||||
|
json_file_name: Interned<OsStr>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl YosysNextpnrXrayWriteYsFile {
|
||||||
|
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||||
|
self.main_verilog_file
|
||||||
|
}
|
||||||
|
pub fn ys_file(&self) -> Interned<Path> {
|
||||||
|
self.ys_file
|
||||||
|
}
|
||||||
|
pub fn json_file(&self) -> Interned<Path> {
|
||||||
|
self.json_file
|
||||||
|
}
|
||||||
|
pub fn json_file_name(&self) -> Interned<OsStr> {
|
||||||
|
self.json_file_name
|
||||||
|
}
|
||||||
|
fn write_ys(
|
||||||
|
&self,
|
||||||
|
output: &mut OsString,
|
||||||
|
additional_files: &[Interned<Path>],
|
||||||
|
main_module_name_id: NameId,
|
||||||
|
) -> eyre::Result<()> {
|
||||||
|
let Self {
|
||||||
|
main_verilog_file,
|
||||||
|
ys_file: _,
|
||||||
|
json_file: _,
|
||||||
|
json_file_name,
|
||||||
|
} = self;
|
||||||
|
for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
|
||||||
|
output.push("read_verilog -sv \"");
|
||||||
|
output.push(verilog_file);
|
||||||
|
output.push("\"\n");
|
||||||
|
}
|
||||||
|
let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
|
||||||
|
writeln!(
|
||||||
|
output,
|
||||||
|
"synth_xilinx -flatten -abc9 -nobram -arch xc7 -top {circuit_name}"
|
||||||
|
)
|
||||||
|
.expect("writing to OsString can't fail");
|
||||||
|
output.push("write_json \"");
|
||||||
|
output.push(json_file_name);
|
||||||
|
output.push("\"\n");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
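// Illustrative output (not part of this diff): for a design whose top module is
// `top` and whose Verilog lives in `top.v`, `write_ys` above produces a Yosys
// script along these lines:
//
//     read_verilog -sv "top.v"
//     synth_xilinx -flatten -abc9 -nobram -arch xc7 -top top
//     write_json "top.json"
//
// with one `read_verilog` line per file returned by `VerilogJob::all_verilog_files`.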
|
||||||
|
|
||||||
|
impl JobKind for YosysNextpnrXrayWriteYsFileJobKind {
|
||||||
|
type Args = YosysNextpnrXrayWriteYsFileArgs;
|
||||||
|
type Job = YosysNextpnrXrayWriteYsFile;
|
||||||
|
type Dependencies = JobKindAndDependencies<VerilogJobKind>;
|
||||||
|
|
||||||
|
fn dependencies(self) -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
mut args: JobArgsAndDependencies<Self>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||||
|
args.dependencies
|
||||||
|
.dependencies
|
||||||
|
.args
|
||||||
|
.args
|
||||||
|
.additional_args
|
||||||
|
.verilog_dialect
|
||||||
|
.get_or_insert(VerilogDialect::Yosys);
|
||||||
|
args.args_to_jobs_simple(params, |_kind, args, dependencies| {
|
||||||
|
let YosysNextpnrXrayWriteYsFileArgs {} = args;
|
||||||
|
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||||
|
let verilog_job = dependencies.get_job::<VerilogJob, _>();
|
||||||
|
let json_file = base_job.file_with_ext("json");
|
||||||
|
Ok(YosysNextpnrXrayWriteYsFile {
|
||||||
|
main_verilog_file: verilog_job.main_verilog_file(),
|
||||||
|
ys_file: base_job.file_with_ext("ys"),
|
||||||
|
json_file,
|
||||||
|
json_file_name: json_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::DynamicPaths {
|
||||||
|
source_job_name: VerilogJobKind.name(),
|
||||||
|
}]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::Path { path: job.ys_file }].intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn name(self) -> Interned<str> {
|
||||||
|
"yosys-nextpnr-xray-write-ys-file".intern()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
self,
|
||||||
|
job: &Self::Job,
|
||||||
|
inputs: &[JobItem],
|
||||||
|
params: &JobParams,
|
||||||
|
_acquired_job: &mut AcquiredJob,
|
||||||
|
) -> eyre::Result<Vec<JobItem>> {
|
||||||
|
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||||
|
let [additional_files] = inputs else {
|
||||||
|
unreachable!();
|
||||||
|
};
|
||||||
|
let additional_files = VerilogJob::unwrap_additional_files(additional_files);
|
||||||
|
let mut contents = OsString::new();
|
||||||
|
job.write_ys(
|
||||||
|
&mut contents,
|
||||||
|
additional_files,
|
||||||
|
params.main_module().name_id(),
|
||||||
|
)?;
|
||||||
|
let path = job.ys_file;
|
||||||
|
std::fs::write(path, contents.as_encoded_bytes())
|
||||||
|
.wrap_err_with(|| format!("writing {path:?} failed"))?;
|
||||||
|
Ok(vec![JobItem::Path { path }])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn subcommand_hidden(self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||||
|
pub struct YosysNextpnrXraySynthArgs {}
|
||||||
|
|
||||||
|
impl ToArgs for YosysNextpnrXraySynthArgs {
|
||||||
|
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {} = self;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
|
||||||
|
pub struct YosysNextpnrXraySynth {
|
||||||
|
#[serde(flatten)]
|
||||||
|
write_ys_file: YosysNextpnrXrayWriteYsFile,
|
||||||
|
ys_file_name: Interned<OsStr>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for YosysNextpnrXraySynth {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
let Self {
|
||||||
|
write_ys_file:
|
||||||
|
YosysNextpnrXrayWriteYsFile {
|
||||||
|
main_verilog_file,
|
||||||
|
ys_file,
|
||||||
|
json_file,
|
||||||
|
json_file_name,
|
||||||
|
},
|
||||||
|
ys_file_name,
|
||||||
|
} = self;
|
||||||
|
f.debug_struct("YosysNextpnrXraySynth")
|
||||||
|
.field("main_verilog_file", main_verilog_file)
|
||||||
|
.field("ys_file", ys_file)
|
||||||
|
.field("ys_file_name", ys_file_name)
|
||||||
|
.field("json_file", json_file)
|
||||||
|
.field("json_file_name", json_file_name)
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl YosysNextpnrXraySynth {
|
||||||
|
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||||
|
self.write_ys_file.main_verilog_file()
|
||||||
|
}
|
||||||
|
pub fn ys_file(&self) -> Interned<Path> {
|
||||||
|
self.write_ys_file.ys_file()
|
||||||
|
}
|
||||||
|
pub fn ys_file_name(&self) -> Interned<OsStr> {
|
||||||
|
self.ys_file_name
|
||||||
|
}
|
||||||
|
pub fn json_file(&self) -> Interned<Path> {
|
||||||
|
self.write_ys_file.json_file()
|
||||||
|
}
|
||||||
|
pub fn json_file_name(&self) -> Interned<OsStr> {
|
||||||
|
self.write_ys_file.json_file_name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||||
|
pub struct Yosys;
|
||||||
|
|
||||||
|
impl ExternalProgramTrait for Yosys {
|
||||||
|
fn default_program_name() -> Interned<str> {
|
||||||
|
"yosys".intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalCommand for YosysNextpnrXraySynth {
|
||||||
|
type AdditionalArgs = YosysNextpnrXraySynthArgs;
|
||||||
|
type AdditionalJobData = Self;
|
||||||
|
type BaseJobPosition = GetJobPositionDependencies<
|
||||||
|
GetJobPositionDependencies<
|
||||||
|
GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
|
||||||
|
>,
|
||||||
|
>;
|
||||||
|
type Dependencies = JobKindAndDependencies<YosysNextpnrXrayWriteYsFileJobKind>;
|
||||||
|
type ExternalProgram = Yosys;
|
||||||
|
|
||||||
|
fn dependencies() -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<(
|
||||||
|
Self::AdditionalJobData,
|
||||||
|
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||||
|
)> {
|
||||||
|
args.args_to_jobs_external_simple(params, |args, dependencies| {
|
||||||
|
let YosysNextpnrXraySynthArgs {} = args.additional_args;
|
||||||
|
Ok(Self {
|
||||||
|
write_ys_file: dependencies.job.job.clone(),
|
||||||
|
ys_file_name: dependencies
|
||||||
|
.job
|
||||||
|
.job
|
||||||
|
.ys_file()
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||||
|
[
|
||||||
|
JobItemName::Path {
|
||||||
|
path: job.additional_job_data().ys_file(),
|
||||||
|
},
|
||||||
|
JobItemName::Path {
|
||||||
|
path: job.additional_job_data().main_verilog_file(),
|
||||||
|
},
|
||||||
|
JobItemName::DynamicPaths {
|
||||||
|
source_job_name: VerilogJobKind.name(),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||||
|
[job.additional_job_data().json_file()].intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||||
|
args.write_arg("-s");
|
||||||
|
args.write_interned_arg(job.additional_job_data().ys_file_name());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||||
|
Some(job.output_dir())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn job_kind_name() -> Interned<str> {
|
||||||
|
"yosys-nextpnr-xray-synth".intern()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn subcommand_hidden() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
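// Illustrative note (not part of this diff): with the defaults above this job
// shells out to `yosys -s <design>.ys`, run from the job's output directory
// (`current_dir`), and the build graph records `<design>.json` as its output.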
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)]
|
||||||
|
pub struct YosysNextpnrXrayWriteXdcFileJobKind;
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||||
|
pub struct YosysNextpnrXrayWriteXdcFileArgs {}
|
||||||
|
|
||||||
|
impl ToArgs for YosysNextpnrXrayWriteXdcFileArgs {
|
||||||
|
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {} = self;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||||
|
pub struct YosysNextpnrXrayWriteXdcFile {
|
||||||
|
firrtl_export_options: crate::firrtl::ExportOptions,
|
||||||
|
output_dir: Interned<Path>,
|
||||||
|
xdc_file: Interned<Path>,
|
||||||
|
}
|
||||||
|
|
||||||
|
struct WriteXdcContentsError(eyre::Report);
|
||||||
|
|
||||||
|
impl From<eyre::Report> for WriteXdcContentsError {
|
||||||
|
fn from(v: eyre::Report) -> Self {
|
||||||
|
Self(v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<fmt::Error> for WriteXdcContentsError {
|
||||||
|
fn from(_v: fmt::Error) -> Self {
|
||||||
|
unreachable!("String write can't fail")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn tcl_escape(s: impl AsRef<str>) -> String {
|
||||||
|
let s = s.as_ref();
|
||||||
|
let mut retval = String::with_capacity(s.len().saturating_add(2));
|
||||||
|
retval.push('"');
|
||||||
|
for ch in s.chars() {
|
||||||
|
if let '$' | '\\' | '[' = ch {
|
||||||
|
retval.push('\\');
|
||||||
|
}
|
||||||
|
retval.push(ch);
|
||||||
|
}
|
||||||
|
retval.push('"');
|
||||||
|
retval
|
||||||
|
}
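// Illustrative example (not part of this diff): `tcl_escape` wraps its input in
// double quotes and backslash-escapes the characters Tcl treats specially inside
// a quoted word (`$`, `\`, `[`), so a scalarized port name like `bus[3]` becomes
// the Tcl word "bus\[3]".
#[test]
fn tcl_escape_example() {
    assert_eq!(tcl_escape("bus[3]"), r#""bus\[3]""#);
    assert_eq!(tcl_escape("a$b"), r#""a\$b""#);
}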
|
||||||
|
|
||||||
|
impl YosysNextpnrXrayWriteXdcFile {
|
||||||
|
fn write_xdc_contents_for_port_and_annotations(
|
||||||
|
&self,
|
||||||
|
output: &mut impl fmt::Write,
|
||||||
|
port: &ScalarizedModuleABIPort,
|
||||||
|
annotations: ScalarizedModuleABIAnnotations<'_>,
|
||||||
|
) -> Result<(), WriteXdcContentsError> {
|
||||||
|
for annotation in annotations {
|
||||||
|
match annotation.annotation() {
|
||||||
|
Annotation::DontTouch(_)
|
||||||
|
| Annotation::SVAttribute(_)
|
||||||
|
| Annotation::BlackBoxInline(_)
|
||||||
|
| Annotation::BlackBoxPath(_)
|
||||||
|
| Annotation::DocString(_)
|
||||||
|
| Annotation::CustomFirrtl(_) => {}
|
||||||
|
Annotation::Xilinx(XilinxAnnotation::XdcLocation(XdcLocationAnnotation {
|
||||||
|
location,
|
||||||
|
})) => writeln!(
|
||||||
|
output,
|
||||||
|
"set_property LOC {} [get_ports {}]",
|
||||||
|
tcl_escape(location),
|
||||||
|
tcl_escape(port.scalarized_name())
|
||||||
|
)?,
|
||||||
|
Annotation::Xilinx(XilinxAnnotation::XdcIOStandard(XdcIOStandardAnnotation {
|
||||||
|
value,
|
||||||
|
})) => writeln!(
|
||||||
|
output,
|
||||||
|
"set_property IOSTANDARD {} [get_ports {}]",
|
||||||
|
tcl_escape(value),
|
||||||
|
tcl_escape(port.scalarized_name())
|
||||||
|
)?,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
fn write_xdc_contents(
|
||||||
|
&self,
|
||||||
|
output: &mut String,
|
||||||
|
top_module: &Module<Bundle>,
|
||||||
|
) -> eyre::Result<()> {
|
||||||
|
let scalarized_module_abi =
|
||||||
|
ScalarizedModuleABI::new(top_module, self.firrtl_export_options)
|
||||||
|
.map_err(eyre::Report::from)?;
|
||||||
|
match scalarized_module_abi.for_each_port_and_annotations(|port, annotations| {
|
||||||
|
match self.write_xdc_contents_for_port_and_annotations(output, port, annotations) {
|
||||||
|
Ok(()) => ControlFlow::Continue(()),
|
||||||
|
Err(e) => ControlFlow::Break(e),
|
||||||
|
}
|
||||||
|
}) {
|
||||||
|
ControlFlow::Continue(()) => Ok(()),
|
||||||
|
ControlFlow::Break(e) => Err(e.0),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
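// Illustrative output (not part of this diff): for a scalarized port `led`
// carrying an `XdcLocationAnnotation` of "G6" and an `XdcIOStandardAnnotation`
// of "LVCMOS33", `write_xdc_contents` above emits:
//
//     set_property LOC "G6" [get_ports "led"]
//     set_property IOSTANDARD "LVCMOS33" [get_ports "led"]
//
// (locations, IO standards, and port names all pass through `tcl_escape`, hence
// the surrounding quotes).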
|
||||||
|
|
||||||
|
impl JobKind for YosysNextpnrXrayWriteXdcFileJobKind {
|
||||||
|
type Args = YosysNextpnrXrayWriteXdcFileArgs;
|
||||||
|
type Job = YosysNextpnrXrayWriteXdcFile;
|
||||||
|
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<YosysNextpnrXraySynth>>;
|
||||||
|
|
||||||
|
fn dependencies(self) -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
args: JobArgsAndDependencies<Self>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||||
|
let firrtl_export_options = args
|
||||||
|
.dependencies
|
||||||
|
.dependencies
|
||||||
|
.dependencies
|
||||||
|
.dependencies
|
||||||
|
.dependencies
|
||||||
|
.args
|
||||||
|
.args
|
||||||
|
.export_options;
|
||||||
|
args.args_to_jobs_simple(params, |_kind, args, dependencies| {
|
||||||
|
let YosysNextpnrXrayWriteXdcFileArgs {} = args;
|
||||||
|
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||||
|
Ok(YosysNextpnrXrayWriteXdcFile {
|
||||||
|
firrtl_export_options,
|
||||||
|
output_dir: base_job.output_dir(),
|
||||||
|
xdc_file: base_job.file_with_ext("xdc"),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::Path {
|
||||||
|
path: job.output_dir,
|
||||||
|
}]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::Path { path: job.xdc_file }].intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn name(self) -> Interned<str> {
|
||||||
|
"yosys-nextpnr-xray-write-xdc-file".intern()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
self,
|
||||||
|
job: &Self::Job,
|
||||||
|
inputs: &[JobItem],
|
||||||
|
params: &JobParams,
|
||||||
|
_acquired_job: &mut AcquiredJob,
|
||||||
|
) -> eyre::Result<Vec<JobItem>> {
|
||||||
|
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||||
|
let mut xdc = String::new();
|
||||||
|
job.write_xdc_contents(&mut xdc, params.main_module())?;
|
||||||
|
// TODO: write the annotation-derived contents generated into `xdc` above instead of this hardcoded placeholder pinout
|
||||||
|
std::fs::write(
|
||||||
|
job.xdc_file,
|
||||||
|
r"# autogenerated
|
||||||
|
set_property LOC G6 [get_ports led]
|
||||||
|
set_property IOSTANDARD LVCMOS33 [get_ports led]
|
||||||
|
set_property LOC E3 [get_ports clk]
|
||||||
|
set_property IOSTANDARD LVCMOS33 [get_ports clk]
|
||||||
|
set_property LOC C2 [get_ports rst]
|
||||||
|
set_property IOSTANDARD LVCMOS33 [get_ports rst]
|
||||||
|
",
|
||||||
|
)?;
|
||||||
|
Ok(vec![JobItem::Path { path: job.xdc_file }])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn subcommand_hidden(self) -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||||
|
pub struct NextpnrXilinx;
|
||||||
|
|
||||||
|
impl ExternalProgramTrait for NextpnrXilinx {
|
||||||
|
fn default_program_name() -> Interned<str> {
|
||||||
|
"nextpnr-xilinx".intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! make_device_enum {
|
||||||
|
($vis:vis enum $Device:ident {
|
||||||
|
$(
|
||||||
|
#[
|
||||||
|
name = $name:literal,
|
||||||
|
xray_part = $xray_part:literal,
|
||||||
|
xray_device = $xray_device:literal,
|
||||||
|
xray_family = $xray_family:literal,
|
||||||
|
]
|
||||||
|
$variant:ident,
|
||||||
|
)*
|
||||||
|
}) => {
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, ValueEnum)]
|
||||||
|
$vis enum $Device {
|
||||||
|
$(
|
||||||
|
#[value(name = $name, alias = $xray_part)]
|
||||||
|
$variant,
|
||||||
|
)*
|
||||||
|
}
|
||||||
|
|
||||||
|
impl $Device {
|
||||||
|
$vis fn as_str(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
$(Self::$variant => $name,)*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$vis fn xray_part(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
$(Self::$variant => $xray_part,)*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$vis fn xray_device(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
$(Self::$variant => $xray_device,)*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$vis fn xray_family(self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
$(Self::$variant => $xray_family,)*
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
struct DeviceVisitor;
|
||||||
|
|
||||||
|
impl<'de> serde::de::Visitor<'de> for DeviceVisitor {
|
||||||
|
type Value = $Device;
|
||||||
|
|
||||||
|
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str("a Xilinx device string")
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
|
||||||
|
where
|
||||||
|
E: serde::de::Error,
|
||||||
|
{
|
||||||
|
match $Device::from_str(v, false) {
|
||||||
|
Ok(v) => Ok(v),
|
||||||
|
Err(_) => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
|
||||||
|
where
|
||||||
|
E: serde::de::Error,
|
||||||
|
{
|
||||||
|
match str::from_utf8(v).ok().and_then(|v| $Device::from_str(v, false).ok()) {
|
||||||
|
Some(v) => Ok(v),
|
||||||
|
None => Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'de> Deserialize<'de> for $Device {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
deserializer.deserialize_string(DeviceVisitor)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Serialize for $Device {
|
||||||
|
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||||
|
where
|
||||||
|
S: serde::Serializer,
|
||||||
|
{
|
||||||
|
self.as_str().serialize(serializer)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
make_device_enum! {
|
||||||
|
pub enum Device {
|
||||||
|
#[
|
||||||
|
name = "xc7a35ticsg324-1L",
|
||||||
|
xray_part = "xc7a35tcsg324-1",
|
||||||
|
xray_device = "xc7a35t",
|
||||||
|
xray_family = "artix7",
|
||||||
|
]
|
||||||
|
Xc7a35ticsg324_1l,
|
||||||
|
#[
|
||||||
|
name = "xc7a100ticsg324-1L",
|
||||||
|
xray_part = "xc7a100tcsg324-1",
|
||||||
|
xray_device = "xc7a100t",
|
||||||
|
xray_family = "artix7",
|
||||||
|
]
|
||||||
|
Xc7a100ticsg324_1l,
|
||||||
|
}
|
||||||
|
}
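// Illustrative example (not part of this diff): the invocation above expands to a
// `Device` enum whose accessors map the user-facing part name onto the Project
// X-Ray naming scheme, e.g. for the Arty A7-100T part:
//
//     Device::Xc7a100ticsg324_1l.as_str()      == "xc7a100ticsg324-1L"
//     Device::Xc7a100ticsg324_1l.xray_part()   == "xc7a100tcsg324-1"
//     Device::Xc7a100ticsg324_1l.xray_device() == "xc7a100t"
//     Device::Xc7a100ticsg324_1l.xray_family() == "artix7"
//
// `clap::ValueEnum` accepts either the name or the `xray_part` alias on the
// command line, and serde serializes the enum as its `as_str()` string.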
|
||||||
|
|
||||||
|
impl fmt::Display for Device {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str(self.as_str())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||||
|
pub struct YosysNextpnrXrayRunNextpnrArgs {
|
||||||
|
#[arg(long, env = "CHIPDB_DIR", value_hint = clap::ValueHint::DirPath)]
|
||||||
|
pub nextpnr_xilinx_chipdb_dir: PathBuf,
|
||||||
|
#[arg(long)]
|
||||||
|
pub device: Device,
|
||||||
|
#[arg(long, default_value_t = 0)]
|
||||||
|
pub nextpnr_xilinx_seed: i32,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToArgs for YosysNextpnrXrayRunNextpnrArgs {
|
||||||
|
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {
|
||||||
|
nextpnr_xilinx_chipdb_dir,
|
||||||
|
device,
|
||||||
|
nextpnr_xilinx_seed,
|
||||||
|
} = self;
|
||||||
|
args.write_long_option_eq("nextpnr-xilinx-chipdb-dir", nextpnr_xilinx_chipdb_dir);
|
||||||
|
args.write_long_option_eq("device", device.as_str());
|
||||||
|
args.write_display_arg(format_args!("--nextpnr-xilinx-seed={nextpnr_xilinx_seed}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||||
|
pub struct YosysNextpnrXrayRunNextpnr {
|
||||||
|
nextpnr_xilinx_chipdb_dir: Interned<Path>,
|
||||||
|
device: Device,
|
||||||
|
nextpnr_xilinx_seed: i32,
|
||||||
|
xdc_file: Interned<Path>,
|
||||||
|
xdc_file_name: Interned<OsStr>,
|
||||||
|
json_file: Interned<Path>,
|
||||||
|
json_file_name: Interned<OsStr>,
|
||||||
|
routed_json_file: Interned<Path>,
|
||||||
|
routed_json_file_name: Interned<OsStr>,
|
||||||
|
fasm_file: Interned<Path>,
|
||||||
|
fasm_file_name: Interned<OsStr>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl YosysNextpnrXrayRunNextpnr {
|
||||||
|
fn chipdb_file(&self) -> Interned<Path> {
|
||||||
|
let mut retval = self
|
||||||
|
.nextpnr_xilinx_chipdb_dir
|
||||||
|
.join(self.device.xray_device());
|
||||||
|
retval.set_extension("bin");
|
||||||
|
retval.intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalCommand for YosysNextpnrXrayRunNextpnr {
|
||||||
|
type AdditionalArgs = YosysNextpnrXrayRunNextpnrArgs;
|
||||||
|
type AdditionalJobData = Self;
|
||||||
|
type BaseJobPosition = GetJobPositionDependencies<
|
||||||
|
GetJobPositionDependencies<<YosysNextpnrXraySynth as ExternalCommand>::BaseJobPosition>,
|
||||||
|
>;
|
||||||
|
type Dependencies = JobKindAndDependencies<YosysNextpnrXrayWriteXdcFileJobKind>;
|
||||||
|
type ExternalProgram = NextpnrXilinx;
|
||||||
|
|
||||||
|
fn dependencies() -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<(
|
||||||
|
Self::AdditionalJobData,
|
||||||
|
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||||
|
)> {
|
||||||
|
args.args_to_jobs_external_simple(params, |args, dependencies| {
|
||||||
|
let YosysNextpnrXrayRunNextpnrArgs {
|
||||||
|
nextpnr_xilinx_chipdb_dir,
|
||||||
|
device,
|
||||||
|
nextpnr_xilinx_seed,
|
||||||
|
} = args.additional_args;
|
||||||
|
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||||
|
let write_xdc_file = dependencies.get_job::<YosysNextpnrXrayWriteXdcFile, _>();
|
||||||
|
let synth = dependencies.get_job::<ExternalCommandJob<YosysNextpnrXraySynth>, _>();
|
||||||
|
let routed_json_file = base_job.file_with_ext("routed.json");
|
||||||
|
let fasm_file = base_job.file_with_ext("fasm");
|
||||||
|
Ok(Self {
|
||||||
|
nextpnr_xilinx_chipdb_dir: nextpnr_xilinx_chipdb_dir.intern_deref(),
|
||||||
|
device,
|
||||||
|
nextpnr_xilinx_seed,
|
||||||
|
xdc_file: write_xdc_file.xdc_file,
|
||||||
|
xdc_file_name: write_xdc_file
|
||||||
|
.xdc_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
json_file: synth.additional_job_data().json_file(),
|
||||||
|
json_file_name: synth.additional_job_data().json_file_name(),
|
||||||
|
routed_json_file,
|
||||||
|
routed_json_file_name: routed_json_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
fasm_file,
|
||||||
|
fasm_file_name: fasm_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||||
|
[
|
||||||
|
JobItemName::Path {
|
||||||
|
path: job.additional_job_data().json_file,
|
||||||
|
},
|
||||||
|
JobItemName::Path {
|
||||||
|
path: job.additional_job_data().xdc_file,
|
||||||
|
},
|
||||||
|
]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||||
|
[
|
||||||
|
job.additional_job_data().routed_json_file,
|
||||||
|
job.additional_job_data().fasm_file,
|
||||||
|
]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||||
|
let job_data @ YosysNextpnrXrayRunNextpnr {
|
||||||
|
nextpnr_xilinx_seed,
|
||||||
|
xdc_file_name,
|
||||||
|
json_file_name,
|
||||||
|
routed_json_file_name,
|
||||||
|
fasm_file_name,
|
||||||
|
..
|
||||||
|
} = job.additional_job_data();
|
||||||
|
args.write_long_option_eq("chipdb", job_data.chipdb_file());
|
||||||
|
args.write_long_option_eq("xdc", xdc_file_name);
|
||||||
|
args.write_long_option_eq("json", json_file_name);
|
||||||
|
args.write_long_option_eq("write", routed_json_file_name);
|
||||||
|
args.write_long_option_eq("fasm", fasm_file_name);
|
||||||
|
args.write_display_arg(format_args!("--seed={nextpnr_xilinx_seed}"));
|
||||||
|
}
|
||||||
|
|
||||||
|
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||||
|
Some(job.output_dir())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn job_kind_name() -> Interned<str> {
|
||||||
|
"yosys-nextpnr-xray-run-nextpnr".intern()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn subcommand_hidden() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||||
|
pub struct Xcfasm;
|
||||||
|
|
||||||
|
impl ExternalProgramTrait for Xcfasm {
|
||||||
|
fn default_program_name() -> Interned<str> {
|
||||||
|
"xcfasm".intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||||
|
pub struct YosysNextpnrXrayArgs {
|
||||||
|
#[arg(long, env = "DB_DIR", value_hint = clap::ValueHint::DirPath)]
|
||||||
|
pub prjxray_db_dir: PathBuf,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToArgs for YosysNextpnrXrayArgs {
|
||||||
|
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self { prjxray_db_dir } = self;
|
||||||
|
args.write_long_option_eq("prjxray-db-dir", prjxray_db_dir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
|
||||||
|
pub struct YosysNextpnrXray {
|
||||||
|
prjxray_db_dir: Interned<Path>,
|
||||||
|
device: Device,
|
||||||
|
fasm_file: Interned<Path>,
|
||||||
|
fasm_file_name: Interned<OsStr>,
|
||||||
|
frames_file: Interned<Path>,
|
||||||
|
frames_file_name: Interned<OsStr>,
|
||||||
|
bit_file: Interned<Path>,
|
||||||
|
bit_file_name: Interned<OsStr>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl YosysNextpnrXray {
|
||||||
|
fn db_root(&self) -> Interned<Path> {
|
||||||
|
self.prjxray_db_dir
|
||||||
|
.join(self.device.xray_family())
|
||||||
|
.intern_deref()
|
||||||
|
}
|
||||||
|
fn part_file(&self) -> Interned<Path> {
|
||||||
|
let mut retval = self.prjxray_db_dir.join(self.device.xray_family());
|
||||||
|
retval.push(self.device.xray_part());
|
||||||
|
retval.push("part.yaml");
|
||||||
|
retval.intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalCommand for YosysNextpnrXray {
|
||||||
|
type AdditionalArgs = YosysNextpnrXrayArgs;
|
||||||
|
type AdditionalJobData = Self;
|
||||||
|
type BaseJobPosition = GetJobPositionDependencies<
|
||||||
|
<YosysNextpnrXrayRunNextpnr as ExternalCommand>::BaseJobPosition,
|
||||||
|
>;
|
||||||
|
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<YosysNextpnrXrayRunNextpnr>>;
|
||||||
|
type ExternalProgram = Xcfasm;
|
||||||
|
|
||||||
|
fn dependencies() -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<(
|
||||||
|
Self::AdditionalJobData,
|
||||||
|
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||||
|
)> {
|
||||||
|
args.args_to_jobs_external_simple(params, |args, dependencies| {
|
||||||
|
let YosysNextpnrXrayArgs { prjxray_db_dir } = args.additional_args;
|
||||||
|
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||||
|
let frames_file = base_job.file_with_ext("frames");
|
||||||
|
let bit_file = base_job.file_with_ext("bit");
|
||||||
|
Ok(Self {
|
||||||
|
prjxray_db_dir: prjxray_db_dir.intern_deref(),
|
||||||
|
device: dependencies.job.job.additional_job_data().device,
|
||||||
|
fasm_file: dependencies.job.job.additional_job_data().fasm_file,
|
||||||
|
fasm_file_name: dependencies.job.job.additional_job_data().fasm_file_name,
|
||||||
|
frames_file,
|
||||||
|
frames_file_name: frames_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
bit_file,
|
||||||
|
bit_file_name: bit_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::Path {
|
||||||
|
path: job.additional_job_data().fasm_file,
|
||||||
|
}]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||||
|
[
|
||||||
|
job.additional_job_data().frames_file,
|
||||||
|
job.additional_job_data().bit_file,
|
||||||
|
]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||||
|
let job_data @ YosysNextpnrXray {
|
||||||
|
device,
|
||||||
|
fasm_file_name,
|
||||||
|
frames_file_name,
|
||||||
|
bit_file_name,
|
||||||
|
..
|
||||||
|
} = job.additional_job_data();
|
||||||
|
args.write_arg("--sparse");
|
||||||
|
args.write_long_option_eq("db-root", job_data.db_root());
|
||||||
|
args.write_long_option_eq("part", device.xray_part());
|
||||||
|
args.write_long_option_eq("part_file", job_data.part_file());
|
||||||
|
args.write_long_option_eq("fn_in", fasm_file_name);
|
||||||
|
args.write_long_option_eq("frm_out", frames_file_name);
|
||||||
|
args.write_long_option_eq("bit_out", bit_file_name);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||||
|
Some(job.output_dir())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn job_kind_name() -> Interned<str> {
|
||||||
|
"yosys-nextpnr-xray".intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
    [
        DynJobKind::new(YosysNextpnrXrayWriteYsFileJobKind),
        DynJobKind::new(ExternalCommandJobKind::<YosysNextpnrXraySynth>::new()),
        DynJobKind::new(YosysNextpnrXrayWriteXdcFileJobKind),
        DynJobKind::new(ExternalCommandJobKind::<YosysNextpnrXrayRunNextpnr>::new()),
        DynJobKind::new(ExternalCommandJobKind::<YosysNextpnrXray>::new()),
    ]
}
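
For orientation, the job kinds registered above chain yosys synthesis → nextpnr-xilinx place-and-route → xcfasm bitstream packing. The sketch below spells out roughly what the two external command lines assembled by the `command_line_args` implementations above look like. The flags mirror the code, but every path, file name, and device/part string is an invented placeholder; the real values come from the base job's output files, the chipdb/prjxray-db directory arguments, and `Device`'s `xray_*` accessors.

```rust
// Sketch only: the flags mirror command_line_args() above, but every path,
// file name, and device/part string here is an invented placeholder.
fn example_commands() -> (Vec<&'static str>, Vec<&'static str>) {
    let nextpnr_xilinx = vec![
        "nextpnr-xilinx",
        // chipdb_file(): <chipdb dir>/<Device::xray_device()>.bin
        "--chipdb=/opt/nextpnr-xilinx/xc7a100t.bin",
        "--xdc=top.xdc",
        // netlist produced by the yosys synth job
        "--json=top.json",
        "--write=top.routed.json",
        "--fasm=top.fasm",
        "--seed=1",
    ];
    let xcfasm = vec![
        "xcfasm",
        "--sparse",
        // db_root(): <prjxray db dir>/<Device::xray_family()>
        "--db-root=/opt/prjxray-db/artix7",
        "--part=xc7a100tcsg324-1",
        // part_file(): <db_root>/<Device::xray_part()>/part.yaml
        "--part_file=/opt/prjxray-db/artix7/xc7a100tcsg324-1/part.yaml",
        "--fn_in=top.fasm",
        "--frm_out=top.frames",
        "--bit_out=top.bit",
    ];
    (nextpnr_xilinx, xcfasm)
}
```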
|
415 crates/fayalite/src/build/verilog.rs (new file)
@@ -0,0 +1,415 @@
|
||||||
|
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||||
|
// See Notices.txt for copyright information
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
build::{
|
||||||
|
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob,
|
||||||
|
JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
|
||||||
|
JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
|
||||||
|
external::{
|
||||||
|
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
|
||||||
|
},
|
||||||
|
firrtl::{Firrtl, FirrtlJobKind},
|
||||||
|
},
|
||||||
|
intern::{Intern, InternSlice, Interned},
|
||||||
|
util::job_server::AcquiredJob,
|
||||||
|
};
|
||||||
|
use clap::Args;
|
||||||
|
use eyre::{Context, bail};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::{
|
||||||
|
ffi::{OsStr, OsString},
|
||||||
|
fmt, mem,
|
||||||
|
path::Path,
|
||||||
|
};
|
||||||
|
|
||||||
|
/// based on [LLVM Circt's recommended lowering options][lowering-options]
///
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum VerilogDialect {
    Questa,
    Spyglass,
    Verilator,
    Vivado,
    Yosys,
}

impl fmt::Display for VerilogDialect {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl VerilogDialect {
    pub fn as_str(self) -> &'static str {
        match self {
            VerilogDialect::Questa => "questa",
            VerilogDialect::Spyglass => "spyglass",
            VerilogDialect::Verilator => "verilator",
            VerilogDialect::Vivado => "vivado",
            VerilogDialect::Yosys => "yosys",
        }
    }
    pub fn firtool_extra_args(self) -> &'static [&'static str] {
        match self {
            VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
            VerilogDialect::Spyglass => {
                &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
            }
            VerilogDialect::Verilator => &[
                "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
            ],
            VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
            VerilogDialect::Yosys => {
                &["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
            }
        }
    }
}
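
`VerilogDialect` pins down the firtool `--lowering-options` each downstream consumer of the generated Verilog needs. A minimal sketch of how the mapping is meant to be consumed, outside of the crate's job machinery (the file names are placeholders):

```rust
use std::process::Command;

// Minimal sketch, not the crate's job plumbing: pick a dialect and hand its
// extra flags straight to a firtool invocation. File names are placeholders.
fn firtool_for(dialect: VerilogDialect) -> Command {
    let mut cmd = Command::new("firtool");
    cmd.arg("design.fir")
        .arg("-o")
        .arg("design.unadjusted.v")
        // e.g. Yosys => "--lowering-options=disallowLocalVariables,disallowPackedArrays"
        .args(dialect.firtool_extra_args());
    cmd
}
```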
|
||||||
|
|
||||||
|
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub struct UnadjustedVerilogArgs {
|
||||||
|
#[arg(long = "firtool-extra-arg", value_name = "ARG")]
|
||||||
|
pub firtool_extra_args: Vec<OsString>,
|
||||||
|
/// adapt the generated Verilog for a particular toolchain
|
||||||
|
#[arg(long)]
|
||||||
|
pub verilog_dialect: Option<VerilogDialect>,
|
||||||
|
#[arg(long)]
|
||||||
|
pub verilog_debug: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ToArgs for UnadjustedVerilogArgs {
|
||||||
|
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {
|
||||||
|
ref firtool_extra_args,
|
||||||
|
verilog_dialect,
|
||||||
|
verilog_debug,
|
||||||
|
} = *self;
|
||||||
|
for arg in firtool_extra_args {
|
||||||
|
args.write_long_option_eq("firtool-extra-arg", arg);
|
||||||
|
}
|
||||||
|
if let Some(verilog_dialect) = verilog_dialect {
|
||||||
|
args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str());
|
||||||
|
}
|
||||||
|
if verilog_debug {
|
||||||
|
args.write_arg("--verilog-debug");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
|
||||||
|
pub struct Firtool;
|
||||||
|
|
||||||
|
impl ExternalProgramTrait for Firtool {
|
||||||
|
fn default_program_name() -> Interned<str> {
|
||||||
|
"firtool".intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
|
||||||
|
pub struct UnadjustedVerilog {
|
||||||
|
firrtl_file: Interned<Path>,
|
||||||
|
firrtl_file_name: Interned<OsStr>,
|
||||||
|
unadjusted_verilog_file: Interned<Path>,
|
||||||
|
unadjusted_verilog_file_name: Interned<OsStr>,
|
||||||
|
firtool_extra_args: Interned<[Interned<OsStr>]>,
|
||||||
|
verilog_dialect: Option<VerilogDialect>,
|
||||||
|
verilog_debug: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UnadjustedVerilog {
|
||||||
|
pub fn firrtl_file(&self) -> Interned<Path> {
|
||||||
|
self.firrtl_file
|
||||||
|
}
|
||||||
|
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||||
|
self.unadjusted_verilog_file
|
||||||
|
}
|
||||||
|
pub fn firtool_extra_args(&self) -> Interned<[Interned<OsStr>]> {
|
||||||
|
self.firtool_extra_args
|
||||||
|
}
|
||||||
|
pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
|
||||||
|
self.verilog_dialect
|
||||||
|
}
|
||||||
|
pub fn verilog_debug(&self) -> bool {
|
||||||
|
self.verilog_debug
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExternalCommand for UnadjustedVerilog {
|
||||||
|
type AdditionalArgs = UnadjustedVerilogArgs;
|
||||||
|
type AdditionalJobData = UnadjustedVerilog;
|
||||||
|
type BaseJobPosition = GetJobPositionDependencies<GetJobPositionJob>;
|
||||||
|
type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
|
||||||
|
type ExternalProgram = Firtool;
|
||||||
|
|
||||||
|
fn dependencies() -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<(
|
||||||
|
Self::AdditionalJobData,
|
||||||
|
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||||
|
)> {
|
||||||
|
args.args_to_jobs_external_simple(params, |args, dependencies| {
|
||||||
|
let UnadjustedVerilogArgs {
|
||||||
|
firtool_extra_args,
|
||||||
|
verilog_dialect,
|
||||||
|
verilog_debug,
|
||||||
|
} = args.additional_args;
|
||||||
|
let unadjusted_verilog_file = dependencies
|
||||||
|
.dependencies
|
||||||
|
.job
|
||||||
|
.job
|
||||||
|
.file_with_ext("unadjusted.v");
|
||||||
|
let firrtl_job = dependencies.get_job::<Firrtl, _>();
|
||||||
|
Ok(UnadjustedVerilog {
|
||||||
|
firrtl_file: firrtl_job.firrtl_file(),
|
||||||
|
firrtl_file_name: firrtl_job
|
||||||
|
.firrtl_file()
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
unadjusted_verilog_file,
|
||||||
|
unadjusted_verilog_file_name: unadjusted_verilog_file
|
||||||
|
.interned_file_name()
|
||||||
|
.expect("known to have file name"),
|
||||||
|
firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(),
|
||||||
|
verilog_dialect,
|
||||||
|
verilog_debug,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::Path {
|
||||||
|
path: job.additional_job_data().firrtl_file,
|
||||||
|
}]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
|
||||||
|
[job.additional_job_data().unadjusted_verilog_file].intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
|
||||||
|
let UnadjustedVerilog {
|
||||||
|
firrtl_file: _,
|
||||||
|
firrtl_file_name,
|
||||||
|
unadjusted_verilog_file: _,
|
||||||
|
unadjusted_verilog_file_name,
|
||||||
|
firtool_extra_args,
|
||||||
|
verilog_dialect,
|
||||||
|
verilog_debug,
|
||||||
|
} = *job.additional_job_data();
|
||||||
|
args.write_interned_arg(firrtl_file_name);
|
||||||
|
args.write_arg("-o");
|
||||||
|
args.write_interned_arg(unadjusted_verilog_file_name);
|
||||||
|
if verilog_debug {
|
||||||
|
args.write_args(["-g", "--preserve-values=all"]);
|
||||||
|
}
|
||||||
|
if let Some(dialect) = verilog_dialect {
|
||||||
|
args.write_args(dialect.firtool_extra_args().iter().copied());
|
||||||
|
}
|
||||||
|
args.write_interned_args(firtool_extra_args);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
|
||||||
|
Some(job.output_dir())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn job_kind_name() -> Interned<str> {
|
||||||
|
"unadjusted-verilog".intern()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn subcommand_hidden() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run_even_if_cached_arg_name() -> Interned<str> {
|
||||||
|
"firtool-run-even-if-cached".intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||||
|
pub struct VerilogJobKind;
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub struct VerilogJobArgs {}
|
||||||
|
|
||||||
|
impl ToArgs for VerilogJobArgs {
|
||||||
|
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {} = self;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||||
|
pub struct VerilogJob {
|
||||||
|
output_dir: Interned<Path>,
|
||||||
|
unadjusted_verilog_file: Interned<Path>,
|
||||||
|
main_verilog_file: Interned<Path>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl VerilogJob {
|
||||||
|
pub fn output_dir(&self) -> Interned<Path> {
|
||||||
|
self.output_dir
|
||||||
|
}
|
||||||
|
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
|
||||||
|
self.unadjusted_verilog_file
|
||||||
|
}
|
||||||
|
pub fn main_verilog_file(&self) -> Interned<Path> {
|
||||||
|
self.main_verilog_file
|
||||||
|
}
|
||||||
|
    #[track_caller]
    pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<Path>] {
        match additional_files {
            JobItem::DynamicPaths {
                paths,
                source_job_name,
            } if *source_job_name == VerilogJobKind.name() => paths,
            v => panic!("expected VerilogJob's additional files JobItem: {v:?}"),
        }
    }
    pub fn all_verilog_files(
        main_verilog_file: Interned<Path>,
        additional_files: &[Interned<Path>],
    ) -> eyre::Result<Interned<[Interned<Path>]>> {
        let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1));
        for verilog_file in [main_verilog_file].iter().chain(additional_files) {
            if !["v", "sv"]
                .iter()
                .any(|extension| verilog_file.extension() == Some(extension.as_ref()))
            {
                continue;
            }
            let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| {
                format!("converting {verilog_file:?} to an absolute path failed")
            })?;
            if verilog_file
                .as_os_str()
                .as_encoded_bytes()
                .iter()
                .any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
            {
                bail!("verilog file path contains characters that aren't permitted");
            }
            retval.push(verilog_file.intern_deref());
        }
        Ok(retval.intern_slice())
    }
}
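
The byte-level check in `all_verilog_files` allows spaces and tabs but rejects double quotes and any other ASCII whitespace, presumably because the collected paths are later pasted into double-quoted lines of generated tool scripts (the formal flow elsewhere in this PR does exactly that with `read_verilog -sv -formal "..."`). A standalone sketch of the same predicate with a few illustrative inputs:

```rust
// Standalone sketch of the predicate used above: spaces and tabs are allowed,
// but double quotes and any other ASCII whitespace (newline, CR, form feed)
// are not, since they would break a quoted path in a generated script line.
fn path_is_allowed(path: &std::path::Path) -> bool {
    !path
        .as_os_str()
        .as_encoded_bytes()
        .iter()
        .any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
}

fn main() {
    assert!(path_is_allowed(std::path::Path::new("/tmp/my design/top.v")));
    assert!(!path_is_allowed(std::path::Path::new("/tmp/top\".v")));
    assert!(!path_is_allowed(std::path::Path::new("/tmp/top\n.v")));
}
```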
|
||||||
|
|
||||||
|
impl JobKind for VerilogJobKind {
|
||||||
|
type Args = VerilogJobArgs;
|
||||||
|
type Job = VerilogJob;
|
||||||
|
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;
|
||||||
|
|
||||||
|
fn dependencies(self) -> Self::Dependencies {
|
||||||
|
Default::default()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn args_to_jobs(
|
||||||
|
args: JobArgsAndDependencies<Self>,
|
||||||
|
params: &JobParams,
|
||||||
|
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||||
|
args.args_to_jobs_simple(params, |_kind, args, dependencies| {
|
||||||
|
let VerilogJobArgs {} = args;
|
||||||
|
let base_job = dependencies.get_job::<BaseJob, _>();
|
||||||
|
Ok(VerilogJob {
|
||||||
|
output_dir: base_job.output_dir(),
|
||||||
|
unadjusted_verilog_file: dependencies
|
||||||
|
.job
|
||||||
|
.job
|
||||||
|
.additional_job_data()
|
||||||
|
.unadjusted_verilog_file(),
|
||||||
|
main_verilog_file: base_job.file_with_ext("v"),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||||
|
[JobItemName::Path {
|
||||||
|
path: job.unadjusted_verilog_file,
|
||||||
|
}]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||||
|
[
|
||||||
|
JobItemName::Path {
|
||||||
|
path: job.main_verilog_file,
|
||||||
|
},
|
||||||
|
JobItemName::DynamicPaths {
|
||||||
|
source_job_name: self.name(),
|
||||||
|
},
|
||||||
|
]
|
||||||
|
.intern_slice()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn name(self) -> Interned<str> {
|
||||||
|
"verilog".intern()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
fn run(
|
||||||
|
self,
|
||||||
|
job: &Self::Job,
|
||||||
|
inputs: &[JobItem],
|
||||||
|
_params: &JobParams,
|
||||||
|
_acquired_job: &mut AcquiredJob,
|
||||||
|
) -> eyre::Result<Vec<JobItem>> {
|
||||||
|
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
|
||||||
|
let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
|
||||||
|
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
|
||||||
|
let file_separator_suffix = "\" ----- 8< -----\n\n";
|
||||||
|
let mut input = &*input;
|
||||||
|
let main_verilog_file = job.main_verilog_file();
|
||||||
|
let mut file_name = Some(main_verilog_file);
|
||||||
|
let mut additional_outputs = Vec::new();
|
||||||
|
loop {
|
||||||
|
let (chunk, next_file_name) = if let Some((chunk, rest)) =
|
||||||
|
input.split_once(file_separator_prefix)
|
||||||
|
{
|
||||||
|
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
|
||||||
|
bail!(
|
||||||
|
"parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
|
||||||
|
);
|
||||||
|
};
|
||||||
|
input = rest;
|
||||||
|
let next_file_name = job.output_dir.join(next_file_name).intern_deref();
|
||||||
|
additional_outputs.push(next_file_name);
|
||||||
|
(chunk, Some(next_file_name))
|
||||||
|
} else {
|
||||||
|
(mem::take(&mut input), None)
|
||||||
|
};
|
||||||
|
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
|
||||||
|
break;
|
||||||
|
};
|
||||||
|
std::fs::write(&file_name, chunk)?;
|
||||||
|
}
|
||||||
|
Ok(vec![
|
||||||
|
JobItem::Path {
|
||||||
|
path: main_verilog_file,
|
||||||
|
},
|
||||||
|
JobItem::DynamicPaths {
|
||||||
|
paths: additional_outputs,
|
||||||
|
source_job_name: self.name(),
|
||||||
|
},
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
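
`VerilogJobKind::run` post-processes firtool's single output stream: extra emitted files are concatenated behind `// ----- 8< ----- FILE "<name>" ----- 8< -----` markers, and the loop above splits that stream back into `main_verilog_file` plus one file per marker. A self-contained illustration of the separator format and the `split_once` logic; the module contents are invented, the separator strings are exactly the ones `run` uses:

```rust
// Invented example input; the separator strings are exactly the ones run() uses.
fn main() {
    let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
    let file_separator_suffix = "\" ----- 8< -----\n\n";
    let combined = "module top; endmodule\n\
        \n// ----- 8< ----- FILE \"extra.sv\" ----- 8< -----\n\n\
        module extra; endmodule\n";
    // the chunk before the first marker goes to the main .v file
    let (main_chunk, rest) = combined.split_once(file_separator_prefix).unwrap();
    // the marker names the next file; its contents run until the next marker (or EOF)
    let (file_name, extra_chunk) = rest.split_once(file_separator_suffix).unwrap();
    assert_eq!(main_chunk, "module top; endmodule\n");
    assert_eq!(file_name, "extra.sv");
    assert_eq!(extra_chunk, "module extra; endmodule\n");
}
```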
|
||||||
|
|
||||||
|
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
|
||||||
|
[
|
||||||
|
DynJobKind::new(ExternalCommandJobKind::<UnadjustedVerilog>::new()),
|
||||||
|
DynJobKind::new(VerilogJobKind),
|
||||||
|
]
|
||||||
|
}
|
|
@ -7,7 +7,7 @@ use crate::{
|
||||||
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
|
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
|
||||||
},
|
},
|
||||||
int::{Bool, DynSize},
|
int::{Bool, DynSize},
|
||||||
intern::{Intern, Interned},
|
intern::{Intern, InternSlice, Interned},
|
||||||
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
|
||||||
source_location::SourceLocation,
|
source_location::SourceLocation,
|
||||||
ty::{
|
ty::{
|
||||||
|
@ -549,7 +549,7 @@ macro_rules! impl_tuples {
|
||||||
type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>;
|
type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>;
|
||||||
fn fields(&self) -> Interned<[BundleField]> {
|
fn fields(&self) -> Interned<[BundleField]> {
|
||||||
let ($($var,)*) = self;
|
let ($($var,)*) = self;
|
||||||
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*][..].intern()
|
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
|
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
|
||||||
|
@ -580,7 +580,7 @@ macro_rules! impl_tuples {
|
||||||
$(let $var = $var.to_expr();)*
|
$(let $var = $var.to_expr();)*
|
||||||
let ty = ($(Expr::ty($var),)*);
|
let ty = ($(Expr::ty($var),)*);
|
||||||
let field_values = [$(Expr::canonical($var)),*];
|
let field_values = [$(Expr::canonical($var)),*];
|
||||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> {
|
impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> {
|
||||||
|
@ -590,7 +590,7 @@ macro_rules! impl_tuples {
|
||||||
let ($($var,)*) = self.0;
|
let ($($var,)*) = self.0;
|
||||||
let ty = ($(Expr::ty($var),)*);
|
let ty = ($(Expr::ty($var),)*);
|
||||||
let field_values = [$(Expr::canonical($var)),*];
|
let field_values = [$(Expr::canonical($var)),*];
|
||||||
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
|
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<CanonicalType> for ($($T,)*) {
|
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<CanonicalType> for ($($T,)*) {
|
||||||
|
|
|
@ -1,806 +0,0 @@
|
||||||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
|
||||||
// See Notices.txt for copyright information
|
|
||||||
use crate::{
|
|
||||||
bundle::{Bundle, BundleType},
|
|
||||||
firrtl::{self, ExportOptions},
|
|
||||||
intern::Interned,
|
|
||||||
module::Module,
|
|
||||||
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
|
|
||||||
};
|
|
||||||
use clap::{
|
|
||||||
Parser, Subcommand, ValueEnum, ValueHint,
|
|
||||||
builder::{OsStringValueParser, TypedValueParser},
|
|
||||||
};
|
|
||||||
use eyre::{Report, eyre};
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
use std::{
|
|
||||||
error,
|
|
||||||
ffi::OsString,
|
|
||||||
fmt::{self, Write},
|
|
||||||
fs, io, mem,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
process,
|
|
||||||
};
|
|
||||||
use tempfile::TempDir;
|
|
||||||
|
|
||||||
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
|
|
||||||
|
|
||||||
pub struct CliError(Report);
|
|
||||||
|
|
||||||
impl fmt::Debug for CliError {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
self.0.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for CliError {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
self.0.fmt(f)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl error::Error for CliError {}
|
|
||||||
|
|
||||||
impl From<io::Error> for CliError {
|
|
||||||
fn from(value: io::Error) -> Self {
|
|
||||||
CliError(Report::new(value))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait RunPhase<Arg> {
|
|
||||||
type Output;
|
|
||||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
|
||||||
self.run_with_job(arg, &mut AcquiredJob::acquire())
|
|
||||||
}
|
|
||||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser, Debug, Clone)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct BaseArgs {
|
|
||||||
/// the directory to put the generated main output file and associated files in
|
|
||||||
#[arg(short, long, value_hint = ValueHint::DirPath, required = true)]
|
|
||||||
pub output: Option<PathBuf>,
|
|
||||||
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
|
|
||||||
#[arg(long)]
|
|
||||||
pub file_stem: Option<String>,
|
|
||||||
#[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")]
|
|
||||||
pub keep_temp_dir: bool,
|
|
||||||
#[arg(skip = false)]
|
|
||||||
pub redirect_output_for_rust_test: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl BaseArgs {
|
|
||||||
fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option<TempDir>)> {
|
|
||||||
let (dir_path, temp_dir) = match &self.output {
|
|
||||||
Some(output) => (output.clone(), None),
|
|
||||||
None => {
|
|
||||||
let temp_dir = TempDir::new()?;
|
|
||||||
if self.keep_temp_dir {
|
|
||||||
let temp_dir = temp_dir.into_path();
|
|
||||||
println!("created temporary directory: {}", temp_dir.display());
|
|
||||||
(temp_dir, None)
|
|
||||||
} else {
|
|
||||||
(temp_dir.path().to_path_buf(), Some(temp_dir))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
Ok((
|
|
||||||
firrtl::FileBackend {
|
|
||||||
dir_path,
|
|
||||||
top_fir_file_stem: self.file_stem.clone(),
|
|
||||||
circuit_name: None,
|
|
||||||
},
|
|
||||||
temp_dir,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
/// handles possibly redirecting the command's output for Rust tests
|
|
||||||
pub fn run_external_command(
|
|
||||||
&self,
|
|
||||||
_acquired_job: &mut AcquiredJob,
|
|
||||||
mut command: process::Command,
|
|
||||||
mut captured_output: Option<&mut String>,
|
|
||||||
) -> io::Result<process::ExitStatus> {
|
|
||||||
if self.redirect_output_for_rust_test || captured_output.is_some() {
|
|
||||||
let (reader, writer) = os_pipe::pipe()?;
|
|
||||||
let mut reader = io::BufReader::new(reader);
|
|
||||||
command.stderr(writer.try_clone()?);
|
|
||||||
command.stdout(writer); // must not leave writer around after spawning child
|
|
||||||
command.stdin(process::Stdio::null());
|
|
||||||
let mut child = command.spawn()?;
|
|
||||||
drop(command); // close writers
|
|
||||||
Ok(loop {
|
|
||||||
let status = child.try_wait()?;
|
|
||||||
streaming_read_utf8(&mut reader, |s| {
|
|
||||||
if let Some(captured_output) = captured_output.as_deref_mut() {
|
|
||||||
captured_output.push_str(s);
|
|
||||||
}
|
|
||||||
// use print! so output goes to Rust test output capture
|
|
||||||
print!("{s}");
|
|
||||||
io::Result::Ok(())
|
|
||||||
})?;
|
|
||||||
if let Some(status) = status {
|
|
||||||
break status;
|
|
||||||
}
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
command.status()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser, Debug, Clone)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct FirrtlArgs {
|
|
||||||
#[command(flatten)]
|
|
||||||
pub base: BaseArgs,
|
|
||||||
#[command(flatten)]
|
|
||||||
pub export_options: ExportOptions,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct FirrtlOutput {
|
|
||||||
pub file_stem: String,
|
|
||||||
pub top_module: String,
|
|
||||||
pub output_dir: PathBuf,
|
|
||||||
pub temp_dir: Option<TempDir>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FirrtlOutput {
|
|
||||||
pub fn file_with_ext(&self, ext: &str) -> PathBuf {
|
|
||||||
let mut retval = self.output_dir.join(&self.file_stem);
|
|
||||||
retval.set_extension(ext);
|
|
||||||
retval
|
|
||||||
}
|
|
||||||
pub fn firrtl_file(&self) -> PathBuf {
|
|
||||||
self.file_with_ext("fir")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FirrtlArgs {
|
|
||||||
fn run_impl(
|
|
||||||
&self,
|
|
||||||
top_module: Module<Bundle>,
|
|
||||||
_acquired_job: &mut AcquiredJob,
|
|
||||||
) -> Result<FirrtlOutput> {
|
|
||||||
let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?;
|
|
||||||
let firrtl::FileBackend {
|
|
||||||
top_fir_file_stem,
|
|
||||||
circuit_name,
|
|
||||||
dir_path,
|
|
||||||
} = firrtl::export(file_backend, &top_module, self.export_options)?;
|
|
||||||
Ok(FirrtlOutput {
|
|
||||||
file_stem: top_fir_file_stem.expect(
|
|
||||||
"export is known to set the file stem from the circuit name if not provided",
|
|
||||||
),
|
|
||||||
top_module: circuit_name.expect("export is known to set the circuit name"),
|
|
||||||
output_dir: dir_path,
|
|
||||||
temp_dir,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
|
|
||||||
type Output = FirrtlOutput;
|
|
||||||
fn run_with_job(
|
|
||||||
&self,
|
|
||||||
top_module: Module<T>,
|
|
||||||
acquired_job: &mut AcquiredJob,
|
|
||||||
) -> Result<Self::Output> {
|
|
||||||
self.run_impl(top_module.canonical(), acquired_job)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
|
|
||||||
type Output = FirrtlOutput;
|
|
||||||
fn run_with_job(
|
|
||||||
&self,
|
|
||||||
top_module: Interned<Module<T>>,
|
|
||||||
acquired_job: &mut AcquiredJob,
|
|
||||||
) -> Result<Self::Output> {
|
|
||||||
self.run_with_job(*top_module, acquired_job)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// based on [LLVM Circt's recommended lowering options
|
|
||||||
/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target)
|
|
||||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub enum VerilogDialect {
|
|
||||||
Questa,
|
|
||||||
Spyglass,
|
|
||||||
Verilator,
|
|
||||||
Vivado,
|
|
||||||
Yosys,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for VerilogDialect {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
f.write_str(self.as_str())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl VerilogDialect {
|
|
||||||
pub fn as_str(self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
VerilogDialect::Questa => "questa",
|
|
||||||
VerilogDialect::Spyglass => "spyglass",
|
|
||||||
VerilogDialect::Verilator => "verilator",
|
|
||||||
VerilogDialect::Vivado => "vivado",
|
|
||||||
VerilogDialect::Yosys => "yosys",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn firtool_extra_args(self) -> &'static [&'static str] {
|
|
||||||
match self {
|
|
||||||
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
|
|
||||||
VerilogDialect::Spyglass => {
|
|
||||||
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
|
|
||||||
}
|
|
||||||
VerilogDialect::Verilator => &[
|
|
||||||
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
|
|
||||||
],
|
|
||||||
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
|
|
||||||
VerilogDialect::Yosys => {
|
|
||||||
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser, Debug, Clone)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct VerilogArgs {
|
|
||||||
#[command(flatten)]
|
|
||||||
pub firrtl: FirrtlArgs,
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
default_value = "firtool",
|
|
||||||
env = "FIRTOOL",
|
|
||||||
value_hint = ValueHint::CommandName,
|
|
||||||
value_parser = OsStringValueParser::new().try_map(which)
|
|
||||||
)]
|
|
||||||
pub firtool: PathBuf,
|
|
||||||
#[arg(long)]
|
|
||||||
pub firtool_extra_args: Vec<OsString>,
|
|
||||||
/// adapt the generated Verilog for a particular toolchain
|
|
||||||
#[arg(long)]
|
|
||||||
pub verilog_dialect: Option<VerilogDialect>,
|
|
||||||
#[arg(long, short = 'g')]
|
|
||||||
pub debug: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct VerilogOutput {
|
|
||||||
pub firrtl: FirrtlOutput,
|
|
||||||
pub verilog_files: Vec<PathBuf>,
|
|
||||||
pub contents_hash: Option<blake3::Hash>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl VerilogOutput {
|
|
||||||
pub fn main_verilog_file(&self) -> PathBuf {
|
|
||||||
self.firrtl.file_with_ext("v")
|
|
||||||
}
|
|
||||||
fn unadjusted_verilog_file(&self) -> PathBuf {
|
|
||||||
self.firrtl.file_with_ext("unadjusted.v")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl VerilogArgs {
|
|
||||||
fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result<VerilogOutput> {
|
|
||||||
let input = fs::read_to_string(output.unadjusted_verilog_file())?;
|
|
||||||
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
|
|
||||||
let file_separator_suffix = "\" ----- 8< -----\n\n";
|
|
||||||
let mut input = &*input;
|
|
||||||
output.contents_hash = Some(blake3::hash(input.as_bytes()));
|
|
||||||
let main_verilog_file = output.main_verilog_file();
|
|
||||||
let mut file_name: Option<&Path> = Some(&main_verilog_file);
|
|
||||||
loop {
|
|
||||||
let (chunk, next_file_name) = if let Some((chunk, rest)) =
|
|
||||||
input.split_once(file_separator_prefix)
|
|
||||||
{
|
|
||||||
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
|
|
||||||
return Err(CliError(eyre!(
|
|
||||||
"parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
|
|
||||||
)));
|
|
||||||
};
|
|
||||||
input = rest;
|
|
||||||
(chunk, Some(next_file_name.as_ref()))
|
|
||||||
} else {
|
|
||||||
(mem::take(&mut input), None)
|
|
||||||
};
|
|
||||||
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
|
|
||||||
break;
|
|
||||||
};
|
|
||||||
let file_name = output.firrtl.output_dir.join(file_name);
|
|
||||||
fs::write(&file_name, chunk)?;
|
|
||||||
if let Some(extension) = file_name.extension() {
|
|
||||||
if extension == "v" || extension == "sv" {
|
|
||||||
output.verilog_files.push(file_name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(output)
|
|
||||||
}
|
|
||||||
fn run_impl(
|
|
||||||
&self,
|
|
||||||
firrtl_output: FirrtlOutput,
|
|
||||||
acquired_job: &mut AcquiredJob,
|
|
||||||
) -> Result<VerilogOutput> {
|
|
||||||
let Self {
|
|
||||||
firrtl,
|
|
||||||
firtool,
|
|
||||||
firtool_extra_args,
|
|
||||||
verilog_dialect,
|
|
||||||
debug,
|
|
||||||
} = self;
|
|
||||||
let output = VerilogOutput {
|
|
||||||
firrtl: firrtl_output,
|
|
||||||
verilog_files: vec![],
|
|
||||||
contents_hash: None,
|
|
||||||
};
|
|
||||||
let mut cmd = process::Command::new(firtool);
|
|
||||||
cmd.arg(output.firrtl.firrtl_file());
|
|
||||||
cmd.arg("-o");
|
|
||||||
cmd.arg(output.unadjusted_verilog_file());
|
|
||||||
if *debug {
|
|
||||||
cmd.arg("-g");
|
|
||||||
cmd.arg("--preserve-values=all");
|
|
||||||
}
|
|
||||||
if let Some(dialect) = verilog_dialect {
|
|
||||||
cmd.args(dialect.firtool_extra_args());
|
|
||||||
}
|
|
||||||
cmd.args(firtool_extra_args);
|
|
||||||
cmd.current_dir(&output.firrtl.output_dir);
|
|
||||||
let status = firrtl.base.run_external_command(acquired_job, cmd, None)?;
|
|
||||||
if status.success() {
|
|
||||||
self.process_unadjusted_verilog_file(output)
|
|
||||||
} else {
|
|
||||||
Err(CliError(eyre!(
|
|
||||||
"running {} failed: {status}",
|
|
||||||
self.firtool.display()
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<Arg> RunPhase<Arg> for VerilogArgs
|
|
||||||
where
|
|
||||||
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
|
|
||||||
{
|
|
||||||
type Output = VerilogOutput;
|
|
||||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
|
||||||
let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?;
|
|
||||||
self.run_impl(firrtl_output, acquired_job)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub enum FormalMode {
|
|
||||||
#[default]
|
|
||||||
BMC,
|
|
||||||
Prove,
|
|
||||||
Live,
|
|
||||||
Cover,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FormalMode {
|
|
||||||
pub fn as_str(self) -> &'static str {
|
|
||||||
match self {
|
|
||||||
FormalMode::BMC => "bmc",
|
|
||||||
FormalMode::Prove => "prove",
|
|
||||||
FormalMode::Live => "live",
|
|
||||||
FormalMode::Cover => "cover",
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for FormalMode {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
f.write_str(self.as_str())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Clone)]
|
|
||||||
struct FormalAdjustArgs;
|
|
||||||
|
|
||||||
impl clap::FromArgMatches for FormalAdjustArgs {
|
|
||||||
fn from_arg_matches(_matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
|
|
||||||
Ok(Self)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> {
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl clap::Args for FormalAdjustArgs {
|
|
||||||
fn augment_args(cmd: clap::Command) -> clap::Command {
|
|
||||||
cmd.mut_arg("output", |arg| arg.required(false))
|
|
||||||
.mut_arg("verilog_dialect", |arg| {
|
|
||||||
arg.default_value(VerilogDialect::Yosys.to_string())
|
|
||||||
.hide(true)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
|
|
||||||
Self::augment_args(cmd)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn which(v: std::ffi::OsString) -> which::Result<PathBuf> {
|
|
||||||
#[cfg(not(miri))]
|
|
||||||
return which::which(v);
|
|
||||||
#[cfg(miri)]
|
|
||||||
return Ok(Path::new("/").join(v));
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Parser, Clone)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct FormalArgs {
|
|
||||||
#[command(flatten)]
|
|
||||||
pub verilog: VerilogArgs,
|
|
||||||
#[arg(
|
|
||||||
long,
|
|
||||||
default_value = "sby",
|
|
||||||
env = "SBY",
|
|
||||||
value_hint = ValueHint::CommandName,
|
|
||||||
value_parser = OsStringValueParser::new().try_map(which)
|
|
||||||
)]
|
|
||||||
pub sby: PathBuf,
|
|
||||||
#[arg(long)]
|
|
||||||
pub sby_extra_args: Vec<String>,
|
|
||||||
#[arg(long, default_value_t)]
|
|
||||||
pub mode: FormalMode,
|
|
||||||
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
|
|
||||||
pub depth: u64,
|
|
||||||
#[arg(long, default_value = "z3")]
|
|
||||||
pub solver: String,
|
|
||||||
#[arg(long)]
|
|
||||||
pub smtbmc_extra_args: Vec<String>,
|
|
||||||
#[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")]
|
|
||||||
pub cache_results: bool,
|
|
||||||
#[command(flatten)]
|
|
||||||
_formal_adjust_args: FormalAdjustArgs,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Debug for FormalArgs {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
let Self {
|
|
||||||
verilog,
|
|
||||||
sby,
|
|
||||||
sby_extra_args,
|
|
||||||
mode,
|
|
||||||
depth,
|
|
||||||
solver,
|
|
||||||
smtbmc_extra_args,
|
|
||||||
cache_results,
|
|
||||||
_formal_adjust_args: _,
|
|
||||||
} = self;
|
|
||||||
f.debug_struct("FormalArgs")
|
|
||||||
.field("verilog", verilog)
|
|
||||||
.field("sby", sby)
|
|
||||||
.field("sby_extra_args", sby_extra_args)
|
|
||||||
.field("mode", mode)
|
|
||||||
.field("depth", depth)
|
|
||||||
.field("solver", solver)
|
|
||||||
.field("smtbmc_extra_args", smtbmc_extra_args)
|
|
||||||
.field("cache_results", cache_results)
|
|
||||||
.finish_non_exhaustive()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FormalArgs {
|
|
||||||
pub const DEFAULT_DEPTH: u64 = 20;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct FormalOutput {
|
|
||||||
pub verilog: VerilogOutput,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FormalOutput {
|
|
||||||
pub fn sby_file(&self) -> PathBuf {
|
|
||||||
self.verilog.firrtl.file_with_ext("sby")
|
|
||||||
}
|
|
||||||
pub fn cache_file(&self) -> PathBuf {
|
|
||||||
self.verilog.firrtl.file_with_ext("cache.json")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct FormalCacheOutput {}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub enum FormalCacheVersion {
|
|
||||||
V1,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FormalCacheVersion {
|
|
||||||
pub const CURRENT: Self = Self::V1;
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
|
||||||
#[non_exhaustive]
|
|
||||||
pub struct FormalCache {
|
|
||||||
pub version: FormalCacheVersion,
|
|
||||||
pub contents_hash: blake3::Hash,
|
|
||||||
pub stdout_stderr: String,
|
|
||||||
pub result: Result<FormalCacheOutput, String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FormalCache {
|
|
||||||
pub fn new(
|
|
||||||
version: FormalCacheVersion,
|
|
||||||
contents_hash: blake3::Hash,
|
|
||||||
stdout_stderr: String,
|
|
||||||
result: Result<FormalCacheOutput, String>,
|
|
||||||
) -> Self {
|
|
||||||
Self {
|
|
||||||
version,
|
|
||||||
contents_hash,
|
|
||||||
stdout_stderr,
|
|
||||||
result,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl FormalArgs {
|
|
||||||
fn sby_contents(&self, output: &FormalOutput) -> Result<String> {
|
|
||||||
let Self {
|
|
||||||
verilog: _,
|
|
||||||
sby: _,
|
|
||||||
sby_extra_args: _,
|
|
||||||
mode,
|
|
||||||
depth,
|
|
||||||
smtbmc_extra_args,
|
|
||||||
solver,
|
|
||||||
cache_results: _,
|
|
||||||
_formal_adjust_args: _,
|
|
||||||
} = self;
|
|
||||||
let smtbmc_options = smtbmc_extra_args.join(" ");
|
|
||||||
let top_module = &output.verilog.firrtl.top_module;
|
|
||||||
let mut retval = format!(
|
|
||||||
"[options]\n\
|
|
||||||
mode {mode}\n\
|
|
||||||
depth {depth}\n\
|
|
||||||
wait on\n\
|
|
||||||
\n\
|
|
||||||
[engines]\n\
|
|
||||||
smtbmc {solver} -- -- {smtbmc_options}\n\
|
|
||||||
\n\
|
|
||||||
[script]\n"
|
|
||||||
);
|
|
||||||
for verilog_file in &output.verilog.verilog_files {
|
|
||||||
let verilog_file = verilog_file
|
|
||||||
.to_str()
|
|
||||||
.ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?;
|
|
||||||
if verilog_file.contains(|ch: char| {
|
|
||||||
(ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
|
|
||||||
}) {
|
|
||||||
return Err(CliError(eyre!(
|
|
||||||
"verilog file path contains characters that aren't permitted"
|
|
||||||
)));
|
|
||||||
}
|
|
||||||
writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap();
|
|
||||||
}
|
|
||||||
// workaround for wires disappearing -- set `keep` on all wires
|
|
||||||
writeln!(retval, "hierarchy -top {top_module}").unwrap();
|
|
||||||
writeln!(retval, "proc").unwrap();
|
|
||||||
writeln!(retval, "setattr -set keep 1 w:\\*").unwrap();
|
|
||||||
writeln!(retval, "prep").unwrap();
|
|
||||||
Ok(retval)
|
|
||||||
}
|
|
||||||
fn run_impl(
|
|
||||||
&self,
|
|
||||||
verilog_output: VerilogOutput,
|
|
||||||
acquired_job: &mut AcquiredJob,
|
|
||||||
) -> Result<FormalOutput> {
|
|
||||||
let output = FormalOutput {
|
|
||||||
verilog: verilog_output,
|
|
||||||
};
|
|
||||||
let sby_file = output.sby_file();
|
|
||||||
let sby_contents = self.sby_contents(&output)?;
|
|
||||||
let contents_hash = output.verilog.contents_hash.map(|verilog_hash| {
|
|
||||||
let mut hasher = blake3::Hasher::new();
|
|
||||||
hasher.update(verilog_hash.as_bytes());
|
|
||||||
hasher.update(sby_contents.as_bytes());
|
|
||||||
hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes());
|
|
||||||
for sby_extra_arg in self.sby_extra_args.iter() {
|
|
||||||
hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes());
|
|
||||||
hasher.update(sby_extra_arg.as_bytes());
|
|
||||||
}
|
|
||||||
hasher.finalize()
|
|
||||||
});
|
|
||||||
std::fs::write(&sby_file, sby_contents)?;
|
|
||||||
let mut cmd = process::Command::new(&self.sby);
|
|
||||||
cmd.arg("-j1"); // sby seems not to respect job count in parallel mode
|
|
||||||
cmd.arg("-f");
|
|
||||||
cmd.arg(sby_file.file_name().unwrap());
|
|
||||||
cmd.args(&self.sby_extra_args);
|
|
||||||
cmd.current_dir(&output.verilog.firrtl.output_dir);
|
|
||||||
let mut captured_output = String::new();
|
|
||||||
let cache_file = output.cache_file();
|
|
||||||
let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) {
|
|
||||||
if let Some(FormalCache {
|
|
||||||
version: FormalCacheVersion::CURRENT,
|
|
||||||
contents_hash: cache_contents_hash,
|
|
||||||
stdout_stderr,
|
|
||||||
result,
|
|
||||||
}) = fs::read(&cache_file)
|
|
||||||
.ok()
|
|
||||||
.and_then(|v| serde_json::from_slice(&v).ok())
|
|
||||||
{
|
|
||||||
if cache_contents_hash == contents_hash {
|
|
||||||
println!("Using cached formal result:\n{stdout_stderr}");
|
|
||||||
return match result {
|
|
||||||
Ok(FormalCacheOutput {}) => Ok(output),
|
|
||||||
Err(error) => Err(CliError(eyre::Report::msg(error))),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
true
|
|
||||||
} else {
|
|
||||||
false
|
|
||||||
};
|
|
||||||
let _ = fs::remove_file(&cache_file);
|
|
||||||
let status = self.verilog.firrtl.base.run_external_command(
|
|
||||||
acquired_job,
|
|
||||||
cmd,
|
|
||||||
do_cache.then_some(&mut captured_output),
|
|
||||||
)?;
|
|
||||||
let result = if status.success() {
|
|
||||||
Ok(output)
|
|
||||||
} else {
|
|
||||||
Err(CliError(eyre!(
|
|
||||||
"running {} failed: {status}",
|
|
||||||
self.sby.display()
|
|
||||||
)))
|
|
||||||
};
|
|
||||||
fs::write(
|
|
||||||
cache_file,
|
|
||||||
serde_json::to_string_pretty(&FormalCache {
|
|
||||||
version: FormalCacheVersion::CURRENT,
|
|
||||||
contents_hash: contents_hash.unwrap(),
|
|
||||||
stdout_stderr: captured_output,
|
|
||||||
result: match &result {
|
|
||||||
Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}),
|
|
||||||
Err(error) => Err(error.to_string()),
|
|
||||||
},
|
|
||||||
})
|
|
||||||
.expect("serialization shouldn't ever fail"),
|
|
||||||
)?;
|
|
||||||
result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<Arg> RunPhase<Arg> for FormalArgs
|
|
||||||
where
|
|
||||||
VerilogArgs: RunPhase<Arg, Output = VerilogOutput>,
|
|
||||||
{
|
|
||||||
type Output = FormalOutput;
|
|
||||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
|
||||||
let verilog_output = self.verilog.run_with_job(arg, acquired_job)?;
|
|
||||||
self.run_impl(verilog_output, acquired_job)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Subcommand, Debug)]
|
|
||||||
enum CliCommand {
|
|
||||||
/// Generate FIRRTL
|
|
||||||
Firrtl(FirrtlArgs),
|
|
||||||
/// Generate Verilog
|
|
||||||
Verilog(VerilogArgs),
|
|
||||||
/// Run a formal proof
|
|
||||||
Formal(FormalArgs),
|
|
||||||
}
|
|
||||||
|
|
||||||
/// a simple CLI
|
|
||||||
///
|
|
||||||
/// Use like:
|
|
||||||
///
|
|
||||||
/// ```no_run
|
|
||||||
/// # use fayalite::prelude::*;
|
|
||||||
/// # #[hdl_module]
|
|
||||||
/// # fn my_module() {}
|
|
||||||
/// use fayalite::cli;
|
|
||||||
///
|
|
||||||
/// fn main() -> cli::Result {
|
|
||||||
/// cli::Cli::parse().run(my_module())
|
|
||||||
/// }
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// You can also use it with a larger [`clap`]-based CLI like so:
|
|
||||||
///
|
|
||||||
/// ```no_run
|
|
||||||
/// # use fayalite::prelude::*;
|
|
||||||
/// # #[hdl_module]
|
|
||||||
/// # fn my_module() {}
|
|
||||||
/// use clap::{Subcommand, Parser};
|
|
||||||
/// use fayalite::cli;
|
|
||||||
///
|
|
||||||
/// #[derive(Subcommand)]
|
|
||||||
/// pub enum Cmd {
|
|
||||||
/// #[command(flatten)]
|
|
||||||
/// Fayalite(cli::Cli),
|
|
||||||
/// MySpecialCommand {
|
|
||||||
/// #[arg(long)]
|
|
||||||
/// foo: bool,
|
|
||||||
/// },
|
|
||||||
/// }
|
|
||||||
///
|
|
||||||
/// #[derive(Parser)]
|
|
||||||
/// pub struct Cli {
|
|
||||||
/// #[command(subcommand)]
|
|
||||||
/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands
|
|
||||||
/// }
|
|
||||||
///
|
|
||||||
/// fn main() -> cli::Result {
|
|
||||||
/// match Cli::parse().cmd {
|
|
||||||
/// Cmd::Fayalite(v) => v.run(my_module())?,
|
|
||||||
/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"),
|
|
||||||
/// }
|
|
||||||
/// Ok(())
|
|
||||||
/// }
|
|
||||||
/// ```
|
|
||||||
#[derive(Parser, Debug)]
|
|
||||||
// clear things that would be crate-specific
|
|
||||||
#[command(name = "Fayalite Simple CLI", about = None, long_about = None)]
|
|
||||||
pub struct Cli {
|
|
||||||
#[command(subcommand)]
|
|
||||||
subcommand: CliCommand,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl clap::Subcommand for Cli {
|
|
||||||
fn augment_subcommands(cmd: clap::Command) -> clap::Command {
|
|
||||||
CliCommand::augment_subcommands(cmd)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command {
|
|
||||||
CliCommand::augment_subcommands_for_update(cmd)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn has_subcommand(name: &str) -> bool {
|
|
||||||
CliCommand::has_subcommand(name)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> RunPhase<T> for Cli
|
|
||||||
where
|
|
||||||
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
|
|
||||||
{
|
|
||||||
type Output = ();
|
|
||||||
fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
|
|
||||||
match &self.subcommand {
|
|
||||||
CliCommand::Firrtl(c) => {
|
|
||||||
c.run_with_job(arg, acquired_job)?;
|
|
||||||
}
|
|
||||||
CliCommand::Verilog(c) => {
|
|
||||||
c.run_with_job(arg, acquired_job)?;
|
|
||||||
}
|
|
||||||
CliCommand::Formal(c) => {
|
|
||||||
c.run_with_job(arg, acquired_job)?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Cli {
|
|
||||||
/// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`]
|
|
||||||
pub fn parse() -> Self {
|
|
||||||
clap::Parser::parse()
|
|
||||||
}
|
|
||||||
/// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`]
|
|
||||||
pub fn run<T>(&self, top_module: T) -> Result<()>
|
|
||||||
where
|
|
||||||
Self: RunPhase<T, Output = ()>,
|
|
||||||
{
|
|
||||||
RunPhase::run(self, top_module)
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -4,9 +4,10 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
annotations::{
|
annotations::{
|
||||||
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
|
||||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
|
||||||
},
|
},
|
||||||
array::Array,
|
array::Array,
|
||||||
|
build::{ToArgs, WriteArgs, vendor::xilinx::XilinxAnnotation},
|
||||||
bundle::{Bundle, BundleField, BundleType},
|
bundle::{Bundle, BundleField, BundleType},
|
||||||
clock::Clock,
|
clock::Clock,
|
||||||
enum_::{Enum, EnumType, EnumVariant},
|
enum_::{Enum, EnumType, EnumVariant},
|
||||||
|
@ -23,9 +24,9 @@ use crate::{
|
||||||
memory::{Mem, PortKind, PortName, ReadUnderWrite},
|
memory::{Mem, PortKind, PortName, ReadUnderWrite},
|
||||||
module::{
|
module::{
|
||||||
AnnotatedModuleIO, Block, ExternModuleBody, ExternModuleParameter,
|
AnnotatedModuleIO, Block, ExternModuleBody, ExternModuleParameter,
|
||||||
ExternModuleParameterValue, Module, ModuleBody, NameOptId, NormalModuleBody, Stmt,
|
ExternModuleParameterValue, Module, ModuleBody, ModuleIO, NameId, NameOptId,
|
||||||
StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg,
|
NormalModuleBody, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance,
|
||||||
StmtWire,
|
StmtMatch, StmtReg, StmtWire,
|
||||||
transform::{
|
transform::{
|
||||||
simplify_enums::{SimplifyEnumsError, SimplifyEnumsKind, simplify_enums},
|
simplify_enums::{SimplifyEnumsError, SimplifyEnumsKind, simplify_enums},
|
||||||
simplify_memories::simplify_memories,
|
simplify_memories::simplify_memories,
|
||||||
|
@ -42,17 +43,18 @@ use crate::{
|
||||||
use bitvec::slice::BitSlice;
|
use bitvec::slice::BitSlice;
|
||||||
use clap::value_parser;
|
use clap::value_parser;
|
||||||
use num_traits::Signed;
|
use num_traits::Signed;
|
||||||
use serde::Serialize;
|
use serde::{Deserialize, Serialize};
|
||||||
use std::{
|
use std::{
|
||||||
cell::{Cell, RefCell},
|
cell::{Cell, RefCell},
|
||||||
cmp::Ordering,
|
cmp::Ordering,
|
||||||
collections::{BTreeMap, VecDeque},
|
collections::{BTreeMap, VecDeque},
|
||||||
error::Error,
|
error::Error,
|
||||||
|
ffi::OsString,
|
||||||
fmt::{self, Write},
|
fmt::{self, Write},
|
||||||
fs,
|
fs,
|
||||||
hash::Hash,
|
hash::Hash,
|
||||||
io,
|
io,
|
||||||
ops::Range,
|
ops::{ControlFlow, Range},
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
rc::Rc,
|
rc::Rc,
|
||||||
};
|
};
|
||||||
|
@ -404,10 +406,10 @@ impl TypeState {
|
||||||
self.next_type_name.set(id + 1);
|
self.next_type_name.set(id + 1);
|
||||||
Ident(Intern::intern_owned(format!("Ty{id}")))
|
Ident(Intern::intern_owned(format!("Ty{id}")))
|
||||||
}
|
}
|
||||||
fn get_bundle_field(&mut self, ty: Bundle, name: Interned<str>) -> Result<Ident> {
|
fn get_bundle_field(&mut self, ty: Bundle, name: Interned<str>) -> Result<Ident, FirrtlError> {
|
||||||
Ok(self.bundle_ns(ty)?.borrow_mut().get(name))
|
Ok(self.bundle_ns(ty)?.borrow_mut().get(name))
|
||||||
}
|
}
|
||||||
fn bundle_def(&self, ty: Bundle) -> Result<(Ident, Rc<RefCell<Namespace>>)> {
|
fn bundle_def(&self, ty: Bundle) -> Result<(Ident, Rc<RefCell<Namespace>>), FirrtlError> {
|
||||||
self.bundle_defs.get_or_make(ty, |&ty, definitions| {
|
self.bundle_defs.get_or_make(ty, |&ty, definitions| {
|
||||||
let mut ns = Namespace::default();
|
let mut ns = Namespace::default();
|
||||||
let mut body = String::new();
|
let mut body = String::new();
|
||||||
|
@ -428,13 +430,13 @@ impl TypeState {
|
||||||
Ok((name, Rc::new(RefCell::new(ns))))
|
Ok((name, Rc::new(RefCell::new(ns))))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn bundle_ty(&self, ty: Bundle) -> Result<Ident> {
|
fn bundle_ty(&self, ty: Bundle) -> Result<Ident, FirrtlError> {
|
||||||
Ok(self.bundle_def(ty)?.0)
|
Ok(self.bundle_def(ty)?.0)
|
||||||
}
|
}
|
||||||
fn bundle_ns(&self, ty: Bundle) -> Result<Rc<RefCell<Namespace>>> {
|
fn bundle_ns(&self, ty: Bundle) -> Result<Rc<RefCell<Namespace>>, FirrtlError> {
|
||||||
Ok(self.bundle_def(ty)?.1)
|
Ok(self.bundle_def(ty)?.1)
|
||||||
}
|
}
|
||||||
fn enum_def(&self, ty: Enum) -> Result<(Ident, Rc<EnumDef>)> {
|
fn enum_def(&self, ty: Enum) -> Result<(Ident, Rc<EnumDef>), FirrtlError> {
|
||||||
self.enum_defs.get_or_make(ty, |&ty, definitions| {
|
self.enum_defs.get_or_make(ty, |&ty, definitions| {
|
||||||
let mut variants = Namespace::default();
|
let mut variants = Namespace::default();
|
||||||
let mut body = String::new();
|
let mut body = String::new();
|
||||||
|
@ -461,13 +463,13 @@ impl TypeState {
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
fn enum_ty(&self, ty: Enum) -> Result<Ident> {
|
fn enum_ty(&self, ty: Enum) -> Result<Ident, FirrtlError> {
|
||||||
Ok(self.enum_def(ty)?.0)
|
Ok(self.enum_def(ty)?.0)
|
||||||
}
|
}
|
||||||
fn get_enum_variant(&mut self, ty: Enum, name: Interned<str>) -> Result<Ident> {
|
fn get_enum_variant(&mut self, ty: Enum, name: Interned<str>) -> Result<Ident, FirrtlError> {
|
||||||
Ok(self.enum_def(ty)?.1.variants.borrow_mut().get(name))
|
Ok(self.enum_def(ty)?.1.variants.borrow_mut().get(name))
|
||||||
}
|
}
|
||||||
fn ty<T: Type>(&self, ty: T) -> Result<String> {
|
fn ty<T: Type>(&self, ty: T) -> Result<String, FirrtlError> {
|
||||||
Ok(match ty.canonical() {
|
Ok(match ty.canonical() {
|
||||||
CanonicalType::Bundle(ty) => self.bundle_ty(ty)?.to_string(),
|
CanonicalType::Bundle(ty) => self.bundle_ty(ty)?.to_string(),
|
||||||
CanonicalType::Enum(ty) => self.enum_ty(ty)?.to_string(),
|
CanonicalType::Enum(ty) => self.enum_ty(ty)?.to_string(),
|
||||||
|
@ -485,7 +487,7 @@ impl TypeState {
|
||||||
CanonicalType::Reset(Reset {}) => "Reset".into(),
|
CanonicalType::Reset(Reset {}) => "Reset".into(),
|
||||||
CanonicalType::PhantomConst(_) => "{}".into(),
|
CanonicalType::PhantomConst(_) => "{}".into(),
|
||||||
CanonicalType::DynSimOnly(_) => {
|
CanonicalType::DynSimOnly(_) => {
|
||||||
return Err(FirrtlError::SimOnlyValuesAreNotPermitted.into());
|
return Err(FirrtlError::SimOnlyValuesAreNotPermitted);
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
@ -1206,9 +1208,7 @@ impl<'a> Exporter<'a> {
|
||||||
| CanonicalType::AsyncReset(_)
|
| CanonicalType::AsyncReset(_)
|
||||||
| CanonicalType::Reset(_) => Ok(format!("asUInt({value_str})")),
|
| CanonicalType::Reset(_) => Ok(format!("asUInt({value_str})")),
|
||||||
CanonicalType::PhantomConst(_) => Ok("UInt<0>(0)".into()),
|
CanonicalType::PhantomConst(_) => Ok("UInt<0>(0)".into()),
|
||||||
CanonicalType::DynSimOnly(_) => {
|
CanonicalType::DynSimOnly(_) => Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()),
|
||||||
Err(FirrtlError::SimOnlyValuesAreNotPermitted.into())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn expr_cast_bits_to_bundle(
|
fn expr_cast_bits_to_bundle(
|
||||||
|
@ -1429,9 +1429,7 @@ impl<'a> Exporter<'a> {
|
||||||
definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}"));
|
definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}"));
|
||||||
return Ok(retval.to_string());
|
return Ok(retval.to_string());
|
||||||
}
|
}
|
||||||
CanonicalType::DynSimOnly(_) => {
|
CanonicalType::DynSimOnly(_) => Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()),
|
||||||
Err(FirrtlError::SimOnlyValuesAreNotPermitted.into())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
fn expr_unary<T: Type>(
|
fn expr_unary<T: Type>(
|
||||||
|
@ -1907,6 +1905,8 @@ impl<'a> Exporter<'a> {
|
||||||
class: str::to_string(class),
|
class: str::to_string(class),
|
||||||
additional_fields: (*additional_fields).into(),
|
additional_fields: (*additional_fields).into(),
|
||||||
},
|
},
|
||||||
|
Annotation::Xilinx(XilinxAnnotation::XdcLocation(_))
|
||||||
|
| Annotation::Xilinx(XilinxAnnotation::XdcIOStandard(_)) => return,
|
||||||
};
|
};
|
||||||
self.annotations.push(FirrtlAnnotation {
|
self.annotations.push(FirrtlAnnotation {
|
||||||
data,
|
data,
|
||||||
|
@ -2454,7 +2454,7 @@ impl<T: ?Sized + FileBackendTrait> FileBackendTrait for &'_ mut T {
|
||||||
pub struct FileBackend {
|
pub struct FileBackend {
|
||||||
pub dir_path: PathBuf,
|
pub dir_path: PathBuf,
|
||||||
pub circuit_name: Option<String>,
|
pub circuit_name: Option<String>,
|
||||||
pub top_fir_file_stem: Option<String>,
|
pub top_fir_file_stem: Option<OsString>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FileBackend {
|
impl FileBackend {
|
||||||
|
@ -2503,7 +2503,7 @@ impl FileBackendTrait for FileBackend {
|
||||||
) -> Result<(), Self::Error> {
|
) -> Result<(), Self::Error> {
|
||||||
let top_fir_file_stem = self
|
let top_fir_file_stem = self
|
||||||
.top_fir_file_stem
|
.top_fir_file_stem
|
||||||
.get_or_insert_with(|| circuit_name.clone());
|
.get_or_insert_with(|| circuit_name.clone().into());
|
||||||
self.circuit_name = Some(circuit_name);
|
self.circuit_name = Some(circuit_name);
|
||||||
let mut path = self.dir_path.join(top_fir_file_stem);
|
let mut path = self.dir_path.join(top_fir_file_stem);
|
||||||
if let Some(parent) = path.parent().filter(|v| !v.as_os_str().is_empty()) {
|
if let Some(parent) = path.parent().filter(|v| !v.as_os_str().is_empty()) {
|
||||||
|
@ -2677,21 +2677,12 @@ impl FileBackendTrait for TestBackend {
|
||||||
|
|
||||||
fn export_impl(
|
fn export_impl(
|
||||||
file_backend: &mut dyn WrappedFileBackendTrait,
|
file_backend: &mut dyn WrappedFileBackendTrait,
|
||||||
mut top_module: Interned<Module<Bundle>>,
|
top_module: Interned<Module<Bundle>>,
|
||||||
options: ExportOptions,
|
options: ExportOptions,
|
||||||
) -> Result<(), WrappedError> {
|
) -> Result<(), WrappedError> {
|
||||||
let ExportOptions {
|
let top_module = options
|
||||||
simplify_memories: do_simplify_memories,
|
.prepare_top_module(top_module)
|
||||||
simplify_enums: do_simplify_enums,
|
.map_err(|e| file_backend.simplify_enums_error(e))?;
|
||||||
__private: _,
|
|
||||||
} = options;
|
|
||||||
if let Some(kind) = do_simplify_enums {
|
|
||||||
top_module =
|
|
||||||
simplify_enums(top_module, kind).map_err(|e| file_backend.simplify_enums_error(e))?;
|
|
||||||
}
|
|
||||||
if do_simplify_memories {
|
|
||||||
top_module = simplify_memories(top_module);
|
|
||||||
}
|
|
||||||
let indent_depth = Cell::new(0);
|
let indent_depth = Cell::new(0);
|
||||||
let mut global_ns = Namespace::default();
|
let mut global_ns = Namespace::default();
|
||||||
let circuit_name = global_ns.get(top_module.name_id());
|
let circuit_name = global_ns.get(top_module.name_id());
|
||||||
|
@ -2753,14 +2744,23 @@ impl clap::builder::TypedValueParser for OptionSimplifyEnumsKindValueParser {
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||||
pub struct ExportOptionsPrivate(());
|
pub struct ExportOptionsPrivate(());
|
||||||
|
|
||||||
#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash)]
|
impl ExportOptionsPrivate {
|
||||||
|
fn private_new() -> Self {
|
||||||
|
Self(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||||
pub struct ExportOptions {
|
pub struct ExportOptions {
|
||||||
#[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)]
|
#[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)]
|
||||||
|
#[serde(default = "ExportOptions::default_simplify_memories")]
|
||||||
pub simplify_memories: bool,
|
pub simplify_memories: bool,
|
||||||
#[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")]
|
#[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")]
|
||||||
pub simplify_enums: std::option::Option<SimplifyEnumsKind>,
|
#[serde(default = "ExportOptions::default_simplify_enums")]
|
||||||
|
pub simplify_enums: std::option::Option<SimplifyEnumsKind>, // use std::option::Option instead of Option to avoid clap mis-parsing
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
#[clap(skip = ExportOptionsPrivate(()))]
|
#[clap(skip = ExportOptionsPrivate(()))]
|
||||||
|
#[serde(skip, default = "ExportOptionsPrivate::private_new")]
|
||||||
/// `#[non_exhaustive]` except allowing struct update syntax
|
/// `#[non_exhaustive]` except allowing struct update syntax
|
||||||
pub __private: ExportOptionsPrivate,
|
pub __private: ExportOptionsPrivate,
|
||||||
}
|
}
|
||||||
|
@ -2771,7 +2771,34 @@ impl fmt::Debug for ExportOptions {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl ToArgs for ExportOptions {
|
||||||
|
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
|
||||||
|
let Self {
|
||||||
|
simplify_memories,
|
||||||
|
simplify_enums,
|
||||||
|
__private: ExportOptionsPrivate(()),
|
||||||
|
} = *self;
|
||||||
|
if !simplify_memories {
|
||||||
|
args.write_arg("--no-simplify-memories");
|
||||||
|
}
|
||||||
|
let simplify_enums = simplify_enums.map(|v| {
|
||||||
|
clap::ValueEnum::to_possible_value(&v).expect("there are no skipped variants")
|
||||||
|
});
|
||||||
|
let simplify_enums = match &simplify_enums {
|
||||||
|
None => OptionSimplifyEnumsKindValueParser::NONE_NAME,
|
||||||
|
Some(v) => v.get_name(),
|
||||||
|
};
|
||||||
|
args.write_long_option_eq("simplify-enums", simplify_enums);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl ExportOptions {
|
impl ExportOptions {
|
||||||
|
fn default_simplify_memories() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
fn default_simplify_enums() -> Option<SimplifyEnumsKind> {
|
||||||
|
Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts)
|
||||||
|
}
|
||||||
fn debug_fmt(
|
fn debug_fmt(
|
||||||
&self,
|
&self,
|
||||||
f: &mut fmt::Formatter<'_>,
|
f: &mut fmt::Formatter<'_>,
|
||||||
|
@ -2823,18 +2850,47 @@ impl ExportOptions {
|
||||||
if f.alternate() { "\n}" } else { " }" }
|
if f.alternate() { "\n}" } else { " }" }
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
fn prepare_top_module_helper(
|
||||||
|
self,
|
||||||
|
mut top_module: Interned<Module<Bundle>>,
|
||||||
|
) -> Result<Interned<Module<Bundle>>, SimplifyEnumsError> {
|
||||||
|
let Self {
|
||||||
|
simplify_memories: do_simplify_memories,
|
||||||
|
simplify_enums: do_simplify_enums,
|
||||||
|
__private: _,
|
||||||
|
} = self;
|
||||||
|
if let Some(kind) = do_simplify_enums {
|
||||||
|
top_module = simplify_enums(top_module, kind)?;
|
||||||
|
}
|
||||||
|
if do_simplify_memories {
|
||||||
|
top_module = simplify_memories(top_module);
|
||||||
|
}
|
||||||
|
Ok(top_module)
|
||||||
|
}
|
||||||
|
pub fn prepare_top_module<T: BundleType>(
|
||||||
|
self,
|
||||||
|
top_module: impl AsRef<Module<T>>,
|
||||||
|
) -> Result<Interned<Module<Bundle>>, SimplifyEnumsError> {
|
||||||
|
self.prepare_top_module_helper(top_module.as_ref().canonical().intern())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Default for ExportOptions {
|
impl Default for ExportOptions {
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
Self {
|
Self {
|
||||||
simplify_memories: true,
|
simplify_memories: Self::default_simplify_memories(),
|
||||||
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
|
simplify_enums: Self::default_simplify_enums(),
|
||||||
__private: ExportOptionsPrivate(()),
|
__private: ExportOptionsPrivate(()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_circuit_name(top_module_name_id: NameId) -> Interned<str> {
|
||||||
|
let mut global_ns = Namespace::default();
|
||||||
|
let circuit_name = global_ns.get(top_module_name_id);
|
||||||
|
circuit_name.0
|
||||||
|
}
|
||||||
|
|
||||||
pub fn export<T: BundleType, B: FileBackendTrait>(
|
pub fn export<T: BundleType, B: FileBackendTrait>(
|
||||||
file_backend: B,
|
file_backend: B,
|
||||||
top_module: &Module<T>,
|
top_module: &Module<T>,
|
||||||
|
@ -2846,6 +2902,497 @@ pub fn export<T: BundleType, B: FileBackendTrait>(
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub enum ScalarizedModuleABIError {
|
||||||
|
SimOnlyValuesAreNotPermitted,
|
||||||
|
SimplifyEnumsError(SimplifyEnumsError),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for ScalarizedModuleABIError {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted => {
|
||||||
|
FirrtlError::SimOnlyValuesAreNotPermitted.fmt(f)
|
||||||
|
}
|
||||||
|
ScalarizedModuleABIError::SimplifyEnumsError(e) => e.fmt(f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::error::Error for ScalarizedModuleABIError {}
|
||||||
|
|
||||||
|
impl From<SimplifyEnumsError> for ScalarizedModuleABIError {
|
||||||
|
fn from(value: SimplifyEnumsError) -> Self {
|
||||||
|
Self::SimplifyEnumsError(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||||
|
pub enum ScalarizedModuleABIPortItem {
|
||||||
|
Group(ScalarizedModuleABIPortGroup),
|
||||||
|
Port(ScalarizedModuleABIPort),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarizedModuleABIPortItem {
|
||||||
|
pub fn module_io(self) -> ModuleIO<CanonicalType> {
|
||||||
|
*self
|
||||||
|
.target()
|
||||||
|
.base()
|
||||||
|
.module_io()
|
||||||
|
.expect("known to be ModuleIO")
|
||||||
|
}
|
||||||
|
pub fn target(self) -> Interned<Target> {
|
||||||
|
match self {
|
||||||
|
Self::Group(v) => v.target(),
|
||||||
|
Self::Port(v) => v.target(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn for_each_port_and_annotations_helper<
|
||||||
|
F: for<'a> FnMut(
|
||||||
|
&'a ScalarizedModuleABIPort,
|
||||||
|
ScalarizedModuleABIAnnotations<'a>,
|
||||||
|
) -> ControlFlow<B>,
|
||||||
|
B,
|
||||||
|
>(
|
||||||
|
&self,
|
||||||
|
parent: Option<&ScalarizedModuleABIAnnotations<'_>>,
|
||||||
|
f: &mut F,
|
||||||
|
) -> ControlFlow<B> {
|
||||||
|
match self {
|
||||||
|
Self::Group(v) => v.for_each_port_and_annotations_helper(parent, f),
|
||||||
|
Self::Port(port) => f(
|
||||||
|
port,
|
||||||
|
ScalarizedModuleABIAnnotations::new(parent, port.annotations.iter()),
|
||||||
|
),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn for_each_port_and_annotations<
|
||||||
|
F: for<'a> FnMut(
|
||||||
|
&'a ScalarizedModuleABIPort,
|
||||||
|
ScalarizedModuleABIAnnotations<'a>,
|
||||||
|
) -> ControlFlow<B>,
|
||||||
|
B,
|
||||||
|
>(
|
||||||
|
self,
|
||||||
|
mut f: F,
|
||||||
|
) -> ControlFlow<B> {
|
||||||
|
self.for_each_port_and_annotations_helper(None, &mut f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for ScalarizedModuleABIPortItem {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
match self {
|
||||||
|
Self::Group(v) => v.fmt(f),
|
||||||
|
Self::Port(v) => v.fmt(f),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct ScalarizedModuleABIAnnotations<'a> {
|
||||||
|
parent: Option<&'a ScalarizedModuleABIAnnotations<'a>>,
|
||||||
|
parent_len: usize,
|
||||||
|
annotations: std::slice::Iter<'a, TargetedAnnotation>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> ScalarizedModuleABIAnnotations<'a> {
|
||||||
|
fn new(
|
||||||
|
parent: Option<&'a ScalarizedModuleABIAnnotations<'a>>,
|
||||||
|
annotations: std::slice::Iter<'a, TargetedAnnotation>,
|
||||||
|
) -> Self {
|
||||||
|
Self {
|
||||||
|
parent,
|
||||||
|
parent_len: parent.map_or(0, |parent| parent.len()),
|
||||||
|
annotations,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Default for ScalarizedModuleABIAnnotations<'a> {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self {
|
||||||
|
parent: None,
|
||||||
|
parent_len: 0,
|
||||||
|
annotations: Default::default(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Iterator for ScalarizedModuleABIAnnotations<'a> {
|
||||||
|
type Item = &'a TargetedAnnotation;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
loop {
|
||||||
|
if let retval @ Some(_) = self.annotations.next() {
|
||||||
|
break retval;
|
||||||
|
}
|
||||||
|
*self = self.parent?.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
let len = self.len();
|
||||||
|
(len, Some(len))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fold<B, F>(mut self, mut init: B, mut f: F) -> B
|
||||||
|
where
|
||||||
|
F: FnMut(B, Self::Item) -> B,
|
||||||
|
{
|
||||||
|
loop {
|
||||||
|
let Self {
|
||||||
|
parent,
|
||||||
|
parent_len: _,
|
||||||
|
annotations,
|
||||||
|
} = self;
|
||||||
|
init = annotations.fold(init, &mut f);
|
||||||
|
let Some(next) = parent else {
|
||||||
|
break;
|
||||||
|
};
|
||||||
|
self = next.clone();
|
||||||
|
}
|
||||||
|
init
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::iter::FusedIterator for ScalarizedModuleABIAnnotations<'_> {}
|
||||||
|
|
||||||
|
impl ExactSizeIterator for ScalarizedModuleABIAnnotations<'_> {
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.parent_len + self.annotations.len()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
|
pub struct ScalarizedModuleABIPortGroup {
|
||||||
|
target: Interned<Target>,
|
||||||
|
common_annotations: Interned<[TargetedAnnotation]>,
|
||||||
|
children: Interned<[ScalarizedModuleABIPortItem]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarizedModuleABIPortGroup {
|
||||||
|
pub fn module_io(self) -> ModuleIO<CanonicalType> {
|
||||||
|
*self
|
||||||
|
.target
|
||||||
|
.base()
|
||||||
|
.module_io()
|
||||||
|
.expect("known to be ModuleIO")
|
||||||
|
}
|
||||||
|
pub fn target(self) -> Interned<Target> {
|
||||||
|
self.target
|
||||||
|
}
|
||||||
|
pub fn common_annotations(self) -> Interned<[TargetedAnnotation]> {
|
||||||
|
self.common_annotations
|
||||||
|
}
|
||||||
|
pub fn children(self) -> Interned<[ScalarizedModuleABIPortItem]> {
|
||||||
|
self.children
|
||||||
|
}
|
||||||
|
fn for_each_port_and_annotations_helper<
|
||||||
|
F: for<'a> FnMut(
|
||||||
|
&'a ScalarizedModuleABIPort,
|
||||||
|
ScalarizedModuleABIAnnotations<'a>,
|
||||||
|
) -> ControlFlow<B>,
|
||||||
|
B,
|
||||||
|
>(
|
||||||
|
&self,
|
||||||
|
parent: Option<&ScalarizedModuleABIAnnotations<'_>>,
|
||||||
|
f: &mut F,
|
||||||
|
) -> ControlFlow<B> {
|
||||||
|
let parent = ScalarizedModuleABIAnnotations::new(parent, self.common_annotations.iter());
|
||||||
|
for item in &self.children {
|
||||||
|
item.for_each_port_and_annotations_helper(Some(&parent), f)?;
|
||||||
|
}
|
||||||
|
ControlFlow::Continue(())
|
||||||
|
}
|
||||||
|
pub fn for_each_port_and_annotations<
|
||||||
|
F: for<'a> FnMut(
|
||||||
|
&'a ScalarizedModuleABIPort,
|
||||||
|
ScalarizedModuleABIAnnotations<'a>,
|
||||||
|
) -> ControlFlow<B>,
|
||||||
|
B,
|
||||||
|
>(
|
||||||
|
self,
|
||||||
|
mut f: F,
|
||||||
|
) -> ControlFlow<B> {
|
||||||
|
self.for_each_port_and_annotations_helper(None, &mut f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
|
pub struct ScalarizedModuleABIPort {
|
||||||
|
target: Interned<Target>,
|
||||||
|
annotations: Interned<[TargetedAnnotation]>,
|
||||||
|
scalarized_name: Interned<str>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarizedModuleABIPort {
|
||||||
|
pub fn module_io(self) -> ModuleIO<CanonicalType> {
|
||||||
|
*self
|
||||||
|
.target
|
||||||
|
.base()
|
||||||
|
.module_io()
|
||||||
|
.expect("known to be ModuleIO")
|
||||||
|
}
|
||||||
|
pub fn target(self) -> Interned<Target> {
|
||||||
|
self.target
|
||||||
|
}
|
||||||
|
pub fn annotations(self) -> Interned<[TargetedAnnotation]> {
|
||||||
|
self.annotations
|
||||||
|
}
|
||||||
|
pub fn scalarized_name(self) -> Interned<str> {
|
||||||
|
self.scalarized_name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ScalarizeTreeNodeBody {
|
||||||
|
Leaf {
|
||||||
|
scalarized_name: Interned<str>,
|
||||||
|
},
|
||||||
|
Bundle {
|
||||||
|
ty: Bundle,
|
||||||
|
fields: Vec<ScalarizeTreeNode>,
|
||||||
|
},
|
||||||
|
Array {
|
||||||
|
elements: Vec<ScalarizeTreeNode>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
struct ScalarizeTreeNode {
|
||||||
|
target: Interned<Target>,
|
||||||
|
annotations: Vec<TargetedAnnotation>,
|
||||||
|
body: ScalarizeTreeNodeBody,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarizeTreeNode {
|
||||||
|
#[track_caller]
|
||||||
|
fn find_target(&mut self, annotation_target: Interned<Target>) -> &mut Self {
|
||||||
|
match *annotation_target {
|
||||||
|
Target::Base(_) => {
|
||||||
|
assert_eq!(
|
||||||
|
annotation_target, self.target,
|
||||||
|
"annotation not on correct ModuleIO",
|
||||||
|
);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
Target::Child(target_child) => {
|
||||||
|
let parent = self.find_target(target_child.parent());
|
||||||
|
match *target_child.path_element() {
|
||||||
|
TargetPathElement::BundleField(TargetPathBundleField { name }) => {
|
||||||
|
match parent.body {
|
||||||
|
ScalarizeTreeNodeBody::Leaf { .. } => parent,
|
||||||
|
ScalarizeTreeNodeBody::Bundle { ty, ref mut fields } => {
|
||||||
|
&mut fields[ty.name_indexes()[&name]]
|
||||||
|
}
|
||||||
|
ScalarizeTreeNodeBody::Array { .. } => {
|
||||||
|
unreachable!("types are known to match")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TargetPathElement::ArrayElement(TargetPathArrayElement { index }) => {
|
||||||
|
match parent.body {
|
||||||
|
ScalarizeTreeNodeBody::Leaf { .. } => parent,
|
||||||
|
ScalarizeTreeNodeBody::Bundle { .. } => {
|
||||||
|
unreachable!("types are known to match")
|
||||||
|
}
|
||||||
|
ScalarizeTreeNodeBody::Array { ref mut elements } => {
|
||||||
|
&mut elements[index]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
TargetPathElement::DynArrayElement(_) => {
|
||||||
|
unreachable!("annotations are only on static targets");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fn into_scalarized_item(self) -> ScalarizedModuleABIPortItem {
|
||||||
|
let Self {
|
||||||
|
target,
|
||||||
|
annotations,
|
||||||
|
body,
|
||||||
|
} = self;
|
||||||
|
match body {
|
||||||
|
ScalarizeTreeNodeBody::Leaf { scalarized_name } => {
|
||||||
|
ScalarizedModuleABIPortItem::Port(ScalarizedModuleABIPort {
|
||||||
|
target,
|
||||||
|
annotations: Intern::intern_owned(annotations),
|
||||||
|
scalarized_name,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
ScalarizeTreeNodeBody::Bundle { fields: items, .. }
|
||||||
|
| ScalarizeTreeNodeBody::Array { elements: items } => {
|
||||||
|
ScalarizedModuleABIPortItem::Group(ScalarizedModuleABIPortGroup {
|
||||||
|
target,
|
||||||
|
common_annotations: Intern::intern_owned(annotations),
|
||||||
|
children: Interned::from_iter(
|
||||||
|
items.into_iter().map(Self::into_scalarized_item),
|
||||||
|
),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Default)]
|
||||||
|
struct ScalarizeTreeBuilder {
|
||||||
|
scalarized_ns: Namespace,
|
||||||
|
type_state: TypeState,
|
||||||
|
name: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarizeTreeBuilder {
|
||||||
|
#[track_caller]
|
||||||
|
fn build_bundle(
|
||||||
|
&mut self,
|
||||||
|
target: Interned<Target>,
|
||||||
|
ty: Bundle,
|
||||||
|
) -> Result<ScalarizeTreeNode, ScalarizedModuleABIError> {
|
||||||
|
let mut fields = Vec::with_capacity(ty.fields().len());
|
||||||
|
let original_len = self.name.len();
|
||||||
|
for BundleField { name, .. } in ty.fields() {
|
||||||
|
let firrtl_name = self
|
||||||
|
.type_state
|
||||||
|
.get_bundle_field(ty, name)
|
||||||
|
.map_err(|e| match e {
|
||||||
|
FirrtlError::SimOnlyValuesAreNotPermitted => {
|
||||||
|
ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted
|
||||||
|
}
|
||||||
|
})?
|
||||||
|
.0;
|
||||||
|
write!(self.name, "_{firrtl_name}").expect("writing to String is infallible");
|
||||||
|
fields.push(
|
||||||
|
self.build(
|
||||||
|
target
|
||||||
|
.join(TargetPathElement::intern_sized(
|
||||||
|
TargetPathBundleField { name }.into(),
|
||||||
|
))
|
||||||
|
.intern_sized(),
|
||||||
|
)?,
|
||||||
|
);
|
||||||
|
self.name.truncate(original_len);
|
||||||
|
}
|
||||||
|
Ok(ScalarizeTreeNode {
|
||||||
|
target,
|
||||||
|
annotations: Vec::new(),
|
||||||
|
body: ScalarizeTreeNodeBody::Bundle { ty, fields },
|
||||||
|
})
|
||||||
|
}
|
||||||
|
#[track_caller]
|
||||||
|
fn build(
|
||||||
|
&mut self,
|
||||||
|
target: Interned<Target>,
|
||||||
|
) -> Result<ScalarizeTreeNode, ScalarizedModuleABIError> {
|
||||||
|
Ok(match target.canonical_ty() {
|
||||||
|
CanonicalType::UInt(_)
|
||||||
|
| CanonicalType::SInt(_)
|
||||||
|
| CanonicalType::Bool(_)
|
||||||
|
| CanonicalType::Enum(_)
|
||||||
|
| CanonicalType::AsyncReset(_)
|
||||||
|
| CanonicalType::SyncReset(_)
|
||||||
|
| CanonicalType::Reset(_)
|
||||||
|
| CanonicalType::Clock(_) => {
|
||||||
|
let scalarized_name = self.scalarized_ns.get(str::intern(&self.name)).0;
|
||||||
|
ScalarizeTreeNode {
|
||||||
|
target,
|
||||||
|
annotations: Vec::new(),
|
||||||
|
body: ScalarizeTreeNodeBody::Leaf { scalarized_name },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
CanonicalType::Array(ty) => {
|
||||||
|
let mut elements = Vec::with_capacity(ty.len());
|
||||||
|
let original_len = self.name.len();
|
||||||
|
for index in 0..ty.len() {
|
||||||
|
write!(self.name, "_{index}").expect("writing to String is infallible");
|
||||||
|
elements.push(
|
||||||
|
self.build(
|
||||||
|
target
|
||||||
|
.join(TargetPathElement::intern_sized(
|
||||||
|
TargetPathArrayElement { index }.into(),
|
||||||
|
))
|
||||||
|
.intern_sized(),
|
||||||
|
)?,
|
||||||
|
);
|
||||||
|
self.name.truncate(original_len);
|
||||||
|
}
|
||||||
|
ScalarizeTreeNode {
|
||||||
|
target,
|
||||||
|
annotations: Vec::new(),
|
||||||
|
body: ScalarizeTreeNodeBody::Array { elements },
|
||||||
|
}
|
||||||
|
}
|
||||||
|
CanonicalType::Bundle(ty) => self.build_bundle(target, ty)?,
|
||||||
|
CanonicalType::PhantomConst(_) => {
|
||||||
|
self.build_bundle(target, Bundle::new(Interned::default()))?
|
||||||
|
}
|
||||||
|
CanonicalType::DynSimOnly(_) => {
|
||||||
|
return Err(ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
|
pub struct ScalarizedModuleABI {
|
||||||
|
module_io: Interned<[AnnotatedModuleIO]>,
|
||||||
|
items: Interned<[ScalarizedModuleABIPortItem]>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ScalarizedModuleABI {
|
||||||
|
#[track_caller]
|
||||||
|
fn from_io(module_io: Interned<[AnnotatedModuleIO]>) -> Result<Self, ScalarizedModuleABIError> {
|
||||||
|
let mut firrtl_ns = Namespace::default();
|
||||||
|
let mut tree_builder = ScalarizeTreeBuilder::default();
|
||||||
|
let mut items = Vec::new();
|
||||||
|
for module_io in module_io {
|
||||||
|
let firrtl_name = firrtl_ns.get(module_io.module_io.name_id());
|
||||||
|
tree_builder.name.clear();
|
||||||
|
tree_builder.name.push_str(&firrtl_name.0);
|
||||||
|
let mut tree = tree_builder.build(Target::from(module_io.module_io).intern_sized())?;
|
||||||
|
for annotation in module_io.annotations {
|
||||||
|
tree.find_target(annotation.target())
|
||||||
|
.annotations
|
||||||
|
.push(annotation);
|
||||||
|
}
|
||||||
|
items.push(tree.into_scalarized_item());
|
||||||
|
}
|
||||||
|
Ok(Self {
|
||||||
|
module_io,
|
||||||
|
items: Intern::intern_owned(items),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
#[track_caller]
|
||||||
|
pub fn new<T: BundleType>(
|
||||||
|
module: impl AsRef<Module<T>>,
|
||||||
|
options: ExportOptions,
|
||||||
|
) -> Result<Self, ScalarizedModuleABIError> {
|
||||||
|
Self::from_io(options.prepare_top_module(module)?.module_io())
|
||||||
|
}
|
||||||
|
pub fn module_io(&self) -> Interned<[AnnotatedModuleIO]> {
|
||||||
|
self.module_io
|
||||||
|
}
|
||||||
|
pub fn items(&self) -> Interned<[ScalarizedModuleABIPortItem]> {
|
||||||
|
self.items
|
||||||
|
}
|
||||||
|
pub fn for_each_port_and_annotations<
|
||||||
|
F: for<'a> FnMut(
|
||||||
|
&'a ScalarizedModuleABIPort,
|
||||||
|
ScalarizedModuleABIAnnotations<'a>,
|
||||||
|
) -> ControlFlow<B>,
|
||||||
|
B,
|
||||||
|
>(
|
||||||
|
self,
|
||||||
|
mut f: F,
|
||||||
|
) -> ControlFlow<B> {
|
||||||
|
for item in &self.items {
|
||||||
|
item.for_each_port_and_annotations_helper(None, &mut f)?;
|
||||||
|
}
|
||||||
|
ControlFlow::Continue(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[doc(hidden)]
|
#[doc(hidden)]
|
||||||
#[track_caller]
|
#[track_caller]
|
||||||
pub fn assert_export_firrtl_impl<T: BundleType>(top_module: &Module<T>, expected: TestBackend) {
|
pub fn assert_export_firrtl_impl<T: BundleType>(top_module: &Module<T>, expected: TestBackend) {
|
||||||
|
|
|
@ -8,7 +8,7 @@ use crate::{
|
||||||
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
|
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
|
||||||
},
|
},
|
||||||
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
|
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
|
||||||
intern::{Intern, Interned},
|
intern::{Intern, InternSlice, Interned},
|
||||||
phantom_const::PhantomConst,
|
phantom_const::PhantomConst,
|
||||||
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
|
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
|
||||||
source_location::SourceLocation,
|
source_location::SourceLocation,
|
||||||
|
@ -112,8 +112,8 @@ impl BundleType for UIntInRangeMaskType {
|
||||||
flipped: false,
|
flipped: false,
|
||||||
ty: range.canonical(),
|
ty: range.canonical(),
|
||||||
},
|
},
|
||||||
][..]
|
]
|
||||||
.intern()
|
.intern_slice()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -409,8 +409,8 @@ macro_rules! define_uint_in_range_type {
|
||||||
flipped: false,
|
flipped: false,
|
||||||
ty: range.canonical(),
|
ty: range.canonical(),
|
||||||
},
|
},
|
||||||
][..]
|
]
|
||||||
.intern()
|
.intern_slice()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -9,11 +9,13 @@ use std::{
|
||||||
any::{Any, TypeId},
|
any::{Any, TypeId},
|
||||||
borrow::{Borrow, Cow},
|
borrow::{Borrow, Cow},
|
||||||
cmp::Ordering,
|
cmp::Ordering,
|
||||||
|
ffi::{OsStr, OsString},
|
||||||
fmt,
|
fmt,
|
||||||
hash::{BuildHasher, Hash, Hasher},
|
hash::{BuildHasher, Hash, Hasher},
|
||||||
iter::FusedIterator,
|
iter::FusedIterator,
|
||||||
marker::PhantomData,
|
marker::PhantomData,
|
||||||
ops::Deref,
|
ops::Deref,
|
||||||
|
path::{Path, PathBuf},
|
||||||
sync::{Mutex, RwLock},
|
sync::{Mutex, RwLock},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -287,15 +289,266 @@ impl InternedCompare for BitSlice {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl InternedCompare for str {
|
/// Safety: `as_bytes` and `from_bytes_unchecked` must return the same pointer as the input.
|
||||||
type InternedCompareKey = PtrEqWithMetadata<Self>;
|
/// all values returned by `as_bytes` must be valid to pass to `from_bytes_unchecked`.
|
||||||
|
/// `into_bytes` must return the exact same thing as `as_bytes`.
|
||||||
|
/// `Interned<Self>` must contain the exact same references as `Interned<[u8]>`,
|
||||||
|
/// so they can be safely interconverted without needing re-interning.
|
||||||
|
unsafe trait InternStrLike: ToOwned {
|
||||||
|
fn as_bytes(this: &Self) -> &[u8];
|
||||||
|
fn into_bytes(this: Self::Owned) -> Vec<u8>;
|
||||||
|
/// Safety: `bytes` must be a valid sequence of bytes for this type. All UTF-8 sequences are valid.
|
||||||
|
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self;
|
||||||
|
}
|
||||||
|
|
||||||
|
macro_rules! impl_intern_str_like {
|
||||||
|
($ty:ty, owned = $Owned:ty) => {
|
||||||
|
impl InternedCompare for $ty {
|
||||||
|
type InternedCompareKey = PtrEqWithMetadata<[u8]>;
|
||||||
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
|
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
|
||||||
PtrEqWithMetadata(this)
|
PtrEqWithMetadata(InternStrLike::as_bytes(this))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Intern for $ty {
|
||||||
|
fn intern(&self) -> Interned<Self> {
|
||||||
|
Self::intern_cow(Cow::Borrowed(self))
|
||||||
|
}
|
||||||
|
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self> {
|
||||||
|
Interned::cast_unchecked(
|
||||||
|
<[u8]>::intern_cow(match this {
|
||||||
|
Cow::Borrowed(v) => Cow::Borrowed(<Self as InternStrLike>::as_bytes(v)),
|
||||||
|
Cow::Owned(v) => {
|
||||||
|
// verify $Owned is correct
|
||||||
|
let v: $Owned = v;
|
||||||
|
Cow::Owned(<Self as InternStrLike>::into_bytes(v))
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
// Safety: guaranteed safe because we got the bytes from `as_bytes`/`into_bytes`
|
||||||
|
|v| unsafe { <Self as InternStrLike>::from_bytes_unchecked(v) },
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl Default for Interned<$ty> {
|
||||||
|
fn default() -> Self {
|
||||||
|
// Safety: safe because the empty sequence is valid UTF-8
|
||||||
|
unsafe { <$ty as InternStrLike>::from_bytes_unchecked(&[]) }.intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl<'de> Deserialize<'de> for Interned<$ty> {
|
||||||
|
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||||
|
where
|
||||||
|
D: serde::Deserializer<'de>,
|
||||||
|
{
|
||||||
|
Cow::<'de, $ty>::deserialize(deserializer).map(Intern::intern_cow)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<$Owned> for Interned<$ty> {
|
||||||
|
fn from(v: $Owned) -> Self {
|
||||||
|
v.intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<Interned<$ty>> for $Owned {
|
||||||
|
fn from(v: Interned<$ty>) -> Self {
|
||||||
|
Interned::into_inner(v).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
impl From<Interned<$ty>> for Box<$ty> {
|
||||||
|
fn from(v: Interned<$ty>) -> Self {
|
||||||
|
Interned::into_inner(v).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `str`
|
||||||
|
unsafe impl InternStrLike for str {
|
||||||
|
fn as_bytes(this: &Self) -> &[u8] {
|
||||||
|
this.as_bytes()
|
||||||
|
}
|
||||||
|
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||||
|
this.into_bytes()
|
||||||
|
}
|
||||||
|
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||||
|
// Safety: `bytes` is guaranteed UTF-8 by the caller
|
||||||
|
unsafe { str::from_utf8_unchecked(bytes) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl_intern_str_like!(str, owned = String);
|
||||||
|
|
||||||
|
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr`
|
||||||
|
unsafe impl InternStrLike for OsStr {
|
||||||
|
fn as_bytes(this: &Self) -> &[u8] {
|
||||||
|
this.as_encoded_bytes()
|
||||||
|
}
|
||||||
|
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||||
|
this.into_encoded_bytes()
|
||||||
|
}
|
||||||
|
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||||
|
// Safety: `bytes` is guaranteed valid for `OsStr` by the caller
|
||||||
|
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl_intern_str_like!(OsStr, owned = OsString);
|
||||||
|
|
||||||
|
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr`
|
||||||
|
unsafe impl InternStrLike for Path {
|
||||||
|
fn as_bytes(this: &Self) -> &[u8] {
|
||||||
|
this.as_os_str().as_encoded_bytes()
|
||||||
|
}
|
||||||
|
fn into_bytes(this: Self::Owned) -> Vec<u8> {
|
||||||
|
this.into_os_string().into_encoded_bytes()
|
||||||
|
}
|
||||||
|
unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
|
||||||
|
// Safety: `bytes` is guaranteed valid for `OsStr` by the caller
|
||||||
|
unsafe { Path::new(OsStr::from_encoded_bytes_unchecked(bytes)) }
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl_intern_str_like!(Path, owned = PathBuf);
|
||||||
|
|
||||||
|
impl Interned<str> {
|
||||||
|
pub fn from_utf8(v: Interned<[u8]>) -> Result<Self, std::str::Utf8Error> {
|
||||||
|
Interned::try_cast_unchecked(v, str::from_utf8)
|
||||||
|
}
|
||||||
|
pub fn as_interned_bytes(self) -> Interned<[u8]> {
|
||||||
|
Interned::cast_unchecked(self, str::as_bytes)
|
||||||
|
}
|
||||||
|
pub fn as_interned_os_str(self) -> Interned<OsStr> {
|
||||||
|
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||||
|
}
|
||||||
|
pub fn as_interned_path(self) -> Interned<Path> {
|
||||||
|
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<str>> for Interned<OsStr> {
|
||||||
|
fn from(value: Interned<str>) -> Self {
|
||||||
|
value.as_interned_os_str()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<str>> for Interned<Path> {
|
||||||
|
fn from(value: Interned<str>) -> Self {
|
||||||
|
value.as_interned_path()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Interned<OsStr> {
|
||||||
|
pub fn as_interned_encoded_bytes(self) -> Interned<[u8]> {
|
||||||
|
Interned::cast_unchecked(self, OsStr::as_encoded_bytes)
|
||||||
|
}
|
||||||
|
pub fn to_interned_str(self) -> Option<Interned<str>> {
|
||||||
|
Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
|
||||||
|
}
|
||||||
|
pub fn display(self) -> std::ffi::os_str::Display<'static> {
|
||||||
|
Self::into_inner(self).display()
|
||||||
|
}
|
||||||
|
pub fn as_interned_path(self) -> Interned<Path> {
|
||||||
|
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<OsStr>> for Interned<Path> {
|
||||||
|
fn from(value: Interned<OsStr>) -> Self {
|
||||||
|
value.as_interned_path()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Interned<Path> {
|
||||||
|
pub fn as_interned_os_str(self) -> Interned<OsStr> {
|
||||||
|
Interned::cast_unchecked(self, AsRef::as_ref)
|
||||||
|
}
|
||||||
|
pub fn to_interned_str(self) -> Option<Interned<str>> {
|
||||||
|
Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
|
||||||
|
}
|
||||||
|
pub fn display(self) -> std::path::Display<'static> {
|
||||||
|
Self::into_inner(self).display()
|
||||||
|
}
|
||||||
|
pub fn interned_file_name(self) -> Option<Interned<OsStr>> {
|
||||||
|
Some(self.file_name()?.intern())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<Path>> for Interned<OsStr> {
|
||||||
|
fn from(value: Interned<Path>) -> Self {
|
||||||
|
value.as_interned_os_str()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait InternSlice: Sized {
|
||||||
|
type Element: 'static + Send + Sync + Clone + Hash + Eq;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Box<[T]> {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
self.into_vec().intern_slice()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Vec<T> {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
self.intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ [T] {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
self.intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ mut [T] {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
self.intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for [T; N] {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
(&self).intern_slice()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for Box<[T; N]> {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
let this: Box<[T]> = self;
|
||||||
|
this.intern_slice()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ [T; N] {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
let this: &[T] = self;
|
||||||
|
this.intern()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ mut [T; N] {
|
||||||
|
type Element = T;
|
||||||
|
fn intern_slice(self) -> Interned<[Self::Element]> {
|
||||||
|
let this: &[T] = self;
|
||||||
|
this.intern()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub trait Intern: Any + Send + Sync {
|
pub trait Intern: Any + Send + Sync {
|
||||||
fn intern(&self) -> Interned<Self>;
|
fn intern(&self) -> Interned<Self>;
|
||||||
|
fn intern_deref(self) -> Interned<Self::Target>
|
||||||
|
where
|
||||||
|
Self: Sized + Deref<Target: Intern + ToOwned<Owned = Self>>,
|
||||||
|
{
|
||||||
|
Self::Target::intern_owned(self)
|
||||||
|
}
|
||||||
fn intern_sized(self) -> Interned<Self>
|
fn intern_sized(self) -> Interned<Self>
|
||||||
where
|
where
|
||||||
Self: Clone,
|
Self: Clone,
|
||||||
|
@ -316,6 +569,30 @@ pub trait Intern: Any + Send + Sync {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<T: ?Sized + Intern + ToOwned> From<Cow<'_, T>> for Interned<T> {
|
||||||
|
fn from(value: Cow<'_, T>) -> Self {
|
||||||
|
Intern::intern_cow(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ?Sized + Intern> From<&'_ T> for Interned<T> {
|
||||||
|
fn from(value: &'_ T) -> Self {
|
||||||
|
Intern::intern(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: Intern + Clone> From<T> for Interned<T> {
|
||||||
|
fn from(value: T) -> Self {
|
||||||
|
Intern::intern_sized(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ?Sized + 'static + Send + Sync + ToOwned> From<Interned<T>> for Cow<'_, T> {
|
||||||
|
fn from(value: Interned<T>) -> Self {
|
||||||
|
Cow::Borrowed(Interned::into_inner(value))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
struct InternerState<T: ?Sized + 'static + Send + Sync> {
|
struct InternerState<T: ?Sized + 'static + Send + Sync> {
|
||||||
table: HashTable<&'static T>,
|
table: HashTable<&'static T>,
|
||||||
hasher: DefaultBuildHasher,
|
hasher: DefaultBuildHasher,
|
||||||
|
@ -381,12 +658,6 @@ impl Interner<BitSlice> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Interner<str> {
|
|
||||||
fn intern_str(&self, value: Cow<'_, str>) -> Interned<str> {
|
|
||||||
self.intern(|value| value.into_owned().leak(), value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub struct Interned<T: ?Sized + 'static + Send + Sync> {
|
pub struct Interned<T: ?Sized + 'static + Send + Sync> {
|
||||||
inner: &'static T,
|
inner: &'static T,
|
||||||
}
|
}
|
||||||
|
@ -416,6 +687,12 @@ forward_fmt_trait!(Pointer);
|
||||||
forward_fmt_trait!(UpperExp);
|
forward_fmt_trait!(UpperExp);
|
||||||
forward_fmt_trait!(UpperHex);
|
forward_fmt_trait!(UpperHex);
|
||||||
|
|
||||||
|
impl<T: ?Sized + 'static + Send + Sync + AsRef<U>, U: ?Sized> AsRef<U> for Interned<T> {
|
||||||
|
fn as_ref(&self) -> &U {
|
||||||
|
T::as_ref(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Clone, Debug)]
|
#[derive(Clone, Debug)]
|
||||||
pub struct InternedSliceIter<T: Clone + 'static + Send + Sync> {
|
pub struct InternedSliceIter<T: Clone + 'static + Send + Sync> {
|
||||||
slice: Interned<[T]>,
|
slice: Interned<[T]>,
|
||||||
|
@ -485,6 +762,57 @@ where
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<I> FromIterator<I> for Interned<str>
|
||||||
|
where
|
||||||
|
String: FromIterator<I>,
|
||||||
|
{
|
||||||
|
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||||
|
String::from_iter(iter).intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<I> FromIterator<I> for Interned<Path>
|
||||||
|
where
|
||||||
|
PathBuf: FromIterator<I>,
|
||||||
|
{
|
||||||
|
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||||
|
PathBuf::from_iter(iter).intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<I> FromIterator<I> for Interned<OsStr>
|
||||||
|
where
|
||||||
|
OsString: FromIterator<I>,
|
||||||
|
{
|
||||||
|
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
|
||||||
|
OsString::from_iter(iter).intern_deref()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<str>> for clap::builder::Str {
|
||||||
|
fn from(value: Interned<str>) -> Self {
|
||||||
|
Interned::into_inner(value).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<str>> for clap::builder::OsStr {
|
||||||
|
fn from(value: Interned<str>) -> Self {
|
||||||
|
Interned::into_inner(value).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<str>> for clap::builder::StyledStr {
|
||||||
|
fn from(value: Interned<str>) -> Self {
|
||||||
|
Interned::into_inner(value).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl From<Interned<str>> for clap::Id {
|
||||||
|
fn from(value: Interned<str>) -> Self {
|
||||||
|
Interned::into_inner(value).into()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Vec<T> {
|
impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Vec<T> {
|
||||||
fn from(value: Interned<[T]>) -> Self {
|
fn from(value: Interned<[T]>) -> Self {
|
||||||
Vec::from(&*value)
|
Vec::from(&*value)
|
||||||
|
@ -497,24 +825,12 @@ impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Box<[T]> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Interned<str>> for String {
|
|
||||||
fn from(value: Interned<str>) -> Self {
|
|
||||||
String::from(&*value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<I> Default for Interned<[I]>
|
impl<I> Default for Interned<[I]>
|
||||||
where
|
where
|
||||||
[I]: Intern,
|
[I]: Intern,
|
||||||
{
|
{
|
||||||
fn default() -> Self {
|
fn default() -> Self {
|
||||||
[][..].intern()
|
Intern::intern(&[])
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for Interned<str> {
|
|
||||||
fn default() -> Self {
|
|
||||||
"".intern()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -645,15 +961,6 @@ impl<'de> Deserialize<'de> for Interned<BitSlice> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'de> Deserialize<'de> for Interned<str> {
|
|
||||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
|
||||||
where
|
|
||||||
D: serde::Deserializer<'de>,
|
|
||||||
{
|
|
||||||
String::deserialize(deserializer).map(Intern::intern_owned)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Clone + Send + Sync + 'static + Hash + Eq> Intern for T {
|
impl<T: Clone + Send + Sync + 'static + Hash + Eq> Intern for T {
|
||||||
fn intern(&self) -> Interned<Self> {
|
fn intern(&self) -> Interned<Self> {
|
||||||
Self::intern_cow(Cow::Borrowed(self))
|
Self::intern_cow(Cow::Borrowed(self))
|
||||||
|
@ -714,26 +1021,6 @@ impl Intern for BitSlice {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Intern for str {
|
|
||||||
fn intern(&self) -> Interned<Self> {
|
|
||||||
Self::intern_cow(Cow::Borrowed(self))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn intern_owned(this: <Self as ToOwned>::Owned) -> Interned<Self>
|
|
||||||
where
|
|
||||||
Self: ToOwned,
|
|
||||||
{
|
|
||||||
Self::intern_cow(Cow::Owned(this))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self>
|
|
||||||
where
|
|
||||||
Self: ToOwned,
|
|
||||||
{
|
|
||||||
Interner::get().intern_str(this)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
|
pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
|
||||||
type InputRef<'a>: 'a + Send + Sync + Hash + Copy;
|
type InputRef<'a>: 'a + Send + Sync + Hash + Copy;
|
||||||
type InputOwned: 'static + Send + Sync;
|
type InputOwned: 'static + Send + Sync;
|
||||||
|
|
|
@ -87,8 +87,8 @@ pub mod _docs;
|
||||||
|
|
||||||
pub mod annotations;
|
pub mod annotations;
|
||||||
pub mod array;
|
pub mod array;
|
||||||
|
pub mod build;
|
||||||
pub mod bundle;
|
pub mod bundle;
|
||||||
pub mod cli;
|
|
||||||
pub mod clock;
|
pub mod clock;
|
||||||
pub mod enum_;
|
pub mod enum_;
|
||||||
pub mod expr;
|
pub mod expr;
|
||||||
|
@ -104,6 +104,7 @@ pub mod reg;
|
||||||
pub mod reset;
|
pub mod reset;
|
||||||
pub mod sim;
|
pub mod sim;
|
||||||
pub mod source_location;
|
pub mod source_location;
|
||||||
|
pub mod target;
|
||||||
pub mod testing;
|
pub mod testing;
|
||||||
pub mod ty;
|
pub mod ty;
|
||||||
pub mod util;
|
pub mod util;
|
||||||
|
|
|
@ -833,6 +833,8 @@ pub struct AnnotatedModuleIO<S: ModuleBuildingStatus = ModuleBuilt> {
|
||||||
pub module_io: ModuleIO<CanonicalType>,
|
pub module_io: ModuleIO<CanonicalType>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Copy for AnnotatedModuleIO {}
|
||||||
|
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||||
pub enum ModuleKind {
|
pub enum ModuleKind {
|
||||||
Extern,
|
Extern,
|
||||||
|
@ -1212,6 +1214,12 @@ pub struct Module<T: BundleType> {
|
||||||
module_annotations: Interned<[Annotation]>,
|
module_annotations: Interned<[Annotation]>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl<T: BundleType> AsRef<Self> for Module<T> {
|
||||||
|
fn as_ref(&self) -> &Self {
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Default)]
|
#[derive(Default)]
|
||||||
struct DebugFmtModulesState {
|
struct DebugFmtModulesState {
|
||||||
seen: HashSet<NameId>,
|
seen: HashSet<NameId>,
|
||||||
|
|
|
@ -1802,6 +1802,7 @@ impl_run_pass_clone!([] ExternModuleParameter);
|
||||||
impl_run_pass_clone!([] SIntValue);
|
impl_run_pass_clone!([] SIntValue);
|
||||||
impl_run_pass_clone!([] std::ops::Range<usize>);
|
impl_run_pass_clone!([] std::ops::Range<usize>);
|
||||||
impl_run_pass_clone!([] UIntValue);
|
impl_run_pass_clone!([] UIntValue);
|
||||||
|
impl_run_pass_clone!([] crate::build::vendor::xilinx::XilinxAnnotation);
|
||||||
impl_run_pass_copy!([] BlackBoxInlineAnnotation);
|
impl_run_pass_copy!([] BlackBoxInlineAnnotation);
|
||||||
impl_run_pass_copy!([] BlackBoxPathAnnotation);
|
impl_run_pass_copy!([] BlackBoxPathAnnotation);
|
||||||
impl_run_pass_copy!([] bool);
|
impl_run_pass_copy!([] bool);
|
||||||
|
@ -2217,6 +2218,7 @@ impl_run_pass_for_enum! {
|
||||||
BlackBoxPath(v),
|
BlackBoxPath(v),
|
||||||
DocString(v),
|
DocString(v),
|
||||||
CustomFirrtl(v),
|
CustomFirrtl(v),
|
||||||
|
Xilinx(v),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -10,7 +10,7 @@ use crate::{
|
||||||
},
|
},
|
||||||
hdl,
|
hdl,
|
||||||
int::UInt,
|
int::UInt,
|
||||||
intern::{Intern, Interned, Memoize},
|
intern::{Intern, InternSlice, Interned, Memoize},
|
||||||
memory::{DynPortType, Mem, MemPort},
|
memory::{DynPortType, Mem, MemPort},
|
||||||
module::{
|
module::{
|
||||||
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
|
||||||
|
@ -22,6 +22,7 @@ use crate::{
|
||||||
wire::Wire,
|
wire::Wire,
|
||||||
};
|
};
|
||||||
use core::fmt;
|
use core::fmt;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum SimplifyEnumsError {
|
pub enum SimplifyEnumsError {
|
||||||
|
@ -619,7 +620,7 @@ fn match_int_tag(
|
||||||
block,
|
block,
|
||||||
Block {
|
Block {
|
||||||
memories: Default::default(),
|
memories: Default::default(),
|
||||||
stmts: [Stmt::from(retval)][..].intern(),
|
stmts: [Stmt::from(retval)].intern_slice(),
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
};
|
};
|
||||||
|
@ -955,12 +956,15 @@ impl Folder for State {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)]
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "kebab-case")]
|
||||||
pub enum SimplifyEnumsKind {
|
pub enum SimplifyEnumsKind {
|
||||||
SimplifyToEnumsWithNoBody,
|
SimplifyToEnumsWithNoBody,
|
||||||
#[clap(name = "replace-with-bundle-of-uints")]
|
#[clap(name = "replace-with-bundle-of-uints")]
|
||||||
|
#[serde(rename = "replace-with-bundle-of-uints")]
|
||||||
ReplaceWithBundleOfUInts,
|
ReplaceWithBundleOfUInts,
|
||||||
#[clap(name = "replace-with-uint")]
|
#[clap(name = "replace-with-uint")]
|
||||||
|
#[serde(rename = "replace-with-uint")]
|
||||||
ReplaceWithUInt,
|
ReplaceWithUInt,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -7,6 +7,7 @@ use crate::{
|
||||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
|
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
|
||||||
},
|
},
|
||||||
array::ArrayType,
|
array::ArrayType,
|
||||||
|
build::vendor::xilinx::{XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation},
|
||||||
bundle::{Bundle, BundleField, BundleType},
|
bundle::{Bundle, BundleField, BundleType},
|
||||||
clock::Clock,
|
clock::Clock,
|
||||||
enum_::{Enum, EnumType, EnumVariant},
|
enum_::{Enum, EnumType, EnumVariant},
|
||||||
|
|
|
@ -7,8 +7,8 @@ pub use crate::{
|
||||||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
||||||
},
|
},
|
||||||
array::{Array, ArrayType},
|
array::{Array, ArrayType},
|
||||||
|
build::{BuildCli, JobParams, RunBuild},
|
||||||
bundle::Bundle,
|
bundle::Bundle,
|
||||||
cli::Cli,
|
|
||||||
clock::{Clock, ClockDomain, ToClock},
|
clock::{Clock, ClockDomain, ToClock},
|
||||||
enum_::{Enum, HdlNone, HdlOption, HdlSome},
|
enum_::{Enum, HdlNone, HdlOption, HdlSome},
|
||||||
expr::{
|
expr::{
|
||||||
|
@ -36,6 +36,7 @@ pub use crate::{
|
||||||
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
|
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
|
||||||
},
|
},
|
||||||
source_location::SourceLocation,
|
source_location::SourceLocation,
|
||||||
|
testing::assert_formal,
|
||||||
ty::{AsMask, CanonicalType, Type},
|
ty::{AsMask, CanonicalType, Type},
|
||||||
util::{ConstUsize, GenericConstUsize},
|
util::{ConstUsize, GenericConstUsize},
|
||||||
wire::Wire,
|
wire::Wire,
|
||||||
|
|
|
@ -12,7 +12,9 @@ use crate::{
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
int::BoolOrIntType,
|
int::BoolOrIntType,
|
||||||
intern::{Intern, Interned, InternedCompare, PtrEqWithTypeId, SupportsPtrEqWithTypeId},
|
intern::{
|
||||||
|
Intern, InternSlice, Interned, InternedCompare, PtrEqWithTypeId, SupportsPtrEqWithTypeId,
|
||||||
|
},
|
||||||
module::{
|
module::{
|
||||||
ModuleIO,
|
ModuleIO,
|
||||||
transform::visit::{Fold, Folder, Visit, Visitor},
|
transform::visit::{Fold, Folder, Visit, Visitor},
|
||||||
|
@ -262,7 +264,7 @@ impl_trace_decl! {
|
||||||
}),
|
}),
|
||||||
Instance(TraceInstance {
|
Instance(TraceInstance {
|
||||||
fn children(self) -> _ {
|
fn children(self) -> _ {
|
||||||
[self.instance_io.into(), self.module.into()][..].intern()
|
[self.instance_io.into(), self.module.into()].intern_slice()
|
||||||
}
|
}
|
||||||
name: Interned<str>,
|
name: Interned<str>,
|
||||||
instance_io: TraceBundle,
|
instance_io: TraceBundle,
|
||||||
|
@ -282,7 +284,7 @@ impl_trace_decl! {
|
||||||
}),
|
}),
|
||||||
MemPort(TraceMemPort {
|
MemPort(TraceMemPort {
|
||||||
fn children(self) -> _ {
|
fn children(self) -> _ {
|
||||||
[self.bundle.into()][..].intern()
|
[self.bundle.into()].intern_slice()
|
||||||
}
|
}
|
||||||
name: Interned<str>,
|
name: Interned<str>,
|
||||||
bundle: TraceBundle,
|
bundle: TraceBundle,
|
||||||
|
@ -290,7 +292,7 @@ impl_trace_decl! {
|
||||||
}),
|
}),
|
||||||
Wire(TraceWire {
|
Wire(TraceWire {
|
||||||
fn children(self) -> _ {
|
fn children(self) -> _ {
|
||||||
[*self.child][..].intern()
|
[*self.child].intern_slice()
|
||||||
}
|
}
|
||||||
name: Interned<str>,
|
name: Interned<str>,
|
||||||
child: Interned<TraceDecl>,
|
child: Interned<TraceDecl>,
|
||||||
|
@ -298,7 +300,7 @@ impl_trace_decl! {
|
||||||
}),
|
}),
|
||||||
Reg(TraceReg {
|
Reg(TraceReg {
|
||||||
fn children(self) -> _ {
|
fn children(self) -> _ {
|
||||||
[*self.child][..].intern()
|
[*self.child].intern_slice()
|
||||||
}
|
}
|
||||||
name: Interned<str>,
|
name: Interned<str>,
|
||||||
child: Interned<TraceDecl>,
|
child: Interned<TraceDecl>,
|
||||||
|
@@ -306,7 +308,7 @@ impl_trace_decl! {
|
||||||
}),
|
}),
|
||||||
ModuleIO(TraceModuleIO {
|
ModuleIO(TraceModuleIO {
|
||||||
fn children(self) -> _ {
|
fn children(self) -> _ {
|
||||||
[*self.child][..].intern()
|
[*self.child].intern_slice()
|
||||||
}
|
}
|
||||||
name: Interned<str>,
|
name: Interned<str>,
|
||||||
child: Interned<TraceDecl>,
|
child: Interned<TraceDecl>,
|
||||||
|
|
|
@@ -14,7 +14,7 @@ use crate::{
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
int::BoolOrIntType,
|
int::BoolOrIntType,
|
||||||
intern::{Intern, Interned, Memoize},
|
intern::{Intern, InternSlice, Interned, Memoize},
|
||||||
memory::PortKind,
|
memory::PortKind,
|
||||||
module::{
|
module::{
|
||||||
AnnotatedModuleIO, Block, ExternModuleBody, Id, InstantiatedModule, ModuleBody, NameId,
|
AnnotatedModuleIO, Block, ExternModuleBody, Id, InstantiatedModule, ModuleBody, NameId,
|
||||||
|
@@ -3950,8 +3950,8 @@ impl Compiler {
|
||||||
[Cond {
|
[Cond {
|
||||||
body: CondBody::IfTrue { cond },
|
body: CondBody::IfTrue { cond },
|
||||||
source_location: reg.source_location(),
|
source_location: reg.source_location(),
|
||||||
}][..]
|
}]
|
||||||
.intern(),
|
.intern_slice(),
|
||||||
lhs,
|
lhs,
|
||||||
init,
|
init,
|
||||||
reg.source_location(),
|
reg.source_location(),
|
||||||
|
|
202
crates/fayalite/src/target.rs
Normal file
|
@@ -0,0 +1,202 @@
|
||||||
|
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||||
|
// See Notices.txt for copyright information
|
||||||
|
|
||||||
|
use crate::{intern::Interned, util::job_server::AcquiredJob};
|
||||||
|
use std::{
|
||||||
|
any::Any,
|
||||||
|
fmt,
|
||||||
|
iter::FusedIterator,
|
||||||
|
sync::{Arc, Mutex},
|
||||||
|
};
|
||||||
|
|
||||||
|
pub trait Peripheral: Any + Send + Sync + fmt::Debug {}
|
||||||
|
|
||||||
|
pub trait Tool: Any + Send + Sync + fmt::Debug {
|
||||||
|
fn name(&self) -> Interned<str>;
|
||||||
|
fn run(&self, acquired_job: &mut AcquiredJob);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait Target: Any + Send + Sync + fmt::Debug {
|
||||||
|
fn name(&self) -> Interned<str>;
|
||||||
|
fn peripherals(&self) -> Interned<[Interned<dyn Peripheral>]>;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
struct TargetsMap(Vec<(Interned<str>, Interned<dyn Target>)>);
|
||||||
|
|
||||||
|
impl TargetsMap {
|
||||||
|
fn sort(&mut self) {
|
||||||
|
self.0.sort_by(|(k1, _), (k2, _)| str::cmp(k1, k2));
|
||||||
|
self.0.dedup_by_key(|(k, _)| *k);
|
||||||
|
}
|
||||||
|
fn from_unsorted_vec(unsorted_vec: Vec<(Interned<str>, Interned<dyn Target>)>) -> Self {
|
||||||
|
let mut retval = Self(unsorted_vec);
|
||||||
|
retval.sort();
|
||||||
|
retval
|
||||||
|
}
|
||||||
|
fn extend_from_unsorted_slice(&mut self, additional: &[(Interned<str>, Interned<dyn Target>)]) {
|
||||||
|
self.0.extend_from_slice(additional);
|
||||||
|
self.sort();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Default for TargetsMap {
|
||||||
|
fn default() -> Self {
|
||||||
|
Self::from_unsorted_vec(vec![
|
||||||
|
// TODO: add default targets here
|
||||||
|
])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn access_targets<F: FnOnce(&mut Option<Arc<TargetsMap>>) -> R, R>(f: F) -> R {
|
||||||
|
static TARGETS: Mutex<Option<Arc<TargetsMap>>> = Mutex::new(None);
|
||||||
|
let mut targets_lock = TARGETS.lock().expect("shouldn't be poisoned");
|
||||||
|
f(&mut targets_lock)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_targets<I: IntoIterator<Item = Interned<dyn Target>>>(additional: I) {
|
||||||
|
// run iterator and target methods outside of lock
|
||||||
|
let additional = Vec::from_iter(additional.into_iter().map(|v| (v.name(), v)));
|
||||||
|
access_targets(|targets| {
|
||||||
|
Arc::make_mut(targets.get_or_insert_default()).extend_from_unsorted_slice(&additional);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn targets() -> TargetsSnapshot {
|
||||||
|
access_targets(|targets| match targets {
|
||||||
|
Some(targets) => TargetsSnapshot {
|
||||||
|
targets: targets.clone(),
|
||||||
|
},
|
||||||
|
None => {
|
||||||
|
let new_targets = Arc::<TargetsMap>::default();
|
||||||
|
*targets = Some(new_targets.clone());
|
||||||
|
TargetsSnapshot {
|
||||||
|
targets: new_targets,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct TargetsSnapshot {
|
||||||
|
targets: Arc<TargetsMap>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TargetsSnapshot {
|
||||||
|
pub fn get(&self, key: &str) -> Option<Interned<dyn Target>> {
|
||||||
|
let index = self
|
||||||
|
.targets
|
||||||
|
.0
|
||||||
|
.binary_search_by_key(&key, |(k, _v)| k)
|
||||||
|
.ok()?;
|
||||||
|
Some(self.targets.0[index].1)
|
||||||
|
}
|
||||||
|
pub fn iter(&self) -> TargetsIter {
|
||||||
|
self.into_iter()
|
||||||
|
}
|
||||||
|
pub fn len(&self) -> usize {
|
||||||
|
self.targets.0.len()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for TargetsSnapshot {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str("TargetsSnapshot ")?;
|
||||||
|
f.debug_map().entries(self).finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoIterator for &'_ mut TargetsSnapshot {
|
||||||
|
type Item = (Interned<str>, Interned<dyn Target>);
|
||||||
|
type IntoIter = TargetsIter;
|
||||||
|
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
self.clone().into_iter()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoIterator for &'_ TargetsSnapshot {
|
||||||
|
type Item = (Interned<str>, Interned<dyn Target>);
|
||||||
|
type IntoIter = TargetsIter;
|
||||||
|
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
self.clone().into_iter()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoIterator for TargetsSnapshot {
|
||||||
|
type Item = (Interned<str>, Interned<dyn Target>);
|
||||||
|
type IntoIter = TargetsIter;
|
||||||
|
|
||||||
|
fn into_iter(self) -> Self::IntoIter {
|
||||||
|
TargetsIter {
|
||||||
|
indexes: 0..self.targets.0.len(),
|
||||||
|
targets: self.targets,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone)]
|
||||||
|
pub struct TargetsIter {
|
||||||
|
targets: Arc<TargetsMap>,
|
||||||
|
indexes: std::ops::Range<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl fmt::Debug for TargetsIter {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.write_str("TargetsIter ")?;
|
||||||
|
f.debug_map().entries(self.clone()).finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Iterator for TargetsIter {
|
||||||
|
type Item = (Interned<str>, Interned<dyn Target>);
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<Self::Item> {
|
||||||
|
Some(self.targets.0[self.indexes.next()?])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||||
|
self.indexes.size_hint()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn count(self) -> usize {
|
||||||
|
self.indexes.len()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn last(mut self) -> Option<Self::Item> {
|
||||||
|
self.next_back()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nth(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
Some(self.targets.0[self.indexes.nth(n)?])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn fold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
|
||||||
|
self.indexes
|
||||||
|
.fold(init, move |retval, index| f(retval, self.targets.0[index]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FusedIterator for TargetsIter {}
|
||||||
|
|
||||||
|
impl DoubleEndedIterator for TargetsIter {
|
||||||
|
fn next_back(&mut self) -> Option<Self::Item> {
|
||||||
|
Some(self.targets.0[self.indexes.next_back()?])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||||
|
Some(self.targets.0[self.indexes.nth_back(n)?])
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rfold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
|
||||||
|
self.indexes
|
||||||
|
.rfold(init, move |retval, index| f(retval, self.targets.0[index]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExactSizeIterator for TargetsIter {
|
||||||
|
fn len(&self) -> usize {
|
||||||
|
self.indexes.len()
|
||||||
|
}
|
||||||
|
}
|
|
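Editor's note: as a minimal usage sketch (not part of this PR) of the new target registry in crates/fayalite/src/target.rs above — `report_targets` and the board name are hypothetical, and the `fayalite::target` module path assumes the new file is exposed as `pub mod target`; only `targets()`, `TargetsSnapshot::{len, iter, get}`, and the `Target`/`Peripheral` traits come from the code above.

```rust
// Hedged sketch: list the registered targets and look one up by name.
fn report_targets(wanted: &str) {
    // Take a point-in-time snapshot of the global registry.
    let snapshot = fayalite::target::targets();
    println!("{} target(s) registered", snapshot.len());
    for (name, target) in snapshot.iter() {
        // `Interned<str>` and `Interned<dyn Target>` are printed via their Debug impls.
        println!("  {:?} => {:?}", name, target);
    }
    match snapshot.get(wanted) {
        Some(target) => println!("found {:?}", target.name()),
        None => println!("{:?} is not registered", wanted),
    }
}
```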
@@ -1,11 +1,19 @@
|
||||||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||||
// See Notices.txt for copyright information
|
// See Notices.txt for copyright information
|
||||||
use crate::{
|
use crate::{
|
||||||
cli::{FormalArgs, FormalMode, FormalOutput, RunPhase},
|
build::{
|
||||||
|
BaseJobArgs, BaseJobKind, JobArgsAndDependencies, JobKindAndArgs, JobParams, NoArgs,
|
||||||
|
RunBuild,
|
||||||
|
external::{ExternalCommandArgs, ExternalCommandJobKind},
|
||||||
|
firrtl::{FirrtlArgs, FirrtlJobKind},
|
||||||
|
formal::{Formal, FormalAdditionalArgs, FormalArgs, FormalMode, WriteSbyFileJobKind},
|
||||||
|
verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind},
|
||||||
|
},
|
||||||
|
bundle::BundleType,
|
||||||
firrtl::ExportOptions,
|
firrtl::ExportOptions,
|
||||||
|
module::Module,
|
||||||
util::HashMap,
|
util::HashMap,
|
||||||
};
|
};
|
||||||
use clap::Parser;
|
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use std::{
|
use std::{
|
||||||
fmt::Write,
|
fmt::Write,
|
||||||
|
@@ -14,14 +22,6 @@ use std::{
|
||||||
sync::{Mutex, OnceLock},
|
sync::{Mutex, OnceLock},
|
||||||
};
|
};
|
||||||
|
|
||||||
fn assert_formal_helper() -> FormalArgs {
|
|
||||||
static FORMAL_ARGS: OnceLock<FormalArgs> = OnceLock::new();
|
|
||||||
// ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads
|
|
||||||
FORMAL_ARGS
|
|
||||||
.get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"]))
|
|
||||||
.clone()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
struct CargoMetadata {
|
struct CargoMetadata {
|
||||||
target_directory: String,
|
target_directory: String,
|
||||||
|
@@ -97,26 +97,99 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
|
||||||
.join(dir)
|
.join(dir)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[track_caller]
|
fn make_assert_formal_args(
|
||||||
pub fn assert_formal<M>(
|
test_name: &dyn std::fmt::Display,
|
||||||
test_name: impl std::fmt::Display,
|
formal_mode: FormalMode,
|
||||||
module: M,
|
formal_depth: u64,
|
||||||
mode: FormalMode,
|
|
||||||
depth: u64,
|
|
||||||
solver: Option<&str>,
|
solver: Option<&str>,
|
||||||
export_options: ExportOptions,
|
export_options: ExportOptions,
|
||||||
) where
|
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
|
||||||
FormalArgs: RunPhase<M, Output = FormalOutput>,
|
let args = JobKindAndArgs {
|
||||||
{
|
kind: BaseJobKind,
|
||||||
let mut args = assert_formal_helper();
|
args: BaseJobArgs::from_output_dir_and_env(get_assert_formal_target_path(&test_name)),
|
||||||
args.verilog.firrtl.base.redirect_output_for_rust_test = true;
|
};
|
||||||
args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name));
|
let dependencies = JobArgsAndDependencies {
|
||||||
args.verilog.firrtl.export_options = export_options;
|
args,
|
||||||
args.verilog.debug = true;
|
dependencies: (),
|
||||||
args.mode = mode;
|
};
|
||||||
args.depth = depth;
|
let args = JobKindAndArgs {
|
||||||
if let Some(solver) = solver {
|
kind: FirrtlJobKind,
|
||||||
args.solver = solver.into();
|
args: FirrtlArgs { export_options },
|
||||||
|
};
|
||||||
|
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||||
|
let args = JobKindAndArgs {
|
||||||
|
kind: ExternalCommandJobKind::new(),
|
||||||
|
args: ExternalCommandArgs::resolve_program_path(
|
||||||
|
None,
|
||||||
|
UnadjustedVerilogArgs {
|
||||||
|
firtool_extra_args: vec![],
|
||||||
|
verilog_dialect: None,
|
||||||
|
verilog_debug: true,
|
||||||
|
},
|
||||||
|
)?,
|
||||||
|
};
|
||||||
|
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||||
|
let args = JobKindAndArgs {
|
||||||
|
kind: VerilogJobKind,
|
||||||
|
args: VerilogJobArgs {},
|
||||||
|
};
|
||||||
|
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||||
|
let args = JobKindAndArgs {
|
||||||
|
kind: WriteSbyFileJobKind,
|
||||||
|
args: FormalArgs {
|
||||||
|
sby_extra_args: vec![],
|
||||||
|
formal_mode,
|
||||||
|
formal_depth,
|
||||||
|
formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(),
|
||||||
|
smtbmc_extra_args: vec![],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
let dependencies = JobArgsAndDependencies { args, dependencies };
|
||||||
|
let args = JobKindAndArgs {
|
||||||
|
kind: ExternalCommandJobKind::new(),
|
||||||
|
args: ExternalCommandArgs::resolve_program_path(None, FormalAdditionalArgs {})?,
|
||||||
|
};
|
||||||
|
Ok(JobArgsAndDependencies { args, dependencies })
|
||||||
}
|
}
|
||||||
args.run(module).expect("testing::assert_formal() failed");
|
|
||||||
|
pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||||
|
test_name: impl std::fmt::Display,
|
||||||
|
module: M,
|
||||||
|
formal_mode: FormalMode,
|
||||||
|
formal_depth: u64,
|
||||||
|
solver: Option<&str>,
|
||||||
|
export_options: ExportOptions,
|
||||||
|
) -> eyre::Result<()> {
|
||||||
|
const APP_NAME: &'static str = "fayalite::testing::assert_formal";
|
||||||
|
make_assert_formal_args(
|
||||||
|
&test_name,
|
||||||
|
formal_mode,
|
||||||
|
formal_depth,
|
||||||
|
solver,
|
||||||
|
export_options,
|
||||||
|
)?
|
||||||
|
.run(
|
||||||
|
|NoArgs {}| Ok(JobParams::new(module, APP_NAME)),
|
||||||
|
clap::Command::new(APP_NAME), // not actually used, so we can use an arbitrary value
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[track_caller]
|
||||||
|
pub fn assert_formal<M: AsRef<Module<T>>, T: BundleType>(
|
||||||
|
test_name: impl std::fmt::Display,
|
||||||
|
module: M,
|
||||||
|
formal_mode: FormalMode,
|
||||||
|
formal_depth: u64,
|
||||||
|
solver: Option<&str>,
|
||||||
|
export_options: ExportOptions,
|
||||||
|
) {
|
||||||
|
try_assert_formal(
|
||||||
|
test_name,
|
||||||
|
module,
|
||||||
|
formal_mode,
|
||||||
|
formal_depth,
|
||||||
|
solver,
|
||||||
|
export_options,
|
||||||
|
)
|
||||||
|
.expect("testing::assert_formal() failed");
|
||||||
}
|
}
|
||||||
|
|
|
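Editor's note: a hedged sketch of calling the reworked `assert_formal` above from a test; `my_module()` is a hypothetical module constructor, and `FormalMode::BMC` / `ExportOptions::default()` are assumed to still be available under the new `build::formal` and `firrtl` paths.

```rust
use fayalite::{build::formal::FormalMode, firrtl::ExportOptions, testing::assert_formal};

#[test]
fn formal_smoke_test() {
    assert_formal(
        "formal_smoke_test",      // test_name: selects the per-test output directory
        my_module(),              // hypothetical module under test: M: AsRef<Module<T>>
        FormalMode::BMC,          // formal_mode
        20,                       // formal_depth
        None,                     // solver: falls back to FormalArgs::DEFAULT_SOLVER
        ExportOptions::default(), // export_options
    );
}
```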
@@ -36,8 +36,11 @@ pub use scoped_ref::ScopedRef;
|
||||||
pub(crate) use misc::chain;
|
pub(crate) use misc::chain;
|
||||||
#[doc(inline)]
|
#[doc(inline)]
|
||||||
pub use misc::{
|
pub use misc::{
|
||||||
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter, interned_bit,
|
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
|
||||||
iter_eq_by, slice_range, try_slice_range,
|
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
|
||||||
|
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, os_str_strip_prefix,
|
||||||
|
os_str_strip_suffix, serialize_to_json_ascii, serialize_to_json_ascii_pretty,
|
||||||
|
serialize_to_json_ascii_pretty_with_indent, slice_range, try_slice_range,
|
||||||
};
|
};
|
||||||
|
|
||||||
pub mod job_server;
|
pub mod job_server;
|
||||||
|
|
|
@@ -1,26 +1,36 @@
|
||||||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||||
// See Notices.txt for copyright information
|
// See Notices.txt for copyright information
|
||||||
|
|
||||||
use ctor::ctor;
|
use ctor::{ctor, dtor};
|
||||||
use jobslot::{Acquired, Client};
|
use jobslot::Client;
|
||||||
use std::{
|
use std::{
|
||||||
ffi::OsString,
|
ffi::OsString,
|
||||||
mem,
|
io, mem,
|
||||||
num::NonZeroUsize,
|
num::NonZeroUsize,
|
||||||
sync::{Condvar, Mutex, Once, OnceLock},
|
sync::{Mutex, MutexGuard},
|
||||||
thread::spawn,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
fn get_or_make_client() -> &'static Client {
|
|
||||||
#[ctor]
|
#[ctor]
|
||||||
static CLIENT: OnceLock<Client> = unsafe {
|
static CLIENT: Mutex<Option<Option<Client>>> = unsafe { Mutex::new(Some(Client::from_env())) };
|
||||||
match Client::from_env() {
|
|
||||||
Some(client) => OnceLock::from(client),
|
|
||||||
None => OnceLock::new(),
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
CLIENT.get_or_init(|| {
|
#[dtor]
|
||||||
|
fn drop_client() {
|
||||||
|
drop(
|
||||||
|
match CLIENT.lock() {
|
||||||
|
Ok(v) => v,
|
||||||
|
Err(e) => e.into_inner(),
|
||||||
|
}
|
||||||
|
.take(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_or_make_client() -> Client {
|
||||||
|
CLIENT
|
||||||
|
.lock()
|
||||||
|
.expect("shouldn't have panicked")
|
||||||
|
.as_mut()
|
||||||
|
.expect("shutting down")
|
||||||
|
.get_or_insert_with(|| {
|
||||||
let mut available_parallelism = None;
|
let mut available_parallelism = None;
|
||||||
let mut args = std::env::args_os().skip(1);
|
let mut args = std::env::args_os().skip(1);
|
||||||
while let Some(arg) = args.next() {
|
while let Some(arg) = args.next() {
|
||||||
|
@@ -52,141 +62,95 @@ fn get_or_make_client() -> &'static Client {
|
||||||
} else {
|
} else {
|
||||||
NonZeroUsize::new(1).unwrap()
|
NonZeroUsize::new(1).unwrap()
|
||||||
};
|
};
|
||||||
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
|
Client::new_with_fifo(available_parallelism.get() - 1)
|
||||||
|
.expect("failed to create job server")
|
||||||
})
|
})
|
||||||
|
.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
struct State {
|
struct State {
|
||||||
|
obtained_count: usize,
|
||||||
waiting_count: usize,
|
waiting_count: usize,
|
||||||
available: Vec<Acquired>,
|
|
||||||
implicit_available: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl State {
|
|
||||||
fn total_available(&self) -> usize {
|
|
||||||
self.available.len() + self.implicit_available as usize
|
|
||||||
}
|
|
||||||
fn additional_waiting(&self) -> usize {
|
|
||||||
self.waiting_count.saturating_sub(self.total_available())
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static STATE: Mutex<State> = Mutex::new(State {
|
static STATE: Mutex<State> = Mutex::new(State {
|
||||||
|
obtained_count: 0,
|
||||||
waiting_count: 0,
|
waiting_count: 0,
|
||||||
available: Vec::new(),
|
|
||||||
implicit_available: true,
|
|
||||||
});
|
});
|
||||||
static COND_VAR: Condvar = Condvar::new();
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum AcquiredJobInner {
|
|
||||||
FromJobServer(Acquired),
|
|
||||||
ImplicitJob,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct AcquiredJob {
|
pub struct AcquiredJob {
|
||||||
job: AcquiredJobInner,
|
client: Client,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl AcquiredJob {
|
impl AcquiredJob {
|
||||||
fn start_acquire_thread() {
|
pub fn acquire() -> io::Result<Self> {
|
||||||
static STARTED_THREAD: Once = Once::new();
|
|
||||||
STARTED_THREAD.call_once(|| {
|
|
||||||
spawn(|| {
|
|
||||||
let mut acquired = None;
|
|
||||||
let client = get_or_make_client();
|
let client = get_or_make_client();
|
||||||
|
struct Waiting {}
|
||||||
|
|
||||||
|
impl Waiting {
|
||||||
|
fn done(self) -> MutexGuard<'static, State> {
|
||||||
|
mem::forget(self);
|
||||||
let mut state = STATE.lock().unwrap();
|
let mut state = STATE.lock().unwrap();
|
||||||
loop {
|
state.waiting_count -= 1;
|
||||||
state = if state.additional_waiting() == 0 {
|
|
||||||
if acquired.is_some() {
|
|
||||||
drop(state);
|
|
||||||
drop(acquired.take()); // drop Acquired outside of lock
|
|
||||||
STATE.lock().unwrap()
|
|
||||||
} else {
|
|
||||||
COND_VAR.wait(state).unwrap()
|
|
||||||
}
|
|
||||||
} else if acquired.is_some() {
|
|
||||||
// allocate space before moving Acquired to ensure we
|
|
||||||
// drop Acquired outside of the lock on panic
|
|
||||||
state.available.reserve(1);
|
|
||||||
state.available.push(acquired.take().unwrap());
|
|
||||||
COND_VAR.notify_all();
|
|
||||||
state
|
state
|
||||||
} else {
|
|
||||||
drop(state);
|
|
||||||
acquired = Some(
|
|
||||||
client
|
|
||||||
.acquire()
|
|
||||||
.expect("can't acquire token from job server"),
|
|
||||||
);
|
|
||||||
STATE.lock().unwrap()
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
fn acquire_inner(block: bool) -> Option<Self> {
|
impl Drop for Waiting {
|
||||||
Self::start_acquire_thread();
|
fn drop(&mut self) {
|
||||||
|
STATE.lock().unwrap().waiting_count -= 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
let mut state = STATE.lock().unwrap();
|
let mut state = STATE.lock().unwrap();
|
||||||
loop {
|
if state.obtained_count == 0 && state.waiting_count == 0 {
|
||||||
if let Some(acquired) = state.available.pop() {
|
state.obtained_count = 1; // get implicit token
|
||||||
return Some(Self {
|
return Ok(Self { client });
|
||||||
job: AcquiredJobInner::FromJobServer(acquired),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if state.implicit_available {
|
|
||||||
state.implicit_available = false;
|
|
||||||
return Some(Self {
|
|
||||||
job: AcquiredJobInner::ImplicitJob,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if !block {
|
|
||||||
return None;
|
|
||||||
}
|
}
|
||||||
state.waiting_count += 1;
|
state.waiting_count += 1;
|
||||||
state = COND_VAR.wait(state).unwrap();
|
drop(state);
|
||||||
state.waiting_count -= 1;
|
let waiting = Waiting {};
|
||||||
}
|
client.acquire_raw()?;
|
||||||
}
|
state = waiting.done();
|
||||||
pub fn try_acquire() -> Option<Self> {
|
state.obtained_count = state
|
||||||
Self::acquire_inner(false)
|
.obtained_count
|
||||||
}
|
.checked_add(1)
|
||||||
pub fn acquire() -> Self {
|
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
|
||||||
Self::acquire_inner(true).expect("failed to acquire token")
|
drop(state);
|
||||||
|
Ok(Self { client })
|
||||||
}
|
}
|
||||||
pub fn run_command<R>(
|
pub fn run_command<R>(
|
||||||
&mut self,
|
&mut self,
|
||||||
cmd: std::process::Command,
|
cmd: std::process::Command,
|
||||||
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
||||||
) -> std::io::Result<R> {
|
) -> std::io::Result<R> {
|
||||||
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
|
self.client.configure_make_and_run_with_fifo(cmd, f)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Drop for AcquiredJob {
|
impl Drop for AcquiredJob {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
let mut state = STATE.lock().unwrap();
|
let mut state = STATE.lock().unwrap();
|
||||||
match &self.job {
|
match &mut *state {
|
||||||
AcquiredJobInner::FromJobServer(_) => {
|
State {
|
||||||
if state.waiting_count > state.available.len() + state.implicit_available as usize {
|
obtained_count: 0, ..
|
||||||
// allocate space before moving Acquired to ensure we
|
} => unreachable!(),
|
||||||
// drop Acquired outside of the lock on panic
|
State {
|
||||||
state.available.reserve(1);
|
obtained_count: obtained_count @ 1,
|
||||||
let AcquiredJobInner::FromJobServer(acquired) =
|
waiting_count,
|
||||||
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
|
} => {
|
||||||
else {
|
*obtained_count = 0; // drop implicit token
|
||||||
unreachable!()
|
let any_waiting = *waiting_count != 0;
|
||||||
};
|
drop(state);
|
||||||
state.available.push(acquired);
|
if any_waiting {
|
||||||
COND_VAR.notify_all();
|
// we have the implicit token, but some other thread is trying to acquire a token,
|
||||||
|
// release the implicit token so they can acquire it.
|
||||||
|
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
AcquiredJobInner::ImplicitJob => {
|
State { obtained_count, .. } => {
|
||||||
state.implicit_available = true;
|
*obtained_count = obtained_count.saturating_sub(1);
|
||||||
if state.waiting_count > state.available.len() {
|
drop(state);
|
||||||
COND_VAR.notify_all();
|
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
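Editor's note: a brief sketch of the reworked job-server API above — `AcquiredJob::acquire()` now returns `io::Result<Self>` instead of panicking, and `run_command` goes through the `Client` stored in the token. The `firtool --version` invocation is only illustrative.

```rust
use fayalite::util::job_server::AcquiredJob;
use std::process::Command;

fn run_firtool_version() -> std::io::Result<()> {
    // Blocks until a job-server token (or the implicit token) is available.
    let mut job = AcquiredJob::acquire()?;
    let status =
        job.run_command(Command::new("firtool"), |cmd| cmd.arg("--version").status())?;
    println!("firtool exited with {status}");
    Ok(()) // the token is released when `job` is dropped
}
```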
@@ -4,7 +4,9 @@ use crate::intern::{Intern, Interned};
|
||||||
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
||||||
use std::{
|
use std::{
|
||||||
cell::Cell,
|
cell::Cell,
|
||||||
|
ffi::OsStr,
|
||||||
fmt::{self, Debug, Write},
|
fmt::{self, Debug, Write},
|
||||||
|
io,
|
||||||
ops::{Bound, Range, RangeBounds},
|
ops::{Bound, Range, RangeBounds},
|
||||||
rc::Rc,
|
rc::Rc,
|
||||||
sync::{Arc, OnceLock},
|
sync::{Arc, OnceLock},
|
||||||
|
@@ -243,3 +245,343 @@ pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<R
|
||||||
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
|
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
|
||||||
try_slice_range(range, size).expect("range out of bounds")
|
try_slice_range(range, size).expect("range out of bounds")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub trait SerdeJsonEscapeIfTest {
|
||||||
|
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait SerdeJsonEscapeIfTestResult {
|
||||||
|
fn to_result(self) -> serde_json::Result<bool>;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SerdeJsonEscapeIfTestResult for bool {
|
||||||
|
fn to_result(self) -> serde_json::Result<bool> {
|
||||||
|
Ok(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
|
||||||
|
fn to_result(self) -> serde_json::Result<bool> {
|
||||||
|
self.map_err(Into::into)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ?Sized + FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
|
||||||
|
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
|
||||||
|
self(ch).to_result()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
|
||||||
|
fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
for utf16 in ch.encode_utf16(&mut [0; 2]) {
|
||||||
|
write!(writer, "\\u{utf16:04x}")?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
|
||||||
|
impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
|
||||||
|
|
||||||
|
pub struct SerdeJsonEscapeIf<Test, Base = serde_json::ser::CompactFormatter> {
|
||||||
|
pub base: Base,
|
||||||
|
pub test: Test,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<Test: SerdeJsonEscapeIfTest, Base: SerdeJsonEscapeIfFormatter> serde_json::ser::Formatter
|
||||||
|
for SerdeJsonEscapeIf<Test, Base>
|
||||||
|
{
|
||||||
|
fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_null(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_bool(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_i8(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_i16(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_i32(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_i64(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_i128(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_u8(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_u16(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_u32(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_u64(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_u128(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_f32(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_f64(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_number_str(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.begin_string(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.end_string(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
while let Some((next_escape_index, next_escape_char)) = fragment
|
||||||
|
.char_indices()
|
||||||
|
.find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
|
||||||
|
Ok(false) => None,
|
||||||
|
Ok(true) => Some(Ok((index, ch))),
|
||||||
|
Err(e) => Some(Err(e)),
|
||||||
|
})
|
||||||
|
.transpose()?
|
||||||
|
{
|
||||||
|
let (no_escapes, rest) = fragment.split_at(next_escape_index);
|
||||||
|
fragment = &rest[next_escape_char.len_utf8()..];
|
||||||
|
self.base.write_string_fragment(writer, no_escapes)?;
|
||||||
|
self.base.write_unicode_escape(writer, next_escape_char)?;
|
||||||
|
}
|
||||||
|
self.base.write_string_fragment(writer, fragment)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_char_escape<W>(
|
||||||
|
&mut self,
|
||||||
|
writer: &mut W,
|
||||||
|
char_escape: serde_json::ser::CharEscape,
|
||||||
|
) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_char_escape(writer, char_escape)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_byte_array(writer, value)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.begin_array(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.end_array(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.begin_array_value(writer, first)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.end_array_value(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.begin_object(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.end_object(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.begin_object_key(writer, first)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.end_object_key(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.begin_object_value(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.end_object_value(writer)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
|
||||||
|
where
|
||||||
|
W: ?Sized + io::Write,
|
||||||
|
{
|
||||||
|
self.base.write_raw_fragment(writer, fragment)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn serialize_to_json_ascii_helper<F: SerdeJsonEscapeIfFormatter, S: serde::Serialize + ?Sized>(
|
||||||
|
v: &S,
|
||||||
|
base: F,
|
||||||
|
) -> serde_json::Result<String> {
|
||||||
|
let mut retval = Vec::new();
|
||||||
|
v.serialize(&mut serde_json::ser::Serializer::with_formatter(
|
||||||
|
&mut retval,
|
||||||
|
SerdeJsonEscapeIf {
|
||||||
|
base,
|
||||||
|
test: |ch| ch < '\x20' || ch > '\x7F',
|
||||||
|
},
|
||||||
|
))?;
|
||||||
|
String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize_to_json_ascii<T: serde::Serialize + ?Sized>(v: &T) -> serde_json::Result<String> {
|
||||||
|
serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize_to_json_ascii_pretty<T: serde::Serialize + ?Sized>(
|
||||||
|
v: &T,
|
||||||
|
) -> serde_json::Result<String> {
|
||||||
|
serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
|
||||||
|
v: &T,
|
||||||
|
indent: &str,
|
||||||
|
) -> serde_json::Result<String> {
|
||||||
|
serialize_to_json_ascii_helper(
|
||||||
|
v,
|
||||||
|
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn os_str_strip_prefix<'a>(os_str: &'a OsStr, prefix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||||
|
os_str
|
||||||
|
.as_encoded_bytes()
|
||||||
|
.strip_prefix(prefix.as_ref().as_bytes())
|
||||||
|
.map(|bytes| {
|
||||||
|
// Safety: we removed a UTF-8 prefix so bytes starts with a valid boundary
|
||||||
|
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn os_str_strip_suffix<'a>(os_str: &'a OsStr, suffix: impl AsRef<str>) -> Option<&'a OsStr> {
|
||||||
|
os_str
|
||||||
|
.as_encoded_bytes()
|
||||||
|
.strip_suffix(suffix.as_ref().as_bytes())
|
||||||
|
.map(|bytes| {
|
||||||
|
// Safety: we removed a UTF-8 suffix so bytes ends with a valid boundary
|
||||||
|
unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
|
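Editor's note: a short sketch exercising the new misc.rs helpers above; the struct and strings are made up for illustration. `serialize_to_json_ascii` escapes every character outside the printable-ASCII range as `\uXXXX`, and `os_str_strip_suffix` trims a known suffix without assuming the `OsStr` is valid UTF-8. The re-export paths assume these names appear in `fayalite::util` as shown in the util/mod.rs hunk above.

```rust
use fayalite::util::{os_str_strip_suffix, serialize_to_json_ascii};
use std::ffi::OsStr;

#[derive(serde::Serialize)]
struct Meta<'a> {
    name: &'a str,
}

fn demo() -> serde_json::Result<()> {
    // Non-ASCII characters are emitted as UTF-16 escapes by SerdeJsonEscapeIf.
    let json = serialize_to_json_ascii(&Meta { name: "héllo" })?;
    assert_eq!(json, r#"{"name":"h\u00e9llo"}"#);
    // Strip a known extension while staying in OsStr land.
    let stem = os_str_strip_suffix(OsStr::new("top.v"), ".v");
    assert_eq!(stem, Some(OsStr::new("top")));
    Ok(())
}
```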
@@ -212,7 +212,7 @@ pub fn queue<T: Type>(
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::{
|
use crate::{
|
||||||
cli::FormalMode, firrtl::ExportOptions,
|
build::formal::FormalMode, firrtl::ExportOptions,
|
||||||
module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
|
module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
|
||||||
ty::StaticType,
|
ty::StaticType,
|
||||||
};
|
};
|
||||||
|
|
|
@@ -3,7 +3,7 @@
|
||||||
//! Formal tests in Fayalite
|
//! Formal tests in Fayalite
|
||||||
|
|
||||||
use fayalite::{
|
use fayalite::{
|
||||||
cli::FormalMode,
|
build::formal::FormalMode,
|
||||||
clock::{Clock, ClockDomain},
|
clock::{Clock, ClockDomain},
|
||||||
expr::{CastTo, HdlPartialEq},
|
expr::{CastTo, HdlPartialEq},
|
||||||
firrtl::ExportOptions,
|
firrtl::ExportOptions,
|
||||||
|
|
|
@@ -156,7 +156,7 @@ note: required by a bound in `intern_sized`
|
||||||
|
|
|
|
||||||
| pub trait Intern: Any + Send + Sync {
|
| pub trait Intern: Any + Send + Sync {
|
||||||
| ^^^^ required by this bound in `Intern::intern_sized`
|
| ^^^^ required by this bound in `Intern::intern_sized`
|
||||||
| fn intern(&self) -> Interned<Self>;
|
...
|
||||||
| fn intern_sized(self) -> Interned<Self>
|
| fn intern_sized(self) -> Interned<Self>
|
||||||
| ------------ required by a bound in this associated function
|
| ------------ required by a bound in this associated function
|
||||||
help: consider dereferencing here
|
help: consider dereferencing here
|
||||||
|
@@ -188,7 +188,7 @@ note: required by a bound in `intern_sized`
|
||||||
|
|
|
|
||||||
| pub trait Intern: Any + Send + Sync {
|
| pub trait Intern: Any + Send + Sync {
|
||||||
| ^^^^ required by this bound in `Intern::intern_sized`
|
| ^^^^ required by this bound in `Intern::intern_sized`
|
||||||
| fn intern(&self) -> Interned<Self>;
|
...
|
||||||
| fn intern_sized(self) -> Interned<Self>
|
| fn intern_sized(self) -> Interned<Self>
|
||||||
| ------------ required by a bound in this associated function
|
| ------------ required by a bound in this associated function
|
||||||
help: consider dereferencing here
|
help: consider dereferencing here
|
||||||
|
@@ -255,7 +255,7 @@ note: required by a bound in `intern_sized`
|
||||||
|
|
|
|
||||||
| pub trait Intern: Any + Send + Sync {
|
| pub trait Intern: Any + Send + Sync {
|
||||||
| ^^^^ required by this bound in `Intern::intern_sized`
|
| ^^^^ required by this bound in `Intern::intern_sized`
|
||||||
| fn intern(&self) -> Interned<Self>;
|
...
|
||||||
| fn intern_sized(self) -> Interned<Self>
|
| fn intern_sized(self) -> Interned<Self>
|
||||||
| ------------ required by a bound in this associated function
|
| ------------ required by a bound in this associated function
|
||||||
help: consider dereferencing here
|
help: consider dereferencing here
|
||||||
|
|
|
@@ -1176,7 +1176,8 @@
|
||||||
"BlackBoxInline": "Visible",
|
"BlackBoxInline": "Visible",
|
||||||
"BlackBoxPath": "Visible",
|
"BlackBoxPath": "Visible",
|
||||||
"DocString": "Visible",
|
"DocString": "Visible",
|
||||||
"CustomFirrtl": "Visible"
|
"CustomFirrtl": "Visible",
|
||||||
|
"Xilinx": "Visible"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"DontTouchAnnotation": {
|
"DontTouchAnnotation": {
|
||||||
|
@@ -1214,6 +1215,23 @@
|
||||||
"$kind": "Opaque"
|
"$kind": "Opaque"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"XilinxAnnotation": {
|
||||||
|
"data": {
|
||||||
|
"$kind": "Enum",
|
||||||
|
"XdcLocation": "Visible",
|
||||||
|
"XdcIOStandard": "Visible"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"XdcLocationAnnotation": {
|
||||||
|
"data": {
|
||||||
|
"$kind": "Opaque"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"XdcIOStandardAnnotation": {
|
||||||
|
"data": {
|
||||||
|
"$kind": "Opaque"
|
||||||
|
}
|
||||||
|
},
|
||||||
"Target": {
|
"Target": {
|
||||||
"data": {
|
"data": {
|
||||||
"$kind": "Enum",
|
"$kind": "Enum",
|
||||||
|
|