diff --git a/.forgejo/workflows/deps.yml b/.forgejo/workflows/deps.yml deleted file mode 100644 index 1a58a35..0000000 --- a/.forgejo/workflows/deps.yml +++ /dev/null @@ -1,77 +0,0 @@ -# SPDX-License-Identifier: LGPL-3.0-or-later -# See Notices.txt for copyright information -on: - workflow_call: - outputs: - cache-primary-key: - value: ${{ jobs.deps.outputs.cache-primary-key }} - -jobs: - deps: - runs-on: debian-12 - outputs: - cache-primary-key: ${{ steps.restore-deps.outputs.cache-primary-key }} - steps: - - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - uses: actions/cache/restore@v3 - id: restore-deps - with: - path: deps - key: ${{ github.repository }}-deps-${{ runner.os }}-${{ hashFiles('.forgejo/workflows/deps.yml') }} - lookup-only: true - - name: Install Apt packages - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - apt-get update -qq - apt-get install -qq \ - bison \ - build-essential \ - ccache \ - clang \ - cvc5 \ - flex \ - gawk \ - g++ \ - git \ - libboost-filesystem-dev \ - libboost-python-dev \ - libboost-system-dev \ - libffi-dev \ - libreadline-dev \ - lld \ - pkg-config \ - python3 \ - python3-click \ - tcl-dev \ - zlib1g-dev - - name: Install Firtool - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - mkdir -p deps - wget -O deps/firrtl.tar.gz https://github.com/llvm/circt/releases/download/firtool-1.86.0/firrtl-bin-linux-x64.tar.gz - sha256sum -c - <<<'bf6f4ab18ae76f135c944efbd81e25391c31c1bd0617c58ab0592640abefee14 deps/firrtl.tar.gz' - tar -C deps -xvaf deps/firrtl.tar.gz - rm -rf deps/firtool - mv deps/firtool-1.86.0 deps/firtool - - name: Get SymbiYosys - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - git clone --depth=1 --branch=yosys-0.45 https://git.libre-chip.org/mirrors/sby deps/sby - - name: Build Z3 - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - git clone --depth=1 --recursive --branch=z3-4.13.3 https://git.libre-chip.org/mirrors/z3 deps/z3 - (cd deps/z3; PYTHON=python3 ./configure --prefix=/usr/local) - make -C deps/z3/build -j"$(nproc)" - - name: Build Yosys - if: steps.restore-deps.outputs.cache-hit != 'true' - run: | - git clone --depth=1 --recursive --branch=0.45 https://git.libre-chip.org/mirrors/yosys deps/yosys - make -C deps/yosys -j"$(nproc)" - - uses: actions/cache/save@v3 - if: steps.restore-deps.outputs.cache-hit != 'true' - with: - path: deps - key: ${{ steps.restore-deps.outputs.cache-primary-key }} diff --git a/.forgejo/workflows/test.yml b/.forgejo/workflows/test.yml index 21e56bf..1b9910e 100644 --- a/.forgejo/workflows/test.yml +++ b/.forgejo/workflows/test.yml @@ -3,57 +3,16 @@ on: [push, pull_request] jobs: - deps: - runs-on: debian-12 - uses: ./.forgejo/workflows/deps.yml test: runs-on: debian-12 - needs: deps + container: + image: git.libre-chip.org/libre-chip/fayalite-deps:latest steps: - uses: actions/checkout@v3 with: fetch-depth: 0 - run: | scripts/check-copyright.sh - - run: | - apt-get update -qq - apt-get install -qq \ - bison \ - build-essential \ - ccache \ - clang \ - cvc5 \ - flex \ - gawk \ - git \ - libboost-filesystem-dev \ - libboost-python-dev \ - libboost-system-dev \ - libffi-dev \ - libreadline-dev \ - lld \ - pkg-config \ - python3 \ - python3-click \ - tcl-dev \ - z3 \ - zlib1g-dev - - run: | - curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.89.0 - source "$HOME/.cargo/env" - rustup component add rust-src - echo "$PATH" >> "$GITHUB_PATH" - - uses: actions/cache/restore@v3 - with: - path: deps - key: ${{ 
needs.deps.outputs.cache-primary-key }} - fail-on-cache-miss: true - - run: | - make -C deps/z3/build install - make -C deps/sby install - make -C deps/yosys install - export PATH="$(realpath deps/firtool/bin):$PATH" - echo "$PATH" >> "$GITHUB_PATH" - uses: https://git.libre-chip.org/mirrors/rust-cache@v2 with: save-if: ${{ github.ref == 'refs/heads/master' }} @@ -62,3 +21,4 @@ jobs: - run: cargo test --doc --features=unstable-doc - run: cargo doc --features=unstable-doc - run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher + - run: cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --device xc7a100ticsg324-1L -o target/blinky-out --clock-frequency=$((1000*1000*100)) diff --git a/Cargo.lock b/Cargo.lock index e0c32e9..f4b564a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "allocator-api2" @@ -25,9 +25,9 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" [[package]] name = "anstyle-parse" @@ -81,6 +81,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + [[package]] name = "basic-toml" version = "0.1.8" @@ -149,9 +155,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clap" -version = "4.5.9" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462" +checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" dependencies = [ "clap_builder", "clap_derive", @@ -159,9 +165,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.9" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942" +checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" dependencies = [ "anstream", "anstyle", @@ -170,10 +176,19 @@ dependencies = [ ] [[package]] -name = "clap_derive" -version = "4.5.8" +name = "clap_complete" +version = "4.5.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" +checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a" +dependencies = [ + "clap", +] + +[[package]] +name = "clap_derive" +version = "4.5.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" dependencies = [ "heck", "proc-macro2", @@ -183,9 +198,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675" [[package]] name = "colorchoice" @@ -291,9 +306,11 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" name = "fayalite" version = "0.3.0" dependencies = [ + "base64", "bitvec", "blake3", "clap", + "clap_complete", "ctor", "eyre", "fayalite-proc-macros", @@ -302,7 +319,6 @@ dependencies = [ "jobslot", "num-bigint", "num-traits", - "os_pipe", "petgraph", "serde", "serde_json", @@ -377,12 +393,13 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" dependencies = [ "cfg-if", "libc", + "r-efi", "wasi", ] @@ -449,23 +466,23 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "jobslot" -version = "0.2.19" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d" +checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c" dependencies = [ "cfg-if", "derive_destructure2", "getrandom", "libc", "scopeguard", - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] name = "libc" -version = "0.2.153" +version = "0.2.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "linux-raw-sys" @@ -507,16 +524,6 @@ version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" -[[package]] -name = "os_pipe" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffd2b0a5634335b135d5728d84c5e0fd726954b87111f7506a61c502280d982" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - [[package]] name = "petgraph" version = "0.8.1" @@ -557,6 +564,12 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + [[package]] name = "radium" version = "0.7.0" @@ -748,9 +761,21 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.14.7+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" +dependencies = [ + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", +] [[package]] name = "which" @@ -795,6 +820,12 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-link" +version = "0.2.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-sys" version = "0.52.0" @@ -806,11 +837,11 @@ dependencies = [ [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-targets", + "windows-link", ] [[package]] @@ -883,6 +914,12 @@ version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" +[[package]] +name = "wit-bindgen" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" + [[package]] name = "wyz" version = "0.5.1" diff --git a/Cargo.toml b/Cargo.toml index 5a792c6..b905f73 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,17 +18,18 @@ fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" } fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" } base16ct = "0.2.0" +base64 = "0.22.1" bitvec = { version = "1.0.1", features = ["serde"] } blake3 = { version = "1.5.4", features = ["serde"] } clap = { version = "4.5.9", features = ["derive", "env", "string"] } +clap_complete = "4.5.58" ctor = "0.2.8" eyre = "0.6.12" hashbrown = "0.15.2" indexmap = { version = "2.5.0", features = ["serde"] } -jobslot = "0.2.19" +jobslot = "0.2.23" num-bigint = "0.4.6" num-traits = "0.2.16" -os_pipe = "1.2.1" petgraph = "0.8.1" prettyplease = "0.2.20" proc-macro2 = "1.0.83" diff --git a/crates/fayalite/Cargo.toml b/crates/fayalite/Cargo.toml index 082e607..2403ff5 100644 --- a/crates/fayalite/Cargo.toml +++ b/crates/fayalite/Cargo.toml @@ -14,9 +14,11 @@ rust-version.workspace = true version.workspace = true [dependencies] +base64.workspace = true bitvec.workspace = true blake3.workspace = true clap.workspace = true +clap_complete.workspace = true ctor.workspace = true eyre.workspace = true fayalite-proc-macros.workspace = true @@ -24,7 +26,6 @@ hashbrown.workspace = true jobslot.workspace = true num-bigint.workspace = true num-traits.workspace = true -os_pipe.workspace = true petgraph.workspace = true serde_json.workspace = true serde.workspace = true diff --git a/crates/fayalite/examples/blinky.rs b/crates/fayalite/examples/blinky.rs index 87b77c1..40b22dc 100644 --- a/crates/fayalite/examples/blinky.rs +++ b/crates/fayalite/examples/blinky.rs @@ -1,7 +1,9 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use clap::Parser; -use fayalite::{cli, prelude::*}; +use fayalite::{ + build::{ToArgs, WriteArgs}, + prelude::*, +}; #[hdl_module] fn blinky(clock_frequency: u64) { @@ -32,16 +34,22 @@ fn blinky(clock_frequency: u64) { connect(led, output_reg); } -#[derive(Parser)] -struct Cli { +#[derive(clap::Args, Clone, PartialEq, Eq, Hash, Debug)] +struct ExtraArgs { /// clock frequency in hertz #[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))] clock_frequency: u64, - #[command(subcommand)] - cli: cli::Cli, } -fn main() -> cli::Result { - let cli = Cli::parse(); - cli.cli.run(blinky(cli.clock_frequency)) 
+impl ToArgs for ExtraArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { clock_frequency } = self; + args.write_arg(format_args!("--clock-frequency={clock_frequency}")); + } +} + +fn main() { + BuildCli::main(|_cli, ExtraArgs { clock_frequency }| { + Ok(JobParams::new(blinky(clock_frequency), "blinky")) + }); } diff --git a/crates/fayalite/src/annotations.rs b/crates/fayalite/src/annotations.rs index 70f0460..d578626 100644 --- a/crates/fayalite/src/annotations.rs +++ b/crates/fayalite/src/annotations.rs @@ -147,7 +147,7 @@ macro_rules! make_annotation_enum { ( $(#[$meta:meta])* $vis:vis enum $Annotation:ident { - $($Variant:ident($T:ident),)* + $($Variant:ident($T:ty),)* } ) => { $(#[$meta])* @@ -199,6 +199,8 @@ make_annotation_enum! { BlackBoxPath(BlackBoxPathAnnotation), DocString(DocStringAnnotation), CustomFirrtl(CustomFirrtlAnnotation), + XdcLocation(crate::build::vendor::xilinx::XdcLocationAnnotation), + XdcIOStandard(crate::build::vendor::xilinx::XdcIOStandardAnnotation), } } diff --git a/crates/fayalite/src/build.rs b/crates/fayalite/src/build.rs new file mode 100644 index 0000000..96492cf --- /dev/null +++ b/crates/fayalite/src/build.rs @@ -0,0 +1,2304 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::graph::JobGraph, + bundle::{Bundle, BundleType}, + intern::{Intern, Interned}, + module::Module, + util::job_server::AcquiredJob, +}; +use clap::ArgAction; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error as _}, + ser::Error as _, +}; +use std::{ + any::{Any, TypeId}, + borrow::Cow, + cmp::Ordering, + ffi::OsStr, + fmt, + hash::{Hash, Hasher}, + path::{Path, PathBuf}, + sync::{Arc, OnceLock}, +}; +use tempfile::TempDir; + +pub mod external; +pub mod firrtl; +pub mod formal; +pub mod graph; +pub mod registry; +pub mod vendor; +pub mod verilog; + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(BaseJobKind), + DynJobKind::new(CreateOutputDirJobKind), + ] + .into_iter() + .chain(firrtl::built_in_job_kinds()) + .chain(formal::built_in_job_kinds()) + .chain(vendor::built_in_job_kinds()) + .chain(verilog::built_in_job_kinds()) +} + +#[track_caller] +fn intern_known_utf8_path_buf(v: PathBuf) -> Interned { + let Ok(v) = v.into_os_string().into_string().map(str::intern_owned) else { + unreachable!("known to be valid UTF-8"); + }; + v +} + +#[track_caller] +fn intern_known_utf8_str(v: impl AsRef) -> Interned { + let Some(v) = v.as_ref().to_str().map(str::intern) else { + unreachable!("known to be valid UTF-8"); + }; + v +} + +#[track_caller] +fn interned_known_utf8_method &OsStr>( + v: impl AsRef, + f: F, +) -> Interned { + intern_known_utf8_str(f(v.as_ref().as_ref())) +} + +#[track_caller] +fn interned_known_utf8_path_buf_method PathBuf>( + v: impl AsRef, + f: F, +) -> Interned { + intern_known_utf8_path_buf(f(v.as_ref().as_ref())) +} + +#[derive(Clone, Hash, PartialEq, Eq, Debug)] +#[non_exhaustive] +pub enum JobItem { + Path { + path: Interned, + }, + DynamicPaths { + paths: Vec>, + source_job_name: Interned, + }, +} + +impl JobItem { + pub fn name(&self) -> JobItemName { + match self { + &JobItem::Path { path } => JobItemName::Path { path }, + &JobItem::DynamicPaths { + paths: _, + source_job_name, + } => JobItemName::DynamicPaths { source_job_name }, + } + } +} + +#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub enum JobItemName { + Path { path: Interned }, + 
DynamicPaths { source_job_name: Interned }, +} + +impl JobItemName { + fn as_ref(&self) -> JobItemNameRef<'_> { + match self { + JobItemName::Path { path } => JobItemNameRef::Path { path }, + JobItemName::DynamicPaths { source_job_name } => { + JobItemNameRef::DynamicPaths { source_job_name } + } + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum JobItemNameRef<'a> { + Path { path: &'a str }, + DynamicPaths { source_job_name: &'a str }, +} + +/// ordered by string contents, not by `Interned` +impl PartialOrd for JobItemName { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +/// ordered by string contents, not by `Interned` +impl Ord for JobItemName { + fn cmp(&self, other: &Self) -> Ordering { + if self == other { + Ordering::Equal + } else { + self.as_ref().cmp(&other.as_ref()) + } + } +} + +pub trait WriteArgs: + for<'a> Extend<&'a str> + + Extend + + Extend> + + for<'a> Extend> +{ + fn write_args(&mut self, args: impl IntoIterator) { + self.extend(args.into_iter().map(|v| format!("{v}"))); + } + fn write_string_args(&mut self, args: impl IntoIterator) { + self.extend(args); + } + fn write_str_args<'a>(&mut self, args: impl IntoIterator) { + self.extend(args); + } + fn write_interned_args(&mut self, args: impl IntoIterator>) { + self.extend(args); + } + fn write_arg(&mut self, arg: impl fmt::Display) { + self.extend([format_args!("{arg}")]); + } + fn write_string_arg(&mut self, arg: String) { + self.extend([arg]); + } + fn write_str_arg(&mut self, arg: &str) { + self.extend([arg]); + } + fn write_interned_arg(&mut self, arg: Interned) { + self.extend([arg]); + } + /// finds the first option that is `--{option_name}={value}` and returns `value` + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&str>; +} + +pub struct ArgsWriter(pub Vec); + +impl Default for ArgsWriter { + fn default() -> Self { + Self(Default::default()) + } +} + +impl> ArgsWriter { + fn get_long_option_eq_helper(&self, option_name: &str) -> Option<&str> { + self.0.iter().find_map(|arg| { + arg.as_ref() + .strip_prefix("--") + .and_then(|arg| arg.strip_prefix(option_name)) + .and_then(|arg| arg.strip_prefix("=")) + }) + } +} + +impl<'a, W> Extend> for ArgsWriter +where + Self: Extend, +{ + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(|v| v.to_string())) + } +} + +impl<'a> Extend<&'a str> for ArgsWriter> { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(str::intern)) + } +} + +impl Extend for ArgsWriter> { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(str::intern_owned)) + } +} + +impl Extend> for ArgsWriter { + fn extend>>(&mut self, iter: T) { + self.extend(iter.into_iter().map(String::from)) + } +} + +impl<'a> Extend<&'a str> for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.extend(iter.into_iter().map(String::from)) + } +} + +impl Extend for ArgsWriter { + fn extend>(&mut self, iter: T) { + self.0.extend(iter); + } +} + +impl WriteArgs for ArgsWriter { + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&str> { + self.get_long_option_eq_helper(option_name.as_ref()) + } +} + +impl WriteArgs for ArgsWriter> { + fn get_long_option_eq(&self, option_name: impl AsRef) -> Option<&str> { + self.get_long_option_eq_helper(option_name.as_ref()) + } +} + +pub trait ToArgs: clap::Args + 'static + Send + Sync + Hash + Eq + fmt::Debug + Clone { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)); + fn to_interned_args(&self) -> Interned<[Interned]> { + 
Intern::intern_owned(self.to_interned_args_vec()) + } + fn to_interned_args_vec(&self) -> Vec> { + let mut retval = ArgsWriter::default(); + self.to_args(&mut retval); + retval.0 + } + fn to_string_args(&self) -> Vec { + let mut retval = ArgsWriter::default(); + self.to_args(&mut retval); + retval.0 + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobKindAndArgs { + pub kind: K, + pub args: K::Args, +} + +impl JobKindAndArgs { + pub fn args_to_jobs( + self, + dependencies: ::KindsAndArgs, + params: &JobParams, + ) -> eyre::Result> { + K::args_to_jobs( + JobArgsAndDependencies { + args: self, + dependencies, + }, + params, + ) + } +} + +impl> Copy for JobKindAndArgs {} + +impl From> for DynJobArgs { + fn from(value: JobKindAndArgs) -> Self { + let JobKindAndArgs { kind, args } = value; + DynJobArgs::new(kind, args) + } +} + +impl TryFrom for JobKindAndArgs { + type Error = DynJobArgs; + fn try_from(value: DynJobArgs) -> Result { + value.downcast() + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct JobAndKind { + pub kind: K, + pub job: K::Job, +} + +impl> Clone for JobAndKind { + fn clone(&self) -> Self { + Self { + kind: self.kind.clone(), + job: self.job.clone(), + } + } +} + +impl> Copy for JobAndKind {} + +impl From> for DynJob { + fn from(value: JobAndKind) -> Self { + let JobAndKind { kind, job } = value; + DynJob::new(kind, job) + } +} + +impl> TryFrom for JobAndKind { + type Error = DynJob; + fn try_from(value: DynJob) -> Result { + value.downcast() + } +} + +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobKindAndDependencies { + pub kind: K, + pub dependencies: K::Dependencies, +} + +impl Default for JobKindAndDependencies { + fn default() -> Self { + Self::new(K::default()) + } +} + +impl JobKindAndDependencies { + pub fn new(kind: K) -> Self { + Self { + kind, + dependencies: kind.dependencies(), + } + } +} + +#[derive(Debug, PartialEq, Eq, Hash)] +pub struct JobAndDependencies { + pub job: JobAndKind, + pub dependencies: ::JobsAndKinds, +} + +impl Clone for JobAndDependencies +where + K::Job: Clone, + ::JobsAndKinds: Clone, +{ + fn clone(&self) -> Self { + Self { + job: self.job.clone(), + dependencies: self.dependencies.clone(), + } + } +} + +impl Copy for JobAndDependencies +where + K::Job: Copy, + ::JobsAndKinds: Copy, +{ +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobArgsAndDependencies { + pub args: JobKindAndArgs, + pub dependencies: ::KindsAndArgs, +} + +impl Copy for JobArgsAndDependencies +where + K::Args: Copy, + ::KindsAndArgs: Copy, +{ +} + +impl JobArgsAndDependencies { + pub fn args_to_jobs(self, params: &JobParams) -> eyre::Result> { + K::args_to_jobs(self, params) + } +} + +impl>, D: JobKind> JobArgsAndDependencies { + pub fn args_to_jobs_simple( + self, + params: &JobParams, + f: F, + ) -> eyre::Result> + where + F: FnOnce(K, K::Args, &mut JobAndDependencies) -> eyre::Result, + { + let Self { + args: JobKindAndArgs { kind, args }, + dependencies, + } = self; + let mut dependencies = dependencies.args_to_jobs(params)?; + let job = f(kind, args, &mut dependencies)?; + Ok(JobAndDependencies { + job: JobAndKind { kind, job }, + dependencies, + }) + } +} + +impl>, D: JobKind> + JobArgsAndDependencies> +{ + pub fn args_to_jobs_external_simple( + self, + params: &JobParams, + f: F, + ) -> eyre::Result<( + C::AdditionalJobData, + ::JobsAndKinds, + )> + where + F: FnOnce( + external::ExternalCommandArgs, + &mut JobAndDependencies, + ) -> eyre::Result, + { + let Self { + args: JobKindAndArgs { kind: _, args 
}, + dependencies, + } = self; + let mut dependencies = dependencies.args_to_jobs(params)?; + let additional_job_data = f(args, &mut dependencies)?; + Ok((additional_job_data, dependencies)) + } +} + +pub trait JobDependencies: 'static + Send + Sync + Hash + Eq + fmt::Debug + Copy { + type KindsAndArgs: 'static + Send + Sync + Hash + Eq + fmt::Debug + Clone; + type JobsAndKinds: 'static + Send + Sync + Hash + Eq + fmt::Debug; + fn kinds_dyn_extend>(self, dyn_kinds: &mut E); + fn kinds_dyn(self) -> Vec { + let mut retval = Vec::new(); + self.kinds_dyn_extend(&mut retval); + retval + } + fn into_dyn_jobs_extend>(jobs: Self::JobsAndKinds, dyn_jobs: &mut E); + fn into_dyn_jobs(jobs: Self::JobsAndKinds) -> Vec { + let mut retval = Vec::new(); + Self::into_dyn_jobs_extend(jobs, &mut retval); + retval + } + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs; + #[track_caller] + fn from_dyn_args>(args: I) -> Self::KindsAndArgs { + let mut iter = args.into_iter(); + let retval = Self::from_dyn_args_prefix(&mut iter); + if iter.next().is_some() { + panic!("wrong number of dependencies"); + } + retval + } +} + +impl JobDependencies for JobKindAndDependencies { + type KindsAndArgs = JobArgsAndDependencies; + type JobsAndKinds = JobAndDependencies; + + fn kinds_dyn_extend>(self, dyn_kinds: &mut E) { + let Self { kind, dependencies } = self; + dependencies.kinds_dyn_extend(dyn_kinds); + dyn_kinds.extend([DynJobKind::new(kind)]); + } + + fn into_dyn_jobs_extend>( + jobs: Self::JobsAndKinds, + dyn_jobs: &mut E, + ) { + let JobAndDependencies { job, dependencies } = jobs; + K::Dependencies::into_dyn_jobs_extend(dependencies, dyn_jobs); + dyn_jobs.extend([job.into()]); + } + + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs { + let dependencies = K::Dependencies::from_dyn_args_prefix(args); + let Some(args) = args.next() else { + panic!("wrong number of dependencies"); + }; + match args.downcast() { + Ok(args) => JobArgsAndDependencies { args, dependencies }, + Err(args) => { + panic!( + "wrong type of dependency, expected {} got:\n{args:?}", + std::any::type_name::() + ) + } + } + } +} + +macro_rules! impl_job_dependencies { + (@impl $(($v:ident: $T:ident),)*) => { + impl<$($T: JobDependencies),*> JobDependencies for ($($T,)*) { + type KindsAndArgs = ($($T::KindsAndArgs,)*); + type JobsAndKinds = ($($T::JobsAndKinds,)*); + + fn kinds_dyn_extend>(self, dyn_kinds: &mut E) { + #![allow(unused_variables)] + let ($($v,)*) = self; + $($T::kinds_dyn_extend($v, dyn_kinds);)* + } + + fn into_dyn_jobs_extend>( + jobs: Self::JobsAndKinds, + dyn_jobs: &mut E, + ) { + #![allow(unused_variables)] + let ($($v,)*) = jobs; + $($T::into_dyn_jobs_extend($v, dyn_jobs);)* + } + + #[track_caller] + fn from_dyn_args_prefix>( + args: &mut I, + ) -> Self::KindsAndArgs { + #![allow(unused_variables)] + $(let $v = $T::from_dyn_args_prefix(args);)* + ($($v,)*) + } + } + }; + ($($first:tt, $($rest:tt,)*)?) => { + impl_job_dependencies!(@impl $($first, $($rest,)*)?); + $(impl_job_dependencies!($($rest,)*);)? + }; +} + +impl_job_dependencies! 
{ + (v0: T0), + (v1: T1), + (v2: T2), + (v3: T3), + (v4: T4), + (v5: T5), + (v6: T6), + (v7: T7), + (v8: T8), + (v9: T9), + (v10: T10), + (v11: T11), +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +pub struct JobParams { + main_module: Module, + application_name: Interned, +} + +impl AsRef for JobParams { + fn as_ref(&self) -> &Self { + self + } +} + +impl JobParams { + pub fn new_canonical(main_module: Module, application_name: Interned) -> Self { + Self { + main_module, + application_name, + } + } + pub fn new( + main_module: impl AsRef>, + application_name: impl AsRef, + ) -> Self { + Self::new_canonical( + main_module.as_ref().canonical(), + application_name.as_ref().intern(), + ) + } + pub fn main_module(&self) -> &Module { + &self.main_module + } + pub fn application_name(&self) -> Interned { + self.application_name + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct CommandParams { + pub command_line: Interned<[Interned]>, + pub current_dir: Option>, +} + +impl CommandParams { + fn to_unix_shell_line( + self, + output: &mut W, + mut escape_arg: impl FnMut(&str, &mut W) -> fmt::Result, + ) -> fmt::Result { + let Self { + command_line, + current_dir, + } = self; + let mut end = None; + let mut separator = if let Some(current_dir) = current_dir { + output.write_str("(cd ")?; + end = Some(")"); + if !current_dir.starts_with(|ch: char| { + ch.is_ascii_alphanumeric() || matches!(ch, '/' | '\\' | '.') + }) { + output.write_str("-- ")?; + } + escape_arg(¤t_dir, output)?; + "; exec -- " + } else { + "" + }; + for arg in command_line { + output.write_str(separator)?; + separator = " "; + escape_arg(&arg, output)?; + } + if let Some(end) = end { + output.write_str(end)?; + } + Ok(()) + } +} + +pub trait JobKind: 'static + Send + Sync + Hash + Eq + fmt::Debug + Sized + Copy { + type Args: ToArgs; + type Job: 'static + Send + Sync + Hash + Eq + fmt::Debug + Serialize + DeserializeOwned; + type Dependencies: JobDependencies; + fn dependencies(self) -> Self::Dependencies; + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result>; + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]>; + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]>; + fn name(self) -> Interned; + fn external_command_params(self, job: &Self::Job) -> Option; + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result>; + fn subcommand_hidden(self) -> bool { + false + } + fn external_program(self) -> Option> { + None + } +} + +trait DynJobKindTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn dependencies_kinds_dyn(&self) -> Vec; + fn args_group_id_dyn(&self) -> Option; + fn augment_args_dyn(&self, cmd: clap::Command) -> clap::Command; + fn augment_args_for_update_dyn(&self, cmd: clap::Command) -> clap::Command; + fn from_arg_matches_dyn( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result; + fn name_dyn(&self) -> Interned; + fn subcommand_hidden_dyn(&self) -> bool; + fn deserialize_job_from_json_str(self: Arc, json: &str) -> serde_json::Result; + fn deserialize_job_from_json_value( + self: Arc, + json: &serde_json::Value, + ) -> serde_json::Result; +} + +impl DynJobKindTrait for T { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, 
other: &dyn DynJobKindTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn dependencies_kinds_dyn(&self) -> Vec { + self.dependencies().kinds_dyn() + } + + fn args_group_id_dyn(&self) -> Option { + ::group_id() + } + + fn augment_args_dyn(&self, cmd: clap::Command) -> clap::Command { + ::augment_args(cmd) + } + + fn augment_args_for_update_dyn(&self, cmd: clap::Command) -> clap::Command { + ::augment_args_for_update(cmd) + } + + fn from_arg_matches_dyn( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + Ok(DynJobArgs::new( + *self, + ::from_arg_matches_mut(matches)?, + )) + } + + fn name_dyn(&self) -> Interned { + self.name() + } + + fn subcommand_hidden_dyn(&self) -> bool { + self.subcommand_hidden() + } + + fn deserialize_job_from_json_str(self: Arc, json: &str) -> serde_json::Result { + Ok(DynJob::from_arc(self, serde_json::from_str(json)?)) + } + + fn deserialize_job_from_json_value( + self: Arc, + json: &serde_json::Value, + ) -> serde_json::Result { + Ok(DynJob::from_arc(self, Deserialize::deserialize(json)?)) + } +} + +#[derive(Clone)] +pub struct DynJobKind(Arc); + +impl DynJobKind { + pub fn from_arc(job_kind: Arc) -> Self { + Self(job_kind) + } + pub fn new(job_kind: T) -> Self { + Self(Arc::new(job_kind)) + } + pub fn type_id(&self) -> TypeId { + DynJobKindTrait::as_any(&*self.0).type_id() + } + pub fn downcast(&self) -> Option { + DynJobKindTrait::as_any(&*self.0).downcast_ref().copied() + } + pub fn downcast_arc(self) -> Result, Self> { + if self.downcast::().is_some() { + Ok(Arc::downcast::(self.0.as_arc_any()) + .ok() + .expect("already checked type")) + } else { + Err(self) + } + } + pub fn dependencies_kinds(&self) -> Vec { + DynJobKindTrait::dependencies_kinds_dyn(&*self.0) + } + pub fn args_group_id(&self) -> Option { + DynJobKindTrait::args_group_id_dyn(&*self.0) + } + pub fn augment_args(&self, cmd: clap::Command) -> clap::Command { + DynJobKindTrait::augment_args_dyn(&*self.0, cmd) + } + pub fn augment_args_for_update(&self, cmd: clap::Command) -> clap::Command { + DynJobKindTrait::augment_args_for_update_dyn(&*self.0, cmd) + } + pub fn from_arg_matches( + &self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + DynJobKindTrait::from_arg_matches_dyn(&*self.0, matches) + } + pub fn name(&self) -> Interned { + DynJobKindTrait::name_dyn(&*self.0) + } + pub fn subcommand_hidden(&self) -> bool { + DynJobKindTrait::subcommand_hidden_dyn(&*self.0) + } + pub fn deserialize_job_from_json_str(self, json: &str) -> serde_json::Result { + DynJobKindTrait::deserialize_job_from_json_str(self.0, json) + } + pub fn deserialize_job_from_json_value( + self, + json: &serde_json::Value, + ) -> serde_json::Result { + DynJobKindTrait::deserialize_job_from_json_value(self.0, json) + } + fn make_subcommand_without_args(&self) -> clap::Command { + clap::Command::new(Interned::into_inner(self.name())).hide(self.subcommand_hidden()) + } + pub fn make_subcommand(&self) -> clap::Command { + let mut subcommand = self.make_subcommand_without_args(); + for dependency in self.dependencies_kinds() { + subcommand = dependency.augment_args(subcommand); + } + self.augment_args(subcommand) + } + pub fn make_subcommand_for_update(&self) -> clap::Command { + let mut subcommand = self.make_subcommand_without_args(); + for dependency in self.dependencies_kinds() { + subcommand = dependency.augment_args_for_update(subcommand); + } + 
self.augment_args_for_update(subcommand) + } +} + +impl Hash for DynJobKind { + fn hash(&self, state: &mut H) { + self.type_id().hash(state); + DynJobKindTrait::hash_dyn(&*self.0, state); + } +} + +impl PartialEq for DynJobKind { + fn eq(&self, other: &Self) -> bool { + DynJobKindTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Eq for DynJobKind {} + +impl fmt::Debug for DynJobKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Serialize for DynJobKind { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + self.name().serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynJobKind { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let name = Cow::::deserialize(deserializer)?; + match Self::registry().get_by_name(&name) { + Some(retval) => Ok(retval.clone()), + None => Err(D::Error::custom(format_args!( + "unknown job kind: name not found in registry: {name:?}" + ))), + } + } +} + +#[derive(Copy, Clone, Debug, Default)] +pub struct DynJobKindValueParser; + +#[derive(Clone, PartialEq, Eq, Hash)] +struct DynJobKindValueEnum { + name: Interned, + job_kind: DynJobKind, +} + +impl clap::ValueEnum for DynJobKindValueEnum { + fn value_variants<'a>() -> &'a [Self] { + Interned::into_inner( + registry::JobKindRegistrySnapshot::get() + .iter_with_names() + .map(|(name, job_kind)| Self { + name, + job_kind: job_kind.clone(), + }) + .collect(), + ) + } + + fn to_possible_value(&self) -> Option { + Some(clap::builder::PossibleValue::new(Interned::into_inner( + self.name, + ))) + } +} + +impl clap::builder::TypedValueParser for DynJobKindValueParser { + type Value = DynJobKind; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> clap::error::Result { + clap::builder::EnumValueParser::::new() + .parse_ref(cmd, arg, value) + .map(|v| v.job_kind) + } + + fn possible_values( + &self, + ) -> Option + '_>> { + static ENUM_VALUE_PARSER: OnceLock> = + OnceLock::new(); + ENUM_VALUE_PARSER + .get_or_init(clap::builder::EnumValueParser::::new) + .possible_values() + } +} + +impl clap::builder::ValueParserFactory for DynJobKind { + type Parser = DynJobKindValueParser; + + fn value_parser() -> Self::Parser { + DynJobKindValueParser::default() + } +} + +trait DynExtendInternedStr { + fn extend_from_slice(&mut self, items: &[Interned]); +} + +impl Extend> for dyn DynExtendInternedStr + '_ { + fn extend>>(&mut self, iter: T) { + let mut buf = [Interned::default(); 64]; + let mut buf_len = 0; + iter.into_iter().for_each(|item| { + buf[buf_len] = item; + buf_len += 1; + if buf_len == buf.len() { + ::extend_from_slice(self, &buf); + buf_len = 0; + } + }); + if buf_len > 0 { + ::extend_from_slice( + self, + &buf[..buf_len], + ); + } + } +} + +impl>> DynExtendInternedStr for T { + fn extend_from_slice(&mut self, items: &[Interned]) { + self.extend(items.iter().copied()); + } +} + +#[derive(PartialEq, Eq, Hash, Clone)] +struct DynJobArgsInner(JobKindAndArgs); + +impl fmt::Debug for DynJobArgsInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self(JobKindAndArgs { kind, args }) = self; + f.debug_struct("DynJobArgs") + .field("kind", kind) + .field("args", args) + .finish() + } +} + +trait DynJobArgsTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn kind_type_id(&self) -> TypeId; + fn eq_dyn(&self, other: &dyn DynJobArgsTrait) -> bool; + fn hash_dyn(&self, state: &mut 
dyn Hasher); + fn kind(&self) -> DynJobKind; + fn to_args_extend_vec(&self, args: Vec>) -> Vec>; + fn clone_into_arc(&self) -> Arc; + fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()>; + #[track_caller] + fn args_to_jobs( + self: Arc, + dependencies_args: Vec, + params: &JobParams, + ) -> eyre::Result<(DynJob, Vec)>; +} + +impl DynJobArgsTrait for DynJobArgsInner { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn kind_type_id(&self) -> TypeId { + TypeId::of::() + } + + fn eq_dyn(&self, other: &dyn DynJobArgsTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn kind(&self) -> DynJobKind { + DynJobKind::new(self.0.kind) + } + + fn to_args_extend_vec(&self, args: Vec>) -> Vec> { + let mut writer = ArgsWriter(args); + self.0.args.to_args(&mut writer); + writer.0 + } + + fn clone_into_arc(&self) -> Arc { + Arc::new(self.clone()) + } + + fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + clap::FromArgMatches::update_from_arg_matches_mut(&mut self.0.args, matches) + } + + #[track_caller] + fn args_to_jobs( + self: Arc, + dependencies_args: Vec, + params: &JobParams, + ) -> eyre::Result<(DynJob, Vec)> { + let JobAndDependencies { job, dependencies } = JobArgsAndDependencies { + args: Arc::unwrap_or_clone(self).0, + dependencies: K::Dependencies::from_dyn_args(dependencies_args), + } + .args_to_jobs(params)?; + Ok((job.into(), K::Dependencies::into_dyn_jobs(dependencies))) + } +} + +#[derive(Clone)] +pub struct DynJobArgs(Arc); + +impl DynJobArgs { + pub fn new(kind: K, args: K::Args) -> Self { + Self(Arc::new(DynJobArgsInner(JobKindAndArgs { kind, args }))) + } + pub fn kind_type_id(&self) -> TypeId { + DynJobArgsTrait::kind_type_id(&*self.0) + } + pub fn downcast_ref(&self) -> Option<(&K, &K::Args)> { + let DynJobArgsInner::(JobKindAndArgs { kind, args }) = + DynJobArgsTrait::as_any(&*self.0).downcast_ref()?; + Some((kind, args)) + } + pub fn downcast(self) -> Result, Self> { + if self.downcast_ref::().is_some() { + let this = Arc::downcast::>(self.0.as_arc_any()) + .ok() + .expect("already checked type"); + Ok(Arc::unwrap_or_clone(this).0) + } else { + Err(self) + } + } + pub fn kind(&self) -> DynJobKind { + DynJobArgsTrait::kind(&*self.0) + } + pub fn to_args_vec(&self) -> Vec> { + self.to_args_extend_vec(Vec::new()) + } + pub fn to_args_extend_vec(&self, args: Vec>) -> Vec> { + DynJobArgsTrait::to_args_extend_vec(&*self.0, args) + } + fn make_mut(&mut self) -> &mut dyn DynJobArgsTrait { + // can't just return the reference if the first get_mut returns Some since + // as of rustc 1.90.0 this causes a false-positive lifetime error. 
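+        // clone-on-write: copy the inner value only when the `Arc` is shared;
+        // `get_mut` on the now-unique `Arc` is then guaranteed to succeed.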
+ if Arc::get_mut(&mut self.0).is_none() { + self.0 = DynJobArgsTrait::clone_into_arc(&*self.0); + } + Arc::get_mut(&mut self.0).expect("clone_into_arc returns a new arc with a ref-count of 1") + } + pub fn update_from_arg_matches( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + DynJobArgsTrait::update_from_arg_matches(self.make_mut(), matches) + } + pub fn args_to_jobs( + self, + dependencies_args: Vec, + params: &JobParams, + ) -> eyre::Result<(DynJob, Vec)> { + DynJobArgsTrait::args_to_jobs(self.0, dependencies_args, params) + } +} + +impl Hash for DynJobArgs { + fn hash(&self, state: &mut H) { + self.kind_type_id().hash(state); + DynJobArgsTrait::hash_dyn(&*self.0, state); + } +} + +impl PartialEq for DynJobArgs { + fn eq(&self, other: &Self) -> bool { + DynJobArgsTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Eq for DynJobArgs {} + +impl fmt::Debug for DynJobArgs { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +#[derive(PartialEq, Eq, Hash)] +struct DynJobInner { + kind: Arc, + job: T::Job, + inputs: Interned<[JobItemName]>, + outputs: Interned<[JobItemName]>, + external_command_params: Option, +} + +impl> Clone for DynJobInner { + fn clone(&self) -> Self { + Self { + kind: self.kind.clone(), + job: self.job.clone(), + inputs: self.inputs, + outputs: self.outputs, + external_command_params: self.external_command_params, + } + } +} + +impl fmt::Debug for DynJobInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + kind, + job, + inputs, + outputs, + external_command_params, + } = self; + f.debug_struct("DynJob") + .field("kind", kind) + .field("job", job) + .field("inputs", inputs) + .field("outputs", outputs) + .field("external_command_params", external_command_params) + .finish() + } +} + +trait DynJobTrait: 'static + Send + Sync + fmt::Debug { + fn as_any(&self) -> &dyn Any; + fn as_arc_any(self: Arc) -> Arc; + fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool; + fn hash_dyn(&self, state: &mut dyn Hasher); + fn kind_type_id(&self) -> TypeId; + fn kind(&self) -> DynJobKind; + fn inputs(&self) -> Interned<[JobItemName]>; + fn outputs(&self) -> Interned<[JobItemName]>; + fn external_command_params(&self) -> Option; + fn serialize_to_json_ascii(&self) -> serde_json::Result; + fn serialize_to_json_value(&self) -> serde_json::Result; + fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result>; +} + +impl DynJobTrait for DynJobInner { + fn as_any(&self) -> &dyn Any { + self + } + + fn as_arc_any(self: Arc) -> Arc { + self + } + + fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool { + other + .as_any() + .downcast_ref::() + .is_some_and(|other| self == other) + } + + fn hash_dyn(&self, mut state: &mut dyn Hasher) { + self.hash(&mut state); + } + + fn kind_type_id(&self) -> TypeId { + TypeId::of::() + } + + fn kind(&self) -> DynJobKind { + DynJobKind(self.kind.clone()) + } + + fn inputs(&self) -> Interned<[JobItemName]> { + self.inputs + } + + fn outputs(&self) -> Interned<[JobItemName]> { + self.outputs + } + + fn external_command_params(&self) -> Option { + self.external_command_params + } + + fn serialize_to_json_ascii(&self) -> serde_json::Result { + crate::util::serialize_to_json_ascii(&self.job) + } + + fn serialize_to_json_value(&self) -> serde_json::Result { + serde_json::to_value(&self.job) + } + + fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + 
self.kind.run(&self.job, inputs, params, acquired_job) + } +} + +#[derive(Clone, Debug)] +pub struct DynJob(Arc); + +impl DynJob { + pub fn from_arc(job_kind: Arc, job: T::Job) -> Self { + let inputs = job_kind.inputs(&job); + let outputs = job_kind.outputs(&job); + let external_command_params = job_kind.external_command_params(&job); + Self(Arc::new(DynJobInner { + kind: job_kind, + job, + inputs, + outputs, + external_command_params, + })) + } + pub fn new(job_kind: T, job: T::Job) -> Self { + Self::from_arc(Arc::new(job_kind), job) + } + pub fn kind_type_id(&self) -> TypeId { + self.0.kind_type_id() + } + pub fn downcast_ref(&self) -> Option<(&K, &K::Job)> { + let DynJobInner { kind, job, .. } = self.0.as_any().downcast_ref()?; + Some((kind, job)) + } + pub fn downcast>(self) -> Result, Self> { + if self.kind_type_id() == TypeId::of::() { + let DynJobInner { kind, job, .. } = Arc::unwrap_or_clone( + self.0 + .as_arc_any() + .downcast::>() + .expect("already checked type"), + ); + Ok(JobAndKind { kind: *kind, job }) + } else { + Err(self) + } + } + pub fn kind(&self) -> DynJobKind { + DynJobTrait::kind(&*self.0) + } + pub fn inputs(&self) -> Interned<[JobItemName]> { + DynJobTrait::inputs(&*self.0) + } + pub fn outputs(&self) -> Interned<[JobItemName]> { + DynJobTrait::outputs(&*self.0) + } + pub fn serialize_to_json_ascii(&self) -> serde_json::Result { + DynJobTrait::serialize_to_json_ascii(&*self.0) + } + pub fn serialize_to_json_value(&self) -> serde_json::Result { + DynJobTrait::serialize_to_json_value(&*self.0) + } + pub fn external_command_params(&self) -> Option { + DynJobTrait::external_command_params(&*self.0) + } + #[track_caller] + pub fn internal_command_params_with_program_prefix( + &self, + internal_program_prefix: &[Interned], + extra_args: &[Interned], + ) -> CommandParams { + let mut command_line = internal_program_prefix.to_vec(); + let command_line = match RunSingleJob::try_add_subcommand(self, &mut command_line) { + Ok(()) => { + command_line.extend_from_slice(extra_args); + Intern::intern_owned(command_line) + } + Err(e) => panic!("Serializing job {:?} failed: {e}", self.kind().name()), + }; + CommandParams { + command_line, + current_dir: None, + } + } + #[track_caller] + pub fn internal_command_params(&self, extra_args: &[Interned]) -> CommandParams { + self.internal_command_params_with_program_prefix( + &[program_name_for_internal_jobs()], + extra_args, + ) + } + #[track_caller] + pub fn command_params_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + extra_args: &[Interned], + ) -> CommandParams { + match self.external_command_params() { + Some(v) => v, + None => self + .internal_command_params_with_program_prefix(internal_program_prefix, extra_args), + } + } + #[track_caller] + pub fn command_params(&self, extra_args: &[Interned]) -> CommandParams { + self.command_params_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + extra_args, + ) + } + pub fn run( + &self, + inputs: &[JobItem], + params: &JobParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + DynJobTrait::run(&*self.0, inputs, params, acquired_job) + } +} + +impl Eq for DynJob {} + +impl PartialEq for DynJob { + fn eq(&self, other: &Self) -> bool { + DynJobTrait::eq_dyn(&*self.0, &*other.0) + } +} + +impl Hash for DynJob { + fn hash(&self, state: &mut H) { + DynJobTrait::hash_dyn(&*self.0, state); + } +} + +#[derive(Serialize, Deserialize)] +#[serde(rename = "DynJob")] +struct DynJobSerde { + kind: DynJobKind, + job: serde_json::Value, +} + 
+impl Serialize for DynJob { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + DynJobSerde { + kind: self.kind(), + job: self.serialize_to_json_value().map_err(S::Error::custom)?, + } + .serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for DynJob { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let DynJobSerde { kind, job } = Deserialize::deserialize(deserializer)?; + kind.deserialize_job_from_json_value(&job) + .map_err(D::Error::custom) + } +} + +pub trait RunBuild: Sized { + fn main(make_params: F) + where + Self: clap::Parser + Clone, + F: FnOnce(Self, Extra) -> eyre::Result, + { + match Self::try_main(make_params) { + Ok(()) => {} + Err(e) => { + eprintln!("{e:#}"); + std::process::exit(1); + } + } + } + fn try_main(make_params: F) -> eyre::Result<()> + where + Self: clap::Parser + Clone, + F: FnOnce(Self, Extra) -> eyre::Result, + { + let args = Self::parse(); + args.clone() + .run(|extra| make_params(args, extra), Self::command()) + } + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result; +} + +impl RunBuild for JobArgsAndDependencies { + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let params = make_params(NoArgs)?; + self.args_to_jobs(¶ms)?.run(|_| Ok(params), cmd) + } +} + +impl RunBuild for JobAndDependencies { + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let _ = cmd; + let params = make_params(NoArgs)?; + let Self { job, dependencies } = self; + let mut jobs = vec![DynJob::from(job)]; + K::Dependencies::into_dyn_jobs_extend(dependencies, &mut jobs); + let mut job_graph = JobGraph::new(); + job_graph.add_jobs(jobs); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct RunSingleJob { + pub job: DynJob, + pub extra: Extra, +} + +impl RunSingleJob { + pub const SUBCOMMAND_NAME: &'static str = "run-single-job"; + fn try_add_subcommand( + job: &DynJob, + subcommand_line: &mut Vec>, + ) -> serde_json::Result<()> { + let mut json = job.serialize_to_json_ascii()?; + json.insert_str(0, "--json="); + subcommand_line.extend([ + Self::SUBCOMMAND_NAME.intern(), + Intern::intern_owned(format!("--name={}", job.kind().name())), + Intern::intern_owned(json), + ]); + Ok(()) + } +} + +impl TryFrom> for RunSingleJob { + type Error = clap::Error; + + fn try_from(value: RunSingleJobClap) -> Result { + let RunSingleJobClap::RunSingleJob { + name: job_kind, + json, + extra, + } = value; + let name = job_kind.name(); + job_kind + .deserialize_job_from_json_str(&json) + .map_err(|e| { + clap::Error::raw( + clap::error::ErrorKind::ValueValidation, + format_args!("failed to parse job {name} from JSON: {e}"), + ) + }) + .map(|job| Self { job, extra }) + } +} + +#[derive(clap::Subcommand)] +enum RunSingleJobClap { + #[command(name = RunSingleJob::SUBCOMMAND_NAME, hide = true)] + RunSingleJob { + #[arg(long)] + name: DynJobKind, + #[arg(long)] + json: String, + #[command(flatten)] + extra: Extra, + }, +} + +impl clap::Subcommand for RunSingleJob { + fn augment_subcommands(cmd: clap::Command) -> clap::Command { + RunSingleJobClap::::augment_subcommands(cmd) + } + + fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { + RunSingleJobClap::::augment_subcommands(cmd) + } + + fn has_subcommand(name: &str) -> 
bool { + RunSingleJobClap::::has_subcommand(name) + } +} + +impl clap::FromArgMatches for RunSingleJob { + fn from_arg_matches(matches: &clap::ArgMatches) -> clap::error::Result { + RunSingleJobClap::from_arg_matches(matches)?.try_into() + } + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> clap::error::Result { + RunSingleJobClap::from_arg_matches_mut(matches)?.try_into() + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> clap::error::Result<()> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + *self = Self::from_arg_matches_mut(matches)?; + Ok(()) + } +} + +impl RunBuild for RunSingleJob { + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let _ = cmd; + let params = make_params(self.extra)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([self.job]); + job_graph.run(¶ms) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, clap::Subcommand)] +pub enum Completions { + #[non_exhaustive] + Completions { + #[arg(default_value = Self::shell_str_from_env(), required = Self::shell_from_env().is_none())] + shell: clap_complete::aot::Shell, + }, +} + +impl Completions { + pub fn new(shell: clap_complete::aot::Shell) -> Self { + Self::Completions { shell } + } + pub fn from_env() -> Option { + Some(Self::Completions { + shell: Self::shell_from_env()?, + }) + } + fn shell_from_env() -> Option { + static SHELL: OnceLock> = OnceLock::new(); + *SHELL.get_or_init(clap_complete::aot::Shell::from_env) + } + fn shell_str_from_env() -> clap::builder::Resettable { + static SHELL_STR: OnceLock> = OnceLock::new(); + SHELL_STR + .get_or_init(|| Self::shell_from_env().map(|v| v.to_string())) + .as_deref() + .map(Into::into) + .into() + } +} + +impl RunBuild for Completions { + fn run(self, _make_params: F, mut cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(NoArgs) -> eyre::Result, + { + let Self::Completions { shell } = self; + let bin_name = cmd + .get_bin_name() + .map(str::intern) + .unwrap_or_else(|| program_name_for_internal_jobs()); + clap_complete::aot::generate( + shell, + &mut cmd, + &*bin_name, + &mut std::io::BufWriter::new(std::io::stdout().lock()), + ); + Ok(()) + } +} + +#[derive( + clap::Args, + Copy, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Debug, + Default, + Serialize, + Deserialize, +)] +pub struct NoArgs; + +impl ToArgs for NoArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, PartialEq, Eq, Hash, clap::Parser)] +pub enum BuildCli { + #[clap(flatten)] + Job(AnyJobSubcommand), + #[clap(flatten)] + RunSingleJob(RunSingleJob), + #[clap(flatten)] + Completions(Completions), + #[clap(flatten)] + CreateUnixShellScript(CreateUnixShellScript), +} + +impl RunBuild for BuildCli { + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + match self { + BuildCli::Job(v) => v.run(make_params, cmd), + BuildCli::RunSingleJob(v) => v.run(make_params, cmd), + BuildCli::Completions(v) => v.run(|NoArgs {}| unreachable!(), cmd), + BuildCli::CreateUnixShellScript(v) => v.run(make_params, cmd), + } + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Subcommand)] +enum CreateUnixShellScriptInner { + CreateUnixShellScript { + #[arg(name = "i-know-this-is-incomplete", long, required = true, action = ArgAction::SetTrue)] 
+ _incomplete: (), + #[command(subcommand)] + inner: AnyJobSubcommand, + }, +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct CreateUnixShellScript(CreateUnixShellScriptInner); + +impl RunBuild for CreateUnixShellScript { + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let CreateUnixShellScriptInner::CreateUnixShellScript { + _incomplete: (), + inner: + AnyJobSubcommand { + args, + dependencies_args, + extra, + }, + } = self.0; + let extra_args = extra.to_interned_args_vec(); + let params = make_params(extra)?; + let (job, dependencies) = args.args_to_jobs(dependencies_args, ¶ms)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([job].into_iter().chain(dependencies)); + println!( + "{}", + job_graph.to_unix_shell_script_with_internal_program_prefix( + &[cmd + .get_bin_name() + .map(str::intern) + .unwrap_or_else(|| program_name_for_internal_jobs())], + &extra_args, + ) + ); + Ok(()) + } +} + +impl clap::FromArgMatches for CreateUnixShellScript { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + clap::FromArgMatches::from_arg_matches(matches).map(Self) + } + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> Result { + clap::FromArgMatches::from_arg_matches_mut(matches).map(Self) + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + self.0.update_from_arg_matches(matches) + } + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> Result<(), clap::Error> { + self.0.update_from_arg_matches_mut(matches) + } +} + +impl clap::Subcommand for CreateUnixShellScript { + fn augment_subcommands(cmd: clap::Command) -> clap::Command { + CreateUnixShellScriptInner::::augment_subcommands(cmd) + } + + fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { + CreateUnixShellScriptInner::::augment_subcommands_for_update(cmd) + } + + fn has_subcommand(name: &str) -> bool { + CreateUnixShellScriptInner::::has_subcommand(name) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug)] +pub struct AnyJobSubcommand { + pub args: DynJobArgs, + pub dependencies_args: Vec, + pub extra: Extra, +} + +impl AnyJobSubcommand { + pub fn from_subcommand_arg_matches( + job_kind: &DynJobKind, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result { + let dependencies = job_kind.dependencies_kinds(); + let dependencies_args = Result::from_iter( + dependencies + .into_iter() + .map(|dependency| dependency.from_arg_matches(matches)), + )?; + Ok(Self { + args: job_kind.clone().from_arg_matches(matches)?, + dependencies_args, + extra: Extra::from_arg_matches_mut(matches)?, + }) + } + pub fn update_from_subcommand_arg_matches( + &mut self, + job_kind: &DynJobKind, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + let Self { + args, + dependencies_args, + extra, + } = self; + if *job_kind == args.kind() { + for dependency in dependencies_args { + dependency.update_from_arg_matches(matches)?; + } + args.update_from_arg_matches(matches)?; + } else { + let dependencies = job_kind.dependencies_kinds(); + let new_dependencies_args = Result::from_iter( + dependencies + .into_iter() + .map(|dependency| dependency.from_arg_matches(matches)), + )?; + *args = job_kind.clone().from_arg_matches(matches)?; + *dependencies_args = new_dependencies_args; + } + extra.update_from_arg_matches_mut(matches) + } +} + +impl clap::Subcommand for AnyJobSubcommand { + fn augment_subcommands(mut cmd: clap::Command) -> 
clap::Command { + let snapshot = registry::JobKindRegistrySnapshot::get(); + for job_kind in &snapshot { + cmd = cmd.subcommand(Extra::augment_args(job_kind.make_subcommand())); + } + cmd + } + + fn augment_subcommands_for_update(mut cmd: clap::Command) -> clap::Command { + let snapshot = registry::JobKindRegistrySnapshot::get(); + for job_kind in &snapshot { + cmd = cmd.subcommand(Extra::augment_args_for_update( + job_kind.make_subcommand_for_update(), + )); + } + cmd + } + + fn has_subcommand(name: &str) -> bool { + registry::JobKindRegistrySnapshot::get() + .get_by_name(name) + .is_some() + } +} + +impl clap::FromArgMatches for AnyJobSubcommand { + fn from_arg_matches(matches: &clap::ArgMatches) -> clap::error::Result { + Self::from_arg_matches_mut(&mut matches.clone()) + } + + fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> clap::error::Result { + if let Some((name, mut matches)) = matches.remove_subcommand() { + let job_kind_registry_snapshot = registry::JobKindRegistrySnapshot::get(); + if let Some(job_kind) = job_kind_registry_snapshot.get_by_name(&name) { + Self::from_subcommand_arg_matches(job_kind, &mut matches) + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidSubcommand, + format!("the subcommand '{name}' wasn't recognized"), + )) + } + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::MissingSubcommand, + "a subcommand is required but one was not provided", + )) + } + } + + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> clap::error::Result<()> { + Self::update_from_arg_matches_mut(self, &mut matches.clone()) + } + + fn update_from_arg_matches_mut( + &mut self, + matches: &mut clap::ArgMatches, + ) -> clap::error::Result<()> { + if let Some((name, mut matches)) = matches.remove_subcommand() { + let job_kind_registry_snapshot = registry::JobKindRegistrySnapshot::get(); + if let Some(job_kind) = job_kind_registry_snapshot.get_by_name(&name) { + self.update_from_subcommand_arg_matches(job_kind, &mut matches) + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidSubcommand, + format!("the subcommand '{name}' wasn't recognized"), + )) + } + } else { + Err(clap::Error::raw( + clap::error::ErrorKind::MissingSubcommand, + "a subcommand is required but one was not provided", + )) + } + } +} + +impl RunBuild for AnyJobSubcommand { + fn run(self, make_params: F, cmd: clap::Command) -> eyre::Result<()> + where + F: FnOnce(Extra) -> eyre::Result, + { + let _ = cmd; + let Self { + args, + dependencies_args, + extra, + } = self; + let params = make_params(extra)?; + let (job, dependencies) = args.args_to_jobs(dependencies_args, ¶ms)?; + let mut job_graph = JobGraph::new(); + job_graph.add_jobs([job].into_iter().chain(dependencies)); // add all at once to avoid recomputing graph properties multiple times + job_graph.run(¶ms) + } +} + +pub fn program_name_for_internal_jobs() -> Interned { + static PROGRAM_NAME: OnceLock> = OnceLock::new(); + *PROGRAM_NAME + .get_or_init(|| str::intern_owned(std::env::args().next().expect("can't get program name"))) +} + +#[derive(clap::Args, PartialEq, Eq, Hash, Debug, Clone)] +#[group(id = "CreateOutputDir")] +pub struct CreateOutputDirArgs { + /// the directory to put the generated main output file and associated files in + #[arg(short, long, value_hint = clap::ValueHint::DirPath)] + pub output: Option, + #[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")] + pub keep_temp_dir: bool, +} + +impl ToArgs for CreateOutputDirArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let 
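+// Editor sketch of how a build binary might drive `BuildCli` end to end. The
+// generic parameters were lost in this listing, so the bounds here are
+// assumptions: `BuildCli<Extra>`, with `run` taking a closure from the shared
+// `Extra` args to `eyre::Result<JobParams>`; `MyExtra` and `make_job_params`
+// are hypothetical:
+//
+//     use clap::{CommandFactory, Parser};
+//
+//     fn main() -> eyre::Result<()> {
+//         let cli = BuildCli::<MyExtra>::parse();
+//         cli.run(make_job_params, BuildCli::<MyExtra>::command())
+//     }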
Self { + output, + keep_temp_dir, + } = self; + if let Some(output) = output { + args.write_arg(format_args!("--output={output}")); + } + if *keep_temp_dir { + args.write_str_arg("--keep-temp-dir"); + } + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CreateOutputDir { + output_dir: Interned, + #[serde(skip)] + temp_dir: Option>, +} + +impl Eq for CreateOutputDir {} + +impl PartialEq for CreateOutputDir { + fn eq(&self, other: &Self) -> bool { + self.compare_key() == other.compare_key() + } +} + +impl Hash for CreateOutputDir { + fn hash(&self, state: &mut H) { + self.compare_key().hash(state); + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] +pub struct CreateOutputDirJobKind; + +impl JobKind for CreateOutputDirJobKind { + type Args = CreateOutputDirArgs; + type Job = CreateOutputDir; + type Dependencies = (); + + fn dependencies(self) -> Self::Dependencies { + () + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + _params: &JobParams, + ) -> eyre::Result> { + let JobArgsAndDependencies { + args: + JobKindAndArgs { + kind, + args: + CreateOutputDirArgs { + output, + keep_temp_dir, + }, + }, + dependencies: (), + } = args; + let (output_dir, temp_dir) = if let Some(output) = output { + (Intern::intern_owned(output), None) + } else { + // we create the temp dir here rather than in run so other + // jobs can have their paths based on the chosen temp dir + let temp_dir = TempDir::new()?; + let output_dir = temp_dir + .path() + .as_os_str() + .to_str() + .ok_or_else(|| { + std::io::Error::new( + std::io::ErrorKind::InvalidFilename, + format!( + "temporary directory path is not valid UTF-8: {:?}", + temp_dir.path() + ), + ) + })? + .intern(); + let temp_dir = if keep_temp_dir { + // use TempDir::into_path() to no longer automatically delete the temp dir + let temp_dir_path = temp_dir.into_path(); + println!("created temporary directory: {}", temp_dir_path.display()); + None + } else { + Some(Arc::new(temp_dir)) + }; + (output_dir, temp_dir) + }; + Ok(JobAndDependencies { + job: JobAndKind { + kind, + job: CreateOutputDir { + output_dir, + temp_dir, + }, + }, + dependencies: (), + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + Interned::default() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.output_dir, + }][..] + .intern() + } + + fn name(self) -> Interned { + "create-output-dir".intern() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + Some(CommandParams { + command_line: [ + "mkdir".intern(), + "-p".intern(), + "--".intern(), + job.output_dir, + ][..] 
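+        // Editor summary of the output-directory resolution implemented above:
+        //
+        //     --output=DIR                      -> use DIR as-is
+        //     (no --output)                     -> fresh TempDir, auto-deleted on drop
+        //     (no --output) + --keep-temp-dir   -> fresh TempDir, kept and printed
+        //
+        // `--keep-temp-dir` can also be enabled through the FAYALITE_KEEP_TEMP_DIR
+        // environment variable, per the clap `env` attribute on the field above.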
+ .intern(), + current_dir: None, + }) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + let [] = inputs else { + panic!("invalid inputs for CreateOutputDir"); + }; + std::fs::create_dir_all(&*job.output_dir)?; + Ok(vec![JobItem::Path { + path: job.output_dir, + }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +impl CreateOutputDir { + pub fn output_dir(&self) -> Interned { + self.output_dir + } + fn compare_key(&self) -> (&str, bool) { + let Self { + output_dir, + temp_dir, + } = self; + (output_dir, temp_dir.is_some()) + } +} + +#[derive(clap::Args, Debug, Clone, Hash, PartialEq, Eq)] +#[group(id = "BaseJob")] +#[non_exhaustive] +pub struct BaseJobArgs { + /// rather than having CreateOutputDir be a normal dependency, it's nested in BaseJob to avoid a cyclic dependency + #[command(flatten)] + pub create_output_dir_args: CreateOutputDirArgs, + /// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo + #[arg(long)] + pub file_stem: Option, + /// run commands even if their results are already cached + #[arg(long, env = Self::RUN_EVEN_IF_CACHED_ENV_NAME)] + pub run_even_if_cached: bool, +} + +impl BaseJobArgs { + pub const RUN_EVEN_IF_CACHED_ENV_NAME: &'static str = "FAYALITE_RUN_EVEN_IF_CACHED"; + pub fn from_output_dir_and_env(output: String) -> Self { + Self { + create_output_dir_args: CreateOutputDirArgs { + output: Some(output), + keep_temp_dir: false, + }, + file_stem: None, + run_even_if_cached: std::env::var_os(Self::RUN_EVEN_IF_CACHED_ENV_NAME).is_some(), + } + } +} + +impl ToArgs for BaseJobArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + create_output_dir_args, + file_stem, + run_even_if_cached, + } = self; + create_output_dir_args.to_args(args); + if let Some(file_stem) = file_stem { + args.write_arg(format_args!("--file-stem={file_stem}")); + } + if *run_even_if_cached { + args.write_str_arg("--run-even-if-cached"); + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct BaseJob { + /// rather than having CreateOutputDir be a normal dependency, it's nested in BaseJob to avoid a cyclic dependency + #[serde(flatten)] + create_output_dir: CreateOutputDir, + file_stem: Interned, + run_even_if_cached: bool, +} + +impl BaseJob { + pub fn output_dir(&self) -> Interned { + self.create_output_dir.output_dir() + } + pub fn file_stem(&self) -> Interned { + self.file_stem + } + pub fn file_with_ext(&self, ext: &str) -> Interned { + let mut retval = std::path::Path::new(&self.output_dir()).join(self.file_stem()); + retval.set_extension(ext); + intern_known_utf8_path_buf(retval) + } + pub fn run_even_if_cached(&self) -> bool { + self.run_even_if_cached + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Default)] +pub struct BaseJobKind; + +impl JobKind for BaseJobKind { + type Args = BaseJobArgs; + type Job = BaseJob; + type Dependencies = (); + + fn dependencies(self) -> Self::Dependencies { + () + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result> { + let BaseJobArgs { + create_output_dir_args, + file_stem, + run_even_if_cached, + } = args.args.args; + let create_output_dir_args = JobKindAndArgs { + kind: CreateOutputDirJobKind, + args: create_output_dir_args, + }; + let create_output_dir = create_output_dir_args.args_to_jobs((), params)?.job.job; + let file_stem = file_stem + .map(Intern::intern_owned) + 
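+            // Editor note: the default file stem is the main module's name (the
+            // `.unwrap_or(...)` just below). For example, with --output=build and
+            // --file-stem=blinky, `BaseJob::file_with_ext("fir")` yields
+            // "build/blinky.fir" (path join plus `set_extension`, as defined above).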
.unwrap_or(params.main_module().name()); + Ok(JobAndDependencies { + job: JobAndKind { + kind: BaseJobKind, + job: BaseJob { + create_output_dir, + file_stem, + run_even_if_cached, + }, + }, + dependencies: (), + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + CreateOutputDirJobKind.inputs(&job.create_output_dir) + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + CreateOutputDirJobKind.outputs(&job.create_output_dir) + } + + fn name(self) -> Interned { + "base-job".intern() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + CreateOutputDirJobKind.external_command_params(&job.create_output_dir) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + CreateOutputDirJobKind.run(&job.create_output_dir, inputs, params, acquired_job) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +pub trait GetBaseJob { + fn base_job(&self) -> &BaseJob; +} + +impl GetBaseJob for &'_ T { + fn base_job(&self) -> &BaseJob { + T::base_job(self) + } +} + +impl GetBaseJob for &'_ mut T { + fn base_job(&self) -> &BaseJob { + T::base_job(self) + } +} + +impl GetBaseJob for Box { + fn base_job(&self) -> &BaseJob { + T::base_job(self) + } +} + +impl GetBaseJob for BaseJob { + fn base_job(&self) -> &BaseJob { + self + } +} + +impl GetBaseJob for JobAndKind { + fn base_job(&self) -> &BaseJob { + &self.job + } +} + +impl GetBaseJob for JobAndDependencies { + fn base_job(&self) -> &BaseJob { + &self.job.job + } +} + +impl GetBaseJob for JobAndDependencies +where + K::Dependencies: JobDependencies, + ::JobsAndKinds: GetBaseJob, +{ + fn base_job(&self) -> &BaseJob { + self.dependencies.base_job() + } +} + +impl GetBaseJob for (T, U) { + fn base_job(&self) -> &BaseJob { + self.0.base_job() + } +} + +impl GetBaseJob for (T, U, V) { + fn base_job(&self) -> &BaseJob { + self.0.base_job() + } +} diff --git a/crates/fayalite/src/build/external.rs b/crates/fayalite/src/build/external.rs new file mode 100644 index 0000000..021d63d --- /dev/null +++ b/crates/fayalite/src/build/external.rs @@ -0,0 +1,1101 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + ArgsWriter, CommandParams, GetBaseJob, JobAndDependencies, JobAndKind, + JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind, JobKindAndArgs, + JobParams, ToArgs, WriteArgs, intern_known_utf8_path_buf, + }, + intern::{Intern, Interned}, + util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8}, +}; +use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD}; +use clap::builder::OsStringValueParser; +use eyre::{Context, bail, ensure, eyre}; +use serde::{ + Deserialize, Deserializer, Serialize, Serializer, + de::{DeserializeOwned, Error}, +}; +use std::{ + borrow::Cow, + collections::BTreeMap, + ffi::{OsStr, OsString}, + fmt, + hash::{Hash, Hasher}, + io::Write, + marker::PhantomData, + path::{Path, PathBuf}, + sync::OnceLock, +}; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize, Deserialize)] +#[non_exhaustive] +pub enum ExternalJobCacheVersion { + /// not used, used to be for `FormalCacheVersion` + V1, + V2, +} + +impl ExternalJobCacheVersion { + pub const CURRENT: Self = Self::V2; +} + +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] +#[non_exhaustive] +pub enum MaybeUtf8 { + Utf8(String), + Binary(Vec), +} + +impl MaybeUtf8 { + pub fn as_bytes(&self) -> &[u8] { + 
match self {
+            MaybeUtf8::Utf8(v) => v.as_bytes(),
+            MaybeUtf8::Binary(v) => v,
+        }
+    }
+}
+
+#[derive(Serialize, Deserialize)]
+#[serde(rename = "MaybeUtf8")]
+enum MaybeUtf8Serde<'a> {
+    Utf8(Cow<'a, str>),
+    Binary(String),
+}
+
+impl<'de> Deserialize<'de> for MaybeUtf8 {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        Ok(match MaybeUtf8Serde::deserialize(deserializer)? {
+            MaybeUtf8Serde::Utf8(v) => Self::Utf8(v.into_owned()),
+            MaybeUtf8Serde::Binary(v) => BASE64_URL_SAFE_NO_PAD
+                .decode(&*v)
+                .map_err(D::Error::custom)?
+                .into(),
+        })
+    }
+}
+
+impl Serialize for MaybeUtf8 {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            MaybeUtf8::Utf8(v) => MaybeUtf8Serde::Utf8(Cow::Borrowed(v)),
+            MaybeUtf8::Binary(v) => MaybeUtf8Serde::Binary(BASE64_URL_SAFE_NO_PAD.encode(v)),
+        }
+        .serialize(serializer)
+    }
+}
+
+impl From<Vec<u8>> for MaybeUtf8 {
+    fn from(value: Vec<u8>) -> Self {
+        match String::from_utf8(value) {
+            Ok(value) => Self::Utf8(value),
+            Err(e) => Self::Binary(e.into_bytes()),
+        }
+    }
+}
+
+impl From<String> for MaybeUtf8 {
+    fn from(value: String) -> Self {
+        Self::Utf8(value)
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
+#[serde(rename = "ExternalJobCache")]
+pub struct ExternalJobCacheV2 {
+    pub version: ExternalJobCacheVersion,
+    pub inputs_hash: blake3::Hash,
+    pub stdout_stderr: String,
+    pub result: Result<BTreeMap<String, MaybeUtf8>, String>,
+}
+
+impl ExternalJobCacheV2 {
+    fn read_from_file(cache_json_path: Interned<str>) -> eyre::Result<Self> {
+        let cache_str = std::fs::read_to_string(&*cache_json_path)
+            .wrap_err_with(|| format!("can't read {cache_json_path}"))?;
+        serde_json::from_str(&cache_str).wrap_err_with(|| format!("can't decode {cache_json_path}"))
+    }
+    fn write_to_file(&self, cache_json_path: Interned<str>) -> eyre::Result<()> {
+        let cache_str = serde_json::to_string_pretty(&self).expect("serialization can't fail");
+        std::fs::write(&*cache_json_path, cache_str)
+            .wrap_err_with(|| format!("can't write {cache_json_path}"))
+    }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+pub struct ExternalJobCaching {
+    cache_json_path: Interned<str>,
+    run_even_if_cached: bool,
+}
+
+#[derive(Default)]
+struct JobCacheHasher(blake3::Hasher);
+
+impl JobCacheHasher {
+    fn hash_size(&mut self, size: usize) {
+        self.0.update(&u64::to_le_bytes(
+            size.try_into().expect("size should fit in u64"),
+        ));
+    }
+    fn hash_sized_bytes(&mut self, bytes: &[u8]) {
+        self.hash_size(bytes.len());
+        self.0.update(bytes);
+    }
+    fn hash_sized_str(&mut self, s: &str) {
+        self.hash_sized_bytes(s.as_bytes());
+    }
+    fn hash_iter<F: FnMut(&mut Self, I::Item), I: IntoIterator<IntoIter: ExactSizeIterator>>(
+        &mut self,
+        iter: I,
+        mut f: F,
+    ) {
+        let iter = iter.into_iter();
+        self.hash_size(iter.len());
+        iter.for_each(|item| f(self, item));
+    }
+    fn try_hash_iter<
+        F: FnMut(&mut Self, I::Item) -> Result<(), E>,
+        E,
+        I: IntoIterator<IntoIter: ExactSizeIterator>,
+    >(
+        &mut self,
+        iter: I,
+        mut f: F,
+    ) -> Result<(), E> {
+        let mut iter = iter.into_iter();
+        self.hash_size(iter.len());
+        iter.try_for_each(|item| f(self, item))
+    }
+}
+
+fn write_file_atomically_no_clobber<F: FnOnce() -> C, C: AsRef<[u8]>>(
+    path: impl AsRef<Path>,
+    containing_dir: impl AsRef<Path>,
+    contents: F,
+) -> std::io::Result<()> {
+    let path = path.as_ref();
+    let containing_dir = containing_dir.as_ref();
+    if !matches!(std::fs::exists(&path), Ok(true)) {
+        // use File::create_new rather than tempfile's code to get normal file permissions rather than mode 600 on Unix.
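+        // Editor note on the pattern below: the contents are first written to a
+        // uniquely named temp file in the same directory, then linked into place
+        // with `persist_noclobber`, so a partially written file is never
+        // observable at `path`; if a concurrent creator wins the race,
+        // `persist_noclobber` fails rather than overwriting its file.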
+ let mut file = tempfile::Builder::new() + .make_in(containing_dir, |path| std::fs::File::create_new(path))?; + file.write_all(contents().as_ref())?; // write all in one operation to avoid a bunch of tiny writes + file.into_temp_path().persist_noclobber(path)?; + } + Ok(()) +} + +impl ExternalJobCaching { + pub fn get_cache_dir_from_output_dir(output_dir: &str) -> PathBuf { + Path::join(output_dir.as_ref(), ".fayalite-job-cache") + } + pub fn make_cache_dir( + cache_dir: impl AsRef, + application_name: &str, + ) -> std::io::Result<()> { + let cache_dir = cache_dir.as_ref(); + std::fs::create_dir_all(cache_dir)?; + write_file_atomically_no_clobber(cache_dir.join("CACHEDIR.TAG"), cache_dir, || { + format!( + "Signature: 8a477f597d28d172789f06886806bc55\n\ + # This file is a cache directory tag created by {application_name}.\n\ + # For information about cache directory tags see https://bford.info/cachedir/\n" + ) + })?; + write_file_atomically_no_clobber(cache_dir.join(".gitignore"), cache_dir, || { + format!( + "# This is a cache directory created by {application_name}.\n\ + # ignore all files\n\ + *\n" + ) + }) + } + pub fn new( + output_dir: &str, + application_name: &str, + json_file_stem: &str, + run_even_if_cached: bool, + ) -> std::io::Result { + let cache_dir = Self::get_cache_dir_from_output_dir(output_dir); + Self::make_cache_dir(&cache_dir, application_name)?; + let mut cache_json_path = cache_dir; + cache_json_path.push(json_file_stem); + cache_json_path.set_extension("json"); + let cache_json_path = intern_known_utf8_path_buf(cache_json_path); + Ok(Self { + cache_json_path, + run_even_if_cached, + }) + } + fn write_stdout_stderr(stdout_stderr: &str) { + if stdout_stderr == "" { + return; + } + // use print! so output goes to Rust test output capture + if stdout_stderr.ends_with('\n') { + print!("{stdout_stderr}"); + } else { + println!("{stdout_stderr}"); + } + } + /// returns `Err(_)` if reading the cache failed, otherwise returns `Ok(_)` with the results from the cache + fn run_from_cache( + self, + inputs_hash: blake3::Hash, + output_file_paths: impl IntoIterator>, + ) -> Result, ()> { + if self.run_even_if_cached { + return Err(()); + } + let Ok(ExternalJobCacheV2 { + version: ExternalJobCacheVersion::CURRENT, + inputs_hash: cached_inputs_hash, + stdout_stderr, + result, + }) = ExternalJobCacheV2::read_from_file(self.cache_json_path) + else { + return Err(()); + }; + if inputs_hash != cached_inputs_hash { + return Err(()); + } + match result { + Ok(outputs) => { + for output_file_path in output_file_paths { + let Some(output_data) = outputs.get(&*output_file_path) else { + if let Ok(true) = std::fs::exists(&*output_file_path) { + // assume the existing file is the correct one + continue; + } + return Err(()); + }; + let Ok(()) = std::fs::write(&*output_file_path, output_data.as_bytes()) else { + return Err(()); + }; + } + Self::write_stdout_stderr(&stdout_stderr); + Ok(Ok(())) + } + Err(error) => { + Self::write_stdout_stderr(&stdout_stderr); + Ok(Err(error)) + } + } + } + fn make_command( + command_line: Interned<[Interned]>, + ) -> eyre::Result { + ensure!(!command_line.is_empty(), "command line must not be empty"); + let mut cmd = std::process::Command::new(&*command_line[0]); + cmd.args(command_line[1..].iter().map(|arg| &**arg)) + .stdin(std::process::Stdio::null()); + Ok(cmd) + } + pub fn run eyre::Result<()>>( + self, + command_line: Interned<[Interned]>, + input_file_paths: impl IntoIterator>, + output_file_paths: impl IntoIterator> + Clone, + run_fn: F, + ) -> 
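+    // Editor note on the inputs hash computed in the body below: every element
+    // goes through `JobCacheHasher` behind its length (`hash_size` before the
+    // bytes), so concatenation ambiguity cannot collide -- e.g. the argument
+    // lists ["ab", "c"] and ["a", "bc"] hash differently, as do identical bytes
+    // split differently across input files.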
eyre::Result<()> { + let mut hasher = JobCacheHasher::default(); + hasher.hash_iter(command_line.iter(), |hasher, arg| { + hasher.hash_sized_str(arg) + }); + let mut input_file_paths = + Vec::<&str>::from_iter(input_file_paths.into_iter().map(Interned::into_inner)); + input_file_paths.sort_unstable(); + input_file_paths.dedup(); + hasher.try_hash_iter( + &input_file_paths, + |hasher, input_file_path| -> eyre::Result<()> { + hasher.hash_sized_str(input_file_path); + hasher.hash_sized_bytes( + &std::fs::read(input_file_path).wrap_err_with(|| { + format!("can't read job input file: {input_file_path}") + })?, + ); + Ok(()) + }, + )?; + let inputs_hash = hasher.0.finalize(); + match self.run_from_cache(inputs_hash, output_file_paths.clone()) { + Ok(result) => return result.map_err(|e| eyre!(e)), + Err(()) => {} + } + let (pipe_reader, stdout, stderr) = std::io::pipe() + .and_then(|(r, w)| Ok((r, w.try_clone()?, w))) + .wrap_err_with(|| format!("when trying to create a pipe to run: {command_line:?}"))?; + let mut cmd = Self::make_command(command_line)?; + cmd.stdout(stdout).stderr(stderr); + let mut stdout_stderr = String::new(); + let result = std::thread::scope(|scope| { + std::thread::Builder::new() + .name(format!("stdout:{}", command_line[0])) + .spawn_scoped(scope, || { + let _ = streaming_read_utf8(std::io::BufReader::new(pipe_reader), |s| { + stdout_stderr.push_str(s); + // use print! so output goes to Rust test output capture + print!("{s}"); + std::io::Result::Ok(()) + }); + if !stdout_stderr.is_empty() && !stdout_stderr.ends_with('\n') { + println!(); + } + }) + .expect("spawn shouldn't fail"); + run_fn(cmd) + }); + ExternalJobCacheV2 { + version: ExternalJobCacheVersion::CURRENT, + inputs_hash, + stdout_stderr, + result: match &result { + Ok(()) => Ok(Result::from_iter(output_file_paths.into_iter().map( + |output_file_path: Interned| -> eyre::Result<_> { + let output_file_path = &*output_file_path; + Ok(( + String::from(output_file_path), + MaybeUtf8::from(std::fs::read(output_file_path).wrap_err_with( + || format!("can't read job output file: {output_file_path}"), + )?), + )) + }, + ))?), + Err(e) => Err(format!("{e:#}")), + }, + } + .write_to_file(self.cache_json_path)?; + result + } + pub fn run_maybe_cached eyre::Result<()>>( + this: Option, + command_line: Interned<[Interned]>, + input_file_paths: impl IntoIterator>, + output_file_paths: impl IntoIterator> + Clone, + run_fn: F, + ) -> eyre::Result<()> { + match this { + Some(this) => this.run(command_line, input_file_paths, output_file_paths, run_fn), + None => run_fn(Self::make_command(command_line)?), + } + } +} + +#[derive(Clone, Eq, Hash)] +pub struct ExternalCommandJobKind(PhantomData); + +impl fmt::Debug for ExternalCommandJobKind { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ExternalCommandJobKind<{}>", std::any::type_name::()) + } +} + +impl PartialEq for ExternalCommandJobKind { + fn eq(&self, _other: &Self) -> bool { + true + } +} + +impl Ord for ExternalCommandJobKind { + fn cmp(&self, _other: &Self) -> std::cmp::Ordering { + std::cmp::Ordering::Equal + } +} + +impl PartialOrd for ExternalCommandJobKind { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Default for ExternalCommandJobKind { + fn default() -> Self { + Self(PhantomData) + } +} + +impl Copy for ExternalCommandJobKind {} + +impl ExternalCommandJobKind { + pub const fn new() -> Self { + Self(PhantomData) + } +} + +#[derive(Copy, Clone)] +struct 
ExternalProgramPathValueParser(ExternalProgram); + +fn parse_which_result( + which_result: which::Result, + program_name: impl Into, + program_path_arg_name: impl FnOnce() -> String, +) -> Result, ResolveProgramPathError> { + let which_result = match which_result { + Ok(v) => v, + Err(e) => { + return Err(ResolveProgramPathError { + inner: ResolveProgramPathErrorInner::Which(e), + program_name: program_name.into(), + program_path_arg_name: program_path_arg_name(), + }); + } + }; + Ok(str::intern_owned( + which_result + .into_os_string() + .into_string() + .map_err(|_| ResolveProgramPathError { + inner: ResolveProgramPathErrorInner::NotValidUtf8, + program_name: program_name.into(), + program_path_arg_name: program_path_arg_name(), + })?, + )) +} + +impl clap::builder::TypedValueParser for ExternalProgramPathValueParser { + type Value = Interned; + + fn parse_ref( + &self, + cmd: &clap::Command, + arg: Option<&clap::Arg>, + value: &OsStr, + ) -> clap::error::Result { + let program_path_arg_name = self.0.program_path_arg_name; + OsStringValueParser::new() + .try_map(move |program_name| { + parse_which_result(which::which(&program_name), program_name, || { + program_path_arg_name.into() + }) + }) + .parse_ref(cmd, arg, value) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +#[group(id = T::args_group_id())] +#[non_exhaustive] +pub struct ExternalCommandArgs { + #[command(flatten)] + pub program_path: ExternalProgramPath, + #[arg( + name = Interned::into_inner(T::run_even_if_cached_arg_name()), + long = T::run_even_if_cached_arg_name(), + )] + pub run_even_if_cached: bool, + #[command(flatten)] + pub additional_args: T::AdditionalArgs, +} + +#[derive(Clone)] +enum ResolveProgramPathErrorInner { + Which(which::Error), + NotValidUtf8, +} + +impl fmt::Debug for ResolveProgramPathErrorInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Which(v) => v.fmt(f), + Self::NotValidUtf8 => f.write_str("NotValidUtf8"), + } + } +} + +impl fmt::Display for ResolveProgramPathErrorInner { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Which(v) => v.fmt(f), + Self::NotValidUtf8 => f.write_str("path is not valid UTF-8"), + } + } +} + +#[derive(Clone, Debug)] +pub struct ResolveProgramPathError { + inner: ResolveProgramPathErrorInner, + program_name: std::ffi::OsString, + program_path_arg_name: String, +} + +impl fmt::Display for ResolveProgramPathError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + inner, + program_name, + program_path_arg_name, + } = self; + write!( + f, + "{program_path_arg_name}: failed to resolve {program_name:?} to a valid program: {inner}", + ) + } +} + +impl std::error::Error for ResolveProgramPathError {} + +pub fn resolve_program_path( + program_name: Option<&OsStr>, + default_program_name: impl AsRef, + program_path_env_var_name: Option<&OsStr>, +) -> Result, ResolveProgramPathError> { + let default_program_name = default_program_name.as_ref(); + let owned_program_name; + let program_name = if let Some(program_name) = program_name { + program_name + } else if let Some(v) = program_path_env_var_name.and_then(std::env::var_os) { + owned_program_name = v; + &owned_program_name + } else { + default_program_name + }; + parse_which_result(which::which(program_name), program_name, || { + default_program_name.display().to_string() + }) +} + +impl ExternalCommandArgs { + pub fn with_resolved_program_path( + program_path: Interned, + additional_args: T::AdditionalArgs, + 
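+        // Editor note on `resolve_program_path` above, using a hypothetical
+        // program whose default name is `sby`; the lookup order is:
+        //
+        //     1. the explicit argument, if given (e.g. --sby=/opt/sby/bin/sby)
+        //     2. the env var from `program_path_env_var_name` (e.g. $SBY)
+        //     3. the default program name, resolved on $PATH via `which`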
) -> Self { + Self::new( + ExternalProgramPath::with_resolved_program_path(program_path), + additional_args, + ) + } + pub fn new( + program_path: ExternalProgramPath, + additional_args: T::AdditionalArgs, + ) -> Self { + Self { + program_path, + run_even_if_cached: false, + additional_args, + } + } + pub fn resolve_program_path( + program_name: Option<&OsStr>, + additional_args: T::AdditionalArgs, + ) -> Result { + Ok(Self::new( + ExternalProgramPath::resolve_program_path(program_name)?, + additional_args, + )) + } +} + +impl ToArgs for ExternalCommandArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + program_path, + run_even_if_cached, + ref additional_args, + } = *self; + program_path.to_args(args); + if run_even_if_cached { + args.write_arg(format_args!("--{}", T::run_even_if_cached_arg_name())); + } + additional_args.to_args(args); + } +} + +#[derive(Copy, Clone)] +struct ExternalCommandJobParams { + command_params: CommandParams, + inputs: Interned<[JobItemName]>, + outputs: Interned<[JobItemName]>, + output_paths: Interned<[Interned]>, +} + +impl ExternalCommandJobParams { + fn new(job: &ExternalCommandJob) -> Self { + let output_paths = T::output_paths(job); + let mut command_line = ArgsWriter(vec![job.program_path]); + T::command_line_args(job, &mut command_line); + Self { + command_params: CommandParams { + command_line: Intern::intern_owned(command_line.0), + current_dir: T::current_dir(job), + }, + inputs: T::inputs(job), + outputs: output_paths + .iter() + .map(|&path| JobItemName::Path { path }) + .collect(), + output_paths, + } + } +} + +#[derive(Deserialize, Serialize)] +pub struct ExternalCommandJob { + additional_job_data: T::AdditionalJobData, + program_path: Interned, + output_dir: Interned, + run_even_if_cached: bool, + #[serde(skip)] + params_cache: OnceLock, +} + +impl Eq for ExternalCommandJob {} + +impl> Clone for ExternalCommandJob { + fn clone(&self) -> Self { + let Self { + ref additional_job_data, + program_path, + output_dir, + run_even_if_cached, + ref params_cache, + } = *self; + Self { + additional_job_data: additional_job_data.clone(), + program_path, + output_dir, + run_even_if_cached, + params_cache: params_cache.clone(), + } + } +} + +impl fmt::Debug for ExternalCommandJob { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + write!(f, "ExternalCommandJob<{}>", std::any::type_name::())?; + f.debug_struct("") + .field("additional_job_data", additional_job_data) + .field("program_path", program_path) + .field("output_dir", output_dir) + .field("run_even_if_cached", run_even_if_cached) + .finish() + } +} + +impl PartialEq for ExternalCommandJob { + fn eq(&self, other: &Self) -> bool { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + *additional_job_data == other.additional_job_data + && *program_path == other.program_path + && *output_dir == other.output_dir + && *run_even_if_cached == other.run_even_if_cached + } +} + +impl Hash for ExternalCommandJob { + fn hash(&self, state: &mut H) { + let Self { + additional_job_data, + program_path, + output_dir, + run_even_if_cached, + params_cache: _, + } = self; + additional_job_data.hash(state); + program_path.hash(state); + output_dir.hash(state); + run_even_if_cached.hash(state); + } +} + +impl ExternalCommandJob { + pub fn additional_job_data(&self) -> &T::AdditionalJobData { + 
&self.additional_job_data + } + pub fn program_path(&self) -> Interned { + self.program_path + } + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn run_even_if_cached(&self) -> bool { + self.run_even_if_cached + } + fn params(&self) -> &ExternalCommandJobParams { + self.params_cache + .get_or_init(|| ExternalCommandJobParams::new(self)) + } + pub fn command_params(&self) -> CommandParams { + self.params().command_params + } + pub fn inputs(&self) -> Interned<[JobItemName]> { + self.params().inputs + } + pub fn output_paths(&self) -> Interned<[Interned]> { + self.params().output_paths + } + pub fn outputs(&self) -> Interned<[JobItemName]> { + self.params().outputs + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub struct ExternalProgramPath { + program_path: Interned, + _phantom: PhantomData, +} + +impl ExternalProgramPath { + pub fn with_resolved_program_path(program_path: Interned) -> Self { + Self { + program_path, + _phantom: PhantomData, + } + } + pub fn resolve_program_path( + program_name: Option<&OsStr>, + ) -> Result { + let ExternalProgram { + default_program_name, + program_path_arg_name: _, + program_path_arg_value_name: _, + program_path_env_var_name, + } = ExternalProgram::new::(); + Ok(Self { + program_path: resolve_program_path( + program_name, + default_program_name, + program_path_env_var_name.as_ref().map(OsStr::new), + )?, + _phantom: PhantomData, + }) + } + pub fn program_path(&self) -> Interned { + self.program_path + } +} + +impl fmt::Debug for ExternalProgramPath { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + program_path, + _phantom: _, + } = self; + write!(f, "ExternalProgramPath<{}>", std::any::type_name::())?; + f.debug_tuple("").field(program_path).finish() + } +} + +impl clap::FromArgMatches for ExternalProgramPath { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + let id = Interned::into_inner(ExternalProgram::new::().program_path_arg_name); + // don't remove argument so later instances of Self can use it too + let program_path = *matches.get_one(id).expect("arg should always be present"); + Ok(Self { + program_path, + _phantom: PhantomData, + }) + } + + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + *self = Self::from_arg_matches(matches)?; + Ok(()) + } +} + +impl clap::Args for ExternalProgramPath { + fn augment_args(cmd: clap::Command) -> clap::Command { + let external_program @ ExternalProgram { + default_program_name, + program_path_arg_name, + program_path_arg_value_name, + program_path_env_var_name, + } = ExternalProgram::new::(); + let arg = cmd + .get_arguments() + .find(|arg| *arg.get_id().as_str() == *program_path_arg_name); + if let Some(arg) = arg { + // don't insert duplicate arguments. 
+ // check that the previous argument actually matches this argument: + assert!(!arg.is_required_set()); + assert!(matches!(arg.get_action(), clap::ArgAction::Set)); + assert_eq!(arg.get_long(), Some(&*program_path_arg_name)); + assert_eq!( + arg.get_value_names(), + Some(&[clap::builder::Str::from(program_path_arg_value_name)][..]) + ); + assert_eq!( + arg.get_env(), + program_path_env_var_name.as_ref().map(OsStr::new) + ); + assert_eq!( + arg.get_default_values(), + &[OsStr::new(&default_program_name)] + ); + assert_eq!(arg.get_value_hint(), clap::ValueHint::CommandName); + cmd + } else { + cmd.arg( + clap::Arg::new(Interned::into_inner(program_path_arg_name)) + .required(false) + .value_parser(ExternalProgramPathValueParser(external_program)) + .action(clap::ArgAction::Set) + .long(program_path_arg_name) + .value_name(program_path_arg_value_name) + .env(program_path_env_var_name.map(Interned::into_inner)) + .default_value(default_program_name) + .value_hint(clap::ValueHint::CommandName), + ) + } + } + + fn augment_args_for_update(cmd: clap::Command) -> clap::Command { + Self::augment_args(cmd) + } +} + +impl ToArgs for ExternalProgramPath { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let ExternalProgram { + program_path_arg_name, + .. + } = ExternalProgram::new::(); + let Self { + program_path, + _phantom: _, + } = self; + if args.get_long_option_eq(program_path_arg_name) != Some(&**program_path) { + args.write_arg(format_args!("--{program_path_arg_name}={program_path}")); + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[non_exhaustive] +pub struct ExternalProgram { + default_program_name: Interned, + program_path_arg_name: Interned, + program_path_arg_value_name: Interned, + program_path_env_var_name: Option>, +} + +impl ExternalProgram { + pub fn new() -> Self { + Self { + default_program_name: T::default_program_name(), + program_path_arg_name: T::program_path_arg_name(), + program_path_arg_value_name: T::program_path_arg_value_name(), + program_path_env_var_name: T::program_path_env_var_name(), + } + } + pub fn default_program_name(&self) -> Interned { + self.default_program_name + } + pub fn program_path_arg_name(&self) -> Interned { + self.program_path_arg_name + } + pub fn program_path_arg_value_name(&self) -> Interned { + self.program_path_arg_value_name + } + pub fn program_path_env_var_name(&self) -> Option> { + self.program_path_env_var_name + } +} + +impl From for ExternalProgram { + fn from(_value: T) -> Self { + Self::new::() + } +} + +impl From for Interned { + fn from(_value: T) -> Self { + ExternalProgram::new::().intern_sized() + } +} + +pub trait ExternalProgramTrait: + 'static + Send + Sync + Hash + Ord + fmt::Debug + Default + Copy +{ + fn program_path_arg_name() -> Interned { + Self::default_program_name() + } + fn program_path_arg_value_name() -> Interned { + Intern::intern_owned(Self::program_path_arg_name().to_uppercase()) + } + fn default_program_name() -> Interned; + fn program_path_env_var_name() -> Option> { + Some(Intern::intern_owned( + Self::program_path_arg_name() + .to_uppercase() + .replace('-', "_"), + )) + } +} + +pub trait ExternalCommand: 'static + Send + Sync + Hash + Eq + fmt::Debug + Sized + Clone { + type AdditionalArgs: ToArgs; + type AdditionalJobData: 'static + + Send + + Sync + + Hash + + Eq + + fmt::Debug + + Serialize + + DeserializeOwned; + type Dependencies: JobDependencies; + type ExternalProgram: ExternalProgramTrait; + fn dependencies() -> Self::Dependencies; + fn args_to_jobs( + args: 
JobArgsAndDependencies>, + params: &JobParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )>; + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]>; + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]>; + fn command_line_args(job: &ExternalCommandJob, args: &mut W); + fn current_dir(job: &ExternalCommandJob) -> Option>; + fn job_kind_name() -> Interned; + fn args_group_id() -> clap::Id { + Interned::into_inner(Self::job_kind_name()).into() + } + fn run_even_if_cached_arg_name() -> Interned { + Intern::intern_owned(format!("{}-run-even-if-cached", Self::job_kind_name())) + } + fn subcommand_hidden() -> bool { + false + } +} + +impl JobKind for ExternalCommandJobKind { + type Args = ExternalCommandArgs; + type Job = ExternalCommandJob; + type Dependencies = T::Dependencies; + + fn dependencies(self) -> Self::Dependencies { + T::dependencies() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result> { + let JobKindAndArgs { + kind, + args: + ExternalCommandArgs { + program_path: + ExternalProgramPath { + program_path, + _phantom: _, + }, + run_even_if_cached, + additional_args: _, + }, + } = args.args; + let (additional_job_data, dependencies) = T::args_to_jobs(args, params)?; + let base_job = dependencies.base_job(); + let job = ExternalCommandJob { + additional_job_data, + program_path, + output_dir: base_job.output_dir(), + run_even_if_cached: base_job.run_even_if_cached() | run_even_if_cached, + params_cache: OnceLock::new(), + }; + job.params(); // fill cache + Ok(JobAndDependencies { + job: JobAndKind { kind, job }, + dependencies, + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + job.inputs() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + job.outputs() + } + + fn name(self) -> Interned { + T::job_kind_name() + } + + fn external_command_params(self, job: &Self::Job) -> Option { + Some(job.command_params()) + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!( + inputs.iter().map(JobItem::name).eq(job.inputs()), + "{}\ninputs:\n{inputs:?}\njob.inputs():\n{:?}", + std::any::type_name::(), + job.inputs(), + ); + let CommandParams { + command_line, + current_dir, + } = job.command_params(); + ExternalJobCaching::new( + &job.output_dir, + ¶ms.application_name(), + &T::job_kind_name(), + job.run_even_if_cached, + )? 
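+        // Editor note: the `.run(...)` call just below consults and updates the
+        // cache file at {output_dir}/.fayalite-job-cache/{job_kind_name}.json --
+        // e.g. build/.fayalite-job-cache/formal.json for the `formal` job with
+        // --output=build. `make_cache_dir` marks the directory with CACHEDIR.TAG
+        // and a catch-all .gitignore, per the helpers above.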
+ .run( + command_line, + inputs + .iter() + .flat_map(|item| match item { + JobItem::Path { path } => std::slice::from_ref(path), + JobItem::DynamicPaths { + paths, + source_job_name: _, + } => paths, + }) + .copied(), + job.output_paths(), + |mut cmd| { + if let Some(current_dir) = current_dir { + cmd.current_dir(current_dir); + } + let status = acquired_job.run_command(cmd, |cmd| cmd.status())?; + if !status.success() { + bail!("running {command_line:?} failed: {status}") + } + Ok(()) + }, + )?; + Ok(job + .output_paths() + .iter() + .map(|&path| JobItem::Path { path }) + .collect()) + } + + fn subcommand_hidden(self) -> bool { + T::subcommand_hidden() + } + + fn external_program(self) -> Option> { + Some(ExternalProgram::new::().intern_sized()) + } +} diff --git a/crates/fayalite/src/build/firrtl.rs b/crates/fayalite/src/build/firrtl.rs new file mode 100644 index 0000000..62657f8 --- /dev/null +++ b/crates/fayalite/src/build/firrtl.rs @@ -0,0 +1,125 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + BaseJob, BaseJobKind, CommandParams, DynJobKind, JobAndDependencies, + JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, + ToArgs, WriteArgs, + }, + firrtl::{ExportOptions, FileBackend}, + intern::{Intern, Interned}, + util::job_server::AcquiredJob, +}; +use clap::Args; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] +pub struct FirrtlJobKind; + +#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)] +#[group(id = "Firrtl")] +#[non_exhaustive] +pub struct FirrtlArgs { + #[command(flatten)] + pub export_options: ExportOptions, +} + +impl ToArgs for FirrtlArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { export_options } = self; + export_options.to_args(args); + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct Firrtl { + base: BaseJob, + export_options: ExportOptions, +} + +impl Firrtl { + fn make_firrtl_file_backend(&self) -> FileBackend { + FileBackend { + dir_path: PathBuf::from(&*self.base.output_dir()), + top_fir_file_stem: Some(String::from(&*self.base.file_stem())), + circuit_name: None, + } + } + pub fn firrtl_file(&self) -> Interned { + self.base.file_with_ext("fir") + } +} + +impl JobKind for FirrtlJobKind { + type Args = FirrtlArgs; + type Job = Firrtl; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + JobKindAndDependencies::new(BaseJobKind) + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result> { + args.args_to_jobs_simple( + params, + |_kind, FirrtlArgs { export_options }, dependencies| { + Ok(Firrtl { + base: dependencies.job.job.clone(), + export_options, + }) + }, + ) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.base.output_dir(), + }][..] + .intern() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.firrtl_file(), + }][..] 
+        .intern()
+    }
+
+    fn name(self) -> Interned<str> {
+        "firrtl".intern()
+    }
+
+    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
+        None
+    }
+
+    fn run(
+        self,
+        job: &Self::Job,
+        inputs: &[JobItem],
+        params: &JobParams,
+        _acquired_job: &mut AcquiredJob,
+    ) -> eyre::Result<Vec<JobItem>> {
+        let [JobItem::Path { path: input_path }] = *inputs else {
+            panic!("wrong inputs, expected a single `Path`");
+        };
+        assert_eq!(input_path, job.base.output_dir());
+        crate::firrtl::export(
+            job.make_firrtl_file_backend(),
+            params.main_module(),
+            job.export_options,
+        )?;
+        Ok(vec![JobItem::Path {
+            path: job.firrtl_file(),
+        }])
+    }
+}
+
+pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
+    [DynJobKind::new(FirrtlJobKind)]
+}
diff --git a/crates/fayalite/src/build/formal.rs b/crates/fayalite/src/build/formal.rs
new file mode 100644
index 0000000..a289c81
--- /dev/null
+++ b/crates/fayalite/src/build/formal.rs
@@ -0,0 +1,410 @@
+// SPDX-License-Identifier: LGPL-3.0-or-later
+// See Notices.txt for copyright information
+
+use crate::{
+    build::{
+        CommandParams, DynJobKind, GetBaseJob, JobAndDependencies, JobArgsAndDependencies,
+        JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs,
+        WriteArgs,
+        external::{
+            ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
+        },
+        interned_known_utf8_method,
+        verilog::{VerilogDialect, VerilogJob, VerilogJobKind},
+    },
+    intern::{Intern, Interned},
+    module::NameId,
+    util::job_server::AcquiredJob,
+};
+use clap::{Args, ValueEnum};
+use eyre::Context;
+use serde::{Deserialize, Serialize};
+use std::fmt;
+
+#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize)]
+#[non_exhaustive]
+pub enum FormalMode {
+    #[default]
+    BMC,
+    Prove,
+    Live,
+    Cover,
+}
+
+impl FormalMode {
+    pub fn as_str(self) -> &'static str {
+        match self {
+            FormalMode::BMC => "bmc",
+            FormalMode::Prove => "prove",
+            FormalMode::Live => "live",
+            FormalMode::Cover => "cover",
+        }
+    }
+}
+
+impl fmt::Display for FormalMode {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(self.as_str())
+    }
+}
+
+#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
+#[non_exhaustive]
+pub struct FormalArgs {
+    #[arg(long = "sby-extra-arg", value_name = "ARG")]
+    pub sby_extra_args: Vec<String>,
+    #[arg(long, default_value_t)]
+    pub formal_mode: FormalMode,
+    #[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
+    pub formal_depth: u64,
+    #[arg(long, default_value = Self::DEFAULT_SOLVER)]
+    pub formal_solver: String,
+    #[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
+    pub smtbmc_extra_args: Vec<String>,
+}
+
+impl FormalArgs {
+    pub const DEFAULT_DEPTH: u64 = 20;
+    pub const DEFAULT_SOLVER: &'static str = "z3";
+}
+
+impl ToArgs for FormalArgs {
+    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
+        let Self {
+            sby_extra_args,
+            formal_mode,
+            formal_depth,
+            formal_solver,
+            smtbmc_extra_args,
+        } = self;
+        args.extend(
+            sby_extra_args
+                .iter()
+                .map(|v| format!("--sby-extra-arg={v}")),
+        );
+        args.extend([
+            format_args!("--formal-mode={formal_mode}"),
+            format_args!("--formal-depth={formal_depth}"),
+            format_args!("--formal-solver={formal_solver}"),
+        ]);
+        args.extend(
+            smtbmc_extra_args
+                .iter()
+                .map(|v| format!("--smtbmc-extra-arg={v}")),
+        );
+    }
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+pub struct WriteSbyFileJobKind;
+
+#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
+pub struct WriteSbyFileJob {
sby_extra_args: Interned<[Interned]>, + formal_mode: FormalMode, + formal_depth: u64, + formal_solver: Interned, + smtbmc_extra_args: Interned<[Interned]>, + sby_file: Interned, + output_dir: Interned, + main_verilog_file: Interned, +} + +impl WriteSbyFileJob { + pub fn sby_extra_args(&self) -> Interned<[Interned]> { + self.sby_extra_args + } + pub fn formal_mode(&self) -> FormalMode { + self.formal_mode + } + pub fn formal_depth(&self) -> u64 { + self.formal_depth + } + pub fn formal_solver(&self) -> Interned { + self.formal_solver + } + pub fn smtbmc_extra_args(&self) -> Interned<[Interned]> { + self.smtbmc_extra_args + } + pub fn sby_file(&self) -> Interned { + self.sby_file + } + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + fn write_sby( + &self, + output: &mut W, + additional_files: &[Interned], + main_module_name_id: NameId, + ) -> Result, fmt::Error> { + let Self { + sby_extra_args: _, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + sby_file: _, + output_dir: _, + main_verilog_file, + } = self; + write!( + output, + "[options]\n\ + mode {formal_mode}\n\ + depth {formal_depth}\n\ + wait on\n\ + \n\ + [engines]\n\ + smtbmc {formal_solver} -- --" + )?; + for i in smtbmc_extra_args { + output.write_str(" ")?; + output.write_str(i)?; + } + output.write_str( + "\n\ + \n\ + [script]\n", + )?; + let all_verilog_files = + match VerilogJob::all_verilog_files(*main_verilog_file, additional_files) { + Ok(v) => v, + Err(e) => return Ok(Err(e)), + }; + for verilog_file in all_verilog_files { + writeln!(output, "read_verilog -sv -formal \"{verilog_file}\"")?; + } + let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id); + // workaround for wires disappearing -- set `keep` on all wires + writeln!( + output, + "hierarchy -top {circuit_name}\n\ + proc\n\ + setattr -set keep 1 w:\\*\n\ + prep", + )?; + Ok(Ok(())) + } +} + +impl JobKind for WriteSbyFileJobKind { + type Args = FormalArgs; + type Job = WriteSbyFileJob; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + mut args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result> { + args.dependencies + .dependencies + .args + .args + .additional_args + .verilog_dialect + .get_or_insert(VerilogDialect::Yosys); + args.args_to_jobs_simple(params, |_kind, args, dependencies| { + let FormalArgs { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + } = args; + Ok(WriteSbyFileJob { + sby_extra_args: sby_extra_args.into_iter().map(str::intern_owned).collect(), + formal_mode, + formal_depth, + formal_solver: str::intern_owned(formal_solver), + smtbmc_extra_args: smtbmc_extra_args + .into_iter() + .map(str::intern_owned) + .collect(), + sby_file: dependencies.base_job().file_with_ext("sby"), + output_dir: dependencies.base_job().output_dir(), + main_verilog_file: dependencies.job.job.main_verilog_file(), + }) + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }][..] 
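+    // Editor sketch of the .sby file produced by `write_sby` above for
+    // --formal-mode=bmc --formal-depth=20 --formal-solver=z3, no smtbmc extra
+    // args, a single Verilog file build/top.v, and a circuit named `top` (all
+    // names illustrative):
+    //
+    //     [options]
+    //     mode bmc
+    //     depth 20
+    //     wait on
+    //
+    //     [engines]
+    //     smtbmc z3 -- --
+    //
+    //     [script]
+    //     read_verilog -sv -formal "build/top.v"
+    //     hierarchy -top top
+    //     proc
+    //     setattr -set keep 1 w:\*
+    //     prep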
+ .intern() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { path: job.sby_file }][..].intern() + } + + fn name(self) -> Interned { + "write-sby-file".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let [additional_files] = inputs else { + unreachable!(); + }; + let additional_files = VerilogJob::unwrap_additional_files(additional_files); + let mut contents = String::new(); + match job.write_sby( + &mut contents, + additional_files, + params.main_module().name_id(), + ) { + Ok(result) => result?, + Err(fmt::Error) => unreachable!("writing to String can't fail"), + } + std::fs::write(job.sby_file, contents) + .wrap_err_with(|| format!("writing {} failed", job.sby_file))?; + Ok(vec![JobItem::Path { path: job.sby_file }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)] +pub struct Formal { + #[serde(flatten)] + write_sby_file: WriteSbyFileJob, + sby_file_name: Interned, +} + +impl fmt::Debug for Formal { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + write_sby_file: + WriteSbyFileJob { + sby_extra_args, + formal_mode, + formal_depth, + formal_solver, + smtbmc_extra_args, + sby_file, + output_dir: _, + main_verilog_file, + }, + sby_file_name, + } = self; + f.debug_struct("Formal") + .field("sby_extra_args", sby_extra_args) + .field("formal_mode", formal_mode) + .field("formal_depth", formal_depth) + .field("formal_solver", formal_solver) + .field("smtbmc_extra_args", smtbmc_extra_args) + .field("sby_file", sby_file) + .field("sby_file_name", sby_file_name) + .field("main_verilog_file", main_verilog_file) + .finish_non_exhaustive() + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Symbiyosys; + +impl ExternalProgramTrait for Symbiyosys { + fn default_program_name() -> Interned { + "sby".intern() + } +} + +#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)] +pub struct FormalAdditionalArgs {} + +impl ToArgs for FormalAdditionalArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +impl ExternalCommand for Formal { + type AdditionalArgs = FormalAdditionalArgs; + type AdditionalJobData = Formal; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Symbiyosys; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, |args, dependencies| { + let FormalAdditionalArgs {} = args.additional_args; + Ok(Formal { + write_sby_file: dependencies.job.job.clone(), + sby_file_name: interned_known_utf8_method(dependencies.job.job.sby_file(), |v| { + v.file_name().expect("known to have file name") + }), + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [ + JobItemName::Path { + path: job.additional_job_data().write_sby_file.sby_file(), + }, + JobItemName::Path { + path: job.additional_job_data().write_sby_file.main_verilog_file(), + }, + JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }, + ][..] 
+ .intern() + } + + fn output_paths(_job: &ExternalCommandJob) -> Interned<[Interned]> { + Interned::default() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + // args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode + args.write_str_arg("-f"); + args.write_interned_arg(job.additional_job_data().sby_file_name); + args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args()); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "formal".intern() + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(WriteSbyFileJobKind), + DynJobKind::new(ExternalCommandJobKind::::new()), + ] +} diff --git a/crates/fayalite/src/build/graph.rs b/crates/fayalite/src/build/graph.rs new file mode 100644 index 0000000..0cf54d5 --- /dev/null +++ b/crates/fayalite/src/build/graph.rs @@ -0,0 +1,801 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{DynJob, JobItem, JobItemName, JobParams, program_name_for_internal_jobs}, + intern::Interned, + util::{HashMap, HashSet, job_server::AcquiredJob}, +}; +use eyre::{ContextCompat, eyre}; +use petgraph::{ + algo::{DfsSpace, kosaraju_scc, toposort}, + graph::DiGraph, + visit::{GraphBase, Visitable}, +}; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq}; +use std::{ + cell::OnceCell, + collections::{BTreeMap, BTreeSet, VecDeque}, + fmt::{self, Write}, + panic, + rc::Rc, + sync::mpsc, + thread::{self, ScopedJoinHandle}, +}; + +macro_rules! write_str { + ($s:expr, $($rest:tt)*) => { + write!($s, $($rest)*).expect("String::write_fmt can't fail") + }; +} + +#[derive(Clone, Debug)] +enum JobGraphNode { + Job(DynJob), + Item { + #[allow(dead_code, reason = "name used for debugging")] + name: JobItemName, + source_job: Option, + }, +} + +type JobGraphInner = DiGraph; + +#[derive(Clone, Default)] +pub struct JobGraph { + jobs: HashMap::NodeId>, + items: HashMap::NodeId>, + graph: JobGraphInner, + topological_order: Vec<::NodeId>, + space: DfsSpace<::NodeId, ::Map>, +} + +impl fmt::Debug for JobGraph { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + jobs: _, + items: _, + graph, + topological_order, + space: _, + } = self; + f.debug_struct("JobGraph") + .field("graph", graph) + .field("topological_order", topological_order) + .finish_non_exhaustive() + } +} + +#[derive(Clone, Debug)] +pub enum JobGraphError { + CycleError { + job: DynJob, + output: JobItemName, + }, + MultipleJobsCreateSameOutput { + output_item: JobItemName, + existing_job: DynJob, + new_job: DynJob, + }, +} + +impl std::error::Error for JobGraphError {} + +impl fmt::Display for JobGraphError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::CycleError { job, output } => write!( + f, + "job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\ + {output:?}\n\ + job:\n{job:?}", + ), + JobGraphError::MultipleJobsCreateSameOutput { + output_item, + existing_job, + new_job, + } => write!( + f, + "job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\ + conflicting output:\n\ + {output_item:?}\n\ + existing job:\n\ + {existing_job:?}\n\ + new job:\n\ + {new_job:?}", + ), + } + } +} + +#[derive(Copy, Clone, Debug)] +enum EscapeForUnixShellState { + 
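+    // Editor note on the escaper below (behavior read off the iterator
+    // implementation; examples illustrative):
+    //
+    //     abc     ->  abc            (Unquoted: no special bytes)
+    //     a b     ->  'a b'          (SingleQuote: space forces quoting)
+    //     it's    ->  'it'\''s'      (SingleQuote: embedded quote spliced out)
+    //     a\nb    ->  $'a\nb'        (DollarSingleQuote: control characters)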
DollarSingleQuote, + SingleQuote, + Unquoted, +} + +#[derive(Clone)] +pub struct EscapeForUnixShell<'a> { + state: EscapeForUnixShellState, + prefix: [u8; 3], + bytes: &'a [u8], +} + +impl<'a> fmt::Debug for EscapeForUnixShell<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl<'a> fmt::Display for EscapeForUnixShell<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + for c in self.clone() { + f.write_char(c)?; + } + Ok(()) + } +} + +impl<'a> EscapeForUnixShell<'a> { + pub fn new(s: &'a str) -> Self { + Self::from_bytes(s.as_bytes()) + } + fn make_prefix(bytes: &[u8]) -> [u8; 3] { + let mut prefix = [0; 3]; + prefix[..bytes.len()].copy_from_slice(bytes); + prefix + } + pub fn from_bytes(bytes: &'a [u8]) -> Self { + let mut needs_single_quote = bytes.is_empty(); + for &b in bytes { + match b { + b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true, + 0..0x20 | 0x7F.. => { + return Self { + state: EscapeForUnixShellState::DollarSingleQuote, + prefix: Self::make_prefix(b"$'"), + bytes, + }; + } + _ => {} + } + } + if needs_single_quote { + Self { + state: EscapeForUnixShellState::SingleQuote, + prefix: Self::make_prefix(b"'"), + bytes, + } + } else { + Self { + state: EscapeForUnixShellState::Unquoted, + prefix: Self::make_prefix(b""), + bytes, + } + } + } +} + +impl Iterator for EscapeForUnixShell<'_> { + type Item = char; + + fn next(&mut self) -> Option { + match &mut self.prefix { + [0, 0, 0] => {} + [0, 0, v] | // find first + [0, v, _] | // non-zero byte + [v, _, _] => { + let retval = *v as char; + *v = 0; + return Some(retval); + } + } + let Some(&next_byte) = self.bytes.split_off_first() else { + return match self.state { + EscapeForUnixShellState::DollarSingleQuote + | EscapeForUnixShellState::SingleQuote => { + self.state = EscapeForUnixShellState::Unquoted; + Some('\'') + } + EscapeForUnixShellState::Unquoted => None, + }; + }; + match self.state { + EscapeForUnixShellState::DollarSingleQuote => match next_byte { + b'\'' | b'\\' => { + self.prefix = Self::make_prefix(&[next_byte]); + Some('\\') + } + b'\t' => { + self.prefix = Self::make_prefix(b"t"); + Some('\\') + } + b'\n' => { + self.prefix = Self::make_prefix(b"n"); + Some('\\') + } + b'\r' => { + self.prefix = Self::make_prefix(b"r"); + Some('\\') + } + 0x20..=0x7E => Some(next_byte as char), + _ => { + self.prefix = [ + b'x', + char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range") + as u8, + char::from_digit(next_byte as u32 & 0xF, 0x10) + .expect("known to be in range") as u8, + ]; + Some('\\') + } + }, + EscapeForUnixShellState::SingleQuote => { + if next_byte == b'\'' { + self.prefix = Self::make_prefix(b"\\''"); + Some('\'') + } else { + Some(next_byte as char) + } + } + EscapeForUnixShellState::Unquoted => match next_byte { + b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b',' + | b';' | b'<' | b'>' | b'?' 
| b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|' + | b'}' | b'~' => { + self.prefix = Self::make_prefix(&[next_byte]); + Some('\\') + } + _ => Some(next_byte as char), + }, + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[non_exhaustive] +pub enum UnixMakefileEscapeKind { + NonRecipe, + RecipeWithoutShellEscaping, + RecipeWithShellEscaping, +} + +#[derive(Copy, Clone)] +pub struct EscapeForUnixMakefile<'a> { + s: &'a str, + kind: UnixMakefileEscapeKind, +} + +impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Display::fmt(self, f) + } +} + +impl<'a> fmt::Display for EscapeForUnixMakefile<'a> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.do_write(f, fmt::Write::write_str, fmt::Write::write_char, |_, _| { + Ok(()) + }) + } +} + +impl<'a> EscapeForUnixMakefile<'a> { + fn do_write( + &self, + state: &mut S, + write_str: impl Fn(&mut S, &str) -> Result<(), E>, + write_char: impl Fn(&mut S, char) -> Result<(), E>, + add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>, + ) -> Result<(), E> { + let escape_recipe_char = |c| match c { + '$' => write_str(state, "$$"), + '\0'..='\x1F' | '\x7F' => { + panic!("can't escape a control character for Unix Makefile: {c:?}"); + } + _ => write_char(state, c), + }; + match self.kind { + UnixMakefileEscapeKind::NonRecipe => self.s.chars().try_for_each(|c| match c { + '=' => { + add_variable(state, "EQUALS = =")?; + write_str(state, "$(EQUALS)") + } + ';' => panic!("can't escape a semicolon (;) for Unix Makefile"), + '$' => write_str(state, "$$"), + '\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => { + write_char(state, '\\')?; + write_char(state, c) + } + '\0'..='\x1F' | '\x7F' => { + panic!("can't escape a control character for Unix Makefile: {c:?}"); + } + _ => write_char(state, c), + }), + UnixMakefileEscapeKind::RecipeWithoutShellEscaping => { + self.s.chars().try_for_each(escape_recipe_char) + } + UnixMakefileEscapeKind::RecipeWithShellEscaping => { + EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char) + } + } + } + pub fn new( + s: &'a str, + kind: UnixMakefileEscapeKind, + needed_variables: &mut BTreeSet<&'static str>, + ) -> Self { + let retval = Self { s, kind }; + let Ok(()) = retval.do_write( + needed_variables, + |_, _| Ok(()), + |_, _| Ok(()), + |needed_variables, variable| -> Result<(), std::convert::Infallible> { + needed_variables.insert(variable); + Ok(()) + }, + ); + retval + } +} + +impl JobGraph { + pub fn new() -> Self { + Self::default() + } + fn try_add_item_node( + &mut self, + name: JobItemName, + new_source_job: Option, + new_nodes: &mut HashSet<::NodeId>, + ) -> Result<::NodeId, JobGraphError> { + use hashbrown::hash_map::Entry; + match self.items.entry(name) { + Entry::Occupied(item_entry) => { + let node_id = *item_entry.get(); + let JobGraphNode::Item { + name: _, + source_job, + } = &mut self.graph[node_id] + else { + unreachable!("known to be an item"); + }; + if let Some(new_source_job) = new_source_job { + if let Some(source_job) = source_job { + return Err(JobGraphError::MultipleJobsCreateSameOutput { + output_item: item_entry.key().clone(), + existing_job: source_job.clone(), + new_job: new_source_job, + }); + } else { + *source_job = Some(new_source_job); + } + } + Ok(node_id) + } + Entry::Vacant(item_entry) => { + let node_id = self.graph.add_node(JobGraphNode::Item { + name, + source_job: new_source_job, + }); + new_nodes.insert(node_id); + item_entry.insert(node_id); + 
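
A minimal sketch of what the EscapeForUnixShell iterator above produces; the fayalite::build::graph import path is assumed from this diff's file location:

    use fayalite::build::graph::EscapeForUnixShell;

    fn main() {
        // plain strings pass through unquoted
        assert_eq!(EscapeForUnixShell::new("abc").to_string(), "abc");
        // an embedded ' forces single-quoting, spelled '\'' mid-string
        assert_eq!(EscapeForUnixShell::new("it's").to_string(), r"'it'\''s'");
        // control characters force $'...' quoting with backslash escapes
        assert_eq!(EscapeForUnixShell::new("a\tb").to_string(), r"$'a\tb'");
    }
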
Ok(node_id) + } + } + } + pub fn try_add_jobs>( + &mut self, + jobs: I, + ) -> Result<(), JobGraphError> { + use hashbrown::hash_map::Entry; + let jobs = jobs.into_iter(); + struct RemoveNewNodesOnError<'a> { + this: &'a mut JobGraph, + new_nodes: HashSet<::NodeId>, + } + impl Drop for RemoveNewNodesOnError<'_> { + fn drop(&mut self) { + for node in self.new_nodes.drain() { + self.this.graph.remove_node(node); + } + } + } + let mut remove_new_nodes_on_error = RemoveNewNodesOnError { + this: self, + new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()), + }; + let new_nodes = &mut remove_new_nodes_on_error.new_nodes; + let this = &mut *remove_new_nodes_on_error.this; + for job in jobs { + let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else { + continue; + }; + let job_node_id = this + .graph + .add_node(JobGraphNode::Job(job_entry.key().clone())); + new_nodes.insert(job_node_id); + job_entry.insert(job_node_id); + for name in job.outputs() { + let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?; + this.graph.add_edge(job_node_id, item_node_id, ()); + } + for name in job.inputs() { + let item_node_id = this.try_add_item_node(name, None, new_nodes)?; + this.graph.add_edge(item_node_id, job_node_id, ()); + } + } + match toposort(&this.graph, Some(&mut this.space)) { + Ok(v) => { + this.topological_order = v; + // no need to remove any of the new nodes on drop since we didn't encounter any errors + remove_new_nodes_on_error.new_nodes.clear(); + Ok(()) + } + Err(_) => { + // there's at least one cycle, find one! + let cycle = kosaraju_scc(&this.graph) + .into_iter() + .find_map(|scc| { + if scc.len() <= 1 { + // can't be a cycle since our graph is bipartite -- + // jobs only connect to items, never jobs to jobs or items to items + None + } else { + Some(scc) + } + }) + .expect("we know there's a cycle"); + let cycle_set = HashSet::from_iter(cycle.iter().copied()); + let job = cycle + .into_iter() + .find_map(|node_id| { + if let JobGraphNode::Job(job) = &this.graph[node_id] { + Some(job.clone()) + } else { + None + } + }) + .expect("a job must be part of the cycle"); + let output = job + .outputs() + .into_iter() + .find(|output| cycle_set.contains(&this.items[output])) + .expect("an output must be part of the cycle"); + Err(JobGraphError::CycleError { job, output }) + } + } + } + #[track_caller] + pub fn add_jobs>(&mut self, jobs: I) { + match self.try_add_jobs(jobs) { + Ok(()) => {} + Err(e) => panic!("error: {e}"), + } + } + pub fn to_unix_makefile(&self, extra_args: &[Interned]) -> String { + self.to_unix_makefile_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + extra_args, + ) + } + pub fn to_unix_makefile_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + extra_args: &[Interned], + ) -> String { + let mut retval = String::new(); + let mut needed_variables = BTreeSet::new(); + let mut phony_targets = BTreeSet::new(); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let outputs = job.outputs(); + if outputs.is_empty() { + retval.push_str(":"); + } else { + for output in job.outputs() { + match output { + JobItemName::Path { path } => { + write_str!( + retval, + "{} ", + EscapeForUnixMakefile::new( + &path, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + ) + ); + } + JobItemName::DynamicPaths { source_job_name } => { + write_str!( + retval, + "{} ", + EscapeForUnixMakefile::new( + 
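
A note on the cycle search in try_add_jobs above: because the graph is bipartite (edges only run job to output item, or input item to job), any strongly connected component with more than one node must contain a job together with one of its outputs, which is exactly the pair reported by JobGraphError::CycleError. Typical usage is simply (sketch; constructing the DynJob values is elided):

    let mut graph = JobGraph::new();
    graph.try_add_jobs(jobs)?; // jobs: any IntoIterator<Item = DynJob>
    // on a dependency cycle this returns Err(JobGraphError::CycleError { job, output })
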
&source_job_name, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + ) + ); + phony_targets.insert(Interned::into_inner(source_job_name)); + } + } + } + if outputs.len() == 1 { + retval.push_str(":"); + } else { + retval.push_str("&:"); + } + } + for input in job.inputs() { + match input { + JobItemName::Path { path } => { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + &path, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + ) + ); + } + JobItemName::DynamicPaths { source_job_name } => { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + &source_job_name, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + ) + ); + phony_targets.insert(Interned::into_inner(source_job_name)); + } + } + } + retval.push_str("\n\t"); + job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args) + .to_unix_shell_line(&mut retval, |arg, output| { + write!( + output, + "{}", + EscapeForUnixMakefile::new( + arg, + UnixMakefileEscapeKind::RecipeWithShellEscaping, + &mut needed_variables + ) + ) + }) + .expect("writing to String never fails"); + retval.push_str("\n\n"); + } + if !phony_targets.is_empty() { + retval.push_str("\n.PHONY:"); + for phony_target in phony_targets { + write_str!( + retval, + " {}", + EscapeForUnixMakefile::new( + phony_target, + UnixMakefileEscapeKind::NonRecipe, + &mut needed_variables + ) + ); + } + retval.push_str("\n"); + } + if !needed_variables.is_empty() { + retval.insert_str( + 0, + &String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))), + ); + } + retval + } + pub fn to_unix_shell_script(&self, extra_args: &[Interned]) -> String { + self.to_unix_shell_script_with_internal_program_prefix( + &[program_name_for_internal_jobs()], + extra_args, + ) + } + pub fn to_unix_shell_script_with_internal_program_prefix( + &self, + internal_program_prefix: &[Interned], + extra_args: &[Interned], + ) -> String { + let mut retval = String::from( + "#!/bin/sh\n\ + set -ex\n", + ); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args) + .to_unix_shell_line(&mut retval, |arg, output| { + write!(output, "{}", EscapeForUnixShell::new(&arg)) + }) + .expect("writing to String never fails"); + retval.push_str("\n"); + } + retval + } + pub fn run(&self, params: &JobParams) -> eyre::Result<()> { + // use scope to auto-join threads on errors + thread::scope(|scope| { + struct WaitingJobState { + job_node_id: ::NodeId, + job: DynJob, + inputs: BTreeMap>, + } + let mut ready_jobs = VecDeque::new(); + let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default(); + for &node_id in &self.topological_order { + let JobGraphNode::Job(job) = &self.graph[node_id] else { + continue; + }; + let waiting_job = WaitingJobState { + job_node_id: node_id, + job: job.clone(), + inputs: job + .inputs() + .iter() + .map(|&name| (name, OnceCell::new())) + .collect(), + }; + if waiting_job.inputs.is_empty() { + ready_jobs.push_back(waiting_job); + } else { + let waiting_job = Rc::new(waiting_job); + for &input_item in waiting_job.inputs.keys() { + item_name_to_waiting_jobs_map + .entry(input_item) + .or_default() + .push(waiting_job.clone()); + } + } + } + struct RunningJob<'scope> { + job: DynJob, + thread: ScopedJoinHandle<'scope, eyre::Result>>, + } + let mut running_jobs = HashMap::default(); + let (finished_jobs_sender, finished_jobs_receiver) = 
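
For reference, to_unix_shell_script above always emits a fixed header plus one escaped command line per job in topological order, while the Makefile exporter writes one rule per job, using grouped targets (&:) when a job has several outputs and collecting DynamicPaths pseudo-targets into .PHONY. The script therefore has this shape (sketch with placeholder command lines):

    #!/bin/sh
    set -ex
    <escaped command line of the first job in topological order>
    <escaped command line of the next job>
    ...
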
mpsc::channel();
+ loop {
+ while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() {
+ let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
+ else {
+ unreachable!();
+ };
+ let output_items = thread.join().map_err(panic::resume_unwind)??;
+ assert!(
+ output_items.iter().map(JobItem::name).eq(job.outputs()),
+ "job's run() method returned the wrong output items:\n\
+ output items:\n\
+ {output_items:?}\n\
+ expected outputs:\n\
+ {:?}\n\
+ job:\n\
+ {job:?}",
+ job.outputs(),
+ );
+ for output_item in output_items {
+ for waiting_job in item_name_to_waiting_jobs_map
+ .remove(&output_item.name())
+ .unwrap_or_default()
+ {
+ let Ok(()) =
+ waiting_job.inputs[&output_item.name()].set(output_item.clone())
+ else {
+ unreachable!();
+ };
+ if let Some(waiting_job) = Rc::into_inner(waiting_job) {
+ ready_jobs.push_back(waiting_job);
+ }
+ }
+ }
+ }
+ if let Some(WaitingJobState {
+ job_node_id,
+ job,
+ inputs,
+ }) = ready_jobs.pop_front()
+ {
+ struct RunningJobInThread<'a> {
+ job_node_id: <JobGraphInner as GraphBase>::NodeId,
+ job: DynJob,
+ inputs: Vec<JobItem>,
+ params: &'a JobParams,
+ acquired_job: AcquiredJob,
+ finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
+ }
+ impl RunningJobInThread<'_> {
+ fn run(mut self) -> eyre::Result<Vec<JobItem>> {
+ self.job
+ .run(&self.inputs, self.params, &mut self.acquired_job)
+ }
+ }
+ impl Drop for RunningJobInThread<'_> {
+ fn drop(&mut self) {
+ let _ = self.finished_jobs_sender.send(self.job_node_id);
+ }
+ }
+ let name = job.kind().name();
+ let running_job_in_thread = RunningJobInThread {
+ job_node_id,
+ job: job.clone(),
+ inputs: Result::from_iter(job.inputs().iter().map(|input_name| {
+ inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| {
+ eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
+ })
+ }))?,
+ params,
+ acquired_job: AcquiredJob::acquire()?,
+ finished_jobs_sender: finished_jobs_sender.clone(),
+ };
+ running_jobs.insert(
+ job_node_id,
+ RunningJob {
+ job,
+ thread: thread::Builder::new()
+ .name(format!("job:{name}"))
+ .spawn_scoped(scope, move || running_job_in_thread.run())
+ .expect("failed to spawn thread for job"),
+ },
+ );
+ }
+ if running_jobs.is_empty() {
+ assert!(item_name_to_waiting_jobs_map.is_empty());
+ assert!(ready_jobs.is_empty());
+ return Ok(());
+ }
+ }
+ })
+ }
+}
+
+impl Extend<DynJob> for JobGraph {
+ #[track_caller]
+ fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
+ self.add_jobs(iter);
+ }
+}
+
+impl FromIterator<DynJob> for JobGraph {
+ #[track_caller]
+ fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
+ let mut retval = Self::new();
+ retval.add_jobs(iter);
+ retval
+ }
+}
+
+impl Serialize for JobGraph {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
+ for &node_id in &self.topological_order {
+ let JobGraphNode::Job(job) = &self.graph[node_id] else {
+ continue;
+ };
+ serializer.serialize_element(job)?;
+ }
+ serializer.end()
+ }
+}
+
+impl<'de> Deserialize<'de> for JobGraph {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: Deserializer<'de>,
+ {
+ let jobs = Vec::<DynJob>::deserialize(deserializer)?;
+ let mut retval = JobGraph::new();
+ retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
+ Ok(retval)
+ }
+}
diff --git a/crates/fayalite/src/build/registry.rs b/crates/fayalite/src/build/registry.rs
new file mode 100644
index 0000000..ccb401f
--- /dev/null
+++ b/crates/fayalite/src/build/registry.rs
@@ -0,0 +1,341 @@
+// SPDX-License-Identifier: LGPL-3.0-or-later
+// See Notices.txt for copyright information
+
+use crate::{
+ build::{DynJobKind, JobKind, built_in_job_kinds},
+ intern::Interned,
+};
+use std::{
+ borrow::Borrow,
+ cmp::Ordering,
+ collections::BTreeMap,
+ fmt,
+ sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
+};
+
+impl DynJobKind {
+ pub fn registry() -> JobKindRegistrySnapshot {
+ JobKindRegistrySnapshot(JobKindRegistry::get())
+ }
+ #[track_caller]
+ pub fn register(self) {
+ JobKindRegistry::register(JobKindRegistry::lock(), self);
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq)]
+struct InternedStrCompareAsStr(Interned<str>);
+
+impl fmt::Debug for InternedStrCompareAsStr {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl Ord for InternedStrCompareAsStr {
+ fn cmp(&self, other: &Self) -> Ordering {
+ str::cmp(&self.0, &other.0)
+ }
+}
+
+impl PartialOrd for InternedStrCompareAsStr {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Borrow<str> for InternedStrCompareAsStr {
+ fn borrow(&self) -> &str {
+ &self.0
+ }
+}
+
+#[derive(Clone, Debug)]
+struct JobKindRegistry {
+ job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
+}
+
+enum JobKindRegisterError {
+ SameName {
+ name: InternedStrCompareAsStr,
+ old_job_kind: DynJobKind,
+ new_job_kind: DynJobKind,
+ },
+}
+
+impl fmt::Display for JobKindRegisterError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::SameName {
+ name,
+ old_job_kind,
+ new_job_kind,
+ } => write!(
+ f,
+ "two different `JobKind` can't share the same name:\n\
+ {name:?}\n\
+ old job kind:\n\
+ {old_job_kind:?}\n\
+ new job kind:\n\
+ {new_job_kind:?}",
+ ),
+ }
+ }
+}
+
+trait JobKindRegistryRegisterLock {
+ type Locked;
+ fn lock(self) -> Self::Locked;
+ fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
+}
+
+impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
+ type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
+ fn lock(self) -> Self::Locked {
+ self.write().expect("shouldn't be poisoned")
+ }
+ fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
+ Arc::make_mut(locked)
+ }
+}
+
+impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
+ type Locked = Self;
+
+ fn lock(self) -> Self::Locked {
+ self
+ }
+
+ fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
+ locked
+ }
+}
+
+impl JobKindRegistry {
+ fn lock() -> &'static RwLock<Arc<JobKindRegistry>> {
+ static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
+ REGISTRY.get_or_init(Default::default)
+ }
+ fn try_register<L: JobKindRegistryRegisterLock>(
+ lock: L,
+ job_kind: DynJobKind,
+ ) -> Result<(), JobKindRegisterError> {
+ use std::collections::btree_map::Entry;
+ let name = InternedStrCompareAsStr(job_kind.name());
+ // run user code only outside of lock
+ let mut locked = lock.lock();
+ let this = L::make_mut(&mut locked);
+ let result = match this.job_kinds.entry(name) {
+ Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
+ name,
+ old_job_kind: entry.get().clone(),
+ new_job_kind: job_kind,
+ }),
+ Entry::Vacant(entry) => {
+ entry.insert(job_kind);
+ Ok(())
+ }
+ };
+ drop(locked);
+ // outside of lock now, so we can test if it's the same DynJobKind
+ match result {
+ Err(JobKindRegisterError::SameName {
+ name: _,
+ old_job_kind,
+ new_job_kind,
+ }) if old_job_kind == new_job_kind => Ok(()),
+ result => result,
+ }
+ }
+ #[track_caller]
+ fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
+ match Self::try_register(lock, job_kind) {
+ Err(e) => panic!("{e}"),
+ Ok(()) => {}
+ }
+ }
+ fn get() -> Arc<JobKindRegistry> {
+ Self::lock().read().expect("shouldn't be poisoned").clone()
+ }
+}
+
+impl Default 
for JobKindRegistry { + fn default() -> Self { + let mut retval = Self { + job_kinds: BTreeMap::new(), + }; + for job_kind in built_in_job_kinds() { + Self::register(&mut retval, job_kind); + } + retval + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistrySnapshot(Arc); + +impl JobKindRegistrySnapshot { + pub fn get() -> Self { + JobKindRegistrySnapshot(JobKindRegistry::get()) + } + pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> { + self.0.job_kinds.get(name) + } + pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> { + JobKindRegistryIterWithNames(self.0.job_kinds.iter()) + } + pub fn iter(&self) -> JobKindRegistryIter<'_> { + JobKindRegistryIter(self.0.job_kinds.values()) + } +} + +impl<'a> IntoIterator for &'a JobKindRegistrySnapshot { + type Item = &'a DynJobKind; + type IntoIter = JobKindRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot { + type Item = &'a DynJobKind; + type IntoIter = JobKindRegistryIter<'a>; + + fn into_iter(self) -> Self::IntoIter { + self.iter() + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistryIter<'a>( + std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>, +); + +impl<'a> Iterator for JobKindRegistryIter<'a> { + type Item = &'a DynJobKind; + + fn next(&mut self) -> Option { + self.0.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last() + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {} + +impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {} + +impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> { + fn next_back(&mut self) -> Option { + self.0.next_back() + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0.nth_back(n) + } + + fn rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0.rfold(init, f) + } +} + +#[derive(Clone, Debug)] +pub struct JobKindRegistryIterWithNames<'a>( + std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>, +); + +impl<'a> Iterator for JobKindRegistryIterWithNames<'a> { + type Item = (Interned, &'a DynJobKind); + + fn next(&mut self) -> Option { + self.0.next().map(|(name, job_kind)| (name.0, job_kind)) + } + + fn size_hint(&self) -> (usize, Option) { + self.0.size_hint() + } + + fn count(self) -> usize + where + Self: Sized, + { + self.0.count() + } + + fn last(self) -> Option { + self.0.last().map(|(name, job_kind)| (name.0, job_kind)) + } + + fn nth(&mut self, n: usize) -> Option { + self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind)) + } + + fn fold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, job_kind)| (name.0, job_kind)) + .fold(init, f) + } +} + +impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {} + +impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {} + +impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> { + fn next_back(&mut self) -> Option { + self.0 + .next_back() + .map(|(name, job_kind)| (name.0, job_kind)) + } + + fn nth_back(&mut self, n: usize) -> Option { + self.0 + .nth_back(n) + .map(|(name, job_kind)| (name.0, job_kind)) + } + + fn 
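
Since DynJobKind::registry() returns an Arc-backed snapshot, lookups and iteration above see a consistent view even while other threads keep registering kinds; for example:

    let registry = DynJobKind::registry();
    if let Some(_kind) = registry.get_by_name("verilog") {
        // "verilog" is one of the kinds registered by the Default impl above
    }
    for (name, _kind) in registry.iter_with_names() {
        println!("{name:?}");
    }
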
rfold(self, init: B, f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + self.0 + .map(|(name, job_kind)| (name.0, job_kind)) + .rfold(init, f) + } +} + +#[track_caller] +pub fn register_job_kind(kind: K) { + DynJobKind::new(kind).register(); +} diff --git a/crates/fayalite/src/build/vendor.rs b/crates/fayalite/src/build/vendor.rs new file mode 100644 index 0000000..56297cf --- /dev/null +++ b/crates/fayalite/src/build/vendor.rs @@ -0,0 +1,8 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +pub mod xilinx; + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + xilinx::built_in_job_kinds() +} diff --git a/crates/fayalite/src/build/vendor/xilinx.rs b/crates/fayalite/src/build/vendor/xilinx.rs new file mode 100644 index 0000000..71889e0 --- /dev/null +++ b/crates/fayalite/src/build/vendor/xilinx.rs @@ -0,0 +1,20 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::intern::Interned; + +pub mod yosys_nextpnr_prjxray; + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct XdcIOStandardAnnotation { + pub value: Interned, +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct XdcLocationAnnotation { + pub location: Interned, +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + yosys_nextpnr_prjxray::built_in_job_kinds() +} diff --git a/crates/fayalite/src/build/vendor/xilinx/yosys_nextpnr_prjxray.rs b/crates/fayalite/src/build/vendor/xilinx/yosys_nextpnr_prjxray.rs new file mode 100644 index 0000000..3db256a --- /dev/null +++ b/crates/fayalite/src/build/vendor/xilinx/yosys_nextpnr_prjxray.rs @@ -0,0 +1,910 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + annotations::Annotation, + build::{ + CommandParams, DynJobKind, GetBaseJob, JobAndDependencies, JobArgsAndDependencies, + JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, ToArgs, WriteArgs, + external::{ + ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait, + }, + interned_known_utf8_method, interned_known_utf8_path_buf_method, + vendor::xilinx::{XdcIOStandardAnnotation, XdcLocationAnnotation}, + verilog::{VerilogDialect, VerilogJob, VerilogJobKind}, + }, + bundle::Bundle, + firrtl::{ScalarizedModuleABI, ScalarizedModuleABIAnnotations, ScalarizedModuleABIPort}, + intern::{Intern, Interned}, + module::{Module, NameId}, + prelude::JobParams, + util::job_server::AcquiredJob, +}; +use clap::ValueEnum; +use eyre::Context; +use serde::{Deserialize, Serialize}; +use std::{fmt, ops::ControlFlow}; + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)] +pub struct YosysNextpnrXrayWriteYsFileJobKind; + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct YosysNextpnrXrayWriteYsFileArgs {} + +impl ToArgs for YosysNextpnrXrayWriteYsFileArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +pub struct YosysNextpnrXrayWriteYsFile { + main_verilog_file: Interned, + ys_file: Interned, + json_file: Interned, + json_file_name: Interned, +} + +impl YosysNextpnrXrayWriteYsFile { + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + pub fn ys_file(&self) -> Interned { + self.ys_file + } + pub fn json_file(&self) -> Interned { + self.json_file + } + pub fn json_file_name(&self) -> Interned { + self.json_file_name + } + 
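
These two annotation types are what the XDC-writing job further down translates into set_property lines. Attaching them to a top-level port might look roughly like this (hypothetical sketch: the annotate call and the led port are assumptions, not part of this diff; G6/LVCMOS33 mirror the hardcoded Arty values used later):

    // hypothetical usage inside an #[hdl] module body
    annotate(led, XdcLocationAnnotation { location: "G6".intern() });
    annotate(led, XdcIOStandardAnnotation { value: "LVCMOS33".intern() });
    // which the write-xdc-file job would render as:
    //   set_property LOC G6 [get_ports led]
    //   set_property IOSTANDARD LVCMOS33 [get_ports led]
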
fn write_ys( + &self, + output: &mut W, + additional_files: &[Interned], + main_module_name_id: NameId, + ) -> Result, fmt::Error> { + let Self { + main_verilog_file, + ys_file: _, + json_file: _, + json_file_name, + } = self; + let all_verilog_files = + match VerilogJob::all_verilog_files(*main_verilog_file, additional_files) { + Ok(v) => v, + Err(e) => return Ok(Err(e)), + }; + for verilog_file in all_verilog_files { + writeln!(output, "read_verilog -sv \"{verilog_file}\"")?; + } + let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id); + writeln!( + output, + "synth_xilinx -flatten -abc9 -nobram -arch xc7 -top {circuit_name}" + )?; + writeln!(output, "write_json \"{json_file_name}\"")?; + Ok(Ok(())) + } +} + +impl JobKind for YosysNextpnrXrayWriteYsFileJobKind { + type Args = YosysNextpnrXrayWriteYsFileArgs; + type Job = YosysNextpnrXrayWriteYsFile; + type Dependencies = JobKindAndDependencies; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + mut args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result> { + args.dependencies + .dependencies + .args + .args + .additional_args + .verilog_dialect + .get_or_insert(VerilogDialect::Yosys); + args.args_to_jobs_simple(params, |_kind, args, dependencies| { + let YosysNextpnrXrayWriteYsFileArgs {} = args; + let json_file = dependencies.base_job().file_with_ext("json"); + Ok(YosysNextpnrXrayWriteYsFile { + main_verilog_file: dependencies.job.job.main_verilog_file(), + ys_file: dependencies.base_job().file_with_ext("ys"), + json_file, + json_file_name: interned_known_utf8_method(json_file, |v| { + v.file_name().expect("known to have file name") + }), + }) + }) + } + + fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }][..] 
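
Given write_ys above, the generated synthesis script for a circuit named blinky comes out shaped like this (sketch; the quoted Verilog paths are the absolute ones returned by all_verilog_files):

    read_verilog -sv "/path/to/output/blinky.v"
    synth_xilinx -flatten -abc9 -nobram -arch xc7 -top blinky
    write_json "blinky.json"
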
+ .intern() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { path: job.ys_file }][..].intern() + } + + fn name(self) -> Interned { + "yosys-nextpnr-xray-write-ys-file".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let [additional_files] = inputs else { + unreachable!(); + }; + let additional_files = VerilogJob::unwrap_additional_files(additional_files); + let mut contents = String::new(); + match job.write_ys( + &mut contents, + additional_files, + params.main_module().name_id(), + ) { + Ok(result) => result?, + Err(fmt::Error) => unreachable!("writing to String can't fail"), + } + std::fs::write(job.ys_file, contents) + .wrap_err_with(|| format!("writing {} failed", job.ys_file))?; + Ok(vec![JobItem::Path { path: job.ys_file }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct YosysNextpnrXraySynthArgs {} + +impl ToArgs for YosysNextpnrXraySynthArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] +pub struct YosysNextpnrXraySynth { + #[serde(flatten)] + write_ys_file: YosysNextpnrXrayWriteYsFile, + ys_file_name: Interned, +} + +impl fmt::Debug for YosysNextpnrXraySynth { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + write_ys_file: + YosysNextpnrXrayWriteYsFile { + main_verilog_file, + ys_file, + json_file, + json_file_name, + }, + ys_file_name, + } = self; + f.debug_struct("YosysNextpnrXraySynth") + .field("main_verilog_file", main_verilog_file) + .field("ys_file", ys_file) + .field("ys_file_name", ys_file_name) + .field("json_file", json_file) + .field("json_file_name", json_file_name) + .finish() + } +} + +impl YosysNextpnrXraySynth { + pub fn main_verilog_file(&self) -> Interned { + self.write_ys_file.main_verilog_file() + } + pub fn ys_file(&self) -> Interned { + self.write_ys_file.ys_file() + } + pub fn ys_file_name(&self) -> Interned { + self.ys_file_name + } + pub fn json_file(&self) -> Interned { + self.write_ys_file.json_file() + } + pub fn json_file_name(&self) -> Interned { + self.write_ys_file.json_file_name() + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Yosys; + +impl ExternalProgramTrait for Yosys { + fn default_program_name() -> Interned { + "yosys".intern() + } +} + +impl ExternalCommand for YosysNextpnrXraySynth { + type AdditionalArgs = YosysNextpnrXraySynthArgs; + type AdditionalJobData = Self; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Yosys; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, |args, dependencies| { + let YosysNextpnrXraySynthArgs {} = args.additional_args; + Ok(Self { + write_ys_file: dependencies.job.job.clone(), + ys_file_name: interned_known_utf8_method(dependencies.job.job.ys_file(), |v| { + v.file_name().expect("known to have file name") + }), + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [ + 
JobItemName::Path { + path: job.additional_job_data().ys_file(), + }, + JobItemName::Path { + path: job.additional_job_data().main_verilog_file(), + }, + JobItemName::DynamicPaths { + source_job_name: VerilogJobKind.name(), + }, + ][..] + .intern() + } + + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]> { + [job.additional_job_data().json_file()][..].intern() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + args.write_str_arg("-s"); + args.write_interned_arg(job.additional_job_data().ys_file_name()); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "yosys-nextpnr-xray-synth".intern() + } + + fn subcommand_hidden() -> bool { + true + } +} + +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)] +pub struct YosysNextpnrXrayWriteXdcFileJobKind; + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct YosysNextpnrXrayWriteXdcFileArgs {} + +impl ToArgs for YosysNextpnrXrayWriteXdcFileArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +pub struct YosysNextpnrXrayWriteXdcFile { + firrtl_export_options: crate::firrtl::ExportOptions, + output_dir: Interned, + xdc_file: Interned, +} + +struct WriteXdcContentsError(eyre::Report); + +impl From for WriteXdcContentsError { + fn from(v: eyre::Report) -> Self { + Self(v) + } +} + +impl From for WriteXdcContentsError { + fn from(_v: fmt::Error) -> Self { + unreachable!("String write can't fail") + } +} + +fn tcl_escape(s: impl AsRef) -> String { + let s = s.as_ref(); + let mut retval = String::with_capacity(s.len().saturating_add(2)); + retval.push('"'); + for ch in s.chars() { + if let '$' | '\\' | '[' = ch { + retval.push('\\'); + } + retval.push(ch); + } + retval.push('"'); + retval +} + +impl YosysNextpnrXrayWriteXdcFile { + fn write_xdc_contents_for_port_and_annotations( + &self, + output: &mut impl fmt::Write, + port: &ScalarizedModuleABIPort, + annotations: ScalarizedModuleABIAnnotations<'_>, + ) -> Result<(), WriteXdcContentsError> { + for annotation in annotations { + match annotation.annotation() { + Annotation::DontTouch(_) + | Annotation::SVAttribute(_) + | Annotation::BlackBoxInline(_) + | Annotation::BlackBoxPath(_) + | Annotation::DocString(_) + | Annotation::CustomFirrtl(_) => {} + Annotation::XdcLocation(XdcLocationAnnotation { location }) => writeln!( + output, + "set_property LOC {} [get_ports {}]", + tcl_escape(location), + tcl_escape(port.scalarized_name()) + )?, + Annotation::XdcIOStandard(XdcIOStandardAnnotation { value }) => writeln!( + output, + "set_property IOSTANDARD {} [get_ports {}]", + tcl_escape(value), + tcl_escape(port.scalarized_name()) + )?, + } + } + Ok(()) + } + fn write_xdc_contents( + &self, + output: &mut String, + top_module: &Module, + ) -> eyre::Result<()> { + let scalarized_module_abi = + ScalarizedModuleABI::new(top_module, self.firrtl_export_options) + .map_err(eyre::Report::from)?; + match scalarized_module_abi.for_each_port_and_annotations(|port, annotations| { + match self.write_xdc_contents_for_port_and_annotations(output, port, annotations) { + Ok(()) => ControlFlow::Continue(()), + Err(e) => ControlFlow::Break(e), + } + }) { + ControlFlow::Continue(()) => Ok(()), + ControlFlow::Break(e) => Err(e.0), + } + } +} + +impl JobKind for YosysNextpnrXrayWriteXdcFileJobKind { + type Args = YosysNextpnrXrayWriteXdcFileArgs; + type Job = 
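
(On tcl_escape above: inside a double-quoted Tcl word only $, \, and [ need escaping; a lone ] without an opening [ is inert, which is why it is left alone.) For instance:

    assert_eq!(tcl_escape("G6"), r#""G6""#);
    assert_eq!(tcl_escape("io[3]"), r#""io\[3]""#);
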
YosysNextpnrXrayWriteXdcFile; + type Dependencies = JobKindAndDependencies>; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + ) -> eyre::Result> { + let firrtl_export_options = args + .dependencies + .dependencies + .dependencies + .dependencies + .dependencies + .args + .args + .export_options; + args.args_to_jobs_simple(params, |_kind, args, dependencies| { + let YosysNextpnrXrayWriteXdcFileArgs {} = args; + Ok(YosysNextpnrXrayWriteXdcFile { + firrtl_export_options, + output_dir: dependencies.base_job().output_dir(), + xdc_file: dependencies.base_job().file_with_ext("xdc"), + }) + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.output_dir, + }][..] + .intern() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { path: job.xdc_file }][..].intern() + } + + fn name(self) -> Interned { + "yosys-nextpnr-xray-write-xdc-file".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + params: &JobParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let mut xdc = String::new(); + job.write_xdc_contents(&mut xdc, params.main_module())?; + // TODO: create actual .xdc from input module + std::fs::write( + job.xdc_file, + r"# autogenerated +set_property LOC G6 [get_ports led] +set_property IOSTANDARD LVCMOS33 [get_ports led] +set_property LOC E3 [get_ports clk] +set_property IOSTANDARD LVCMOS33 [get_ports clk] +set_property LOC C2 [get_ports rst] +set_property IOSTANDARD LVCMOS33 [get_ports rst] +", + )?; + Ok(vec![JobItem::Path { path: job.xdc_file }]) + } + + fn subcommand_hidden(self) -> bool { + true + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct NextpnrXilinx; + +impl ExternalProgramTrait for NextpnrXilinx { + fn default_program_name() -> Interned { + "nextpnr-xilinx".intern() + } +} + +macro_rules! 
make_device_enum {
+ ($vis:vis enum $Device:ident {
+ $(
+ #[
+ name = $name:literal,
+ xray_part = $xray_part:literal,
+ xray_device = $xray_device:literal,
+ xray_family = $xray_family:literal,
+ ]
+ $variant:ident,
+ )*
+ }) => {
+ #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, ValueEnum)]
+ $vis enum $Device {
+ $(
+ #[value(name = $name, alias = $xray_part)]
+ $variant,
+ )*
+ }
+
+ impl $Device {
+ $vis fn as_str(self) -> &'static str {
+ match self {
+ $(Self::$variant => $name,)*
+ }
+ }
+ $vis fn xray_part(self) -> &'static str {
+ match self {
+ $(Self::$variant => $xray_part,)*
+ }
+ }
+ $vis fn xray_device(self) -> &'static str {
+ match self {
+ $(Self::$variant => $xray_device,)*
+ }
+ }
+ $vis fn xray_family(self) -> &'static str {
+ match self {
+ $(Self::$variant => $xray_family,)*
+ }
+ }
+ }
+
+ struct DeviceVisitor;
+
+ impl<'de> serde::de::Visitor<'de> for DeviceVisitor {
+ type Value = $Device;
+
+ fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.write_str("a Xilinx device string")
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match $Device::from_str(v, false) {
+ Ok(v) => Ok(v),
+ Err(_) => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)),
+ }
+ }
+
+ fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match str::from_utf8(v).ok().and_then(|v| $Device::from_str(v, false).ok()) {
+ Some(v) => Ok(v),
+ None => Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)),
+ }
+ }
+ }
+
+ impl<'de> Deserialize<'de> for $Device {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ deserializer.deserialize_string(DeviceVisitor)
+ }
+ }
+
+ impl Serialize for $Device {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: serde::Serializer,
+ {
+ self.as_str().serialize(serializer)
+ }
+ }
+ };
+}
+
+make_device_enum! 
{ + pub enum Device { + #[ + name = "xc7a35ticsg324-1L", + xray_part = "xc7a35tcsg324-1", + xray_device = "xc7a35t", + xray_family = "artix7", + ] + Xc7a35ticsg324_1l, + #[ + name = "xc7a100ticsg324-1L", + xray_part = "xc7a100tcsg324-1", + xray_device = "xc7a100t", + xray_family = "artix7", + ] + Xc7a100ticsg324_1l, + } +} + +impl fmt::Display for Device { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct YosysNextpnrXrayRunNextpnrArgs { + #[arg(long, env = "CHIPDB_DIR", value_hint = clap::ValueHint::DirPath)] + pub nextpnr_xilinx_chipdb_dir: String, + #[arg(long)] + pub device: Device, + #[arg(long, default_value_t = 0)] + pub nextpnr_xilinx_seed: i32, +} + +impl ToArgs for YosysNextpnrXrayRunNextpnrArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + nextpnr_xilinx_chipdb_dir, + device, + nextpnr_xilinx_seed, + } = self; + args.write_args([ + format_args!("--nextpnr-xilinx-chipdb-dir={nextpnr_xilinx_chipdb_dir}"), + format_args!("--device={device}"), + format_args!("--nextpnr-xilinx-seed={nextpnr_xilinx_seed}"), + ]); + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +pub struct YosysNextpnrXrayRunNextpnr { + nextpnr_xilinx_chipdb_dir: Interned, + device: Device, + nextpnr_xilinx_seed: i32, + xdc_file: Interned, + xdc_file_name: Interned, + json_file: Interned, + json_file_name: Interned, + routed_json_file: Interned, + routed_json_file_name: Interned, + fasm_file: Interned, + fasm_file_name: Interned, +} + +impl YosysNextpnrXrayRunNextpnr { + fn chipdb_file(&self) -> Interned { + interned_known_utf8_path_buf_method(self.nextpnr_xilinx_chipdb_dir, |chipdb_dir| { + let mut retval = chipdb_dir.join(self.device.xray_device()); + retval.set_extension("bin"); + retval + }) + } +} + +impl ExternalCommand for YosysNextpnrXrayRunNextpnr { + type AdditionalArgs = YosysNextpnrXrayRunNextpnrArgs; + type AdditionalJobData = Self; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = NextpnrXilinx; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, |args, dependencies| { + let YosysNextpnrXrayRunNextpnrArgs { + nextpnr_xilinx_chipdb_dir, + device, + nextpnr_xilinx_seed, + } = args.additional_args; + let xdc_file = dependencies.job.job.xdc_file; + let routed_json_file = dependencies.base_job().file_with_ext("routed.json"); + let fasm_file = dependencies.base_job().file_with_ext("fasm"); + Ok(Self { + nextpnr_xilinx_chipdb_dir: str::intern_owned(nextpnr_xilinx_chipdb_dir), + device, + nextpnr_xilinx_seed, + xdc_file, + xdc_file_name: interned_known_utf8_method(xdc_file, |v| { + v.file_name().expect("known to have file name") + }), + json_file: dependencies + .dependencies + .job + .job + .additional_job_data() + .json_file(), + json_file_name: dependencies + .dependencies + .job + .job + .additional_job_data() + .json_file_name(), + routed_json_file, + routed_json_file_name: interned_known_utf8_method(routed_json_file, |v| { + v.file_name().expect("known to have file name") + }), + fasm_file, + fasm_file_name: interned_known_utf8_method(fasm_file, |v| { + v.file_name().expect("known to have file name") + }), + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [ + 
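
The generated table keeps clap's device name in sync with the project-x-ray part strings, so for the -1L 100T variant:

    assert_eq!(Device::Xc7a100ticsg324_1l.as_str(), "xc7a100ticsg324-1L");
    assert_eq!(Device::Xc7a100ticsg324_1l.xray_part(), "xc7a100tcsg324-1");
    assert_eq!(Device::Xc7a100ticsg324_1l.xray_device(), "xc7a100t");
    assert_eq!(Device::Xc7a100ticsg324_1l.xray_family(), "artix7");
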
JobItemName::Path { + path: job.additional_job_data().json_file, + }, + JobItemName::Path { + path: job.additional_job_data().xdc_file, + }, + ][..] + .intern() + } + + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]> { + [ + job.additional_job_data().routed_json_file, + job.additional_job_data().fasm_file, + ][..] + .intern() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + let job_data @ YosysNextpnrXrayRunNextpnr { + nextpnr_xilinx_seed, + xdc_file_name, + json_file_name, + routed_json_file_name, + fasm_file_name, + .. + } = job.additional_job_data(); + args.write_args([ + format_args!("--chipdb={}", job_data.chipdb_file()), + format_args!("--xdc={xdc_file_name}"), + format_args!("--json={json_file_name}"), + format_args!("--write={routed_json_file_name}"), + format_args!("--fasm={fasm_file_name}"), + format_args!("--seed={nextpnr_xilinx_seed}"), + ]); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "yosys-nextpnr-xray-run-nextpnr".intern() + } + + fn subcommand_hidden() -> bool { + true + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Xcfasm; + +impl ExternalProgramTrait for Xcfasm { + fn default_program_name() -> Interned { + "xcfasm".intern() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)] +pub struct YosysNextpnrXrayArgs { + #[arg(long, env = "DB_DIR", value_hint = clap::ValueHint::DirPath)] + pub prjxray_db_dir: String, +} + +impl ToArgs for YosysNextpnrXrayArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { prjxray_db_dir } = self; + args.write_arg(format_args!("--prjxray-db-dir={prjxray_db_dir}")); + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)] +pub struct YosysNextpnrXray { + prjxray_db_dir: Interned, + device: Device, + fasm_file: Interned, + fasm_file_name: Interned, + frames_file: Interned, + frames_file_name: Interned, + bit_file: Interned, + bit_file_name: Interned, +} + +impl YosysNextpnrXray { + fn db_root(&self) -> Interned { + interned_known_utf8_path_buf_method(self.prjxray_db_dir, |prjxray_db_dir| { + prjxray_db_dir.join(self.device.xray_family()) + }) + } + fn part_file(&self) -> Interned { + interned_known_utf8_path_buf_method(self.prjxray_db_dir, |prjxray_db_dir| { + let mut retval = prjxray_db_dir.join(self.device.xray_family()); + retval.push(self.device.xray_part()); + retval.push("part.yaml"); + retval + }) + } +} + +impl ExternalCommand for YosysNextpnrXray { + type AdditionalArgs = YosysNextpnrXrayArgs; + type AdditionalJobData = Self; + type Dependencies = JobKindAndDependencies>; + type ExternalProgram = Xcfasm; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, |args, dependencies| { + let YosysNextpnrXrayArgs { prjxray_db_dir } = args.additional_args; + let frames_file = dependencies.base_job().file_with_ext("frames"); + let bit_file = dependencies.base_job().file_with_ext("bit"); + Ok(Self { + prjxray_db_dir: str::intern_owned(prjxray_db_dir), + device: dependencies.job.job.additional_job_data().device, + fasm_file: dependencies.job.job.additional_job_data().fasm_file, + fasm_file_name: dependencies.job.job.additional_job_data().fasm_file_name, + frames_file, + frames_file_name: 
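
Assembled from command_line_args above, the place-and-route step is invoked along these lines (sketch: blinky stands in for the output file stem, and <chipdb-dir> for the --nextpnr-xilinx-chipdb-dir value that chipdb_file() extends with the device name):

    nextpnr-xilinx --chipdb=<chipdb-dir>/xc7a100t.bin --xdc=blinky.xdc --json=blinky.json \
        --write=blinky.routed.json --fasm=blinky.fasm --seed=0
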
interned_known_utf8_method(frames_file, |v| { + v.file_name().expect("known to have file name") + }), + bit_file, + bit_file_name: interned_known_utf8_method(bit_file, |v| { + v.file_name().expect("known to have file name") + }), + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.additional_job_data().fasm_file, + }][..] + .intern() + } + + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]> { + [ + job.additional_job_data().frames_file, + job.additional_job_data().bit_file, + ][..] + .intern() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + let job_data @ YosysNextpnrXray { + device, + fasm_file_name, + frames_file_name, + bit_file_name, + .. + } = job.additional_job_data(); + args.write_args([ + format_args!("--sparse"), + format_args!("--db-root={}", job_data.db_root()), + format_args!("--part={}", device.xray_part()), + format_args!("--part_file={}", job_data.part_file()), + format_args!("--fn_in={fasm_file_name}"), + format_args!("--frm_out={frames_file_name}"), + format_args!("--bit_out={bit_file_name}"), + ]); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "yosys-nextpnr-xray".intern() + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(YosysNextpnrXrayWriteYsFileJobKind), + DynJobKind::new(ExternalCommandJobKind::::new()), + DynJobKind::new(YosysNextpnrXrayWriteXdcFileJobKind), + DynJobKind::new(ExternalCommandJobKind::::new()), + DynJobKind::new(ExternalCommandJobKind::::new()), + ] +} diff --git a/crates/fayalite/src/build/verilog.rs b/crates/fayalite/src/build/verilog.rs new file mode 100644 index 0000000..39334f2 --- /dev/null +++ b/crates/fayalite/src/build/verilog.rs @@ -0,0 +1,416 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{ + build::{ + CommandParams, DynJobKind, GetBaseJob, JobAndDependencies, JobArgsAndDependencies, + JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, + WriteArgs, + external::{ + ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait, + }, + firrtl::FirrtlJobKind, + interned_known_utf8_method, interned_known_utf8_path_buf_method, + }, + intern::{Intern, Interned}, + util::job_server::AcquiredJob, +}; +use clap::Args; +use eyre::{Context, bail}; +use serde::{Deserialize, Serialize}; +use std::{fmt, mem}; + +/// based on [LLVM Circt's recommended lowering options][lowering-options] +/// +/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target +#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +#[non_exhaustive] +pub enum VerilogDialect { + Questa, + Spyglass, + Verilator, + Vivado, + Yosys, +} + +impl fmt::Display for VerilogDialect { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(self.as_str()) + } +} + +impl VerilogDialect { + pub fn as_str(self) -> &'static str { + match self { + VerilogDialect::Questa => "questa", + VerilogDialect::Spyglass => "spyglass", + VerilogDialect::Verilator => "verilator", + VerilogDialect::Vivado => "vivado", + VerilogDialect::Yosys => "yosys", + } + } + pub fn firtool_extra_args(self) -> &'static [&'static str] { + match self { + VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"], + VerilogDialect::Spyglass => { + 
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"] + } + VerilogDialect::Verilator => &[ + "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables", + ], + VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"], + VerilogDialect::Yosys => { + &["--lowering-options=disallowLocalVariables,disallowPackedArrays"] + } + } + } +} + +#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)] +#[non_exhaustive] +pub struct UnadjustedVerilogArgs { + #[arg(long = "firtool-extra-arg", value_name = "ARG")] + pub firtool_extra_args: Vec, + /// adapt the generated Verilog for a particular toolchain + #[arg(long)] + pub verilog_dialect: Option, + #[arg(long)] + pub verilog_debug: bool, +} + +impl ToArgs for UnadjustedVerilogArgs { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + ref firtool_extra_args, + verilog_dialect, + verilog_debug, + } = *self; + args.extend( + firtool_extra_args + .iter() + .map(|arg| format!("--firtool-extra-arg={arg}")), + ); + if let Some(verilog_dialect) = verilog_dialect { + args.write_arg(format_args!("--verilog-dialect={verilog_dialect}")); + } + if verilog_debug { + args.write_str_arg("--verilog-debug"); + } + } +} + +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)] +pub struct Firtool; + +impl ExternalProgramTrait for Firtool { + fn default_program_name() -> Interned { + "firtool".intern() + } +} + +#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)] +pub struct UnadjustedVerilog { + firrtl_file: Interned, + firrtl_file_name: Interned, + unadjusted_verilog_file: Interned, + unadjusted_verilog_file_name: Interned, + firtool_extra_args: Interned<[Interned]>, + verilog_dialect: Option, + verilog_debug: bool, +} + +impl UnadjustedVerilog { + pub fn firrtl_file(&self) -> Interned { + self.firrtl_file + } + pub fn unadjusted_verilog_file(&self) -> Interned { + self.unadjusted_verilog_file + } + pub fn firtool_extra_args(&self) -> Interned<[Interned]> { + self.firtool_extra_args + } + pub fn verilog_dialect(&self) -> Option { + self.verilog_dialect + } + pub fn verilog_debug(&self) -> bool { + self.verilog_debug + } +} + +impl ExternalCommand for UnadjustedVerilog { + type AdditionalArgs = UnadjustedVerilogArgs; + type AdditionalJobData = UnadjustedVerilog; + type Dependencies = JobKindAndDependencies; + type ExternalProgram = Firtool; + + fn dependencies() -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies>, + params: &JobParams, + ) -> eyre::Result<( + Self::AdditionalJobData, + ::JobsAndKinds, + )> { + args.args_to_jobs_external_simple(params, |args, dependencies| { + let UnadjustedVerilogArgs { + firtool_extra_args, + verilog_dialect, + verilog_debug, + } = args.additional_args; + let unadjusted_verilog_file = dependencies + .dependencies + .job + .job + .file_with_ext("unadjusted.v"); + Ok(UnadjustedVerilog { + firrtl_file: dependencies.job.job.firrtl_file(), + firrtl_file_name: interned_known_utf8_method( + dependencies.job.job.firrtl_file(), + |v| v.file_name().expect("known to have file name"), + ), + unadjusted_verilog_file, + unadjusted_verilog_file_name: interned_known_utf8_method( + unadjusted_verilog_file, + |v| v.file_name().expect("known to have file name"), + ), + firtool_extra_args: firtool_extra_args + .into_iter() + .map(str::intern_owned) + .collect(), + verilog_dialect, + verilog_debug, + }) + }) + } + + fn inputs(job: &ExternalCommandJob) -> Interned<[JobItemName]> 
{ + [JobItemName::Path { + path: job.additional_job_data().firrtl_file, + }][..] + .intern() + } + + fn output_paths(job: &ExternalCommandJob) -> Interned<[Interned]> { + [job.additional_job_data().unadjusted_verilog_file][..].intern() + } + + fn command_line_args(job: &ExternalCommandJob, args: &mut W) { + let UnadjustedVerilog { + firrtl_file: _, + firrtl_file_name, + unadjusted_verilog_file: _, + unadjusted_verilog_file_name, + firtool_extra_args, + verilog_dialect, + verilog_debug, + } = *job.additional_job_data(); + args.write_interned_arg(firrtl_file_name); + args.write_str_arg("-o"); + args.write_interned_arg(unadjusted_verilog_file_name); + if verilog_debug { + args.write_str_args(["-g", "--preserve-values=all"]); + } + if let Some(dialect) = verilog_dialect { + args.write_str_args(dialect.firtool_extra_args().iter().copied()); + } + args.write_interned_args(firtool_extra_args); + } + + fn current_dir(job: &ExternalCommandJob) -> Option> { + Some(job.output_dir()) + } + + fn job_kind_name() -> Interned { + "unadjusted-verilog".intern() + } + + fn subcommand_hidden() -> bool { + true + } + + fn run_even_if_cached_arg_name() -> Interned { + "firtool-run-even-if-cached".intern() + } +} + +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct VerilogJobKind; + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)] +#[non_exhaustive] +pub struct VerilogJobArgs {} + +impl ToArgs for VerilogJobArgs { + fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) { + let Self {} = self; + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub struct VerilogJob { + output_dir: Interned, + unadjusted_verilog_file: Interned, + main_verilog_file: Interned, +} + +impl VerilogJob { + pub fn output_dir(&self) -> Interned { + self.output_dir + } + pub fn unadjusted_verilog_file(&self) -> Interned { + self.unadjusted_verilog_file + } + pub fn main_verilog_file(&self) -> Interned { + self.main_verilog_file + } + #[track_caller] + pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned] { + match additional_files { + JobItem::DynamicPaths { + paths, + source_job_name, + } if *source_job_name == VerilogJobKind.name() => paths, + v => panic!("expected VerilogJob's additional files JobItem: {v:?}"), + } + } + pub fn all_verilog_files( + main_verilog_file: Interned, + additional_files: &[Interned], + ) -> eyre::Result]>> { + let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1)); + for verilog_file in [main_verilog_file].iter().chain(additional_files) { + if !(verilog_file.ends_with(".v") || verilog_file.ends_with(".sv")) { + continue; + } + let verilog_file = std::path::absolute(verilog_file) + .and_then(|v| { + v.into_os_string().into_string().map_err(|_| { + std::io::Error::new(std::io::ErrorKind::Other, "path is not valid UTF-8") + }) + }) + .wrap_err_with(|| { + format!("converting {verilog_file:?} to an absolute path failed") + })?; + if verilog_file.contains(|ch: char| { + (ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"' + }) { + bail!("verilog file path contains characters that aren't permitted"); + } + retval.push(str::intern_owned(verilog_file)); + } + Ok(Intern::intern_owned(retval)) + } +} + +impl JobKind for VerilogJobKind { + type Args = VerilogJobArgs; + type Job = VerilogJob; + type Dependencies = JobKindAndDependencies>; + + fn dependencies(self) -> Self::Dependencies { + Default::default() + } + + fn args_to_jobs( + args: JobArgsAndDependencies, + params: &JobParams, + ) -> 
eyre::Result> { + args.args_to_jobs_simple(params, |_kind, args, dependencies| { + let VerilogJobArgs {} = args; + Ok(VerilogJob { + output_dir: dependencies.base_job().output_dir(), + unadjusted_verilog_file: dependencies + .job + .job + .additional_job_data() + .unadjusted_verilog_file(), + main_verilog_file: dependencies.base_job().file_with_ext("v"), + }) + }) + } + + fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [JobItemName::Path { + path: job.unadjusted_verilog_file, + }][..] + .intern() + } + + fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> { + [ + JobItemName::Path { + path: job.main_verilog_file, + }, + JobItemName::DynamicPaths { + source_job_name: self.name(), + }, + ][..] + .intern() + } + + fn name(self) -> Interned { + "verilog".intern() + } + + fn external_command_params(self, _job: &Self::Job) -> Option { + None + } + + fn run( + self, + job: &Self::Job, + inputs: &[JobItem], + _params: &JobParams, + _acquired_job: &mut AcquiredJob, + ) -> eyre::Result> { + assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job))); + let input = std::fs::read_to_string(job.unadjusted_verilog_file())?; + let file_separator_prefix = "\n// ----- 8< ----- FILE \""; + let file_separator_suffix = "\" ----- 8< -----\n\n"; + let mut input = &*input; + let main_verilog_file = job.main_verilog_file(); + let mut file_name = Some(main_verilog_file); + let mut additional_outputs = Vec::new(); + loop { + let (chunk, next_file_name) = if let Some((chunk, rest)) = + input.split_once(file_separator_prefix) + { + let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else { + bail!( + "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}" + ); + }; + input = rest; + let next_file_name = + interned_known_utf8_path_buf_method(job.output_dir, |v| v.join(next_file_name)); + additional_outputs.push(next_file_name); + (chunk, Some(next_file_name)) + } else { + (mem::take(&mut input), None) + }; + let Some(file_name) = mem::replace(&mut file_name, next_file_name) else { + break; + }; + std::fs::write(&file_name, chunk)?; + } + Ok(vec![ + JobItem::Path { + path: main_verilog_file, + }, + JobItem::DynamicPaths { + paths: additional_outputs, + source_job_name: self.name(), + }, + ]) + } +} + +pub(crate) fn built_in_job_kinds() -> impl IntoIterator { + [ + DynJobKind::new(ExternalCommandJobKind::::new()), + DynJobKind::new(VerilogJobKind), + ] +} diff --git a/crates/fayalite/src/cli.rs b/crates/fayalite/src/cli.rs deleted file mode 100644 index 6fb4b5e..0000000 --- a/crates/fayalite/src/cli.rs +++ /dev/null @@ -1,806 +0,0 @@ -// SPDX-License-Identifier: LGPL-3.0-or-later -// See Notices.txt for copyright information -use crate::{ - bundle::{Bundle, BundleType}, - firrtl::{self, ExportOptions}, - intern::Interned, - module::Module, - util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8}, -}; -use clap::{ - Parser, Subcommand, ValueEnum, ValueHint, - builder::{OsStringValueParser, TypedValueParser}, -}; -use eyre::{Report, eyre}; -use serde::{Deserialize, Serialize}; -use std::{ - error, - ffi::OsString, - fmt::{self, Write}, - fs, io, mem, - path::{Path, PathBuf}, - process, -}; -use tempfile::TempDir; - -pub type Result = std::result::Result; - -pub struct CliError(Report); - -impl fmt::Debug for CliError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.0.fmt(f) - } -} - -impl fmt::Display for CliError { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - 
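
To make the file splitting in VerilogJobKind::run (above, in verilog.rs) concrete: firtool writes every extra output file into the one unadjusted stream, and a stream shaped like the following is split into the main blinky.v plus extra.v in the output directory, with extra.v reported back as a DynamicPaths item (sketch):

    module blinky(...);
      ...
    endmodule
    // ----- 8< ----- FILE "extra.v" ----- 8< -----

    module extra(...);
      ...
    endmodule
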
self.0.fmt(f) - } -} - -impl error::Error for CliError {} - -impl From for CliError { - fn from(value: io::Error) -> Self { - CliError(Report::new(value)) - } -} - -pub trait RunPhase { - type Output; - fn run(&self, arg: Arg) -> Result { - self.run_with_job(arg, &mut AcquiredJob::acquire()) - } - fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result; -} - -#[derive(Parser, Debug, Clone)] -#[non_exhaustive] -pub struct BaseArgs { - /// the directory to put the generated main output file and associated files in - #[arg(short, long, value_hint = ValueHint::DirPath, required = true)] - pub output: Option, - /// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo - #[arg(long)] - pub file_stem: Option, - #[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")] - pub keep_temp_dir: bool, - #[arg(skip = false)] - pub redirect_output_for_rust_test: bool, -} - -impl BaseArgs { - fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option)> { - let (dir_path, temp_dir) = match &self.output { - Some(output) => (output.clone(), None), - None => { - let temp_dir = TempDir::new()?; - if self.keep_temp_dir { - let temp_dir = temp_dir.into_path(); - println!("created temporary directory: {}", temp_dir.display()); - (temp_dir, None) - } else { - (temp_dir.path().to_path_buf(), Some(temp_dir)) - } - } - }; - Ok(( - firrtl::FileBackend { - dir_path, - top_fir_file_stem: self.file_stem.clone(), - circuit_name: None, - }, - temp_dir, - )) - } - /// handles possibly redirecting the command's output for Rust tests - pub fn run_external_command( - &self, - _acquired_job: &mut AcquiredJob, - mut command: process::Command, - mut captured_output: Option<&mut String>, - ) -> io::Result { - if self.redirect_output_for_rust_test || captured_output.is_some() { - let (reader, writer) = os_pipe::pipe()?; - let mut reader = io::BufReader::new(reader); - command.stderr(writer.try_clone()?); - command.stdout(writer); // must not leave writer around after spawning child - command.stdin(process::Stdio::null()); - let mut child = command.spawn()?; - drop(command); // close writers - Ok(loop { - let status = child.try_wait()?; - streaming_read_utf8(&mut reader, |s| { - if let Some(captured_output) = captured_output.as_deref_mut() { - captured_output.push_str(s); - } - // use print! 
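// A simplified sketch of the capture path in `run_external_command` above,
// assuming the `os_pipe` crate (which that code already uses). Both stdout
// and stderr feed one pipe; the key subtlety is that the parent must drop
// its writer handles (here, by dropping `command` after spawn) or the reader
// never sees EOF. Unlike the original, this sketch does not stream output
// incrementally; it just reads to EOF and then reaps the child.
use std::{io::Read, process};

fn run_and_capture(
    mut command: process::Command,
) -> std::io::Result<(process::ExitStatus, String)> {
    let (mut reader, writer) = os_pipe::pipe()?;
    command.stderr(writer.try_clone()?);
    command.stdout(writer); // `command` now owns the parent's only writer handles
    command.stdin(process::Stdio::null());
    let mut child = command.spawn()?;
    drop(command); // close the parent's writer ends
    let mut output = String::new();
    reader.read_to_string(&mut output)?; // EOF once the child's copies are closed
    Ok((child.wait()?, output))
}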
so output goes to Rust test output capture - print!("{s}"); - io::Result::Ok(()) - })?; - if let Some(status) = status { - break status; - } - }) - } else { - command.status() - } - } -} - -#[derive(Parser, Debug, Clone)] -#[non_exhaustive] -pub struct FirrtlArgs { - #[command(flatten)] - pub base: BaseArgs, - #[command(flatten)] - pub export_options: ExportOptions, -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct FirrtlOutput { - pub file_stem: String, - pub top_module: String, - pub output_dir: PathBuf, - pub temp_dir: Option, -} - -impl FirrtlOutput { - pub fn file_with_ext(&self, ext: &str) -> PathBuf { - let mut retval = self.output_dir.join(&self.file_stem); - retval.set_extension(ext); - retval - } - pub fn firrtl_file(&self) -> PathBuf { - self.file_with_ext("fir") - } -} - -impl FirrtlArgs { - fn run_impl( - &self, - top_module: Module, - _acquired_job: &mut AcquiredJob, - ) -> Result { - let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?; - let firrtl::FileBackend { - top_fir_file_stem, - circuit_name, - dir_path, - } = firrtl::export(file_backend, &top_module, self.export_options)?; - Ok(FirrtlOutput { - file_stem: top_fir_file_stem.expect( - "export is known to set the file stem from the circuit name if not provided", - ), - top_module: circuit_name.expect("export is known to set the circuit name"), - output_dir: dir_path, - temp_dir, - }) - } -} - -impl RunPhase> for FirrtlArgs { - type Output = FirrtlOutput; - fn run_with_job( - &self, - top_module: Module, - acquired_job: &mut AcquiredJob, - ) -> Result { - self.run_impl(top_module.canonical(), acquired_job) - } -} - -impl RunPhase>> for FirrtlArgs { - type Output = FirrtlOutput; - fn run_with_job( - &self, - top_module: Interned>, - acquired_job: &mut AcquiredJob, - ) -> Result { - self.run_with_job(*top_module, acquired_job) - } -} - -/// based on [LLVM Circt's recommended lowering options -/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target) -#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)] -#[non_exhaustive] -pub enum VerilogDialect { - Questa, - Spyglass, - Verilator, - Vivado, - Yosys, -} - -impl fmt::Display for VerilogDialect { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(self.as_str()) - } -} - -impl VerilogDialect { - pub fn as_str(self) -> &'static str { - match self { - VerilogDialect::Questa => "questa", - VerilogDialect::Spyglass => "spyglass", - VerilogDialect::Verilator => "verilator", - VerilogDialect::Vivado => "vivado", - VerilogDialect::Yosys => "yosys", - } - } - pub fn firtool_extra_args(self) -> &'static [&'static str] { - match self { - VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"], - VerilogDialect::Spyglass => { - &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"] - } - VerilogDialect::Verilator => &[ - "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables", - ], - VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"], - VerilogDialect::Yosys => { - &["--lowering-options=disallowLocalVariables,disallowPackedArrays"] - } - } - } -} - -#[derive(Parser, Debug, Clone)] -#[non_exhaustive] -pub struct VerilogArgs { - #[command(flatten)] - pub firrtl: FirrtlArgs, - #[arg( - long, - default_value = "firtool", - env = "FIRTOOL", - value_hint = ValueHint::CommandName, - value_parser = OsStringValueParser::new().try_map(which) - )] - pub firtool: PathBuf, - #[arg(long)] - pub firtool_extra_args: Vec, - 
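// Example of how the dialect table above reaches firtool's command line:
// each variant contributes exactly one `--lowering-options=...` argument.
// A hypothetical snippet assuming a `VerilogDialect` like the one above.
fn firtool_dialect_args(dialect: Option<VerilogDialect>) -> Vec<&'static str> {
    let mut args = Vec::new();
    if let Some(dialect) = dialect {
        args.extend(dialect.firtool_extra_args().iter().copied());
    }
    args
}
// firtool_dialect_args(Some(VerilogDialect::Yosys)) yields
// ["--lowering-options=disallowLocalVariables,disallowPackedArrays"].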
/// adapt the generated Verilog for a particular toolchain - #[arg(long)] - pub verilog_dialect: Option, - #[arg(long, short = 'g')] - pub debug: bool, -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct VerilogOutput { - pub firrtl: FirrtlOutput, - pub verilog_files: Vec, - pub contents_hash: Option, -} - -impl VerilogOutput { - pub fn main_verilog_file(&self) -> PathBuf { - self.firrtl.file_with_ext("v") - } - fn unadjusted_verilog_file(&self) -> PathBuf { - self.firrtl.file_with_ext("unadjusted.v") - } -} - -impl VerilogArgs { - fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result { - let input = fs::read_to_string(output.unadjusted_verilog_file())?; - let file_separator_prefix = "\n// ----- 8< ----- FILE \""; - let file_separator_suffix = "\" ----- 8< -----\n\n"; - let mut input = &*input; - output.contents_hash = Some(blake3::hash(input.as_bytes())); - let main_verilog_file = output.main_verilog_file(); - let mut file_name: Option<&Path> = Some(&main_verilog_file); - loop { - let (chunk, next_file_name) = if let Some((chunk, rest)) = - input.split_once(file_separator_prefix) - { - let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else { - return Err(CliError(eyre!( - "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}" - ))); - }; - input = rest; - (chunk, Some(next_file_name.as_ref())) - } else { - (mem::take(&mut input), None) - }; - let Some(file_name) = mem::replace(&mut file_name, next_file_name) else { - break; - }; - let file_name = output.firrtl.output_dir.join(file_name); - fs::write(&file_name, chunk)?; - if let Some(extension) = file_name.extension() { - if extension == "v" || extension == "sv" { - output.verilog_files.push(file_name); - } - } - } - Ok(output) - } - fn run_impl( - &self, - firrtl_output: FirrtlOutput, - acquired_job: &mut AcquiredJob, - ) -> Result { - let Self { - firrtl, - firtool, - firtool_extra_args, - verilog_dialect, - debug, - } = self; - let output = VerilogOutput { - firrtl: firrtl_output, - verilog_files: vec![], - contents_hash: None, - }; - let mut cmd = process::Command::new(firtool); - cmd.arg(output.firrtl.firrtl_file()); - cmd.arg("-o"); - cmd.arg(output.unadjusted_verilog_file()); - if *debug { - cmd.arg("-g"); - cmd.arg("--preserve-values=all"); - } - if let Some(dialect) = verilog_dialect { - cmd.args(dialect.firtool_extra_args()); - } - cmd.args(firtool_extra_args); - cmd.current_dir(&output.firrtl.output_dir); - let status = firrtl.base.run_external_command(acquired_job, cmd, None)?; - if status.success() { - self.process_unadjusted_verilog_file(output) - } else { - Err(CliError(eyre!( - "running {} failed: {status}", - self.firtool.display() - ))) - } - } -} - -impl RunPhase for VerilogArgs -where - FirrtlArgs: RunPhase, -{ - type Output = VerilogOutput; - fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result { - let firrtl_output = self.firrtl.run_with_job(arg, acquired_job)?; - self.run_impl(firrtl_output, acquired_job) - } -} - -#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)] -#[non_exhaustive] -pub enum FormalMode { - #[default] - BMC, - Prove, - Live, - Cover, -} - -impl FormalMode { - pub fn as_str(self) -> &'static str { - match self { - FormalMode::BMC => "bmc", - FormalMode::Prove => "prove", - FormalMode::Live => "live", - FormalMode::Cover => "cover", - } - } -} - -impl fmt::Display for FormalMode { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - 
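// `unadjusted_verilog_file` above leans on a `PathBuf::set_extension`
// detail: an "extension" may itself contain dots, so setting "unadjusted.v"
// on `out/top` produces `out/top.unadjusted.v`. A quick std-only
// demonstration:
fn set_extension_demo() {
    let mut path = std::path::PathBuf::from("out").join("top");
    path.set_extension("unadjusted.v");
    assert_eq!(path, std::path::PathBuf::from("out/top.unadjusted.v"));
}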
f.write_str(self.as_str()) - } -} - -#[derive(Clone)] -struct FormalAdjustArgs; - -impl clap::FromArgMatches for FormalAdjustArgs { - fn from_arg_matches(_matches: &clap::ArgMatches) -> Result { - Ok(Self) - } - - fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> { - Ok(()) - } -} - -impl clap::Args for FormalAdjustArgs { - fn augment_args(cmd: clap::Command) -> clap::Command { - cmd.mut_arg("output", |arg| arg.required(false)) - .mut_arg("verilog_dialect", |arg| { - arg.default_value(VerilogDialect::Yosys.to_string()) - .hide(true) - }) - } - - fn augment_args_for_update(cmd: clap::Command) -> clap::Command { - Self::augment_args(cmd) - } -} - -fn which(v: std::ffi::OsString) -> which::Result { - #[cfg(not(miri))] - return which::which(v); - #[cfg(miri)] - return Ok(Path::new("/").join(v)); -} - -#[derive(Parser, Clone)] -#[non_exhaustive] -pub struct FormalArgs { - #[command(flatten)] - pub verilog: VerilogArgs, - #[arg( - long, - default_value = "sby", - env = "SBY", - value_hint = ValueHint::CommandName, - value_parser = OsStringValueParser::new().try_map(which) - )] - pub sby: PathBuf, - #[arg(long)] - pub sby_extra_args: Vec, - #[arg(long, default_value_t)] - pub mode: FormalMode, - #[arg(long, default_value_t = Self::DEFAULT_DEPTH)] - pub depth: u64, - #[arg(long, default_value = "z3")] - pub solver: String, - #[arg(long)] - pub smtbmc_extra_args: Vec, - #[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")] - pub cache_results: bool, - #[command(flatten)] - _formal_adjust_args: FormalAdjustArgs, -} - -impl fmt::Debug for FormalArgs { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let Self { - verilog, - sby, - sby_extra_args, - mode, - depth, - solver, - smtbmc_extra_args, - cache_results, - _formal_adjust_args: _, - } = self; - f.debug_struct("FormalArgs") - .field("verilog", verilog) - .field("sby", sby) - .field("sby_extra_args", sby_extra_args) - .field("mode", mode) - .field("depth", depth) - .field("solver", solver) - .field("smtbmc_extra_args", smtbmc_extra_args) - .field("cache_results", cache_results) - .finish_non_exhaustive() - } -} - -impl FormalArgs { - pub const DEFAULT_DEPTH: u64 = 20; -} - -#[derive(Debug)] -#[non_exhaustive] -pub struct FormalOutput { - pub verilog: VerilogOutput, -} - -impl FormalOutput { - pub fn sby_file(&self) -> PathBuf { - self.verilog.firrtl.file_with_ext("sby") - } - pub fn cache_file(&self) -> PathBuf { - self.verilog.firrtl.file_with_ext("cache.json") - } -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[non_exhaustive] -pub struct FormalCacheOutput {} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[non_exhaustive] -pub enum FormalCacheVersion { - V1, -} - -impl FormalCacheVersion { - pub const CURRENT: Self = Self::V1; -} - -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[non_exhaustive] -pub struct FormalCache { - pub version: FormalCacheVersion, - pub contents_hash: blake3::Hash, - pub stdout_stderr: String, - pub result: Result, -} - -impl FormalCache { - pub fn new( - version: FormalCacheVersion, - contents_hash: blake3::Hash, - stdout_stderr: String, - result: Result, - ) -> Self { - Self { - version, - contents_hash, - stdout_stderr, - result, - } - } -} - -impl FormalArgs { - fn sby_contents(&self, output: &FormalOutput) -> Result { - let Self { - verilog: _, - sby: _, - sby_extra_args: _, - mode, - depth, - smtbmc_extra_args, - solver, - cache_results: _, - _formal_adjust_args: 
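// The cache record above is plain serde JSON; a sketch of deciding whether a
// cached run is reusable, assuming the `FormalCache` types above and blake3's
// serde support (which the cache read/write code already relies on).
// `load_reusable_cache` is an illustrative name.
fn load_reusable_cache(
    path: &std::path::Path,
    contents_hash: blake3::Hash,
) -> Option<FormalCache> {
    let bytes = std::fs::read(path).ok()?;
    let cache: FormalCache = serde_json::from_slice(&bytes).ok()?;
    // both the format version and the input hash must match exactly
    (cache.version == FormalCacheVersion::CURRENT && cache.contents_hash == contents_hash)
        .then_some(cache)
}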
_, - } = self; - let smtbmc_options = smtbmc_extra_args.join(" "); - let top_module = &output.verilog.firrtl.top_module; - let mut retval = format!( - "[options]\n\ - mode {mode}\n\ - depth {depth}\n\ - wait on\n\ - \n\ - [engines]\n\ - smtbmc {solver} -- -- {smtbmc_options}\n\ - \n\ - [script]\n" - ); - for verilog_file in &output.verilog.verilog_files { - let verilog_file = verilog_file - .to_str() - .ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?; - if verilog_file.contains(|ch: char| { - (ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"' - }) { - return Err(CliError(eyre!( - "verilog file path contains characters that aren't permitted" - ))); - } - writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap(); - } - // workaround for wires disappearing -- set `keep` on all wires - writeln!(retval, "hierarchy -top {top_module}").unwrap(); - writeln!(retval, "proc").unwrap(); - writeln!(retval, "setattr -set keep 1 w:\\*").unwrap(); - writeln!(retval, "prep").unwrap(); - Ok(retval) - } - fn run_impl( - &self, - verilog_output: VerilogOutput, - acquired_job: &mut AcquiredJob, - ) -> Result { - let output = FormalOutput { - verilog: verilog_output, - }; - let sby_file = output.sby_file(); - let sby_contents = self.sby_contents(&output)?; - let contents_hash = output.verilog.contents_hash.map(|verilog_hash| { - let mut hasher = blake3::Hasher::new(); - hasher.update(verilog_hash.as_bytes()); - hasher.update(sby_contents.as_bytes()); - hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes()); - for sby_extra_arg in self.sby_extra_args.iter() { - hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes()); - hasher.update(sby_extra_arg.as_bytes()); - } - hasher.finalize() - }); - std::fs::write(&sby_file, sby_contents)?; - let mut cmd = process::Command::new(&self.sby); - cmd.arg("-j1"); // sby seems not to respect job count in parallel mode - cmd.arg("-f"); - cmd.arg(sby_file.file_name().unwrap()); - cmd.args(&self.sby_extra_args); - cmd.current_dir(&output.verilog.firrtl.output_dir); - let mut captured_output = String::new(); - let cache_file = output.cache_file(); - let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) { - if let Some(FormalCache { - version: FormalCacheVersion::CURRENT, - contents_hash: cache_contents_hash, - stdout_stderr, - result, - }) = fs::read(&cache_file) - .ok() - .and_then(|v| serde_json::from_slice(&v).ok()) - { - if cache_contents_hash == contents_hash { - println!("Using cached formal result:\n{stdout_stderr}"); - return match result { - Ok(FormalCacheOutput {}) => Ok(output), - Err(error) => Err(CliError(eyre::Report::msg(error))), - }; - } - } - true - } else { - false - }; - let _ = fs::remove_file(&cache_file); - let status = self.verilog.firrtl.base.run_external_command( - acquired_job, - cmd, - do_cache.then_some(&mut captured_output), - )?; - let result = if status.success() { - Ok(output) - } else { - Err(CliError(eyre!( - "running {} failed: {status}", - self.sby.display() - ))) - }; - fs::write( - cache_file, - serde_json::to_string_pretty(&FormalCache { - version: FormalCacheVersion::CURRENT, - contents_hash: contents_hash.unwrap(), - stdout_stderr: captured_output, - result: match &result { - Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}), - Err(error) => Err(error.to_string()), - }, - }) - .expect("serialization shouldn't ever fail"), - )?; - result - } -} - -impl RunPhase for FormalArgs -where - VerilogArgs: RunPhase, -{ - type Output = 
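// Note the length prefixes in the cache-key hashing above: hashing only the
// concatenated bytes would let ["ab", "c"] and ["a", "bc"] collide. A
// self-contained restatement of that framing, assuming the blake3 crate:
fn hash_string_list(base: blake3::Hash, items: &[String]) -> blake3::Hash {
    let mut hasher = blake3::Hasher::new();
    hasher.update(base.as_bytes());
    hasher.update(&(items.len() as u64).to_le_bytes());
    for item in items {
        // length-prefix each element so element boundaries stay unambiguous
        hasher.update(&(item.len() as u64).to_le_bytes());
        hasher.update(item.as_bytes());
    }
    hasher.finalize()
}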
FormalOutput; - fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result { - let verilog_output = self.verilog.run_with_job(arg, acquired_job)?; - self.run_impl(verilog_output, acquired_job) - } -} - -#[derive(Subcommand, Debug)] -enum CliCommand { - /// Generate FIRRTL - Firrtl(FirrtlArgs), - /// Generate Verilog - Verilog(VerilogArgs), - /// Run a formal proof - Formal(FormalArgs), -} - -/// a simple CLI -/// -/// Use like: -/// -/// ```no_run -/// # use fayalite::prelude::*; -/// # #[hdl_module] -/// # fn my_module() {} -/// use fayalite::cli; -/// -/// fn main() -> cli::Result { -/// cli::Cli::parse().run(my_module()) -/// } -/// ``` -/// -/// You can also use it with a larger [`clap`]-based CLI like so: -/// -/// ```no_run -/// # use fayalite::prelude::*; -/// # #[hdl_module] -/// # fn my_module() {} -/// use clap::{Subcommand, Parser}; -/// use fayalite::cli; -/// -/// #[derive(Subcommand)] -/// pub enum Cmd { -/// #[command(flatten)] -/// Fayalite(cli::Cli), -/// MySpecialCommand { -/// #[arg(long)] -/// foo: bool, -/// }, -/// } -/// -/// #[derive(Parser)] -/// pub struct Cli { -/// #[command(subcommand)] -/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands -/// } -/// -/// fn main() -> cli::Result { -/// match Cli::parse().cmd { -/// Cmd::Fayalite(v) => v.run(my_module())?, -/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"), -/// } -/// Ok(()) -/// } -/// ``` -#[derive(Parser, Debug)] -// clear things that would be crate-specific -#[command(name = "Fayalite Simple CLI", about = None, long_about = None)] -pub struct Cli { - #[command(subcommand)] - subcommand: CliCommand, -} - -impl clap::Subcommand for Cli { - fn augment_subcommands(cmd: clap::Command) -> clap::Command { - CliCommand::augment_subcommands(cmd) - } - - fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command { - CliCommand::augment_subcommands_for_update(cmd) - } - - fn has_subcommand(name: &str) -> bool { - CliCommand::has_subcommand(name) - } -} - -impl RunPhase for Cli -where - FirrtlArgs: RunPhase, -{ - type Output = (); - fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result { - match &self.subcommand { - CliCommand::Firrtl(c) => { - c.run_with_job(arg, acquired_job)?; - } - CliCommand::Verilog(c) => { - c.run_with_job(arg, acquired_job)?; - } - CliCommand::Formal(c) => { - c.run_with_job(arg, acquired_job)?; - } - } - Ok(()) - } -} - -impl Cli { - /// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`] - pub fn parse() -> Self { - clap::Parser::parse() - } - /// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`] - pub fn run(&self, top_module: T) -> Result<()> - where - Self: RunPhase, - { - RunPhase::run(self, top_module) - } -} diff --git a/crates/fayalite/src/firrtl.rs b/crates/fayalite/src/firrtl.rs index b4a1d14..a83f269 100644 --- a/crates/fayalite/src/firrtl.rs +++ b/crates/fayalite/src/firrtl.rs @@ -4,9 +4,10 @@ use crate::{ annotations::{ Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation, - DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, + DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation, }, array::Array, + build::{ToArgs, WriteArgs}, bundle::{Bundle, BundleField, BundleType}, clock::Clock, enum_::{Enum, EnumType, EnumVariant}, @@ -23,9 +24,9 @@ use crate::{ memory::{Mem, PortKind, PortName, ReadUnderWrite}, module::{ AnnotatedModuleIO, Block, ExternModuleBody, 
ExternModuleParameter, - ExternModuleParameterValue, Module, ModuleBody, NameOptId, NormalModuleBody, Stmt, - StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg, - StmtWire, + ExternModuleParameterValue, Module, ModuleBody, ModuleIO, NameId, NameOptId, + NormalModuleBody, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, + StmtMatch, StmtReg, StmtWire, transform::{ simplify_enums::{SimplifyEnumsError, SimplifyEnumsKind, simplify_enums}, simplify_memories::simplify_memories, @@ -42,7 +43,7 @@ use crate::{ use bitvec::slice::BitSlice; use clap::value_parser; use num_traits::Signed; -use serde::Serialize; +use serde::{Deserialize, Serialize}; use std::{ cell::{Cell, RefCell}, cmp::Ordering, @@ -52,7 +53,7 @@ use std::{ fs, hash::Hash, io, - ops::Range, + ops::{ControlFlow, Range}, path::{Path, PathBuf}, rc::Rc, }; @@ -404,10 +405,10 @@ impl TypeState { self.next_type_name.set(id + 1); Ident(Intern::intern_owned(format!("Ty{id}"))) } - fn get_bundle_field(&mut self, ty: Bundle, name: Interned) -> Result { + fn get_bundle_field(&mut self, ty: Bundle, name: Interned) -> Result { Ok(self.bundle_ns(ty)?.borrow_mut().get(name)) } - fn bundle_def(&self, ty: Bundle) -> Result<(Ident, Rc>)> { + fn bundle_def(&self, ty: Bundle) -> Result<(Ident, Rc>), FirrtlError> { self.bundle_defs.get_or_make(ty, |&ty, definitions| { let mut ns = Namespace::default(); let mut body = String::new(); @@ -428,13 +429,13 @@ impl TypeState { Ok((name, Rc::new(RefCell::new(ns)))) }) } - fn bundle_ty(&self, ty: Bundle) -> Result { + fn bundle_ty(&self, ty: Bundle) -> Result { Ok(self.bundle_def(ty)?.0) } - fn bundle_ns(&self, ty: Bundle) -> Result>> { + fn bundle_ns(&self, ty: Bundle) -> Result>, FirrtlError> { Ok(self.bundle_def(ty)?.1) } - fn enum_def(&self, ty: Enum) -> Result<(Ident, Rc)> { + fn enum_def(&self, ty: Enum) -> Result<(Ident, Rc), FirrtlError> { self.enum_defs.get_or_make(ty, |&ty, definitions| { let mut variants = Namespace::default(); let mut body = String::new(); @@ -461,13 +462,13 @@ impl TypeState { )) }) } - fn enum_ty(&self, ty: Enum) -> Result { + fn enum_ty(&self, ty: Enum) -> Result { Ok(self.enum_def(ty)?.0) } - fn get_enum_variant(&mut self, ty: Enum, name: Interned) -> Result { + fn get_enum_variant(&mut self, ty: Enum, name: Interned) -> Result { Ok(self.enum_def(ty)?.1.variants.borrow_mut().get(name)) } - fn ty(&self, ty: T) -> Result { + fn ty(&self, ty: T) -> Result { Ok(match ty.canonical() { CanonicalType::Bundle(ty) => self.bundle_ty(ty)?.to_string(), CanonicalType::Enum(ty) => self.enum_ty(ty)?.to_string(), @@ -485,7 +486,7 @@ impl TypeState { CanonicalType::Reset(Reset {}) => "Reset".into(), CanonicalType::PhantomConst(_) => "{}".into(), CanonicalType::DynSimOnly(_) => { - return Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()); + return Err(FirrtlError::SimOnlyValuesAreNotPermitted); } }) } @@ -1206,9 +1207,7 @@ impl<'a> Exporter<'a> { | CanonicalType::AsyncReset(_) | CanonicalType::Reset(_) => Ok(format!("asUInt({value_str})")), CanonicalType::PhantomConst(_) => Ok("UInt<0>(0)".into()), - CanonicalType::DynSimOnly(_) => { - Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()) - } + CanonicalType::DynSimOnly(_) => Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()), } } fn expr_cast_bits_to_bundle( @@ -1429,9 +1428,7 @@ impl<'a> Exporter<'a> { definitions.add_definition_line(format_args!("{extra_indent}invalidate {retval}")); return Ok(retval.to_string()); } - CanonicalType::DynSimOnly(_) => { - 
Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()) - } + CanonicalType::DynSimOnly(_) => Err(FirrtlError::SimOnlyValuesAreNotPermitted.into()), } } fn expr_unary( @@ -1907,6 +1904,7 @@ impl<'a> Exporter<'a> { class: str::to_string(class), additional_fields: (*additional_fields).into(), }, + Annotation::XdcLocation(_) | Annotation::XdcIOStandard(_) => return, }; self.annotations.push(FirrtlAnnotation { data, @@ -2677,21 +2675,12 @@ impl FileBackendTrait for TestBackend { fn export_impl( file_backend: &mut dyn WrappedFileBackendTrait, - mut top_module: Interned>, + top_module: Interned>, options: ExportOptions, ) -> Result<(), WrappedError> { - let ExportOptions { - simplify_memories: do_simplify_memories, - simplify_enums: do_simplify_enums, - __private: _, - } = options; - if let Some(kind) = do_simplify_enums { - top_module = - simplify_enums(top_module, kind).map_err(|e| file_backend.simplify_enums_error(e))?; - } - if do_simplify_memories { - top_module = simplify_memories(top_module); - } + let top_module = options + .prepare_top_module(top_module) + .map_err(|e| file_backend.simplify_enums_error(e))?; let indent_depth = Cell::new(0); let mut global_ns = Namespace::default(); let circuit_name = global_ns.get(top_module.name_id()); @@ -2753,14 +2742,23 @@ impl clap::builder::TypedValueParser for OptionSimplifyEnumsKindValueParser { #[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct ExportOptionsPrivate(()); -#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash)] +impl ExportOptionsPrivate { + fn private_new() -> Self { + Self(()) + } +} + +#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] pub struct ExportOptions { #[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)] + #[serde(default = "ExportOptions::default_simplify_memories")] pub simplify_memories: bool, #[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")] - pub simplify_enums: std::option::Option, + #[serde(default = "ExportOptions::default_simplify_enums")] + pub simplify_enums: std::option::Option, // use std::option::Option instead of Option to avoid clap mis-parsing #[doc(hidden)] #[clap(skip = ExportOptionsPrivate(()))] + #[serde(skip, default = "ExportOptionsPrivate::private_new")] /// `#[non_exhaustive]` except allowing struct update syntax pub __private: ExportOptionsPrivate, } @@ -2771,7 +2769,34 @@ impl fmt::Debug for ExportOptions { } } +impl ToArgs for ExportOptions { + fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) { + let Self { + simplify_memories, + simplify_enums, + __private: ExportOptionsPrivate(()), + } = *self; + if !simplify_memories { + args.write_str_arg("--no-simplify-memories"); + } + let simplify_enums = simplify_enums.map(|v| { + clap::ValueEnum::to_possible_value(&v).expect("there are no skipped variants") + }); + let simplify_enums = match &simplify_enums { + None => OptionSimplifyEnumsKindValueParser::NONE_NAME, + Some(v) => v.get_name(), + }; + args.write_arg(format_args!("--simplify-enums={simplify_enums}")); + } +} + impl ExportOptions { + fn default_simplify_memories() -> bool { + true + } + fn default_simplify_enums() -> Option { + Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts) + } fn debug_fmt( &self, f: &mut fmt::Formatter<'_>, @@ -2823,18 +2848,47 @@ impl ExportOptions { if f.alternate() { "\n}" } else { " }" } ) } + fn prepare_top_module_helper( + self, + mut top_module: Interned>, + ) -> Result>, SimplifyEnumsError> { + let Self { + simplify_memories: 
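// Sketch of the serde-default pattern `ExportOptions` uses above: defaults
// live in named functions so the serde attributes and the manual `Default`
// impl share one source of truth. Standalone illustration with a
// hypothetical type:
#[derive(serde::Serialize, serde::Deserialize)]
struct Options {
    #[serde(default = "Options::default_simplify_memories")]
    simplify_memories: bool,
}

impl Options {
    fn default_simplify_memories() -> bool {
        true
    }
}

impl Default for Options {
    fn default() -> Self {
        Self {
            simplify_memories: Self::default_simplify_memories(),
        }
    }
}
// With this, serde_json::from_str::<Options>("{}") and Options::default()
// agree on the missing field.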
do_simplify_memories, + simplify_enums: do_simplify_enums, + __private: _, + } = self; + if let Some(kind) = do_simplify_enums { + top_module = simplify_enums(top_module, kind)?; + } + if do_simplify_memories { + top_module = simplify_memories(top_module); + } + Ok(top_module) + } + pub fn prepare_top_module( + self, + top_module: impl AsRef>, + ) -> Result>, SimplifyEnumsError> { + self.prepare_top_module_helper(top_module.as_ref().canonical().intern()) + } } impl Default for ExportOptions { fn default() -> Self { Self { - simplify_memories: true, - simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts), + simplify_memories: Self::default_simplify_memories(), + simplify_enums: Self::default_simplify_enums(), __private: ExportOptionsPrivate(()), } } } +pub fn get_circuit_name(top_module_name_id: NameId) -> Interned { + let mut global_ns = Namespace::default(); + let circuit_name = global_ns.get(top_module_name_id); + circuit_name.0 +} + pub fn export( file_backend: B, top_module: &Module, @@ -2846,6 +2900,497 @@ pub fn export( }) } +#[derive(Debug)] +#[non_exhaustive] +pub enum ScalarizedModuleABIError { + SimOnlyValuesAreNotPermitted, + SimplifyEnumsError(SimplifyEnumsError), +} + +impl fmt::Display for ScalarizedModuleABIError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted => { + FirrtlError::SimOnlyValuesAreNotPermitted.fmt(f) + } + ScalarizedModuleABIError::SimplifyEnumsError(e) => e.fmt(f), + } + } +} + +impl std::error::Error for ScalarizedModuleABIError {} + +impl From for ScalarizedModuleABIError { + fn from(value: SimplifyEnumsError) -> Self { + Self::SimplifyEnumsError(value) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash)] +pub enum ScalarizedModuleABIPortItem { + Group(ScalarizedModuleABIPortGroup), + Port(ScalarizedModuleABIPort), +} + +impl ScalarizedModuleABIPortItem { + pub fn module_io(self) -> ModuleIO { + *self + .target() + .base() + .module_io() + .expect("known to be ModuleIO") + } + pub fn target(self) -> Interned { + match self { + Self::Group(v) => v.target(), + Self::Port(v) => v.target(), + } + } + fn for_each_port_and_annotations_helper< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + &self, + parent: Option<&ScalarizedModuleABIAnnotations<'_>>, + f: &mut F, + ) -> ControlFlow { + match self { + Self::Group(v) => v.for_each_port_and_annotations_helper(parent, f), + Self::Port(port) => f( + port, + ScalarizedModuleABIAnnotations::new(parent, port.annotations.iter()), + ), + } + } + pub fn for_each_port_and_annotations< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + self, + mut f: F, + ) -> ControlFlow { + self.for_each_port_and_annotations_helper(None, &mut f) + } +} + +impl fmt::Debug for ScalarizedModuleABIPortItem { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Group(v) => v.fmt(f), + Self::Port(v) => v.fmt(f), + } + } +} + +#[derive(Debug, Clone)] +pub struct ScalarizedModuleABIAnnotations<'a> { + parent: Option<&'a ScalarizedModuleABIAnnotations<'a>>, + parent_len: usize, + annotations: std::slice::Iter<'a, TargetedAnnotation>, +} + +impl<'a> ScalarizedModuleABIAnnotations<'a> { + fn new( + parent: Option<&'a ScalarizedModuleABIAnnotations<'a>>, + annotations: std::slice::Iter<'a, TargetedAnnotation>, + ) -> Self { + Self { + parent, + parent_len: parent.map_or(0, |parent| 
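// `for_each_port_and_annotations` above returns `ControlFlow<B>` instead of
// a bool so callers can both short-circuit and carry a value out of the
// walk. A minimal std-only sketch of the same shape over a plain tree:
use std::ops::ControlFlow;

enum Tree {
    Leaf(u32),
    Group(Vec<Tree>),
}

impl Tree {
    fn for_each_leaf<B>(&self, f: &mut impl FnMut(u32) -> ControlFlow<B>) -> ControlFlow<B> {
        match self {
            Tree::Leaf(v) => f(*v),
            Tree::Group(children) => {
                for child in children {
                    // propagate `Break` upward, otherwise keep walking
                    if let ControlFlow::Break(b) = child.for_each_leaf(&mut *f) {
                        return ControlFlow::Break(b);
                    }
                }
                ControlFlow::Continue(())
            }
        }
    }
}
// e.g. finding the first leaf greater than 9:
// tree.for_each_leaf(&mut |v| if v > 9 { ControlFlow::Break(v) } else { ControlFlow::Continue(()) })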
parent.len()), + annotations, + } + } +} + +impl<'a> Default for ScalarizedModuleABIAnnotations<'a> { + fn default() -> Self { + Self { + parent: None, + parent_len: 0, + annotations: Default::default(), + } + } +} + +impl<'a> Iterator for ScalarizedModuleABIAnnotations<'a> { + type Item = &'a TargetedAnnotation; + + fn next(&mut self) -> Option { + loop { + if let retval @ Some(_) = self.annotations.next() { + break retval; + } + *self = self.parent?.clone(); + } + } + + fn size_hint(&self) -> (usize, Option) { + let len = self.len(); + (len, Some(len)) + } + + fn fold(mut self, mut init: B, mut f: F) -> B + where + F: FnMut(B, Self::Item) -> B, + { + loop { + let Self { + parent, + parent_len: _, + annotations, + } = self; + init = annotations.fold(init, &mut f); + let Some(next) = parent else { + break; + }; + self = next.clone(); + } + init + } +} + +impl std::iter::FusedIterator for ScalarizedModuleABIAnnotations<'_> {} + +impl ExactSizeIterator for ScalarizedModuleABIAnnotations<'_> { + fn len(&self) -> usize { + self.parent_len + self.annotations.len() + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ScalarizedModuleABIPortGroup { + target: Interned, + common_annotations: Interned<[TargetedAnnotation]>, + children: Interned<[ScalarizedModuleABIPortItem]>, +} + +impl ScalarizedModuleABIPortGroup { + pub fn module_io(self) -> ModuleIO { + *self + .target + .base() + .module_io() + .expect("known to be ModuleIO") + } + pub fn target(self) -> Interned { + self.target + } + pub fn common_annotations(self) -> Interned<[TargetedAnnotation]> { + self.common_annotations + } + pub fn children(self) -> Interned<[ScalarizedModuleABIPortItem]> { + self.children + } + fn for_each_port_and_annotations_helper< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + &self, + parent: Option<&ScalarizedModuleABIAnnotations<'_>>, + f: &mut F, + ) -> ControlFlow { + let parent = ScalarizedModuleABIAnnotations::new(parent, self.common_annotations.iter()); + for item in &self.children { + item.for_each_port_and_annotations_helper(Some(&parent), f)?; + } + ControlFlow::Continue(()) + } + pub fn for_each_port_and_annotations< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + self, + mut f: F, + ) -> ControlFlow { + self.for_each_port_and_annotations_helper(None, &mut f) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ScalarizedModuleABIPort { + target: Interned, + annotations: Interned<[TargetedAnnotation]>, + scalarized_name: Interned, +} + +impl ScalarizedModuleABIPort { + pub fn module_io(self) -> ModuleIO { + *self + .target + .base() + .module_io() + .expect("known to be ModuleIO") + } + pub fn target(self) -> Interned { + self.target + } + pub fn annotations(self) -> Interned<[TargetedAnnotation]> { + self.annotations + } + pub fn scalarized_name(self) -> Interned { + self.scalarized_name + } +} + +enum ScalarizeTreeNodeBody { + Leaf { + scalarized_name: Interned, + }, + Bundle { + ty: Bundle, + fields: Vec, + }, + Array { + elements: Vec, + }, +} + +struct ScalarizeTreeNode { + target: Interned, + annotations: Vec, + body: ScalarizeTreeNodeBody, +} + +impl ScalarizeTreeNode { + #[track_caller] + fn find_target(&mut self, annotation_target: Interned) -> &mut Self { + match *annotation_target { + Target::Base(_) => { + assert_eq!( + annotation_target, self.target, + "annotation not on correct ModuleIO", + ); + self + } + 
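// The annotations iterator above walks a parent-linked chain of scopes
// without allocating: local annotations come first, then the enclosing
// group's, and `len` is the local remainder plus the parent chain's length
// captured at construction. A simplified std-only version over `&[T]`:
struct ScopeChain<'a, T> {
    parent: Option<&'a ScopeChain<'a, T>>,
    items: std::slice::Iter<'a, T>,
}

// manual `Clone` so no `T: Clone` bound is needed (only handles are cloned,
// never `T` itself)
impl<'a, T> Clone for ScopeChain<'a, T> {
    fn clone(&self) -> Self {
        Self {
            parent: self.parent,
            items: self.items.clone(),
        }
    }
}

impl<'a, T> Iterator for ScopeChain<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        loop {
            if let item @ Some(_) = self.items.next() {
                return item;
            }
            // local scope exhausted: continue iterating in the parent scope
            *self = self.parent?.clone();
        }
    }
}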
Target::Child(target_child) => { + let parent = self.find_target(target_child.parent()); + match *target_child.path_element() { + TargetPathElement::BundleField(TargetPathBundleField { name }) => { + match parent.body { + ScalarizeTreeNodeBody::Leaf { .. } => parent, + ScalarizeTreeNodeBody::Bundle { ty, ref mut fields } => { + &mut fields[ty.name_indexes()[&name]] + } + ScalarizeTreeNodeBody::Array { .. } => { + unreachable!("types are known to match") + } + } + } + TargetPathElement::ArrayElement(TargetPathArrayElement { index }) => { + match parent.body { + ScalarizeTreeNodeBody::Leaf { .. } => parent, + ScalarizeTreeNodeBody::Bundle { .. } => { + unreachable!("types are known to match") + } + ScalarizeTreeNodeBody::Array { ref mut elements } => { + &mut elements[index] + } + } + } + TargetPathElement::DynArrayElement(_) => { + unreachable!("annotations are only on static targets"); + } + } + } + } + } + fn into_scalarized_item(self) -> ScalarizedModuleABIPortItem { + let Self { + target, + annotations, + body, + } = self; + match body { + ScalarizeTreeNodeBody::Leaf { scalarized_name } => { + ScalarizedModuleABIPortItem::Port(ScalarizedModuleABIPort { + target, + annotations: Intern::intern_owned(annotations), + scalarized_name, + }) + } + ScalarizeTreeNodeBody::Bundle { fields: items, .. } + | ScalarizeTreeNodeBody::Array { elements: items } => { + ScalarizedModuleABIPortItem::Group(ScalarizedModuleABIPortGroup { + target, + common_annotations: Intern::intern_owned(annotations), + children: Interned::from_iter( + items.into_iter().map(Self::into_scalarized_item), + ), + }) + } + } + } +} + +#[derive(Default)] +struct ScalarizeTreeBuilder { + scalarized_ns: Namespace, + type_state: TypeState, + name: String, +} + +impl ScalarizeTreeBuilder { + #[track_caller] + fn build_bundle( + &mut self, + target: Interned, + ty: Bundle, + ) -> Result { + let mut fields = Vec::with_capacity(ty.fields().len()); + let original_len = self.name.len(); + for BundleField { name, .. } in ty.fields() { + let firrtl_name = self + .type_state + .get_bundle_field(ty, name) + .map_err(|e| match e { + FirrtlError::SimOnlyValuesAreNotPermitted => { + ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted + } + })? 
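// Sketch of the push/truncate backtracking `ScalarizeTreeBuilder` uses when
// scalarizing nested types here: one reusable `String` gets a `_suffix`
// appended per child and is truncated back afterwards, so sibling names
// never see each other's suffixes. Standalone version for pure array
// nesting:
use std::fmt::Write;

fn scalarized_names(name: &mut String, dims: &[usize], out: &mut Vec<String>) {
    match dims {
        [] => out.push(name.clone()), // leaf: the name is fully built
        [len, rest @ ..] => {
            let original_len = name.len();
            for index in 0..*len {
                write!(name, "_{index}").expect("writing to String is infallible");
                scalarized_names(name, rest, out);
                name.truncate(original_len); // backtrack for the next sibling
            }
        }
    }
}
// scalarized_names(&mut "io".to_string(), &[2, 2], &mut out) pushes
// "io_0_0", "io_0_1", "io_1_0", "io_1_1".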
+ .0; + write!(self.name, "_{firrtl_name}").expect("writing to String is infallible"); + fields.push( + self.build( + target + .join(TargetPathElement::intern_sized( + TargetPathBundleField { name }.into(), + )) + .intern_sized(), + )?, + ); + self.name.truncate(original_len); + } + Ok(ScalarizeTreeNode { + target, + annotations: Vec::new(), + body: ScalarizeTreeNodeBody::Bundle { ty, fields }, + }) + } + #[track_caller] + fn build( + &mut self, + target: Interned, + ) -> Result { + Ok(match target.canonical_ty() { + CanonicalType::UInt(_) + | CanonicalType::SInt(_) + | CanonicalType::Bool(_) + | CanonicalType::Enum(_) + | CanonicalType::AsyncReset(_) + | CanonicalType::SyncReset(_) + | CanonicalType::Reset(_) + | CanonicalType::Clock(_) => { + let scalarized_name = self.scalarized_ns.get(str::intern(&self.name)).0; + ScalarizeTreeNode { + target, + annotations: Vec::new(), + body: ScalarizeTreeNodeBody::Leaf { scalarized_name }, + } + } + CanonicalType::Array(ty) => { + let mut elements = Vec::with_capacity(ty.len()); + let original_len = self.name.len(); + for index in 0..ty.len() { + write!(self.name, "_{index}").expect("writing to String is infallible"); + elements.push( + self.build( + target + .join(TargetPathElement::intern_sized( + TargetPathArrayElement { index }.into(), + )) + .intern_sized(), + )?, + ); + self.name.truncate(original_len); + } + ScalarizeTreeNode { + target, + annotations: Vec::new(), + body: ScalarizeTreeNodeBody::Array { elements }, + } + } + CanonicalType::Bundle(ty) => self.build_bundle(target, ty)?, + CanonicalType::PhantomConst(_) => { + self.build_bundle(target, Bundle::new(Interned::default()))? + } + CanonicalType::DynSimOnly(_) => { + return Err(ScalarizedModuleABIError::SimOnlyValuesAreNotPermitted); + } + }) + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +pub struct ScalarizedModuleABI { + module_io: Interned<[AnnotatedModuleIO]>, + items: Interned<[ScalarizedModuleABIPortItem]>, +} + +impl ScalarizedModuleABI { + #[track_caller] + fn from_io(module_io: Interned<[AnnotatedModuleIO]>) -> Result { + let mut firrtl_ns = Namespace::default(); + let mut tree_builder = ScalarizeTreeBuilder::default(); + let mut items = Vec::new(); + for module_io in module_io { + let firrtl_name = firrtl_ns.get(module_io.module_io.name_id()); + tree_builder.name.clear(); + tree_builder.name.push_str(&firrtl_name.0); + let mut tree = tree_builder.build(Target::from(module_io.module_io).intern_sized())?; + for annotation in module_io.annotations { + tree.find_target(annotation.target()) + .annotations + .push(annotation); + } + items.push(tree.into_scalarized_item()); + } + Ok(Self { + module_io, + items: Intern::intern_owned(items), + }) + } + #[track_caller] + pub fn new( + module: impl AsRef>, + options: ExportOptions, + ) -> Result { + Self::from_io(options.prepare_top_module(module)?.module_io()) + } + pub fn module_io(&self) -> Interned<[AnnotatedModuleIO]> { + self.module_io + } + pub fn items(&self) -> Interned<[ScalarizedModuleABIPortItem]> { + self.items + } + pub fn for_each_port_and_annotations< + F: for<'a> FnMut( + &'a ScalarizedModuleABIPort, + ScalarizedModuleABIAnnotations<'a>, + ) -> ControlFlow, + B, + >( + self, + mut f: F, + ) -> ControlFlow { + for item in &self.items { + item.for_each_port_and_annotations_helper(None, &mut f)?; + } + ControlFlow::Continue(()) + } +} + #[doc(hidden)] #[track_caller] pub fn assert_export_firrtl_impl(top_module: &Module, expected: TestBackend) { diff --git a/crates/fayalite/src/intern.rs 
b/crates/fayalite/src/intern.rs index af91f0a..9d57146 100644 --- a/crates/fayalite/src/intern.rs +++ b/crates/fayalite/src/intern.rs @@ -9,11 +9,13 @@ use std::{ any::{Any, TypeId}, borrow::{Borrow, Cow}, cmp::Ordering, + ffi::OsStr, fmt, hash::{BuildHasher, Hash, Hasher}, iter::FusedIterator, marker::PhantomData, ops::Deref, + path::Path, sync::{Mutex, RwLock}, }; @@ -416,6 +418,12 @@ forward_fmt_trait!(Pointer); forward_fmt_trait!(UpperExp); forward_fmt_trait!(UpperHex); +impl AsRef for Interned { + fn as_ref(&self) -> &T { + self + } +} + #[derive(Clone, Debug)] pub struct InternedSliceIter { slice: Interned<[T]>, @@ -485,6 +493,51 @@ where } } +impl FromIterator for Interned +where + String: FromIterator, +{ + fn from_iter>(iter: T) -> Self { + str::intern_owned(FromIterator::from_iter(iter)) + } +} + +impl AsRef for Interned { + fn as_ref(&self) -> &OsStr { + str::as_ref(self) + } +} + +impl AsRef for Interned { + fn as_ref(&self) -> &Path { + str::as_ref(self) + } +} + +impl From> for clap::builder::Str { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + +impl From> for clap::builder::OsStr { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + +impl From> for clap::builder::StyledStr { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + +impl From> for clap::Id { + fn from(value: Interned) -> Self { + Interned::into_inner(value).into() + } +} + impl From> for Vec { fn from(value: Interned<[T]>) -> Self { Vec::from(&*value) diff --git a/crates/fayalite/src/lib.rs b/crates/fayalite/src/lib.rs index 932464b..e5323f3 100644 --- a/crates/fayalite/src/lib.rs +++ b/crates/fayalite/src/lib.rs @@ -87,8 +87,8 @@ pub mod _docs; pub mod annotations; pub mod array; +pub mod build; pub mod bundle; -pub mod cli; pub mod clock; pub mod enum_; pub mod expr; @@ -104,6 +104,7 @@ pub mod reg; pub mod reset; pub mod sim; pub mod source_location; +pub mod target; pub mod testing; pub mod ty; pub mod util; diff --git a/crates/fayalite/src/module.rs b/crates/fayalite/src/module.rs index a81893d..aa7a673 100644 --- a/crates/fayalite/src/module.rs +++ b/crates/fayalite/src/module.rs @@ -833,6 +833,8 @@ pub struct AnnotatedModuleIO { pub module_io: ModuleIO, } +impl Copy for AnnotatedModuleIO {} + #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub enum ModuleKind { Extern, @@ -1212,6 +1214,12 @@ pub struct Module { module_annotations: Interned<[Annotation]>, } +impl AsRef for Module { + fn as_ref(&self) -> &Self { + self + } +} + #[derive(Default)] struct DebugFmtModulesState { seen: HashSet, diff --git a/crates/fayalite/src/module/transform/deduce_resets.rs b/crates/fayalite/src/module/transform/deduce_resets.rs index 57197ad..4fa931e 100644 --- a/crates/fayalite/src/module/transform/deduce_resets.rs +++ b/crates/fayalite/src/module/transform/deduce_resets.rs @@ -1817,6 +1817,8 @@ impl_run_pass_copy!([] UInt); impl_run_pass_copy!([] usize); impl_run_pass_copy!([] FormalKind); impl_run_pass_copy!([] PhantomConst); +impl_run_pass_copy!([] crate::build::vendor::xilinx::XdcIOStandardAnnotation); +impl_run_pass_copy!([] crate::build::vendor::xilinx::XdcLocationAnnotation); macro_rules! impl_run_pass_for_struct { ( @@ -2217,6 +2219,8 @@ impl_run_pass_for_enum! 
{ BlackBoxPath(v), DocString(v), CustomFirrtl(v), + XdcLocation(v), + XdcIOStandard(v), } } diff --git a/crates/fayalite/src/module/transform/simplify_enums.rs b/crates/fayalite/src/module/transform/simplify_enums.rs index ccdecf6..0839881 100644 --- a/crates/fayalite/src/module/transform/simplify_enums.rs +++ b/crates/fayalite/src/module/transform/simplify_enums.rs @@ -22,6 +22,7 @@ use crate::{ wire::Wire, }; use core::fmt; +use serde::{Deserialize, Serialize}; #[derive(Debug)] pub enum SimplifyEnumsError { @@ -955,12 +956,15 @@ impl Folder for State { } } -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)] +#[serde(rename_all = "kebab-case")] pub enum SimplifyEnumsKind { SimplifyToEnumsWithNoBody, #[clap(name = "replace-with-bundle-of-uints")] + #[serde(rename = "replace-with-bundle-of-uints")] ReplaceWithBundleOfUInts, #[clap(name = "replace-with-uint")] + #[serde(rename = "replace-with-uint")] ReplaceWithUInt, } diff --git a/crates/fayalite/src/module/transform/visit.rs b/crates/fayalite/src/module/transform/visit.rs index 44aabc3..d08ac10 100644 --- a/crates/fayalite/src/module/transform/visit.rs +++ b/crates/fayalite/src/module/transform/visit.rs @@ -7,6 +7,7 @@ use crate::{ DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation, }, array::ArrayType, + build::vendor::xilinx::{XdcIOStandardAnnotation, XdcLocationAnnotation}, bundle::{Bundle, BundleField, BundleType}, clock::Clock, enum_::{Enum, EnumType, EnumVariant}, diff --git a/crates/fayalite/src/prelude.rs b/crates/fayalite/src/prelude.rs index 4fca2ad..5ff3a64 100644 --- a/crates/fayalite/src/prelude.rs +++ b/crates/fayalite/src/prelude.rs @@ -7,8 +7,8 @@ pub use crate::{ DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, }, array::{Array, ArrayType}, + build::{BuildCli, JobParams, RunBuild}, bundle::Bundle, - cli::Cli, clock::{Clock, ClockDomain, ToClock}, enum_::{Enum, HdlNone, HdlOption, HdlSome}, expr::{ @@ -36,6 +36,7 @@ pub use crate::{ value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType}, }, source_location::SourceLocation, + testing::assert_formal, ty::{AsMask, CanonicalType, Type}, util::{ConstUsize, GenericConstUsize}, wire::Wire, diff --git a/crates/fayalite/src/target.rs b/crates/fayalite/src/target.rs new file mode 100644 index 0000000..33ffee9 --- /dev/null +++ b/crates/fayalite/src/target.rs @@ -0,0 +1,202 @@ +// SPDX-License-Identifier: LGPL-3.0-or-later +// See Notices.txt for copyright information + +use crate::{intern::Interned, util::job_server::AcquiredJob}; +use std::{ + any::Any, + fmt, + iter::FusedIterator, + sync::{Arc, Mutex}, +}; + +pub trait Peripheral: Any + Send + Sync + fmt::Debug {} + +pub trait Tool: Any + Send + Sync + fmt::Debug { + fn name(&self) -> Interned; + fn run(&self, acquired_job: &mut AcquiredJob); +} + +pub trait Target: Any + Send + Sync + fmt::Debug { + fn name(&self) -> Interned; + fn peripherals(&self) -> Interned<[Interned]>; +} + +#[derive(Clone)] +struct TargetsMap(Vec<(Interned, Interned)>); + +impl TargetsMap { + fn sort(&mut self) { + self.0.sort_by(|(k1, _), (k2, _)| str::cmp(k1, k2)); + self.0.dedup_by_key(|(k, _)| *k); + } + fn from_unsorted_vec(unsorted_vec: Vec<(Interned, Interned)>) -> Self { + let mut retval = Self(unsorted_vec); + retval.sort(); + retval + } + fn extend_from_unsorted_slice(&mut self, additional: &[(Interned, Interned)]) { + self.0.extend_from_slice(additional); + self.sort(); + 
+    }
+}
+
+impl Default for TargetsMap {
+    fn default() -> Self {
+        Self::from_unsorted_vec(vec![
+            // TODO: add default targets here
+        ])
+    }
+}
+
+fn access_targets<F: FnOnce(&mut Option<Arc<TargetsMap>>) -> R, R>(f: F) -> R {
+    static TARGETS: Mutex<Option<Arc<TargetsMap>>> = Mutex::new(None);
+    let mut targets_lock = TARGETS.lock().expect("shouldn't be poisoned");
+    f(&mut targets_lock)
+}
+
+pub fn add_targets<I: IntoIterator<Item = Interned<dyn Target>>>(additional: I) {
+    // run iterator and target methods outside of lock
+    let additional = Vec::from_iter(additional.into_iter().map(|v| (v.name(), v)));
+    access_targets(|targets| {
+        Arc::make_mut(targets.get_or_insert_default()).extend_from_unsorted_slice(&additional);
+    });
+}
+
+pub fn targets() -> TargetsSnapshot {
+    access_targets(|targets| match targets {
+        Some(targets) => TargetsSnapshot {
+            targets: targets.clone(),
+        },
+        None => {
+            let new_targets = Arc::<TargetsMap>::default();
+            *targets = Some(new_targets.clone());
+            TargetsSnapshot {
+                targets: new_targets,
+            }
+        }
+    })
+}
+
+#[derive(Clone)]
+pub struct TargetsSnapshot {
+    targets: Arc<TargetsMap>,
+}
+
+impl TargetsSnapshot {
+    pub fn get(&self, key: &str) -> Option<Interned<dyn Target>> {
+        let index = self
+            .targets
+            .0
+            .binary_search_by_key(&key, |(k, _v)| k)
+            .ok()?;
+        Some(self.targets.0[index].1)
+    }
+    pub fn iter(&self) -> TargetsIter {
+        self.into_iter()
+    }
+    pub fn len(&self) -> usize {
+        self.targets.0.len()
+    }
+}
+
+impl fmt::Debug for TargetsSnapshot {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("TargetsSnapshot ")?;
+        f.debug_map().entries(self).finish()
+    }
+}
+
+impl IntoIterator for &'_ mut TargetsSnapshot {
+    type Item = (Interned<str>, Interned<dyn Target>);
+    type IntoIter = TargetsIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.clone().into_iter()
+    }
+}
+
+impl IntoIterator for &'_ TargetsSnapshot {
+    type Item = (Interned<str>, Interned<dyn Target>);
+    type IntoIter = TargetsIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.clone().into_iter()
+    }
+}
+
+impl IntoIterator for TargetsSnapshot {
+    type Item = (Interned<str>, Interned<dyn Target>);
+    type IntoIter = TargetsIter;
+
+    fn into_iter(self) -> Self::IntoIter {
+        TargetsIter {
+            indexes: 0..self.targets.0.len(),
+            targets: self.targets,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct TargetsIter {
+    targets: Arc<TargetsMap>,
+    indexes: std::ops::Range<usize>,
+}
+
+impl fmt::Debug for TargetsIter {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("TargetsIter ")?;
+        f.debug_map().entries(self.clone()).finish()
+    }
+}
+
+impl Iterator for TargetsIter {
+    type Item = (Interned<str>, Interned<dyn Target>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        Some(self.targets.0[self.indexes.next()?])
+    }
+
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.indexes.size_hint()
+    }
+
+    fn count(self) -> usize {
+        self.indexes.len()
+    }
+
+    fn last(mut self) -> Option<Self::Item> {
+        self.next_back()
+    }
+
+    fn nth(&mut self, n: usize) -> Option<Self::Item> {
+        Some(self.targets.0[self.indexes.nth(n)?])
+    }
+
+    fn fold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
+        self.indexes
+            .fold(init, move |retval, index| f(retval, self.targets.0[index]))
+    }
+}
+
+impl FusedIterator for TargetsIter {}
+
+impl DoubleEndedIterator for TargetsIter {
+    fn next_back(&mut self) -> Option<Self::Item> {
+        Some(self.targets.0[self.indexes.next_back()?])
+    }
+
+    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
+        Some(self.targets.0[self.indexes.nth_back(n)?])
+    }
+
+    fn rfold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
+        self.indexes
+            .rfold(init, move |retval, index| f(retval, self.targets.0[index]))
+    }
+}
+
+impl ExactSizeIterator for TargetsIter {
+    fn len(&self) -> usize {
+        self.indexes.len()
+    }
+}
diff --git a/crates/fayalite/src/testing.rs
b/crates/fayalite/src/testing.rs index b81bc3f..47c7cfa 100644 --- a/crates/fayalite/src/testing.rs +++ b/crates/fayalite/src/testing.rs @@ -1,11 +1,20 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information use crate::{ - cli::{FormalArgs, FormalMode, FormalOutput, RunPhase}, + build::{ + BaseJobArgs, BaseJobKind, JobArgsAndDependencies, JobKindAndArgs, JobParams, NoArgs, + RunBuild, + external::{ExternalCommandArgs, ExternalCommandJobKind}, + firrtl::{FirrtlArgs, FirrtlJobKind}, + formal::{Formal, FormalAdditionalArgs, FormalArgs, FormalMode, WriteSbyFileJobKind}, + verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind}, + }, + bundle::BundleType, firrtl::ExportOptions, + module::Module, util::HashMap, }; -use clap::Parser; +use eyre::eyre; use serde::Deserialize; use std::{ fmt::Write, @@ -14,14 +23,6 @@ use std::{ sync::{Mutex, OnceLock}, }; -fn assert_formal_helper() -> FormalArgs { - static FORMAL_ARGS: OnceLock = OnceLock::new(); - // ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads - FORMAL_ARGS - .get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"])) - .clone() -} - #[derive(Deserialize)] struct CargoMetadata { target_directory: String, @@ -97,26 +98,104 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf { .join(dir) } -#[track_caller] -pub fn assert_formal( - test_name: impl std::fmt::Display, - module: M, - mode: FormalMode, - depth: u64, +fn make_assert_formal_args( + test_name: &dyn std::fmt::Display, + formal_mode: FormalMode, + formal_depth: u64, solver: Option<&str>, export_options: ExportOptions, -) where - FormalArgs: RunPhase, -{ - let mut args = assert_formal_helper(); - args.verilog.firrtl.base.redirect_output_for_rust_test = true; - args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name)); - args.verilog.firrtl.export_options = export_options; - args.verilog.debug = true; - args.mode = mode; - args.depth = depth; - if let Some(solver) = solver { - args.solver = solver.into(); - } - args.run(module).expect("testing::assert_formal() failed"); +) -> eyre::Result>> { + let args = JobKindAndArgs { + kind: BaseJobKind, + args: BaseJobArgs::from_output_dir_and_env( + get_assert_formal_target_path(&test_name) + .into_os_string() + .into_string() + .map_err(|_| eyre!("path is not valid UTF-8"))?, + ), + }; + let dependencies = JobArgsAndDependencies { + args, + dependencies: (), + }; + let args = JobKindAndArgs { + kind: FirrtlJobKind, + args: FirrtlArgs { export_options }, + }; + let dependencies = JobArgsAndDependencies { args, dependencies }; + let args = JobKindAndArgs { + kind: ExternalCommandJobKind::new(), + args: ExternalCommandArgs::resolve_program_path( + None, + UnadjustedVerilogArgs { + firtool_extra_args: vec![], + verilog_dialect: None, + verilog_debug: true, + }, + )?, + }; + let dependencies = JobArgsAndDependencies { args, dependencies }; + let args = JobKindAndArgs { + kind: VerilogJobKind, + args: VerilogJobArgs {}, + }; + let dependencies = JobArgsAndDependencies { args, dependencies }; + let args = JobKindAndArgs { + kind: WriteSbyFileJobKind, + args: FormalArgs { + sby_extra_args: vec![], + formal_mode, + formal_depth, + formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(), + smtbmc_extra_args: vec![], + }, + }; + let dependencies = JobArgsAndDependencies { args, dependencies }; + let args = JobKindAndArgs { + kind: 
ExternalCommandJobKind::new(), + args: ExternalCommandArgs::resolve_program_path(None, FormalAdditionalArgs {})?, + }; + Ok(JobArgsAndDependencies { args, dependencies }) +} + +pub fn try_assert_formal>, T: BundleType>( + test_name: impl std::fmt::Display, + module: M, + formal_mode: FormalMode, + formal_depth: u64, + solver: Option<&str>, + export_options: ExportOptions, +) -> eyre::Result<()> { + const APP_NAME: &'static str = "fayalite::testing::assert_formal"; + make_assert_formal_args( + &test_name, + formal_mode, + formal_depth, + solver, + export_options, + )? + .run( + |NoArgs {}| Ok(JobParams::new(module, APP_NAME)), + clap::Command::new(APP_NAME), // not actually used, so we can use an arbitrary value + ) +} + +#[track_caller] +pub fn assert_formal>, T: BundleType>( + test_name: impl std::fmt::Display, + module: M, + formal_mode: FormalMode, + formal_depth: u64, + solver: Option<&str>, + export_options: ExportOptions, +) { + try_assert_formal( + test_name, + module, + formal_mode, + formal_depth, + solver, + export_options, + ) + .expect("testing::assert_formal() failed"); } diff --git a/crates/fayalite/src/util.rs b/crates/fayalite/src/util.rs index e85bc9c..23a6852 100644 --- a/crates/fayalite/src/util.rs +++ b/crates/fayalite/src/util.rs @@ -36,8 +36,11 @@ pub use scoped_ref::ScopedRef; pub(crate) use misc::chain; #[doc(inline)] pub use misc::{ - BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter, interned_bit, - iter_eq_by, slice_range, try_slice_range, + BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter, + SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest, + SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, serialize_to_json_ascii, + serialize_to_json_ascii_pretty, serialize_to_json_ascii_pretty_with_indent, slice_range, + try_slice_range, }; pub mod job_server; diff --git a/crates/fayalite/src/util/job_server.rs b/crates/fayalite/src/util/job_server.rs index 376ddc0..e58bba8 100644 --- a/crates/fayalite/src/util/job_server.rs +++ b/crates/fayalite/src/util/job_server.rs @@ -1,192 +1,156 @@ // SPDX-License-Identifier: LGPL-3.0-or-later // See Notices.txt for copyright information -use ctor::ctor; -use jobslot::{Acquired, Client}; +use ctor::{ctor, dtor}; +use jobslot::Client; use std::{ ffi::OsString, - mem, + io, mem, num::NonZeroUsize, - sync::{Condvar, Mutex, Once, OnceLock}, - thread::spawn, + sync::{Mutex, MutexGuard}, }; -fn get_or_make_client() -> &'static Client { - #[ctor] - static CLIENT: OnceLock = unsafe { - match Client::from_env() { - Some(client) => OnceLock::from(client), - None => OnceLock::new(), - } - }; +#[ctor] +static CLIENT: Mutex>> = unsafe { Mutex::new(Some(Client::from_env())) }; - CLIENT.get_or_init(|| { - let mut available_parallelism = None; - let mut args = std::env::args_os().skip(1); - while let Some(arg) = args.next() { - const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads"; - if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) { - match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) { - Some(b'=') => { - let mut arg = arg.into_encoded_bytes(); - arg.drain(..=TEST_THREADS_OPTION.len()); - available_parallelism = Some(arg); - break; +#[dtor] +fn drop_client() { + drop( + match CLIENT.lock() { + Ok(v) => v, + Err(e) => e.into_inner(), + } + .take(), + ); +} + +fn get_or_make_client() -> Client { + CLIENT + .lock() + .expect("shouldn't have panicked") + .as_mut() + .expect("shutting down") + .get_or_insert_with(|| { + let mut 
+fn get_or_make_client() -> Client {
+    CLIENT
+        .lock()
+        .expect("shouldn't have panicked")
+        .as_mut()
+        .expect("shutting down")
+        .get_or_insert_with(|| {
+            let mut available_parallelism = None;
+            let mut args = std::env::args_os().skip(1);
+            while let Some(arg) = args.next() {
+                const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
+                if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
+                    match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
+                        Some(b'=') => {
+                            let mut arg = arg.into_encoded_bytes();
+                            arg.drain(..=TEST_THREADS_OPTION.len());
+                            available_parallelism = Some(arg);
+                            break;
+                        }
+                        None => {
+                            available_parallelism = args.next().map(OsString::into_encoded_bytes);
+                            break;
+                        }
+                        _ => {}
+                    }
+                }
+            }
+            let available_parallelism = if let Some(available_parallelism) = available_parallelism
+                .as_deref()
+                .and_then(|v| std::str::from_utf8(v).ok())
+                .and_then(|v| v.parse().ok())
+            {
+                available_parallelism
+            } else if let Ok(available_parallelism) = std::thread::available_parallelism() {
+                available_parallelism
+            } else {
+                NonZeroUsize::new(1).unwrap()
+            };
+            Client::new_with_fifo(available_parallelism.get() - 1)
+                .expect("failed to create job server")
+        })
+        .clone()
+}
 
 struct State {
+    obtained_count: usize,
     waiting_count: usize,
-    available: Vec<Acquired>,
-    implicit_available: bool,
-}
-
-impl State {
-    fn total_available(&self) -> usize {
-        self.available.len() + self.implicit_available as usize
-    }
-    fn additional_waiting(&self) -> usize {
-        self.waiting_count.saturating_sub(self.total_available())
-    }
 }
 
 static STATE: Mutex<State> = Mutex::new(State {
+    obtained_count: 0,
     waiting_count: 0,
-    available: Vec::new(),
-    implicit_available: true,
 });
-static COND_VAR: Condvar = Condvar::new();
-
-#[derive(Debug)]
-enum AcquiredJobInner {
-    FromJobServer(Acquired),
-    ImplicitJob,
-}
 
 #[derive(Debug)]
 pub struct AcquiredJob {
-    job: AcquiredJobInner,
+    client: Client,
 }
 
 impl AcquiredJob {
-    fn start_acquire_thread() {
-        static STARTED_THREAD: Once = Once::new();
-        STARTED_THREAD.call_once(|| {
-            spawn(|| {
-                let mut acquired = None;
-                let client = get_or_make_client();
-                let mut state = STATE.lock().unwrap();
-                loop {
-                    state = if state.additional_waiting() == 0 {
-                        if acquired.is_some() {
-                            drop(state);
-                            drop(acquired.take()); // drop Acquired outside of lock
-                            STATE.lock().unwrap()
-                        } else {
-                            COND_VAR.wait(state).unwrap()
-                        }
-                    } else if acquired.is_some() {
-                        // allocate space before moving Acquired to ensure we
-                        // drop Acquired outside of the lock on panic
-                        state.available.reserve(1);
-                        state.available.push(acquired.take().unwrap());
-                        COND_VAR.notify_all();
-                        state
-                    } else {
-                        drop(state);
-                        acquired = Some(
-                            client
-                                .acquire()
-                                .expect("can't acquire token from job server"),
-                        );
-                        STATE.lock().unwrap()
-                    };
-                }
-            });
-        });
-    }
-    fn acquire_inner(block: bool) -> Option<Self> {
-        Self::start_acquire_thread();
-        let mut state = STATE.lock().unwrap();
-        loop {
-            if let Some(acquired) = state.available.pop() {
-                return Some(Self {
-                    job: AcquiredJobInner::FromJobServer(acquired),
-                });
-            }
-            if state.implicit_available {
-                state.implicit_available = false;
-                return Some(Self {
-                    job: AcquiredJobInner::ImplicitJob,
-                });
-            }
-            if !block {
-                return None;
-            }
-            state.waiting_count += 1;
-            state = COND_VAR.wait(state).unwrap();
-            state.waiting_count -= 1;
-        }
-    }
-    pub fn try_acquire() -> Option<Self> {
-        Self::acquire_inner(false)
-    }
-    pub fn acquire() -> Self {
-        Self::acquire_inner(true).expect("failed to acquire token")
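+    // blocks until a jobserver token is available. the first acquirer takes the
+    // process's implicit token without blocking; everyone else waits in
+    // acquire_raw(). the Waiting guard keeps STATE.waiting_count correct even if
+    // acquire_raw() fails or the thread unwinds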
+    pub fn acquire() -> io::Result<Self> {
+        let client = get_or_make_client();
+        struct Waiting {}
+
+        impl Waiting {
+            fn done(self) -> MutexGuard<'static, State> {
+                mem::forget(self);
+                let mut state = STATE.lock().unwrap();
+                state.waiting_count -= 1;
+                state
+            }
+        }
+        impl Drop for Waiting {
+            fn drop(&mut self) {
+                STATE.lock().unwrap().waiting_count -= 1;
+            }
+        }
+        let mut state = STATE.lock().unwrap();
+        if state.obtained_count == 0 && state.waiting_count == 0 {
+            state.obtained_count = 1; // get implicit token
+            return Ok(Self { client });
+        }
+        state.waiting_count += 1;
+        drop(state);
+        let waiting = Waiting {};
+        client.acquire_raw()?;
+        state = waiting.done();
+        state.obtained_count = state
+            .obtained_count
+            .checked_add(1)
+            .ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
+        drop(state);
+        Ok(Self { client })
     }
     pub fn run_command<R>(
         &mut self,
         cmd: std::process::Command,
         f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
     ) -> std::io::Result<R> {
-        get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
+        self.client.configure_make_and_run_with_fifo(cmd, f)
     }
 }
 
 impl Drop for AcquiredJob {
     fn drop(&mut self) {
         let mut state = STATE.lock().unwrap();
-        match &self.job {
-            AcquiredJobInner::FromJobServer(_) => {
-                if state.waiting_count > state.available.len() + state.implicit_available as usize {
-                    // allocate space before moving Acquired to ensure we
-                    // drop Acquired outside of the lock on panic
-                    state.available.reserve(1);
-                    let AcquiredJobInner::FromJobServer(acquired) =
-                        mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
-                    else {
-                        unreachable!()
-                    };
-                    state.available.push(acquired);
-                    COND_VAR.notify_all();
-                }
-            }
-            AcquiredJobInner::ImplicitJob => {
-                state.implicit_available = true;
-                if state.waiting_count > state.available.len() {
-                    COND_VAR.notify_all();
-                }
+        match &mut *state {
+            State {
+                obtained_count: 0, ..
+            } => unreachable!(),
+            State {
+                obtained_count: obtained_count @ 1,
+                waiting_count,
+            } => {
+                *obtained_count = 0; // drop implicit token
+                let any_waiting = *waiting_count != 0;
+                drop(state);
+                if any_waiting {
+                    // we have the implicit token, but some other thread is trying to acquire a token,
+                    // release the implicit token so they can acquire it.
+                    let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
+                }
+            }
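+            // more than one token is held by this process, so this job was
+            // backed by a real jobserver token: give one token back and keep
+            // the rest (including the implicit one) for the remaining jobs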
+            State { obtained_count, .. } => {
+                *obtained_count = obtained_count.saturating_sub(1);
+                drop(state);
+                let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
+            }
         }
     }
 }
diff --git a/crates/fayalite/src/util/misc.rs b/crates/fayalite/src/util/misc.rs
index cebbceb..6c9acee 100644
--- a/crates/fayalite/src/util/misc.rs
+++ b/crates/fayalite/src/util/misc.rs
@@ -5,6 +5,7 @@ use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
 use std::{
     cell::Cell,
     fmt::{self, Debug, Write},
+    io,
     ops::{Bound, Range, RangeBounds},
     rc::Rc,
     sync::{Arc, OnceLock},
@@ -243,3 +244,323 @@ pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<Range<usize>> {
 pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
     try_slice_range(range, size).expect("range out of bounds")
 }
+
+pub trait SerdeJsonEscapeIfTest {
+    fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
+}
+
+pub trait SerdeJsonEscapeIfTestResult {
+    fn to_result(self) -> serde_json::Result<bool>;
+}
+
+impl SerdeJsonEscapeIfTestResult for bool {
+    fn to_result(self) -> serde_json::Result<bool> {
+        Ok(self)
+    }
+}
+
+impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
+    fn to_result(self) -> serde_json::Result<bool> {
+        self.map_err(Into::into)
+    }
+}
+
+impl<T: FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
+    fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
+        self(ch).to_result()
+    }
+}
+
+pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
+    fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        for utf16 in ch.encode_utf16(&mut [0; 2]) {
+            write!(writer, "\\u{utf16:04x}")?;
+        }
+        Ok(())
+    }
+}
+
+impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
+impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
+
+pub struct SerdeJsonEscapeIf<Base, Test> {
+    pub base: Base,
+    pub test: Test,
+}
+
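+// a serde_json Formatter wrapper: every callback below delegates to `base`
+// unchanged, except write_string_fragment, which additionally \u-escapes any
+// character that `test` flags (using two escapes for non-BMP characters)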
+impl<Base: SerdeJsonEscapeIfFormatter, Test: SerdeJsonEscapeIfTest> serde_json::ser::Formatter
+    for SerdeJsonEscapeIf<Base, Test>
+{
+    fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_null(writer)
+    }
+
+    fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_bool(writer, value)
+    }
+
+    fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_i8(writer, value)
+    }
+
+    fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_i16(writer, value)
+    }
+
+    fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_i32(writer, value)
+    }
+
+    fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_i64(writer, value)
+    }
+
+    fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_i128(writer, value)
+    }
+
+    fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_u8(writer, value)
+    }
+
+    fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_u16(writer, value)
+    }
+
+    fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_u32(writer, value)
+    }
+
+    fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_u64(writer, value)
+    }
+
+    fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_u128(writer, value)
+    }
+
+    fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_f32(writer, value)
+    }
+
+    fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_f64(writer, value)
+    }
+
+    fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_number_str(writer, value)
+    }
+
+    fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.begin_string(writer)
+    }
+
+    fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.end_string(writer)
+    }
+
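+    // the one overridden behavior: scan each string fragment for characters
+    // that `test` flags, emit the clean prefix via `base`, then the escape,
+    // then continue scanning the rest of the fragment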
+    fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        while let Some((next_escape_index, next_escape_char)) = fragment
+            .char_indices()
+            .find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
+                Ok(false) => None,
+                Ok(true) => Some(Ok((index, ch))),
+                Err(e) => Some(Err(e)),
+            })
+            .transpose()?
+        {
+            let (no_escapes, rest) = fragment.split_at(next_escape_index);
+            fragment = &rest[next_escape_char.len_utf8()..];
+            self.base.write_string_fragment(writer, no_escapes)?;
+            self.base.write_unicode_escape(writer, next_escape_char)?;
+        }
+        self.base.write_string_fragment(writer, fragment)
+    }
+
+    fn write_char_escape<W>(
+        &mut self,
+        writer: &mut W,
+        char_escape: serde_json::ser::CharEscape,
+    ) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_char_escape(writer, char_escape)
+    }
+
+    fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_byte_array(writer, value)
+    }
+
+    fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.begin_array(writer)
+    }
+
+    fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.end_array(writer)
+    }
+
+    fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.begin_array_value(writer, first)
+    }
+
+    fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.end_array_value(writer)
+    }
+
+    fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.begin_object(writer)
+    }
+
+    fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.end_object(writer)
+    }
+
+    fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.begin_object_key(writer, first)
+    }
+
+    fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.end_object_key(writer)
+    }
+
+    fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.begin_object_value(writer)
+    }
+
+    fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.end_object_value(writer)
+    }
+
+    fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
+    where
+        W: ?Sized + io::Write,
+    {
+        self.base.write_raw_fragment(writer, fragment)
+    }
+}
+
+fn serialize_to_json_ascii_helper<S: serde::Serialize, F: SerdeJsonEscapeIfFormatter>(
+    v: &S,
+    base: F,
+) -> serde_json::Result<String> {
+    let mut retval = Vec::new();
+    v.serialize(&mut serde_json::ser::Serializer::with_formatter(
+        &mut retval,
+        SerdeJsonEscapeIf {
+            base,
+            test: |ch| ch < '\x20' || ch > '\x7F',
+        },
+    ))?;
+    String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
+}
+
+pub fn serialize_to_json_ascii<T: serde::Serialize>(v: &T) -> serde_json::Result<String> {
+    serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
+}
+
+pub fn serialize_to_json_ascii_pretty<T: serde::Serialize>(
+    v: &T,
+) -> serde_json::Result<String> {
+    serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
+}
+
+pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize>(
+    v: &T,
+    indent: &str,
+) -> serde_json::Result<String> {
+    serialize_to_json_ascii_helper(
+        v,
+        serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
+    )
+}
diff --git a/crates/fayalite/src/util/ready_valid.rs b/crates/fayalite/src/util/ready_valid.rs
index ac08a64..06dc873 100644
--- a/crates/fayalite/src/util/ready_valid.rs
+++ b/crates/fayalite/src/util/ready_valid.rs
@@ -212,7 +212,7 @@ pub fn queue<T: Type>(
 mod tests {
     use super::*;
     use crate::{
-        cli::FormalMode, firrtl::ExportOptions,
+        build::formal::FormalMode, firrtl::ExportOptions,
         module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
         ty::StaticType,
     };
diff --git a/crates/fayalite/tests/formal.rs b/crates/fayalite/tests/formal.rs
index 65264dc..e7d677d 100644
--- a/crates/fayalite/tests/formal.rs
+++ b/crates/fayalite/tests/formal.rs
@@ -3,7 +3,7 @@
 //! Formal tests in Fayalite
 
 use fayalite::{
-    cli::FormalMode,
+    build::formal::FormalMode,
     clock::{Clock, ClockDomain},
     expr::{CastTo, HdlPartialEq},
     firrtl::ExportOptions,
diff --git a/crates/fayalite/visit_types.json b/crates/fayalite/visit_types.json
index effbd82..f47ca58 100644
--- a/crates/fayalite/visit_types.json
+++ b/crates/fayalite/visit_types.json
@@ -1176,7 +1176,9 @@
                 "BlackBoxInline": "Visible",
                 "BlackBoxPath": "Visible",
                 "DocString": "Visible",
-                "CustomFirrtl": "Visible"
+                "CustomFirrtl": "Visible",
+                "XdcLocation": "Visible",
+                "XdcIOStandard": "Visible"
             }
         },
         "DontTouchAnnotation": {
@@ -1214,6 +1216,16 @@
                 "$kind": "Opaque"
             }
         },
+        "XdcLocationAnnotation": {
+            "data": {
+                "$kind": "Opaque"
+            }
+        },
+        "XdcIOStandardAnnotation": {
+            "data": {
+                "$kind": "Opaque"
+            }
+        },
         "Target": {
             "data": {
                 "$kind": "Enum",