switch to using new crate::build system

This commit is contained in:
Jacob Lifshay 2025-09-28 23:05:24 -07:00
parent 90aed1615e
commit 01ba321014
Signed by: programmerjake
SSH key fingerprint: SHA256:HnFTLGpSm4Q4Fj502oCFisjZSoakwEuTsJJMSke63RQ
24 changed files with 5202 additions and 3043 deletions

87
Cargo.lock generated
View file

@ -25,9 +25,9 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.7"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "anstyle-parse"
@ -155,9 +155,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.5.9"
version = "4.5.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
dependencies = [
"clap_builder",
"clap_derive",
@ -165,9 +165,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.9"
version = "4.5.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
dependencies = [
"anstream",
"anstyle",
@ -176,10 +176,19 @@ dependencies = [
]
[[package]]
name = "clap_derive"
version = "4.5.8"
name = "clap_complete"
version = "4.5.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a"
dependencies = [
"clap",
]
[[package]]
name = "clap_derive"
version = "4.5.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
dependencies = [
"heck",
"proc-macro2",
@ -189,9 +198,9 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
[[package]]
name = "colorchoice"
@ -301,6 +310,7 @@ dependencies = [
"bitvec",
"blake3",
"clap",
"clap_complete",
"ctor",
"eyre",
"fayalite-proc-macros",
@ -383,12 +393,13 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.14"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasi",
]
@ -455,23 +466,23 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "jobslot"
version = "0.2.19"
version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe10868679d7a24c2c67d862d0e64a342ce9aef7cdde9ce8019bd35d353d458d"
checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c"
dependencies = [
"cfg-if",
"derive_destructure2",
"getrandom",
"libc",
"scopeguard",
"windows-sys 0.59.0",
"windows-sys 0.61.2",
]
[[package]]
name = "libc"
version = "0.2.153"
version = "0.2.176"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
[[package]]
name = "linux-raw-sys"
@ -553,6 +564,12 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "radium"
version = "0.7.0"
@ -744,9 +761,21 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.11.0+wasi-snapshot-preview1"
version = "0.14.7+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
dependencies = [
"wasip2",
]
[[package]]
name = "wasip2"
version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "which"
@ -791,6 +820,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-link"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.52.0"
@ -802,11 +837,11 @@ dependencies = [
[[package]]
name = "windows-sys"
version = "0.59.0"
version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
"windows-targets",
"windows-link",
]
[[package]]
@ -879,6 +914,12 @@ version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]]
name = "wit-bindgen"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "wyz"
version = "0.5.1"

View file

@ -22,11 +22,12 @@ base64 = "0.22.1"
bitvec = { version = "1.0.1", features = ["serde"] }
blake3 = { version = "1.5.4", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
clap_complete = "4.5.58"
ctor = "0.2.8"
eyre = "0.6.12"
hashbrown = "0.15.2"
indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.19"
jobslot = "0.2.23"
num-bigint = "0.4.6"
num-traits = "0.2.16"
petgraph = "0.8.1"

View file

@ -18,6 +18,7 @@ base64.workspace = true
bitvec.workspace = true
blake3.workspace = true
clap.workspace = true
clap_complete.workspace = true
ctor.workspace = true
eyre.workspace = true
fayalite-proc-macros.workspace = true

View file

@ -1,7 +1,9 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use clap::Parser;
use fayalite::{cli, prelude::*};
use fayalite::{
build::{ToArgs, WriteArgs},
prelude::*,
};
#[hdl_module]
fn blinky(clock_frequency: u64) {
@ -32,16 +34,22 @@ fn blinky(clock_frequency: u64) {
connect(led, output_reg);
}
#[derive(Parser)]
struct Cli {
#[derive(clap::Args, Clone, PartialEq, Eq, Hash, Debug)]
struct ExtraArgs {
/// clock frequency in hertz
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
clock_frequency: u64,
#[command(subcommand)]
cli: cli::Cli,
}
fn main() -> cli::Result {
let cli = Cli::parse();
cli.cli.run(blinky(cli.clock_frequency))
impl ToArgs for ExtraArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self { clock_frequency } = self;
args.write_arg(format_args!("--clock-frequency={clock_frequency}"));
}
}
fn main() {
BuildCli::main(|_cli, ExtraArgs { clock_frequency }| {
Ok(JobParams::new(blinky(clock_frequency), "blinky"))
});
}

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -2,81 +2,119 @@
// See Notices.txt for copyright information
use crate::{
build::{BaseArgs, DynJob, InternalJobTrait, JobItem, JobItemName},
build::{
BaseJob, BaseJobKind, CommandParams, JobAndDependencies, JobArgsAndDependencies, JobItem,
JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
},
firrtl::{ExportOptions, FileBackend},
intern::{Intern, Interned},
util::job_server::AcquiredJob,
};
use clap::Parser;
use std::{borrow::Cow, collections::BTreeMap};
use clap::Args;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
#[derive(Parser, Debug, Clone, Hash, PartialEq, Eq)]
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
pub struct FirrtlJobKind;
#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
#[group(id = "Firrtl")]
#[non_exhaustive]
pub struct FirrtlArgs {
#[command(flatten)]
pub base: BaseArgs,
#[command(flatten)]
pub export_options: ExportOptions,
}
impl FirrtlArgs {
impl ToArgs for FirrtlArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self { export_options } = self;
export_options.to_args(args);
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Firrtl {
base: BaseJob,
export_options: ExportOptions,
}
impl Firrtl {
fn make_firrtl_file_backend(&self) -> FileBackend {
FileBackend {
dir_path: self.base.output.path().into(),
top_fir_file_stem: self.base.file_stem.clone(),
dir_path: PathBuf::from(&*self.base.output_dir()),
top_fir_file_stem: Some(String::from(&*self.base.file_stem())),
circuit_name: None,
}
}
pub fn firrtl_file(&self) -> String {
pub fn firrtl_file(&self) -> Interned<str> {
self.base.file_with_ext("fir")
}
}
impl InternalJobTrait for FirrtlArgs {
fn subcommand_name() -> Interned<str> {
"firrtl".intern()
impl JobKind for FirrtlJobKind {
type Args = FirrtlArgs;
type Job = Firrtl;
type Dependencies = JobKindAndDependencies<BaseJobKind>;
fn dependencies(self) -> Self::Dependencies {
JobKindAndDependencies::new(BaseJobKind)
}
fn to_args(&self) -> Vec<Interned<str>> {
let Self {
base,
export_options,
} = self;
let mut retval = base.to_args();
retval.extend(export_options.to_args());
retval
}
fn inputs_and_direct_dependencies<'a>(
&'a self,
) -> Cow<'a, BTreeMap<JobItemName, Option<DynJob>>> {
Cow::Owned(BTreeMap::from_iter([(
JobItemName::Module {
name: str::intern(&self.base.module_name),
fn args_to_jobs(
args: JobArgsAndDependencies<Self>,
params: &JobParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.args_to_jobs_simple(
params,
|_kind, FirrtlArgs { export_options }, dependencies| {
Ok(Firrtl {
base: dependencies.job.job.clone(),
export_options,
})
},
None,
)]))
)
}
fn outputs(&self) -> Interned<[JobItemName]> {
[JobItemName::File {
path: str::intern_owned(self.firrtl_file()),
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.base.output_dir(),
}][..]
.intern()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.firrtl_file(),
}][..]
.intern()
}
fn name(self) -> Interned<str> {
"firrtl".intern()
}
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
None
}
fn run(
&self,
self,
job: &Self::Job,
inputs: &[JobItem],
params: &JobParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
let [JobItem::Module { value: module }] = inputs else {
panic!("wrong inputs, expected a single `Module`");
let [JobItem::Path { path: input_path }] = *inputs else {
panic!("wrong inputs, expected a single `Path`");
};
assert_eq!(*module.name(), *self.base.module_name);
crate::firrtl::export(self.make_firrtl_file_backend(), module, self.export_options)?;
Ok(vec![JobItem::File {
path: str::intern_owned(self.firrtl_file()),
assert_eq!(input_path, job.base.output_dir());
crate::firrtl::export(
job.make_firrtl_file_backend(),
params.main_module(),
job.export_options,
)?;
Ok(vec![JobItem::Path {
path: job.firrtl_file(),
}])
}
}

View file

@ -0,0 +1,419 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
CommandParams, GetBaseJob, JobAndDependencies, JobArgsAndDependencies, JobDependencies,
JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
external::{ExternalCommand, ExternalCommandJob, ExternalCommandJobKind},
interned_known_utf8_method,
verilog::{VerilogDialect, VerilogJobKind},
},
intern::{Intern, Interned},
module::NameId,
util::job_server::AcquiredJob,
};
use clap::{Args, ValueEnum};
use eyre::{Context, eyre};
use serde::{Deserialize, Serialize};
use std::fmt;
/// Verification mode written into the `[options] mode` line of the generated
/// `.sby` script (see `WriteSbyFileJob::write_sby`).
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize)]
#[non_exhaustive]
pub enum FormalMode {
    /// bounded model check -- the default mode
    #[default]
    BMC,
    Prove,
    Live,
    Cover,
}
impl FormalMode {
    /// Lowercase mode name as expected by the `.sby` file format.
    pub fn as_str(self) -> &'static str {
        match self {
            FormalMode::BMC => "bmc",
            FormalMode::Prove => "prove",
            FormalMode::Live => "live",
            FormalMode::Cover => "cover",
        }
    }
}
impl fmt::Display for FormalMode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display matches the `.sby` spelling exactly -- used via `{formal_mode}`
        // interpolation when generating scripts and command-line args.
        f.write_str(self.as_str())
    }
}
/// Command-line arguments controlling formal verification, parsed by clap.
#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct FormalArgs {
    /// extra arguments passed through to the `sby` invocation (repeatable)
    #[arg(long = "sby-extra-arg", value_name = "ARG")]
    pub sby_extra_args: Vec<String>,
    #[arg(long, default_value_t)]
    pub formal_mode: FormalMode,
    #[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
    pub formal_depth: u64,
    #[arg(long, default_value = Self::DEFAULT_SOLVER)]
    pub formal_solver: String,
    /// extra arguments appended to the smtbmc engine line (repeatable)
    #[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
    pub smtbmc_extra_args: Vec<String>,
}
impl FormalArgs {
    /// default for `--formal-depth`
    pub const DEFAULT_DEPTH: u64 = 20;
    /// default for `--formal-solver`
    pub const DEFAULT_SOLVER: &'static str = "z3";
}
impl ToArgs for FormalArgs {
    /// Re-serializes the parsed arguments back into command-line form.
    ///
    /// The exhaustive destructure forces a compile error here when a field is
    /// added, so `to_args` can't silently drop it.
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        let Self {
            sby_extra_args,
            formal_mode,
            formal_depth,
            formal_solver,
            smtbmc_extra_args,
        } = self;
        args.extend(
            sby_extra_args
                .iter()
                .map(|v| format!("--sby-extra-arg={v}")),
        );
        args.extend([
            format_args!("--formal-mode={formal_mode}"),
            format_args!("--formal-depth={formal_depth}"),
            format_args!("--formal-solver={formal_solver}"),
        ]);
        args.extend(
            smtbmc_extra_args
                .iter()
                .map(|v| format!("--smtbmc-extra-arg={v}")),
        );
    }
}
/// Job-kind marker for the step that generates the `.sby` script.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct WriteSbyFileJobKind;
/// Fully-resolved state for the write-sby-file job; all strings are interned
/// so the job is cheap to clone/hash and serializable.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub struct WriteSbyFileJob {
    // extra args forwarded to the external `sby` command line (not the script)
    sby_extra_args: Interned<[Interned<str>]>,
    formal_mode: FormalMode,
    formal_depth: u64,
    formal_solver: Interned<str>,
    // extra args appended to the `[engines]` smtbmc line in the script
    smtbmc_extra_args: Interned<[Interned<str>]>,
    // path of the `.sby` file this job writes
    sby_file: Interned<str>,
    output_dir: Interned<str>,
    // primary Verilog file produced by the upstream Verilog job
    main_verilog_file: Interned<str>,
}
impl WriteSbyFileJob {
    /// extra args forwarded to the `sby` command line
    pub fn sby_extra_args(&self) -> Interned<[Interned<str>]> {
        self.sby_extra_args
    }
    pub fn formal_mode(&self) -> FormalMode {
        self.formal_mode
    }
    pub fn formal_depth(&self) -> u64 {
        self.formal_depth
    }
    pub fn formal_solver(&self) -> Interned<str> {
        self.formal_solver
    }
    pub fn smtbmc_extra_args(&self) -> Interned<[Interned<str>]> {
        self.smtbmc_extra_args
    }
    /// path of the generated `.sby` file
    pub fn sby_file(&self) -> Interned<str> {
        self.sby_file
    }
    pub fn output_dir(&self) -> Interned<str> {
        self.output_dir
    }
    pub fn main_verilog_file(&self) -> Interned<str> {
        self.main_verilog_file
    }
    /// Writes the SymbiYosys script body to `output`.
    ///
    /// The nested return type separates the two failure domains: the outer
    /// `fmt::Error` is a write failure from `output`, while the inner
    /// `eyre::Result` carries logical errors (unusable file paths) that the
    /// caller propagates separately.
    fn write_sby<W: ?Sized + fmt::Write>(
        &self,
        output: &mut W,
        additional_files: &[Interned<str>],
        main_module_name_id: NameId,
    ) -> Result<eyre::Result<()>, fmt::Error> {
        // exhaustive destructure: fields not used in the script are explicitly
        // ignored so new fields force a review of this function
        let Self {
            sby_extra_args: _,
            formal_mode,
            formal_depth,
            formal_solver,
            smtbmc_extra_args,
            sby_file: _,
            output_dir: _,
            main_verilog_file,
        } = self;
        write!(
            output,
            "[options]\n\
            mode {formal_mode}\n\
            depth {formal_depth}\n\
            wait on\n\
            \n\
            [engines]\n\
            smtbmc {formal_solver} -- --"
        )?;
        // extra smtbmc args go after the `--` separators on the engine line
        for i in smtbmc_extra_args {
            output.write_str(" ")?;
            output.write_str(i)?;
        }
        output.write_str(
            "\n\
            \n\
            [script]\n",
        )?;
        // main verilog file first, then any additional files from the verilog job
        for verilog_file in [main_verilog_file].into_iter().chain(additional_files) {
            // only .v/.sv sources belong in the read_verilog list
            if !(verilog_file.ends_with(".v") || verilog_file.ends_with(".sv")) {
                continue;
            }
            // absolutize and require valid UTF-8; logical failures are returned
            // through the inner result, not the fmt error
            let verilog_file = match std::path::absolute(verilog_file)
                .and_then(|v| {
                    v.into_os_string().into_string().map_err(|_| {
                        std::io::Error::new(std::io::ErrorKind::Other, "path is not valid UTF-8")
                    })
                })
                .wrap_err_with(|| format!("converting {verilog_file:?} to an absolute path failed"))
            {
                Ok(v) => v,
                Err(e) => return Ok(Err(e)),
            };
            // reject paths the quoted read_verilog line couldn't represent:
            // double quotes or whitespace other than space/tab
            if verilog_file.contains(|ch: char| {
                (ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
            }) {
                return Ok(Err(eyre!(
                    "verilog file path contains characters that aren't permitted"
                )));
            }
            writeln!(output, "read_verilog -sv -formal \"{verilog_file}\"")?;
        }
        let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
        // workaround for wires disappearing -- set `keep` on all wires
        writeln!(
            output,
            "hierarchy -top {circuit_name}\n\
            proc\n\
            setattr -set keep 1 w:\\*\n\
            prep",
        )?;
        Ok(Ok(()))
    }
}
impl JobKind for WriteSbyFileJobKind {
    type Args = FormalArgs;
    type Job = WriteSbyFileJob;
    // depends on the Verilog job, which supplies the files to list in the script
    type Dependencies = JobKindAndDependencies<VerilogJobKind>;
    fn dependencies(self) -> Self::Dependencies {
        Default::default()
    }
    /// Converts parsed CLI args (plus dependency jobs) into a concrete job.
    fn args_to_jobs(
        mut args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        // formal flows go through yosys, so default the verilog dialect to Yosys
        // unless the user picked one explicitly
        args.dependencies
            .dependencies
            .args
            .args
            .additional_args
            .verilog_dialect
            .get_or_insert(VerilogDialect::Yosys);
        args.args_to_jobs_simple(params, |_kind, args, dependencies| {
            let FormalArgs {
                sby_extra_args,
                formal_mode,
                formal_depth,
                formal_solver,
                smtbmc_extra_args,
            } = args;
            Ok(WriteSbyFileJob {
                sby_extra_args: sby_extra_args.into_iter().map(str::intern_owned).collect(),
                formal_mode,
                formal_depth,
                formal_solver: str::intern_owned(formal_solver),
                smtbmc_extra_args: smtbmc_extra_args
                    .into_iter()
                    .map(str::intern_owned)
                    .collect(),
                sby_file: dependencies.base_job().file_with_ext("sby"),
                output_dir: dependencies.base_job().output_dir(),
                main_verilog_file: dependencies.job.job.main_verilog_file(),
            })
        })
    }
    fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
        // the set of verilog files isn't known statically, so depend on the
        // verilog job's dynamically-produced path list
        [JobItemName::DynamicPaths {
            source_job_name: VerilogJobKind.name(),
        }][..]
            .intern()
    }
    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path { path: job.sby_file }][..].intern()
    }
    fn name(self) -> Interned<str> {
        "write-sby-file".intern()
    }
    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        // this job runs in-process; it never shells out
        None
    }
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        // the scheduler must hand us exactly the items declared by `inputs`
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let [
            JobItem::DynamicPaths {
                paths: additional_files,
                ..
            },
        ] = inputs
        else {
            unreachable!();
        };
        let mut contents = String::new();
        // unwrap the two error layers of write_sby (see its doc comment)
        match job.write_sby(
            &mut contents,
            additional_files,
            params.main_module().name_id(),
        ) {
            Ok(result) => result?,
            Err(fmt::Error) => unreachable!("writing to String can't fail"),
        }
        std::fs::write(job.sby_file, contents)
            .wrap_err_with(|| format!("writing {} failed", job.sby_file))?;
        Ok(vec![JobItem::Path { path: job.sby_file }])
    }
    fn subcommand_hidden(self) -> bool {
        // internal step of the formal flow; not shown as a user-facing subcommand
        true
    }
}
/// Job data for running `sby` on the generated script.
#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub struct Formal {
    // flattened so serialization reuses the write-sby-file job's fields
    #[serde(flatten)]
    write_sby_file: WriteSbyFileJob,
    // file name (no directory) of the .sby file, as passed to `sby -f`
    sby_file_name: Interned<str>,
}
impl fmt::Debug for Formal {
    // hand-written so the flattened inner job's fields appear inline;
    // exhaustive destructure keeps this in sync when fields change
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self {
            write_sby_file:
                WriteSbyFileJob {
                    sby_extra_args,
                    formal_mode,
                    formal_depth,
                    formal_solver,
                    smtbmc_extra_args,
                    sby_file,
                    output_dir: _,
                    main_verilog_file,
                },
            sby_file_name,
        } = self;
        f.debug_struct("Formal")
            .field("sby_extra_args", sby_extra_args)
            .field("formal_mode", formal_mode)
            .field("formal_depth", formal_depth)
            .field("formal_solver", formal_solver)
            .field("smtbmc_extra_args", smtbmc_extra_args)
            .field("sby_file", sby_file)
            .field("sby_file_name", sby_file_name)
            .field("main_verilog_file", main_verilog_file)
            .finish_non_exhaustive()
    }
}
/// No extra CLI args beyond those of the dependency jobs (empty on purpose).
#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)]
pub struct FormalAdditionalArgs {}
impl ToArgs for FormalAdditionalArgs {
    fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
        // empty destructure: compile error here if a field is ever added
        let Self {} = self;
    }
}
impl ExternalCommand for Formal {
    type AdditionalArgs = FormalAdditionalArgs;
    type AdditionalJobData = Formal;
    // runs after the .sby script has been written
    type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
    fn dependencies() -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let FormalAdditionalArgs {} = args.additional_args;
            Ok(Formal {
                write_sby_file: dependencies.job.job.clone(),
                // strip the directory: sby is invoked from output_dir (see
                // `current_dir`) with just the file name
                sby_file_name: interned_known_utf8_method(dependencies.job.job.sby_file(), |v| {
                    v.file_name().expect("known to have file name")
                }),
            })
        })
    }
    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        [
            JobItemName::Path {
                path: job.additional_job_data().write_sby_file.sby_file(),
            },
            JobItemName::Path {
                path: job.additional_job_data().write_sby_file.main_verilog_file(),
            },
            // plus whatever extra files the verilog job produced
            JobItemName::DynamicPaths {
                source_job_name: VerilogJobKind.name(),
            },
        ][..]
            .intern()
    }
    fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        // sby's outputs aren't tracked as job items
        Interned::default()
    }
    fn command_line_args(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        [
            // "-j1".intern(), // sby seems not to respect job count in parallel mode
            "-f".intern(),
            job.additional_job_data().sby_file_name,
        ]
        .into_iter()
        .chain(job.additional_job_data().write_sby_file.sby_extra_args())
        .collect()
    }
    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
        // run sby inside the build output directory
        Some(job.output_dir())
    }
    fn job_kind_name() -> Interned<str> {
        "formal".intern()
    }
    fn default_program_name() -> Interned<str> {
        "sby".intern()
    }
}

View file

@ -0,0 +1,801 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{DynJob, JobItem, JobItemName, JobParams, program_name_for_internal_jobs},
intern::Interned,
util::{HashMap, HashSet, job_server::AcquiredJob},
};
use eyre::{ContextCompat, eyre};
use petgraph::{
algo::{DfsSpace, kosaraju_scc, toposort},
graph::DiGraph,
visit::{GraphBase, Visitable},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
use std::{
cell::OnceCell,
collections::{BTreeMap, BTreeSet, VecDeque},
fmt::{self, Write},
panic,
rc::Rc,
sync::mpsc,
thread::{self, ScopedJoinHandle},
};
// `write!` into a String-like target, panicking instead of returning the
// (impossible for String) fmt error.
macro_rules! write_str {
    ($s:expr, $($rest:tt)*) => {
        write!($s, $($rest)*).expect("String::write_fmt can't fail")
    };
}
/// Node in the bipartite job graph: either a job, or an item (file/path)
/// that jobs produce and consume.
#[derive(Clone, Debug)]
enum JobGraphNode {
    Job(DynJob),
    Item {
        #[allow(dead_code, reason = "name used for debugging")]
        name: JobItemName,
        // the job that produces this item, if any has been added yet
        source_job: Option<DynJob>,
    },
}
type JobGraphInner = DiGraph<JobGraphNode, ()>;
/// Dependency graph of build jobs, kept topologically sorted.
#[derive(Clone, Default)]
pub struct JobGraph {
    // reverse lookup: job -> its graph node
    jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
    // reverse lookup: item name -> its graph node
    items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
    graph: JobGraphInner,
    // recomputed by toposort whenever jobs are added
    topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
    // reusable scratch space for petgraph's toposort
    space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
}
impl fmt::Debug for JobGraph {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // exhaustive destructure so a new field forces revisiting this impl;
        // lookup maps and toposort scratch space are intentionally omitted
        let Self {
            jobs: _,
            items: _,
            graph,
            topological_order,
            space: _,
        } = self;
        f.debug_struct("JobGraph")
            .field("graph", graph)
            .field("topological_order", topological_order)
            .finish_non_exhaustive()
    }
}
/// Errors from adding jobs to a [`JobGraph`].
#[derive(Clone, Debug)]
pub enum JobGraphError {
    /// Adding `job` would create a dependency cycle through `output`.
    CycleError {
        job: DynJob,
        output: JobItemName,
    },
    /// Two different jobs both claim to produce `output_item`.
    MultipleJobsCreateSameOutput {
        output_item: JobItemName,
        existing_job: DynJob,
        new_job: DynJob,
    },
}
impl std::error::Error for JobGraphError {}
impl fmt::Display for JobGraphError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::CycleError { job, output } => write!(
                f,
                "job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
                {output:?}\n\
                job:\n{job:?}",
            ),
            JobGraphError::MultipleJobsCreateSameOutput {
                output_item,
                existing_job,
                new_job,
            } => write!(
                f,
                "job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
                conflicting output:\n\
                {output_item:?}\n\
                existing job:\n\
                {existing_job:?}\n\
                new job:\n\
                {new_job:?}",
            ),
        }
    }
}
/// Quoting strategy chosen by `EscapeForUnixShell::from_bytes`.
#[derive(Copy, Clone, Debug)]
enum EscapeForUnixShellState {
    // $'...' quoting: used when the input contains control or non-ASCII bytes
    DollarSingleQuote,
    // '...' quoting: used when the input contains !, quotes, or spaces
    SingleQuote,
    // no quoting; individual metacharacters are backslash-escaped instead
    Unquoted,
}
/// Iterator that lazily yields a shell-escaped rendering of `bytes`,
/// one output character at a time.
#[derive(Clone)]
pub struct EscapeForUnixShell<'a> {
    // quoting mode picked up-front by `from_bytes`
    state: EscapeForUnixShellState,
    // up to 3 pending output bytes, emitted before consuming more input;
    // zero bytes mark unused slots
    prefix: [u8; 3],
    // remaining unprocessed input
    bytes: &'a [u8],
}
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
    /// Debug shows the escaped output rather than the internal fields,
    /// matching the `Display` rendering.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self)
    }
}
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
    /// Streams every escaped character into the formatter; iterating a clone
    /// leaves `self` untouched so `Display` can be called repeatedly.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.clone().try_for_each(|c| f.write_char(c))
    }
}
impl<'a> EscapeForUnixShell<'a> {
    /// Escapes a UTF-8 string; see [`Self::from_bytes`].
    pub fn new(s: &'a str) -> Self {
        Self::from_bytes(s.as_bytes())
    }
    // Packs up to 3 bytes into the fixed-size pending-output buffer,
    // zero-padding unused slots.
    fn make_prefix(bytes: &[u8]) -> [u8; 3] {
        let mut prefix = [0; 3];
        prefix[..bytes.len()].copy_from_slice(bytes);
        prefix
    }
    /// Scans the input once to pick the cheapest sufficient quoting mode.
    ///
    /// Control (< 0x20) or non-printable-ASCII (>= 0x7F) bytes force `$'...'`
    /// quoting immediately; `!`, quotes, spaces, or an empty input require
    /// `'...'` quoting; anything else goes unquoted with per-char escaping.
    pub fn from_bytes(bytes: &'a [u8]) -> Self {
        // empty input still needs quotes to survive as a shell word
        let mut needs_single_quote = bytes.is_empty();
        for &b in bytes {
            match b {
                b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
                0..0x20 | 0x7F.. => {
                    return Self {
                        state: EscapeForUnixShellState::DollarSingleQuote,
                        // opening `$'`; the iterator emits the closing quote
                        prefix: Self::make_prefix(b"$'"),
                        bytes,
                    };
                }
                _ => {}
            }
        }
        if needs_single_quote {
            Self {
                state: EscapeForUnixShellState::SingleQuote,
                prefix: Self::make_prefix(b"'"),
                bytes,
            }
        } else {
            Self {
                state: EscapeForUnixShellState::Unquoted,
                prefix: Self::make_prefix(b""),
                bytes,
            }
        }
    }
}
impl Iterator for EscapeForUnixShell<'_> {
    type Item = char;
    fn next(&mut self) -> Option<Self::Item> {
        // first drain any queued prefix bytes, left to right, zeroing each
        // slot as it is emitted
        match &mut self.prefix {
            [0, 0, 0] => {}
            [0, 0, v] | // find first
            [0, v, _] | // non-zero byte
            [v, _, _] => {
                let retval = *v as char;
                *v = 0;
                return Some(retval);
            }
        }
        let Some(&next_byte) = self.bytes.split_off_first() else {
            // input exhausted: quoted modes emit their closing quote exactly
            // once (state flips to Unquoted), then iteration ends
            return match self.state {
                EscapeForUnixShellState::DollarSingleQuote
                | EscapeForUnixShellState::SingleQuote => {
                    self.state = EscapeForUnixShellState::Unquoted;
                    Some('\'')
                }
                EscapeForUnixShellState::Unquoted => None,
            };
        };
        match self.state {
            EscapeForUnixShellState::DollarSingleQuote => match next_byte {
                // backslash-escape quote/backslash inside $'...'
                b'\'' | b'\\' => {
                    self.prefix = Self::make_prefix(&[next_byte]);
                    Some('\\')
                }
                b'\t' => {
                    self.prefix = Self::make_prefix(b"t");
                    Some('\\')
                }
                b'\n' => {
                    self.prefix = Self::make_prefix(b"n");
                    Some('\\')
                }
                b'\r' => {
                    self.prefix = Self::make_prefix(b"r");
                    Some('\\')
                }
                // printable ASCII passes through unchanged
                0x20..=0x7E => Some(next_byte as char),
                _ => {
                    // everything else becomes a \xHH escape; queue "xHH"
                    self.prefix = [
                        b'x',
                        char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
                            as u8,
                        char::from_digit(next_byte as u32 & 0xF, 0x10)
                            .expect("known to be in range") as u8,
                    ];
                    Some('\\')
                }
            },
            EscapeForUnixShellState::SingleQuote => {
                if next_byte == b'\'' {
                    // a literal ' inside '...' is written as '\'' --
                    // close quote, escaped quote, reopen quote
                    self.prefix = Self::make_prefix(b"\\''");
                    Some('\'')
                } else {
                    Some(next_byte as char)
                }
            }
            EscapeForUnixShellState::Unquoted => match next_byte {
                // shell metacharacters get a leading backslash
                b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
                | b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
                | b'}' | b'~' => {
                    self.prefix = Self::make_prefix(&[next_byte]);
                    Some('\\')
                }
                _ => Some(next_byte as char),
            },
        }
    }
}
/// Which Makefile context the text is being escaped for -- escaping rules
/// differ between targets/prerequisites and recipe lines.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum UnixMakefileEscapeKind {
    /// target/prerequisite position (make-level escaping only)
    NonRecipe,
    /// recipe line, make-level escaping only (`$` doubling)
    RecipeWithoutShellEscaping,
    /// recipe line, shell-escaped first and then make-escaped
    RecipeWithShellEscaping,
}
/// `Display`-able escaper for embedding `s` in a Unix Makefile.
#[derive(Copy, Clone)]
pub struct EscapeForUnixMakefile<'a> {
    s: &'a str,
    kind: UnixMakefileEscapeKind,
}
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug shows the escaped form, same as Display
        fmt::Display::fmt(self, f)
    }
}
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // write through to the formatter; variable collection is a no-op here
        // (variables were already gathered by `Self::new`)
        self.do_write(f, fmt::Write::write_str, fmt::Write::write_char, |_, _| {
            Ok(())
        })
    }
}
impl<'a> EscapeForUnixMakefile<'a> {
    /// Core escaping routine, abstracted over the output sink so it can both
    /// write text (via `Display`) and merely collect needed make variables
    /// (via [`Self::new`]).
    ///
    /// Panics on characters that have no Makefile escape (control characters,
    /// and `;` in non-recipe position).
    fn do_write<S: ?Sized, E>(
        &self,
        state: &mut S,
        write_str: impl Fn(&mut S, &str) -> Result<(), E>,
        write_char: impl Fn(&mut S, char) -> Result<(), E>,
        add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
    ) -> Result<(), E> {
        // recipe lines only need `$` doubled; control chars are unrepresentable
        let escape_recipe_char = |c| match c {
            '$' => write_str(state, "$$"),
            '\0'..='\x1F' | '\x7F' => {
                panic!("can't escape a control character for Unix Makefile: {c:?}");
            }
            _ => write_char(state, c),
        };
        match self.kind {
            UnixMakefileEscapeKind::NonRecipe => self.s.chars().try_for_each(|c| match c {
                '=' => {
                    // `=` can't be backslash-escaped; route it through an
                    // EQUALS variable, recording that the variable is needed
                    add_variable(state, "EQUALS = =")?;
                    write_str(state, "$(EQUALS)")
                }
                ';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
                '$' => write_str(state, "$$"),
                '\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
                    write_char(state, '\\')?;
                    write_char(state, c)
                }
                '\0'..='\x1F' | '\x7F' => {
                    panic!("can't escape a control character for Unix Makefile: {c:?}");
                }
                _ => write_char(state, c),
            }),
            UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
                self.s.chars().try_for_each(escape_recipe_char)
            }
            UnixMakefileEscapeKind::RecipeWithShellEscaping => {
                // shell-escape first, then make-escape the resulting chars
                EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
            }
        }
    }
    /// Builds the escaper and records (into `needed_variables`) any make
    /// variable definitions the escaped text will reference, by doing a dry
    /// run of `do_write` with no-op writers.
    pub fn new(
        s: &'a str,
        kind: UnixMakefileEscapeKind,
        needed_variables: &mut BTreeSet<&'static str>,
    ) -> Self {
        let retval = Self { s, kind };
        // Infallible error type: the `let Ok(())` pattern is irrefutable
        let Ok(()) = retval.do_write(
            needed_variables,
            |_, _| Ok(()),
            |_, _| Ok(()),
            |needed_variables, variable| -> Result<(), std::convert::Infallible> {
                needed_variables.insert(variable);
                Ok(())
            },
        );
        retval
    }
}
impl JobGraph {
pub fn new() -> Self {
Self::default()
}
fn try_add_item_node(
&mut self,
name: JobItemName,
new_source_job: Option<DynJob>,
new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
use hashbrown::hash_map::Entry;
match self.items.entry(name) {
Entry::Occupied(item_entry) => {
let node_id = *item_entry.get();
let JobGraphNode::Item {
name: _,
source_job,
} = &mut self.graph[node_id]
else {
unreachable!("known to be an item");
};
if let Some(new_source_job) = new_source_job {
if let Some(source_job) = source_job {
return Err(JobGraphError::MultipleJobsCreateSameOutput {
output_item: item_entry.key().clone(),
existing_job: source_job.clone(),
new_job: new_source_job,
});
} else {
*source_job = Some(new_source_job);
}
}
Ok(node_id)
}
Entry::Vacant(item_entry) => {
let node_id = self.graph.add_node(JobGraphNode::Item {
name,
source_job: new_source_job,
});
new_nodes.insert(node_id);
item_entry.insert(node_id);
Ok(node_id)
}
}
}
    /// Adds `jobs` (plus graph nodes for the items they produce and consume)
    /// to the graph, then recomputes the topological order.
    ///
    /// Jobs already present are skipped. On any error (duplicate producer for
    /// an item, or a dependency cycle) every node added by this call is
    /// removed again, leaving the graph unchanged.
    pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
        &mut self,
        jobs: I,
    ) -> Result<(), JobGraphError> {
        use hashbrown::hash_map::Entry;
        let jobs = jobs.into_iter();
        // drop guard: on an early `?` return below, removes every node this
        // call added so the graph is rolled back to its previous state
        struct RemoveNewNodesOnError<'a> {
            this: &'a mut JobGraph,
            new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
        }
        impl Drop for RemoveNewNodesOnError<'_> {
            fn drop(&mut self) {
                for node in self.new_nodes.drain() {
                    self.this.graph.remove_node(node);
                }
            }
        }
        let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
            this: self,
            new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
        };
        let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
        let this = &mut *remove_new_nodes_on_error.this;
        for job in jobs {
            // jobs already in the graph are silently skipped
            let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
                continue;
            };
            let job_node_id = this
                .graph
                .add_node(JobGraphNode::Job(job_entry.key().clone()));
            new_nodes.insert(job_node_id);
            job_entry.insert(job_node_id);
            // job -> output-item edges; this job becomes each item's source job
            for name in job.outputs() {
                let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
                this.graph.add_edge(job_node_id, item_node_id, ());
            }
            // input-item -> job edges; inputs don't claim a source job
            for name in job.inputs() {
                let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
                this.graph.add_edge(item_node_id, job_node_id, ());
            }
        }
        match toposort(&this.graph, Some(&mut this.space)) {
            Ok(v) => {
                this.topological_order = v;
                // no need to remove any of the new nodes on drop since we didn't encounter any errors
                remove_new_nodes_on_error.new_nodes.clear();
                Ok(())
            }
            Err(_) => {
                // there's at least one cycle, find one!
                let cycle = kosaraju_scc(&this.graph)
                    .into_iter()
                    .find_map(|scc| {
                        if scc.len() <= 1 {
                            // can't be a cycle since our graph is bipartite --
                            // jobs only connect to items, never jobs to jobs or items to items
                            None
                        } else {
                            Some(scc)
                        }
                    })
                    .expect("we know there's a cycle");
                let cycle_set = HashSet::from_iter(cycle.iter().copied());
                // report the cycle through one of the jobs participating in it
                let job = cycle
                    .into_iter()
                    .find_map(|node_id| {
                        if let JobGraphNode::Job(job) = &this.graph[node_id] {
                            Some(job.clone())
                        } else {
                            None
                        }
                    })
                    .expect("a job must be part of the cycle");
                let output = job
                    .outputs()
                    .into_iter()
                    .find(|output| cycle_set.contains(&this.items[output]))
                    .expect("an output must be part of the cycle");
                Err(JobGraphError::CycleError { job, output })
            }
        }
    }
#[track_caller]
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
match self.try_add_jobs(jobs) {
Ok(()) => {}
Err(e) => panic!("error: {e}"),
}
}
pub fn to_unix_makefile(&self, extra_args: &[Interned<str>]) -> String {
self.to_unix_makefile_with_internal_program_prefix(
&[program_name_for_internal_jobs()],
extra_args,
)
}
    /// Renders the job graph as a makefile.
    ///
    /// `internal_program_prefix` is the argv prefix used to invoke jobs
    /// implemented by this program itself; `extra_args` is appended to every
    /// job's command line. Rules are emitted in topological order; any
    /// variable definitions required by the escaping are prepended at the end.
    pub fn to_unix_makefile_with_internal_program_prefix(
        &self,
        internal_program_prefix: &[Interned<str>],
        extra_args: &[Interned<str>],
    ) -> String {
        let mut retval = String::new();
        let mut needed_variables = BTreeSet::new();
        let mut phony_targets = BTreeSet::new();
        for &node_id in &self.topological_order {
            // only job nodes become make rules; item nodes are skipped
            let JobGraphNode::Job(job) = &self.graph[node_id] else {
                continue;
            };
            let outputs = job.outputs();
            if outputs.is_empty() {
                // rule with no targets; the recipe still runs the job's command
                retval.push_str(":");
            } else {
                for output in job.outputs() {
                    match output {
                        JobItemName::Path { path } => {
                            write_str!(
                                retval,
                                "{} ",
                                EscapeForUnixMakefile::new(
                                    &path,
                                    UnixMakefileEscapeKind::NonRecipe,
                                    &mut needed_variables
                                )
                            );
                        }
                        JobItemName::DynamicPaths { source_job_name } => {
                            // dynamic outputs have no static file name, so the
                            // producing job's name is used as a .PHONY target
                            write_str!(
                                retval,
                                "{} ",
                                EscapeForUnixMakefile::new(
                                    &source_job_name,
                                    UnixMakefileEscapeKind::NonRecipe,
                                    &mut needed_variables
                                )
                            );
                            phony_targets.insert(Interned::into_inner(source_job_name));
                        }
                    }
                }
                if outputs.len() == 1 {
                    retval.push_str(":");
                } else {
                    // `&:` is GNU make's grouped-targets separator: one recipe
                    // invocation produces all listed outputs
                    retval.push_str("&:");
                }
            }
            for input in job.inputs() {
                match input {
                    JobItemName::Path { path } => {
                        write_str!(
                            retval,
                            " {}",
                            EscapeForUnixMakefile::new(
                                &path,
                                UnixMakefileEscapeKind::NonRecipe,
                                &mut needed_variables
                            )
                        );
                    }
                    JobItemName::DynamicPaths { source_job_name } => {
                        // depend on the producing job's phony target
                        write_str!(
                            retval,
                            " {}",
                            EscapeForUnixMakefile::new(
                                &source_job_name,
                                UnixMakefileEscapeKind::NonRecipe,
                                &mut needed_variables
                            )
                        );
                        phony_targets.insert(Interned::into_inner(source_job_name));
                    }
                }
            }
            retval.push_str("\n\t");
            job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args)
                .to_unix_shell_line(&mut retval, |arg, output| {
                    write!(
                        output,
                        "{}",
                        EscapeForUnixMakefile::new(
                            arg,
                            UnixMakefileEscapeKind::RecipeWithShellEscaping,
                            &mut needed_variables
                        )
                    )
                })
                .expect("writing to String never fails");
            retval.push_str("\n\n");
        }
        if !phony_targets.is_empty() {
            retval.push_str("\n.PHONY:");
            for phony_target in phony_targets {
                write_str!(
                    retval,
                    " {}",
                    EscapeForUnixMakefile::new(
                        phony_target,
                        UnixMakefileEscapeKind::NonRecipe,
                        &mut needed_variables
                    )
                );
            }
            retval.push_str("\n");
        }
        if !needed_variables.is_empty() {
            // variable definitions must precede their first use, so prepend
            retval.insert_str(
                0,
                &String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
            );
        }
        retval
    }
pub fn to_unix_shell_script(&self, extra_args: &[Interned<str>]) -> String {
self.to_unix_shell_script_with_internal_program_prefix(
&[program_name_for_internal_jobs()],
extra_args,
)
}
pub fn to_unix_shell_script_with_internal_program_prefix(
&self,
internal_program_prefix: &[Interned<str>],
extra_args: &[Interned<str>],
) -> String {
let mut retval = String::from(
"#!/bin/sh\n\
set -ex\n",
);
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args)
.to_unix_shell_line(&mut retval, |arg, output| {
write!(output, "{}", EscapeForUnixShell::new(&arg))
})
.expect("writing to String never fails");
retval.push_str("\n");
}
retval
}
    /// Runs every job in the graph on scoped worker threads, starting each
    /// job once all of its input items have been produced by earlier jobs.
    ///
    /// Returns the first job error encountered; scoped threads are joined
    /// before returning.
    pub fn run(&self, params: &JobParams) -> eyre::Result<()> {
        // use scope to auto-join threads on errors
        thread::scope(|scope| {
            // a job whose inputs aren't all available yet; each input slot is
            // a OnceCell that gets filled as producing jobs finish
            struct WaitingJobState {
                job_node_id: <JobGraphInner as GraphBase>::NodeId,
                job: DynJob,
                inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
            }
            let mut ready_jobs = VecDeque::new();
            let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
            for &node_id in &self.topological_order {
                let JobGraphNode::Job(job) = &self.graph[node_id] else {
                    continue;
                };
                let waiting_job = WaitingJobState {
                    job_node_id: node_id,
                    job: job.clone(),
                    inputs: job
                        .inputs()
                        .iter()
                        .map(|&name| (name, OnceCell::new()))
                        .collect(),
                };
                if waiting_job.inputs.is_empty() {
                    // no dependencies -- runnable immediately
                    ready_jobs.push_back(waiting_job);
                } else {
                    // the Rc strong count doubles as a "missing inputs"
                    // counter: each pending input holds one clone, and
                    // Rc::into_inner below succeeds only when the last
                    // input has been delivered
                    let waiting_job = Rc::new(waiting_job);
                    for &input_item in waiting_job.inputs.keys() {
                        item_name_to_waiting_jobs_map
                            .entry(input_item)
                            .or_default()
                            .push(waiting_job.clone());
                    }
                }
            }
            struct RunningJob<'scope> {
                job: DynJob,
                thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
            }
            let mut running_jobs = HashMap::default();
            let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
            // NOTE(review): this loop polls `try_recv` and so spins while
            // jobs are running but none are ready or finished -- consider a
            // blocking `recv` in that state. Also `while let Ok(..) =
            // ...try_recv()` would avoid the `.ok()`. TODO confirm intended.
            loop {
                while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() {
                    let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
                    else {
                        unreachable!();
                    };
                    // re-raise a worker panic on this thread; `??` then
                    // propagates the job's own error, if any
                    let output_items = thread.join().map_err(panic::resume_unwind)??;
                    assert!(
                        output_items.iter().map(JobItem::name).eq(job.outputs()),
                        "job's run() method returned the wrong output items:\n\
                        output items:\n\
                        {output_items:?}\n\
                        expected outputs:\n\
                        {:?}\n\
                        job:\n\
                        {job:?}",
                        job.outputs(),
                    );
                    // deliver each output to every job waiting on it
                    for output_item in output_items {
                        for waiting_job in item_name_to_waiting_jobs_map
                            .remove(&output_item.name())
                            .unwrap_or_default()
                        {
                            let Ok(()) =
                                waiting_job.inputs[&output_item.name()].set(output_item.clone())
                            else {
                                unreachable!();
                            };
                            // succeeds only for the last missing input --
                            // the job is now ready to run
                            if let Some(waiting_job) = Rc::into_inner(waiting_job) {
                                ready_jobs.push_back(waiting_job);
                            }
                        }
                    }
                }
                if let Some(WaitingJobState {
                    job_node_id,
                    job,
                    inputs,
                }) = ready_jobs.pop_front()
                {
                    struct RunningJobInThread<'a> {
                        job_node_id: <JobGraphInner as GraphBase>::NodeId,
                        job: DynJob,
                        inputs: Vec<JobItem>,
                        params: &'a JobParams,
                        acquired_job: AcquiredJob,
                        finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
                    }
                    impl RunningJobInThread<'_> {
                        fn run(mut self) -> eyre::Result<Vec<JobItem>> {
                            self.job
                                .run(&self.inputs, self.params, &mut self.acquired_job)
                        }
                    }
                    impl Drop for RunningJobInThread<'_> {
                        // Drop (not run()) sends the completion notification,
                        // so the main loop is woken even if the job panics
                        fn drop(&mut self) {
                            let _ = self.finished_jobs_sender.send(self.job_node_id);
                        }
                    }
                    let name = job.kind().name();
                    let running_job_in_thread = RunningJobInThread {
                        job_node_id,
                        job: job.clone(),
                        inputs: Result::from_iter(inputs.into_iter().map(|(input_name, input)| {
                            input.into_inner().wrap_err_with(|| {
                                eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
                            })
                        }))?,
                        params,
                        acquired_job: AcquiredJob::acquire()?,
                        finished_jobs_sender: finished_jobs_sender.clone(),
                    };
                    running_jobs.insert(
                        job_node_id,
                        RunningJob {
                            job,
                            thread: thread::Builder::new()
                                .name(format!("job:{name}"))
                                .spawn_scoped(scope, move || running_job_in_thread.run())
                                .expect("failed to spawn thread for job"),
                        },
                    );
                }
                if running_jobs.is_empty() {
                    // nothing running and (asserted) nothing waiting: done
                    assert!(item_name_to_waiting_jobs_map.is_empty());
                    assert!(ready_jobs.is_empty());
                    return Ok(());
                }
            }
        })
    }
}
impl Extend<DynJob> for JobGraph {
    /// Adds jobs to the graph; panics on error (see [`JobGraph::add_jobs`]).
    #[track_caller]
    fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
        self.add_jobs(iter);
    }
}
impl FromIterator<DynJob> for JobGraph {
    /// Builds a graph from the given jobs; panics on any [`JobGraphError`]
    /// (see [`JobGraph::add_jobs`]).
    #[track_caller]
    fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
        let mut graph = Self::new();
        graph.add_jobs(iter);
        graph
    }
}
impl Serialize for JobGraph {
    /// Serializes the graph as a sequence of jobs in topological order;
    /// items and edges are re-derived from the jobs on deserialization.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
        for &node_id in &self.topological_order {
            // item nodes are skipped; only jobs are serialized
            let JobGraphNode::Job(job) = &self.graph[node_id] else {
                continue;
            };
            serializer.serialize_element(job)?;
        }
        serializer.end()
    }
}
impl<'de> Deserialize<'de> for JobGraph {
    /// Deserializes a sequence of [`DynJob`]s and rebuilds the graph,
    /// reporting any [`JobGraphError`] (e.g. a cycle) as a custom
    /// deserialization error.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let mut retval = JobGraph::new();
        retval
            .try_add_jobs(Vec::<DynJob>::deserialize(deserializer)?)
            .map_err(D::Error::custom)?;
        Ok(retval)
    }
}

View file

@ -0,0 +1,341 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{BUILT_IN_JOB_KINDS, DynJobKind, JobKind},
intern::Interned,
};
use std::{
borrow::Borrow,
cmp::Ordering,
collections::BTreeMap,
fmt,
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
};
impl DynJobKind {
    /// Returns a read-only snapshot of the global job-kind registry.
    pub fn registry() -> JobKindRegistrySnapshot {
        JobKindRegistrySnapshot(JobKindRegistry::get())
    }
    /// Registers this job kind in the global registry.
    ///
    /// Panics if a *different* job kind is already registered under the same
    /// name; re-registering an equal kind is a no-op.
    #[track_caller]
    pub fn register(self) {
        JobKindRegistry::register(JobKindRegistry::lock(), self);
    }
}
// Newtype over `Interned<str>` that orders by string contents, so it can key
// a name-sorted `BTreeMap` regardless of how `Interned` itself orders.
#[derive(Copy, Clone, PartialEq, Eq)]
struct InternedStrCompareAsStr(Interned<str>);
impl fmt::Debug for InternedStrCompareAsStr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
impl Ord for InternedStrCompareAsStr {
    fn cmp(&self, other: &Self) -> Ordering {
        // compare string contents -- must agree with the `Borrow<str>` impl below
        str::cmp(&self.0, &other.0)
    }
}
impl PartialOrd for InternedStrCompareAsStr {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Borrow<str> for InternedStrCompareAsStr {
    // enables `BTreeMap::get::<str>` lookups; sound because `Ord` above uses
    // the same `str` ordering
    fn borrow(&self) -> &str {
        &self.0
    }
}
/// The process-global set of registered job kinds, keyed by name.
#[derive(Clone, Debug)]
struct JobKindRegistry {
    job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
}
/// Error raised when two *different* job kinds share one name.
enum JobKindRegisterError {
    SameName {
        name: InternedStrCompareAsStr,
        // the kind already registered under `name`
        old_job_kind: DynJobKind,
        // the kind whose registration was rejected
        new_job_kind: DynJobKind,
    },
}
impl fmt::Display for JobKindRegisterError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::SameName {
                name,
                old_job_kind,
                new_job_kind,
            } => write!(
                f,
                "two different `JobKind` can't share the same name:\n\
                {name:?}\n\
                old job kind:\n\
                {old_job_kind:?}\n\
                new job kind:\n\
                {new_job_kind:?}",
            ),
        }
    }
}
// Abstracts over *how* the registry is locked for registration: the global
// `RwLock` in normal use, or a plain `&mut` while `Default` builds the
// initial registry (where no lock exists yet).
trait JobKindRegistryRegisterLock {
    type Locked;
    fn lock(self) -> Self::Locked;
    fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
}
impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
    type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
    fn lock(self) -> Self::Locked {
        self.write().expect("shouldn't be poisoned")
    }
    fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
        // copy-on-write: clones the registry if snapshots are still alive
        Arc::make_mut(locked)
    }
}
impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
    type Locked = Self;
    fn lock(self) -> Self::Locked {
        self
    }
    fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
        locked
    }
}
impl JobKindRegistry {
    /// Returns the global registry lock, initialized on first use.
    fn lock() -> &'static RwLock<Arc<Self>> {
        static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
        REGISTRY.get_or_init(Default::default)
    }
    /// Inserts `job_kind` under its name.
    ///
    /// Registering an *equal* kind under an existing name is a no-op; a
    /// different kind under an existing name is an error. The equality test
    /// runs after the lock is released (it may invoke user code).
    fn try_register<L: JobKindRegistryRegisterLock>(
        lock: L,
        job_kind: DynJobKind,
    ) -> Result<(), JobKindRegisterError> {
        use std::collections::btree_map::Entry;
        let name = InternedStrCompareAsStr(job_kind.name());
        // run user code only outside of lock
        let mut locked = lock.lock();
        let this = L::make_mut(&mut locked);
        let result = match this.job_kinds.entry(name) {
            Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
                name,
                old_job_kind: entry.get().clone(),
                new_job_kind: job_kind,
            }),
            Entry::Vacant(entry) => {
                entry.insert(job_kind);
                Ok(())
            }
        };
        drop(locked);
        // outside of lock now, so we can test if it's the same DynJobKind
        match result {
            Err(JobKindRegisterError::SameName {
                name: _,
                old_job_kind,
                new_job_kind,
            }) if old_job_kind == new_job_kind => Ok(()),
            result => result,
        }
    }
    /// Like [`Self::try_register`], but panics on error.
    #[track_caller]
    fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
        match Self::try_register(lock, job_kind) {
            Err(e) => panic!("{e}"),
            Ok(()) => {}
        }
    }
    /// Returns a cheap (`Arc`-clone) snapshot of the current registry.
    fn get() -> Arc<Self> {
        Self::lock().read().expect("shouldn't be poisoned").clone()
    }
}
impl Default for JobKindRegistry {
    /// Builds a registry pre-populated with [`BUILT_IN_JOB_KINDS`].
    fn default() -> Self {
        let mut retval = Self {
            job_kinds: BTreeMap::new(),
        };
        for job_kind in BUILT_IN_JOB_KINDS {
            // registers through `&mut` directly; the global lock isn't used here
            Self::register(&mut retval, job_kind());
        }
        retval
    }
}
/// An immutable point-in-time view of the global job-kind registry.
#[derive(Clone, Debug)]
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);
impl JobKindRegistrySnapshot {
    /// Takes a snapshot of the current global registry.
    pub fn get() -> Self {
        JobKindRegistrySnapshot(JobKindRegistry::get())
    }
    /// Looks up a job kind by name (via the key type's `Borrow<str>`).
    pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
        self.0.job_kinds.get(name)
    }
    /// Iterates `(name, kind)` pairs in name order.
    pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
        JobKindRegistryIterWithNames(self.0.job_kinds.iter())
    }
    /// Iterates job kinds in name order.
    pub fn iter(&self) -> JobKindRegistryIter<'_> {
        JobKindRegistryIter(self.0.job_kinds.values())
    }
}
impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
    type Item = &'a DynJobKind;
    type IntoIter = JobKindRegistryIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
    type Item = &'a DynJobKind;
    type IntoIter = JobKindRegistryIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        // same iteration as the shared borrow; the snapshot is immutable
        self.iter()
    }
}
/// Iterator over registered job kinds in name order.
///
/// All methods forward to the underlying `BTreeMap` values iterator so its
/// size hints and specialized implementations are preserved.
#[derive(Clone, Debug)]
pub struct JobKindRegistryIter<'a>(
    std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
);
impl<'a> Iterator for JobKindRegistryIter<'a> {
    type Item = &'a DynJobKind;
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }
    fn last(self) -> Option<Self::Item> {
        self.0.last()
    }
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.0.nth(n)
    }
    fn fold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0.fold(init, f)
    }
}
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}
impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}
impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.0.next_back()
    }
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        self.0.nth_back(n)
    }
    fn rfold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0.rfold(init, f)
    }
}
/// Like [`JobKindRegistryIter`], but yields `(name, kind)` pairs; wraps the
/// `BTreeMap` entry iterator, unwrapping the name newtype on the way out.
#[derive(Clone, Debug)]
pub struct JobKindRegistryIterWithNames<'a>(
    std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
);
impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
    type Item = (Interned<str>, &'a DynJobKind);
    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(|(name, job_kind)| (name.0, job_kind))
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }
    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }
    fn last(self) -> Option<Self::Item> {
        self.0.last().map(|(name, job_kind)| (name.0, job_kind))
    }
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
    }
    fn fold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0
            .map(|(name, job_kind)| (name.0, job_kind))
            .fold(init, f)
    }
}
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}
impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}
impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.0
            .next_back()
            .map(|(name, job_kind)| (name.0, job_kind))
    }
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        self.0
            .nth_back(n)
            .map(|(name, job_kind)| (name.0, job_kind))
    }
    fn rfold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0
            .map(|(name, job_kind)| (name.0, job_kind))
            .rfold(init, f)
    }
}
/// Wraps `kind` in a [`DynJobKind`] and registers it in the global registry.
///
/// Panics if a different job kind with the same name is already registered.
#[track_caller]
pub fn register_job_kind<K: JobKind>(kind: K) {
    let dyn_job_kind = DynJobKind::new(kind);
    dyn_job_kind.register();
}

View file

@ -0,0 +1,373 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
CommandParams, GetBaseJob, JobAndDependencies, JobArgsAndDependencies, JobDependencies,
JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
external::{ExternalCommand, ExternalCommandJob, ExternalCommandJobKind},
firrtl::FirrtlJobKind,
interned_known_utf8_method, interned_known_utf8_path_buf_method,
},
intern::{Intern, Interned},
util::job_server::AcquiredJob,
};
use clap::Args;
use eyre::bail;
use serde::{Deserialize, Serialize};
use std::{fmt, mem};
/// based on [LLVM Circt's recommended lowering options][lowering-options]
///
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
// NOTE: variant doc comments are deliberately omitted -- clap's ValueEnum
// derive would turn them into CLI help text.
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum VerilogDialect {
    Questa,
    Spyglass,
    Verilator,
    Vivado,
    Yosys,
}
impl fmt::Display for VerilogDialect {
    // displays the lower-case spelling from `as_str`
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
impl VerilogDialect {
pub fn as_str(self) -> &'static str {
match self {
VerilogDialect::Questa => "questa",
VerilogDialect::Spyglass => "spyglass",
VerilogDialect::Verilator => "verilator",
VerilogDialect::Vivado => "vivado",
VerilogDialect::Yosys => "yosys",
}
}
pub fn firtool_extra_args(self) -> &'static [&'static str] {
match self {
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
VerilogDialect::Spyglass => {
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
}
VerilogDialect::Verilator => &[
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
],
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
VerilogDialect::Yosys => {
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
}
}
}
}
// CLI arguments for the firtool (unadjusted-verilog) job. Plain `//`
// comments are used on clap fields so help text stays unchanged.
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct UnadjustedVerilogArgs {
    // passed through to firtool verbatim
    #[arg(long = "firtool-extra-arg", value_name = "ARG")]
    pub firtool_extra_args: Vec<String>,
    /// adapt the generated Verilog for a particular toolchain
    #[arg(long)]
    pub verilog_dialect: Option<VerilogDialect>,
    // maps to firtool's `-g --preserve-values=all` (see command_line_args)
    #[arg(long)]
    pub verilog_debug: bool,
}
impl ToArgs for UnadjustedVerilogArgs {
    /// Writes this struct back out as CLI flags matching the `#[arg]`
    /// spellings above.
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        let Self {
            ref firtool_extra_args,
            verilog_dialect,
            verilog_debug,
        } = *self;
        args.extend(
            firtool_extra_args
                .iter()
                .map(|arg| format!("--firtool-extra-arg={arg}")),
        );
        if let Some(verilog_dialect) = verilog_dialect {
            args.write_arg(format_args!("--verilog-dialect={verilog_dialect}"));
        }
        if verilog_debug {
            args.write_str_arg("--verilog-debug");
        }
    }
}
/// Job data for the firtool invocation that produces the "unadjusted"
/// (single, concatenated) Verilog file from the exported FIRRTL.
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
pub struct UnadjustedVerilog {
    firrtl_file: Interned<str>,
    // file-name component of `firrtl_file` (used on firtool's command line,
    // which runs with current_dir set to the output dir)
    firrtl_file_name: Interned<str>,
    unadjusted_verilog_file: Interned<str>,
    // file-name component of `unadjusted_verilog_file`
    unadjusted_verilog_file_name: Interned<str>,
    firtool_extra_args: Interned<[Interned<str>]>,
    verilog_dialect: Option<VerilogDialect>,
    verilog_debug: bool,
}
impl UnadjustedVerilog {
    pub fn firrtl_file(&self) -> Interned<str> {
        self.firrtl_file
    }
    pub fn unadjusted_verilog_file(&self) -> Interned<str> {
        self.unadjusted_verilog_file
    }
    pub fn firtool_extra_args(&self) -> Interned<[Interned<str>]> {
        self.firtool_extra_args
    }
    pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
        self.verilog_dialect
    }
    pub fn verilog_debug(&self) -> bool {
        self.verilog_debug
    }
}
impl ExternalCommand for UnadjustedVerilog {
    type AdditionalArgs = UnadjustedVerilogArgs;
    type AdditionalJobData = UnadjustedVerilog;
    // runs after the FIRRTL-export job
    type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
    fn dependencies() -> Self::Dependencies {
        Default::default()
    }
    /// Resolves CLI args plus the FIRRTL dependency's outputs into this
    /// job's data (file paths, their name components, and firtool options).
    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let UnadjustedVerilogArgs {
                firtool_extra_args,
                verilog_dialect,
                verilog_debug,
            } = args.additional_args;
            let unadjusted_verilog_file = dependencies
                .dependencies
                .job
                .job
                .file_with_ext("unadjusted.v");
            Ok(UnadjustedVerilog {
                firrtl_file: dependencies.job.job.firrtl_file(),
                firrtl_file_name: interned_known_utf8_method(
                    dependencies.job.job.firrtl_file(),
                    |v| v.file_name().expect("known to have file name"),
                ),
                unadjusted_verilog_file,
                unadjusted_verilog_file_name: interned_known_utf8_method(
                    unadjusted_verilog_file,
                    |v| v.file_name().expect("known to have file name"),
                ),
                firtool_extra_args: firtool_extra_args
                    .into_iter()
                    .map(str::intern_owned)
                    .collect(),
                verilog_dialect,
                verilog_debug,
            })
        })
    }
    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.additional_job_data().firrtl_file,
        }][..]
            .intern()
    }
    fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        [job.additional_job_data().unadjusted_verilog_file][..].intern()
    }
    /// Builds firtool's argv. Only file *names* appear here because the
    /// command runs with `current_dir` set to the output dir (see below).
    fn command_line_args(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        let UnadjustedVerilog {
            firrtl_file: _,
            firrtl_file_name,
            unadjusted_verilog_file: _,
            unadjusted_verilog_file_name,
            firtool_extra_args,
            verilog_dialect,
            verilog_debug,
        } = *job.additional_job_data();
        let mut retval = vec![
            firrtl_file_name,
            "-o".intern(),
            unadjusted_verilog_file_name,
        ];
        if verilog_debug {
            retval.push("-g".intern());
            retval.push("--preserve-values=all".intern());
        }
        if let Some(dialect) = verilog_dialect {
            // dialect-specific lowering options come before user extra args
            retval.extend(
                dialect
                    .firtool_extra_args()
                    .iter()
                    .copied()
                    .map(str::intern),
            );
        }
        retval.extend_from_slice(&firtool_extra_args);
        Intern::intern_owned(retval)
    }
    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
        Some(job.output_dir())
    }
    fn job_kind_name() -> Interned<str> {
        "unadjusted-verilog".intern()
    }
    fn default_program_name() -> Interned<str> {
        "firtool".intern()
    }
    // internal pipeline step: hidden from the user-facing subcommand list
    fn subcommand_hidden() -> bool {
        true
    }
    fn run_even_if_cached_arg_name() -> Interned<str> {
        "firtool-run-even-if-cached".intern()
    }
}
/// Job kind for the final step that splits firtool's concatenated output
/// into individual Verilog files.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VerilogJobKind;
// no CLI arguments of its own; everything comes from the dependency chain
// (plain `//` comment so clap help text is unaffected)
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
#[non_exhaustive]
pub struct VerilogJobArgs {}
impl ToArgs for VerilogJobArgs {
    fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
        // empty destructure so adding a field forces updating this impl
        let Self {} = self;
    }
}
/// Resolved state for a [`VerilogJobKind`] job.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct VerilogJob {
    output_dir: Interned<str>,
    unadjusted_verilog_file: Interned<str>,
    main_verilog_file: Interned<str>,
}
impl VerilogJob {
    pub fn output_dir(&self) -> Interned<str> {
        self.output_dir
    }
    pub fn unadjusted_verilog_file(&self) -> Interned<str> {
        self.unadjusted_verilog_file
    }
    pub fn main_verilog_file(&self) -> Interned<str> {
        self.main_verilog_file
    }
}
impl JobKind for VerilogJobKind {
    type Args = VerilogJobArgs;
    type Job = VerilogJob;
    // runs after firtool has produced the unadjusted (concatenated) verilog
    type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;
    fn dependencies(self) -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        args.args_to_jobs_simple(params, |_kind, args, dependencies| {
            let VerilogJobArgs {} = args;
            Ok(VerilogJob {
                output_dir: dependencies.base_job().output_dir(),
                unadjusted_verilog_file: dependencies
                    .job
                    .job
                    .additional_job_data()
                    .unadjusted_verilog_file(),
                main_verilog_file: dependencies.base_job().file_with_ext("v"),
            })
        })
    }
    fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.unadjusted_verilog_file,
        }][..]
            .intern()
    }
    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [
            JobItemName::Path {
                path: job.main_verilog_file,
            },
            // extra files split out of firtool's output aren't known until
            // run time, so they're published as dynamic paths
            JobItemName::DynamicPaths {
                source_job_name: self.name(),
            },
        ][..]
        .intern()
    }
    fn name(self) -> Interned<str> {
        "verilog".intern()
    }
    // not an external command: run() below does the work in-process
    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        None
    }
    /// Splits firtool's single concatenated output into individual files,
    /// using the `----- 8< ----- FILE "name"` marker lines as separators.
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        _params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
        let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
        let file_separator_suffix = "\" ----- 8< -----\n\n";
        let mut input = &*input;
        // the chunk before the first separator is the main verilog file
        let main_verilog_file = job.main_verilog_file();
        let mut file_name = Some(main_verilog_file);
        let mut additional_outputs = Vec::new();
        loop {
            let (chunk, next_file_name) = if let Some((chunk, rest)) =
                input.split_once(file_separator_prefix)
            {
                let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
                    bail!(
                        "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
                    );
                };
                input = rest;
                // separator file names are relative to the output dir
                let next_file_name =
                    interned_known_utf8_path_buf_method(job.output_dir, |v| v.join(next_file_name));
                additional_outputs.push(next_file_name);
                (chunk, Some(next_file_name))
            } else {
                // no separator left: the remainder is the final chunk
                (mem::take(&mut input), None)
            };
            // `file_name` becomes None only after the final chunk is written
            let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
                break;
            };
            std::fs::write(&file_name, chunk)?;
        }
        Ok(vec![
            JobItem::Path {
                path: main_verilog_file,
            },
            JobItem::DynamicPaths {
                paths: additional_outputs,
                source_job_name: self.name(),
            },
        ])
    }
}

View file

@ -1,806 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
bundle::{Bundle, BundleType},
firrtl::{self, ExportOptions},
intern::Interned,
module::Module,
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
};
use clap::{
Parser, Subcommand, ValueEnum, ValueHint,
builder::{OsStringValueParser, TypedValueParser},
};
use eyre::{Report, eyre};
use serde::{Deserialize, Serialize};
use std::{
error,
ffi::OsString,
fmt::{self, Write},
fs, io, mem,
path::{Path, PathBuf},
process,
};
use tempfile::TempDir;
// NOTE(review): this CLI module is superseded by the `crate::build` job system.
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
/// CLI error type: a thin wrapper around [`eyre::Report`].
pub struct CliError(Report);
impl fmt::Debug for CliError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
impl fmt::Display for CliError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
impl error::Error for CliError {}
impl From<io::Error> for CliError {
    fn from(value: io::Error) -> Self {
        CliError(Report::new(value))
    }
}
/// A pipeline phase that can be run on an input `Arg`.
pub trait RunPhase<Arg> {
    type Output;
    fn run(&self, arg: Arg) -> Result<Self::Output> {
        // acquires a job-server job for the duration of the run
        self.run_with_job(arg, &mut AcquiredJob::acquire())
    }
    fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
}
#[derive(Parser, Debug, Clone)]
#[non_exhaustive]
pub struct BaseArgs {
    /// the directory to put the generated main output file and associated files in
    // NOTE(review): `required = true` although the type is `Option`; `None`
    // occurs when the struct is built programmatically, in which case a temp
    // dir is used (see make_firrtl_file_backend). Confirm intended.
    #[arg(short, long, value_hint = ValueHint::DirPath, required = true)]
    pub output: Option<PathBuf>,
    /// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
    #[arg(long)]
    pub file_stem: Option<String>,
    // keep the temporary output directory instead of deleting it on drop
    #[arg(long, env = "FAYALITE_KEEP_TEMP_DIR")]
    pub keep_temp_dir: bool,
    // not a CLI flag (skip): set by Rust tests to capture child output
    #[arg(skip = false)]
    pub redirect_output_for_rust_test: bool,
}
impl BaseArgs {
    /// Picks the output directory: the user-provided one, or a fresh temp
    /// dir whose guard (if any) is returned so its lifetime controls cleanup.
    fn make_firrtl_file_backend(&self) -> Result<(firrtl::FileBackend, Option<TempDir>)> {
        let (dir_path, temp_dir) = match &self.output {
            Some(output) => (output.clone(), None),
            None => {
                let temp_dir = TempDir::new()?;
                if self.keep_temp_dir {
                    // into_path() consumes the guard so the dir isn't deleted
                    let temp_dir = temp_dir.into_path();
                    println!("created temporary directory: {}", temp_dir.display());
                    (temp_dir, None)
                } else {
                    (temp_dir.path().to_path_buf(), Some(temp_dir))
                }
            }
        };
        Ok((
            firrtl::FileBackend {
                dir_path,
                top_fir_file_stem: self.file_stem.clone(),
                circuit_name: None,
            },
            temp_dir,
        ))
    }
    /// handles possibly redirecting the command's output for Rust tests
    pub fn run_external_command(
        &self,
        _acquired_job: &mut AcquiredJob,
        mut command: process::Command,
        mut captured_output: Option<&mut String>,
    ) -> io::Result<process::ExitStatus> {
        if self.redirect_output_for_rust_test || captured_output.is_some() {
            // pipe both stdout and stderr through us so tests can observe them
            let (reader, writer) = io::pipe()?;
            let mut reader = io::BufReader::new(reader);
            command.stderr(writer.try_clone()?);
            command.stdout(writer); // must not leave writer around after spawning child
            command.stdin(process::Stdio::null());
            let mut child = command.spawn()?;
            drop(command); // close writers
            Ok(loop {
                let status = child.try_wait()?;
                streaming_read_utf8(&mut reader, |s| {
                    if let Some(captured_output) = captured_output.as_deref_mut() {
                        captured_output.push_str(s);
                    }
                    // use print! so output goes to Rust test output capture
                    print!("{s}");
                    io::Result::Ok(())
                })?;
                if let Some(status) = status {
                    break status;
                }
            })
        } else {
            command.status()
        }
    }
}
#[derive(Parser, Debug, Clone)]
#[non_exhaustive]
pub struct FirrtlArgs {
    #[command(flatten)]
    pub base: BaseArgs,
    #[command(flatten)]
    pub export_options: ExportOptions,
}
/// Result of the FIRRTL-export phase: where the `.fir` file was written.
#[derive(Debug)]
#[non_exhaustive]
pub struct FirrtlOutput {
    pub file_stem: String,
    pub top_module: String,
    pub output_dir: PathBuf,
    // holds the temp-dir guard (if any) so the output dir outlives this value
    pub temp_dir: Option<TempDir>,
}
impl FirrtlOutput {
    /// Returns `output_dir/file_stem` with extension `ext`.
    pub fn file_with_ext(&self, ext: &str) -> PathBuf {
        let mut retval = self.output_dir.join(&self.file_stem);
        retval.set_extension(ext);
        retval
    }
    pub fn firrtl_file(&self) -> PathBuf {
        self.file_with_ext("fir")
    }
}
impl FirrtlArgs {
    /// Exports `top_module` as FIRRTL through the configured file backend.
    fn run_impl(
        &self,
        top_module: Module<Bundle>,
        _acquired_job: &mut AcquiredJob,
    ) -> Result<FirrtlOutput> {
        let (file_backend, temp_dir) = self.base.make_firrtl_file_backend()?;
        let firrtl::FileBackend {
            top_fir_file_stem,
            circuit_name,
            dir_path,
        } = firrtl::export(file_backend, &top_module, self.export_options)?;
        Ok(FirrtlOutput {
            file_stem: top_fir_file_stem.expect(
                "export is known to set the file stem from the circuit name if not provided",
            ),
            top_module: circuit_name.expect("export is known to set the circuit name"),
            output_dir: dir_path,
            temp_dir,
        })
    }
}
impl<T: BundleType> RunPhase<Module<T>> for FirrtlArgs {
    type Output = FirrtlOutput;
    fn run_with_job(
        &self,
        top_module: Module<T>,
        acquired_job: &mut AcquiredJob,
    ) -> Result<Self::Output> {
        // canonicalize to the untyped Bundle form before exporting
        self.run_impl(top_module.canonical(), acquired_job)
    }
}
impl<T: BundleType> RunPhase<Interned<Module<T>>> for FirrtlArgs {
    type Output = FirrtlOutput;
    fn run_with_job(
        &self,
        top_module: Interned<Module<T>>,
        acquired_job: &mut AcquiredJob,
    ) -> Result<Self::Output> {
        // deref the interned module and delegate to the impl above
        self.run_with_job(*top_module, acquired_job)
    }
}
/// based on [LLVM Circt's recommended lowering options
/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target)
// NOTE(review): duplicated by the `VerilogDialect` in the new build system's
// verilog module; the two must be kept in sync while both exist.
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum VerilogDialect {
    Questa,
    Spyglass,
    Verilator,
    Vivado,
    Yosys,
}
impl fmt::Display for VerilogDialect {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}
impl VerilogDialect {
    /// Returns the dialect's canonical lower-case name.
    pub fn as_str(self) -> &'static str {
        match self {
            VerilogDialect::Questa => "questa",
            VerilogDialect::Spyglass => "spyglass",
            VerilogDialect::Verilator => "verilator",
            VerilogDialect::Vivado => "vivado",
            VerilogDialect::Yosys => "yosys",
        }
    }
    /// Extra `firtool` arguments adapting its Verilog output to this dialect.
    pub fn firtool_extra_args(self) -> &'static [&'static str] {
        match self {
            VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
            VerilogDialect::Spyglass => {
                &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
            }
            VerilogDialect::Verilator => &[
                "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
            ],
            VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
            VerilogDialect::Yosys => {
                &["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
            }
        }
    }
}
#[derive(Parser, Debug, Clone)]
#[non_exhaustive]
pub struct VerilogArgs {
    #[command(flatten)]
    pub firrtl: FirrtlArgs,
    // resolved to an executable path at parse time via `which`
    #[arg(
        long,
        default_value = "firtool",
        env = "FIRTOOL",
        value_hint = ValueHint::CommandName,
        value_parser = OsStringValueParser::new().try_map(which)
    )]
    pub firtool: PathBuf,
    #[arg(long)]
    pub firtool_extra_args: Vec<OsString>,
    /// adapt the generated Verilog for a particular toolchain
    #[arg(long)]
    pub verilog_dialect: Option<VerilogDialect>,
    #[arg(long, short = 'g')]
    pub debug: bool,
}
/// Result of the Verilog phase: the FIRRTL output plus the produced files.
#[derive(Debug)]
#[non_exhaustive]
pub struct VerilogOutput {
    pub firrtl: FirrtlOutput,
    pub verilog_files: Vec<PathBuf>,
    // blake3 hash of firtool's raw output (presumably for caching -- confirm)
    pub contents_hash: Option<blake3::Hash>,
}
impl VerilogOutput {
    pub fn main_verilog_file(&self) -> PathBuf {
        self.firrtl.file_with_ext("v")
    }
    fn unadjusted_verilog_file(&self) -> PathBuf {
        self.firrtl.file_with_ext("unadjusted.v")
    }
}
impl VerilogArgs {
    /// Splits firtool's single concatenated output file into the individual
    /// files it encodes, using the `----- 8< ----- FILE "name"` marker lines
    /// as separators; records `.v`/`.sv` files in `verilog_files` and hashes
    /// the raw input into `contents_hash`.
    fn process_unadjusted_verilog_file(&self, mut output: VerilogOutput) -> Result<VerilogOutput> {
        let input = fs::read_to_string(output.unadjusted_verilog_file())?;
        let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
        let file_separator_suffix = "\" ----- 8< -----\n\n";
        let mut input = &*input;
        output.contents_hash = Some(blake3::hash(input.as_bytes()));
        // the chunk before the first separator is the main verilog file
        let main_verilog_file = output.main_verilog_file();
        let mut file_name: Option<&Path> = Some(&main_verilog_file);
        loop {
            let (chunk, next_file_name) = if let Some((chunk, rest)) =
                input.split_once(file_separator_prefix)
            {
                let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
                    return Err(CliError(eyre!(
                        "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
                    )));
                };
                input = rest;
                (chunk, Some(next_file_name.as_ref()))
            } else {
                // no separator left: the remainder is the final chunk
                (mem::take(&mut input), None)
            };
            // `file_name` becomes None only after the final chunk is written
            let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
                break;
            };
            let file_name = output.firrtl.output_dir.join(file_name);
            fs::write(&file_name, chunk)?;
            if let Some(extension) = file_name.extension() {
                if extension == "v" || extension == "sv" {
                    output.verilog_files.push(file_name);
                }
            }
        }
        Ok(output)
    }
fn run_impl(
&self,
firrtl_output: FirrtlOutput,
acquired_job: &mut AcquiredJob,
) -> Result<VerilogOutput> {
let Self {
firrtl,
firtool,
firtool_extra_args,
verilog_dialect,
debug,
} = self;
let output = VerilogOutput {
firrtl: firrtl_output,
verilog_files: vec![],
contents_hash: None,
};
let mut cmd = process::Command::new(firtool);
cmd.arg(output.firrtl.firrtl_file());
cmd.arg("-o");
cmd.arg(output.unadjusted_verilog_file());
if *debug {
cmd.arg("-g");
cmd.arg("--preserve-values=all");
}
if let Some(dialect) = verilog_dialect {
cmd.args(dialect.firtool_extra_args());
}
cmd.args(firtool_extra_args);
cmd.current_dir(&output.firrtl.output_dir);
let status = firrtl.base.run_external_command(acquired_job, cmd, None)?;
if status.success() {
self.process_unadjusted_verilog_file(output)
} else {
Err(CliError(eyre!(
"running {} failed: {status}",
self.firtool.display()
)))
}
}
}
impl<Arg> RunPhase<Arg> for VerilogArgs
where
    FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
{
    type Output = VerilogOutput;
    /// runs the FIRRTL phase first, then feeds its output to firtool
    fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
        self.firrtl
            .run_with_job(arg, acquired_job)
            .and_then(|firrtl_output| self.run_impl(firrtl_output, acquired_job))
    }
}
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default)]
#[non_exhaustive]
// sby verification mode, written into the .sby file's `mode` option;
// `//` comments deliberately used since `///` would become clap help text
pub enum FormalMode {
    // rendered as "bmc" (see `as_str`); the default mode
    #[default]
    BMC,
    // rendered as "prove"
    Prove,
    // rendered as "live"
    Live,
    // rendered as "cover"
    Cover,
}
impl FormalMode {
pub fn as_str(self) -> &'static str {
match self {
FormalMode::BMC => "bmc",
FormalMode::Prove => "prove",
FormalMode::Live => "live",
FormalMode::Cover => "cover",
}
}
}
impl fmt::Display for FormalMode {
    /// writes the mode's lowercase name, identical to [`FormalMode::as_str`]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
#[derive(Clone)]
// zero-sized marker flattened into `FormalArgs`; its hand-written `clap::Args`
// impl adjusts arguments inherited from the other flattened arg structs
struct FormalAdjustArgs;
// `FormalAdjustArgs` carries no data, so parsing trivially succeeds and
// consumes nothing -- the interesting behavior lives in its `clap::Args` impl
impl clap::FromArgMatches for FormalAdjustArgs {
    fn from_arg_matches(_matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
        Ok(Self)
    }
    fn update_from_arg_matches(&mut self, _matches: &clap::ArgMatches) -> Result<(), clap::Error> {
        Ok(())
    }
}
impl clap::Args for FormalAdjustArgs {
    // tweaks args inherited from the flattened structs: `--output` becomes
    // optional, and `--verilog-dialect` defaults to yosys and is hidden from
    // help (NOTE(review): presumably because sby drives yosys -- confirm)
    fn augment_args(cmd: clap::Command) -> clap::Command {
        cmd.mut_arg("output", |arg| arg.required(false))
            .mut_arg("verilog_dialect", |arg| {
                arg.default_value(VerilogDialect::Yosys.to_string())
                    .hide(true)
            })
    }
    fn augment_args_for_update(cmd: clap::Command) -> clap::Command {
        Self::augment_args(cmd)
    }
}
/// resolves a command name to a full path using `$PATH`; used as a clap value
/// parser for the `--firtool` and `--sby` arguments
fn which(v: std::ffi::OsString) -> which::Result<PathBuf> {
    #[cfg(not(miri))]
    return which::which(v);
    // under miri, fabricate an absolute path instead of doing a real PATH
    // lookup; NOTE(review): assumes the command is never actually spawned
    // under miri -- confirm
    #[cfg(miri)]
    return Ok(Path::new("/").join(v));
}
#[derive(Parser, Clone)]
#[non_exhaustive]
// arguments for the formal-verification phase (runs SymbiYosys on generated
// Verilog); `//` comments used since `///` would become clap help text
pub struct FormalArgs {
    // the Verilog-generation phase this phase builds on
    #[command(flatten)]
    pub verilog: VerilogArgs,
    // path to the `sby` executable; resolved through $PATH at parse time
    #[arg(
        long,
        default_value = "sby",
        env = "SBY",
        value_hint = ValueHint::CommandName,
        value_parser = OsStringValueParser::new().try_map(which)
    )]
    pub sby: PathBuf,
    // extra command-line arguments passed through to sby verbatim
    #[arg(long)]
    pub sby_extra_args: Vec<String>,
    // verification mode written into the .sby `[options]` section
    #[arg(long, default_value_t)]
    pub mode: FormalMode,
    // `depth` option written into the .sby `[options]` section
    #[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
    pub depth: u64,
    // solver name passed to smtbmc in the .sby `[engines]` section
    #[arg(long, default_value = "z3")]
    pub solver: String,
    // extra args appended after `smtbmc <solver> -- --` in `[engines]`
    #[arg(long)]
    pub smtbmc_extra_args: Vec<String>,
    // when true, skip re-running sby if the hashed inputs match a cached run
    #[arg(long, default_value_t = true, env = "FAYALITE_CACHE_RESULTS")]
    pub cache_results: bool,
    // see `FormalAdjustArgs`: adjusts args inherited from `verilog`
    #[command(flatten)]
    _formal_adjust_args: FormalAdjustArgs,
}
// hand-written because `FormalAdjustArgs` doesn't implement `Debug`; the
// private field is destructured but omitted from the output
impl fmt::Debug for FormalArgs {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // exhaustive destructure so adding a field without updating this impl
        // is a compile error
        let Self {
            verilog,
            sby,
            sby_extra_args,
            mode,
            depth,
            solver,
            smtbmc_extra_args,
            cache_results,
            _formal_adjust_args: _,
        } = self;
        f.debug_struct("FormalArgs")
            .field("verilog", verilog)
            .field("sby", sby)
            .field("sby_extra_args", sby_extra_args)
            .field("mode", mode)
            .field("depth", depth)
            .field("solver", solver)
            .field("smtbmc_extra_args", smtbmc_extra_args)
            .field("cache_results", cache_results)
            // non-exhaustive since `_formal_adjust_args` is not shown
            .finish_non_exhaustive()
    }
}
impl FormalArgs {
    /// default value for `--depth` when not given on the command line
    pub const DEFAULT_DEPTH: u64 = 20;
}
#[derive(Debug)]
#[non_exhaustive]
/// outputs of the formal-verification phase
pub struct FormalOutput {
    /// outputs of the Verilog phase this formal run was based on
    pub verilog: VerilogOutput,
}
impl FormalOutput {
    /// path of the generated SymbiYosys script file
    pub fn sby_file(&self) -> PathBuf {
        self.verilog.firrtl.file_with_ext("sby")
    }
    /// path of the JSON file caching this formal run's result
    pub fn cache_file(&self) -> PathBuf {
        self.verilog.firrtl.file_with_ext("cache.json")
    }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
/// payload stored in the cache for a successful run (currently empty)
pub struct FormalCacheOutput {}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
/// version tag stored in the cache file; entries whose version isn't
/// [`FormalCacheVersion::CURRENT`] are ignored when reading
pub enum FormalCacheVersion {
    V1,
}
impl FormalCacheVersion {
    /// the version new cache entries are written with
    pub const CURRENT: Self = Self::V1;
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
/// on-disk (JSON) record of a completed formal run, keyed by a hash of its
/// inputs -- see `FormalOutput::cache_file`
pub struct FormalCache {
    /// cache format version; stale versions are discarded on read
    pub version: FormalCacheVersion,
    /// blake3 hash of the Verilog contents, the .sby file, and the extra
    /// sby args (computed in `FormalArgs::run_impl`)
    pub contents_hash: blake3::Hash,
    /// combined captured output from sby, replayed on a cache hit
    pub stdout_stderr: String,
    /// `Ok` if sby succeeded, otherwise the error message to reproduce
    pub result: Result<FormalCacheOutput, String>,
}
impl FormalCache {
    /// assembles a cache entry from its four stored fields
    pub fn new(
        version: FormalCacheVersion,
        contents_hash: blake3::Hash,
        stdout_stderr: String,
        result: Result<FormalCacheOutput, String>,
    ) -> Self {
        Self { version, contents_hash, stdout_stderr, result }
    }
}
impl FormalArgs {
    /// builds the contents of the `.sby` file that drives SymbiYosys:
    /// `[options]` and `[engines]` sections from our settings, plus a
    /// `[script]` section that reads each generated Verilog file
    fn sby_contents(&self, output: &FormalOutput) -> Result<String> {
        // exhaustive destructure so new fields can't be silently ignored
        let Self {
            verilog: _,
            sby: _,
            sby_extra_args: _,
            mode,
            depth,
            smtbmc_extra_args,
            solver,
            cache_results: _,
            _formal_adjust_args: _,
        } = self;
        let smtbmc_options = smtbmc_extra_args.join(" ");
        let top_module = &output.verilog.firrtl.top_module;
        let mut retval = format!(
            "[options]\n\
            mode {mode}\n\
            depth {depth}\n\
            wait on\n\
            \n\
            [engines]\n\
            smtbmc {solver} -- -- {smtbmc_options}\n\
            \n\
            [script]\n"
        );
        for verilog_file in &output.verilog.verilog_files {
            let verilog_file = verilog_file
                .to_str()
                .ok_or_else(|| CliError(eyre!("verilog file path is not UTF-8")))?;
            // reject quotes and exotic whitespace in paths; plain spaces and
            // tabs are fine since the path is quoted below
            if verilog_file.contains(|ch: char| {
                (ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
            }) {
                return Err(CliError(eyre!(
                    "verilog file path contains characters that aren't permitted"
                )));
            }
            writeln!(retval, "read_verilog -sv -formal \"{verilog_file}\"").unwrap();
        }
        // workaround for wires disappearing -- set `keep` on all wires
        writeln!(retval, "hierarchy -top {top_module}").unwrap();
        writeln!(retval, "proc").unwrap();
        writeln!(retval, "setattr -set keep 1 w:\\*").unwrap();
        writeln!(retval, "prep").unwrap();
        Ok(retval)
    }
    /// writes the `.sby` file and runs sby on it, consulting and updating the
    /// on-disk result cache when `--cache-results` is enabled
    fn run_impl(
        &self,
        verilog_output: VerilogOutput,
        acquired_job: &mut AcquiredJob,
    ) -> Result<FormalOutput> {
        let output = FormalOutput {
            verilog: verilog_output,
        };
        let sby_file = output.sby_file();
        let sby_contents = self.sby_contents(&output)?;
        // hash everything that influences the result: the Verilog contents,
        // the .sby file, and the extra sby args (each length-prefixed so
        // different splits of the same bytes can't collide)
        let contents_hash = output.verilog.contents_hash.map(|verilog_hash| {
            let mut hasher = blake3::Hasher::new();
            hasher.update(verilog_hash.as_bytes());
            hasher.update(sby_contents.as_bytes());
            hasher.update(&(self.sby_extra_args.len() as u64).to_le_bytes());
            for sby_extra_arg in self.sby_extra_args.iter() {
                hasher.update(&(sby_extra_arg.len() as u64).to_le_bytes());
                hasher.update(sby_extra_arg.as_bytes());
            }
            hasher.finalize()
        });
        std::fs::write(&sby_file, sby_contents)?;
        let mut cmd = process::Command::new(&self.sby);
        cmd.arg("-j1"); // sby seems not to respect job count in parallel mode
        cmd.arg("-f");
        cmd.arg(sby_file.file_name().unwrap());
        cmd.args(&self.sby_extra_args);
        cmd.current_dir(&output.verilog.firrtl.output_dir);
        let mut captured_output = String::new();
        let cache_file = output.cache_file();
        // consult the cache; on a hit, replay the stored output and result
        // without running sby at all
        let do_cache = if let Some(contents_hash) = contents_hash.filter(|_| self.cache_results) {
            if let Some(FormalCache {
                version: FormalCacheVersion::CURRENT,
                contents_hash: cache_contents_hash,
                stdout_stderr,
                result,
            }) = fs::read(&cache_file)
                .ok()
                .and_then(|v| serde_json::from_slice(&v).ok())
            {
                if cache_contents_hash == contents_hash {
                    println!("Using cached formal result:\n{stdout_stderr}");
                    return match result {
                        Ok(FormalCacheOutput {}) => Ok(output),
                        Err(error) => Err(CliError(eyre::Report::msg(error))),
                    };
                }
            }
            true
        } else {
            false
        };
        // remove any stale entry before (re)running; best-effort
        let _ = fs::remove_file(&cache_file);
        let status = self.verilog.firrtl.base.run_external_command(
            acquired_job,
            cmd,
            // only capture output when we intend to cache it
            do_cache.then_some(&mut captured_output),
        )?;
        let result = if status.success() {
            Ok(output)
        } else {
            Err(CliError(eyre!(
                "running {} failed: {status}",
                self.sby.display()
            )))
        };
        // bug fix: only write the cache entry when caching is active. the
        // previous version wrote it unconditionally, which panicked on the
        // `unwrap` when `contents_hash` was `None`, and with caching disabled
        // recorded entries whose `stdout_stderr` was empty (output wasn't
        // captured) that a later cached run would silently replay.
        if do_cache {
            fs::write(
                cache_file,
                serde_json::to_string_pretty(&FormalCache {
                    version: FormalCacheVersion::CURRENT,
                    contents_hash: contents_hash
                        .expect("do_cache is only true when contents_hash is Some"),
                    stdout_stderr: captured_output,
                    result: match &result {
                        Ok(FormalOutput { verilog: _ }) => Ok(FormalCacheOutput {}),
                        Err(error) => Err(error.to_string()),
                    },
                })
                .expect("serialization shouldn't ever fail"),
            )?;
        }
        result
    }
}
impl<Arg> RunPhase<Arg> for FormalArgs
where
    VerilogArgs: RunPhase<Arg, Output = VerilogOutput>,
{
    type Output = FormalOutput;
    /// runs the Verilog phase first, then hands its output to sby
    fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
        self.verilog
            .run_with_job(arg, acquired_job)
            .and_then(|verilog_output| self.run_impl(verilog_output, acquired_job))
    }
}
#[derive(Subcommand, Debug)]
// one variant per build phase; `Cli::run_with_job` dispatches on these.
// the `///` comments below are clap help text -- keep them user-facing.
enum CliCommand {
    /// Generate FIRRTL
    Firrtl(FirrtlArgs),
    /// Generate Verilog
    Verilog(VerilogArgs),
    /// Run a formal proof
    Formal(FormalArgs),
}
/// a simple CLI
///
/// Use like:
///
/// ```no_run
/// # use fayalite::prelude::*;
/// # #[hdl_module]
/// # fn my_module() {}
/// use fayalite::cli;
///
/// fn main() -> cli::Result {
///     cli::Cli::parse().run(my_module())
/// }
/// ```
///
/// You can also use it with a larger [`clap`]-based CLI like so:
///
/// ```no_run
/// # use fayalite::prelude::*;
/// # #[hdl_module]
/// # fn my_module() {}
/// use clap::{Subcommand, Parser};
/// use fayalite::cli;
///
/// #[derive(Subcommand)]
/// pub enum Cmd {
///     #[command(flatten)]
///     Fayalite(cli::Cli),
///     MySpecialCommand {
///         #[arg(long)]
///         foo: bool,
///     },
/// }
///
/// #[derive(Parser)]
/// pub struct Cli {
///     #[command(subcommand)]
///     cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands
/// }
///
/// fn main() -> cli::Result {
///     match Cli::parse().cmd {
///         Cmd::Fayalite(v) => v.run(my_module())?,
///         Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"),
///     }
///     Ok(())
/// }
/// ```
#[derive(Parser, Debug)]
// clear things that would be crate-specific
#[command(name = "Fayalite Simple CLI", about = None, long_about = None)]
pub struct Cli {
    // the selected phase to run (firrtl / verilog / formal)
    #[command(subcommand)]
    subcommand: CliCommand,
}
// forwards `Subcommand` to `CliCommand` so `Cli` can be `#[command(flatten)]`ed
// into a larger CLI as a set of subcommands (see the struct docs' example)
impl clap::Subcommand for Cli {
    fn augment_subcommands(cmd: clap::Command) -> clap::Command {
        CliCommand::augment_subcommands(cmd)
    }
    fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command {
        CliCommand::augment_subcommands_for_update(cmd)
    }
    fn has_subcommand(name: &str) -> bool {
        CliCommand::has_subcommand(name)
    }
}
impl<T> RunPhase<T> for Cli
where
    FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
{
    type Output = ();
    /// dispatches to the selected subcommand, discarding its phase-specific
    /// output
    fn run_with_job(&self, arg: T, acquired_job: &mut AcquiredJob) -> Result<Self::Output> {
        match &self.subcommand {
            CliCommand::Firrtl(command) => command.run_with_job(arg, acquired_job).map(|_| ()),
            CliCommand::Verilog(command) => command.run_with_job(arg, acquired_job).map(|_| ()),
            CliCommand::Formal(command) => command.run_with_job(arg, acquired_job).map(|_| ()),
        }
    }
}
impl Cli {
    /// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`]
    pub fn parse() -> Self {
        <Self as clap::Parser>::parse()
    }
    /// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`]
    pub fn run<T>(&self, top_module: T) -> Result<()>
    where
        Self: RunPhase<T, Output = ()>,
    {
        <Self as RunPhase<T>>::run(self, top_module)
    }
}

View file

@ -7,6 +7,7 @@ use crate::{
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
},
array::Array,
build::{ToArgs, WriteArgs},
bundle::{Bundle, BundleField, BundleType},
clock::Clock,
enum_::{Enum, EnumType, EnumVariant},
@ -23,7 +24,7 @@ use crate::{
memory::{Mem, PortKind, PortName, ReadUnderWrite},
module::{
AnnotatedModuleIO, Block, ExternModuleBody, ExternModuleParameter,
ExternModuleParameterValue, Module, ModuleBody, NameOptId, NormalModuleBody, Stmt,
ExternModuleParameterValue, Module, ModuleBody, NameId, NameOptId, NormalModuleBody, Stmt,
StmtConnect, StmtDeclaration, StmtFormal, StmtIf, StmtInstance, StmtMatch, StmtReg,
StmtWire,
transform::{
@ -42,7 +43,7 @@ use crate::{
use bitvec::slice::BitSlice;
use clap::value_parser;
use num_traits::Signed;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use std::{
cell::{Cell, RefCell},
cmp::Ordering,
@ -2749,14 +2750,23 @@ impl clap::builder::TypedValueParser for OptionSimplifyEnumsKindValueParser {
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct ExportOptionsPrivate(());
#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash)]
impl ExportOptionsPrivate {
fn private_new() -> Self {
Self(())
}
}
#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ExportOptions {
#[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)]
#[serde(default = "ExportOptions::default_simplify_memories")]
pub simplify_memories: bool,
#[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")]
#[serde(default = "ExportOptions::default_simplify_enums")]
pub simplify_enums: std::option::Option<SimplifyEnumsKind>, // use std::option::Option instead of Option to avoid clap mis-parsing
#[doc(hidden)]
#[clap(skip = ExportOptionsPrivate(()))]
#[serde(skip, default = "ExportOptionsPrivate::private_new")]
/// `#[non_exhaustive]` except allowing struct update syntax
pub __private: ExportOptionsPrivate,
}
@ -2767,16 +2777,15 @@ impl fmt::Debug for ExportOptions {
}
}
impl ExportOptions {
pub fn to_args(&self) -> Vec<Interned<str>> {
impl ToArgs for ExportOptions {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self {
simplify_memories,
simplify_enums,
__private: ExportOptionsPrivate(()),
} = self;
let mut retval = Vec::new();
if !*simplify_memories {
retval.push("--no-simplify-memories".intern());
} = *self;
if !simplify_memories {
args.write_str_arg("--no-simplify-memories");
}
let simplify_enums = simplify_enums.map(|v| {
clap::ValueEnum::to_possible_value(&v).expect("there are no skipped variants")
@ -2785,10 +2794,16 @@ impl ExportOptions {
None => OptionSimplifyEnumsKindValueParser::NONE_NAME,
Some(v) => v.get_name(),
};
retval.push(str::intern_owned(format!(
"--simplify-enums={simplify_enums}"
)));
retval
args.write_arg(format_args!("--simplify-enums={simplify_enums}"));
}
}
impl ExportOptions {
fn default_simplify_memories() -> bool {
true
}
fn default_simplify_enums() -> Option<SimplifyEnumsKind> {
Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts)
}
fn debug_fmt(
&self,
@ -2846,13 +2861,19 @@ impl ExportOptions {
impl Default for ExportOptions {
fn default() -> Self {
Self {
simplify_memories: true,
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
simplify_memories: Self::default_simplify_memories(),
simplify_enums: Self::default_simplify_enums(),
__private: ExportOptionsPrivate(()),
}
}
}
pub fn get_circuit_name(top_module_name_id: NameId) -> Interned<str> {
let mut global_ns = Namespace::default();
let circuit_name = global_ns.get(top_module_name_id);
circuit_name.0
}
pub fn export<T: BundleType, B: FileBackendTrait>(
file_backend: B,
top_module: &Module<T>,

View file

@ -9,11 +9,13 @@ use std::{
any::{Any, TypeId},
borrow::{Borrow, Cow},
cmp::Ordering,
ffi::OsStr,
fmt,
hash::{BuildHasher, Hash, Hasher},
iter::FusedIterator,
marker::PhantomData,
ops::Deref,
path::Path,
sync::{Mutex, RwLock},
};
@ -416,6 +418,12 @@ forward_fmt_trait!(Pointer);
forward_fmt_trait!(UpperExp);
forward_fmt_trait!(UpperHex);
impl<T: ?Sized + 'static + Send + Sync> AsRef<T> for Interned<T> {
fn as_ref(&self) -> &T {
self
}
}
#[derive(Clone, Debug)]
pub struct InternedSliceIter<T: Clone + 'static + Send + Sync> {
slice: Interned<[T]>,
@ -485,6 +493,51 @@ where
}
}
impl<I> FromIterator<I> for Interned<str>
where
String: FromIterator<I>,
{
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
str::intern_owned(FromIterator::from_iter(iter))
}
}
impl AsRef<OsStr> for Interned<str> {
fn as_ref(&self) -> &OsStr {
str::as_ref(self)
}
}
impl AsRef<Path> for Interned<str> {
fn as_ref(&self) -> &Path {
str::as_ref(self)
}
}
impl From<Interned<str>> for clap::builder::Str {
fn from(value: Interned<str>) -> Self {
Interned::into_inner(value).into()
}
}
impl From<Interned<str>> for clap::builder::OsStr {
fn from(value: Interned<str>) -> Self {
Interned::into_inner(value).into()
}
}
impl From<Interned<str>> for clap::builder::StyledStr {
fn from(value: Interned<str>) -> Self {
Interned::into_inner(value).into()
}
}
impl From<Interned<str>> for clap::Id {
fn from(value: Interned<str>) -> Self {
Interned::into_inner(value).into()
}
}
impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Vec<T> {
fn from(value: Interned<[T]>) -> Self {
Vec::from(&*value)

View file

@ -89,7 +89,6 @@ pub mod annotations;
pub mod array;
pub mod build;
pub mod bundle;
pub mod cli;
pub mod clock;
pub mod enum_;
pub mod expr;

View file

@ -1212,6 +1212,12 @@ pub struct Module<T: BundleType> {
module_annotations: Interned<[Annotation]>,
}
impl<T: BundleType> AsRef<Self> for Module<T> {
fn as_ref(&self) -> &Self {
self
}
}
#[derive(Default)]
struct DebugFmtModulesState {
seen: HashSet<NameId>,

View file

@ -22,6 +22,7 @@ use crate::{
wire::Wire,
};
use core::fmt;
use serde::{Deserialize, Serialize};
#[derive(Debug)]
pub enum SimplifyEnumsError {
@ -955,12 +956,15 @@ impl Folder for State {
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum SimplifyEnumsKind {
SimplifyToEnumsWithNoBody,
#[clap(name = "replace-with-bundle-of-uints")]
#[serde(rename = "replace-with-bundle-of-uints")]
ReplaceWithBundleOfUInts,
#[clap(name = "replace-with-uint")]
#[serde(rename = "replace-with-uint")]
ReplaceWithUInt,
}

View file

@ -7,8 +7,8 @@ pub use crate::{
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
},
array::{Array, ArrayType},
build::{BuildCli, JobParams, RunBuild},
bundle::Bundle,
cli::Cli,
clock::{Clock, ClockDomain, ToClock},
enum_::{Enum, HdlNone, HdlOption, HdlSome},
expr::{
@ -36,6 +36,7 @@ pub use crate::{
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
},
source_location::SourceLocation,
testing::assert_formal,
ty::{AsMask, CanonicalType, Type},
util::{ConstUsize, GenericConstUsize},
wire::Wire,

View file

@ -1,11 +1,20 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
cli::{FormalArgs, FormalMode, FormalOutput, RunPhase},
build::{
BaseJobArgs, BaseJobKind, JobArgsAndDependencies, JobKindAndArgs, JobParams, NoArgs,
RunBuild,
external::{ExternalCommandArgs, ExternalCommandJobKind},
firrtl::{FirrtlArgs, FirrtlJobKind},
formal::{Formal, FormalAdditionalArgs, FormalArgs, FormalMode, WriteSbyFileJobKind},
verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind},
},
bundle::BundleType,
firrtl::ExportOptions,
module::Module,
util::HashMap,
};
use clap::Parser;
use eyre::eyre;
use serde::Deserialize;
use std::{
fmt::Write,
@ -14,14 +23,6 @@ use std::{
sync::{Mutex, OnceLock},
};
fn assert_formal_helper() -> FormalArgs {
static FORMAL_ARGS: OnceLock<FormalArgs> = OnceLock::new();
// ensure we only run parsing once, so errors from env vars don't produce overlapping output if we're called on multiple threads
FORMAL_ARGS
.get_or_init(|| FormalArgs::parse_from(["fayalite::testing::assert_formal"]))
.clone()
}
#[derive(Deserialize)]
struct CargoMetadata {
target_directory: String,
@ -97,26 +98,104 @@ fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
.join(dir)
}
#[track_caller]
pub fn assert_formal<M>(
test_name: impl std::fmt::Display,
module: M,
mode: FormalMode,
depth: u64,
fn make_assert_formal_args(
test_name: &dyn std::fmt::Display,
formal_mode: FormalMode,
formal_depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) where
FormalArgs: RunPhase<M, Output = FormalOutput>,
{
let mut args = assert_formal_helper();
args.verilog.firrtl.base.redirect_output_for_rust_test = true;
args.verilog.firrtl.base.output = Some(get_assert_formal_target_path(&test_name));
args.verilog.firrtl.export_options = export_options;
args.verilog.debug = true;
args.mode = mode;
args.depth = depth;
if let Some(solver) = solver {
args.solver = solver.into();
}
args.run(module).expect("testing::assert_formal() failed");
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
let args = JobKindAndArgs {
kind: BaseJobKind,
args: BaseJobArgs::from_output_dir_and_env(
get_assert_formal_target_path(&test_name)
.into_os_string()
.into_string()
.map_err(|_| eyre!("path is not valid UTF-8"))?,
),
};
let dependencies = JobArgsAndDependencies {
args,
dependencies: (),
};
let args = JobKindAndArgs {
kind: FirrtlJobKind,
args: FirrtlArgs { export_options },
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: ExternalCommandJobKind::new(),
args: ExternalCommandArgs::new(
None,
UnadjustedVerilogArgs {
firtool_extra_args: vec![],
verilog_dialect: None,
verilog_debug: true,
},
)?,
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: VerilogJobKind,
args: VerilogJobArgs {},
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: WriteSbyFileJobKind,
args: FormalArgs {
sby_extra_args: vec![],
formal_mode,
formal_depth,
formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(),
smtbmc_extra_args: vec![],
},
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: ExternalCommandJobKind::new(),
args: ExternalCommandArgs::new(None, FormalAdditionalArgs {})?,
};
Ok(JobArgsAndDependencies { args, dependencies })
}
pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
test_name: impl std::fmt::Display,
module: M,
formal_mode: FormalMode,
formal_depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) -> eyre::Result<()> {
const APP_NAME: &'static str = "fayalite::testing::assert_formal";
make_assert_formal_args(
&test_name,
formal_mode,
formal_depth,
solver,
export_options,
)?
.run(
|NoArgs {}| Ok(JobParams::new(module, APP_NAME)),
clap::Command::new(APP_NAME), // not actually used, so we can use an arbitrary value
)
}
#[track_caller]
pub fn assert_formal<M: AsRef<Module<T>>, T: BundleType>(
test_name: impl std::fmt::Display,
module: M,
formal_mode: FormalMode,
formal_depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) {
try_assert_formal(
test_name,
module,
formal_mode,
formal_depth,
solver,
export_options,
)
.expect("testing::assert_formal() failed");
}

View file

@ -36,8 +36,11 @@ pub use scoped_ref::ScopedRef;
pub(crate) use misc::chain;
#[doc(inline)]
pub use misc::{
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter, interned_bit,
iter_eq_by, slice_range, try_slice_range,
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, serialize_to_json_ascii,
serialize_to_json_ascii_pretty, serialize_to_json_ascii_pretty_with_indent, slice_range,
try_slice_range,
};
pub mod job_server;

View file

@ -1,192 +1,156 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use ctor::ctor;
use jobslot::{Acquired, Client};
use ctor::{ctor, dtor};
use jobslot::Client;
use std::{
ffi::OsString,
mem,
io, mem,
num::NonZeroUsize,
sync::{Condvar, Mutex, Once, OnceLock},
thread::spawn,
sync::{Mutex, MutexGuard},
};
fn get_or_make_client() -> &'static Client {
#[ctor]
static CLIENT: OnceLock<Client> = unsafe {
match Client::from_env() {
Some(client) => OnceLock::from(client),
None => OnceLock::new(),
}
};
#[ctor]
static CLIENT: Mutex<Option<Option<Client>>> = unsafe { Mutex::new(Some(Client::from_env())) };
CLIENT.get_or_init(|| {
let mut available_parallelism = None;
let mut args = std::env::args_os().skip(1);
while let Some(arg) = args.next() {
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
Some(b'=') => {
let mut arg = arg.into_encoded_bytes();
arg.drain(..=TEST_THREADS_OPTION.len());
available_parallelism = Some(arg);
break;
#[dtor]
fn drop_client() {
drop(
match CLIENT.lock() {
Ok(v) => v,
Err(e) => e.into_inner(),
}
.take(),
);
}
fn get_or_make_client() -> Client {
CLIENT
.lock()
.expect("shouldn't have panicked")
.as_mut()
.expect("shutting down")
.get_or_insert_with(|| {
let mut available_parallelism = None;
let mut args = std::env::args_os().skip(1);
while let Some(arg) = args.next() {
const TEST_THREADS_OPTION: &'static [u8] = b"--test-threads";
if arg.as_encoded_bytes().starts_with(TEST_THREADS_OPTION) {
match arg.as_encoded_bytes().get(TEST_THREADS_OPTION.len()) {
Some(b'=') => {
let mut arg = arg.into_encoded_bytes();
arg.drain(..=TEST_THREADS_OPTION.len());
available_parallelism = Some(arg);
break;
}
None => {
available_parallelism = args.next().map(OsString::into_encoded_bytes);
break;
}
_ => {}
}
None => {
available_parallelism = args.next().map(OsString::into_encoded_bytes);
break;
}
_ => {}
}
}
}
let available_parallelism = if let Some(available_parallelism) = available_parallelism
.as_deref()
.and_then(|v| std::str::from_utf8(v).ok())
.and_then(|v| v.parse().ok())
{
available_parallelism
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
available_parallelism
} else {
NonZeroUsize::new(1).unwrap()
};
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
})
let available_parallelism = if let Some(available_parallelism) = available_parallelism
.as_deref()
.and_then(|v| std::str::from_utf8(v).ok())
.and_then(|v| v.parse().ok())
{
available_parallelism
} else if let Ok(available_parallelism) = std::thread::available_parallelism() {
available_parallelism
} else {
NonZeroUsize::new(1).unwrap()
};
Client::new_with_fifo(available_parallelism.get() - 1)
.expect("failed to create job server")
})
.clone()
}
struct State {
obtained_count: usize,
waiting_count: usize,
available: Vec<Acquired>,
implicit_available: bool,
}
impl State {
fn total_available(&self) -> usize {
self.available.len() + self.implicit_available as usize
}
fn additional_waiting(&self) -> usize {
self.waiting_count.saturating_sub(self.total_available())
}
}
static STATE: Mutex<State> = Mutex::new(State {
obtained_count: 0,
waiting_count: 0,
available: Vec::new(),
implicit_available: true,
});
static COND_VAR: Condvar = Condvar::new();
#[derive(Debug)]
enum AcquiredJobInner {
FromJobServer(Acquired),
ImplicitJob,
}
#[derive(Debug)]
pub struct AcquiredJob {
job: AcquiredJobInner,
client: Client,
}
impl AcquiredJob {
fn start_acquire_thread() {
static STARTED_THREAD: Once = Once::new();
STARTED_THREAD.call_once(|| {
spawn(|| {
let mut acquired = None;
let client = get_or_make_client();
pub fn acquire() -> io::Result<Self> {
let client = get_or_make_client();
struct Waiting {}
impl Waiting {
fn done(self) -> MutexGuard<'static, State> {
mem::forget(self);
let mut state = STATE.lock().unwrap();
loop {
state = if state.additional_waiting() == 0 {
if acquired.is_some() {
drop(state);
drop(acquired.take()); // drop Acquired outside of lock
STATE.lock().unwrap()
} else {
COND_VAR.wait(state).unwrap()
}
} else if acquired.is_some() {
// allocate space before moving Acquired to ensure we
// drop Acquired outside of the lock on panic
state.available.reserve(1);
state.available.push(acquired.take().unwrap());
COND_VAR.notify_all();
state
} else {
drop(state);
acquired = Some(
client
.acquire()
.expect("can't acquire token from job server"),
);
STATE.lock().unwrap()
};
}
});
});
}
fn acquire_inner(block: bool) -> Option<Self> {
Self::start_acquire_thread();
let mut state = STATE.lock().unwrap();
loop {
if let Some(acquired) = state.available.pop() {
return Some(Self {
job: AcquiredJobInner::FromJobServer(acquired),
});
state.waiting_count -= 1;
state
}
if state.implicit_available {
state.implicit_available = false;
return Some(Self {
job: AcquiredJobInner::ImplicitJob,
});
}
if !block {
return None;
}
state.waiting_count += 1;
state = COND_VAR.wait(state).unwrap();
state.waiting_count -= 1;
}
}
pub fn try_acquire() -> Option<Self> {
Self::acquire_inner(false)
}
pub fn acquire() -> Self {
Self::acquire_inner(true).expect("failed to acquire token")
impl Drop for Waiting {
fn drop(&mut self) {
STATE.lock().unwrap().waiting_count -= 1;
}
}
let mut state = STATE.lock().unwrap();
if state.obtained_count == 0 && state.waiting_count == 0 {
state.obtained_count = 1; // get implicit token
return Ok(Self { client });
}
state.waiting_count += 1;
drop(state);
let waiting = Waiting {};
client.acquire_raw()?;
state = waiting.done();
state.obtained_count = state
.obtained_count
.checked_add(1)
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
drop(state);
Ok(Self { client })
}
pub fn run_command<R>(
&mut self,
cmd: std::process::Command,
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
) -> std::io::Result<R> {
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
self.client.configure_make_and_run_with_fifo(cmd, f)
}
}
impl Drop for AcquiredJob {
fn drop(&mut self) {
let mut state = STATE.lock().unwrap();
match &self.job {
AcquiredJobInner::FromJobServer(_) => {
if state.waiting_count > state.available.len() + state.implicit_available as usize {
// allocate space before moving Acquired to ensure we
// drop Acquired outside of the lock on panic
state.available.reserve(1);
let AcquiredJobInner::FromJobServer(acquired) =
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
else {
unreachable!()
};
state.available.push(acquired);
COND_VAR.notify_all();
match &mut *state {
State {
obtained_count: 0, ..
} => unreachable!(),
State {
obtained_count: obtained_count @ 1,
waiting_count,
} => {
*obtained_count = 0; // drop implicit token
let any_waiting = *waiting_count != 0;
drop(state);
if any_waiting {
// we have the implicit token, but some other thread is trying to acquire a token,
// release the implicit token so they can acquire it.
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
}
}
AcquiredJobInner::ImplicitJob => {
state.implicit_available = true;
if state.waiting_count > state.available.len() {
COND_VAR.notify_all();
}
State { obtained_count, .. } => {
*obtained_count = obtained_count.saturating_sub(1);
drop(state);
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
}
}
}

View file

@ -5,6 +5,7 @@ use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
use std::{
cell::Cell,
fmt::{self, Debug, Write},
io,
ops::{Bound, Range, RangeBounds},
rc::Rc,
sync::{Arc, OnceLock},
@ -243,3 +244,323 @@ pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<R
/// Resolves `range` against a collection of length `size`, returning the
/// concrete `start..end` index range.
///
/// # Panics
/// Panics with "range out of bounds" when the bounds do not fit in `size`.
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
    match try_slice_range(range, size) {
        Some(resolved) => resolved,
        None => panic!("range out of bounds"),
    }
}
/// Per-character predicate deciding whether the JSON serializer must
/// `\u`-escape a character. Fallible so a test can abort serialization
/// with a `serde_json::Error`.
pub trait SerdeJsonEscapeIfTest {
/// Returns `Ok(true)` when `ch` must be written as a `\uXXXX` escape.
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
}
/// Return-type adapter: lets escape-test closures return either a plain
/// `bool` or a `Result<bool, E>`.
pub trait SerdeJsonEscapeIfTestResult {
/// Normalizes the value into `serde_json::Result<bool>`.
fn to_result(self) -> serde_json::Result<bool>;
}
impl SerdeJsonEscapeIfTestResult for bool {
fn to_result(self) -> serde_json::Result<bool> {
Ok(self)
}
}
impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
fn to_result(self) -> serde_json::Result<bool> {
self.map_err(Into::into)
}
}
// Blanket impl: any `FnMut(char)` whose return type converts through
// `SerdeJsonEscapeIfTestResult` is usable as an escape test.
impl<T: ?Sized + FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
self(ch).to_result()
}
}
/// Extension of `serde_json::ser::Formatter` that can emit a `\uXXXX`
/// escape for an arbitrary character (two escapes for characters outside
/// the Basic Multilingual Plane).
pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
    /// Writes `ch` as one or two `\uXXXX` UTF-16 code-unit escapes.
    fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        // A char needs at most two UTF-16 code units.
        let mut units = [0u16; 2];
        ch.encode_utf16(&mut units)
            .iter()
            .try_for_each(|unit| write!(writer, "\\u{unit:04x}"))
    }
}
impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
/// JSON formatter wrapper that `\u`-escapes the characters selected by
/// `test` and delegates all other output to `base`.
pub struct SerdeJsonEscapeIf<Test, Base = serde_json::ser::CompactFormatter> {
// Underlying formatter handling all non-escape output.
pub base: Base,
// Per-character predicate selecting which characters to escape.
pub test: Test,
}
// Every method below forwards verbatim to `self.base`; the only behavioral
// difference from the base formatter is `write_string_fragment`, which
// splices in `\uXXXX` escapes for characters selected by `self.test`.
impl<Test: SerdeJsonEscapeIfTest, Base: SerdeJsonEscapeIfFormatter> serde_json::ser::Formatter
for SerdeJsonEscapeIf<Test, Base>
{
fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_null(writer)
}
fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_bool(writer, value)
}
fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_i8(writer, value)
}
fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_i16(writer, value)
}
fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_i32(writer, value)
}
fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_i64(writer, value)
}
fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_i128(writer, value)
}
fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_u8(writer, value)
}
fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_u16(writer, value)
}
fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_u32(writer, value)
}
fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_u64(writer, value)
}
fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_u128(writer, value)
}
fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_f32(writer, value)
}
fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_f64(writer, value)
}
fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_number_str(writer, value)
}
fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.begin_string(writer)
}
fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.end_string(writer)
}
// The one overridden behavior: split the fragment at each character the
// test flags, writing unflagged runs through the base formatter and
// flagged characters as `\u` escapes.
fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
where
W: ?Sized + io::Write,
{
// Find the next character to escape; a test error (serde_json::Error)
// converts into io::Error via `?`.
while let Some((next_escape_index, next_escape_char)) = fragment
.char_indices()
.find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
Ok(false) => None,
Ok(true) => Some(Ok((index, ch))),
Err(e) => Some(Err(e)),
})
.transpose()?
{
// Emit the unescaped prefix, then the escape, then continue after
// the escaped character.
let (no_escapes, rest) = fragment.split_at(next_escape_index);
fragment = &rest[next_escape_char.len_utf8()..];
self.base.write_string_fragment(writer, no_escapes)?;
self.base.write_unicode_escape(writer, next_escape_char)?;
}
// Remaining tail contains no characters needing escapes.
self.base.write_string_fragment(writer, fragment)
}
fn write_char_escape<W>(
&mut self,
writer: &mut W,
char_escape: serde_json::ser::CharEscape,
) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_char_escape(writer, char_escape)
}
fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_byte_array(writer, value)
}
fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.begin_array(writer)
}
fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.end_array(writer)
}
fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.begin_array_value(writer, first)
}
fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.end_array_value(writer)
}
fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.begin_object(writer)
}
fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.end_object(writer)
}
fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.begin_object_key(writer, first)
}
fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.end_object_key(writer)
}
fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.begin_object_value(writer)
}
fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.end_object_value(writer)
}
fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
where
W: ?Sized + io::Write,
{
self.base.write_raw_fragment(writer, fragment)
}
}
fn serialize_to_json_ascii_helper<F: SerdeJsonEscapeIfFormatter, S: serde::Serialize + ?Sized>(
v: &S,
base: F,
) -> serde_json::Result<String> {
let mut retval = Vec::new();
v.serialize(&mut serde_json::ser::Serializer::with_formatter(
&mut retval,
SerdeJsonEscapeIf {
base,
test: |ch| ch < '\x20' || ch > '\x7F',
},
))?;
String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
}
/// Serializes `v` to compact JSON whose text is pure printable ASCII
/// (control and non-ASCII characters are `\u`-escaped).
pub fn serialize_to_json_ascii<T: serde::Serialize + ?Sized>(v: &T) -> serde_json::Result<String> {
serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
}
/// Pretty-printed variant of [`serialize_to_json_ascii`] using
/// `PrettyFormatter::new()`'s default indent.
pub fn serialize_to_json_ascii_pretty<T: serde::Serialize + ?Sized>(
v: &T,
) -> serde_json::Result<String> {
serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
}
/// Pretty-printed variant of [`serialize_to_json_ascii`] with a
/// caller-supplied indent string.
pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
v: &T,
indent: &str,
) -> serde_json::Result<String> {
serialize_to_json_ascii_helper(
v,
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
)
}

View file

@ -212,7 +212,7 @@ pub fn queue<T: Type>(
mod tests {
use super::*;
use crate::{
cli::FormalMode, firrtl::ExportOptions,
build::formal::FormalMode, firrtl::ExportOptions,
module::transform::simplify_enums::SimplifyEnumsKind, testing::assert_formal,
ty::StaticType,
};

View file

@ -3,7 +3,7 @@
//! Formal tests in Fayalite
use fayalite::{
cli::FormalMode,
build::formal::FormalMode,
clock::{Clock, ClockDomain},
expr::{CastTo, HdlPartialEq},
firrtl::ExportOptions,