forked from libre-chip/fayalite
WIP refactoring to have JobKind be internal jobs
This commit is contained in:
parent
a823f8485b
commit
0150d4c7fb
14 changed files with 4316 additions and 1705 deletions
|
|
@ -1,7 +1,10 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use clap::Parser;
|
||||
use fayalite::{cli, prelude::*};
|
||||
use fayalite::{
|
||||
build::{BuildSubcommand, DynJob, JobParams, RunBuild},
|
||||
prelude::*,
|
||||
};
|
||||
|
||||
#[hdl_module]
|
||||
fn blinky(clock_frequency: u64) {
|
||||
|
|
@ -38,10 +41,19 @@ struct Cli {
|
|||
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
|
||||
clock_frequency: u64,
|
||||
#[command(subcommand)]
|
||||
cli: cli::Cli,
|
||||
cli: BuildSubcommand,
|
||||
}
|
||||
|
||||
fn main() -> cli::Result {
|
||||
let cli = Cli::parse();
|
||||
cli.cli.run(blinky(cli.clock_frequency))
|
||||
impl RunBuild for Cli {
|
||||
fn into_jobs<E: ?Sized + Extend<DynJob>>(
|
||||
self,
|
||||
jobs: &mut E,
|
||||
params: impl AsRef<JobParams>,
|
||||
) -> eyre::Result<()> {
|
||||
self.cli.into_jobs(jobs, params)
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
Cli::main(|cli| Ok(JobParams::new(blinky(cli.clock_frequency), "blinky")));
|
||||
}
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
|
|
@ -2,22 +2,33 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{DynJob, EscapeForUnixShell, JobItem, JobItemName, JobKind},
|
||||
build::{
|
||||
CommandParams, GetBaseJob, JobAndDependencies, JobAndKind, JobArgs, JobArgsAndDependencies,
|
||||
JobDependencies, JobItem, JobItemName, JobKind, JobParams, intern_known_utf8_path_buf,
|
||||
},
|
||||
intern::{Intern, Interned},
|
||||
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
|
||||
};
|
||||
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD};
|
||||
use clap::builder::StyledStr;
|
||||
use eyre::{Context, ensure, eyre};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error};
|
||||
use clap::builder::OsStringValueParser;
|
||||
use eyre::{Context, bail, ensure, eyre};
|
||||
use serde::{
|
||||
Deserialize, Deserializer, Serialize, Serializer,
|
||||
de::{DeserializeOwned, Error},
|
||||
};
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
collections::BTreeMap,
|
||||
env,
|
||||
fmt::{self, Write},
|
||||
mem,
|
||||
ffi::{OsStr, OsString},
|
||||
fmt,
|
||||
hash::{Hash, Hasher},
|
||||
io::Write,
|
||||
marker::PhantomData,
|
||||
path::{Path, PathBuf},
|
||||
sync::OnceLock,
|
||||
};
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
enum TemplateArg {
|
||||
Literal(String),
|
||||
|
|
@ -25,6 +36,7 @@ enum TemplateArg {
|
|||
OutputPath { before: String, after: String },
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl TemplateArg {
|
||||
fn after_mut(&mut self) -> &mut String {
|
||||
match self {
|
||||
|
|
@ -114,14 +126,15 @@ impl From<String> for MaybeUtf8 {
|
|||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct ExternalJobCache {
|
||||
#[serde(rename = "ExternalJobCache")]
|
||||
pub struct ExternalJobCacheV2 {
|
||||
pub version: ExternalJobCacheVersion,
|
||||
pub inputs_hash: blake3::Hash,
|
||||
pub stdout_stderr: String,
|
||||
pub result: Result<BTreeMap<String, MaybeUtf8>, String>,
|
||||
}
|
||||
|
||||
impl ExternalJobCache {
|
||||
impl ExternalJobCacheV2 {
|
||||
fn read_from_file(cache_json_path: Interned<str>) -> eyre::Result<Self> {
|
||||
let cache_str = std::fs::read_to_string(&*cache_json_path)
|
||||
.wrap_err_with(|| format!("can't read {cache_json_path}"))?;
|
||||
|
|
@ -136,8 +149,8 @@ impl ExternalJobCache {
|
|||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
|
||||
pub struct ExternalJobCaching {
|
||||
pub cache_json_path: Interned<str>,
|
||||
pub run_even_if_cached: bool,
|
||||
cache_json_path: Interned<str>,
|
||||
run_even_if_cached: bool,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
|
|
@ -180,20 +193,64 @@ impl JobCacheHasher {
|
|||
}
|
||||
}
|
||||
|
||||
fn write_file_atomically_no_clobber<F: FnOnce() -> C, C: AsRef<[u8]>>(
|
||||
path: impl AsRef<Path>,
|
||||
containing_dir: impl AsRef<Path>,
|
||||
contents: F,
|
||||
) -> std::io::Result<()> {
|
||||
let path = path.as_ref();
|
||||
let containing_dir = containing_dir.as_ref();
|
||||
if !matches!(std::fs::exists(&path), Ok(true)) {
|
||||
// use File::create_new rather than tempfile's code to get normal file permissions rather than mode 600 on Unix.
|
||||
let mut file = tempfile::Builder::new()
|
||||
.make_in(containing_dir, |path| std::fs::File::create_new(path))?;
|
||||
file.write_all(contents().as_ref())?; // write all in one operation to avoid a bunch of tiny writes
|
||||
file.into_temp_path().persist_noclobber(path)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl ExternalJobCaching {
|
||||
pub fn new(cache_json_path: Interned<str>) -> Self {
|
||||
Self {
|
||||
pub fn get_cache_dir_from_output_dir(output_dir: &str) -> PathBuf {
|
||||
Path::join(output_dir.as_ref(), ".cache")
|
||||
}
|
||||
pub fn make_cache_dir(
|
||||
cache_dir: impl AsRef<Path>,
|
||||
application_name: &str,
|
||||
) -> std::io::Result<()> {
|
||||
let cache_dir = cache_dir.as_ref();
|
||||
std::fs::create_dir_all(cache_dir)?;
|
||||
write_file_atomically_no_clobber(cache_dir.join("CACHEDIR.TAG"), cache_dir, || {
|
||||
format!(
|
||||
"Signature: 8a477f597d28d172789f06886806bc55\n\
|
||||
# This file is a cache directory tag created by {application_name}.\n\
|
||||
# For information about cache directory tags see https://bford.info/cachedir/\n"
|
||||
)
|
||||
})?;
|
||||
write_file_atomically_no_clobber(cache_dir.join(".gitignore"), cache_dir, || {
|
||||
format!(
|
||||
"# This is a cache directory created by {application_name}.\n\
|
||||
# ignore all files\n\
|
||||
*\n"
|
||||
)
|
||||
})
|
||||
}
|
||||
pub fn new(
|
||||
output_dir: &str,
|
||||
application_name: &str,
|
||||
json_file_stem: &str,
|
||||
run_even_if_cached: bool,
|
||||
) -> std::io::Result<Self> {
|
||||
let cache_dir = Self::get_cache_dir_from_output_dir(output_dir);
|
||||
Self::make_cache_dir(&cache_dir, application_name)?;
|
||||
let mut cache_json_path = cache_dir;
|
||||
cache_json_path.push(json_file_stem);
|
||||
cache_json_path.set_extension("json");
|
||||
let cache_json_path = intern_known_utf8_path_buf(cache_json_path);
|
||||
Ok(Self {
|
||||
cache_json_path,
|
||||
run_even_if_cached: false,
|
||||
}
|
||||
}
|
||||
#[track_caller]
|
||||
pub fn from_path(cache_json_path: impl AsRef<std::path::Path>) -> Self {
|
||||
let cache_json_path = cache_json_path.as_ref();
|
||||
let Some(cache_json_path) = cache_json_path.as_os_str().to_str() else {
|
||||
panic!("non-UTF-8 path to cache json: {cache_json_path:?}");
|
||||
};
|
||||
Self::new(cache_json_path.intern())
|
||||
run_even_if_cached,
|
||||
})
|
||||
}
|
||||
fn write_stdout_stderr(stdout_stderr: &str) {
|
||||
if stdout_stderr == "" {
|
||||
|
|
@ -215,12 +272,12 @@ impl ExternalJobCaching {
|
|||
if self.run_even_if_cached {
|
||||
return Err(());
|
||||
}
|
||||
let Ok(ExternalJobCache {
|
||||
let Ok(ExternalJobCacheV2 {
|
||||
version: ExternalJobCacheVersion::CURRENT,
|
||||
inputs_hash: cached_inputs_hash,
|
||||
stdout_stderr,
|
||||
result,
|
||||
}) = ExternalJobCache::read_from_file(self.cache_json_path)
|
||||
}) = ExternalJobCacheV2::read_from_file(self.cache_json_path)
|
||||
else {
|
||||
return Err(());
|
||||
};
|
||||
|
|
@ -253,7 +310,7 @@ impl ExternalJobCaching {
|
|||
fn make_command(
|
||||
command_line: Interned<[Interned<str>]>,
|
||||
) -> eyre::Result<std::process::Command> {
|
||||
ensure!(command_line.is_empty(), "command line must not be empty");
|
||||
ensure!(!command_line.is_empty(), "command line must not be empty");
|
||||
let mut cmd = std::process::Command::new(&*command_line[0]);
|
||||
cmd.args(command_line[1..].iter().map(|arg| &**arg))
|
||||
.stdin(std::process::Stdio::null());
|
||||
|
|
@ -314,7 +371,7 @@ impl ExternalJobCaching {
|
|||
.expect("spawn shouldn't fail");
|
||||
run_fn(cmd)
|
||||
});
|
||||
ExternalJobCache {
|
||||
ExternalJobCacheV2 {
|
||||
version: ExternalJobCacheVersion::CURRENT,
|
||||
inputs_hash,
|
||||
stdout_stderr,
|
||||
|
|
@ -350,6 +407,7 @@ impl ExternalJobCaching {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct TemplatedExternalJobKind {
|
||||
template: Interned<[TemplateArg]>,
|
||||
|
|
@ -357,12 +415,14 @@ pub struct TemplatedExternalJobKind {
|
|||
caching: Option<ExternalJobCaching>,
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
enum Token {
|
||||
Char(char),
|
||||
ArgSeparator,
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl Token {
|
||||
fn as_ident_start(self) -> Option<char> {
|
||||
match self {
|
||||
|
|
@ -380,12 +440,14 @@ impl Token {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Clone, Debug)]
|
||||
struct Tokens<'a> {
|
||||
current: std::str::Chars<'a>,
|
||||
rest: std::slice::Iter<'a, &'a str>,
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl<'a> Tokens<'a> {
|
||||
fn new(args: &'a [&'a str]) -> Self {
|
||||
Self {
|
||||
|
|
@ -395,6 +457,7 @@ impl<'a> Tokens<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl Iterator for Tokens<'_> {
|
||||
type Item = Token;
|
||||
|
||||
|
|
@ -409,11 +472,13 @@ impl Iterator for Tokens<'_> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
struct Parser<'a> {
|
||||
tokens: std::iter::Peekable<Tokens<'a>>,
|
||||
template: Vec<TemplateArg>,
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl<'a> Parser<'a> {
|
||||
fn new(args_template: &'a [&'a str]) -> Self {
|
||||
Self {
|
||||
|
|
@ -519,6 +584,7 @@ impl<'a> Parser<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
pub fn find_program<'a>(
|
||||
default_program_name: &'a str,
|
||||
program_path_env_var: Option<&str>,
|
||||
|
|
@ -535,6 +601,7 @@ pub fn find_program<'a>(
|
|||
.map_err(|program_path| eyre!("path to program is not valid UTF-8: {program_path:?}"))
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Clone, Debug)]
|
||||
enum ParseErrorKind {
|
||||
ExpectedVar,
|
||||
|
|
@ -542,15 +609,18 @@ enum ParseErrorKind {
|
|||
EachArgMustHaveAtMostOneVar,
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct TemplateParseError(ParseErrorKind);
|
||||
|
||||
#[cfg(todo)]
|
||||
impl From<ParseErrorKind> for TemplateParseError {
|
||||
fn from(value: ParseErrorKind) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl fmt::Display for TemplateParseError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match &self.0 {
|
||||
|
|
@ -568,8 +638,10 @@ impl fmt::Display for TemplateParseError {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl std::error::Error for TemplateParseError {}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl TemplatedExternalJobKind {
|
||||
pub fn try_new(
|
||||
default_program_name: &str,
|
||||
|
|
@ -599,6 +671,7 @@ impl TemplatedExternalJobKind {
|
|||
Err(e) => panic!("{e}"),
|
||||
}
|
||||
}
|
||||
#[cfg(todo)]
|
||||
fn usage(&self) -> StyledStr {
|
||||
let mut retval = String::from("Usage:");
|
||||
let mut last_input_index = 0usize;
|
||||
|
|
@ -632,6 +705,7 @@ impl TemplatedExternalJobKind {
|
|||
}
|
||||
retval.into()
|
||||
}
|
||||
#[cfg(todo)]
|
||||
fn with_usage(&self, mut e: clap::Error) -> clap::Error {
|
||||
e.insert(
|
||||
clap::error::ContextKind::Usage,
|
||||
|
|
@ -641,6 +715,7 @@ impl TemplatedExternalJobKind {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl JobKind for TemplatedExternalJobKind {
|
||||
type Job = TemplatedExternalJob;
|
||||
|
||||
|
|
@ -771,6 +846,7 @@ impl JobKind for TemplatedExternalJobKind {
|
|||
}
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct TemplatedExternalJob {
|
||||
command_line: Interned<[Interned<str>]>,
|
||||
|
|
@ -778,6 +854,7 @@ pub struct TemplatedExternalJob {
|
|||
outputs: Interned<[JobItemName]>,
|
||||
}
|
||||
|
||||
#[cfg(todo)]
|
||||
impl TemplatedExternalJob {
|
||||
pub fn try_add_direct_dependency(&mut self, new_dependency: DynJob) -> eyre::Result<()> {
|
||||
let mut added = false;
|
||||
|
|
@ -833,3 +910,525 @@ impl TemplatedExternalJob {
|
|||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Eq, Hash)]
|
||||
pub struct ExternalCommandJobKind<T: ExternalCommand>(PhantomData<T>);
|
||||
|
||||
impl<T: ExternalCommand> fmt::Debug for ExternalCommandJobKind<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "ExternalCommandJobKind<{}>", std::any::type_name::<T>())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> PartialEq for ExternalCommandJobKind<T> {
|
||||
fn eq(&self, _other: &Self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> Ord for ExternalCommandJobKind<T> {
|
||||
fn cmp(&self, _other: &Self) -> std::cmp::Ordering {
|
||||
std::cmp::Ordering::Equal
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> PartialOrd for ExternalCommandJobKind<T> {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> Default for ExternalCommandJobKind<T> {
|
||||
fn default() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> Copy for ExternalCommandJobKind<T> {}
|
||||
|
||||
impl<T: ExternalCommand> ExternalCommandJobKind<T> {
|
||||
pub const fn new() -> Self {
|
||||
Self(PhantomData)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
struct ExternalCommandProgramPathValueParser<T: ExternalCommand>(PhantomData<T>);
|
||||
|
||||
fn parse_which_result(
|
||||
which_result: which::Result<PathBuf>,
|
||||
program_name: impl Into<OsString>,
|
||||
program_path_arg_name: impl FnOnce() -> String,
|
||||
) -> Result<Interned<str>, ResolveProgramPathError> {
|
||||
let which_result = match which_result {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
return Err(ResolveProgramPathError {
|
||||
inner: ResolveProgramPathErrorInner::Which(e),
|
||||
program_name: program_name.into(),
|
||||
program_path_arg_name: program_path_arg_name(),
|
||||
});
|
||||
}
|
||||
};
|
||||
Ok(str::intern_owned(
|
||||
which_result
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.map_err(|_| ResolveProgramPathError {
|
||||
inner: ResolveProgramPathErrorInner::NotValidUtf8,
|
||||
program_name: program_name.into(),
|
||||
program_path_arg_name: program_path_arg_name(),
|
||||
})?,
|
||||
))
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> clap::builder::TypedValueParser
|
||||
for ExternalCommandProgramPathValueParser<T>
|
||||
{
|
||||
type Value = Interned<str>;
|
||||
|
||||
fn parse_ref(
|
||||
&self,
|
||||
cmd: &clap::Command,
|
||||
arg: Option<&clap::Arg>,
|
||||
value: &OsStr,
|
||||
) -> clap::error::Result<Self::Value> {
|
||||
OsStringValueParser::new()
|
||||
.try_map(|program_name| {
|
||||
parse_which_result(which::which(&program_name), program_name, || {
|
||||
T::program_path_arg_name().into()
|
||||
})
|
||||
})
|
||||
.parse_ref(cmd, arg, value)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
|
||||
#[group(id = T::args_group_id())]
|
||||
#[non_exhaustive]
|
||||
pub struct ExternalCommandArgs<T: ExternalCommand> {
|
||||
#[arg(
|
||||
name = Interned::into_inner(T::program_path_arg_name()),
|
||||
long = T::program_path_arg_name(),
|
||||
value_name = T::program_path_arg_value_name(),
|
||||
env = T::program_path_env_var_name().map(Interned::into_inner),
|
||||
value_parser = ExternalCommandProgramPathValueParser::<T>(PhantomData),
|
||||
default_value = T::default_program_name(),
|
||||
value_hint = clap::ValueHint::CommandName,
|
||||
)]
|
||||
pub program_path: Interned<str>,
|
||||
#[arg(
|
||||
name = Interned::into_inner(T::run_even_if_cached_arg_name()),
|
||||
long = T::run_even_if_cached_arg_name(),
|
||||
)]
|
||||
pub run_even_if_cached: bool,
|
||||
#[command(flatten)]
|
||||
pub additional_args: T::AdditionalArgs,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum ResolveProgramPathErrorInner {
|
||||
Which(which::Error),
|
||||
NotValidUtf8,
|
||||
}
|
||||
|
||||
impl fmt::Debug for ResolveProgramPathErrorInner {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Which(v) => v.fmt(f),
|
||||
Self::NotValidUtf8 => f.write_str("NotValidUtf8"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for ResolveProgramPathErrorInner {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Which(v) => v.fmt(f),
|
||||
Self::NotValidUtf8 => f.write_str("path is not valid UTF-8"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ResolveProgramPathError {
|
||||
inner: ResolveProgramPathErrorInner,
|
||||
program_name: std::ffi::OsString,
|
||||
program_path_arg_name: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for ResolveProgramPathError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
inner,
|
||||
program_name,
|
||||
program_path_arg_name,
|
||||
} = self;
|
||||
write!(
|
||||
f,
|
||||
"{program_path_arg_name}: failed to resolve {program_name:?} to a valid program: {inner}",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for ResolveProgramPathError {}
|
||||
|
||||
pub fn resolve_program_path(
|
||||
program_name: Option<&OsStr>,
|
||||
default_program_name: impl AsRef<OsStr>,
|
||||
program_path_env_var_name: Option<&OsStr>,
|
||||
) -> Result<Interned<str>, ResolveProgramPathError> {
|
||||
let default_program_name = default_program_name.as_ref();
|
||||
let owned_program_name;
|
||||
let program_name = if let Some(program_name) = program_name {
|
||||
program_name
|
||||
} else if let Some(v) = program_path_env_var_name.and_then(std::env::var_os) {
|
||||
owned_program_name = v;
|
||||
&owned_program_name
|
||||
} else {
|
||||
default_program_name
|
||||
};
|
||||
parse_which_result(which::which(program_name), program_name, || {
|
||||
default_program_name.display().to_string()
|
||||
})
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> ExternalCommandArgs<T> {
|
||||
pub fn with_resolved_program_path(
|
||||
program_path: Interned<str>,
|
||||
additional_args: T::AdditionalArgs,
|
||||
) -> Self {
|
||||
Self {
|
||||
program_path,
|
||||
run_even_if_cached: false,
|
||||
additional_args,
|
||||
}
|
||||
}
|
||||
pub fn new(
|
||||
program_name: Option<&OsStr>,
|
||||
additional_args: T::AdditionalArgs,
|
||||
) -> Result<Self, ResolveProgramPathError> {
|
||||
Ok(Self::with_resolved_program_path(
|
||||
resolve_program_path(
|
||||
program_name,
|
||||
T::default_program_name(),
|
||||
T::program_path_env_var_name().as_ref().map(AsRef::as_ref),
|
||||
)?,
|
||||
additional_args,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> JobArgs for ExternalCommandArgs<T> {
|
||||
fn to_args<Args: Extend<Interned<str>> + ?Sized>(&self, args: &mut Args) {
|
||||
let Self {
|
||||
program_path,
|
||||
run_even_if_cached,
|
||||
ref additional_args,
|
||||
} = *self;
|
||||
args.extend([str::intern_owned(format!(
|
||||
"--{}={program_path}",
|
||||
T::program_path_arg_name()
|
||||
))]);
|
||||
if run_even_if_cached {
|
||||
args.extend([str::intern_owned(format!(
|
||||
"--{}",
|
||||
T::run_even_if_cached_arg_name()
|
||||
))]);
|
||||
}
|
||||
additional_args.to_args(args);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
struct ExternalCommandJobParams {
|
||||
command_params: CommandParams,
|
||||
inputs: Interned<[JobItemName]>,
|
||||
outputs: Interned<[JobItemName]>,
|
||||
output_paths: Interned<[Interned<str>]>,
|
||||
}
|
||||
|
||||
impl ExternalCommandJobParams {
|
||||
fn new<T: ExternalCommand>(job: &ExternalCommandJob<T>) -> Self {
|
||||
let output_paths = T::output_paths(job);
|
||||
Self {
|
||||
command_params: CommandParams {
|
||||
command_line: Interned::from_iter(
|
||||
[job.program_path]
|
||||
.into_iter()
|
||||
.chain(T::command_line_args(job).iter().copied()),
|
||||
),
|
||||
current_dir: T::current_dir(job),
|
||||
},
|
||||
inputs: T::inputs(job),
|
||||
outputs: output_paths
|
||||
.iter()
|
||||
.map(|&path| JobItemName::Path { path })
|
||||
.collect(),
|
||||
output_paths,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize)]
|
||||
pub struct ExternalCommandJob<T: ExternalCommand> {
|
||||
additional_job_data: T::AdditionalJobData,
|
||||
program_path: Interned<str>,
|
||||
output_dir: Interned<str>,
|
||||
run_even_if_cached: bool,
|
||||
#[serde(skip)]
|
||||
params_cache: OnceLock<ExternalCommandJobParams>,
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> Eq for ExternalCommandJob<T> {}
|
||||
|
||||
impl<T: ExternalCommand<AdditionalJobData: Clone>> Clone for ExternalCommandJob<T> {
|
||||
fn clone(&self) -> Self {
|
||||
let Self {
|
||||
ref additional_job_data,
|
||||
program_path,
|
||||
output_dir,
|
||||
run_even_if_cached,
|
||||
ref params_cache,
|
||||
} = *self;
|
||||
Self {
|
||||
additional_job_data: additional_job_data.clone(),
|
||||
program_path,
|
||||
output_dir,
|
||||
run_even_if_cached,
|
||||
params_cache: params_cache.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> fmt::Debug for ExternalCommandJob<T> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let Self {
|
||||
additional_job_data,
|
||||
program_path,
|
||||
output_dir,
|
||||
run_even_if_cached,
|
||||
params_cache: _,
|
||||
} = self;
|
||||
write!(f, "ExternalCommandJob<{}>", std::any::type_name::<T>())?;
|
||||
f.debug_struct("")
|
||||
.field("additional_job_data", additional_job_data)
|
||||
.field("program_path", program_path)
|
||||
.field("output_dir", output_dir)
|
||||
.field("run_even_if_cached", run_even_if_cached)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> PartialEq for ExternalCommandJob<T> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
let Self {
|
||||
additional_job_data,
|
||||
program_path,
|
||||
output_dir,
|
||||
run_even_if_cached,
|
||||
params_cache: _,
|
||||
} = self;
|
||||
*additional_job_data == other.additional_job_data
|
||||
&& *program_path == other.program_path
|
||||
&& *output_dir == other.output_dir
|
||||
&& *run_even_if_cached == other.run_even_if_cached
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> Hash for ExternalCommandJob<T> {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
let Self {
|
||||
additional_job_data,
|
||||
program_path,
|
||||
output_dir,
|
||||
run_even_if_cached,
|
||||
params_cache: _,
|
||||
} = self;
|
||||
additional_job_data.hash(state);
|
||||
program_path.hash(state);
|
||||
output_dir.hash(state);
|
||||
run_even_if_cached.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> ExternalCommandJob<T> {
|
||||
pub fn additional_job_data(&self) -> &T::AdditionalJobData {
|
||||
&self.additional_job_data
|
||||
}
|
||||
pub fn program_path(&self) -> Interned<str> {
|
||||
self.program_path
|
||||
}
|
||||
pub fn output_dir(&self) -> Interned<str> {
|
||||
self.output_dir
|
||||
}
|
||||
pub fn run_even_if_cached(&self) -> bool {
|
||||
self.run_even_if_cached
|
||||
}
|
||||
fn params(&self) -> &ExternalCommandJobParams {
|
||||
self.params_cache
|
||||
.get_or_init(|| ExternalCommandJobParams::new(self))
|
||||
}
|
||||
pub fn command_params(&self) -> CommandParams {
|
||||
self.params().command_params
|
||||
}
|
||||
pub fn inputs(&self) -> Interned<[JobItemName]> {
|
||||
self.params().inputs
|
||||
}
|
||||
pub fn output_paths(&self) -> Interned<[Interned<str>]> {
|
||||
self.params().output_paths
|
||||
}
|
||||
pub fn outputs(&self) -> Interned<[JobItemName]> {
|
||||
self.params().outputs
|
||||
}
|
||||
}
|
||||
|
||||
pub trait ExternalCommand: 'static + Send + Sync + Hash + Eq + fmt::Debug + Sized + Clone {
|
||||
type AdditionalArgs: JobArgs;
|
||||
type AdditionalJobData: 'static
|
||||
+ Send
|
||||
+ Sync
|
||||
+ Hash
|
||||
+ Eq
|
||||
+ fmt::Debug
|
||||
+ Serialize
|
||||
+ DeserializeOwned;
|
||||
type Dependencies: JobDependencies<JobsAndKinds: GetBaseJob>;
|
||||
fn dependencies() -> Self::Dependencies;
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
|
||||
params: &JobParams,
|
||||
) -> eyre::Result<(
|
||||
Self::AdditionalJobData,
|
||||
<Self::Dependencies as JobDependencies>::JobsAndKinds,
|
||||
)>;
|
||||
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]>;
|
||||
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]>;
|
||||
fn command_line_args(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]>;
|
||||
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>>;
|
||||
fn job_kind_name() -> Interned<str>;
|
||||
fn args_group_id() -> clap::Id {
|
||||
Interned::into_inner(Self::job_kind_name()).into()
|
||||
}
|
||||
fn program_path_arg_name() -> Interned<str> {
|
||||
Self::default_program_name()
|
||||
}
|
||||
fn program_path_arg_value_name() -> Interned<str> {
|
||||
Intern::intern_owned(Self::program_path_arg_name().to_uppercase())
|
||||
}
|
||||
fn default_program_name() -> Interned<str>;
|
||||
fn program_path_env_var_name() -> Option<Interned<str>> {
|
||||
Some(Intern::intern_owned(
|
||||
Self::program_path_arg_name()
|
||||
.to_uppercase()
|
||||
.replace('-', "_"),
|
||||
))
|
||||
}
|
||||
fn run_even_if_cached_arg_name() -> Interned<str> {
|
||||
Intern::intern_owned(format!("{}-run-even-if-cached", Self::job_kind_name()))
|
||||
}
|
||||
fn subcommand_hidden() -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ExternalCommand> JobKind for ExternalCommandJobKind<T> {
|
||||
type Args = ExternalCommandArgs<T>;
|
||||
type Job = ExternalCommandJob<T>;
|
||||
type Dependencies = T::Dependencies;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
T::dependencies()
|
||||
}
|
||||
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
let kind = args.args.kind;
|
||||
let program_path = args.args.args.program_path;
|
||||
let (additional_job_data, dependencies) = T::args_to_jobs(args, params)?;
|
||||
let base_job = dependencies.base_job();
|
||||
let job = ExternalCommandJob {
|
||||
additional_job_data,
|
||||
program_path,
|
||||
output_dir: base_job.output_dir(),
|
||||
run_even_if_cached: base_job.run_even_if_cached(),
|
||||
params_cache: OnceLock::new(),
|
||||
};
|
||||
job.params(); // fill cache
|
||||
Ok(JobAndDependencies {
|
||||
job: JobAndKind { kind, job },
|
||||
dependencies,
|
||||
})
|
||||
}
|
||||
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
job.inputs()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
job.outputs()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
T::job_kind_name()
|
||||
}
|
||||
|
||||
fn external_command_params(self, job: &Self::Job) -> Option<CommandParams> {
|
||||
Some(job.command_params())
|
||||
}
|
||||
|
||||
fn run(
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
assert!(inputs.iter().map(JobItem::name).eq(job.inputs()));
|
||||
let CommandParams {
|
||||
command_line,
|
||||
current_dir,
|
||||
} = job.command_params();
|
||||
ExternalJobCaching::new(
|
||||
&job.output_dir,
|
||||
¶ms.application_name(),
|
||||
&T::job_kind_name(),
|
||||
job.run_even_if_cached,
|
||||
)?
|
||||
.run(
|
||||
command_line,
|
||||
inputs
|
||||
.iter()
|
||||
.flat_map(|item| match item {
|
||||
JobItem::Path { path } => std::slice::from_ref(path),
|
||||
JobItem::DynamicPaths {
|
||||
paths,
|
||||
source_job_name: _,
|
||||
} => paths,
|
||||
})
|
||||
.copied(),
|
||||
job.output_paths(),
|
||||
|mut cmd| {
|
||||
if let Some(current_dir) = current_dir {
|
||||
cmd.current_dir(current_dir);
|
||||
}
|
||||
let status = acquired_job.run_command(cmd, |cmd| cmd.status())?;
|
||||
if !status.success() {
|
||||
bail!("running {command_line:?} failed: {status}")
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
Ok(job
|
||||
.output_paths()
|
||||
.iter()
|
||||
.map(|&path| JobItem::Path { path })
|
||||
.collect())
|
||||
}
|
||||
|
||||
fn subcommand_hidden(self) -> bool {
|
||||
T::subcommand_hidden()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,81 +2,119 @@
|
|||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{BaseArgs, DynJob, InternalJobTrait, JobItem, JobItemName},
|
||||
build::{
|
||||
BaseJob, BaseJobKind, CommandParams, JobAndDependencies, JobArgs, JobArgsAndDependencies,
|
||||
JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
|
||||
},
|
||||
firrtl::{ExportOptions, FileBackend},
|
||||
intern::{Intern, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Parser;
|
||||
use std::{borrow::Cow, collections::BTreeMap};
|
||||
use clap::Args;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Parser, Debug, Clone, Hash, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
|
||||
pub struct FirrtlJobKind;
|
||||
|
||||
#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
|
||||
#[group(id = "Firrtl")]
|
||||
#[non_exhaustive]
|
||||
pub struct FirrtlArgs {
|
||||
#[command(flatten)]
|
||||
pub base: BaseArgs,
|
||||
#[command(flatten)]
|
||||
pub export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl FirrtlArgs {
|
||||
impl JobArgs for FirrtlArgs {
|
||||
fn to_args<Args: Extend<Interned<str>> + ?Sized>(&self, args: &mut Args) {
|
||||
let Self { export_options } = self;
|
||||
export_options.to_args(args);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct Firrtl {
|
||||
base: BaseJob,
|
||||
export_options: ExportOptions,
|
||||
}
|
||||
|
||||
impl Firrtl {
|
||||
fn make_firrtl_file_backend(&self) -> FileBackend {
|
||||
FileBackend {
|
||||
dir_path: self.base.output.path().into(),
|
||||
top_fir_file_stem: self.base.file_stem.clone(),
|
||||
dir_path: PathBuf::from(&*self.base.output_dir()),
|
||||
top_fir_file_stem: Some(String::from(&*self.base.file_stem())),
|
||||
circuit_name: None,
|
||||
}
|
||||
}
|
||||
pub fn firrtl_file(&self) -> String {
|
||||
pub fn firrtl_file(&self) -> Interned<str> {
|
||||
self.base.file_with_ext("fir")
|
||||
}
|
||||
}
|
||||
|
||||
impl InternalJobTrait for FirrtlArgs {
|
||||
fn subcommand_name() -> Interned<str> {
|
||||
"firrtl".intern()
|
||||
impl JobKind for FirrtlJobKind {
|
||||
type Args = FirrtlArgs;
|
||||
type Job = Firrtl;
|
||||
type Dependencies = JobKindAndDependencies<BaseJobKind>;
|
||||
|
||||
fn dependencies(self) -> Self::Dependencies {
|
||||
JobKindAndDependencies::new(BaseJobKind)
|
||||
}
|
||||
|
||||
fn to_args(&self) -> Vec<Interned<str>> {
|
||||
let Self {
|
||||
base,
|
||||
fn args_to_jobs(
|
||||
args: JobArgsAndDependencies<Self>,
|
||||
params: &JobParams,
|
||||
) -> eyre::Result<JobAndDependencies<Self>> {
|
||||
args.args_to_jobs_simple(
|
||||
params,
|
||||
|_kind, FirrtlArgs { export_options }, dependencies| {
|
||||
Ok(Firrtl {
|
||||
base: dependencies.job.job.clone(),
|
||||
export_options,
|
||||
} = self;
|
||||
let mut retval = base.to_args();
|
||||
retval.extend(export_options.to_args());
|
||||
retval
|
||||
}
|
||||
|
||||
fn inputs_and_direct_dependencies<'a>(
|
||||
&'a self,
|
||||
) -> Cow<'a, BTreeMap<JobItemName, Option<DynJob>>> {
|
||||
Cow::Owned(BTreeMap::from_iter([(
|
||||
JobItemName::Module {
|
||||
name: str::intern(&self.base.module_name),
|
||||
})
|
||||
},
|
||||
None,
|
||||
)]))
|
||||
)
|
||||
}
|
||||
|
||||
fn outputs(&self) -> Interned<[JobItemName]> {
|
||||
[JobItemName::File {
|
||||
path: str::intern_owned(self.firrtl_file()),
|
||||
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.base.output_dir(),
|
||||
}][..]
|
||||
.intern()
|
||||
}
|
||||
|
||||
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
|
||||
[JobItemName::Path {
|
||||
path: job.firrtl_file(),
|
||||
}][..]
|
||||
.intern()
|
||||
}
|
||||
|
||||
fn name(self) -> Interned<str> {
|
||||
"firrtl".intern()
|
||||
}
|
||||
|
||||
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
|
||||
None
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
self,
|
||||
job: &Self::Job,
|
||||
inputs: &[JobItem],
|
||||
params: &JobParams,
|
||||
_acquired_job: &mut AcquiredJob,
|
||||
) -> eyre::Result<Vec<JobItem>> {
|
||||
let [JobItem::Module { value: module }] = inputs else {
|
||||
panic!("wrong inputs, expected a single `Module`");
|
||||
let [JobItem::Path { path: input_path }] = *inputs else {
|
||||
panic!("wrong inputs, expected a single `Path`");
|
||||
};
|
||||
assert_eq!(*module.name(), *self.base.module_name);
|
||||
crate::firrtl::export(self.make_firrtl_file_backend(), module, self.export_options)?;
|
||||
Ok(vec![JobItem::File {
|
||||
path: str::intern_owned(self.firrtl_file()),
|
||||
assert_eq!(input_path, job.base.output_dir());
|
||||
crate::firrtl::export(
|
||||
job.make_firrtl_file_backend(),
|
||||
params.main_module(),
|
||||
job.export_options,
|
||||
)?;
|
||||
Ok(vec![JobItem::Path {
|
||||
path: job.firrtl_file(),
|
||||
}])
|
||||
}
|
||||
}
|
||||
|
|
|
|||
792
crates/fayalite/src/build/graph.rs
Normal file
792
crates/fayalite/src/build/graph.rs
Normal file
|
|
@ -0,0 +1,792 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{DynJob, JobItem, JobItemName, JobParams, program_name_for_internal_jobs},
|
||||
intern::Interned,
|
||||
util::{HashMap, HashSet, job_server::AcquiredJob},
|
||||
};
|
||||
use petgraph::{
|
||||
algo::{DfsSpace, kosaraju_scc, toposort},
|
||||
graph::DiGraph,
|
||||
visit::{GraphBase, Visitable},
|
||||
};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
|
||||
use std::{
|
||||
cell::OnceCell,
|
||||
collections::{BTreeMap, BTreeSet, VecDeque},
|
||||
fmt::{self, Write},
|
||||
panic,
|
||||
rc::Rc,
|
||||
sync::mpsc,
|
||||
thread::{self, ScopedJoinHandle},
|
||||
};
|
||||
|
||||
macro_rules! write_str {
|
||||
($s:expr, $($rest:tt)*) => {
|
||||
write!($s, $($rest)*).expect("String::write_fmt can't fail")
|
||||
};
|
||||
}
|
||||
|
||||
/// A node in the bipartite job graph: either a job to run, or an item
/// (a named artifact) that jobs produce/consume. Edges only ever connect
/// jobs to items, never job-to-job or item-to-item.
#[derive(Clone, Debug)]
enum JobGraphNode {
    /// A runnable job.
    Job(DynJob),
    /// An artifact node.
    Item {
        #[allow(dead_code, reason = "name used for debugging")]
        name: JobItemName,
        /// The job that produces this item, once known; `None` for items
        /// that are only consumed (so far) by jobs in the graph.
        source_job: Option<DynJob>,
    },
}

/// Underlying petgraph directed graph; edge weights carry no data.
type JobGraphInner = DiGraph<JobGraphNode, ()>;

/// Dependency graph of jobs and the items flowing between them.
///
/// Invariants maintained by `try_add_jobs`:
/// - `jobs` / `items` map each job / item name to its node id in `graph`;
/// - `topological_order` is a valid topological order of `graph`;
/// - the graph is acyclic (cycle attempts are rejected with an error).
#[derive(Clone, Default)]
pub struct JobGraph {
    // job -> node id
    jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
    // item name -> node id
    items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
    graph: JobGraphInner,
    // recomputed after every successful `try_add_jobs`
    topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
    // scratch space reused across `toposort` calls to avoid reallocating
    space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
}
|
||||
|
||||
impl fmt::Debug for JobGraph {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Exhaustive destructure so adding a field to `JobGraph` is a
        // compile error here, forcing a decision about whether to show it.
        let Self {
            jobs: _,
            items: _,
            graph,
            topological_order,
            space: _,
        } = self;
        // `jobs`/`items` are just indexes into `graph` and `space` is
        // scratch, so only the graph and its order are worth printing.
        f.debug_struct("JobGraph")
            .field("graph", graph)
            .field("topological_order", topological_order)
            .finish_non_exhaustive()
    }
}
|
||||
|
||||
/// Errors returned by [`JobGraph::try_add_jobs`].
#[derive(Clone, Debug)]
pub enum JobGraphError {
    /// Adding the job would make the graph cyclic; `output` is one of the
    /// job's outputs that participates in the cycle.
    CycleError {
        job: DynJob,
        output: JobItemName,
    },
    /// Two distinct jobs both claim to produce `output_item`; each item may
    /// have at most one source job.
    MultipleJobsCreateSameOutput {
        output_item: JobItemName,
        existing_job: DynJob,
        new_job: DynJob,
    },
}

impl std::error::Error for JobGraphError {}

impl fmt::Display for JobGraphError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::CycleError { job, output } => write!(
                f,
                "job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
                {output:?}\n\
                job:\n{job:?}",
            ),
            JobGraphError::MultipleJobsCreateSameOutput {
                output_item,
                existing_job,
                new_job,
            } => write!(
                f,
                "job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
                conflicting output:\n\
                {output_item:?}\n\
                existing job:\n\
                {existing_job:?}\n\
                new job:\n\
                {new_job:?}",
            ),
        }
    }
}
|
||||
|
||||
/// Which quoting strategy `EscapeForUnixShell` is using for the input.
#[derive(Copy, Clone, Debug)]
enum EscapeForUnixShellState {
    /// `$'...'` quoting -- needed when the input has control or non-ASCII bytes.
    DollarSingleQuote,
    /// Plain `'...'` quoting -- needed for spaces/quotes/`!`, or an empty string.
    SingleQuote,
    /// No surrounding quotes; special characters get backslash-escaped.
    Unquoted,
}

/// Lazily escapes a byte string for a POSIX shell.
///
/// This is an `Iterator<Item = char>` producing the escaped form, and also
/// implements `Display`/`Debug` by draining a clone of itself.
#[derive(Clone)]
pub struct EscapeForUnixShell<'a> {
    state: EscapeForUnixShellState,
    /// Up to 3 pending output bytes (opening quotes, escape sequences);
    /// zero bytes mark empty slots and are drained front-to-back.
    prefix: [u8; 3],
    /// Remaining unprocessed input.
    bytes: &'a [u8],
}

impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug output is identical to Display: the escaped text itself.
        fmt::Display::fmt(self, f)
    }
}

impl<'a> fmt::Display for EscapeForUnixShell<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Iterate over a clone so displaying doesn't consume `self`.
        self.clone().try_for_each(|c| f.write_char(c))
    }
}

impl<'a> EscapeForUnixShell<'a> {
    /// Escapes `s` for a Unix shell; equivalent to `from_bytes(s.as_bytes())`.
    pub fn new(s: &'a str) -> Self {
        Self::from_bytes(s.as_bytes())
    }
    /// Packs at most 3 bytes into a zero-padded pending-output buffer.
    fn make_prefix(bytes: &[u8]) -> [u8; 3] {
        let mut out = [0; 3];
        out[..bytes.len()].copy_from_slice(bytes);
        out
    }
    /// Chooses the quoting strategy by scanning the input once:
    /// control/high bytes force `$'...'`, shell-active quote-ish characters
    /// (or an empty input) force `'...'`, anything else stays unquoted.
    pub fn from_bytes(bytes: &'a [u8]) -> Self {
        let needs_dollar_quote = bytes.iter().any(|&b| matches!(b, 0..0x20 | 0x7F..));
        let needs_single_quote =
            bytes.is_empty() || bytes.iter().any(|&b| matches!(b, b'!' | b'\'' | b'"' | b' '));
        let (state, prefix) = if needs_dollar_quote {
            (
                EscapeForUnixShellState::DollarSingleQuote,
                Self::make_prefix(b"$'"),
            )
        } else if needs_single_quote {
            (
                EscapeForUnixShellState::SingleQuote,
                Self::make_prefix(b"'"),
            )
        } else {
            (EscapeForUnixShellState::Unquoted, Self::make_prefix(b""))
        };
        Self {
            state,
            prefix,
            bytes,
        }
    }
}

impl Iterator for EscapeForUnixShell<'_> {
    type Item = char;

    fn next(&mut self) -> Option<Self::Item> {
        // 1. Drain pending prefix bytes (opening quote / escape sequence)
        //    before touching the input.
        if let Some(slot) = self.prefix.iter_mut().find(|b| **b != 0) {
            let pending = *slot as char;
            *slot = 0;
            return Some(pending);
        }
        // 2. Input exhausted: emit the closing quote once if we're quoted.
        let Some((&next_byte, rest)) = self.bytes.split_first() else {
            return match self.state {
                EscapeForUnixShellState::Unquoted => None,
                EscapeForUnixShellState::DollarSingleQuote
                | EscapeForUnixShellState::SingleQuote => {
                    self.state = EscapeForUnixShellState::Unquoted;
                    Some('\'')
                }
            };
        };
        self.bytes = rest;
        // 3. Translate one input byte according to the quoting strategy,
        //    stashing any multi-char escape tail in `prefix`.
        match self.state {
            EscapeForUnixShellState::DollarSingleQuote => {
                // Inside $'...': backslash escapes, \xNN for anything
                // outside printable ASCII.
                let tail: Option<[u8; 3]> = match next_byte {
                    b'\'' | b'\\' => Some(Self::make_prefix(&[next_byte])),
                    b'\t' => Some(Self::make_prefix(b"t")),
                    b'\n' => Some(Self::make_prefix(b"n")),
                    b'\r' => Some(Self::make_prefix(b"r")),
                    0x20..=0x7E => None,
                    _ => {
                        let hi = char::from_digit(u32::from(next_byte) >> 4, 16)
                            .expect("known to be in range") as u8;
                        let lo = char::from_digit(u32::from(next_byte) & 0xF, 16)
                            .expect("known to be in range") as u8;
                        Some([b'x', hi, lo])
                    }
                };
                match tail {
                    Some(tail) => {
                        self.prefix = tail;
                        Some('\\')
                    }
                    None => Some(next_byte as char),
                }
            }
            EscapeForUnixShellState::SingleQuote => {
                if next_byte == b'\'' {
                    // A literal ' can't appear inside '...'; close the
                    // quote, emit \', and reopen: the classic '\'' dance.
                    self.prefix = Self::make_prefix(b"\\''");
                    Some('\'')
                } else {
                    Some(next_byte as char)
                }
            }
            EscapeForUnixShellState::Unquoted => {
                // Backslash-escape every shell metacharacter.
                if b" !\"#$&'()*,;<>?[\\]^`{|}~".contains(&next_byte) {
                    self.prefix = Self::make_prefix(&[next_byte]);
                    Some('\\')
                } else {
                    Some(next_byte as char)
                }
            }
        }
    }
}
|
||||
|
||||
/// Where in a Makefile the escaped text will appear; each position has
/// different metacharacters.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum UnixMakefileEscapeKind {
    /// Target/prerequisite position (outside a recipe).
    NonRecipe,
    /// Recipe line, where only make's own `$` needs escaping.
    RecipeWithoutShellEscaping,
    /// Recipe line that is also shell-escaped first, then make-escaped.
    RecipeWithShellEscaping,
}

/// Escapes a string for inclusion in a Unix Makefile.
///
/// Construct via [`EscapeForUnixMakefile::new`], which also records any
/// helper variables (e.g. `EQUALS`) the escaped form depends on.
#[derive(Copy, Clone)]
pub struct EscapeForUnixMakefile<'a> {
    s: &'a str,
    kind: UnixMakefileEscapeKind,
}

impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Debug output is identical to Display: the escaped text itself.
        fmt::Display::fmt(self, f)
    }
}

impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Variable collection already happened in `new`, so the
        // `add_variable` callback is a no-op here.
        self.do_write(f, fmt::Write::write_str, fmt::Write::write_char, |_, _| {
            Ok(())
        })
    }
}
|
||||
|
||||
impl<'a> EscapeForUnixMakefile<'a> {
    /// Core escaping routine, generic over the sink so it can both write
    /// text (`Display`) and merely collect needed helper variables (`new`).
    ///
    /// Panics on characters that cannot be represented in a Makefile:
    /// control characters anywhere, and `;` in non-recipe position.
    fn do_write<S: ?Sized, E>(
        &self,
        state: &mut S,
        write_str: impl Fn(&mut S, &str) -> Result<(), E>,
        write_char: impl Fn(&mut S, char) -> Result<(), E>,
        add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
    ) -> Result<(), E> {
        // Recipe lines are passed to the shell, so only `$` needs doubling.
        let escape_recipe_char = |c| match c {
            '$' => write_str(state, "$$"),
            '\0'..='\x1F' | '\x7F' => {
                panic!("can't escape a control character for Unix Makefile: {c:?}");
            }
            _ => write_char(state, c),
        };
        match self.kind {
            UnixMakefileEscapeKind::NonRecipe => self.s.chars().try_for_each(|c| match c {
                // `=` can't be backslash-escaped in a target; route it
                // through an `EQUALS` variable and record that we need it.
                '=' => {
                    add_variable(state, "EQUALS = =")?;
                    write_str(state, "$(EQUALS)")
                }
                ';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
                '$' => write_str(state, "$$"),
                '\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
                    write_char(state, '\\')?;
                    write_char(state, c)
                }
                '\0'..='\x1F' | '\x7F' => {
                    panic!("can't escape a control character for Unix Makefile: {c:?}");
                }
                _ => write_char(state, c),
            }),
            UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
                self.s.chars().try_for_each(escape_recipe_char)
            }
            UnixMakefileEscapeKind::RecipeWithShellEscaping => {
                // Shell-escape first, then make-escape the result.
                EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
            }
        }
    }
    /// Creates the escaper and records any helper variables (e.g.
    /// `EQUALS = =`) that the escaped output will reference into
    /// `needed_variables`, by doing a dry-run write that discards text.
    pub fn new(
        s: &'a str,
        kind: UnixMakefileEscapeKind,
        needed_variables: &mut BTreeSet<&'static str>,
    ) -> Self {
        let retval = Self { s, kind };
        // Infallible dry run: the only live callback inserts variables.
        let Ok(()) = retval.do_write(
            needed_variables,
            |_, _| Ok(()),
            |_, _| Ok(()),
            |needed_variables, variable| -> Result<(), std::convert::Infallible> {
                needed_variables.insert(variable);
                Ok(())
            },
        );
        retval
    }
}
|
||||
|
||||
impl JobGraph {
    /// Creates an empty job graph.
    pub fn new() -> Self {
        Self::default()
    }
    /// Gets or creates the item node for `name`, setting its source job if
    /// `new_source_job` is provided.
    ///
    /// Newly created nodes are recorded in `new_nodes` so the caller can
    /// roll them back if a later step fails. Errors with
    /// `MultipleJobsCreateSameOutput` if the item already has a different
    /// source job.
    fn try_add_item_node(
        &mut self,
        name: JobItemName,
        new_source_job: Option<DynJob>,
        new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
    ) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
        use hashbrown::hash_map::Entry;
        match self.items.entry(name) {
            Entry::Occupied(item_entry) => {
                let node_id = *item_entry.get();
                let JobGraphNode::Item {
                    name: _,
                    source_job,
                } = &mut self.graph[node_id]
                else {
                    unreachable!("known to be an item");
                };
                if let Some(new_source_job) = new_source_job {
                    if let Some(source_job) = source_job {
                        // an item may have at most one producing job
                        return Err(JobGraphError::MultipleJobsCreateSameOutput {
                            output_item: item_entry.key().clone(),
                            existing_job: source_job.clone(),
                            new_job: new_source_job,
                        });
                    } else {
                        *source_job = Some(new_source_job);
                    }
                }
                Ok(node_id)
            }
            Entry::Vacant(item_entry) => {
                let node_id = self.graph.add_node(JobGraphNode::Item {
                    name,
                    source_job: new_source_job,
                });
                new_nodes.insert(node_id);
                item_entry.insert(node_id);
                Ok(node_id)
            }
        }
    }
    /// Adds `jobs` (skipping any already present), wiring job->output and
    /// input->job edges, then recomputes the topological order.
    ///
    /// On any error, all nodes added by this call are removed again via the
    /// `RemoveNewNodesOnError` drop guard, leaving the graph unchanged.
    pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
        &mut self,
        jobs: I,
    ) -> Result<(), JobGraphError> {
        use hashbrown::hash_map::Entry;
        let jobs = jobs.into_iter();
        // Drop guard: removes every node added during this call unless
        // `new_nodes` is cleared on the success path below.
        struct RemoveNewNodesOnError<'a> {
            this: &'a mut JobGraph,
            new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
        }
        impl Drop for RemoveNewNodesOnError<'_> {
            fn drop(&mut self) {
                for node in self.new_nodes.drain() {
                    self.this.graph.remove_node(node);
                }
            }
        }
        let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
            this: self,
            new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
        };
        let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
        let this = &mut *remove_new_nodes_on_error.this;
        for job in jobs {
            // duplicate jobs are silently skipped
            let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
                continue;
            };
            let job_node_id = this
                .graph
                .add_node(JobGraphNode::Job(job_entry.key().clone()));
            new_nodes.insert(job_node_id);
            job_entry.insert(job_node_id);
            // job -> produced item
            for name in job.outputs() {
                let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
                this.graph.add_edge(job_node_id, item_node_id, ());
            }
            // consumed item -> job
            for name in job.inputs() {
                let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
                this.graph.add_edge(item_node_id, job_node_id, ());
            }
        }
        match toposort(&this.graph, Some(&mut this.space)) {
            Ok(v) => {
                this.topological_order = v;
                // no need to remove any of the new nodes on drop since we didn't encounter any errors
                remove_new_nodes_on_error.new_nodes.clear();
                Ok(())
            }
            Err(_) => {
                // there's at least one cycle, find one!
                let cycle = kosaraju_scc(&this.graph)
                    .into_iter()
                    .find_map(|scc| {
                        if scc.len() <= 1 {
                            // can't be a cycle since our graph is bipartite --
                            // jobs only connect to items, never jobs to jobs or items to items
                            None
                        } else {
                            Some(scc)
                        }
                    })
                    .expect("we know there's a cycle");
                let cycle_set = HashSet::from_iter(cycle.iter().copied());
                let job = cycle
                    .into_iter()
                    .find_map(|node_id| {
                        if let JobGraphNode::Job(job) = &this.graph[node_id] {
                            Some(job.clone())
                        } else {
                            None
                        }
                    })
                    .expect("a job must be part of the cycle");
                let output = job
                    .outputs()
                    .into_iter()
                    .find(|output| cycle_set.contains(&this.items[output]))
                    .expect("an output must be part of the cycle");
                Err(JobGraphError::CycleError { job, output })
            }
        }
    }
    /// Like [`try_add_jobs`](Self::try_add_jobs) but panics on error.
    #[track_caller]
    pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
        match self.try_add_jobs(jobs) {
            Ok(()) => {}
            Err(e) => panic!("error: {e}"),
        }
    }
    /// Renders the graph as a Unix Makefile, invoking internal jobs through
    /// this program's own name.
    pub fn to_unix_makefile(&self) -> String {
        self.to_unix_makefile_with_internal_program_prefix(&[program_name_for_internal_jobs()])
    }
    /// Renders the graph as a Unix Makefile, with `internal_program_prefix`
    /// as the command used to re-invoke internal jobs.
    ///
    /// Jobs are emitted in topological order; multi-output rules use GNU
    /// make grouped targets (`&:`); `DynamicPaths` targets become `.PHONY`.
    pub fn to_unix_makefile_with_internal_program_prefix(
        &self,
        internal_program_prefix: &[Interned<str>],
    ) -> String {
        let mut retval = String::new();
        let mut needed_variables = BTreeSet::new();
        let mut phony_targets = BTreeSet::new();
        for &node_id in &self.topological_order {
            let JobGraphNode::Job(job) = &self.graph[node_id] else {
                continue;
            };
            let outputs = job.outputs();
            if outputs.is_empty() {
                // NOTE(review): a rule line starting with a bare `:` has no
                // target; confirm make accepts this for output-less jobs.
                retval.push_str(":");
            } else {
                for output in job.outputs() {
                    match output {
                        JobItemName::Path { path } => {
                            write_str!(
                                retval,
                                "{} ",
                                EscapeForUnixMakefile::new(
                                    &path,
                                    UnixMakefileEscapeKind::NonRecipe,
                                    &mut needed_variables
                                )
                            );
                        }
                        JobItemName::DynamicPaths { source_job_name } => {
                            write_str!(
                                retval,
                                "{} ",
                                EscapeForUnixMakefile::new(
                                    &source_job_name,
                                    UnixMakefileEscapeKind::NonRecipe,
                                    &mut needed_variables
                                )
                            );
                            // dynamic paths aren't real files -> .PHONY
                            phony_targets.insert(Interned::into_inner(source_job_name));
                        }
                    }
                }
                if outputs.len() == 1 {
                    retval.push_str(":");
                } else {
                    // grouped target: one recipe produces all outputs
                    retval.push_str("&:");
                }
            }
            for input in job.inputs() {
                match input {
                    JobItemName::Path { path } => {
                        write_str!(
                            retval,
                            " {}",
                            EscapeForUnixMakefile::new(
                                &path,
                                UnixMakefileEscapeKind::NonRecipe,
                                &mut needed_variables
                            )
                        );
                    }
                    JobItemName::DynamicPaths { source_job_name } => {
                        write_str!(
                            retval,
                            " {}",
                            EscapeForUnixMakefile::new(
                                &source_job_name,
                                UnixMakefileEscapeKind::NonRecipe,
                                &mut needed_variables
                            )
                        );
                        phony_targets.insert(Interned::into_inner(source_job_name));
                    }
                }
            }
            retval.push_str("\n\t");
            job.command_params_with_internal_program_prefix(internal_program_prefix)
                .to_unix_shell_line(&mut retval, |arg, output| {
                    write!(
                        output,
                        "{}",
                        EscapeForUnixMakefile::new(
                            arg,
                            UnixMakefileEscapeKind::RecipeWithShellEscaping,
                            &mut needed_variables
                        )
                    )
                })
                .expect("writing to String never fails");
            retval.push_str("\n\n");
        }
        if !phony_targets.is_empty() {
            retval.push_str("\n.PHONY:");
            for phony_target in phony_targets {
                write_str!(
                    retval,
                    " {}",
                    EscapeForUnixMakefile::new(
                        phony_target,
                        UnixMakefileEscapeKind::NonRecipe,
                        &mut needed_variables
                    )
                );
            }
            retval.push_str("\n");
        }
        if !needed_variables.is_empty() {
            // helper variable definitions (e.g. EQUALS) must precede use
            retval.insert_str(
                0,
                &String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
            );
        }
        retval
    }
    /// Renders the graph as a `set -ex` shell script, invoking internal
    /// jobs through this program's own name.
    pub fn to_unix_shell_script(&self) -> String {
        self.to_unix_shell_script_with_internal_program_prefix(&[program_name_for_internal_jobs()])
    }
    /// Renders the graph as a shell script: one command per job, in
    /// topological order, with `set -ex` so any failure aborts the script.
    pub fn to_unix_shell_script_with_internal_program_prefix(
        &self,
        internal_program_prefix: &[Interned<str>],
    ) -> String {
        let mut retval = String::from(
            "#!/bin/sh\n\
            set -ex\n",
        );
        for &node_id in &self.topological_order {
            let JobGraphNode::Job(job) = &self.graph[node_id] else {
                continue;
            };
            job.command_params_with_internal_program_prefix(internal_program_prefix)
                .to_unix_shell_line(&mut retval, |arg, output| {
                    write!(output, "{}", EscapeForUnixShell::new(&arg))
                })
                .expect("writing to String never fails");
            retval.push_str("\n");
        }
        retval
    }
    /// Runs all jobs, each on its own thread, respecting dependencies: a
    /// job starts once every input item has been produced by its source job.
    ///
    /// Output items are checked against each job's declared outputs and
    /// fanned out to waiting consumers. Errors (or panics) from any job
    /// propagate to the caller.
    pub fn run(&self, params: &JobParams) -> eyre::Result<()> {
        // use scope to auto-join threads on errors
        thread::scope(|scope| {
            // A job that still has unfilled inputs; `inputs` cells are
            // filled as producing jobs finish.
            struct WaitingJobState {
                job_node_id: <JobGraphInner as GraphBase>::NodeId,
                job: DynJob,
                inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
            }
            let mut ready_jobs = VecDeque::new();
            let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
            for &node_id in &self.topological_order {
                let JobGraphNode::Job(job) = &self.graph[node_id] else {
                    continue;
                };
                let waiting_job = WaitingJobState {
                    job_node_id: node_id,
                    job: job.clone(),
                    inputs: job
                        .inputs()
                        .iter()
                        .map(|&name| (name, OnceCell::new()))
                        .collect(),
                };
                if waiting_job.inputs.is_empty() {
                    // no dependencies: can start immediately
                    ready_jobs.push_back(waiting_job);
                } else {
                    // Rc refcount doubles as a remaining-inputs counter:
                    // when the last clone is dropped the job becomes ready.
                    let waiting_job = Rc::new(waiting_job);
                    for &input_item in waiting_job.inputs.keys() {
                        item_name_to_waiting_jobs_map
                            .entry(input_item)
                            .or_default()
                            .push(waiting_job.clone());
                    }
                }
            }
            struct RunningJob<'scope> {
                job: DynJob,
                thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
            }
            let mut running_jobs = HashMap::default();
            let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
            loop {
                // NOTE(review): `try_recv` + looping makes this scheduler
                // spin while jobs run; a blocking `recv` when `ready_jobs`
                // is empty would avoid busy-waiting -- confirm intent.
                while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() {
                    let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
                    else {
                        unreachable!();
                    };
                    // re-raise panics from the job thread; `?` propagates errors
                    let output_items = thread.join().map_err(panic::resume_unwind)??;
                    assert!(
                        output_items.iter().map(JobItem::name).eq(job.outputs()),
                        "job's run() method returned the wrong output items:\n\
                        output items:\n\
                        {output_items:?}\n\
                        expected outputs:\n\
                        {:?}\n\
                        job:\n\
                        {job:?}",
                        job.outputs(),
                    );
                    for output_item in output_items {
                        for waiting_job in item_name_to_waiting_jobs_map
                            .remove(&output_item.name())
                            .unwrap_or_default()
                        {
                            let Ok(()) =
                                waiting_job.inputs[&output_item.name()].set(output_item.clone())
                            else {
                                unreachable!();
                            };
                            // last input delivered -> Rc fully unwrapped -> ready
                            if let Some(waiting_job) = Rc::into_inner(waiting_job) {
                                ready_jobs.push_back(waiting_job);
                            }
                        }
                    }
                }
                if let Some(WaitingJobState {
                    job_node_id,
                    job,
                    inputs,
                }) = ready_jobs.pop_front()
                {
                    // Wrapper whose Drop notifies the scheduler, so the
                    // finished message is sent even if the job panics.
                    struct RunningJobInThread<'a> {
                        job_node_id: <JobGraphInner as GraphBase>::NodeId,
                        job: DynJob,
                        inputs: Vec<JobItem>,
                        params: &'a JobParams,
                        acquired_job: AcquiredJob,
                        finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
                    }
                    impl RunningJobInThread<'_> {
                        fn run(mut self) -> eyre::Result<Vec<JobItem>> {
                            self.job
                                .run(&self.inputs, self.params, &mut self.acquired_job)
                        }
                    }
                    impl Drop for RunningJobInThread<'_> {
                        fn drop(&mut self) {
                            let _ = self.finished_jobs_sender.send(self.job_node_id);
                        }
                    }
                    let name = job.kind().name();
                    let running_job_in_thread = RunningJobInThread {
                        job_node_id,
                        job: job.clone(),
                        inputs: Vec::from_iter(
                            inputs
                                .into_values()
                                .map(|input| input.into_inner().expect("was set earlier")),
                        ),
                        params,
                        // throttle parallelism via the jobserver
                        acquired_job: AcquiredJob::acquire()?,
                        finished_jobs_sender: finished_jobs_sender.clone(),
                    };
                    running_jobs.insert(
                        job_node_id,
                        RunningJob {
                            job,
                            thread: thread::Builder::new()
                                .name(format!("job:{name}"))
                                .spawn_scoped(scope, move || running_job_in_thread.run())
                                .expect("failed to spawn thread for job"),
                        },
                    );
                }
                if running_jobs.is_empty() {
                    // nothing running and (asserted) nothing waiting: done
                    assert!(item_name_to_waiting_jobs_map.is_empty());
                    assert!(ready_jobs.is_empty());
                    return Ok(());
                }
            }
        })
    }
}
|
||||
|
||||
impl Extend<DynJob> for JobGraph {
    /// Adds the jobs to the graph; panics (at the caller's location) on
    /// cycles or duplicate outputs -- see [`JobGraph::add_jobs`].
    #[track_caller]
    fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
        self.add_jobs(iter);
    }
}

impl FromIterator<DynJob> for JobGraph {
    /// Builds a graph from the jobs; panics (at the caller's location) on
    /// cycles or duplicate outputs -- see [`JobGraph::add_jobs`].
    #[track_caller]
    fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
        let mut retval = Self::new();
        retval.add_jobs(iter);
        retval
    }
}
|
||||
|
||||
impl Serialize for JobGraph {
    /// Serializes as a sequence of jobs in topological order; item nodes
    /// and edges are derivable from the jobs' inputs/outputs, so only the
    /// jobs themselves are stored.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
        for &node_id in &self.topological_order {
            let JobGraphNode::Job(job) = &self.graph[node_id] else {
                continue;
            };
            serializer.serialize_element(job)?;
        }
        serializer.end()
    }
}

impl<'de> Deserialize<'de> for JobGraph {
    /// Deserializes a job list and rebuilds the graph; graph-level
    /// validation errors (cycles, duplicate outputs) become deserialization
    /// errors.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let jobs = Vec::<DynJob>::deserialize(deserializer)?;
        let mut retval = JobGraph::new();
        retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
        Ok(retval)
    }
}
|
||||
341
crates/fayalite/src/build/registry.rs
Normal file
341
crates/fayalite/src/build/registry.rs
Normal file
|
|
@ -0,0 +1,341 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{BUILT_IN_JOB_KINDS, DynJobKind, JobKind},
|
||||
intern::Interned,
|
||||
};
|
||||
use std::{
|
||||
borrow::Borrow,
|
||||
cmp::Ordering,
|
||||
collections::BTreeMap,
|
||||
fmt,
|
||||
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
|
||||
};
|
||||
|
||||
impl DynJobKind {
    /// Returns a snapshot of the global job-kind registry.
    pub fn registry() -> JobKindRegistrySnapshot {
        JobKindRegistrySnapshot(JobKindRegistry::get())
    }
    /// Registers this job kind globally; panics if a *different* kind with
    /// the same name is already registered (re-registering the same kind is
    /// a no-op).
    #[track_caller]
    pub fn register(self) {
        JobKindRegistry::register(JobKindRegistry::lock(), self);
    }
}
|
||||
|
||||
/// Newtype over `Interned<str>` that orders by string *contents* rather
/// than by interned pointer identity, so registry iteration is
/// name-sorted. `Borrow<str>` enables `&str` map lookups.
#[derive(Copy, Clone, PartialEq, Eq)]
struct InternedStrCompareAsStr(Interned<str>);

impl fmt::Debug for InternedStrCompareAsStr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // transparent: print like the underlying string
        self.0.fmt(f)
    }
}

impl Ord for InternedStrCompareAsStr {
    fn cmp(&self, other: &Self) -> Ordering {
        // compare the string data, not the interned handles
        str::cmp(&self.0, &other.0)
    }
}

impl PartialOrd for InternedStrCompareAsStr {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Borrow<str> for InternedStrCompareAsStr {
    fn borrow(&self) -> &str {
        &self.0
    }
}
|
||||
|
||||
/// The global name -> job-kind table backing [`DynJobKind::registry`].
/// `BTreeMap` keyed by string contents keeps iteration name-sorted.
#[derive(Clone, Debug)]
struct JobKindRegistry {
    job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
}

/// Errors produced while registering a job kind.
enum JobKindRegisterError {
    /// A different job kind is already registered under `name`.
    SameName {
        name: InternedStrCompareAsStr,
        old_job_kind: DynJobKind,
        new_job_kind: DynJobKind,
    },
}

impl fmt::Display for JobKindRegisterError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::SameName {
                name,
                old_job_kind,
                new_job_kind,
            } => write!(
                f,
                "two different `JobKind` can't share the same name:\n\
                {name:?}\n\
                old job kind:\n\
                {old_job_kind:?}\n\
                new job kind:\n\
                {new_job_kind:?}",
            ),
        }
    }
}
|
||||
|
||||
/// Abstraction over how `try_register` gets mutable access to a registry:
/// either the global `RwLock`ed instance or a plain `&mut` (used while
/// building the `Default` registry, before it's shared).
trait JobKindRegistryRegisterLock {
    type Locked;
    fn lock(self) -> Self::Locked;
    fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
}

impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
    type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
    fn lock(self) -> Self::Locked {
        self.write().expect("shouldn't be poisoned")
    }
    fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
        // copy-on-write: clone if outstanding snapshots still hold the Arc
        Arc::make_mut(locked)
    }
}

impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
    type Locked = Self;

    fn lock(self) -> Self::Locked {
        self
    }

    fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
        locked
    }
}
|
||||
|
||||
impl JobKindRegistry {
    /// The lazily-initialized global registry.
    fn lock() -> &'static RwLock<Arc<Self>> {
        static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
        REGISTRY.get_or_init(Default::default)
    }
    /// Inserts `job_kind` under its name. Registering the *same* kind twice
    /// succeeds; a different kind under an existing name is an error.
    fn try_register<L: JobKindRegistryRegisterLock>(
        lock: L,
        job_kind: DynJobKind,
    ) -> Result<(), JobKindRegisterError> {
        use std::collections::btree_map::Entry;
        let name = InternedStrCompareAsStr(job_kind.name());
        // run user code only outside of lock
        let mut locked = lock.lock();
        let this = L::make_mut(&mut locked);
        let result = match this.job_kinds.entry(name) {
            Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
                name,
                old_job_kind: entry.get().clone(),
                new_job_kind: job_kind,
            }),
            Entry::Vacant(entry) => {
                entry.insert(job_kind);
                Ok(())
            }
        };
        drop(locked);
        // outside of lock now, so we can test if it's the same DynJobKind
        match result {
            Err(JobKindRegisterError::SameName {
                name: _,
                old_job_kind,
                new_job_kind,
            }) if old_job_kind == new_job_kind => Ok(()),
            result => result,
        }
    }
    /// Panicking wrapper around [`try_register`](Self::try_register).
    #[track_caller]
    fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
        match Self::try_register(lock, job_kind) {
            Err(e) => panic!("{e}"),
            Ok(()) => {}
        }
    }
    /// Cheap snapshot: clones the `Arc`, not the table.
    fn get() -> Arc<Self> {
        Self::lock().read().expect("shouldn't be poisoned").clone()
    }
}
|
||||
|
||||
impl Default for JobKindRegistry {
    /// Starts with all built-in job kinds pre-registered; panics if two
    /// built-ins collide on a name (a programming error).
    fn default() -> Self {
        let mut retval = Self {
            job_kinds: BTreeMap::new(),
        };
        for job_kind in BUILT_IN_JOB_KINDS {
            // registers directly via `&mut` -- no lock exists yet
            Self::register(&mut retval, job_kind());
        }
        retval
    }
}
|
||||
|
||||
/// An immutable, cheaply-cloneable snapshot of the job-kind registry taken
/// at a point in time; later registrations don't affect it.
#[derive(Clone, Debug)]
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);

impl JobKindRegistrySnapshot {
    /// Takes a fresh snapshot of the global registry.
    pub fn get() -> Self {
        JobKindRegistrySnapshot(JobKindRegistry::get())
    }
    /// Looks up a job kind by its registered name.
    pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
        self.0.job_kinds.get(name)
    }
    /// Iterates `(name, kind)` pairs in name-sorted order.
    pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
        JobKindRegistryIterWithNames(self.0.job_kinds.iter())
    }
    /// Iterates job kinds in name-sorted order.
    pub fn iter(&self) -> JobKindRegistryIter<'_> {
        JobKindRegistryIter(self.0.job_kinds.values())
    }
}

impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
    type Item = &'a DynJobKind;
    type IntoIter = JobKindRegistryIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
    type Item = &'a DynJobKind;
    type IntoIter = JobKindRegistryIter<'a>;

    fn into_iter(self) -> Self::IntoIter {
        // a snapshot is immutable, so `&mut` iteration is still shared
        self.iter()
    }
}
|
||||
|
||||
/// Iterator over the [`DynJobKind`]s in a [`JobKindRegistrySnapshot`], in
/// registration-name order.
#[derive(Clone, Debug)]
pub struct JobKindRegistryIter<'a>(
    std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
);

impl<'a> Iterator for JobKindRegistryIter<'a> {
    type Item = &'a DynJobKind;

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next()
    }

    // the remaining methods are forwarded so the BTreeMap iterator's
    // specialized implementations are used rather than the trait defaults

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }

    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }

    fn last(self) -> Option<Self::Item> {
        self.0.last()
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.0.nth(n)
    }

    fn fold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0.fold(init, f)
    }
}
|
||||
|
||||
// the underlying BTreeMap value iterator is fused, exact-size, and
// double-ended, so those traits are forwarded too
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}

impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}

impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.0.next_back()
    }

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        self.0.nth_back(n)
    }

    fn rfold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0.rfold(init, f)
    }
}
|
||||
|
||||
/// Iterator over `(name, &DynJobKind)` pairs in a
/// [`JobKindRegistrySnapshot`], in name order.
#[derive(Clone, Debug)]
pub struct JobKindRegistryIterWithNames<'a>(
    std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
);

impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
    type Item = (Interned<str>, &'a DynJobKind);

    // every method unwraps the `InternedStrCompareAsStr` key (`name.0`) to a
    // plain `Interned<str>` while forwarding to the BTreeMap iterator's
    // specialized implementations

    fn next(&mut self) -> Option<Self::Item> {
        self.0.next().map(|(name, job_kind)| (name.0, job_kind))
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.0.size_hint()
    }

    fn count(self) -> usize
    where
        Self: Sized,
    {
        self.0.count()
    }

    fn last(self) -> Option<Self::Item> {
        self.0.last().map(|(name, job_kind)| (name.0, job_kind))
    }

    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
    }

    fn fold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0
            .map(|(name, job_kind)| (name.0, job_kind))
            .fold(init, f)
    }
}

// the BTreeMap entry iterator is fused, exact-size, and double-ended
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}

impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}

impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.0
            .next_back()
            .map(|(name, job_kind)| (name.0, job_kind))
    }

    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        self.0
            .nth_back(n)
            .map(|(name, job_kind)| (name.0, job_kind))
    }

    fn rfold<B, F>(self, init: B, f: F) -> B
    where
        F: FnMut(B, Self::Item) -> B,
    {
        self.0
            .map(|(name, job_kind)| (name.0, job_kind))
            .rfold(init, f)
    }
}
|
||||
|
||||
#[track_caller]
|
||||
pub fn register_job_kind<K: JobKind>(kind: K) {
|
||||
DynJobKind::new(kind).register();
|
||||
}
|
||||
371
crates/fayalite/src/build/verilog.rs
Normal file
371
crates/fayalite/src/build/verilog.rs
Normal file
|
|
@ -0,0 +1,371 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use crate::{
|
||||
build::{
|
||||
CommandParams, GetBaseJob, JobAndDependencies, JobArgs, JobArgsAndDependencies,
|
||||
JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
|
||||
external::{ExternalCommand, ExternalCommandJob, ExternalCommandJobKind},
|
||||
firrtl::FirrtlJobKind,
|
||||
interned_known_utf8_method, interned_known_utf8_path_buf_method,
|
||||
},
|
||||
intern::{Intern, Interned},
|
||||
util::job_server::AcquiredJob,
|
||||
};
|
||||
use clap::Args;
|
||||
use eyre::bail;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt, mem};
|
||||
|
||||
/// based on [LLVM Circt's recommended lowering options][lowering-options]
|
||||
///
|
||||
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
|
||||
/// based on [LLVM Circt's recommended lowering options][lowering-options]
///
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum VerilogDialect {
    // each variant corresponds to the kebab-case CLI value returned by
    // `as_str()`; deliberately no doc comments on variants, since clap's
    // ValueEnum derive would turn them into help text
    Questa,
    Spyglass,
    Verilator,
    Vivado,
    Yosys,
}
|
||||
|
||||
impl fmt::Display for VerilogDialect {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl VerilogDialect {
    /// the kebab-case name for this dialect, as used in CLI values and
    /// `Display` output
    pub fn as_str(self) -> &'static str {
        match self {
            VerilogDialect::Questa => "questa",
            VerilogDialect::Spyglass => "spyglass",
            VerilogDialect::Verilator => "verilator",
            VerilogDialect::Vivado => "vivado",
            VerilogDialect::Yosys => "yosys",
        }
    }
    /// extra `firtool` arguments that adapt the emitted Verilog to this
    /// toolchain's quirks (see the Circt lowering-options docs linked on
    /// the enum)
    pub fn firtool_extra_args(self) -> &'static [&'static str] {
        match self {
            VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
            VerilogDialect::Spyglass => {
                &["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
            }
            VerilogDialect::Verilator => &[
                "--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
            ],
            VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
            VerilogDialect::Yosys => {
                &["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
            }
        }
    }
}
|
||||
|
||||
// command-line arguments specific to the unadjusted-verilog (firtool) job;
// field-level clap help intentionally uses `//` comments except where help
// text is wanted, since `///` on a clap field becomes CLI help output
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct UnadjustedVerilogArgs {
    // extra arguments forwarded verbatim to firtool (flag is repeatable)
    #[arg(long = "firtool-extra-arg", value_name = "ARG")]
    pub firtool_extra_args: Vec<String>,
    /// adapt the generated Verilog for a particular toolchain
    #[arg(long)]
    pub verilog_dialect: Option<VerilogDialect>,
    // when set, firtool is invoked with `-g --preserve-values=all`
    #[arg(long)]
    pub verilog_debug: bool,
}
|
||||
|
||||
impl JobArgs for UnadjustedVerilogArgs {
|
||||
fn to_args<Args: Extend<Interned<str>> + ?Sized>(&self, args: &mut Args) {
|
||||
let Self {
|
||||
ref firtool_extra_args,
|
||||
verilog_dialect,
|
||||
verilog_debug,
|
||||
} = *self;
|
||||
args.extend(
|
||||
firtool_extra_args
|
||||
.iter()
|
||||
.map(|arg| str::intern_owned(format!("--firtool-extra-arg={arg}"))),
|
||||
);
|
||||
args.extend(verilog_dialect.map(|v| str::intern_owned(format!("--verilog-dialect={v}"))));
|
||||
if verilog_debug {
|
||||
args.extend(["--verilog-debug".intern()]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// serializable job data for running `firtool` to produce "unadjusted"
/// Verilog (firtool's single concatenated output, before the verilog job
/// splits it into individual files) from a FIRRTL file
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
pub struct UnadjustedVerilog {
    // path of the input FIRRTL file
    firrtl_file: Interned<str>,
    // file-name component of `firrtl_file`; the command runs with the output
    // dir as its current dir, so bare file names appear on the command line
    firrtl_file_name: Interned<str>,
    // path of the output file (base name with an `unadjusted.v` extension)
    unadjusted_verilog_file: Interned<str>,
    // file-name component of `unadjusted_verilog_file`
    unadjusted_verilog_file_name: Interned<str>,
    // extra args appended to the firtool command line
    firtool_extra_args: Interned<[Interned<str>]>,
    // optional toolchain adaptation; adds dialect-specific firtool args
    verilog_dialect: Option<VerilogDialect>,
    // when true, firtool is passed `-g --preserve-values=all`
    verilog_debug: bool,
}
|
||||
|
||||
impl UnadjustedVerilog {
    /// path of the input FIRRTL file
    pub fn firrtl_file(&self) -> Interned<str> {
        self.firrtl_file
    }
    /// path of the concatenated Verilog file firtool writes
    pub fn unadjusted_verilog_file(&self) -> Interned<str> {
        self.unadjusted_verilog_file
    }
    /// extra arguments passed through to firtool
    pub fn firtool_extra_args(&self) -> Interned<[Interned<str>]> {
        self.firtool_extra_args
    }
    /// toolchain-specific dialect adaptation, if any
    pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
        self.verilog_dialect
    }
    /// whether firtool is run with debug-info options
    pub fn verilog_debug(&self) -> bool {
        self.verilog_debug
    }
}
|
||||
|
||||
/// Describes the external `firtool` invocation that converts FIRRTL to
/// unadjusted (concatenated) Verilog.
impl ExternalCommand for UnadjustedVerilog {
    type AdditionalArgs = UnadjustedVerilogArgs;
    type AdditionalJobData = UnadjustedVerilog;
    // this job consumes the output of the FIRRTL-export job
    type Dependencies = JobKindAndDependencies<FirrtlJobKind>;

    fn dependencies() -> Self::Dependencies {
        Default::default()
    }

    /// Resolve CLI args plus the upstream FIRRTL job into this job's data.
    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let UnadjustedVerilogArgs {
                firtool_extra_args,
                verilog_dialect,
                verilog_debug,
            } = args.additional_args;
            // output path: same base name as the base job, `.unadjusted.v` ext
            let unadjusted_verilog_file = dependencies
                .dependencies
                .job
                .job
                .file_with_ext("unadjusted.v");
            Ok(UnadjustedVerilog {
                firrtl_file: dependencies.job.job.firrtl_file(),
                // pre-compute the bare file names used on the command line
                // (the command runs with `current_dir` set to the output dir)
                firrtl_file_name: interned_known_utf8_method(
                    dependencies.job.job.firrtl_file(),
                    |v| v.file_name().expect("known to have file name"),
                ),
                unadjusted_verilog_file,
                unadjusted_verilog_file_name: interned_known_utf8_method(
                    unadjusted_verilog_file,
                    |v| v.file_name().expect("known to have file name"),
                ),
                firtool_extra_args: firtool_extra_args
                    .into_iter()
                    .map(str::intern_owned)
                    .collect(),
                verilog_dialect,
                verilog_debug,
            })
        })
    }

    /// The only input is the FIRRTL file produced by the upstream job.
    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.additional_job_data().firrtl_file,
        }][..]
            .intern()
    }

    /// The only declared output is the concatenated `.unadjusted.v` file.
    fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        [job.additional_job_data().unadjusted_verilog_file][..].intern()
    }

    /// Build the firtool command line:
    /// `<firrtl> -o <out> [-g --preserve-values=all] [dialect args] [extra args]`
    fn command_line_args(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        let UnadjustedVerilog {
            firrtl_file: _,
            firrtl_file_name,
            unadjusted_verilog_file: _,
            unadjusted_verilog_file_name,
            firtool_extra_args,
            verilog_dialect,
            verilog_debug,
        } = *job.additional_job_data();
        let mut retval = vec![
            firrtl_file_name,
            "-o".intern(),
            unadjusted_verilog_file_name,
        ];
        if verilog_debug {
            retval.push("-g".intern());
            retval.push("--preserve-values=all".intern());
        }
        if let Some(dialect) = verilog_dialect {
            retval.extend(
                dialect
                    .firtool_extra_args()
                    .iter()
                    .copied()
                    .map(str::intern),
            );
        }
        // user-supplied extra args go last so they can override dialect args
        retval.extend_from_slice(&firtool_extra_args);
        Intern::intern_owned(retval)
    }

    /// Run in the output directory so bare file names resolve there.
    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
        Some(job.output_dir())
    }

    fn job_kind_name() -> Interned<str> {
        "unadjusted-verilog".intern()
    }

    fn default_program_name() -> Interned<str> {
        "firtool".intern()
    }

    /// Hidden from CLI help: users normally invoke the `verilog` job instead.
    fn subcommand_hidden() -> bool {
        true
    }

    fn run_even_if_cached_arg_name() -> Interned<str> {
        "firtool-run-even-if-cached".intern()
    }
}
|
||||
|
||||
/// job kind that splits firtool's concatenated output into the final
/// per-file Verilog outputs (see `JobKind::run` below)
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VerilogJobKind;

// the verilog job adds no command-line arguments of its own
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
#[non_exhaustive]
pub struct VerilogJobArgs {}

impl JobArgs for VerilogJobArgs {
    fn to_args<Args: Extend<Interned<str>> + ?Sized>(&self, _args: &mut Args) {
        // empty destructure: compile error here if fields are ever added
        // without updating this serialization
        let Self {} = self;
    }
}
|
||||
|
||||
/// serializable job data for the verilog job: the paths it reads and writes
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct VerilogJob {
    // directory all output files are written into
    output_dir: Interned<str>,
    // input: firtool's single concatenated output file
    unadjusted_verilog_file: Interned<str>,
    // primary output: the main `.v` file this job writes
    main_verilog_file: Interned<str>,
}

impl VerilogJob {
    /// directory that output files are written into
    pub fn output_dir(&self) -> Interned<str> {
        self.output_dir
    }
    /// the concatenated firtool output this job consumes
    pub fn unadjusted_verilog_file(&self) -> Interned<str> {
        self.unadjusted_verilog_file
    }
    /// the main Verilog file this job produces
    pub fn main_verilog_file(&self) -> Interned<str> {
        self.main_verilog_file
    }
}
|
||||
|
||||
impl JobKind for VerilogJobKind {
    type Args = VerilogJobArgs;
    type Job = VerilogJob;
    // runs after firtool has produced the unadjusted (concatenated) Verilog
    type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;

    fn dependencies(self) -> Self::Dependencies {
        Default::default()
    }

    /// Resolve args plus the upstream unadjusted-verilog job into job data.
    fn args_to_jobs(
        args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        args.args_to_jobs_simple(params, |_kind, args, dependencies| {
            let VerilogJobArgs {} = args;
            Ok(VerilogJob {
                output_dir: dependencies.base_job().output_dir(),
                // input: the single concatenated file firtool wrote
                unadjusted_verilog_file: dependencies
                    .job
                    .job
                    .additional_job_data()
                    .unadjusted_verilog_file(),
                // main output: same base name with a plain `.v` extension
                main_verilog_file: dependencies.base_job().file_with_ext("v"),
            })
        })
    }

    fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.unadjusted_verilog_file,
        }][..]
            .intern()
    }

    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        // besides the main file, firtool may emit extra files whose names are
        // only known after parsing its output — hence the `DynamicPaths` item
        [
            JobItemName::Path {
                path: job.main_verilog_file,
            },
            JobItemName::DynamicPaths {
                source_job_name: self.name(),
            },
        ][..]
            .intern()
    }

    fn name(self) -> Interned<str> {
        "verilog".intern()
    }

    /// Runs in-process, so there is no external command to describe.
    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        None
    }

    /// Split firtool's concatenated output on its
    /// `// ----- 8< ----- FILE "<name>" ----- 8< -----` separator lines and
    /// write each chunk to its own file; the text before the first separator
    /// goes to the main Verilog file.
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        _params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
        let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
        let file_separator_suffix = "\" ----- 8< -----\n\n";
        let mut input = &*input;
        let main_verilog_file = job.main_verilog_file();
        // the file each chunk is written to is named by the *previous*
        // separator (or is the main file for the very first chunk)
        let mut file_name = Some(main_verilog_file);
        let mut additional_outputs = Vec::new();
        loop {
            let (chunk, next_file_name) = if let Some((chunk, rest)) =
                input.split_once(file_separator_prefix)
            {
                // a separator must have both its prefix and suffix; a prefix
                // without a suffix means firtool's output is malformed
                let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
                    bail!(
                        "parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
                    );
                };
                input = rest;
                // separator file names are relative to the output directory
                let next_file_name =
                    interned_known_utf8_path_buf_method(job.output_dir, |v| v.join(next_file_name));
                additional_outputs.push(next_file_name);
                (chunk, Some(next_file_name))
            } else {
                // no more separators: the remainder is the final chunk
                (mem::take(&mut input), None)
            };
            // `file_name` is None only after the final chunk was consumed
            let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
                break;
            };
            std::fs::write(&file_name, chunk)?;
        }
        Ok(vec![
            JobItem::Path {
                path: main_verilog_file,
            },
            JobItem::DynamicPaths {
                paths: additional_outputs,
                source_job_name: self.name(),
            },
        ])
    }
}
|
||||
|
|
@ -50,7 +50,7 @@ impl From<io::Error> for CliError {
|
|||
pub trait RunPhase<Arg> {
|
||||
type Output;
|
||||
fn run(&self, arg: Arg) -> Result<Self::Output> {
|
||||
self.run_with_job(arg, &mut AcquiredJob::acquire())
|
||||
self.run_with_job(arg, &mut AcquiredJob::acquire()?)
|
||||
}
|
||||
fn run_with_job(&self, arg: Arg, acquired_job: &mut AcquiredJob) -> Result<Self::Output>;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ use crate::{
|
|||
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
|
||||
},
|
||||
array::Array,
|
||||
build::JobArgs,
|
||||
bundle::{Bundle, BundleField, BundleType},
|
||||
clock::Clock,
|
||||
enum_::{Enum, EnumType, EnumVariant},
|
||||
|
|
@ -42,7 +43,7 @@ use crate::{
|
|||
use bitvec::slice::BitSlice;
|
||||
use clap::value_parser;
|
||||
use num_traits::Signed;
|
||||
use serde::Serialize;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cell::{Cell, RefCell},
|
||||
cmp::Ordering,
|
||||
|
|
@ -2749,14 +2750,23 @@ impl clap::builder::TypedValueParser for OptionSimplifyEnumsKindValueParser {
|
|||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
pub struct ExportOptionsPrivate(());
|
||||
|
||||
#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash)]
|
||||
impl ExportOptionsPrivate {
|
||||
fn private_new() -> Self {
|
||||
Self(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(clap::Parser, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct ExportOptions {
|
||||
#[clap(long = "no-simplify-memories", action = clap::ArgAction::SetFalse)]
|
||||
#[serde(default = "ExportOptions::default_simplify_memories")]
|
||||
pub simplify_memories: bool,
|
||||
#[clap(long, value_parser = OptionSimplifyEnumsKindValueParser, default_value = "replace-with-bundle-of-uints")]
|
||||
#[serde(default = "ExportOptions::default_simplify_enums")]
|
||||
pub simplify_enums: std::option::Option<SimplifyEnumsKind>, // use std::option::Option instead of Option to avoid clap mis-parsing
|
||||
#[doc(hidden)]
|
||||
#[clap(skip = ExportOptionsPrivate(()))]
|
||||
#[serde(skip, default = "ExportOptionsPrivate::private_new")]
|
||||
/// `#[non_exhaustive]` except allowing struct update syntax
|
||||
pub __private: ExportOptionsPrivate,
|
||||
}
|
||||
|
|
@ -2767,16 +2777,15 @@ impl fmt::Debug for ExportOptions {
|
|||
}
|
||||
}
|
||||
|
||||
impl ExportOptions {
|
||||
pub fn to_args(&self) -> Vec<Interned<str>> {
|
||||
impl JobArgs for ExportOptions {
|
||||
fn to_args<Args: Extend<Interned<str>> + ?Sized>(&self, args: &mut Args) {
|
||||
let Self {
|
||||
simplify_memories,
|
||||
simplify_enums,
|
||||
__private: ExportOptionsPrivate(()),
|
||||
} = self;
|
||||
let mut retval = Vec::new();
|
||||
if !*simplify_memories {
|
||||
retval.push("--no-simplify-memories".intern());
|
||||
} = *self;
|
||||
if !simplify_memories {
|
||||
args.extend(["--no-simplify-memories".intern()]);
|
||||
}
|
||||
let simplify_enums = simplify_enums.map(|v| {
|
||||
clap::ValueEnum::to_possible_value(&v).expect("there are no skipped variants")
|
||||
|
|
@ -2785,10 +2794,18 @@ impl ExportOptions {
|
|||
None => OptionSimplifyEnumsKindValueParser::NONE_NAME,
|
||||
Some(v) => v.get_name(),
|
||||
};
|
||||
retval.push(str::intern_owned(format!(
|
||||
args.extend([str::intern_owned(format!(
|
||||
"--simplify-enums={simplify_enums}"
|
||||
)));
|
||||
retval
|
||||
))]);
|
||||
}
|
||||
}
|
||||
|
||||
impl ExportOptions {
|
||||
fn default_simplify_memories() -> bool {
|
||||
true
|
||||
}
|
||||
fn default_simplify_enums() -> Option<SimplifyEnumsKind> {
|
||||
Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts)
|
||||
}
|
||||
fn debug_fmt(
|
||||
&self,
|
||||
|
|
@ -2846,8 +2863,8 @@ impl ExportOptions {
|
|||
impl Default for ExportOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
simplify_memories: true,
|
||||
simplify_enums: Some(SimplifyEnumsKind::ReplaceWithBundleOfUInts),
|
||||
simplify_memories: Self::default_simplify_memories(),
|
||||
simplify_enums: Self::default_simplify_enums(),
|
||||
__private: ExportOptionsPrivate(()),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -9,11 +9,13 @@ use std::{
|
|||
any::{Any, TypeId},
|
||||
borrow::{Borrow, Cow},
|
||||
cmp::Ordering,
|
||||
ffi::OsStr,
|
||||
fmt,
|
||||
hash::{BuildHasher, Hash, Hasher},
|
||||
iter::FusedIterator,
|
||||
marker::PhantomData,
|
||||
ops::Deref,
|
||||
path::Path,
|
||||
sync::{Mutex, RwLock},
|
||||
};
|
||||
|
||||
|
|
@ -416,6 +418,12 @@ forward_fmt_trait!(Pointer);
|
|||
forward_fmt_trait!(UpperExp);
|
||||
forward_fmt_trait!(UpperHex);
|
||||
|
||||
// Interned<T> derefs to T, so borrowing as &T is just a deref coercion
impl<T: ?Sized + 'static + Send + Sync> AsRef<T> for Interned<T> {
    fn as_ref(&self) -> &T {
        self
    }
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct InternedSliceIter<T: Clone + 'static + Send + Sync> {
|
||||
slice: Interned<[T]>,
|
||||
|
|
@ -485,6 +493,51 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
// build an interned string directly from any iterator a String can collect
// from (chars, &str, String, ...)
impl<I> FromIterator<I> for Interned<str>
where
    String: FromIterator<I>,
{
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        str::intern_owned(FromIterator::from_iter(iter))
    }
}

// let Interned<str> be passed wherever std accepts an OsStr
// (e.g. Command args, env vars)
impl AsRef<OsStr> for Interned<str> {
    fn as_ref(&self) -> &OsStr {
        str::as_ref(self)
    }
}

// let Interned<str> be used directly as a filesystem path
impl AsRef<Path> for Interned<str> {
    fn as_ref(&self) -> &Path {
        str::as_ref(self)
    }
}
|
||||
|
||||
// conversions into clap's builder string/id types so interned strings can be
// used directly when constructing command-line interfaces; each one unwraps
// to the underlying &'static str and uses clap's own From impls

impl From<Interned<str>> for clap::builder::Str {
    fn from(value: Interned<str>) -> Self {
        Interned::into_inner(value).into()
    }
}

impl From<Interned<str>> for clap::builder::OsStr {
    fn from(value: Interned<str>) -> Self {
        Interned::into_inner(value).into()
    }
}

impl From<Interned<str>> for clap::builder::StyledStr {
    fn from(value: Interned<str>) -> Self {
        Interned::into_inner(value).into()
    }
}

impl From<Interned<str>> for clap::Id {
    fn from(value: Interned<str>) -> Self {
        Interned::into_inner(value).into()
    }
}
|
||||
|
||||
impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Vec<T> {
|
||||
fn from(value: Interned<[T]>) -> Self {
|
||||
Vec::from(&*value)
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
use crate::{
|
||||
|
|
@ -955,12 +957,15 @@ impl Folder for State {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, clap::ValueEnum, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum SimplifyEnumsKind {
|
||||
SimplifyToEnumsWithNoBody,
|
||||
#[clap(name = "replace-with-bundle-of-uints")]
|
||||
#[serde(rename = "replace-with-bundle-of-uints")]
|
||||
ReplaceWithBundleOfUInts,
|
||||
#[clap(name = "replace-with-uint")]
|
||||
#[serde(rename = "replace-with-uint")]
|
||||
ReplaceWithUInt,
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -36,8 +36,11 @@ pub use scoped_ref::ScopedRef;
|
|||
pub(crate) use misc::chain;
|
||||
#[doc(inline)]
|
||||
pub use misc::{
|
||||
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter, interned_bit,
|
||||
iter_eq_by, slice_range, try_slice_range,
|
||||
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
|
||||
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
|
||||
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, serialize_to_json_ascii,
|
||||
serialize_to_json_ascii_pretty, serialize_to_json_ascii_pretty_with_indent, slice_range,
|
||||
try_slice_range,
|
||||
};
|
||||
|
||||
pub mod job_server;
|
||||
|
|
|
|||
|
|
@ -1,26 +1,36 @@
|
|||
// SPDX-License-Identifier: LGPL-3.0-or-later
|
||||
// See Notices.txt for copyright information
|
||||
|
||||
use ctor::ctor;
|
||||
use jobslot::{Acquired, Client};
|
||||
use ctor::{ctor, dtor};
|
||||
use jobslot::Client;
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
mem,
|
||||
io, mem,
|
||||
num::NonZeroUsize,
|
||||
sync::{Condvar, Mutex, Once, OnceLock},
|
||||
thread::spawn,
|
||||
sync::{Mutex, MutexGuard},
|
||||
};
|
||||
|
||||
fn get_or_make_client() -> &'static Client {
|
||||
#[ctor]
|
||||
static CLIENT: OnceLock<Client> = unsafe {
|
||||
match Client::from_env() {
|
||||
Some(client) => OnceLock::from(client),
|
||||
None => OnceLock::new(),
|
||||
}
|
||||
};
|
||||
#[ctor]
|
||||
static CLIENT: Mutex<Option<Option<Client>>> = unsafe { Mutex::new(Some(Client::from_env())) };
|
||||
|
||||
CLIENT.get_or_init(|| {
|
||||
#[dtor]
|
||||
fn drop_client() {
|
||||
drop(
|
||||
match CLIENT.lock() {
|
||||
Ok(v) => v,
|
||||
Err(e) => e.into_inner(),
|
||||
}
|
||||
.take(),
|
||||
);
|
||||
}
|
||||
|
||||
fn get_or_make_client() -> Client {
|
||||
CLIENT
|
||||
.lock()
|
||||
.expect("shouldn't have panicked")
|
||||
.as_mut()
|
||||
.expect("shutting down")
|
||||
.get_or_insert_with(|| {
|
||||
let mut available_parallelism = None;
|
||||
let mut args = std::env::args_os().skip(1);
|
||||
while let Some(arg) = args.next() {
|
||||
|
|
@ -52,141 +62,95 @@ fn get_or_make_client() -> &'static Client {
|
|||
} else {
|
||||
NonZeroUsize::new(1).unwrap()
|
||||
};
|
||||
Client::new_with_fifo(available_parallelism.get() - 1).expect("failed to create job server")
|
||||
Client::new_with_fifo(available_parallelism.get() - 1)
|
||||
.expect("failed to create job server")
|
||||
})
|
||||
.clone()
|
||||
}
|
||||
|
||||
struct State {
|
||||
obtained_count: usize,
|
||||
waiting_count: usize,
|
||||
available: Vec<Acquired>,
|
||||
implicit_available: bool,
|
||||
}
|
||||
|
||||
impl State {
|
||||
fn total_available(&self) -> usize {
|
||||
self.available.len() + self.implicit_available as usize
|
||||
}
|
||||
fn additional_waiting(&self) -> usize {
|
||||
self.waiting_count.saturating_sub(self.total_available())
|
||||
}
|
||||
}
|
||||
|
||||
static STATE: Mutex<State> = Mutex::new(State {
|
||||
obtained_count: 0,
|
||||
waiting_count: 0,
|
||||
available: Vec::new(),
|
||||
implicit_available: true,
|
||||
});
|
||||
static COND_VAR: Condvar = Condvar::new();
|
||||
|
||||
#[derive(Debug)]
|
||||
enum AcquiredJobInner {
|
||||
FromJobServer(Acquired),
|
||||
ImplicitJob,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct AcquiredJob {
|
||||
job: AcquiredJobInner,
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl AcquiredJob {
|
||||
fn start_acquire_thread() {
|
||||
static STARTED_THREAD: Once = Once::new();
|
||||
STARTED_THREAD.call_once(|| {
|
||||
spawn(|| {
|
||||
let mut acquired = None;
|
||||
pub fn acquire() -> io::Result<Self> {
|
||||
let client = get_or_make_client();
|
||||
struct Waiting {}
|
||||
|
||||
impl Waiting {
|
||||
fn done(self) -> MutexGuard<'static, State> {
|
||||
mem::forget(self);
|
||||
let mut state = STATE.lock().unwrap();
|
||||
loop {
|
||||
state = if state.additional_waiting() == 0 {
|
||||
if acquired.is_some() {
|
||||
drop(state);
|
||||
drop(acquired.take()); // drop Acquired outside of lock
|
||||
STATE.lock().unwrap()
|
||||
} else {
|
||||
COND_VAR.wait(state).unwrap()
|
||||
}
|
||||
} else if acquired.is_some() {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
state.available.push(acquired.take().unwrap());
|
||||
COND_VAR.notify_all();
|
||||
state.waiting_count -= 1;
|
||||
state
|
||||
} else {
|
||||
drop(state);
|
||||
acquired = Some(
|
||||
client
|
||||
.acquire()
|
||||
.expect("can't acquire token from job server"),
|
||||
);
|
||||
STATE.lock().unwrap()
|
||||
};
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
fn acquire_inner(block: bool) -> Option<Self> {
|
||||
Self::start_acquire_thread();
|
||||
impl Drop for Waiting {
|
||||
fn drop(&mut self) {
|
||||
STATE.lock().unwrap().waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
let mut state = STATE.lock().unwrap();
|
||||
loop {
|
||||
if let Some(acquired) = state.available.pop() {
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::FromJobServer(acquired),
|
||||
});
|
||||
}
|
||||
if state.implicit_available {
|
||||
state.implicit_available = false;
|
||||
return Some(Self {
|
||||
job: AcquiredJobInner::ImplicitJob,
|
||||
});
|
||||
}
|
||||
if !block {
|
||||
return None;
|
||||
if state.obtained_count == 0 && state.waiting_count == 0 {
|
||||
state.obtained_count = 1; // get implicit token
|
||||
return Ok(Self { client });
|
||||
}
|
||||
state.waiting_count += 1;
|
||||
state = COND_VAR.wait(state).unwrap();
|
||||
state.waiting_count -= 1;
|
||||
}
|
||||
}
|
||||
pub fn try_acquire() -> Option<Self> {
|
||||
Self::acquire_inner(false)
|
||||
}
|
||||
pub fn acquire() -> Self {
|
||||
Self::acquire_inner(true).expect("failed to acquire token")
|
||||
drop(state);
|
||||
let waiting = Waiting {};
|
||||
client.acquire_raw()?;
|
||||
state = waiting.done();
|
||||
state.obtained_count = state
|
||||
.obtained_count
|
||||
.checked_add(1)
|
||||
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "obtained_count overflowed"))?;
|
||||
drop(state);
|
||||
Ok(Self { client })
|
||||
}
|
||||
pub fn run_command<R>(
|
||||
&mut self,
|
||||
cmd: std::process::Command,
|
||||
f: impl FnOnce(&mut std::process::Command) -> std::io::Result<R>,
|
||||
) -> std::io::Result<R> {
|
||||
get_or_make_client().configure_make_and_run_with_fifo(cmd, f)
|
||||
self.client.configure_make_and_run_with_fifo(cmd, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for AcquiredJob {
|
||||
fn drop(&mut self) {
|
||||
let mut state = STATE.lock().unwrap();
|
||||
match &self.job {
|
||||
AcquiredJobInner::FromJobServer(_) => {
|
||||
if state.waiting_count > state.available.len() + state.implicit_available as usize {
|
||||
// allocate space before moving Acquired to ensure we
|
||||
// drop Acquired outside of the lock on panic
|
||||
state.available.reserve(1);
|
||||
let AcquiredJobInner::FromJobServer(acquired) =
|
||||
mem::replace(&mut self.job, AcquiredJobInner::ImplicitJob)
|
||||
else {
|
||||
unreachable!()
|
||||
};
|
||||
state.available.push(acquired);
|
||||
COND_VAR.notify_all();
|
||||
match &mut *state {
|
||||
State {
|
||||
obtained_count: 0, ..
|
||||
} => unreachable!(),
|
||||
State {
|
||||
obtained_count: obtained_count @ 1,
|
||||
waiting_count,
|
||||
} => {
|
||||
*obtained_count = 0; // drop implicit token
|
||||
let any_waiting = *waiting_count != 0;
|
||||
drop(state);
|
||||
if any_waiting {
|
||||
// we have the implicit token, but some other thread is trying to acquire a token,
|
||||
// release the implicit token so they can acquire it.
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
}
|
||||
}
|
||||
AcquiredJobInner::ImplicitJob => {
|
||||
state.implicit_available = true;
|
||||
if state.waiting_count > state.available.len() {
|
||||
COND_VAR.notify_all();
|
||||
}
|
||||
State { obtained_count, .. } => {
|
||||
*obtained_count = obtained_count.saturating_sub(1);
|
||||
drop(state);
|
||||
let _ = self.client.release_raw(); // we're in drop, just ignore errors since we at least tried
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
|
|||
use std::{
|
||||
cell::Cell,
|
||||
fmt::{self, Debug, Write},
|
||||
io,
|
||||
ops::{Bound, Range, RangeBounds},
|
||||
rc::Rc,
|
||||
sync::{Arc, OnceLock},
|
||||
|
|
@ -243,3 +244,323 @@ pub fn try_slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Option<R
|
|||
pub fn slice_range<R: RangeBounds<usize>>(range: R, size: usize) -> Range<usize> {
|
||||
try_slice_range(range, size).expect("range out of bounds")
|
||||
}
|
||||
|
||||
/// predicate used by `SerdeJsonEscapeIf` to decide whether a character should
/// be written as a `\u` escape instead of literally
pub trait SerdeJsonEscapeIfTest {
    fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool>;
}

/// return types accepted from an escape-test closure: plain `bool` for
/// infallible tests, or `Result<bool, E>` for fallible ones
pub trait SerdeJsonEscapeIfTestResult {
    fn to_result(self) -> serde_json::Result<bool>;
}

impl SerdeJsonEscapeIfTestResult for bool {
    // infallible case
    fn to_result(self) -> serde_json::Result<bool> {
        Ok(self)
    }
}

impl<E: Into<serde_json::Error>> SerdeJsonEscapeIfTestResult for Result<bool, E> {
    fn to_result(self) -> serde_json::Result<bool> {
        self.map_err(Into::into)
    }
}

// blanket impl: any `FnMut(char) -> bool` or `FnMut(char) -> Result<bool, _>`
// closure works as an escape test
impl<T: ?Sized + FnMut(char) -> R, R: SerdeJsonEscapeIfTestResult> SerdeJsonEscapeIfTest for T {
    fn char_needs_escape(&mut self, ch: char) -> serde_json::Result<bool> {
        self(ch).to_result()
    }
}
|
||||
|
||||
pub trait SerdeJsonEscapeIfFormatter: serde_json::ser::Formatter {
|
||||
fn write_unicode_escape<W>(&mut self, writer: &mut W, ch: char) -> io::Result<()>
|
||||
where
|
||||
W: ?Sized + io::Write,
|
||||
{
|
||||
for utf16 in ch.encode_utf16(&mut [0; 2]) {
|
||||
write!(writer, "\\u{utf16:04x}")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// The stock serde_json formatters need no customization: they inherit the
// default `write_unicode_escape` above.
impl SerdeJsonEscapeIfFormatter for serde_json::ser::CompactFormatter {}
impl SerdeJsonEscapeIfFormatter for serde_json::ser::PrettyFormatter<'_> {}
|
||||
|
||||
/// A [`serde_json::ser::Formatter`] adapter that writes, as `\uXXXX`
/// escapes, every string character for which `test` answers `true`, and
/// delegates all other formatting to `base`.
pub struct SerdeJsonEscapeIf<Test, Base = serde_json::ser::CompactFormatter> {
    /// Underlying formatter that handles everything except the extra escapes.
    pub base: Base,
    /// Per-character predicate selecting which characters to escape.
    pub test: Test,
}
|
||||
|
||||
// Forwards every `Formatter` callback to `self.base` unchanged, except
// `write_string_fragment`, which additionally escapes the characters that
// `self.test` selects.
impl<Test: SerdeJsonEscapeIfTest, Base: SerdeJsonEscapeIfFormatter> serde_json::ser::Formatter
    for SerdeJsonEscapeIf<Test, Base>
{
    fn write_null<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_null(writer)
    }

    fn write_bool<W>(&mut self, writer: &mut W, value: bool) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_bool(writer, value)
    }

    fn write_i8<W>(&mut self, writer: &mut W, value: i8) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_i8(writer, value)
    }

    fn write_i16<W>(&mut self, writer: &mut W, value: i16) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_i16(writer, value)
    }

    fn write_i32<W>(&mut self, writer: &mut W, value: i32) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_i32(writer, value)
    }

    fn write_i64<W>(&mut self, writer: &mut W, value: i64) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_i64(writer, value)
    }

    fn write_i128<W>(&mut self, writer: &mut W, value: i128) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_i128(writer, value)
    }

    fn write_u8<W>(&mut self, writer: &mut W, value: u8) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_u8(writer, value)
    }

    fn write_u16<W>(&mut self, writer: &mut W, value: u16) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_u16(writer, value)
    }

    fn write_u32<W>(&mut self, writer: &mut W, value: u32) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_u32(writer, value)
    }

    fn write_u64<W>(&mut self, writer: &mut W, value: u64) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_u64(writer, value)
    }

    fn write_u128<W>(&mut self, writer: &mut W, value: u128) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_u128(writer, value)
    }

    fn write_f32<W>(&mut self, writer: &mut W, value: f32) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_f32(writer, value)
    }

    fn write_f64<W>(&mut self, writer: &mut W, value: f64) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_f64(writer, value)
    }

    fn write_number_str<W>(&mut self, writer: &mut W, value: &str) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_number_str(writer, value)
    }

    fn begin_string<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.begin_string(writer)
    }

    fn end_string<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.end_string(writer)
    }

    // The one non-trivial override: splits the fragment at each character
    // that `test` flags, writing the escape-free run through `base` and the
    // flagged character as a `\uXXXX` escape.
    fn write_string_fragment<W>(&mut self, writer: &mut W, mut fragment: &str) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        while let Some((next_escape_index, next_escape_char)) = fragment
            .char_indices()
            .find_map(|(index, ch)| match self.test.char_needs_escape(ch) {
                Ok(false) => None,
                Ok(true) => Some(Ok((index, ch))),
                Err(e) => Some(Err(e)),
            })
            .transpose()?
        {
            // Everything before `next_escape_index` needs no escaping.
            let (no_escapes, rest) = fragment.split_at(next_escape_index);
            fragment = &rest[next_escape_char.len_utf8()..];
            self.base.write_string_fragment(writer, no_escapes)?;
            self.base.write_unicode_escape(writer, next_escape_char)?;
        }
        // Trailing run with no characters left to escape.
        self.base.write_string_fragment(writer, fragment)
    }

    fn write_char_escape<W>(
        &mut self,
        writer: &mut W,
        char_escape: serde_json::ser::CharEscape,
    ) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_char_escape(writer, char_escape)
    }

    fn write_byte_array<W>(&mut self, writer: &mut W, value: &[u8]) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_byte_array(writer, value)
    }

    fn begin_array<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.begin_array(writer)
    }

    fn end_array<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.end_array(writer)
    }

    fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.begin_array_value(writer, first)
    }

    fn end_array_value<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.end_array_value(writer)
    }

    fn begin_object<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.begin_object(writer)
    }

    fn end_object<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.end_object(writer)
    }

    fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.begin_object_key(writer, first)
    }

    fn end_object_key<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.end_object_key(writer)
    }

    fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.begin_object_value(writer)
    }

    fn end_object_value<W>(&mut self, writer: &mut W) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.end_object_value(writer)
    }

    fn write_raw_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()>
    where
        W: ?Sized + io::Write,
    {
        self.base.write_raw_fragment(writer, fragment)
    }
}
|
||||
|
||||
fn serialize_to_json_ascii_helper<F: SerdeJsonEscapeIfFormatter, S: serde::Serialize + ?Sized>(
|
||||
v: &S,
|
||||
base: F,
|
||||
) -> serde_json::Result<String> {
|
||||
let mut retval = Vec::new();
|
||||
v.serialize(&mut serde_json::ser::Serializer::with_formatter(
|
||||
&mut retval,
|
||||
SerdeJsonEscapeIf {
|
||||
base,
|
||||
test: |ch| ch < '\x20' || ch > '\x7F',
|
||||
},
|
||||
))?;
|
||||
String::from_utf8(retval).map_err(|_| serde::ser::Error::custom("invalid UTF-8"))
|
||||
}
|
||||
|
||||
/// Serializes `v` to compact, ASCII-only JSON (non-ASCII and control
/// characters become `\uXXXX` escapes).
pub fn serialize_to_json_ascii<T: serde::Serialize + ?Sized>(v: &T) -> serde_json::Result<String> {
    serialize_to_json_ascii_helper(v, serde_json::ser::CompactFormatter)
}
|
||||
|
||||
/// Serializes `v` to pretty-printed, ASCII-only JSON using serde_json's
/// default indentation.
pub fn serialize_to_json_ascii_pretty<T: serde::Serialize + ?Sized>(
    v: &T,
) -> serde_json::Result<String> {
    serialize_to_json_ascii_helper(v, serde_json::ser::PrettyFormatter::new())
}
|
||||
|
||||
pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
|
||||
v: &T,
|
||||
indent: &str,
|
||||
) -> serde_json::Result<String> {
|
||||
serialize_to_json_ascii_helper(
|
||||
v,
|
||||
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
|
||||
)
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue