
WIP adding FPGA support

Jacob Lifshay 2025-09-16 01:40:42 -07:00
parent c06ef56482
commit 32f713f849
Signed by: programmerjake
SSH key fingerprint: SHA256:HnFTLGpSm4Q4Fj502oCFisjZSoakwEuTsJJMSke63RQ
3 changed files with 1021 additions and 0 deletions


@@ -0,0 +1,817 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
bundle::Bundle,
intern::Interned,
module::Module,
util::{HashMap, HashSet, job_server::AcquiredJob},
};
use hashbrown::hash_map::Entry;
use petgraph::{
algo::{DfsSpace, kosaraju_scc, toposort},
graph::DiGraph,
visit::{GraphBase, Visitable},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
use std::{
any::{Any, TypeId},
collections::BTreeSet,
fmt::{self, Write},
hash::{Hash, Hasher},
iter, mem,
sync::Arc,
};
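/// A concrete build artifact that flows between jobs: either an in-memory
/// module or a file on disk. [`JobItemName`] is the corresponding name-only key.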
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
pub enum JobItem {
Module { value: Module<Bundle> },
File { path: Interned<str> },
}
impl JobItem {
pub fn name(&self) -> JobItemName {
match self {
JobItem::Module { value } => JobItemName::Module { name: value.name() },
&JobItem::File { path } => JobItemName::File { path },
}
}
}
#[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
pub enum JobItemName {
Module { name: Interned<str> },
File { path: Interned<str> },
}
pub struct CommandLine {}
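/// A kind of build job. An implementation describes a job's input and output
/// items, round-trips the job through a command line, and runs it under an
/// [`AcquiredJob`] from the job server.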
pub trait JobKind: 'static + Send + Sync + Hash + Eq + fmt::Debug {
type Job: 'static + Send + Sync + Hash + Eq + fmt::Debug;
fn inputs(&self, job: &Self::Job) -> Interned<[JobItemName]>;
fn outputs(&self, job: &Self::Job) -> Interned<[JobItemName]>;
fn to_command_line(&self, job: &Self::Job) -> Interned<[Interned<str>]>;
fn parse_command_line(
&self,
command_line: Interned<[Interned<str>]>,
) -> clap::error::Result<Self::Job>;
fn run(
&self,
job: &Self::Job,
inputs: &[JobItem],
acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>>;
}
trait DynJobKindTrait: 'static + Send + Sync + fmt::Debug {
fn as_any(&self) -> &dyn Any;
fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync>;
fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool;
fn hash_dyn(&self, state: &mut dyn Hasher);
fn parse_command_line_dyn(
self: Arc<Self>,
command_line: Interned<[Interned<str>]>,
) -> clap::error::Result<DynJob>;
}
impl<T: JobKind> DynJobKindTrait for T {
fn as_any(&self) -> &dyn Any {
self
}
fn as_arc_any(self: Arc<Self>) -> Arc<dyn Any + Send + Sync> {
self
}
fn eq_dyn(&self, other: &dyn DynJobKindTrait) -> bool {
other
.as_any()
.downcast_ref::<T>()
.is_some_and(|other| self == other)
}
fn hash_dyn(&self, mut state: &mut dyn Hasher) {
self.hash(&mut state)
}
fn parse_command_line_dyn(
self: Arc<T>,
command_line: Interned<[Interned<str>]>,
) -> clap::error::Result<DynJob> {
let job = self.parse_command_line(command_line)?;
let inputs = self.inputs(&job);
let outputs = self.outputs(&job);
Ok(DynJob(Arc::new(inner::DynJob {
kind: self,
job,
inputs,
outputs,
})))
}
}
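/// Type-erased, reference-counted [`JobKind`]. Equality and hashing first
/// compare the concrete type, then delegate to the underlying kind's own
/// `Eq`/`Hash` impls.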
#[derive(Clone)]
pub struct DynJobKind(Arc<dyn DynJobKindTrait>);
impl DynJobKind {
pub fn from_arc<T: JobKind>(job_kind: Arc<T>) -> Self {
if TypeId::of::<T>() == TypeId::of::<Self>() {
Self::clone(
&Arc::downcast::<Self>(job_kind.as_arc_any())
.ok()
.expect("already checked type"),
)
} else {
Self(job_kind)
}
}
pub fn new<T: JobKind>(job_kind: T) -> Self {
if let Some(job_kind) = DynJobKindTrait::as_any(&job_kind).downcast_ref::<Self>() {
job_kind.clone()
} else {
Self(Arc::new(job_kind))
}
}
pub fn type_id(&self) -> TypeId {
DynJobKindTrait::as_any(&*self.0).type_id()
}
pub fn downcast_ref<T: JobKind>(&self) -> Option<&T> {
DynJobKindTrait::as_any(&*self.0).downcast_ref()
}
pub fn downcast_arc<T: JobKind>(self) -> Result<Arc<T>, Self> {
if self.downcast_ref::<T>().is_some() {
Ok(Arc::downcast::<T>(self.0.as_arc_any())
.ok()
.expect("already checked type"))
} else {
Err(self)
}
}
pub fn parse_command_line(
&self,
command_line: Interned<[Interned<str>]>,
) -> clap::error::Result<DynJob> {
self.0.clone().parse_command_line_dyn(command_line)
}
}
impl Hash for DynJobKind {
fn hash<H: Hasher>(&self, state: &mut H) {
DynJobKindTrait::as_any(&*self.0).type_id().hash(state);
DynJobKindTrait::hash_dyn(&*self.0, state);
}
}
impl PartialEq for DynJobKind {
fn eq(&self, other: &Self) -> bool {
DynJobKindTrait::eq_dyn(&*self.0, &*other.0)
}
}
impl Eq for DynJobKind {}
impl fmt::Debug for DynJobKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
trait DynJobTrait: 'static + Send + Sync + fmt::Debug {
fn as_any(&self) -> &dyn Any;
fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool;
fn hash_dyn(&self, state: &mut dyn Hasher);
fn kind_type_id(&self) -> TypeId;
fn kind(&self) -> DynJobKind;
fn inputs(&self) -> Interned<[JobItemName]>;
fn outputs(&self) -> Interned<[JobItemName]>;
fn to_command_line(&self) -> Interned<[Interned<str>]>;
fn run(&self, inputs: &[JobItem], acquired_job: &mut AcquiredJob)
-> eyre::Result<Vec<JobItem>>;
}
mod inner {
use super::*;
#[derive(Debug, PartialEq, Eq, Hash)]
pub(crate) struct DynJob<T: JobKind> {
pub(crate) kind: Arc<T>,
pub(crate) job: T::Job,
pub(crate) inputs: Interned<[JobItemName]>,
pub(crate) outputs: Interned<[JobItemName]>,
}
}
impl<T: JobKind> DynJobTrait for inner::DynJob<T> {
fn as_any(&self) -> &dyn Any {
self
}
fn eq_dyn(&self, other: &dyn DynJobTrait) -> bool {
other
.as_any()
.downcast_ref::<inner::DynJob<T>>()
.is_some_and(|other| self == other)
}
fn hash_dyn(&self, mut state: &mut dyn Hasher) {
self.hash(&mut state);
}
fn kind_type_id(&self) -> TypeId {
TypeId::of::<T>()
}
fn kind(&self) -> DynJobKind {
DynJobKind(self.kind.clone())
}
fn inputs(&self) -> Interned<[JobItemName]> {
self.inputs
}
fn outputs(&self) -> Interned<[JobItemName]> {
self.outputs
}
fn to_command_line(&self) -> Interned<[Interned<str>]> {
self.kind.to_command_line(&self.job)
}
fn run(
&self,
inputs: &[JobItem],
acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
self.kind.run(&self.job, inputs, acquired_job)
}
}
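/// Type-erased job bundled with its kind and precomputed input/output names.
/// It serializes as the kind plus the job's command line, and is rebuilt by
/// reparsing that command line on deserialization.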
#[derive(Clone, Debug)]
pub struct DynJob(Arc<dyn DynJobTrait>);
impl DynJob {
pub fn kind_type_id(&self) -> TypeId {
self.0.kind_type_id()
}
pub fn downcast<T: JobKind>(&self) -> Option<(&T, &T::Job)> {
let inner::DynJob { kind, job, .. } = self.0.as_any().downcast_ref()?;
Some((kind, job))
}
pub fn kind(&self) -> DynJobKind {
DynJobTrait::kind(&*self.0)
}
pub fn inputs(&self) -> Interned<[JobItemName]> {
DynJobTrait::inputs(&*self.0)
}
pub fn outputs(&self) -> Interned<[JobItemName]> {
DynJobTrait::outputs(&*self.0)
}
pub fn to_command_line(&self) -> Interned<[Interned<str>]> {
DynJobTrait::to_command_line(&*self.0)
}
pub fn run(
&self,
inputs: &[JobItem],
acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
DynJobTrait::run(&*self.0, inputs, acquired_job)
}
}
impl Eq for DynJob {}
impl PartialEq for DynJob {
fn eq(&self, other: &Self) -> bool {
DynJobTrait::eq_dyn(&*self.0, &*other.0)
}
}
impl Hash for DynJob {
fn hash<H: Hasher>(&self, state: &mut H) {
DynJobTrait::hash_dyn(&*self.0, state);
}
}
#[derive(Serialize, Deserialize)]
#[serde(rename = "DynJob")]
struct DynJobSerde {
kind: DynJobKind,
command_line: Interned<[Interned<str>]>,
}
impl Serialize for DynJob {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
DynJobSerde {
kind: self.kind(),
command_line: self.to_command_line(),
}
.serialize(serializer)
}
}
impl<'de> Deserialize<'de> for DynJob {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let DynJobSerde { kind, command_line } = Deserialize::deserialize(deserializer)?;
kind.parse_command_line(command_line)
.map_err(D::Error::custom)
}
}
#[derive(Clone, Debug)]
enum JobGraphNode {
Job(DynJob),
Item(#[allow(dead_code, reason = "used for Debug")] JobItemName),
}
type JobGraphInner = DiGraph<JobGraphNode, ()>;
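/// Bipartite dependency graph of jobs and the items they consume/produce,
/// kept topologically sorted as jobs are added.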
#[derive(Clone, Default)]
pub struct JobGraph {
jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
graph: JobGraphInner,
topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
}
impl fmt::Debug for JobGraph {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
jobs: _,
items: _,
graph,
topological_order,
space: _,
} = self;
f.debug_struct("JobGraph")
.field("graph", graph)
.field("topological_order", topological_order)
.finish_non_exhaustive()
}
}
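/// Error returned by [`JobGraph::try_add_jobs`] when adding the given jobs
/// would make the dependency graph cyclic.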
#[derive(Clone, Debug)]
pub struct CycleError {
pub job: DynJob,
pub output: JobItemName,
}
impl std::error::Error for CycleError {}
impl fmt::Display for CycleError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self { job, output } = self;
write!(
f,
"adding job to job graph would introduce a cyclic dependency through job output:\n\
{output:?}\n\
job:\n{job:?}",
)
}
}
#[derive(Copy, Clone, Debug)]
enum EscapeForUnixShellState {
DollarSingleQuote,
SingleQuote,
Unquoted,
}
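/// Iterator yielding a shell-safe quoting of `bytes`, using the lightest form
/// that works: `foo.txt` stays as-is, `a b` becomes `'a b'`, `ab'cd` becomes
/// `'ab'\''cd'`, and input containing control or non-ASCII bytes uses the
/// `$'...'` form (so a lone newline becomes `$'\n'`).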
#[derive(Clone)]
pub struct EscapeForUnixShell<'a> {
state: EscapeForUnixShellState,
prefix: [u8; 3],
bytes: &'a [u8],
}
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for c in self.clone() {
f.write_char(c)?;
}
Ok(())
}
}
impl<'a> EscapeForUnixShell<'a> {
pub fn new(s: &'a str) -> Self {
Self::from_bytes(s.as_bytes())
}
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
let mut prefix = [0; 3];
prefix[..bytes.len()].copy_from_slice(bytes);
prefix
}
pub fn from_bytes(bytes: &'a [u8]) -> Self {
let mut needs_single_quote = bytes.is_empty();
for &b in bytes {
match b {
b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
0..0x20 | 0x7F.. => {
return Self {
state: EscapeForUnixShellState::DollarSingleQuote,
prefix: Self::make_prefix(b"$'"),
bytes,
};
}
_ => {}
}
}
if needs_single_quote {
Self {
state: EscapeForUnixShellState::SingleQuote,
prefix: Self::make_prefix(b"'"),
bytes,
}
} else {
Self {
state: EscapeForUnixShellState::Unquoted,
prefix: Self::make_prefix(b""),
bytes,
}
}
}
}
impl Iterator for EscapeForUnixShell<'_> {
type Item = char;
fn next(&mut self) -> Option<Self::Item> {
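// Drain any pending prefix bytes (quote openers or multi-character escapes)
// one character at a time before consuming the next input byte.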
match &mut self.prefix {
[0, 0, 0] => {}
[0, 0, v] | // find first
[0, v, _] | // non-zero byte
[v, _, _] => {
let retval = *v as char;
*v = 0;
return Some(retval);
}
}
let Some(&next_byte) = self.bytes.split_off_first() else {
return match self.state {
EscapeForUnixShellState::DollarSingleQuote
| EscapeForUnixShellState::SingleQuote => {
self.state = EscapeForUnixShellState::Unquoted;
Some('\'')
}
EscapeForUnixShellState::Unquoted => None,
};
};
match self.state {
EscapeForUnixShellState::DollarSingleQuote => match next_byte {
b'\'' | b'\\' => {
self.prefix = Self::make_prefix(&[next_byte]);
Some('\\')
}
b'\t' => {
self.prefix = Self::make_prefix(b"t");
Some('\\')
}
b'\n' => {
self.prefix = Self::make_prefix(b"n");
Some('\\')
}
b'\r' => {
self.prefix = Self::make_prefix(b"r");
Some('\\')
}
0x20..=0x7E => Some(next_byte as char),
_ => {
self.prefix = [
b'x',
char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
as u8,
char::from_digit(next_byte as u32 & 0xF, 0x10)
.expect("known to be in range") as u8,
];
Some('\\')
}
},
EscapeForUnixShellState::SingleQuote => {
if next_byte == b'\'' {
self.prefix = Self::make_prefix(b"\\''");
Some('\'')
} else {
Some(next_byte as char)
}
}
EscapeForUnixShellState::Unquoted => match next_byte {
b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
| b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
| b'}' | b'~' => {
self.prefix = Self::make_prefix(&[next_byte]);
Some('\\')
}
_ => Some(next_byte as char),
},
}
}
}
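/// How a string is being escaped for a Unix Makefile: as a target/prerequisite
/// (`NonRecipe`), or inside a recipe line, optionally shell-escaping it first.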
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum UnixMakefileEscapeKind {
NonRecipe,
RecipeWithoutShellEscaping,
RecipeWithShellEscaping,
}
#[derive(Copy, Clone)]
pub struct EscapeForUnixMakefile<'a> {
s: &'a str,
kind: UnixMakefileEscapeKind,
}
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.do_write(f, fmt::Write::write_str, fmt::Write::write_char, |_, _| {
Ok(())
})
}
}
impl<'a> EscapeForUnixMakefile<'a> {
fn do_write<S: ?Sized, E>(
&self,
state: &mut S,
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
write_char: impl Fn(&mut S, char) -> Result<(), E>,
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
) -> Result<(), E> {
let escape_recipe_char = |c| match c {
'$' => write_str(state, "$$"),
'\0'..='\x1F' | '\x7F' => {
panic!("can't escape a control character for Unix Makefile: {c:?}");
}
_ => write_char(state, c),
};
match self.kind {
UnixMakefileEscapeKind::NonRecipe => self.s.chars().try_for_each(|c| match c {
'=' => {
add_variable(state, "EQUALS = =")?;
write_str(state, "$(EQUALS)")
}
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
'$' => write_str(state, "$$"),
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
write_char(state, '\\')?;
write_char(state, c)
}
'\0'..='\x1F' | '\x7F' => {
panic!("can't escape a control character for Unix Makefile: {c:?}");
}
_ => write_char(state, c),
}),
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
self.s.chars().try_for_each(escape_recipe_char)
}
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
}
}
}
pub fn new(
s: &'a str,
kind: UnixMakefileEscapeKind,
needed_variables: &mut BTreeSet<&'static str>,
) -> Self {
let retval = Self { s, kind };
let Ok(()) = retval.do_write(
needed_variables,
|_, _| Ok(()),
|_, _| Ok(()),
|needed_variables, variable| -> Result<(), std::convert::Infallible> {
needed_variables.insert(variable);
Ok(())
},
);
retval
}
}
impl JobGraph {
pub fn new() -> Self {
Self::default()
}
pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
&mut self,
jobs: I,
) -> Result<(), CycleError> {
let jobs = jobs.into_iter();
let mut new_nodes =
HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default());
for job in jobs {
let Entry::Vacant(entry) = self.jobs.entry(job) else {
continue;
};
let job_node_id = self.graph.add_node(JobGraphNode::Job(entry.key().clone()));
new_nodes.insert(job_node_id);
let entry = entry.insert_entry(job_node_id);
for (item, is_input) in entry
.key()
.inputs()
.iter()
.zip(iter::repeat(true))
.chain(entry.key().outputs().iter().zip(iter::repeat(false)))
{
let item_node_id;
match self.items.entry(*item) {
Entry::Occupied(entry) => item_node_id = *entry.get(),
Entry::Vacant(entry) => {
item_node_id = self.graph.add_node(JobGraphNode::Item(*item));
new_nodes.insert(item_node_id);
entry.insert(item_node_id);
}
}
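// Edges point in dependency-flow order: item -> job for inputs,
// job -> item for outputs.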
let mut source = job_node_id;
let mut dest = item_node_id;
if is_input {
mem::swap(&mut source, &mut dest);
}
self.graph.add_edge(source, dest, ());
}
}
match toposort(&self.graph, Some(&mut self.space)) {
Ok(v) => {
self.topological_order = v;
Ok(())
}
Err(_) => {
// there's at least one cycle, find one!
let cycle = kosaraju_scc(&self.graph)
.into_iter()
.find_map(|scc| {
if scc.len() <= 1 {
// can't be a cycle since our graph is bipartite --
// jobs only connect to items, never jobs to jobs or items to items
None
} else {
Some(scc)
}
})
.expect("we know there's a cycle");
let cycle_set = HashSet::from_iter(cycle.iter().copied());
let job = cycle
.into_iter()
.find_map(|node_id| {
if let JobGraphNode::Job(job) = &self.graph[node_id] {
Some(job.clone())
} else {
None
}
})
.expect("a job must be part of the cycle");
let output = job
.outputs()
.into_iter()
.find(|output| cycle_set.contains(&self.items[output]))
.expect("an output must be part of the cycle");
for node in new_nodes {
self.graph.remove_node(node);
}
Err(CycleError { job, output })
}
}
}
#[track_caller]
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
match self.try_add_jobs(jobs) {
Ok(()) => {}
Err(e) => panic!("error: {e}"),
}
}
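/// Renders the job graph as a Unix Makefile: one rule per job, with the job's
/// file outputs as targets, file inputs as prerequisites, and its command line
/// as the recipe. `Module` items have no on-disk representation and are
/// skipped; any helper variables the escaping needs (e.g. `EQUALS = =`) are
/// prepended to the result.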
pub fn to_unix_makefile(&self) -> String {
macro_rules! write_str {
($s:expr, $($rest:tt)*) => {
String::write_fmt(&mut $s, format_args!($($rest)*)).expect("String::write_fmt can't fail")
};
}
let mut retval = String::new();
let mut needed_variables = BTreeSet::new();
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
for (index, output) in job.outputs().into_iter().enumerate() {
match output {
JobItemName::Module { .. } => continue,
JobItemName::File { path } => {
if index != 0 {
retval.push_str(" ");
}
write_str!(
retval,
"{}",
EscapeForUnixMakefile::new(
&path,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
);
}
}
}
retval.push_str(":");
for input in job.inputs() {
match input {
JobItemName::Module { .. } => continue,
JobItemName::File { path } => {
write_str!(
retval,
" {}",
EscapeForUnixMakefile::new(
&path,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
);
}
}
}
retval.push_str("\n\t");
for (index, arg) in job.to_command_line().into_iter().enumerate() {
if index != 0 {
retval.push_str(" ");
}
write_str!(
retval,
"{}",
EscapeForUnixMakefile::new(
&arg,
UnixMakefileEscapeKind::RecipeWithShellEscaping,
&mut needed_variables
)
);
}
retval.push_str("\n");
}
if !needed_variables.is_empty() {
retval.insert_str(
0,
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
);
}
retval
}
}
impl Extend<DynJob> for JobGraph {
#[track_caller]
fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
self.add_jobs(iter);
}
}
impl FromIterator<DynJob> for JobGraph {
#[track_caller]
fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
let mut retval = Self::new();
retval.add_jobs(iter);
retval
}
}
impl Serialize for JobGraph {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
serializer.serialize_element(job)?;
}
serializer.end()
}
}
impl<'de> Deserialize<'de> for JobGraph {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let jobs = Vec::<DynJob>::deserialize(deserializer)?;
let mut retval = JobGraph::new();
retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
Ok(retval)
}
}


@@ -87,6 +87,7 @@ pub mod _docs;
pub mod annotations;
pub mod array;
pub mod build;
pub mod bundle;
pub mod cli;
pub mod clock;
@@ -104,6 +105,7 @@ pub mod reg;
pub mod reset;
pub mod sim;
pub mod source_location;
pub mod target;
pub mod testing;
pub mod ty;
pub mod util;


@@ -0,0 +1,202 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{intern::Interned, util::job_server::AcquiredJob};
use std::{
any::Any,
fmt,
iter::FusedIterator,
sync::{Arc, Mutex},
};
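// Core traits for the target layer this commit starts: a `Target` names itself
// and lists its `Peripheral`s, and a `Tool` is a named step that can be run
// under an `AcquiredJob`.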
pub trait Peripheral: Any + Send + Sync + fmt::Debug {}
pub trait Tool: Any + Send + Sync + fmt::Debug {
fn name(&self) -> Interned<str>;
fn run(&self, acquired_job: &mut AcquiredJob);
}
pub trait Target: Any + Send + Sync + fmt::Debug {
fn name(&self) -> Interned<str>;
fn peripherals(&self) -> Interned<[Interned<dyn Peripheral>]>;
}
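/// Name-to-target map stored as a sorted, deduplicated `Vec` so lookups can use
/// binary search.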
#[derive(Clone)]
struct TargetsMap(Vec<(Interned<str>, Interned<dyn Target>)>);
impl TargetsMap {
fn sort(&mut self) {
self.0.sort_by(|(k1, _), (k2, _)| str::cmp(k1, k2));
self.0.dedup_by_key(|(k, _)| *k);
}
fn from_unsorted_vec(unsorted_vec: Vec<(Interned<str>, Interned<dyn Target>)>) -> Self {
let mut retval = Self(unsorted_vec);
retval.sort();
retval
}
fn extend_from_unsorted_slice(&mut self, additional: &[(Interned<str>, Interned<dyn Target>)]) {
self.0.extend_from_slice(additional);
self.sort();
}
}
impl Default for TargetsMap {
fn default() -> Self {
Self::from_unsorted_vec(vec![
// TODO: add default targets here
])
}
}
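// The global target registry lives behind a `Mutex` and is lazily initialized;
// all access funnels through this helper so the lock is held only briefly.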
fn access_targets<F: FnOnce(&mut Option<Arc<TargetsMap>>) -> R, R>(f: F) -> R {
static TARGETS: Mutex<Option<Arc<TargetsMap>>> = Mutex::new(None);
let mut targets_lock = TARGETS.lock().expect("shouldn't be poisoned");
f(&mut targets_lock)
}
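/// Registers additional targets by name. Names that are already registered keep
/// their existing entry (the stable sort plus dedup keeps the first occurrence).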
pub fn add_targets<I: IntoIterator<Item = Interned<dyn Target>>>(additional: I) {
// run iterator and target methods outside of lock
let additional = Vec::from_iter(additional.into_iter().map(|v| (v.name(), v)));
access_targets(|targets| {
Arc::make_mut(targets.get_or_insert_default()).extend_from_unsorted_slice(&additional);
});
}
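/// Returns a snapshot of the currently registered targets; targets added later
/// via [`add_targets`] do not show up in an existing snapshot.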
pub fn targets() -> TargetsSnapshot {
access_targets(|targets| match targets {
Some(targets) => TargetsSnapshot {
targets: targets.clone(),
},
None => {
let new_targets = Arc::<TargetsMap>::default();
*targets = Some(new_targets.clone());
TargetsSnapshot {
targets: new_targets,
}
}
})
}
#[derive(Clone)]
pub struct TargetsSnapshot {
targets: Arc<TargetsMap>,
}
impl TargetsSnapshot {
pub fn get(&self, key: &str) -> Option<Interned<dyn Target>> {
let index = self
.targets
.0
.binary_search_by_key(&key, |(k, _v)| k)
.ok()?;
Some(self.targets.0[index].1)
}
pub fn iter(&self) -> TargetsIter {
self.into_iter()
}
pub fn len(&self) -> usize {
self.targets.0.len()
}
}
impl fmt::Debug for TargetsSnapshot {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("TargetsSnapshot ")?;
f.debug_map().entries(self).finish()
}
}
impl IntoIterator for &'_ mut TargetsSnapshot {
type Item = (Interned<str>, Interned<dyn Target>);
type IntoIter = TargetsIter;
fn into_iter(self) -> Self::IntoIter {
self.clone().into_iter()
}
}
impl IntoIterator for &'_ TargetsSnapshot {
type Item = (Interned<str>, Interned<dyn Target>);
type IntoIter = TargetsIter;
fn into_iter(self) -> Self::IntoIter {
self.clone().into_iter()
}
}
impl IntoIterator for TargetsSnapshot {
type Item = (Interned<str>, Interned<dyn Target>);
type IntoIter = TargetsIter;
fn into_iter(self) -> Self::IntoIter {
TargetsIter {
indexes: 0..self.targets.0.len(),
targets: self.targets,
}
}
}
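/// Double-ended, exact-size iterator over a [`TargetsSnapshot`], yielding
/// `(name, target)` pairs in sorted name order.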
#[derive(Clone)]
pub struct TargetsIter {
targets: Arc<TargetsMap>,
indexes: std::ops::Range<usize>,
}
impl fmt::Debug for TargetsIter {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("TargetsIter ")?;
f.debug_map().entries(self.clone()).finish()
}
}
impl Iterator for TargetsIter {
type Item = (Interned<str>, Interned<dyn Target>);
fn next(&mut self) -> Option<Self::Item> {
Some(self.targets.0[self.indexes.next()?])
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.indexes.size_hint()
}
fn count(self) -> usize {
self.indexes.len()
}
fn last(mut self) -> Option<Self::Item> {
self.next_back()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
Some(self.targets.0[self.indexes.nth(n)?])
}
fn fold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
self.indexes
.fold(init, move |retval, index| f(retval, self.targets.0[index]))
}
}
impl FusedIterator for TargetsIter {}
impl DoubleEndedIterator for TargetsIter {
fn next_back(&mut self) -> Option<Self::Item> {
Some(self.targets.0[self.indexes.next_back()?])
}
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
Some(self.targets.0[self.indexes.nth_back(n)?])
}
fn rfold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
self.indexes
.rfold(init, move |retval, index| f(retval, self.targets.0[index]))
}
}
impl ExactSizeIterator for TargetsIter {
fn len(&self) -> usize {
self.indexes.len()
}
}