forked from libre-chip/fayalite
Compare commits: b63676d0ca ... b7ec623bfa
2 commits: b7ec623bfa, 831c9e28d9
6 changed files with 2170 additions and 11 deletions
@@ -3,14 +3,20 @@
#![cfg_attr(test, recursion_limit = "512")]
use proc_macro2::{Span, TokenStream};
use quote::{quote, ToTokens};
use std::io::{ErrorKind, Write};
use std::{
    collections::{hash_map::Entry, HashMap},
    io::{ErrorKind, Write},
};
use syn::{
    bracketed, parenthesized,
    bracketed,
    ext::IdentExt,
    parenthesized,
    parse::{Parse, ParseStream, Parser},
    parse_quote,
    punctuated::Pair,
    punctuated::{Pair, Punctuated},
    spanned::Spanned,
    AttrStyle, Attribute, Error, Item, ItemFn, Token,
    token::{Bracket, Paren},
    AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token,
};

mod fold;
@@ -19,6 +25,7 @@ mod hdl_enum;
mod hdl_type_alias;
mod hdl_type_common;
mod module;
mod process_cfg;

pub(crate) trait CustomToken:
    Copy
@@ -59,6 +66,10 @@ mod kw {
        };
    }

    custom_keyword!(all);
    custom_keyword!(any);
    custom_keyword!(cfg);
    custom_keyword!(cfg_attr);
    custom_keyword!(clock_domain);
    custom_keyword!(connect_inexact);
    custom_keyword!(custom_bounds);
@@ -75,6 +86,7 @@ mod kw {
    custom_keyword!(no_reset);
    custom_keyword!(no_runtime_generics);
    custom_keyword!(no_static);
    custom_keyword!(not);
    custom_keyword!(outline_generated);
    custom_keyword!(output);
    custom_keyword!(reg_builder);
@@ -901,15 +913,278 @@ fn hdl_module_impl(item: ItemFn) -> syn::Result<TokenStream> {
    Ok(contents)
}

pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
    let kw = kw::hdl_module::default();
    hdl_module_impl(syn::parse2(quote! { #[#kw(#attr)] #item })?)
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) enum CfgExpr {
    Option {
        ident: Ident,
        value: Option<(Token![=], LitStr)>,
    },
    All {
        all: kw::all,
        paren: Paren,
        exprs: Punctuated<CfgExpr, Token![,]>,
    },
    Any {
        any: kw::any,
        paren: Paren,
        exprs: Punctuated<CfgExpr, Token![,]>,
    },
    Not {
        not: kw::not,
        paren: Paren,
        expr: Box<CfgExpr>,
        trailing_comma: Option<Token![,]>,
    },
}

pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
    let kw = kw::hdl::default();
    let item = quote! { #[#kw(#attr)] #item };
    let item = syn::parse2::<Item>(item)?;
impl Parse for CfgExpr {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        match input.cursor().ident() {
            Some((_, cursor)) if cursor.eof() => {
                return Ok(CfgExpr::Option {
                    ident: input.call(Ident::parse_any)?,
                    value: None,
                });
            }
            _ => {}
        }
        if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
            return Ok(CfgExpr::Option {
                ident: input.call(Ident::parse_any)?,
                value: Some((input.parse()?, input.parse()?)),
            });
        }
        let contents;
        if input.peek(kw::all) {
            Ok(CfgExpr::All {
                all: input.parse()?,
                paren: parenthesized!(contents in input),
                exprs: contents.call(Punctuated::parse_terminated)?,
            })
        } else if input.peek(kw::any) {
            Ok(CfgExpr::Any {
                any: input.parse()?,
                paren: parenthesized!(contents in input),
                exprs: contents.call(Punctuated::parse_terminated)?,
            })
        } else if input.peek(kw::not) {
            Ok(CfgExpr::Not {
                not: input.parse()?,
                paren: parenthesized!(contents in input),
                expr: contents.parse()?,
                trailing_comma: contents.parse()?,
            })
        } else {
            Err(input.error("expected cfg-pattern"))
        }
    }
}

impl ToTokens for CfgExpr {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        match self {
            CfgExpr::Option { ident, value } => {
                ident.to_tokens(tokens);
                if let Some((eq, value)) = value {
                    eq.to_tokens(tokens);
                    value.to_tokens(tokens);
                }
            }
            CfgExpr::All { all, paren, exprs } => {
                all.to_tokens(tokens);
                paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
            }
            CfgExpr::Any { any, paren, exprs } => {
                any.to_tokens(tokens);
                paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
            }
            CfgExpr::Not {
                not,
                paren,
                expr,
                trailing_comma,
            } => {
                not.to_tokens(tokens);
                paren.surround(tokens, |tokens| {
                    expr.to_tokens(tokens);
                    trailing_comma.to_tokens(tokens);
                });
            }
        }
    }
}
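
Not part of the diff: a minimal sketch of how the CfgExpr parser above could be exercised, assuming it sits next to these impls so the file-level syn/quote imports apply. Note that per the Parse impl a bare option name (like `test`) is only accepted when it is the last token of its stream, which is how it appears inside `cfg(...)`; the predicates below are invented for illustration.

#[cfg(test)]
mod cfg_expr_sketch {
    use super::*;
    use quote::quote;

    #[test]
    fn parses_cfg_predicates() {
        // A plain `name = "value"` option.
        let e: CfgExpr = syn::parse2(quote! { feature = "xyz" }).unwrap();
        assert!(matches!(e, CfgExpr::Option { value: Some(_), .. }));

        // Compound forms; the bare `test` is accepted here because it is the
        // final token of the parenthesized stream it is parsed from.
        let e: CfgExpr = syn::parse2(quote! { all(feature = "xyz", test) }).unwrap();
        assert!(matches!(e, CfgExpr::All { .. }));

        let e: CfgExpr = syn::parse2(quote! { not(debug_assertions) }).unwrap();
        assert!(matches!(e, CfgExpr::Not { .. }));

        // ToTokens mirrors Parse, so a parsed expression re-emits the same tokens.
        let tokens = quote! { any(feature = "a", feature = "b") };
        let e: CfgExpr = syn::parse2(tokens.clone()).unwrap();
        assert_eq!(quote!(#e).to_string(), tokens.to_string());
    }
}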

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct Cfg {
    cfg: kw::cfg,
    paren: Paren,
    expr: CfgExpr,
    trailing_comma: Option<Token![,]>,
}

impl ToTokens for Cfg {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let Self {
            cfg,
            paren,
            expr,
            trailing_comma,
        } = self;
        cfg.to_tokens(tokens);
        paren.surround(tokens, |tokens| {
            expr.to_tokens(tokens);
            trailing_comma.to_tokens(tokens);
        });
    }
}

impl Parse for Cfg {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let contents;
        Ok(Self {
            cfg: input.parse()?,
            paren: parenthesized!(contents in input),
            expr: contents.parse()?,
            trailing_comma: contents.parse()?,
        })
    }
}

#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct CfgAttr {
    cfg_attr: kw::cfg_attr,
    paren: Paren,
    expr: CfgExpr,
    comma: Token![,],
    attrs: Punctuated<Meta, Token![,]>,
}

impl CfgAttr {
    pub(crate) fn to_cfg(&self) -> Cfg {
        Cfg {
            cfg: kw::cfg(self.cfg_attr.span),
            paren: self.paren,
            expr: self.expr.clone(),
            trailing_comma: None,
        }
    }
}

impl Parse for CfgAttr {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let contents;
        Ok(Self {
            cfg_attr: input.parse()?,
            paren: parenthesized!(contents in input),
            expr: contents.parse()?,
            comma: contents.parse()?,
            attrs: contents.call(Punctuated::parse_terminated)?,
        })
    }
}
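
Also not in the diff: a sketch of the CfgAttr round trip, i.e. parsing the tokens of a `#[cfg_attr(...)]` attribute and deriving the bare condition from it with to_cfg. The concrete attribute is made up; as above, this assumes the surrounding module's imports.

#[cfg(test)]
mod cfg_attr_sketch {
    use super::*;
    use quote::{quote, ToTokens};

    #[test]
    fn cfg_attr_to_cfg() {
        // Tokens as they appear in `#[cfg_attr(not(test), derive(Debug))]`,
        // including the `cfg_attr` path itself.
        let attr: CfgAttr =
            syn::parse2(quote! { cfg_attr(not(test), derive(Debug)) }).unwrap();
        // to_cfg keeps only the condition, re-spanned as a plain `cfg(...)`.
        let cfg = attr.to_cfg();
        assert_eq!(
            cfg.to_token_stream().to_string(),
            quote! { cfg(not(test)) }.to_string(),
        );
    }
}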

pub(crate) struct CfgAndValue {
    cfg: Cfg,
    eq_token: Token![=],
    value: LitBool,
}

impl Parse for CfgAndValue {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        Ok(Self {
            cfg: input.parse()?,
            eq_token: input.parse()?,
            value: input.parse()?,
        })
    }
}

pub(crate) struct Cfgs<T> {
    pub(crate) bracket: Bracket,
    pub(crate) cfgs_map: HashMap<Cfg, T>,
    pub(crate) cfgs_list: Vec<Cfg>,
}

impl<T> Cfgs<T> {
    fn insert_cfg(&mut self, cfg: Cfg, value: T) {
        match self.cfgs_map.entry(cfg) {
            Entry::Occupied(_) => {}
            Entry::Vacant(entry) => {
                self.cfgs_list.push(entry.key().clone());
                entry.insert(value);
            }
        }
    }
}

impl Parse for Cfgs<bool> {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let contents;
        let bracket = bracketed!(contents in input);
        let mut cfgs_map = HashMap::new();
        let mut cfgs_list = Vec::new();
        for CfgAndValue {
            cfg,
            eq_token: _,
            value,
        } in contents.call(Punctuated::<CfgAndValue, Token![,]>::parse_terminated)?
        {
            match cfgs_map.entry(cfg) {
                Entry::Occupied(_) => {}
                Entry::Vacant(entry) => {
                    cfgs_list.push(entry.key().clone());
                    entry.insert(value.value);
                }
            }
        }
        Ok(Self {
            bracket,
            cfgs_map,
            cfgs_list,
        })
    }
}

impl Parse for Cfgs<()> {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let contents;
        let bracket = bracketed!(contents in input);
        let mut cfgs_map = HashMap::new();
        let mut cfgs_list = Vec::new();
        for cfg in contents.call(Punctuated::<Cfg, Token![,]>::parse_terminated)? {
            match cfgs_map.entry(cfg) {
                Entry::Occupied(_) => {}
                Entry::Vacant(entry) => {
                    cfgs_list.push(entry.key().clone());
                    entry.insert(());
                }
            }
        }
        Ok(Self {
            bracket,
            cfgs_map,
            cfgs_list,
        })
    }
}

impl ToTokens for Cfgs<()> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let Self {
            bracket,
            cfgs_map: _,
            cfgs_list,
        } = self;
        bracket.surround(tokens, |tokens| {
            for cfg in cfgs_list {
                cfg.to_tokens(tokens);
                <Token![,]>::default().to_tokens(tokens);
            }
        });
    }
}
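
One more sketch that is not in the diff: the two bracketed list forms that Cfgs handles. Cfgs<()> is the still-unevaluated list that gets spliced into `__cfg_expansion_helper!` (each entry re-emitted with a trailing comma), and Cfgs<bool> is the evaluated list handed back to `__after_cfg_expansion`. The entries below are invented for illustration.

#[cfg(test)]
mod cfgs_sketch {
    use super::*;
    use quote::{quote, ToTokens};

    #[test]
    fn cfgs_list_forms() {
        // Unevaluated form, e.g. what collect_cfgs would gather from an item.
        let pending: Cfgs<()> =
            syn::parse2(quote! { [cfg(feature = "x"), cfg(not(test)),] }).unwrap();
        assert_eq!(pending.cfgs_list.len(), 2);
        // Re-emits every entry followed by a comma, inside the brackets.
        let _reemitted = pending.to_token_stream();

        // Evaluated form, with one `= true` / `= false` verdict per entry.
        let resolved: Cfgs<bool> =
            syn::parse2(quote! { [cfg(feature = "x") = true, cfg(not(test)) = false,] })
                .unwrap();
        assert_eq!(resolved.cfgs_map.values().filter(|v| **v).count(), 1);
    }
}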

fn handle_top_level_attr(item: Item) -> syn::Result<TokenStream> {
    match item {
        Item::Enum(item) => hdl_enum::hdl_enum(item),
        Item::Struct(item) => hdl_bundle::hdl_bundle(item),
@@ -921,3 +1196,40 @@ pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream
        )),
    }
}

fn generate_cfg_expansion_tokens(input: TokenStream) -> syn::Result<TokenStream> {
    let item = syn::parse2::<Item>(input)?;
    let cfgs = process_cfg::collect_cfgs(item.clone())?;
    Ok(quote! {
        ::fayalite::__cfg_expansion_helper! {
            []
            #cfgs
            ::fayalite::__after_cfg_expansion { #item }
        }
    })
}

pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
    let kw = kw::hdl_module::default();
    generate_cfg_expansion_tokens(quote! { #[#kw(#attr)] #item })
}

pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
    let kw = kw::hdl::default();
    generate_cfg_expansion_tokens(quote! { #[#kw(#attr)] #item })
}

pub fn after_cfg_expansion(input: TokenStream) -> syn::Result<TokenStream> {
    syn::parse::Parser::parse2(
        |input: ParseStream| -> syn::Result<TokenStream> {
            let cfgs = input.parse()?;
            let item = input.parse()?;
            let item = process_cfg::process_cfgs(item, cfgs)?;
            Ok(item
                .map(handle_top_level_attr)
                .transpose()?
                .unwrap_or_default())
        },
        input,
    )
}
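
For orientation (commentary, not part of the diff): with this change, hdl_module and hdl_attr no longer expand the item directly. They emit an invocation of `::fayalite::__cfg_expansion_helper!` carrying the cfgs collected from the item, and the real expansion is deferred to `::fayalite::__after_cfg_expansion`, which lands back in after_cfg_expansion above. A sketch of the emitted shape, mirroring the `Ok(quote! { ... })` in generate_cfg_expansion_tokens with a made-up cfg written out in place of the `#cfgs` interpolation:

// Illustration only; the concrete cfg list is invented.
fn emitted_shape_sketch(item_tokens: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
    quote::quote! {
        ::fayalite::__cfg_expansion_helper! {
            // cfgs evaluated so far: starts out empty
            []
            // cfgs still to evaluate, as collected from the item (made up here)
            [cfg(feature = "x"),]
            // invoked once every cfg above carries a = true / = false verdict
            ::fayalite::__after_cfg_expansion { #item_tokens }
        }
    }
}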
crates/fayalite-proc-macros-impl/src/process_cfg.rs (new file, 1706 lines)
File diff suppressed because it is too large
@@ -27,3 +27,12 @@ pub fn hdl(
        Err(err) => err.into_compile_error().into(),
    }
}

#[doc(hidden)]
#[proc_macro]
pub fn __after_cfg_expansion(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
    match fayalite_proc_macros_impl::after_cfg_expansion(input.into()) {
        Ok(retval) => retval.into(),
        Err(err) => err.into_compile_error().into(),
    }
}
@@ -5,6 +5,8 @@ use std::{env, fs, path::Path};

fn main() {
    println!("cargo::rustc-check-cfg=cfg(todo)");
    println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)");
    println!("cargo::rustc-cfg=cfg_true_for_tests");
    let path = "visit_types.json";
    println!("cargo::rerun-if-changed={path}");
    println!("cargo::rerun-if-changed=build.rs");
@@ -11,6 +11,58 @@ extern crate self as fayalite;
#[doc(hidden)]
pub use std as __std;

#[doc(hidden)]
#[macro_export]
macro_rules! __cfg_expansion_helper {
    (
        [
            $($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
        ]
        [
            $cfg:ident($($expr:tt)*),
            $($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)*
        ]
        $after_evaluation:path {$($after_evaluation_args:tt)*}
    ) => {
        #[$cfg($($expr)*)]
        $crate::__cfg_expansion_helper! {
            [
                $($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
                $cfg($($expr)*) = true,
            ]
            [
                $($unevaluated_cfgs($($unevaluated_exprs)*),)*
            ]
            $after_evaluation {$($after_evaluation_args)*}
        }
        #[$cfg(not($($expr)*))]
        $crate::__cfg_expansion_helper! {
            [
                $($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
                $cfg($($expr)*) = false,
            ]
            [
                $($unevaluated_cfgs($($unevaluated_exprs)*),)*
            ]
            $after_evaluation {$($after_evaluation_args)*}
        }
    };
    (
        [
            $($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
        ]
        []
        $after_evaluation:path {$($after_evaluation_args:tt)*}
    ) => {
        $after_evaluation! {
            [
                $($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
            ]
            $($after_evaluation_args)*
        }
    };
}
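
A worked example of the expansion protocol, not part of the diff: the helper evaluates one cfg per step by emitting itself twice, once under `#[cfg(...)]` and once under `#[cfg(not(...))]`, so exactly one copy survives, and when the unevaluated list is empty it forwards the recorded verdicts to the payload macro. The payload macro below is invented for demonstration; the real payload is `::fayalite::__after_cfg_expansion`, and the snippet assumes a crate depending on fayalite.

// Demonstration payload standing in for __after_cfg_expansion: it records the
// verdicts it was handed and re-emits the item unchanged.
macro_rules! show_verdicts {
    ([$($cfg:ident($($expr:tt)*) = $verdict:ident,)*] $($item:tt)*) => {
        const CFG_VERDICTS: &[&str] = &[$(stringify!($verdict)),*];
        $($item)*
    };
}

// In a test build this expands to
//     const CFG_VERDICTS: &[&str] = &["true"];
//     struct Foo;
// and in a non-test build to the same with "false".
fayalite::__cfg_expansion_helper! {
    []
    [cfg(test),]
    show_verdicts { struct Foo; }
}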

#[doc(inline)]
/// The `#[hdl_module]` attribute is applied to a Rust function so that that function creates
/// a [`Module`][`::fayalite::module::Module`] when called.
@@ -23,6 +75,9 @@ pub use fayalite_proc_macros::hdl_module;
#[doc(inline)]
pub use fayalite_proc_macros::hdl;

#[doc(hidden)]
pub use fayalite_proc_macros::__after_cfg_expansion;

/// struct used as a placeholder when applying defaults
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct __;
@@ -4287,3 +4287,78 @@ circuit check_deduce_resets:
",
    };
}

#[hdl_module(outline_generated)]
pub fn check_cfgs<#[cfg(cfg_false_for_tests)] A, #[cfg(cfg_true_for_tests)] B>(
    #[cfg(cfg_false_for_tests)] a: A,
    #[cfg(cfg_true_for_tests)] b: B,
) {
    #[hdl]
    struct S<#[cfg(cfg_false_for_tests)] A, #[cfg(cfg_true_for_tests)] B> {
        #[cfg(cfg_false_for_tests)]
        a: A,
        #[cfg(cfg_true_for_tests)]
        b: B,
    }
    #[hdl]
    #[cfg(cfg_false_for_tests)]
    let i_a: A = m.input(a);
    #[hdl]
    #[cfg(cfg_true_for_tests)]
    let i_b: B = m.input(b);
    #[hdl]
    let w: S<UInt<8>> = wire();
    #[cfg(cfg_false_for_tests)]
    {
        #[hdl]
        let o_a: A = m.output(a);
        connect(o_a, w.a.cast_bits_to(a));
        connect(w.a, i_a.cast_to_bits(UInt::new_static()));
    }
    #[cfg(cfg_true_for_tests)]
    {
        #[hdl]
        let o_b: B = m.output(b);
        connect(o_b, w.b.cast_bits_to(b));
        connect(w.b, i_b.cast_to_bits(UInt::new_static()));
    }
}

#[test]
fn test_cfgs() {
    let _n = SourceLocation::normalize_files_for_tests();
    let m = check_cfgs(UInt::<8>);
    dbg!(m);
    #[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
    assert_export_firrtl! {
        m =>
        options: ExportOptions {
            simplify_enums: None,
            ..ExportOptions::default()
        },
        "/test/check_deduce_resets.fir": r"FIRRTL version 3.2.0
circuit check_deduce_resets:
    type Ty0 = {clk: Clock, rst: Reset}
    type Ty1 = {|A: Reset, B: AsyncReset, C: UInt<1>|}
    module check_deduce_resets: @[module-XXXXXXXXXX.rs 1:1]
        input cd: Ty0 @[module-XXXXXXXXXX.rs 2:1]
        input u8_in: UInt<8> @[module-XXXXXXXXXX.rs 4:1]
        output u8_out: UInt<8> @[module-XXXXXXXXXX.rs 6:1]
        input enum_in: Ty1 @[module-XXXXXXXXXX.rs 8:1]
        output enum_out: Ty1 @[module-XXXXXXXXXX.rs 9:1]
        output reset_out: Reset @[module-XXXXXXXXXX.rs 10:1]
        regreset my_reg: UInt<8>, cd.clk, cd.rst, UInt<8>(0h0) @[module-XXXXXXXXXX.rs 3:1]
        connect my_reg, u8_in @[module-XXXXXXXXXX.rs 5:1]
        connect u8_out, my_reg @[module-XXXXXXXXXX.rs 7:1]
        connect reset_out, cd.rst @[module-XXXXXXXXXX.rs 11:1]
        match enum_in: @[module-XXXXXXXXXX.rs 12:1]
            A(_match_arm_value):
                connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(A, cd.rst) @[module-XXXXXXXXXX.rs 13:1]
                connect reset_out, _match_arm_value @[module-XXXXXXXXXX.rs 14:1]
            B(_match_arm_value_1):
                connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(B, _match_arm_value_1) @[module-XXXXXXXXXX.rs 15:1]
            C(_match_arm_value_2):
                connect enum_out, {|A: Reset, B: AsyncReset, C: UInt<1>|}(C, _match_arm_value_2) @[module-XXXXXXXXXX.rs 16:1]
",
    };
}