1
0
Fork 0

Compare commits

...

10 commits

48 changed files with 6099 additions and 2161 deletions

View file

@ -21,4 +21,4 @@ jobs:
- run: cargo test --doc --features=unstable-doc
- run: cargo doc --features=unstable-doc
- run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
- run: cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --device xc7a100ticsg324-1L -o target/blinky-out --clock-frequency=$((1000*1000*100))
- run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out

31
Cargo.lock generated
View file

@ -319,6 +319,7 @@ dependencies = [
"jobslot",
"num-bigint",
"num-traits",
"ordered-float",
"petgraph",
"serde",
"serde_json",
@ -524,6 +525,17 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "ordered-float"
version = "5.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d"
dependencies = [
"num-traits",
"rand",
"serde",
]
[[package]]
name = "petgraph"
version = "0.8.1"
@ -576,6 +588,25 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"rand_core",
"serde",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"serde",
]
[[package]]
name = "rustix"
version = "0.38.31"

View file

@ -30,6 +30,7 @@ indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.23"
num-bigint = "0.4.6"
num-traits = "0.2.16"
ordered-float = { version = "5.1.0", features = ["serde"] }
petgraph = "0.8.1"
prettyplease = "0.2.20"
proc-macro2 = "1.0.83"

View file

@ -8,6 +8,43 @@ Fayalite is a library for designing digital hardware -- a hardware description l
[FIRRTL]: https://github.com/chipsalliance/firrtl-spec
# Building the [Blinky example] for the Arty A7 100T on Linux
[Blinky example]: crates/fayalite/examples/blinky.rs
This uses the container image containing all the external programs and files that Fayalite needs to build for FPGAs; the sources for the container image are in https://git.libre-chip.org/libre-chip/fayalite-deps
Steps:
Install podman (or docker).
Run:
```bash
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out
```
To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS:
[openFPGALoader]: https://github.com/trabucayre/openFPGALoader
On Debian 12:
```bash
sudo apt update && sudo apt install openfpgaloader
```
Then program the FPGA:
```bash
sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit
```
This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert when the board is power-cycled.
To program the Flash also, so it stays programmed when power-cycling the board:
```bash
sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit
```
# Funding
## NLnet Grants

View file

@ -66,6 +66,7 @@ mod kw {
}
custom_keyword!(__evaluated_cfgs);
custom_keyword!(add_platform_io);
custom_keyword!(all);
custom_keyword!(any);
custom_keyword!(cfg);

View file

@ -4,7 +4,7 @@ use crate::{
Errors, HdlAttr, PairsIterExt,
hdl_type_common::{ParsedGenerics, SplitForImpl},
kw,
module::transform_body::{HdlLet, HdlLetKindIO},
module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO},
options,
};
use proc_macro2::TokenStream;
@ -39,7 +39,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res
if name == "m" {
Err(Error::new_spanned(
name,
"name conflicts with implicit `m: &mut ModuleBuilder<_>`",
"name conflicts with implicit `m: &ModuleBuilder`",
))
} else {
Ok(())
@ -67,7 +67,7 @@ struct ModuleFnModule {
vis: Visibility,
sig: Signature,
block: Box<Block>,
struct_generics: ParsedGenerics,
struct_generics: Option<ParsedGenerics>,
the_struct: TokenStream,
}
@ -290,7 +290,7 @@ impl ModuleFn {
paren_token,
body,
} => {
debug_assert!(io.is_empty());
debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty()));
return Ok(Self(ModuleFnImpl::Fn {
attrs,
config_options: HdlAttr {
@ -322,6 +322,21 @@ impl ModuleFn {
body,
},
};
let io = match io {
ModuleIOOrAddPlatformIO::ModuleIO(io) => io,
ModuleIOOrAddPlatformIO::AddPlatformIO => {
return Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
attrs,
config_options,
module_kind: module_kind.unwrap(),
vis,
sig,
block,
struct_generics: None,
the_struct: TokenStream::new(),
})));
}
};
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
struct_generics.split_for_impl();
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
@ -364,7 +379,7 @@ impl ModuleFn {
vis,
sig,
block,
struct_generics,
struct_generics: Some(struct_generics),
the_struct,
})))
}
@ -433,9 +448,14 @@ impl ModuleFn {
ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal },
};
let fn_name = &outer_sig.ident;
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
struct_generics.split_for_impl();
let struct_ty = quote! {#fn_name #struct_type_generics};
let struct_ty = match struct_generics {
Some(struct_generics) => {
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
struct_generics.split_for_impl();
quote! {#fn_name #struct_type_generics}
}
None => quote! {::fayalite::bundle::Bundle},
};
body_sig.ident = parse_quote! {__body};
body_sig
.inputs

View file

@ -39,6 +39,7 @@ options! {
pub(crate) enum LetFnKind {
Input(input),
Output(output),
AddPlatformIO(add_platform_io),
Instance(instance),
RegBuilder(reg_builder),
Wire(wire),
@ -216,6 +217,49 @@ impl HdlLetKindToTokens for HdlLetKindInstance {
}
}
// Parsed form of the `m.add_platform_io(<expr>)` right-hand side of an
// `#[hdl] let`: the module's whole IO bundle is supplied by a single
// platform-provided builder expression instead of individual
// `m.input()`/`m.output()` declarations. Token fields keep their spans for
// error reporting and re-emission.
#[derive(Clone, Debug)]
pub(crate) struct HdlLetKindAddPlatformIO {
pub(crate) m: kw::m,
pub(crate) dot_token: Token![.],
pub(crate) add_platform_io: kw::add_platform_io,
pub(crate) paren: Paren,
// the single call argument — presumably a `PlatformIOBuilder` expression;
// confirm against the runtime `add_platform_io` signature
pub(crate) platform_io_builder: Box<Expr>,
}
// `m.add_platform_io(...)` carries no embedded type annotations to resolve,
// so type parsing is an identity transform (a plain clone).
impl ParseTypes<Self> for HdlLetKindAddPlatformIO {
fn parse_types(input: &mut Self, _parser: &mut TypesParser<'_>) -> Result<Self, ParseFailed> {
Ok(input.clone())
}
}
// Derive the crate's fold support for each field via the `impl_fold!` helper
// macro, mirroring the other `HdlLetKind*` structs in this file.
impl_fold! {
struct HdlLetKindAddPlatformIO<> {
m: kw::m,
dot_token: Token![.],
add_platform_io: kw::add_platform_io,
paren: Paren,
platform_io_builder: Box<Expr>,
}
}
impl HdlLetKindToTokens for HdlLetKindAddPlatformIO {
// an `add_platform_io` let has no type ascription, so nothing is emitted
// for the type position
fn ty_to_tokens(&self, _tokens: &mut TokenStream) {}
// re-emits the parsed call verbatim: `m.add_platform_io(<expr>)`
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
let Self {
m,
dot_token,
add_platform_io,
paren,
platform_io_builder,
} = self;
m.to_tokens(tokens);
dot_token.to_tokens(tokens);
add_platform_io.to_tokens(tokens);
paren.surround(tokens, |tokens| platform_io_builder.to_tokens(tokens));
}
}
#[derive(Clone, Debug)]
pub(crate) struct RegBuilderClockDomain {
pub(crate) dot_token: Token![.],
@ -711,6 +755,7 @@ impl HdlLetKindMemory {
#[derive(Clone, Debug)]
pub(crate) enum HdlLetKind<IOType = ParsedType> {
IO(HdlLetKindIO<ModuleIOKind, IOType>),
AddPlatformIO(HdlLetKindAddPlatformIO),
Incomplete(HdlLetKindIncomplete),
Instance(HdlLetKindInstance),
RegBuilder(HdlLetKindRegBuilder),
@ -721,6 +766,7 @@ pub(crate) enum HdlLetKind<IOType = ParsedType> {
impl_fold! {
enum HdlLetKind<IOType,> {
IO(HdlLetKindIO<ModuleIOKind, IOType>),
AddPlatformIO(HdlLetKindAddPlatformIO),
Incomplete(HdlLetKindIncomplete),
Instance(HdlLetKindInstance),
RegBuilder(HdlLetKindRegBuilder),
@ -736,6 +782,9 @@ impl<T: ParseTypes<I>, I> ParseTypes<HdlLetKind<I>> for HdlLetKind<T> {
) -> Result<Self, ParseFailed> {
match input {
HdlLetKind::IO(input) => ParseTypes::parse_types(input, parser).map(HdlLetKind::IO),
HdlLetKind::AddPlatformIO(input) => {
ParseTypes::parse_types(input, parser).map(HdlLetKind::AddPlatformIO)
}
HdlLetKind::Incomplete(input) => {
ParseTypes::parse_types(input, parser).map(HdlLetKind::Incomplete)
}
@ -861,6 +910,23 @@ impl HdlLetKindParse for HdlLetKind<Type> {
ModuleIOKind::Output(output),
)
.map(Self::IO),
LetFnKind::AddPlatformIO((add_platform_io,)) => {
if let Some(parsed_ty) = parsed_ty {
return Err(Error::new_spanned(
parsed_ty.1,
"type annotation not allowed for instance",
));
}
let (m, dot_token) = unwrap_m_dot(m_dot, kind)?;
let paren_contents;
Ok(Self::AddPlatformIO(HdlLetKindAddPlatformIO {
m,
dot_token,
add_platform_io,
paren: parenthesized!(paren_contents in input),
platform_io_builder: paren_contents.call(parse_single_fn_arg)?,
}))
}
LetFnKind::Instance((instance,)) => {
if let Some(parsed_ty) = parsed_ty {
return Err(Error::new_spanned(
@ -936,6 +1002,7 @@ impl HdlLetKindToTokens for HdlLetKind {
fn ty_to_tokens(&self, tokens: &mut TokenStream) {
match self {
HdlLetKind::IO(v) => v.ty_to_tokens(tokens),
HdlLetKind::AddPlatformIO(v) => v.ty_to_tokens(tokens),
HdlLetKind::Incomplete(v) => v.ty_to_tokens(tokens),
HdlLetKind::Instance(v) => v.ty_to_tokens(tokens),
HdlLetKind::RegBuilder(v) => v.ty_to_tokens(tokens),
@ -947,6 +1014,7 @@ impl HdlLetKindToTokens for HdlLetKind {
fn expr_to_tokens(&self, tokens: &mut TokenStream) {
match self {
HdlLetKind::IO(v) => v.expr_to_tokens(tokens),
HdlLetKind::AddPlatformIO(v) => v.expr_to_tokens(tokens),
HdlLetKind::Incomplete(v) => v.expr_to_tokens(tokens),
HdlLetKind::Instance(v) => v.expr_to_tokens(tokens),
HdlLetKind::RegBuilder(v) => v.expr_to_tokens(tokens),
@ -1149,7 +1217,7 @@ impl<T: ToString> ToTokens for ImplicitName<T> {
struct Visitor<'a> {
module_kind: Option<ModuleKind>,
errors: Errors,
io: Vec<ModuleIO>,
io: ModuleIOOrAddPlatformIO,
block_depth: usize,
parsed_generics: &'a ParsedGenerics,
}
@ -1289,7 +1357,81 @@ impl Visitor<'_> {
}),
semi_token: hdl_let.semi_token,
};
self.io.push(hdl_let);
match &mut self.io {
ModuleIOOrAddPlatformIO::ModuleIO(io) => io.push(hdl_let),
ModuleIOOrAddPlatformIO::AddPlatformIO => {
self.errors.error(
kind,
"can't have other inputs/outputs in a module using m.add_platform_io()",
);
}
}
let_stmt
}
// Lowers `#[hdl] let io = m.add_platform_io(builder);` into a plain `let`
// whose initializer calls `m.add_platform_io("io", builder)` — the binding's
// name is injected as an implicit first argument (via `ImplicitName`) so the
// runtime knows what to call the IO bundle. Also enforces that
// `add_platform_io` is the module's one and only IO declaration.
fn process_hdl_let_add_platform_io(
&mut self,
hdl_let: HdlLet<HdlLetKindAddPlatformIO>,
) -> Local {
let HdlLet {
mut attrs,
hdl_attr: _,
let_token,
mut_token,
ref name,
eq_token,
kind:
HdlLetKindAddPlatformIO {
m,
dot_token,
add_platform_io,
paren,
platform_io_builder,
},
semi_token,
} = hdl_let;
// rebuild the call, inserting the stringified binding name before the
// user's builder expression; spans point at the binding name so
// diagnostics land there
let mut expr = quote! {#m #dot_token #add_platform_io};
paren.surround(&mut expr, |expr| {
let name_str = ImplicitName {
name,
span: name.span(),
};
quote_spanned! {name.span()=>
#name_str, #platform_io_builder
}
.to_tokens(expr);
});
// presumably reports an error if used outside a module body — confirm
// against require_module's definition
self.require_module(add_platform_io);
// the generated binding may legitimately go unused by the module body
attrs.push(parse_quote_spanned! {let_token.span=>
#[allow(unused_variables)]
});
let let_stmt = Local {
attrs,
let_token,
pat: parse_quote! { #mut_token #name },
init: Some(LocalInit {
eq_token,
expr: parse_quote! { #expr },
diverge: None,
}),
semi_token,
};
// add_platform_io is exclusive: flag every previously-declared port, or a
// second add_platform_io call
match &mut self.io {
ModuleIOOrAddPlatformIO::ModuleIO(io) => {
for io in io {
self.errors.error(
io.kind.kind,
"can't have other inputs/outputs in a module using m.add_platform_io()",
);
}
}
ModuleIOOrAddPlatformIO::AddPlatformIO => {
self.errors.error(
add_platform_io,
"can't use m.add_platform_io() more than once in a single module",
);
}
}
// record the state unconditionally so any later m.input()/m.output()
// also gets flagged
self.io = ModuleIOOrAddPlatformIO::AddPlatformIO;
let_stmt
}
fn process_hdl_let_instance(&mut self, hdl_let: HdlLet<HdlLetKindInstance>) -> Local {
@ -1510,6 +1652,7 @@ impl Visitor<'_> {
}
the_match! {
IO => process_hdl_let_io,
AddPlatformIO => process_hdl_let_add_platform_io,
Incomplete => process_hdl_let_incomplete,
Instance => process_hdl_let_instance,
RegBuilder => process_hdl_let_reg_builder,
@ -1753,15 +1896,20 @@ impl Fold for Visitor<'_> {
}
}
// Tracks how a module's IO is declared while transforming its body: either an
// explicit list of `m.input()`/`m.output()` ports, or a single
// `m.add_platform_io()` call. The two styles are mutually exclusive (the
// processing functions report errors on any mix).
pub(crate) enum ModuleIOOrAddPlatformIO {
ModuleIO(Vec<ModuleIO>),
AddPlatformIO,
}
pub(crate) fn transform_body(
module_kind: Option<ModuleKind>,
mut body: Box<Block>,
parsed_generics: &ParsedGenerics,
) -> syn::Result<(Box<Block>, Vec<ModuleIO>)> {
) -> syn::Result<(Box<Block>, ModuleIOOrAddPlatformIO)> {
let mut visitor = Visitor {
module_kind,
errors: Errors::new(),
io: vec![],
io: ModuleIOOrAddPlatformIO::ModuleIO(vec![]),
block_depth: 0,
parsed_generics,
};

View file

@ -26,6 +26,7 @@ hashbrown.workspace = true
jobslot.workspace = true
num-bigint.workspace = true
num-traits.workspace = true
ordered-float.workspace = true
petgraph.workspace = true
serde_json.workspace = true
serde.workspace = true

View file

@ -1,55 +1,64 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use fayalite::{
build::{ToArgs, WriteArgs},
prelude::*,
};
use fayalite::prelude::*;
#[hdl_module]
fn blinky(clock_frequency: u64) {
#[hdl]
let clk: Clock = m.input();
#[hdl]
let rst: SyncReset = m.input();
fn blinky(platform_io_builder: PlatformIOBuilder<'_>) {
let clk_input =
platform_io_builder.peripherals_with_type::<peripherals::ClockInput>()[0].use_peripheral();
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
let cd = #[hdl]
ClockDomain {
clk,
rst: rst.to_reset(),
clk: clk_input.clk,
rst,
};
let max_value = clock_frequency / 2 - 1;
let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1;
let int_ty = UInt::range_inclusive(0..=max_value);
#[hdl]
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
#[hdl]
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
#[hdl]
let rgb_output_reg = reg_builder().clock_domain(cd).reset(
#[hdl]
peripherals::RgbLed {
r: false,
g: false,
b: false,
},
);
#[hdl]
if counter_reg.cmp_eq(max_value) {
connect_any(counter_reg, 0u8);
connect(output_reg, !output_reg);
connect(rgb_output_reg.r, !rgb_output_reg.r);
#[hdl]
if rgb_output_reg.r {
connect(rgb_output_reg.g, !rgb_output_reg.g);
#[hdl]
if rgb_output_reg.g {
connect(rgb_output_reg.b, !rgb_output_reg.b);
}
}
} else {
connect_any(counter_reg, counter_reg + 1_hdl_u1);
}
#[hdl]
let led: Bool = m.output();
connect(led, output_reg);
}
#[derive(clap::Args, Clone, PartialEq, Eq, Hash, Debug)]
struct ExtraArgs {
/// clock frequency in hertz
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
clock_frequency: u64,
}
impl ToArgs for ExtraArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self { clock_frequency } = self;
args.write_arg(format_args!("--clock-frequency={clock_frequency}"));
for led in platform_io_builder.peripherals_with_type::<peripherals::Led>() {
if let Ok(led) = led.try_use_peripheral() {
connect(led.on, output_reg);
}
}
for rgb_led in platform_io_builder.peripherals_with_type::<peripherals::RgbLed>() {
if let Ok(rgb_led) = rgb_led.try_use_peripheral() {
connect(rgb_led, rgb_output_reg);
}
}
#[hdl]
let io = m.add_platform_io(platform_io_builder);
}
fn main() {
BuildCli::main(|_cli, ExtraArgs { clock_frequency }| {
Ok(JobParams::new(blinky(clock_frequency), "blinky"))
<BuildCli>::main("blinky", |_, platform, _| {
Ok(JobParams::new(platform.wrap_main_module(blinky)))
});
}

View file

@ -0,0 +1,169 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use clap::builder::TypedValueParser;
use fayalite::{
build::{ToArgs, WriteArgs},
prelude::*,
};
use ordered_float::NotNan;
// A transmit-only UART module that repeatedly sends `message` out of every
// UART peripheral the platform provides.
//
// `divisor` is clock cycles per bit (clock frequency / baud rate). It is
// realized as a fractional divider: an accumulator adds 2^16 every clock and
// emits a baud-rate tick each time the running sum reaches
// `round(divisor * 2^16)`, so non-integer divisors stay accurate over time.
#[hdl_module]
fn tx_only_uart(
platform_io_builder: PlatformIOBuilder<'_>,
divisor: f64,
message: impl AsRef<[u8]>,
) {
let message = message.as_ref();
// use the platform's first clock input and first reset as the clock domain
let clk_input =
platform_io_builder.peripherals_with_type::<peripherals::ClockInput>()[0].use_peripheral();
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
let cd = #[hdl]
ClockDomain {
clk: clk_input.clk,
rst,
};
// fractional baud divider: tick when remainder + 2^16 >= round(divisor * 2^16)
let numerator = 1u128 << 16;
let denominator = (divisor * numerator as f64).round() as u128;
#[hdl]
let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128);
#[hdl]
let sum: UInt<128> = wire();
connect_any(sum, remainder_reg + numerator);
#[hdl]
let tick_reg = reg_builder().clock_domain(cd).reset(false);
connect(tick_reg, false);
#[hdl]
let next_remainder: UInt<128> = wire();
connect(remainder_reg, next_remainder);
#[hdl]
if sum.cmp_ge(denominator) {
connect_any(next_remainder, sum - denominator);
connect(tick_reg, true);
} else {
connect(next_remainder, sum);
}
// UART bit counter: 0 = start bit, 1..=8 = data bits (LSB first), >8 = stop bit
#[hdl]
let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4);
#[hdl]
let next_uart_state: UInt<4> = wire();
connect_any(next_uart_state, uart_state_reg + 1u8);
// message bytes live in a memory configured with a 4-cycle read latency
#[hdl]
let mut message_mem = memory_with_init(message);
message_mem.read_latency(4);
// ones shift in from the LSB each cycle, so bit N of startup_reg stays 0
// for the first N cycles — used below to wait out the memory read latency
#[hdl]
let startup_reg = reg_builder().clock_domain(cd).reset(0u128);
connect_any(startup_reg, (startup_reg << 1) | 1u8);
let message_read = message_mem.new_read_port();
connect(message_read.clk, cd.clk);
connect(message_read.en, true);
#[hdl]
let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32);
connect_any(message_read.addr, addr_reg);
#[hdl]
let next_addr: UInt<32> = wire();
connect(next_addr, addr_reg);
// tx resets to and idles at 1 (matches stop-bit level)
#[hdl]
let tx = reg_builder().clock_domain(cd).reset(true);
#[hdl]
if !startup_reg[message_mem.get_read_latency()] {
// hold the line idle until the first memory read has had time to complete
connect(next_uart_state, 0_hdl_u4);
connect(tx, true);
} else if uart_state_reg.cmp_eq(0_hdl_u4) {
connect(tx, false); // start bit
} else if uart_state_reg.cmp_le(8_hdl_u4) {
connect(
tx,
(message_read.data >> (uart_state_reg - 1_hdl_u4))[0].cast_to_static(),
); // data bit
} else {
connect(tx, true); // stop bit
connect(next_uart_state, 0_hdl_u4);
// advance to the next byte, wrapping back to 0 at the end of the message
let next_addr_val = addr_reg + 1u8;
#[hdl]
if next_addr_val.cmp_lt(message.len()) {
connect_any(next_addr, next_addr_val);
} else {
connect(next_addr, 0u32);
}
}
// state and address only advance on baud-rate ticks
#[hdl]
if tick_reg {
connect(uart_state_reg, next_uart_state);
connect(addr_reg, next_addr);
}
// drive every available UART peripheral's tx line with the same signal
for uart in platform_io_builder.peripherals_with_type::<peripherals::Uart>() {
connect(uart.use_peripheral().tx, tx);
}
#[hdl]
let io = m.add_platform_io(platform_io_builder);
}
/// Parses a `--baud-rate` CLI value into a [`NotNan<f64>`].
///
/// Accepts only values that parse as a floating-point number and are both
/// finite and strictly positive; anything else produces an error message
/// for clap to display.
fn parse_baud_rate(
v: impl AsRef<str>,
) -> Result<NotNan<f64>, Box<dyn std::error::Error + Send + Sync>> {
let parsed = match v.as_ref().parse::<NotNan<f64>>() {
Ok(value) => value,
Err(_) => {
return Err(
"invalid baud rate, must be a finite positive floating-point value".into(),
);
}
};
if parsed.is_finite() && *parsed > 0.0 {
Ok(parsed)
} else {
Err("baud rate must be finite and positive".into())
}
}
// CLI arguments specific to the tx_only_uart example. Fields are documented
// with `//` comments on purpose: `///` doc comments on clap::Args fields
// become user-visible help text and would change CLI behavior.
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
pub struct ExtraArgs {
// target UART baud rate; validated by parse_baud_rate (finite, > 0)
#[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")]
pub baud_rate: NotNan<f64>,
// message repeatedly transmitted over the UART; must be non-empty
#[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())]
pub message: String,
}
// Re-serializes ExtraArgs into command-line form so the invocation can be
// reproduced; destructuring `Self` forces a compile error if a new field is
// added but not written out here.
impl ToArgs for ExtraArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self { baud_rate, message } = self;
args.write_display_arg(format_args!("--baud-rate={baud_rate}"));
// presumably emits `--message=<value>` as one argument so embedded
// spaces survive round-tripping — verify against WriteArgs
args.write_long_option_eq("message", message);
}
}
// Entry point: builds the tx_only_uart module for whatever platform the CLI
// selects, converting the user's baud rate into a clock-cycle divisor and
// rejecting rates the divider can't realize.
fn main() {
type Cli = BuildCli<ExtraArgs>;
Cli::main(
"tx_only_uart",
|_, platform, ExtraArgs { baud_rate, message }| {
Ok(JobParams::new(platform.try_wrap_main_module(|io| {
// clock cycles per bit, from the platform clock's actual frequency
let clk = io.peripherals_with_type::<peripherals::ClockInput>()[0].ty();
let divisor = clk.frequency() / *baud_rate;
// surface divisor problems as clap validation errors on --baud-rate
let baud_rate_error = |msg| {
<Cli as clap::CommandFactory>::command()
.error(clap::error::ErrorKind::ValueValidation, msg)
};
const HUGE_DIVISOR: f64 = u64::MAX as f64;
// NOTE(review): float literals in range patterns (`HUGE_DIVISOR..`,
// `4.0..`) are deprecated by rustc — consider rewriting as `if`
// comparisons. The `is_finite` guard arm must stay first so a NaN
// divisor can't reach the range arms.
match divisor {
divisor if !divisor.is_finite() => {
return Err(baud_rate_error("bad baud rate"));
}
HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")),
4.0.. => {}
// divisor < 4 cycles/bit is too fast to generate reliably
_ => return Err(baud_rate_error("baud rate is too large")),
}
Ok(tx_only_uart(io, divisor, message))
})?))
},
);
}

View file

@ -145,52 +145,73 @@ pub struct DocStringAnnotation {
macro_rules! make_annotation_enum {
(
#[$non_exhaustive:ident]
$(#[$meta:meta])*
$vis:vis enum $Annotation:ident {
$vis:vis enum $AnnotationEnum:ident {
$($Variant:ident($T:ty),)*
}
) => {
crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive);
#[$non_exhaustive]
$(#[$meta])*
$vis enum $Annotation {
#[derive(Clone, PartialEq, Eq, Hash)]
$vis enum $AnnotationEnum {
$($Variant($T),)*
}
$(impl IntoAnnotations for $T {
type IntoAnnotations = [$Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(self)]
impl std::fmt::Debug for $AnnotationEnum {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
$(Self::$Variant(v) => v.fmt(f),)*
}
}
}
impl IntoAnnotations for &'_ $T {
type IntoAnnotations = [$Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(*self)]
$(impl From<$T> for crate::annotations::Annotation {
fn from(v: $T) -> Self {
$AnnotationEnum::$Variant(v).into()
}
}
impl IntoAnnotations for &'_ mut $T {
type IntoAnnotations = [$Annotation; 1];
impl crate::annotations::IntoAnnotations for $T {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(*self)]
[self.into()]
}
}
impl IntoAnnotations for Box<$T> {
type IntoAnnotations = [$Annotation; 1];
impl crate::annotations::IntoAnnotations for &'_ $T {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[$Annotation::$Variant(*self)]
[crate::annotations::Annotation::from(self.clone())]
}
}
impl crate::annotations::IntoAnnotations for &'_ mut $T {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[crate::annotations::Annotation::from(self.clone())]
}
}
impl crate::annotations::IntoAnnotations for Box<$T> {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[crate::annotations::Annotation::from(*self)]
}
})*
};
(@require_non_exhaustive non_exhaustive) => {};
}
pub(crate) use make_annotation_enum;
make_annotation_enum! {
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum Annotation {
DontTouch(DontTouchAnnotation),
@ -199,8 +220,7 @@ make_annotation_enum! {
BlackBoxPath(BlackBoxPathAnnotation),
DocString(DocStringAnnotation),
CustomFirrtl(CustomFirrtlAnnotation),
XdcLocation(crate::build::vendor::xilinx::XdcLocationAnnotation),
XdcIOStandard(crate::build::vendor::xilinx::XdcIOStandardAnnotation),
Xilinx(crate::vendor::xilinx::XilinxAnnotation),
}
}

File diff suppressed because it is too large Load diff

View file

@ -3,9 +3,9 @@
use crate::{
build::{
ArgsWriter, CommandParams, GetBaseJob, JobAndDependencies, JobAndKind,
JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind, JobKindAndArgs,
JobParams, ToArgs, WriteArgs, intern_known_utf8_path_buf,
ArgsWriter, CommandParams, GlobalParams, JobAndDependencies, JobAndKind,
JobArgsAndDependencies, JobDependencies, JobDependenciesHasBase, JobItem, JobItemName,
JobKind, JobKindAndArgs, JobParams, ToArgs, WriteArgs,
},
intern::{Intern, Interned},
util::{job_server::AcquiredJob, streaming_read_utf8::streaming_read_utf8},
@ -55,6 +55,25 @@ impl MaybeUtf8 {
MaybeUtf8::Binary(v) => v,
}
}
/// Reinterprets the underlying bytes as an [`OsStr`] without allocating.
///
/// On Unix and WASI every byte sequence is a valid `OsStr`, so this is a
/// zero-cost reinterpretation. On other platforms only valid-UTF-8
/// contents are supported (Windows uses WTF-8 internally, which this
/// deliberately doesn't implement).
///
/// # Panics
///
/// On platforms other than Unix/WASI, panics if the bytes aren't valid
/// UTF-8.
pub fn as_os_str(&self) -> &OsStr {
// each cfg'd block below returns, making the trailing UTF-8 fallback
// unreachable on those platforms
#![allow(unreachable_code)]
#[cfg(unix)]
{
return std::os::unix::ffi::OsStrExt::from_bytes(self.as_bytes());
}
#[cfg(target_os = "wasi")]
{
return std::os::wasi::ffi::OsStrExt::from_bytes(self.as_bytes());
}
// implementing WTF-8 is too much of a pain so don't have a special case for windows
if let Ok(s) = str::from_utf8(self.as_bytes()) {
return OsStr::new(s);
}
panic!("invalid UTF-8 conversion to OsStr is not implemented on this platform");
}
/// Reinterprets the underlying bytes as a [`Path`]; same platform caveats
/// (and possible panic) as `as_os_str`.
pub fn as_path(&self) -> &Path {
Path::new(self.as_os_str())
}
}
#[derive(Serialize, Deserialize)]
@ -107,31 +126,80 @@ impl From<String> for MaybeUtf8 {
}
}
impl From<PathBuf> for MaybeUtf8 {
fn from(value: PathBuf) -> Self {
Self::from(value.into_os_string().into_encoded_bytes())
}
}
// One cached job-output file as stored in the cache JSON: its path (`name`,
// possibly non-UTF-8 — hence MaybeUtf8) and raw contents. `Cow` lets
// serialization borrow contents while deserialization produces owned data.
// Serialized under the type name "File" per the serde(rename).
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Serialize, Deserialize)]
#[serde(rename = "File")]
pub struct ExternalJobCacheV2File<'a> {
pub name: MaybeUtf8,
pub contents: Cow<'a, MaybeUtf8>,
}
// All output files of a cached job, keyed by output path; (de)serialized as a
// sequence of `File` entries via the hand-written serde impls, since map keys
// here are paths rather than plain strings.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub struct ExternalJobCacheV2Files(pub BTreeMap<PathBuf, MaybeUtf8>);
// Serializes the map as a sequence of `File { name, contents }` entries
// instead of a JSON object — presumably because the PathBuf keys may be
// non-UTF-8 and most formats require string map keys; verify intent.
// Contents are borrowed (Cow::Borrowed), so no copies are made.
impl Serialize for ExternalJobCacheV2Files {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.collect_seq(
self.0
.iter()
.map(|(name, contents)| ExternalJobCacheV2File {
name: name.clone().into(),
contents: Cow::Borrowed(contents),
}),
)
}
}
// Inverse of the Serialize impl: reads a sequence of `File` entries and
// rebuilds the path-keyed map.
// NOTE(review): duplicate `name` entries silently overwrite earlier ones when
// collected into the BTreeMap — confirm that's acceptable for cache files.
impl<'de> Deserialize<'de> for ExternalJobCacheV2Files {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Ok(Self(
Vec::deserialize(deserializer)?
.into_iter()
.map(|ExternalJobCacheV2File { name, contents }| {
(name.as_path().to_path_buf(), contents.into_owned())
})
.collect(),
))
}
}
#[derive(Clone, PartialEq, Eq, Debug, Serialize, Deserialize)]
#[serde(rename = "ExternalJobCache")]
pub struct ExternalJobCacheV2 {
pub version: ExternalJobCacheVersion,
pub inputs_hash: blake3::Hash,
pub stdout_stderr: String,
pub result: Result<BTreeMap<String, MaybeUtf8>, String>,
pub result: Result<ExternalJobCacheV2Files, String>,
}
impl ExternalJobCacheV2 {
fn read_from_file(cache_json_path: Interned<str>) -> eyre::Result<Self> {
fn read_from_file(cache_json_path: Interned<Path>) -> eyre::Result<Self> {
let cache_str = std::fs::read_to_string(&*cache_json_path)
.wrap_err_with(|| format!("can't read {cache_json_path}"))?;
serde_json::from_str(&cache_str).wrap_err_with(|| format!("can't decode {cache_json_path}"))
.wrap_err_with(|| format!("can't read {cache_json_path:?}"))?;
serde_json::from_str(&cache_str)
.wrap_err_with(|| format!("can't decode {cache_json_path:?}"))
}
fn write_to_file(&self, cache_json_path: Interned<str>) -> eyre::Result<()> {
fn write_to_file(&self, cache_json_path: Interned<Path>) -> eyre::Result<()> {
let cache_str = serde_json::to_string_pretty(&self).expect("serialization can't fail");
std::fs::write(&*cache_json_path, cache_str)
.wrap_err_with(|| format!("can't write {cache_json_path}"))
.wrap_err_with(|| format!("can't write {cache_json_path:?}"))
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ExternalJobCaching {
cache_json_path: Interned<str>,
cache_json_path: Interned<Path>,
run_even_if_cached: bool,
}
@ -148,8 +216,8 @@ impl JobCacheHasher {
self.hash_size(bytes.len());
self.0.update(bytes);
}
fn hash_sized_str(&mut self, s: &str) {
self.hash_sized_bytes(s.as_bytes());
fn hash_sized_os_str(&mut self, s: &OsStr) {
self.hash_sized_bytes(s.as_encoded_bytes());
}
fn hash_iter<F: FnMut(&mut Self, I::Item), I: IntoIterator<IntoIter: ExactSizeIterator>>(
&mut self,
@ -193,8 +261,8 @@ fn write_file_atomically_no_clobber<F: FnOnce() -> C, C: AsRef<[u8]>>(
}
impl ExternalJobCaching {
pub fn get_cache_dir_from_output_dir(output_dir: &str) -> PathBuf {
Path::join(output_dir.as_ref(), ".fayalite-job-cache")
pub fn get_cache_dir_from_output_dir(output_dir: impl AsRef<Path>) -> PathBuf {
output_dir.as_ref().join(".fayalite-job-cache")
}
pub fn make_cache_dir(
cache_dir: impl AsRef<Path>,
@ -218,19 +286,18 @@ impl ExternalJobCaching {
})
}
pub fn new(
output_dir: &str,
output_dir: impl AsRef<Path>,
application_name: &str,
json_file_stem: &str,
json_file_stem: impl AsRef<OsStr>,
run_even_if_cached: bool,
) -> std::io::Result<Self> {
let cache_dir = Self::get_cache_dir_from_output_dir(output_dir);
Self::make_cache_dir(&cache_dir, application_name)?;
let mut cache_json_path = cache_dir;
cache_json_path.push(json_file_stem);
cache_json_path.push(json_file_stem.as_ref());
cache_json_path.set_extension("json");
let cache_json_path = intern_known_utf8_path_buf(cache_json_path);
Ok(Self {
cache_json_path,
cache_json_path: Path::intern_owned(cache_json_path),
run_even_if_cached,
})
}
@ -249,7 +316,7 @@ impl ExternalJobCaching {
fn run_from_cache(
self,
inputs_hash: blake3::Hash,
output_file_paths: impl IntoIterator<Item = Interned<str>>,
output_file_paths: impl IntoIterator<Item = Interned<Path>>,
) -> Result<Result<(), String>, ()> {
if self.run_even_if_cached {
return Err(());
@ -269,7 +336,7 @@ impl ExternalJobCaching {
match result {
Ok(outputs) => {
for output_file_path in output_file_paths {
let Some(output_data) = outputs.get(&*output_file_path) else {
let Some(output_data) = outputs.0.get(&*output_file_path) else {
if let Ok(true) = std::fs::exists(&*output_file_path) {
// assume the existing file is the correct one
continue;
@ -290,7 +357,7 @@ impl ExternalJobCaching {
}
}
fn make_command(
command_line: Interned<[Interned<str>]>,
command_line: Interned<[Interned<OsStr>]>,
) -> eyre::Result<std::process::Command> {
ensure!(!command_line.is_empty(), "command line must not be empty");
let mut cmd = std::process::Command::new(&*command_line[0]);
@ -300,26 +367,26 @@ impl ExternalJobCaching {
}
pub fn run<F: FnOnce(std::process::Command) -> eyre::Result<()>>(
self,
command_line: Interned<[Interned<str>]>,
input_file_paths: impl IntoIterator<Item = Interned<str>>,
output_file_paths: impl IntoIterator<Item = Interned<str>> + Clone,
command_line: Interned<[Interned<OsStr>]>,
input_file_paths: impl IntoIterator<Item = Interned<Path>>,
output_file_paths: impl IntoIterator<Item = Interned<Path>> + Clone,
run_fn: F,
) -> eyre::Result<()> {
let mut hasher = JobCacheHasher::default();
hasher.hash_iter(command_line.iter(), |hasher, arg| {
hasher.hash_sized_str(arg)
hasher.hash_sized_os_str(arg)
});
let mut input_file_paths =
Vec::<&str>::from_iter(input_file_paths.into_iter().map(Interned::into_inner));
Vec::<&Path>::from_iter(input_file_paths.into_iter().map(Interned::into_inner));
input_file_paths.sort_unstable();
input_file_paths.dedup();
hasher.try_hash_iter(
&input_file_paths,
|hasher, input_file_path| -> eyre::Result<()> {
hasher.hash_sized_str(input_file_path);
hasher.hash_sized_os_str(input_file_path.as_ref());
hasher.hash_sized_bytes(
&std::fs::read(input_file_path).wrap_err_with(|| {
format!("can't read job input file: {input_file_path}")
format!("can't read job input file: {input_file_path:?}")
})?,
);
Ok(())
@ -338,7 +405,7 @@ impl ExternalJobCaching {
let mut stdout_stderr = String::new();
let result = std::thread::scope(|scope| {
std::thread::Builder::new()
.name(format!("stdout:{}", command_line[0]))
.name(format!("stdout:{}", command_line[0].display()))
.spawn_scoped(scope, || {
let _ = streaming_read_utf8(std::io::BufReader::new(pipe_reader), |s| {
stdout_stderr.push_str(s);
@ -358,17 +425,19 @@ impl ExternalJobCaching {
inputs_hash,
stdout_stderr,
result: match &result {
Ok(()) => Ok(Result::from_iter(output_file_paths.into_iter().map(
|output_file_path: Interned<str>| -> eyre::Result<_> {
let output_file_path = &*output_file_path;
Ok((
String::from(output_file_path),
MaybeUtf8::from(std::fs::read(output_file_path).wrap_err_with(
|| format!("can't read job output file: {output_file_path}"),
)?),
))
},
))?),
Ok(()) => Ok(ExternalJobCacheV2Files(Result::from_iter(
output_file_paths.into_iter().map(
|output_file_path: Interned<Path>| -> eyre::Result<_> {
let output_file_path = &*output_file_path;
Ok((
PathBuf::from(output_file_path),
MaybeUtf8::from(std::fs::read(output_file_path).wrap_err_with(
|| format!("can't read job output file: {output_file_path:?}"),
)?),
))
},
),
)?)),
Err(e) => Err(format!("{e:#}")),
},
}
@ -377,9 +446,9 @@ impl ExternalJobCaching {
}
pub fn run_maybe_cached<F: FnOnce(std::process::Command) -> eyre::Result<()>>(
this: Option<Self>,
command_line: Interned<[Interned<str>]>,
input_file_paths: impl IntoIterator<Item = Interned<str>>,
output_file_paths: impl IntoIterator<Item = Interned<str>> + Clone,
command_line: Interned<[Interned<OsStr>]>,
input_file_paths: impl IntoIterator<Item = Interned<Path>>,
output_file_paths: impl IntoIterator<Item = Interned<Path>> + Clone,
run_fn: F,
) -> eyre::Result<()> {
match this {
@ -437,31 +506,22 @@ fn parse_which_result(
which_result: which::Result<PathBuf>,
program_name: impl Into<OsString>,
program_path_arg_name: impl FnOnce() -> String,
) -> Result<Interned<str>, ResolveProgramPathError> {
) -> Result<Interned<Path>, ResolveProgramPathError> {
let which_result = match which_result {
Ok(v) => v,
Err(e) => {
Err(inner) => {
return Err(ResolveProgramPathError {
inner: ResolveProgramPathErrorInner::Which(e),
inner,
program_name: program_name.into(),
program_path_arg_name: program_path_arg_name(),
});
}
};
Ok(str::intern_owned(
which_result
.into_os_string()
.into_string()
.map_err(|_| ResolveProgramPathError {
inner: ResolveProgramPathErrorInner::NotValidUtf8,
program_name: program_name.into(),
program_path_arg_name: program_path_arg_name(),
})?,
))
Ok(which_result.intern_deref())
}
impl clap::builder::TypedValueParser for ExternalProgramPathValueParser {
type Value = Interned<str>;
type Value = Interned<Path>;
fn parse_ref(
&self,
@ -495,34 +555,10 @@ pub struct ExternalCommandArgs<T: ExternalCommand> {
pub additional_args: T::AdditionalArgs,
}
#[derive(Clone)]
enum ResolveProgramPathErrorInner {
Which(which::Error),
NotValidUtf8,
}
impl fmt::Debug for ResolveProgramPathErrorInner {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Which(v) => v.fmt(f),
Self::NotValidUtf8 => f.write_str("NotValidUtf8"),
}
}
}
impl fmt::Display for ResolveProgramPathErrorInner {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Which(v) => v.fmt(f),
Self::NotValidUtf8 => f.write_str("path is not valid UTF-8"),
}
}
}
#[derive(Clone, Debug)]
pub struct ResolveProgramPathError {
inner: ResolveProgramPathErrorInner,
program_name: std::ffi::OsString,
inner: which::Error,
program_name: OsString,
program_path_arg_name: String,
}
@ -546,7 +582,7 @@ pub fn resolve_program_path(
program_name: Option<&OsStr>,
default_program_name: impl AsRef<OsStr>,
program_path_env_var_name: Option<&OsStr>,
) -> Result<Interned<str>, ResolveProgramPathError> {
) -> Result<Interned<Path>, ResolveProgramPathError> {
let default_program_name = default_program_name.as_ref();
let owned_program_name;
let program_name = if let Some(program_name) = program_name {
@ -564,7 +600,7 @@ pub fn resolve_program_path(
impl<T: ExternalCommand> ExternalCommandArgs<T> {
pub fn with_resolved_program_path(
program_path: Interned<str>,
program_path: Interned<Path>,
additional_args: T::AdditionalArgs,
) -> Self {
Self::new(
@ -602,7 +638,7 @@ impl<T: ExternalCommand> ToArgs for ExternalCommandArgs<T> {
} = *self;
program_path.to_args(args);
if run_even_if_cached {
args.write_arg(format_args!("--{}", T::run_even_if_cached_arg_name()));
args.write_display_arg(format_args!("--{}", T::run_even_if_cached_arg_name()));
}
additional_args.to_args(args);
}
@ -613,13 +649,13 @@ struct ExternalCommandJobParams {
command_params: CommandParams,
inputs: Interned<[JobItemName]>,
outputs: Interned<[JobItemName]>,
output_paths: Interned<[Interned<str>]>,
output_paths: Interned<[Interned<Path>]>,
}
impl ExternalCommandJobParams {
fn new<T: ExternalCommand>(job: &ExternalCommandJob<T>) -> Self {
let output_paths = T::output_paths(job);
let mut command_line = ArgsWriter(vec![job.program_path]);
let mut command_line = ArgsWriter(vec![job.program_path.as_interned_os_str()]);
T::command_line_args(job, &mut command_line);
Self {
command_params: CommandParams {
@ -639,8 +675,8 @@ impl ExternalCommandJobParams {
#[derive(Deserialize, Serialize)]
pub struct ExternalCommandJob<T: ExternalCommand> {
additional_job_data: T::AdditionalJobData,
program_path: Interned<str>,
output_dir: Interned<str>,
program_path: Interned<Path>,
output_dir: Interned<Path>,
run_even_if_cached: bool,
#[serde(skip)]
params_cache: OnceLock<ExternalCommandJobParams>,
@ -722,10 +758,10 @@ impl<T: ExternalCommand> ExternalCommandJob<T> {
pub fn additional_job_data(&self) -> &T::AdditionalJobData {
&self.additional_job_data
}
pub fn program_path(&self) -> Interned<str> {
pub fn program_path(&self) -> Interned<Path> {
self.program_path
}
pub fn output_dir(&self) -> Interned<str> {
pub fn output_dir(&self) -> Interned<Path> {
self.output_dir
}
pub fn run_even_if_cached(&self) -> bool {
@ -741,7 +777,7 @@ impl<T: ExternalCommand> ExternalCommandJob<T> {
pub fn inputs(&self) -> Interned<[JobItemName]> {
self.params().inputs
}
pub fn output_paths(&self) -> Interned<[Interned<str>]> {
pub fn output_paths(&self) -> Interned<[Interned<Path>]> {
self.params().output_paths
}
pub fn outputs(&self) -> Interned<[JobItemName]> {
@ -751,12 +787,12 @@ impl<T: ExternalCommand> ExternalCommandJob<T> {
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct ExternalProgramPath<T: ExternalProgramTrait> {
program_path: Interned<str>,
program_path: Interned<Path>,
_phantom: PhantomData<T>,
}
impl<T: ExternalProgramTrait> ExternalProgramPath<T> {
pub fn with_resolved_program_path(program_path: Interned<str>) -> Self {
pub fn with_resolved_program_path(program_path: Interned<Path>) -> Self {
Self {
program_path,
_phantom: PhantomData,
@ -780,7 +816,7 @@ impl<T: ExternalProgramTrait> ExternalProgramPath<T> {
_phantom: PhantomData,
})
}
pub fn program_path(&self) -> Interned<str> {
pub fn program_path(&self) -> Interned<Path> {
self.program_path
}
}
@ -874,8 +910,8 @@ impl<T: ExternalProgramTrait> ToArgs for ExternalProgramPath<T> {
program_path,
_phantom: _,
} = self;
if args.get_long_option_eq(program_path_arg_name) != Some(&**program_path) {
args.write_arg(format_args!("--{program_path_arg_name}={program_path}"));
if args.get_long_option_eq(program_path_arg_name) != Some(program_path.as_os_str()) {
args.write_long_option_eq(program_path_arg_name, program_path);
}
}
}
@ -953,20 +989,22 @@ pub trait ExternalCommand: 'static + Send + Sync + Hash + Eq + fmt::Debug + Size
+ fmt::Debug
+ Serialize
+ DeserializeOwned;
type Dependencies: JobDependencies<JobsAndKinds: GetBaseJob>;
type BaseJobPosition;
type Dependencies: JobDependenciesHasBase;
type ExternalProgram: ExternalProgramTrait;
fn dependencies() -> Self::Dependencies;
fn args_to_jobs(
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<(
Self::AdditionalJobData,
<Self::Dependencies as JobDependencies>::JobsAndKinds,
)>;
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]>;
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]>;
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]>;
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W);
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>>;
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>>;
fn job_kind_name() -> Interned<str>;
fn args_group_id() -> clap::Id {
Interned::into_inner(Self::job_kind_name()).into()
@ -991,6 +1029,7 @@ impl<T: ExternalCommand> JobKind for ExternalCommandJobKind<T> {
fn args_to_jobs(
args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
let JobKindAndArgs {
kind,
@ -1005,8 +1044,8 @@ impl<T: ExternalCommand> JobKind for ExternalCommandJobKind<T> {
additional_args: _,
},
} = args.args;
let (additional_job_data, dependencies) = T::args_to_jobs(args, params)?;
let base_job = dependencies.base_job();
let (additional_job_data, dependencies) = T::args_to_jobs(args, params, global_params)?;
let base_job = T::Dependencies::base_job(&dependencies);
let job = ExternalCommandJob {
additional_job_data,
program_path,
@ -1041,7 +1080,8 @@ impl<T: ExternalCommand> JobKind for ExternalCommandJobKind<T> {
self,
job: &Self::Job,
inputs: &[JobItem],
params: &JobParams,
_params: &JobParams,
global_params: &GlobalParams,
acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
assert!(
@ -1056,7 +1096,7 @@ impl<T: ExternalCommand> JobKind for ExternalCommandJobKind<T> {
} = job.command_params();
ExternalJobCaching::new(
&job.output_dir,
&params.application_name(),
&global_params.application_name(),
&T::job_kind_name(),
job.run_even_if_cached,
)?

View file

@ -3,17 +3,17 @@
use crate::{
build::{
BaseJob, BaseJobKind, CommandParams, DynJobKind, JobAndDependencies,
BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies,
JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
ToArgs, WriteArgs,
},
firrtl::{ExportOptions, FileBackend},
intern::{Intern, Interned},
intern::{Intern, InternSlice, Interned},
util::job_server::AcquiredJob,
};
use clap::Args;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::path::{Path, PathBuf};
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
pub struct FirrtlJobKind;
@ -43,11 +43,11 @@ impl Firrtl {
fn make_firrtl_file_backend(&self) -> FileBackend {
FileBackend {
dir_path: PathBuf::from(&*self.base.output_dir()),
top_fir_file_stem: Some(String::from(&*self.base.file_stem())),
top_fir_file_stem: Some(self.base.file_stem().into()),
circuit_name: None,
}
}
pub fn firrtl_file(&self) -> Interned<str> {
pub fn firrtl_file(&self) -> Interned<Path> {
self.base.file_with_ext("fir")
}
}
@ -64,12 +64,14 @@ impl JobKind for FirrtlJobKind {
fn args_to_jobs(
args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.args_to_jobs_simple(
params,
global_params,
|_kind, FirrtlArgs { export_options }, dependencies| {
Ok(Firrtl {
base: dependencies.job.job.clone(),
base: dependencies.get_job::<BaseJob, _>().clone(),
export_options,
})
},
@ -79,15 +81,15 @@ impl JobKind for FirrtlJobKind {
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.base.output_dir(),
}][..]
.intern()
}]
.intern_slice()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.firrtl_file(),
}][..]
.intern()
}]
.intern_slice()
}
fn name(self) -> Interned<str> {
@ -103,6 +105,7 @@ impl JobKind for FirrtlJobKind {
job: &Self::Job,
inputs: &[JobItem],
params: &JobParams,
_global_params: &GlobalParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
let [JobItem::Path { path: input_path }] = *inputs else {

View file

@ -3,23 +3,26 @@
use crate::{
build::{
CommandParams, DynJobKind, GetBaseJob, JobAndDependencies, JobArgsAndDependencies,
JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs,
WriteArgs,
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams,
JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
external::{
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
},
interned_known_utf8_method,
verilog::{VerilogDialect, VerilogJob, VerilogJobKind},
verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
},
intern::{Intern, Interned},
intern::{Intern, InternSlice, Interned},
module::NameId,
util::job_server::AcquiredJob,
};
use clap::{Args, ValueEnum};
use eyre::Context;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::{
ffi::{OsStr, OsString},
fmt::{self, Write},
path::Path,
};
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize)]
#[non_exhaustive]
@ -52,7 +55,7 @@ impl fmt::Display for FormalMode {
#[non_exhaustive]
pub struct FormalArgs {
#[arg(long = "sby-extra-arg", value_name = "ARG")]
pub sby_extra_args: Vec<String>,
pub sby_extra_args: Vec<OsString>,
#[arg(long, default_value_t)]
pub formal_mode: FormalMode,
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
@ -60,7 +63,7 @@ pub struct FormalArgs {
#[arg(long, default_value = Self::DEFAULT_SOLVER)]
pub formal_solver: String,
#[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
pub smtbmc_extra_args: Vec<String>,
pub smtbmc_extra_args: Vec<OsString>,
}
impl FormalArgs {
@ -77,21 +80,17 @@ impl ToArgs for FormalArgs {
formal_solver,
smtbmc_extra_args,
} = self;
args.extend(
sby_extra_args
.iter()
.map(|v| format!("--sby-extra-arg={v}")),
);
args.extend([
for arg in sby_extra_args {
args.write_long_option_eq("sby-extra-arg", arg);
}
args.write_display_args([
format_args!("--formal-mode={formal_mode}"),
format_args!("--formal-depth={formal_depth}"),
format_args!("--formal-solver={formal_solver}"),
]);
args.extend(
smtbmc_extra_args
.iter()
.map(|v| format!("--smtbmc-extra-arg={v}")),
);
for arg in smtbmc_extra_args {
args.write_long_option_eq("smtbmc-extra-arg", arg);
}
}
}
@ -100,18 +99,18 @@ pub struct WriteSbyFileJobKind;
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub struct WriteSbyFileJob {
sby_extra_args: Interned<[Interned<str>]>,
sby_extra_args: Interned<[Interned<OsStr>]>,
formal_mode: FormalMode,
formal_depth: u64,
formal_solver: Interned<str>,
smtbmc_extra_args: Interned<[Interned<str>]>,
sby_file: Interned<str>,
output_dir: Interned<str>,
main_verilog_file: Interned<str>,
smtbmc_extra_args: Interned<[Interned<OsStr>]>,
sby_file: Interned<Path>,
output_dir: Interned<Path>,
main_verilog_file: Interned<Path>,
}
impl WriteSbyFileJob {
pub fn sby_extra_args(&self) -> Interned<[Interned<str>]> {
pub fn sby_extra_args(&self) -> Interned<[Interned<OsStr>]> {
self.sby_extra_args
}
pub fn formal_mode(&self) -> FormalMode {
@ -123,24 +122,24 @@ impl WriteSbyFileJob {
pub fn formal_solver(&self) -> Interned<str> {
self.formal_solver
}
pub fn smtbmc_extra_args(&self) -> Interned<[Interned<str>]> {
pub fn smtbmc_extra_args(&self) -> Interned<[Interned<OsStr>]> {
self.smtbmc_extra_args
}
pub fn sby_file(&self) -> Interned<str> {
pub fn sby_file(&self) -> Interned<Path> {
self.sby_file
}
pub fn output_dir(&self) -> Interned<str> {
pub fn output_dir(&self) -> Interned<Path> {
self.output_dir
}
pub fn main_verilog_file(&self) -> Interned<str> {
pub fn main_verilog_file(&self) -> Interned<Path> {
self.main_verilog_file
}
fn write_sby<W: ?Sized + fmt::Write>(
fn write_sby(
&self,
output: &mut W,
additional_files: &[Interned<str>],
output: &mut OsString,
additional_files: &[Interned<Path>],
main_module_name_id: NameId,
) -> Result<eyre::Result<()>, fmt::Error> {
) -> eyre::Result<()> {
let Self {
sby_extra_args: _,
formal_mode,
@ -160,23 +159,21 @@ impl WriteSbyFileJob {
\n\
[engines]\n\
smtbmc {formal_solver} -- --"
)?;
)
.expect("writing to OsString can't fail");
for i in smtbmc_extra_args {
output.write_str(" ")?;
output.write_str(i)?;
output.push(" ");
output.push(i);
}
output.write_str(
output.push(
"\n\
\n\
[script]\n",
)?;
let all_verilog_files =
match VerilogJob::all_verilog_files(*main_verilog_file, additional_files) {
Ok(v) => v,
Err(e) => return Ok(Err(e)),
};
for verilog_file in all_verilog_files {
writeln!(output, "read_verilog -sv -formal \"{verilog_file}\"")?;
);
for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
output.push("read_verilog -sv -formal \"");
output.push(verilog_file);
output.push("\"\n");
}
let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
// workaround for wires disappearing -- set `keep` on all wires
@ -186,8 +183,9 @@ impl WriteSbyFileJob {
proc\n\
setattr -set keep 1 w:\\*\n\
prep",
)?;
Ok(Ok(()))
)
.expect("writing to OsString can't fail");
Ok(())
}
}
@ -203,6 +201,7 @@ impl JobKind for WriteSbyFileJobKind {
fn args_to_jobs(
mut args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.dependencies
.dependencies
@ -211,7 +210,7 @@ impl JobKind for WriteSbyFileJobKind {
.additional_args
.verilog_dialect
.get_or_insert(VerilogDialect::Yosys);
args.args_to_jobs_simple(params, |_kind, args, dependencies| {
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
let FormalArgs {
sby_extra_args,
formal_mode,
@ -219,18 +218,16 @@ impl JobKind for WriteSbyFileJobKind {
formal_solver,
smtbmc_extra_args,
} = args;
let base_job = dependencies.get_job::<BaseJob, _>();
Ok(WriteSbyFileJob {
sby_extra_args: sby_extra_args.into_iter().map(str::intern_owned).collect(),
sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(),
formal_mode,
formal_depth,
formal_solver: str::intern_owned(formal_solver),
smtbmc_extra_args: smtbmc_extra_args
.into_iter()
.map(str::intern_owned)
.collect(),
sby_file: dependencies.base_job().file_with_ext("sby"),
output_dir: dependencies.base_job().output_dir(),
main_verilog_file: dependencies.job.job.main_verilog_file(),
formal_solver: formal_solver.intern_deref(),
smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(),
sby_file: base_job.file_with_ext("sby"),
output_dir: base_job.output_dir(),
main_verilog_file: dependencies.get_job::<VerilogJob, _>().main_verilog_file(),
})
})
}
@ -238,12 +235,12 @@ impl JobKind for WriteSbyFileJobKind {
fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::DynamicPaths {
source_job_name: VerilogJobKind.name(),
}][..]
.intern()
}]
.intern_slice()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path { path: job.sby_file }][..].intern()
[JobItemName::Path { path: job.sby_file }].intern_slice()
}
fn name(self) -> Interned<str> {
@ -259,6 +256,7 @@ impl JobKind for WriteSbyFileJobKind {
job: &Self::Job,
inputs: &[JobItem],
params: &JobParams,
_global_params: &GlobalParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
@ -266,18 +264,16 @@ impl JobKind for WriteSbyFileJobKind {
unreachable!();
};
let additional_files = VerilogJob::unwrap_additional_files(additional_files);
let mut contents = String::new();
match job.write_sby(
let mut contents = OsString::new();
job.write_sby(
&mut contents,
additional_files,
params.main_module().name_id(),
) {
Ok(result) => result?,
Err(fmt::Error) => unreachable!("writing to String can't fail"),
}
std::fs::write(job.sby_file, contents)
.wrap_err_with(|| format!("writing {} failed", job.sby_file))?;
Ok(vec![JobItem::Path { path: job.sby_file }])
)?;
let path = job.sby_file;
std::fs::write(path, contents.as_encoded_bytes())
.wrap_err_with(|| format!("writing {path:?} failed"))?;
Ok(vec![JobItem::Path { path }])
}
fn subcommand_hidden(self) -> bool {
@ -289,7 +285,7 @@ impl JobKind for WriteSbyFileJobKind {
pub struct Formal {
#[serde(flatten)]
write_sby_file: WriteSbyFileJob,
sby_file_name: Interned<str>,
sby_file_name: Interned<OsStr>,
}
impl fmt::Debug for Formal {
@ -342,6 +338,11 @@ impl ToArgs for FormalAdditionalArgs {
impl ExternalCommand for Formal {
type AdditionalArgs = FormalAdditionalArgs;
type AdditionalJobData = Formal;
type BaseJobPosition = GetJobPositionDependencies<
GetJobPositionDependencies<
GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
>,
>;
type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
type ExternalProgram = Symbiyosys;
@ -352,17 +353,20 @@ impl ExternalCommand for Formal {
fn args_to_jobs(
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<(
Self::AdditionalJobData,
<Self::Dependencies as JobDependencies>::JobsAndKinds,
)> {
args.args_to_jobs_external_simple(params, |args, dependencies| {
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
let FormalAdditionalArgs {} = args.additional_args;
let write_sby_file = dependencies.get_job::<WriteSbyFileJob, _>().clone();
Ok(Formal {
write_sby_file: dependencies.job.job.clone(),
sby_file_name: interned_known_utf8_method(dependencies.job.job.sby_file(), |v| {
v.file_name().expect("known to have file name")
}),
sby_file_name: write_sby_file
.sby_file()
.interned_file_name()
.expect("known to have file name"),
write_sby_file,
})
})
}
@ -378,22 +382,22 @@ impl ExternalCommand for Formal {
JobItemName::DynamicPaths {
source_job_name: VerilogJobKind.name(),
},
][..]
.intern()
]
.intern_slice()
}
fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
Interned::default()
}
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
// args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode
args.write_str_arg("-f");
args.write_arg("-f");
args.write_interned_arg(job.additional_job_data().sby_file_name);
args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args());
}
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
Some(job.output_dir())
}

View file

@ -2,8 +2,11 @@
// See Notices.txt for copyright information
use crate::{
build::{DynJob, JobItem, JobItemName, JobParams, program_name_for_internal_jobs},
build::{
DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs,
},
intern::Interned,
platform::DynPlatform,
util::{HashMap, HashSet, job_server::AcquiredJob},
};
use eyre::{ContextCompat, eyre};
@ -16,9 +19,12 @@ use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::Se
use std::{
cell::OnceCell,
collections::{BTreeMap, BTreeSet, VecDeque},
convert::Infallible,
ffi::OsStr,
fmt::{self, Write},
panic,
rc::Rc,
str::Utf8Error,
sync::mpsc,
thread::{self, ScopedJoinHandle},
};
@ -138,8 +144,8 @@ impl<'a> fmt::Display for EscapeForUnixShell<'a> {
}
impl<'a> EscapeForUnixShell<'a> {
pub fn new(s: &'a str) -> Self {
Self::from_bytes(s.as_bytes())
pub fn new(s: &'a (impl ?Sized + AsRef<OsStr>)) -> Self {
Self::from_bytes(s.as_ref().as_encoded_bytes())
}
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
let mut prefix = [0; 3];
@ -262,7 +268,7 @@ pub enum UnixMakefileEscapeKind {
#[derive(Copy, Clone)]
pub struct EscapeForUnixMakefile<'a> {
s: &'a str,
s: &'a OsStr,
kind: UnixMakefileEscapeKind,
}
@ -274,9 +280,13 @@ impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.do_write(f, fmt::Write::write_str, fmt::Write::write_char, |_, _| {
Ok(())
})
self.do_write(
f,
fmt::Write::write_str,
fmt::Write::write_char,
|_, _| Ok(()),
|_| unreachable!("already checked that the input causes no UTF-8 errors"),
)
}
}
@ -287,6 +297,7 @@ impl<'a> EscapeForUnixMakefile<'a> {
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
write_char: impl Fn(&mut S, char) -> Result<(), E>,
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
utf8_error: impl Fn(Utf8Error) -> E,
) -> Result<(), E> {
let escape_recipe_char = |c| match c {
'$' => write_str(state, "$$"),
@ -296,24 +307,30 @@ impl<'a> EscapeForUnixMakefile<'a> {
_ => write_char(state, c),
};
match self.kind {
UnixMakefileEscapeKind::NonRecipe => self.s.chars().try_for_each(|c| match c {
'=' => {
add_variable(state, "EQUALS = =")?;
write_str(state, "$(EQUALS)")
}
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
'$' => write_str(state, "$$"),
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
write_char(state, '\\')?;
write_char(state, c)
}
'\0'..='\x1F' | '\x7F' => {
panic!("can't escape a control character for Unix Makefile: {c:?}");
}
_ => write_char(state, c),
}),
UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes())
.map_err(&utf8_error)?
.chars()
.try_for_each(|c| match c {
'=' => {
add_variable(state, "EQUALS = =")?;
write_str(state, "$(EQUALS)")
}
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
'$' => write_str(state, "$$"),
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
write_char(state, '\\')?;
write_char(state, c)
}
'\0'..='\x1F' | '\x7F' => {
panic!("can't escape a control character for Unix Makefile: {c:?}");
}
_ => write_char(state, c),
}),
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
self.s.chars().try_for_each(escape_recipe_char)
str::from_utf8(self.s.as_encoded_bytes())
.map_err(&utf8_error)?
.chars()
.try_for_each(escape_recipe_char)
}
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
@ -321,21 +338,23 @@ impl<'a> EscapeForUnixMakefile<'a> {
}
}
pub fn new(
s: &'a str,
s: &'a (impl ?Sized + AsRef<OsStr>),
kind: UnixMakefileEscapeKind,
needed_variables: &mut BTreeSet<&'static str>,
) -> Self {
) -> Result<Self, Utf8Error> {
let s = s.as_ref();
let retval = Self { s, kind };
let Ok(()) = retval.do_write(
retval.do_write(
needed_variables,
|_, _| Ok(()),
|_, _| Ok(()),
|needed_variables, variable| -> Result<(), std::convert::Infallible> {
|needed_variables, variable| {
needed_variables.insert(variable);
Ok(())
},
);
retval
|e| e,
)?;
Ok(retval)
}
}
@ -473,17 +492,23 @@ impl JobGraph {
Err(e) => panic!("error: {e}"),
}
}
pub fn to_unix_makefile(&self, extra_args: &[Interned<str>]) -> String {
pub fn to_unix_makefile(
&self,
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> Result<String, Utf8Error> {
self.to_unix_makefile_with_internal_program_prefix(
&[program_name_for_internal_jobs()],
platform,
extra_args,
)
}
pub fn to_unix_makefile_with_internal_program_prefix(
&self,
internal_program_prefix: &[Interned<str>],
extra_args: &[Interned<str>],
) -> String {
internal_program_prefix: &[Interned<OsStr>],
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> Result<String, Utf8Error> {
let mut retval = String::new();
let mut needed_variables = BTreeSet::new();
let mut phony_targets = BTreeSet::new();
@ -502,10 +527,10 @@ impl JobGraph {
retval,
"{} ",
EscapeForUnixMakefile::new(
&path,
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
)?
);
}
JobItemName::DynamicPaths { source_job_name } => {
@ -516,7 +541,7 @@ impl JobGraph {
&source_job_name,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
)?
);
phony_targets.insert(Interned::into_inner(source_job_name));
}
@ -535,10 +560,10 @@ impl JobGraph {
retval,
" {}",
EscapeForUnixMakefile::new(
&path,
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
)?
);
}
JobItemName::DynamicPaths { source_job_name } => {
@ -549,26 +574,30 @@ impl JobGraph {
&source_job_name,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
)?
);
phony_targets.insert(Interned::into_inner(source_job_name));
}
}
}
retval.push_str("\n\t");
job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args)
.to_unix_shell_line(&mut retval, |arg, output| {
write!(
output,
"{}",
EscapeForUnixMakefile::new(
arg,
UnixMakefileEscapeKind::RecipeWithShellEscaping,
&mut needed_variables
)
)
})
.expect("writing to String never fails");
job.command_params_with_internal_program_prefix(
internal_program_prefix,
platform,
extra_args,
)
.to_unix_shell_line(&mut retval, |arg, output| {
write_str!(
output,
"{}",
EscapeForUnixMakefile::new(
arg,
UnixMakefileEscapeKind::RecipeWithShellEscaping,
&mut needed_variables
)?
);
Ok(())
})?;
retval.push_str("\n\n");
}
if !phony_targets.is_empty() {
@ -581,7 +610,7 @@ impl JobGraph {
phony_target,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)
)?
);
}
retval.push_str("\n");
@ -592,18 +621,24 @@ impl JobGraph {
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
);
}
retval
Ok(retval)
}
pub fn to_unix_shell_script(&self, extra_args: &[Interned<str>]) -> String {
pub fn to_unix_shell_script(
&self,
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> String {
self.to_unix_shell_script_with_internal_program_prefix(
&[program_name_for_internal_jobs()],
platform,
extra_args,
)
}
pub fn to_unix_shell_script_with_internal_program_prefix(
&self,
internal_program_prefix: &[Interned<str>],
extra_args: &[Interned<str>],
internal_program_prefix: &[Interned<OsStr>],
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> String {
let mut retval = String::from(
"#!/bin/sh\n\
@ -613,16 +648,21 @@ impl JobGraph {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
job.command_params_with_internal_program_prefix(internal_program_prefix, extra_args)
.to_unix_shell_line(&mut retval, |arg, output| {
write!(output, "{}", EscapeForUnixShell::new(&arg))
})
.expect("writing to String never fails");
let Ok(()) = job
.command_params_with_internal_program_prefix(
internal_program_prefix,
platform,
extra_args,
)
.to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> {
write_str!(output, "{}", EscapeForUnixShell::new(&arg));
Ok(())
});
retval.push_str("\n");
}
retval
}
pub fn run(&self, params: &JobParams) -> eyre::Result<()> {
pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> {
// use scope to auto-join threads on errors
thread::scope(|scope| {
struct WaitingJobState {
@ -708,13 +748,18 @@ impl JobGraph {
job: DynJob,
inputs: Vec<JobItem>,
params: &'a JobParams,
global_params: &'a GlobalParams,
acquired_job: AcquiredJob,
finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
}
impl RunningJobInThread<'_> {
fn run(mut self) -> eyre::Result<Vec<JobItem>> {
self.job
.run(&self.inputs, self.params, &mut self.acquired_job)
self.job.run(
&self.inputs,
self.params,
self.global_params,
&mut self.acquired_job,
)
}
}
impl Drop for RunningJobInThread<'_> {
@ -732,6 +777,7 @@ impl JobGraph {
})
}))?,
params,
global_params,
acquired_job: AcquiredJob::acquire()?,
finished_jobs_sender: finished_jobs_sender.clone(),
};

View file

@ -4,10 +4,9 @@
use crate::{
build::{DynJobKind, JobKind, built_in_job_kinds},
intern::Interned,
util::InternedStrCompareAsStr,
};
use std::{
borrow::Borrow,
cmp::Ordering,
collections::BTreeMap,
fmt,
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
@ -23,33 +22,6 @@ impl DynJobKind {
}
}
#[derive(Copy, Clone, PartialEq, Eq)]
struct InternedStrCompareAsStr(Interned<str>);
impl fmt::Debug for InternedStrCompareAsStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl Ord for InternedStrCompareAsStr {
fn cmp(&self, other: &Self) -> Ordering {
str::cmp(&self.0, &other.0)
}
}
impl PartialOrd for InternedStrCompareAsStr {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Borrow<str> for InternedStrCompareAsStr {
fn borrow(&self) -> &str {
&self.0
}
}
#[derive(Clone, Debug)]
struct JobKindRegistry {
job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,

View file

@ -1,20 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::intern::Interned;
pub mod yosys_nextpnr_prjxray;
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcIOStandardAnnotation {
pub value: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcLocationAnnotation {
pub location: Interned<str>,
}
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
yosys_nextpnr_prjxray::built_in_job_kinds()
}

View file

@ -1,910 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
annotations::Annotation,
build::{
CommandParams, DynJobKind, GetBaseJob, JobAndDependencies, JobArgsAndDependencies,
JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, ToArgs, WriteArgs,
external::{
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
},
interned_known_utf8_method, interned_known_utf8_path_buf_method,
vendor::xilinx::{XdcIOStandardAnnotation, XdcLocationAnnotation},
verilog::{VerilogDialect, VerilogJob, VerilogJobKind},
},
bundle::Bundle,
firrtl::{ScalarizedModuleABI, ScalarizedModuleABIAnnotations, ScalarizedModuleABIPort},
intern::{Intern, Interned},
module::{Module, NameId},
prelude::JobParams,
util::job_server::AcquiredJob,
};
use clap::ValueEnum;
use eyre::Context;
use serde::{Deserialize, Serialize};
use std::{fmt, ops::ControlFlow};
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)]
/// Job kind that generates the Yosys `.ys` synthesis script.
pub struct YosysNextpnrXrayWriteYsFileJobKind;
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
/// No extra CLI arguments for this job kind (empty on purpose).
pub struct YosysNextpnrXrayWriteYsFileArgs {}
impl ToArgs for YosysNextpnrXrayWriteYsFileArgs {
    fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
        // exhaustive destructure: compile error if fields are added later
        let Self {} = self;
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
/// Resolved paths for the write-ys-file job.
pub struct YosysNextpnrXrayWriteYsFile {
    main_verilog_file: Interned<str>,
    ys_file: Interned<str>,
    json_file: Interned<str>,
    // file name component only, used inside the generated script
    json_file_name: Interned<str>,
}
impl YosysNextpnrXrayWriteYsFile {
    pub fn main_verilog_file(&self) -> Interned<str> {
        self.main_verilog_file
    }
    pub fn ys_file(&self) -> Interned<str> {
        self.ys_file
    }
    pub fn json_file(&self) -> Interned<str> {
        self.json_file
    }
    pub fn json_file_name(&self) -> Interned<str> {
        self.json_file_name
    }
    /// Writes the Yosys script: reads every Verilog file, runs
    /// `synth_xilinx`, then writes the netlist JSON.
    ///
    /// Nested result: the outer `fmt::Error` is the writer's failure, the
    /// inner `eyre::Result` reports path-collection errors from
    /// [`VerilogJob::all_verilog_files`].
    fn write_ys<W: ?Sized + fmt::Write>(
        &self,
        output: &mut W,
        additional_files: &[Interned<str>],
        main_module_name_id: NameId,
    ) -> Result<eyre::Result<()>, fmt::Error> {
        let Self {
            main_verilog_file,
            ys_file: _,
            json_file: _,
            json_file_name,
        } = self;
        let all_verilog_files =
            match VerilogJob::all_verilog_files(*main_verilog_file, additional_files) {
                Ok(v) => v,
                Err(e) => return Ok(Err(e)),
            };
        for verilog_file in all_verilog_files {
            writeln!(output, "read_verilog -sv \"{verilog_file}\"")?;
        }
        let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
        writeln!(
            output,
            "synth_xilinx -flatten -abc9 -nobram -arch xc7 -top {circuit_name}"
        )?;
        writeln!(output, "write_json \"{json_file_name}\"")?;
        Ok(Ok(()))
    }
}
impl JobKind for YosysNextpnrXrayWriteYsFileJobKind {
    type Args = YosysNextpnrXrayWriteYsFileArgs;
    type Job = YosysNextpnrXrayWriteYsFile;
    type Dependencies = JobKindAndDependencies<VerilogJobKind>;
    fn dependencies(self) -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        mut args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        // default the upstream Verilog job to the Yosys dialect unless the
        // user already picked one explicitly
        args.dependencies
            .dependencies
            .args
            .args
            .additional_args
            .verilog_dialect
            .get_or_insert(VerilogDialect::Yosys);
        args.args_to_jobs_simple(params, |_kind, args, dependencies| {
            let YosysNextpnrXrayWriteYsFileArgs {} = args;
            let json_file = dependencies.base_job().file_with_ext("json");
            Ok(YosysNextpnrXrayWriteYsFile {
                main_verilog_file: dependencies.job.job.main_verilog_file(),
                ys_file: dependencies.base_job().file_with_ext("ys"),
                json_file,
                json_file_name: interned_known_utf8_method(json_file, |v| {
                    v.file_name().expect("known to have file name")
                }),
            })
        })
    }
    fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
        // consumes the dynamically-discovered file list produced by the
        // Verilog job (e.g. extra emitted files), not a fixed path
        [JobItemName::DynamicPaths {
            source_job_name: VerilogJobKind.name(),
        }][..]
            .intern()
    }
    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path { path: job.ys_file }][..].intern()
    }
    fn name(self) -> Interned<str> {
        "yosys-nextpnr-xray-write-ys-file".intern()
    }
    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        // runs in-process; no external command
        None
    }
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        let [additional_files] = inputs else {
            unreachable!();
        };
        let additional_files = VerilogJob::unwrap_additional_files(additional_files);
        let mut contents = String::new();
        match job.write_ys(
            &mut contents,
            additional_files,
            params.main_module().name_id(),
        ) {
            // outer Ok = formatting succeeded; inner result may still carry
            // a path-collection error
            Ok(result) => result?,
            Err(fmt::Error) => unreachable!("writing to String can't fail"),
        }
        std::fs::write(job.ys_file, contents)
            .wrap_err_with(|| format!("writing {} failed", job.ys_file))?;
        Ok(vec![JobItem::Path { path: job.ys_file }])
    }
    fn subcommand_hidden(self) -> bool {
        // internal step of the combined flow; hidden from CLI help
        true
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
/// No extra CLI arguments for the synthesis job (empty on purpose).
pub struct YosysNextpnrXraySynthArgs {}
impl ToArgs for YosysNextpnrXraySynthArgs {
    fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
        // exhaustive destructure: compile error if fields are added later
        let Self {} = self;
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
/// Job data for running Yosys on the generated `.ys` script.
pub struct YosysNextpnrXraySynth {
    // flattened so its fields serialize at this struct's level
    #[serde(flatten)]
    write_ys_file: YosysNextpnrXrayWriteYsFile,
    ys_file_name: Interned<str>,
}
impl fmt::Debug for YosysNextpnrXraySynth {
    /// Hand-written `Debug` so the nested `write_ys_file` job's fields are
    /// shown flattened into this struct, matching the `#[serde(flatten)]`
    /// layout, instead of as a nested struct.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Exhaustive destructure (with renamed bindings): adding a field
        // without updating this impl becomes a compile error.
        let Self {
            write_ys_file:
                YosysNextpnrXrayWriteYsFile {
                    main_verilog_file: verilog,
                    ys_file: ys,
                    json_file: json,
                    json_file_name: json_name,
                },
            ys_file_name: ys_name,
        } = self;
        f.debug_struct("YosysNextpnrXraySynth")
            .field("main_verilog_file", verilog)
            .field("ys_file", ys)
            .field("ys_file_name", ys_name)
            .field("json_file", json)
            .field("json_file_name", json_name)
            .finish()
    }
}
impl YosysNextpnrXraySynth {
    // Getters mostly delegate to the embedded write-ys-file job data.
    pub fn main_verilog_file(&self) -> Interned<str> {
        self.write_ys_file.main_verilog_file()
    }
    pub fn ys_file(&self) -> Interned<str> {
        self.write_ys_file.ys_file()
    }
    pub fn ys_file_name(&self) -> Interned<str> {
        self.ys_file_name
    }
    pub fn json_file(&self) -> Interned<str> {
        self.write_ys_file.json_file()
    }
    pub fn json_file_name(&self) -> Interned<str> {
        self.write_ys_file.json_file_name()
    }
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
/// External-program marker for the Yosys synthesis tool.
pub struct Yosys;
impl ExternalProgramTrait for Yosys {
    fn default_program_name() -> Interned<str> {
        "yosys".intern()
    }
}
impl ExternalCommand for YosysNextpnrXraySynth {
    type AdditionalArgs = YosysNextpnrXraySynthArgs;
    type AdditionalJobData = Self;
    type Dependencies = JobKindAndDependencies<YosysNextpnrXrayWriteYsFileJobKind>;
    type ExternalProgram = Yosys;
    fn dependencies() -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let YosysNextpnrXraySynthArgs {} = args.additional_args;
            Ok(Self {
                // reuse the upstream job's resolved paths
                write_ys_file: dependencies.job.job.clone(),
                ys_file_name: interned_known_utf8_method(dependencies.job.job.ys_file(), |v| {
                    v.file_name().expect("known to have file name")
                }),
            })
        })
    }
    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        // the script, the main Verilog file, and any extra files the
        // Verilog job discovered at run time
        [
            JobItemName::Path {
                path: job.additional_job_data().ys_file(),
            },
            JobItemName::Path {
                path: job.additional_job_data().main_verilog_file(),
            },
            JobItemName::DynamicPaths {
                source_job_name: VerilogJobKind.name(),
            },
        ][..]
            .intern()
    }
    fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        [job.additional_job_data().json_file()][..].intern()
    }
    fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
        // `yosys -s <script>`; the script name is relative because the
        // command runs in the output directory (see `current_dir`)
        args.write_str_arg("-s");
        args.write_interned_arg(job.additional_job_data().ys_file_name());
    }
    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
        Some(job.output_dir())
    }
    fn job_kind_name() -> Interned<str> {
        "yosys-nextpnr-xray-synth".intern()
    }
    fn subcommand_hidden() -> bool {
        true
    }
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash, Default)]
/// Job kind that generates the `.xdc` constraints file.
pub struct YosysNextpnrXrayWriteXdcFileJobKind;
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
/// No extra CLI arguments for this job kind (empty on purpose).
pub struct YosysNextpnrXrayWriteXdcFileArgs {}
impl ToArgs for YosysNextpnrXrayWriteXdcFileArgs {
    fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
        // exhaustive destructure: compile error if fields are added later
        let Self {} = self;
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
pub struct YosysNextpnrXrayWriteXdcFile {
    // needed to re-scalarize the module ABI when generating constraints
    firrtl_export_options: crate::firrtl::ExportOptions,
    output_dir: Interned<str>,
    xdc_file: Interned<str>,
}
/// Error wrapper so `?` works on both `eyre::Report` and `fmt::Error`
/// inside the XDC-writing closures.
struct WriteXdcContentsError(eyre::Report);
impl From<eyre::Report> for WriteXdcContentsError {
    fn from(v: eyre::Report) -> Self {
        Self(v)
    }
}
impl From<fmt::Error> for WriteXdcContentsError {
    fn from(_v: fmt::Error) -> Self {
        // only `String` writers are used, and writing to a String is infallible
        unreachable!("String write can't fail")
    }
}
/// Escapes `s` for use as a double-quoted Tcl word (e.g. in generated
/// `.xdc` files), returning the string including the surrounding quotes.
///
/// Inside Tcl double quotes, `$` (variable substitution), `[` (command
/// substitution), `\` (escape), and `"` (word terminator) are special, so
/// each is prefixed with a backslash.
fn tcl_escape(s: impl AsRef<str>) -> String {
    let s = s.as_ref();
    // +2 for the surrounding quotes; escape backslashes may grow it further
    let mut retval = String::with_capacity(s.len().saturating_add(2));
    retval.push('"');
    for ch in s.chars() {
        // bug fix: `"` must also be escaped, otherwise an embedded quote
        // terminates the Tcl word early and corrupts the generated file
        if let '$' | '\\' | '[' | '"' = ch {
            retval.push('\\');
        }
        retval.push(ch);
    }
    retval.push('"');
    retval
}
impl YosysNextpnrXrayWriteXdcFile {
    /// Emits `set_property LOC`/`IOSTANDARD` lines for one scalarized port,
    /// driven by its XDC annotations; all other annotation kinds are
    /// deliberately ignored here.
    fn write_xdc_contents_for_port_and_annotations(
        &self,
        output: &mut impl fmt::Write,
        port: &ScalarizedModuleABIPort,
        annotations: ScalarizedModuleABIAnnotations<'_>,
    ) -> Result<(), WriteXdcContentsError> {
        for annotation in annotations {
            match annotation.annotation() {
                Annotation::DontTouch(_)
                | Annotation::SVAttribute(_)
                | Annotation::BlackBoxInline(_)
                | Annotation::BlackBoxPath(_)
                | Annotation::DocString(_)
                | Annotation::CustomFirrtl(_) => {}
                Annotation::XdcLocation(XdcLocationAnnotation { location }) => writeln!(
                    output,
                    "set_property LOC {} [get_ports {}]",
                    tcl_escape(location),
                    tcl_escape(port.scalarized_name())
                )?,
                Annotation::XdcIOStandard(XdcIOStandardAnnotation { value }) => writeln!(
                    output,
                    "set_property IOSTANDARD {} [get_ports {}]",
                    tcl_escape(value),
                    tcl_escape(port.scalarized_name())
                )?,
            }
        }
        Ok(())
    }
    /// Builds the full XDC text by scalarizing the top module's ABI and
    /// visiting every port with its annotations; the first per-port error
    /// aborts the traversal via `ControlFlow::Break`.
    fn write_xdc_contents(
        &self,
        output: &mut String,
        top_module: &Module<Bundle>,
    ) -> eyre::Result<()> {
        let scalarized_module_abi =
            ScalarizedModuleABI::new(top_module, self.firrtl_export_options)
                .map_err(eyre::Report::from)?;
        match scalarized_module_abi.for_each_port_and_annotations(|port, annotations| {
            match self.write_xdc_contents_for_port_and_annotations(output, port, annotations) {
                Ok(()) => ControlFlow::Continue(()),
                Err(e) => ControlFlow::Break(e),
            }
        }) {
            ControlFlow::Continue(()) => Ok(()),
            ControlFlow::Break(e) => Err(e.0),
        }
    }
}
impl JobKind for YosysNextpnrXrayWriteXdcFileJobKind {
    type Args = YosysNextpnrXrayWriteXdcFileArgs;
    type Job = YosysNextpnrXrayWriteXdcFile;
    type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<YosysNextpnrXraySynth>>;
    fn dependencies(self) -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        args: JobArgsAndDependencies<Self>,
        params: &JobParams,
    ) -> eyre::Result<JobAndDependencies<Self>> {
        // dig the FIRRTL export options out of the dependency chain
        // (five levels up to the FIRRTL job's args)
        let firrtl_export_options = args
            .dependencies
            .dependencies
            .dependencies
            .dependencies
            .dependencies
            .args
            .args
            .export_options;
        args.args_to_jobs_simple(params, |_kind, args, dependencies| {
            let YosysNextpnrXrayWriteXdcFileArgs {} = args;
            Ok(YosysNextpnrXrayWriteXdcFile {
                firrtl_export_options,
                output_dir: dependencies.base_job().output_dir(),
                xdc_file: dependencies.base_job().file_with_ext("xdc"),
            })
        })
    }
    fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.output_dir,
        }][..]
            .intern()
    }
    fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
        [JobItemName::Path { path: job.xdc_file }][..].intern()
    }
    fn name(self) -> Interned<str> {
        "yosys-nextpnr-xray-write-xdc-file".intern()
    }
    fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
        None
    }
    fn run(
        self,
        job: &Self::Job,
        inputs: &[JobItem],
        params: &JobParams,
        _acquired_job: &mut AcquiredJob,
    ) -> eyre::Result<Vec<JobItem>> {
        assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
        // NOTE(review): the annotation-derived XDC is generated into `xdc`
        // but then DISCARDED — the hard-coded constraints below are written
        // instead (see the TODO). The hard-coded pins look like they target
        // one specific dev board — confirm before relying on this job for
        // any other board.
        let mut xdc = String::new();
        job.write_xdc_contents(&mut xdc, params.main_module())?;
        // TODO: create actual .xdc from input module
        std::fs::write(
            job.xdc_file,
            r"# autogenerated
set_property LOC G6 [get_ports led]
set_property IOSTANDARD LVCMOS33 [get_ports led]
set_property LOC E3 [get_ports clk]
set_property IOSTANDARD LVCMOS33 [get_ports clk]
set_property LOC C2 [get_ports rst]
set_property IOSTANDARD LVCMOS33 [get_ports rst]
",
        )?;
        Ok(vec![JobItem::Path { path: job.xdc_file }])
    }
    fn subcommand_hidden(self) -> bool {
        true
    }
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
/// External-program marker for the nextpnr-xilinx place-and-route tool.
pub struct NextpnrXilinx;
impl ExternalProgramTrait for NextpnrXilinx {
    fn default_program_name() -> Interned<str> {
        "nextpnr-xilinx".intern()
    }
}
/// Generates a `Device` enum plus:
/// - `as_str`/`xray_part`/`xray_device`/`xray_family` string accessors,
/// - clap `ValueEnum` support (canonical name, with the prjxray part name
///   as an alias),
/// - serde `Serialize`/`Deserialize` that round-trips through the same
///   strings clap accepts (via `ValueEnum::from_str`).
macro_rules! make_device_enum {
    ($vis:vis enum $Device:ident {
        $(
            #[
                name = $name:literal,
                xray_part = $xray_part:literal,
                xray_device = $xray_device:literal,
                xray_family = $xray_family:literal,
            ]
            $variant:ident,
        )*
    }) => {
        #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, ValueEnum)]
        $vis enum $Device {
            $(
                #[value(name = $name, alias = $xray_part)]
                $variant,
            )*
        }
        impl $Device {
            $vis fn as_str(self) -> &'static str {
                match self {
                    $(Self::$variant => $name,)*
                }
            }
            $vis fn xray_part(self) -> &'static str {
                match self {
                    $(Self::$variant => $xray_part,)*
                }
            }
            $vis fn xray_device(self) -> &'static str {
                match self {
                    $(Self::$variant => $xray_device,)*
                }
            }
            $vis fn xray_family(self) -> &'static str {
                match self {
                    $(Self::$variant => $xray_family,)*
                }
            }
        }
        // serde visitor accepting the device as a string (or UTF-8 bytes)
        struct DeviceVisitor;
        impl<'de> serde::de::Visitor<'de> for DeviceVisitor {
            type Value = $Device;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("a Xilinx device string")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                match $Device::from_str(v, false) {
                    Ok(v) => Ok(v),
                    Err(_) => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)),
                }
            }
            fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                match str::from_utf8(v).ok().and_then(|v| $Device::from_str(v, false).ok()) {
                    Some(v) => Ok(v),
                    None => Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)),
                }
            }
        }
        impl<'de> Deserialize<'de> for $Device {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                deserializer.deserialize_string(DeviceVisitor)
            }
        }
        impl Serialize for $Device {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::Serializer,
            {
                // serialize as the canonical clap name
                self.as_str().serialize(serializer)
            }
        }
    };
}
// Supported parts: canonical name as sold (speed-grade -1L variants) mapped
// to the prjxray part/device/family identifiers used by the database.
make_device_enum! {
    pub enum Device {
        #[
            name = "xc7a35ticsg324-1L",
            xray_part = "xc7a35tcsg324-1",
            xray_device = "xc7a35t",
            xray_family = "artix7",
        ]
        Xc7a35ticsg324_1l,
        #[
            name = "xc7a100ticsg324-1L",
            xray_part = "xc7a100tcsg324-1",
            xray_device = "xc7a100t",
            xray_family = "artix7",
        ]
        Xc7a100ticsg324_1l,
    }
}
impl fmt::Display for Device {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // display the canonical device name
        f.write_str(self.as_str())
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
/// CLI arguments for the nextpnr-xilinx place-and-route step.
pub struct YosysNextpnrXrayRunNextpnrArgs {
    /// directory containing the prebuilt nextpnr-xilinx chip databases
    #[arg(long, env = "CHIPDB_DIR", value_hint = clap::ValueHint::DirPath)]
    pub nextpnr_xilinx_chipdb_dir: String,
    #[arg(long)]
    pub device: Device,
    /// placer/router RNG seed; 0 by default for reproducible runs
    #[arg(long, default_value_t = 0)]
    pub nextpnr_xilinx_seed: i32,
}
impl ToArgs for YosysNextpnrXrayRunNextpnrArgs {
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        // exhaustive destructure: compile error if fields are added later
        let Self {
            nextpnr_xilinx_chipdb_dir,
            device,
            nextpnr_xilinx_seed,
        } = self;
        args.write_args([
            format_args!("--nextpnr-xilinx-chipdb-dir={nextpnr_xilinx_chipdb_dir}"),
            format_args!("--device={device}"),
            format_args!("--nextpnr-xilinx-seed={nextpnr_xilinx_seed}"),
        ]);
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
/// Job data for the nextpnr-xilinx run: every path is stored both as a
/// full path (for job-graph inputs/outputs) and as a bare file name (for
/// the command line, which runs in the output directory).
pub struct YosysNextpnrXrayRunNextpnr {
    nextpnr_xilinx_chipdb_dir: Interned<str>,
    device: Device,
    nextpnr_xilinx_seed: i32,
    xdc_file: Interned<str>,
    xdc_file_name: Interned<str>,
    json_file: Interned<str>,
    json_file_name: Interned<str>,
    routed_json_file: Interned<str>,
    routed_json_file_name: Interned<str>,
    fasm_file: Interned<str>,
    fasm_file_name: Interned<str>,
}
impl YosysNextpnrXrayRunNextpnr {
    /// Path of the chip database: `<chipdb_dir>/<xray_device>.bin`.
    fn chipdb_file(&self) -> Interned<str> {
        interned_known_utf8_path_buf_method(self.nextpnr_xilinx_chipdb_dir, |chipdb_dir| {
            let mut retval = chipdb_dir.join(self.device.xray_device());
            retval.set_extension("bin");
            retval
        })
    }
}
impl ExternalCommand for YosysNextpnrXrayRunNextpnr {
    type AdditionalArgs = YosysNextpnrXrayRunNextpnrArgs;
    type AdditionalJobData = Self;
    type Dependencies = JobKindAndDependencies<YosysNextpnrXrayWriteXdcFileJobKind>;
    type ExternalProgram = NextpnrXilinx;
    fn dependencies() -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let YosysNextpnrXrayRunNextpnrArgs {
                nextpnr_xilinx_chipdb_dir,
                device,
                nextpnr_xilinx_seed,
            } = args.additional_args;
            let xdc_file = dependencies.job.job.xdc_file;
            let routed_json_file = dependencies.base_job().file_with_ext("routed.json");
            let fasm_file = dependencies.base_job().file_with_ext("fasm");
            Ok(Self {
                nextpnr_xilinx_chipdb_dir: str::intern_owned(nextpnr_xilinx_chipdb_dir),
                device,
                nextpnr_xilinx_seed,
                xdc_file,
                xdc_file_name: interned_known_utf8_method(xdc_file, |v| {
                    v.file_name().expect("known to have file name")
                }),
                // netlist JSON comes from the synth job two levels up
                json_file: dependencies
                    .dependencies
                    .job
                    .job
                    .additional_job_data()
                    .json_file(),
                json_file_name: dependencies
                    .dependencies
                    .job
                    .job
                    .additional_job_data()
                    .json_file_name(),
                routed_json_file,
                routed_json_file_name: interned_known_utf8_method(routed_json_file, |v| {
                    v.file_name().expect("known to have file name")
                }),
                fasm_file,
                fasm_file_name: interned_known_utf8_method(fasm_file, |v| {
                    v.file_name().expect("known to have file name")
                }),
            })
        })
    }
    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        [
            JobItemName::Path {
                path: job.additional_job_data().json_file,
            },
            JobItemName::Path {
                path: job.additional_job_data().xdc_file,
            },
        ][..]
            .intern()
    }
    fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        [
            job.additional_job_data().routed_json_file,
            job.additional_job_data().fasm_file,
        ][..]
            .intern()
    }
    fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
        // file names (not full paths) are used because the command runs in
        // the output directory (see `current_dir`)
        let job_data @ YosysNextpnrXrayRunNextpnr {
            nextpnr_xilinx_seed,
            xdc_file_name,
            json_file_name,
            routed_json_file_name,
            fasm_file_name,
            ..
        } = job.additional_job_data();
        args.write_args([
            format_args!("--chipdb={}", job_data.chipdb_file()),
            format_args!("--xdc={xdc_file_name}"),
            format_args!("--json={json_file_name}"),
            format_args!("--write={routed_json_file_name}"),
            format_args!("--fasm={fasm_file_name}"),
            format_args!("--seed={nextpnr_xilinx_seed}"),
        ]);
    }
    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
        Some(job.output_dir())
    }
    fn job_kind_name() -> Interned<str> {
        "yosys-nextpnr-xray-run-nextpnr".intern()
    }
    fn subcommand_hidden() -> bool {
        true
    }
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
/// External-program marker for prjxray's `xcfasm` FASM-to-bitstream tool.
pub struct Xcfasm;
impl ExternalProgramTrait for Xcfasm {
    fn default_program_name() -> Interned<str> {
        "xcfasm".intern()
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
/// CLI arguments for the final bitstream-generation step.
pub struct YosysNextpnrXrayArgs {
    /// root of the prjxray database checkout
    #[arg(long, env = "DB_DIR", value_hint = clap::ValueHint::DirPath)]
    pub prjxray_db_dir: String,
}
impl ToArgs for YosysNextpnrXrayArgs {
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        // exhaustive destructure: compile error if fields are added later
        let Self { prjxray_db_dir } = self;
        args.write_arg(format_args!("--prjxray-db-dir={prjxray_db_dir}"));
    }
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
/// Job data for `xcfasm`; paths are stored both in full (job graph) and as
/// bare file names (command line, run from the output directory).
pub struct YosysNextpnrXray {
    prjxray_db_dir: Interned<str>,
    device: Device,
    fasm_file: Interned<str>,
    fasm_file_name: Interned<str>,
    frames_file: Interned<str>,
    frames_file_name: Interned<str>,
    bit_file: Interned<str>,
    bit_file_name: Interned<str>,
}
impl YosysNextpnrXray {
    /// Family subdirectory of the prjxray database:
    /// `<prjxray_db_dir>/<xray_family>`.
    fn db_root(&self) -> Interned<str> {
        interned_known_utf8_path_buf_method(self.prjxray_db_dir, |db_dir| {
            db_dir.join(self.device.xray_family())
        })
    }
    /// Part description file:
    /// `<prjxray_db_dir>/<xray_family>/<xray_part>/part.yaml`.
    fn part_file(&self) -> Interned<str> {
        interned_known_utf8_path_buf_method(self.prjxray_db_dir, |db_dir| {
            db_dir
                .join(self.device.xray_family())
                .join(self.device.xray_part())
                .join("part.yaml")
        })
    }
}
impl ExternalCommand for YosysNextpnrXray {
    type AdditionalArgs = YosysNextpnrXrayArgs;
    type AdditionalJobData = Self;
    type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<YosysNextpnrXrayRunNextpnr>>;
    type ExternalProgram = Xcfasm;
    fn dependencies() -> Self::Dependencies {
        Default::default()
    }
    fn args_to_jobs(
        args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
        params: &JobParams,
    ) -> eyre::Result<(
        Self::AdditionalJobData,
        <Self::Dependencies as JobDependencies>::JobsAndKinds,
    )> {
        args.args_to_jobs_external_simple(params, |args, dependencies| {
            let YosysNextpnrXrayArgs { prjxray_db_dir } = args.additional_args;
            let frames_file = dependencies.base_job().file_with_ext("frames");
            let bit_file = dependencies.base_job().file_with_ext("bit");
            Ok(Self {
                prjxray_db_dir: str::intern_owned(prjxray_db_dir),
                // device and FASM paths come from the nextpnr job
                device: dependencies.job.job.additional_job_data().device,
                fasm_file: dependencies.job.job.additional_job_data().fasm_file,
                fasm_file_name: dependencies.job.job.additional_job_data().fasm_file_name,
                frames_file,
                frames_file_name: interned_known_utf8_method(frames_file, |v| {
                    v.file_name().expect("known to have file name")
                }),
                bit_file,
                bit_file_name: interned_known_utf8_method(bit_file, |v| {
                    v.file_name().expect("known to have file name")
                }),
            })
        })
    }
    fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
        [JobItemName::Path {
            path: job.additional_job_data().fasm_file,
        }][..]
            .intern()
    }
    fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
        [
            job.additional_job_data().frames_file,
            job.additional_job_data().bit_file,
        ][..]
            .intern()
    }
    fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
        // file names are relative to the output directory (see `current_dir`)
        let job_data @ YosysNextpnrXray {
            device,
            fasm_file_name,
            frames_file_name,
            bit_file_name,
            ..
        } = job.additional_job_data();
        args.write_args([
            format_args!("--sparse"),
            format_args!("--db-root={}", job_data.db_root()),
            format_args!("--part={}", device.xray_part()),
            format_args!("--part_file={}", job_data.part_file()),
            format_args!("--fn_in={fasm_file_name}"),
            format_args!("--frm_out={frames_file_name}"),
            format_args!("--bit_out={bit_file_name}"),
        ]);
    }
    fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
        Some(job.output_dir())
    }
    fn job_kind_name() -> Interned<str> {
        "yosys-nextpnr-xray".intern()
    }
}
/// All job kinds registered by the yosys → nextpnr-xilinx → prjxray flow,
/// in pipeline order.
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
    vec![
        DynJobKind::new(YosysNextpnrXrayWriteYsFileJobKind),
        DynJobKind::new(ExternalCommandJobKind::<YosysNextpnrXraySynth>::new()),
        DynJobKind::new(YosysNextpnrXrayWriteXdcFileJobKind),
        DynJobKind::new(ExternalCommandJobKind::<YosysNextpnrXrayRunNextpnr>::new()),
        DynJobKind::new(ExternalCommandJobKind::<YosysNextpnrXray>::new()),
    ]
}

View file

@ -3,22 +3,25 @@
use crate::{
build::{
CommandParams, DynJobKind, GetBaseJob, JobAndDependencies, JobArgsAndDependencies,
JobDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs,
WriteArgs,
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob,
GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem,
JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
external::{
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
},
firrtl::FirrtlJobKind,
interned_known_utf8_method, interned_known_utf8_path_buf_method,
firrtl::{Firrtl, FirrtlJobKind},
},
intern::{Intern, Interned},
intern::{Intern, InternSlice, Interned},
util::job_server::AcquiredJob,
};
use clap::Args;
use eyre::{Context, bail};
use serde::{Deserialize, Serialize};
use std::{fmt, mem};
use std::{
ffi::{OsStr, OsString},
fmt, mem,
path::Path,
};
/// based on [LLVM Circt's recommended lowering options][lowering-options]
///
@ -70,7 +73,7 @@ impl VerilogDialect {
#[non_exhaustive]
pub struct UnadjustedVerilogArgs {
#[arg(long = "firtool-extra-arg", value_name = "ARG")]
pub firtool_extra_args: Vec<String>,
pub firtool_extra_args: Vec<OsString>,
/// adapt the generated Verilog for a particular toolchain
#[arg(long)]
pub verilog_dialect: Option<VerilogDialect>,
@ -85,16 +88,14 @@ impl ToArgs for UnadjustedVerilogArgs {
verilog_dialect,
verilog_debug,
} = *self;
args.extend(
firtool_extra_args
.iter()
.map(|arg| format!("--firtool-extra-arg={arg}")),
);
for arg in firtool_extra_args {
args.write_long_option_eq("firtool-extra-arg", arg);
}
if let Some(verilog_dialect) = verilog_dialect {
args.write_arg(format_args!("--verilog-dialect={verilog_dialect}"));
args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str());
}
if verilog_debug {
args.write_str_arg("--verilog-debug");
args.write_arg("--verilog-debug");
}
}
}
@ -110,23 +111,23 @@ impl ExternalProgramTrait for Firtool {
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
pub struct UnadjustedVerilog {
firrtl_file: Interned<str>,
firrtl_file_name: Interned<str>,
unadjusted_verilog_file: Interned<str>,
unadjusted_verilog_file_name: Interned<str>,
firtool_extra_args: Interned<[Interned<str>]>,
firrtl_file: Interned<Path>,
firrtl_file_name: Interned<OsStr>,
unadjusted_verilog_file: Interned<Path>,
unadjusted_verilog_file_name: Interned<OsStr>,
firtool_extra_args: Interned<[Interned<OsStr>]>,
verilog_dialect: Option<VerilogDialect>,
verilog_debug: bool,
}
impl UnadjustedVerilog {
pub fn firrtl_file(&self) -> Interned<str> {
pub fn firrtl_file(&self) -> Interned<Path> {
self.firrtl_file
}
pub fn unadjusted_verilog_file(&self) -> Interned<str> {
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
self.unadjusted_verilog_file
}
pub fn firtool_extra_args(&self) -> Interned<[Interned<str>]> {
pub fn firtool_extra_args(&self) -> Interned<[Interned<OsStr>]> {
self.firtool_extra_args
}
pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
@ -140,6 +141,7 @@ impl UnadjustedVerilog {
impl ExternalCommand for UnadjustedVerilog {
type AdditionalArgs = UnadjustedVerilogArgs;
type AdditionalJobData = UnadjustedVerilog;
type BaseJobPosition = GetJobPositionDependencies<GetJobPositionJob>;
type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
type ExternalProgram = Firtool;
@ -150,11 +152,12 @@ impl ExternalCommand for UnadjustedVerilog {
fn args_to_jobs(
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<(
Self::AdditionalJobData,
<Self::Dependencies as JobDependencies>::JobsAndKinds,
)> {
args.args_to_jobs_external_simple(params, |args, dependencies| {
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
let UnadjustedVerilogArgs {
firtool_extra_args,
verilog_dialect,
@ -165,21 +168,18 @@ impl ExternalCommand for UnadjustedVerilog {
.job
.job
.file_with_ext("unadjusted.v");
let firrtl_job = dependencies.get_job::<Firrtl, _>();
Ok(UnadjustedVerilog {
firrtl_file: dependencies.job.job.firrtl_file(),
firrtl_file_name: interned_known_utf8_method(
dependencies.job.job.firrtl_file(),
|v| v.file_name().expect("known to have file name"),
),
firrtl_file: firrtl_job.firrtl_file(),
firrtl_file_name: firrtl_job
.firrtl_file()
.interned_file_name()
.expect("known to have file name"),
unadjusted_verilog_file,
unadjusted_verilog_file_name: interned_known_utf8_method(
unadjusted_verilog_file,
|v| v.file_name().expect("known to have file name"),
),
firtool_extra_args: firtool_extra_args
.into_iter()
.map(str::intern_owned)
.collect(),
unadjusted_verilog_file_name: unadjusted_verilog_file
.interned_file_name()
.expect("known to have file name"),
firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(),
verilog_dialect,
verilog_debug,
})
@ -189,12 +189,12 @@ impl ExternalCommand for UnadjustedVerilog {
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.additional_job_data().firrtl_file,
}][..]
.intern()
}]
.intern_slice()
}
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<str>]> {
[job.additional_job_data().unadjusted_verilog_file][..].intern()
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
[job.additional_job_data().unadjusted_verilog_file].intern_slice()
}
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
@ -208,18 +208,18 @@ impl ExternalCommand for UnadjustedVerilog {
verilog_debug,
} = *job.additional_job_data();
args.write_interned_arg(firrtl_file_name);
args.write_str_arg("-o");
args.write_arg("-o");
args.write_interned_arg(unadjusted_verilog_file_name);
if verilog_debug {
args.write_str_args(["-g", "--preserve-values=all"]);
args.write_args(["-g", "--preserve-values=all"]);
}
if let Some(dialect) = verilog_dialect {
args.write_str_args(dialect.firtool_extra_args().iter().copied());
args.write_args(dialect.firtool_extra_args().iter().copied());
}
args.write_interned_args(firtool_extra_args);
}
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<str>> {
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
Some(job.output_dir())
}
@ -251,23 +251,23 @@ impl ToArgs for VerilogJobArgs {
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct VerilogJob {
output_dir: Interned<str>,
unadjusted_verilog_file: Interned<str>,
main_verilog_file: Interned<str>,
output_dir: Interned<Path>,
unadjusted_verilog_file: Interned<Path>,
main_verilog_file: Interned<Path>,
}
impl VerilogJob {
pub fn output_dir(&self) -> Interned<str> {
pub fn output_dir(&self) -> Interned<Path> {
self.output_dir
}
pub fn unadjusted_verilog_file(&self) -> Interned<str> {
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
self.unadjusted_verilog_file
}
pub fn main_verilog_file(&self) -> Interned<str> {
pub fn main_verilog_file(&self) -> Interned<Path> {
self.main_verilog_file
}
#[track_caller]
pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<str>] {
pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<Path>] {
match additional_files {
JobItem::DynamicPaths {
paths,
@ -277,31 +277,31 @@ impl VerilogJob {
}
}
pub fn all_verilog_files(
main_verilog_file: Interned<str>,
additional_files: &[Interned<str>],
) -> eyre::Result<Interned<[Interned<str>]>> {
main_verilog_file: Interned<Path>,
additional_files: &[Interned<Path>],
) -> eyre::Result<Interned<[Interned<Path>]>> {
let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1));
for verilog_file in [main_verilog_file].iter().chain(additional_files) {
if !(verilog_file.ends_with(".v") || verilog_file.ends_with(".sv")) {
if !["v", "sv"]
.iter()
.any(|extension| verilog_file.extension() == Some(extension.as_ref()))
{
continue;
}
let verilog_file = std::path::absolute(verilog_file)
.and_then(|v| {
v.into_os_string().into_string().map_err(|_| {
std::io::Error::new(std::io::ErrorKind::Other, "path is not valid UTF-8")
})
})
.wrap_err_with(|| {
format!("converting {verilog_file:?} to an absolute path failed")
})?;
if verilog_file.contains(|ch: char| {
(ch != ' ' && ch != '\t' && ch.is_ascii_whitespace()) || ch == '"'
}) {
let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| {
format!("converting {verilog_file:?} to an absolute path failed")
})?;
if verilog_file
.as_os_str()
.as_encoded_bytes()
.iter()
.any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
{
bail!("verilog file path contains characters that aren't permitted");
}
retval.push(str::intern_owned(verilog_file));
retval.push(verilog_file.intern_deref());
}
Ok(Intern::intern_owned(retval))
Ok(retval.intern_slice())
}
}
@ -317,17 +317,19 @@ impl JobKind for VerilogJobKind {
fn args_to_jobs(
args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.args_to_jobs_simple(params, |_kind, args, dependencies| {
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
let VerilogJobArgs {} = args;
let base_job = dependencies.get_job::<BaseJob, _>();
Ok(VerilogJob {
output_dir: dependencies.base_job().output_dir(),
output_dir: base_job.output_dir(),
unadjusted_verilog_file: dependencies
.job
.job
.additional_job_data()
.unadjusted_verilog_file(),
main_verilog_file: dependencies.base_job().file_with_ext("v"),
main_verilog_file: base_job.file_with_ext("v"),
})
})
}
@ -335,8 +337,8 @@ impl JobKind for VerilogJobKind {
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.unadjusted_verilog_file,
}][..]
.intern()
}]
.intern_slice()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
@ -347,8 +349,8 @@ impl JobKind for VerilogJobKind {
JobItemName::DynamicPaths {
source_job_name: self.name(),
},
][..]
.intern()
]
.intern_slice()
}
fn name(self) -> Interned<str> {
@ -364,6 +366,7 @@ impl JobKind for VerilogJobKind {
job: &Self::Job,
inputs: &[JobItem],
_params: &JobParams,
_global_params: &GlobalParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
@ -384,8 +387,7 @@ impl JobKind for VerilogJobKind {
);
};
input = rest;
let next_file_name =
interned_known_utf8_path_buf_method(job.output_dir, |v| v.join(next_file_name));
let next_file_name = job.output_dir.join(next_file_name).intern_deref();
additional_outputs.push(next_file_name);
(chunk, Some(next_file_name))
} else {

View file

@ -7,7 +7,7 @@ use crate::{
ops::{ArrayLiteral, BundleLiteral, ExprPartialEq},
},
int::{Bool, DynSize},
intern::{Intern, Interned},
intern::{Intern, InternSlice, Interned},
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
source_location::SourceLocation,
ty::{
@ -549,7 +549,7 @@ macro_rules! impl_tuples {
type FilledBuilder = TupleBuilder<($(Expr<$T>,)*)>;
fn fields(&self) -> Interned<[BundleField]> {
let ($($var,)*) = self;
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*][..].intern()
[$(BundleField { name: stringify!($num).intern(), flipped: false, ty: $var.canonical() }),*].intern_slice()
}
}
impl<$($T: Type,)*> TypeWithDeref for ($($T,)*) {
@ -580,7 +580,7 @@ macro_rules! impl_tuples {
$(let $var = $var.to_expr();)*
let ty = ($(Expr::ty($var),)*);
let field_values = [$(Expr::canonical($var)),*];
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
}
}
impl<$($T: Type,)*> ToExpr for TupleBuilder<($(Expr<$T>,)*)> {
@ -590,7 +590,7 @@ macro_rules! impl_tuples {
let ($($var,)*) = self.0;
let ty = ($(Expr::ty($var),)*);
let field_values = [$(Expr::canonical($var)),*];
BundleLiteral::new(ty, field_values[..].intern()).to_expr()
BundleLiteral::new(ty, field_values.intern_slice()).to_expr()
}
}
impl<$($T: ToSimValueWithType<CanonicalType>,)*> ToSimValueWithType<CanonicalType> for ($($T,)*) {

View file

@ -39,6 +39,7 @@ use crate::{
BitSliceWriteWithBase, DebugAsRawString, GenericConstBool, HashMap, HashSet,
const_str_array_is_strictly_ascending,
},
vendor::xilinx::XilinxAnnotation,
};
use bitvec::slice::BitSlice;
use clap::value_parser;
@ -49,6 +50,7 @@ use std::{
cmp::Ordering,
collections::{BTreeMap, VecDeque},
error::Error,
ffi::OsString,
fmt::{self, Write},
fs,
hash::Hash,
@ -1881,7 +1883,11 @@ impl<'a> Exporter<'a> {
}
fn annotation(&mut self, path: AnnotationTargetPath, annotation: &Annotation) {
let data = match annotation {
Annotation::DontTouch(DontTouchAnnotation {}) => AnnotationData::DontTouch,
Annotation::DontTouch(DontTouchAnnotation {}) => {
// TODO: error if the annotated thing was renamed because of a naming conflict,
// unless Target::base() is one of the ports of the top-level module since that's handled by ScalarizedModuleABI
AnnotationData::DontTouch
}
Annotation::SVAttribute(SVAttributeAnnotation { text }) => {
AnnotationData::AttributeAnnotation { description: *text }
}
@ -1904,7 +1910,9 @@ impl<'a> Exporter<'a> {
class: str::to_string(class),
additional_fields: (*additional_fields).into(),
},
Annotation::XdcLocation(_) | Annotation::XdcIOStandard(_) => return,
Annotation::Xilinx(XilinxAnnotation::XdcLocation(_))
| Annotation::Xilinx(XilinxAnnotation::XdcIOStandard(_))
| Annotation::Xilinx(XilinxAnnotation::XdcCreateClock(_)) => return,
};
self.annotations.push(FirrtlAnnotation {
data,
@ -2452,7 +2460,7 @@ impl<T: ?Sized + FileBackendTrait> FileBackendTrait for &'_ mut T {
pub struct FileBackend {
pub dir_path: PathBuf,
pub circuit_name: Option<String>,
pub top_fir_file_stem: Option<String>,
pub top_fir_file_stem: Option<OsString>,
}
impl FileBackend {
@ -2501,7 +2509,7 @@ impl FileBackendTrait for FileBackend {
) -> Result<(), Self::Error> {
let top_fir_file_stem = self
.top_fir_file_stem
.get_or_insert_with(|| circuit_name.clone());
.get_or_insert_with(|| circuit_name.clone().into());
self.circuit_name = Some(circuit_name);
let mut path = self.dir_path.join(top_fir_file_stem);
if let Some(parent) = path.parent().filter(|v| !v.as_os_str().is_empty()) {
@ -2777,7 +2785,7 @@ impl ToArgs for ExportOptions {
__private: ExportOptionsPrivate(()),
} = *self;
if !simplify_memories {
args.write_str_arg("--no-simplify-memories");
args.write_arg("--no-simplify-memories");
}
let simplify_enums = simplify_enums.map(|v| {
clap::ValueEnum::to_possible_value(&v).expect("there are no skipped variants")
@ -2786,7 +2794,7 @@ impl ToArgs for ExportOptions {
None => OptionSimplifyEnumsKindValueParser::NONE_NAME,
Some(v) => v.get_name(),
};
args.write_arg(format_args!("--simplify-enums={simplify_enums}"));
args.write_long_option_eq("simplify-enums", simplify_enums);
}
}

View file

@ -8,7 +8,7 @@ use crate::{
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
},
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
intern::{Intern, Interned},
intern::{Intern, InternSlice, Interned},
phantom_const::PhantomConst,
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
source_location::SourceLocation,
@ -112,8 +112,8 @@ impl BundleType for UIntInRangeMaskType {
flipped: false,
ty: range.canonical(),
},
][..]
.intern()
]
.intern_slice()
}
}
@ -409,8 +409,8 @@ macro_rules! define_uint_in_range_type {
flipped: false,
ty: range.canonical(),
},
][..]
.intern()
]
.intern_slice()
}
}

View file

@ -9,13 +9,13 @@ use std::{
any::{Any, TypeId},
borrow::{Borrow, Cow},
cmp::Ordering,
ffi::OsStr,
ffi::{OsStr, OsString},
fmt,
hash::{BuildHasher, Hash, Hasher},
iter::FusedIterator,
marker::PhantomData,
ops::Deref,
path::Path,
path::{Path, PathBuf},
sync::{Mutex, RwLock},
};
@ -289,15 +289,266 @@ impl InternedCompare for BitSlice {
}
}
impl InternedCompare for str {
type InternedCompareKey = PtrEqWithMetadata<Self>;
fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
PtrEqWithMetadata(this)
/// Shared machinery for interning string-like unsized types (`str`, `OsStr`, `Path`)
/// through the `[u8]` interner, so equal byte sequences share one interned allocation
/// regardless of which of these types they were interned as.
///
/// # Safety
/// `as_bytes` and `from_bytes_unchecked` must return the same pointer as the input.
/// all values returned by `as_bytes` must be valid to pass to `from_bytes_unchecked`.
/// `into_bytes` must return the exact same thing as `as_bytes`.
/// `Interned<Self>` must contain the exact same references as `Interned<[u8]>`,
/// so they can be safely interconverted without needing re-interning.
unsafe trait InternStrLike: ToOwned {
    /// Views this value as its underlying bytes (must be pointer-identical to the input).
    fn as_bytes(this: &Self) -> &[u8];
    /// Converts the owned form into its underlying byte buffer
    /// (must produce exactly the bytes `as_bytes` would).
    fn into_bytes(this: Self::Owned) -> Vec<u8>;
    /// Safety: `bytes` must be a valid sequence of bytes for this type. All UTF-8 sequences are valid.
    unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self;
}
/// Implements interning support for a string-like unsized type `$ty`
/// (whose owned form is `$Owned`) in terms of the shared `[u8]` interner:
/// `InternedCompare`, `Intern`, `Default`, `Deserialize`, and the
/// owned-type `From` conversions for `Interned<$ty>`.
macro_rules! impl_intern_str_like {
    ($ty:ty, owned = $Owned:ty) => {
        impl InternedCompare for $ty {
            // compare by identity (pointer + length) of the underlying bytes
            type InternedCompareKey = PtrEqWithMetadata<[u8]>;
            fn interned_compare_key_ref(this: &Self) -> Self::InternedCompareKey {
                PtrEqWithMetadata(InternStrLike::as_bytes(this))
            }
        }
        impl Intern for $ty {
            fn intern(&self) -> Interned<Self> {
                Self::intern_cow(Cow::Borrowed(self))
            }
            fn intern_cow(this: Cow<'_, Self>) -> Interned<Self> {
                // intern the raw bytes, then cast the resulting `Interned<[u8]>`
                // back to `Interned<$ty>` without re-interning
                Interned::cast_unchecked(
                    <[u8]>::intern_cow(match this {
                        Cow::Borrowed(v) => Cow::Borrowed(<Self as InternStrLike>::as_bytes(v)),
                        Cow::Owned(v) => {
                            // verify $Owned is correct
                            let v: $Owned = v;
                            Cow::Owned(<Self as InternStrLike>::into_bytes(v))
                        }
                    }),
                    // Safety: guaranteed safe because we got the bytes from `as_bytes`/`into_bytes`
                    |v| unsafe { <Self as InternStrLike>::from_bytes_unchecked(v) },
                )
            }
        }
        impl Default for Interned<$ty> {
            fn default() -> Self {
                // Safety: safe because the empty sequence is valid UTF-8
                unsafe { <$ty as InternStrLike>::from_bytes_unchecked(&[]) }.intern()
            }
        }
        impl<'de> Deserialize<'de> for Interned<$ty> {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                // deserialize via `Cow` so borrowed input can avoid an extra allocation
                Cow::<'de, $ty>::deserialize(deserializer).map(Intern::intern_cow)
            }
        }
        impl From<$Owned> for Interned<$ty> {
            fn from(v: $Owned) -> Self {
                v.intern_deref()
            }
        }
        impl From<Interned<$ty>> for $Owned {
            fn from(v: Interned<$ty>) -> Self {
                Interned::into_inner(v).into()
            }
        }
        impl From<Interned<$ty>> for Box<$ty> {
            fn from(v: Interned<$ty>) -> Self {
                Interned::into_inner(v).into()
            }
        }
    };
}
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `str`
unsafe impl InternStrLike for str {
    fn as_bytes(this: &Self) -> &[u8] {
        this.as_bytes()
    }
    fn into_bytes(this: Self::Owned) -> Vec<u8> {
        this.into_bytes()
    }
    unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
        // Safety: `bytes` is guaranteed UTF-8 by the caller
        unsafe { str::from_utf8_unchecked(bytes) }
    }
}
// generates Intern/InternedCompare/Default/Deserialize/From impls for `str`
impl_intern_str_like!(str, owned = String);
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr`
unsafe impl InternStrLike for OsStr {
    fn as_bytes(this: &Self) -> &[u8] {
        this.as_encoded_bytes()
    }
    fn into_bytes(this: Self::Owned) -> Vec<u8> {
        this.into_encoded_bytes()
    }
    unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
        // Safety: `bytes` is guaranteed valid for `OsStr` by the caller
        unsafe { OsStr::from_encoded_bytes_unchecked(bytes) }
    }
}
// generates Intern/InternedCompare/Default/Deserialize/From impls for `OsStr`
impl_intern_str_like!(OsStr, owned = OsString);
// Safety: satisfies `InternStrLike`'s requirements where the valid sequences for `from_bytes_unchecked` matches `OsStr`
unsafe impl InternStrLike for Path {
    fn as_bytes(this: &Self) -> &[u8] {
        this.as_os_str().as_encoded_bytes()
    }
    fn into_bytes(this: Self::Owned) -> Vec<u8> {
        this.into_os_string().into_encoded_bytes()
    }
    unsafe fn from_bytes_unchecked(bytes: &[u8]) -> &Self {
        // Safety: `bytes` is guaranteed valid for `OsStr` by the caller
        unsafe { Path::new(OsStr::from_encoded_bytes_unchecked(bytes)) }
    }
}
// generates Intern/InternedCompare/Default/Deserialize/From impls for `Path`
impl_intern_str_like!(Path, owned = PathBuf);
impl Interned<str> {
    /// Converts interned bytes to an interned `str`, validating UTF-8.
    /// No re-interning is needed since both types share the same backing bytes.
    pub fn from_utf8(v: Interned<[u8]>) -> Result<Self, std::str::Utf8Error> {
        Interned::try_cast_unchecked(v, str::from_utf8)
    }
    /// Views this interned string as interned bytes (no re-interning).
    pub fn as_interned_bytes(self) -> Interned<[u8]> {
        Interned::cast_unchecked(self, str::as_bytes)
    }
    /// Views this interned string as an interned `OsStr` (no re-interning).
    pub fn as_interned_os_str(self) -> Interned<OsStr> {
        Interned::cast_unchecked(self, AsRef::as_ref)
    }
    /// Views this interned string as an interned `Path` (no re-interning).
    pub fn as_interned_path(self) -> Interned<Path> {
        Interned::cast_unchecked(self, AsRef::as_ref)
    }
}
impl From<Interned<str>> for Interned<OsStr> {
    fn from(value: Interned<str>) -> Self {
        value.as_interned_os_str()
    }
}
impl From<Interned<str>> for Interned<Path> {
    fn from(value: Interned<str>) -> Self {
        value.as_interned_path()
    }
}
impl Interned<OsStr> {
    /// Views this interned `OsStr` as its interned encoded bytes (no re-interning).
    pub fn as_interned_encoded_bytes(self) -> Interned<[u8]> {
        Interned::cast_unchecked(self, OsStr::as_encoded_bytes)
    }
    /// Converts to an interned `str` if the contents are valid UTF-8, else `None`.
    pub fn to_interned_str(self) -> Option<Interned<str>> {
        Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
    }
    /// Returns a `Display`able wrapper for this `OsStr`; the `'static` lifetime
    /// comes from the interner's leaked storage.
    pub fn display(self) -> std::ffi::os_str::Display<'static> {
        Self::into_inner(self).display()
    }
    /// Views this interned `OsStr` as an interned `Path` (no re-interning).
    pub fn as_interned_path(self) -> Interned<Path> {
        Interned::cast_unchecked(self, AsRef::as_ref)
    }
}
impl From<Interned<OsStr>> for Interned<Path> {
    fn from(value: Interned<OsStr>) -> Self {
        value.as_interned_path()
    }
}
impl Interned<Path> {
    /// Views this interned `Path` as an interned `OsStr` (no re-interning).
    pub fn as_interned_os_str(self) -> Interned<OsStr> {
        Interned::cast_unchecked(self, AsRef::as_ref)
    }
    /// Converts to an interned `str` if the path is valid UTF-8, else `None`.
    pub fn to_interned_str(self) -> Option<Interned<str>> {
        Interned::try_cast_unchecked(self, |v| v.to_str().ok_or(())).ok()
    }
    /// Returns a `Display`able wrapper for this path; the `'static` lifetime
    /// comes from the interner's leaked storage.
    pub fn display(self) -> std::path::Display<'static> {
        Self::into_inner(self).display()
    }
    /// Interns and returns this path's final component, or `None` if it has none.
    pub fn interned_file_name(self) -> Option<Interned<OsStr>> {
        Some(self.file_name()?.intern())
    }
}
impl From<Interned<Path>> for Interned<OsStr> {
    fn from(value: Interned<Path>) -> Self {
        value.as_interned_os_str()
    }
}
/// Conversion of owned or borrowed slice-like containers into an interned slice.
///
/// Implemented for `Vec<T>`, boxed slices, arrays (by value, boxed, and by
/// reference), and slice references, so call sites can uniformly write
/// `value.intern_slice()`.
pub trait InternSlice: Sized {
    /// Element type of the resulting `Interned<[Self::Element]>`.
    type Element: 'static + Send + Sync + Clone + Hash + Eq;
    /// Interns `self`'s elements, returning the canonical interned slice.
    fn intern_slice(self) -> Interned<[Self::Element]>;
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Box<[T]> {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        // convert to `Vec` so the owned buffer can be passed to the interner
        self.into_vec().intern_slice()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for Vec<T> {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        self.intern_deref()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ [T] {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        self.intern()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq> InternSlice for &'_ mut [T] {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        self.intern()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for [T; N] {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        // intern by reference; elements are cloned by the interner as needed
        (&self).intern_slice()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for Box<[T; N]> {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        // unsize `Box<[T; N]>` to `Box<[T]>` and reuse that impl
        let this: Box<[T]> = self;
        this.intern_slice()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ [T; N] {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        let this: &[T] = self;
        this.intern()
    }
}
impl<T: 'static + Send + Sync + Clone + Hash + Eq, const N: usize> InternSlice for &'_ mut [T; N] {
    type Element = T;
    fn intern_slice(self) -> Interned<[Self::Element]> {
        let this: &[T] = self;
        this.intern()
    }
}
pub trait Intern: Any + Send + Sync {
fn intern(&self) -> Interned<Self>;
fn intern_deref(self) -> Interned<Self::Target>
where
Self: Sized + Deref<Target: Intern + ToOwned<Owned = Self>>,
{
Self::Target::intern_owned(self)
}
fn intern_sized(self) -> Interned<Self>
where
Self: Clone,
@ -318,6 +569,30 @@ pub trait Intern: Any + Send + Sync {
}
}
/// Interning a `Cow` forwards to `intern_cow`, which can reuse an owned buffer.
impl<T: ?Sized + Intern + ToOwned> From<Cow<'_, T>> for Interned<T> {
    fn from(value: Cow<'_, T>) -> Self {
        Intern::intern_cow(value)
    }
}
/// Interning from a reference always goes through the by-ref `intern` path.
impl<T: ?Sized + Intern> From<&'_ T> for Interned<T> {
    fn from(value: &'_ T) -> Self {
        Intern::intern(value)
    }
}
/// Interning a sized owned value consumes it via `intern_sized`.
impl<T: Intern + Clone> From<T> for Interned<T> {
    fn from(value: T) -> Self {
        Intern::intern_sized(value)
    }
}
/// An interned reference is `'static`, so it can be borrowed as a `Cow` of any lifetime.
impl<T: ?Sized + 'static + Send + Sync + ToOwned> From<Interned<T>> for Cow<'_, T> {
    fn from(value: Interned<T>) -> Self {
        Cow::Borrowed(Interned::into_inner(value))
    }
}
struct InternerState<T: ?Sized + 'static + Send + Sync> {
table: HashTable<&'static T>,
hasher: DefaultBuildHasher,
@ -383,12 +658,6 @@ impl Interner<BitSlice> {
}
}
impl Interner<str> {
fn intern_str(&self, value: Cow<'_, str>) -> Interned<str> {
self.intern(|value| value.into_owned().leak(), value)
}
}
pub struct Interned<T: ?Sized + 'static + Send + Sync> {
inner: &'static T,
}
@ -418,9 +687,9 @@ forward_fmt_trait!(Pointer);
forward_fmt_trait!(UpperExp);
forward_fmt_trait!(UpperHex);
impl<T: ?Sized + 'static + Send + Sync> AsRef<T> for Interned<T> {
fn as_ref(&self) -> &T {
self
impl<T: ?Sized + 'static + Send + Sync + AsRef<U>, U: ?Sized> AsRef<U> for Interned<T> {
fn as_ref(&self) -> &U {
T::as_ref(self)
}
}
@ -498,19 +767,25 @@ where
String: FromIterator<I>,
{
fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
str::intern_owned(FromIterator::from_iter(iter))
String::from_iter(iter).intern_deref()
}
}
impl AsRef<OsStr> for Interned<str> {
fn as_ref(&self) -> &OsStr {
str::as_ref(self)
impl<I> FromIterator<I> for Interned<Path>
where
    PathBuf: FromIterator<I>,
{
    /// Collects the items into a `PathBuf`, then interns the result.
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        let path_buf: PathBuf = iter.into_iter().collect();
        path_buf.intern_deref()
    }
}
impl AsRef<Path> for Interned<str> {
fn as_ref(&self) -> &Path {
str::as_ref(self)
impl<I> FromIterator<I> for Interned<OsStr>
where
    OsString: FromIterator<I>,
{
    /// Collects the items into an `OsString`, then interns the result.
    fn from_iter<T: IntoIterator<Item = I>>(iter: T) -> Self {
        let os_string: OsString = iter.into_iter().collect();
        os_string.intern_deref()
    }
}
@ -550,24 +825,12 @@ impl<T: 'static + Clone + Send + Sync> From<Interned<[T]>> for Box<[T]> {
}
}
impl From<Interned<str>> for String {
fn from(value: Interned<str>) -> Self {
String::from(&*value)
}
}
impl<I> Default for Interned<[I]>
where
[I]: Intern,
{
fn default() -> Self {
[][..].intern()
}
}
impl Default for Interned<str> {
fn default() -> Self {
"".intern()
Intern::intern(&[])
}
}
@ -698,15 +961,6 @@ impl<'de> Deserialize<'de> for Interned<BitSlice> {
}
}
impl<'de> Deserialize<'de> for Interned<str> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
String::deserialize(deserializer).map(Intern::intern_owned)
}
}
impl<T: Clone + Send + Sync + 'static + Hash + Eq> Intern for T {
fn intern(&self) -> Interned<Self> {
Self::intern_cow(Cow::Borrowed(self))
@ -767,26 +1021,6 @@ impl Intern for BitSlice {
}
}
impl Intern for str {
fn intern(&self) -> Interned<Self> {
Self::intern_cow(Cow::Borrowed(self))
}
fn intern_owned(this: <Self as ToOwned>::Owned) -> Interned<Self>
where
Self: ToOwned,
{
Self::intern_cow(Cow::Owned(this))
}
fn intern_cow(this: Cow<'_, Self>) -> Interned<Self>
where
Self: ToOwned,
{
Interner::get().intern_str(this)
}
}
pub trait MemoizeGeneric: 'static + Send + Sync + Hash + Eq + Copy {
type InputRef<'a>: 'a + Send + Sync + Hash + Copy;
type InputOwned: 'static + Send + Sync;

View file

@ -99,13 +99,14 @@ pub mod intern;
pub mod memory;
pub mod module;
pub mod phantom_const;
pub mod platform;
pub mod prelude;
pub mod reg;
pub mod reset;
pub mod sim;
pub mod source_location;
pub mod target;
pub mod testing;
pub mod ty;
pub mod util;
pub mod vendor;
pub mod wire;

View file

@ -19,6 +19,7 @@ use crate::{
int::{Bool, DynSize, Size},
intern::{Intern, Interned},
memory::{Mem, MemBuilder, MemBuilderTarget, PortName},
platform::PlatformIOBuilder,
reg::Reg,
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
sim::{ExternModuleSimGenerator, ExternModuleSimulation},
@ -2119,6 +2120,27 @@ impl ModuleBuilder {
self.output_with_loc(implicit_name.0, SourceLocation::caller(), ty)
}
    /// Like [`Self::add_platform_io`], but with an explicitly-supplied name and
    /// `source_location` instead of an implicit name and the caller's location.
    #[track_caller]
    pub fn add_platform_io_with_loc(
        &self,
        name: &str,
        source_location: SourceLocation,
        platform_io_builder: PlatformIOBuilder<'_>,
    ) -> Expr<Bundle> {
        // the builder does the actual work of adding the I/O to this module
        platform_io_builder.add_platform_io(name, source_location, self)
    }
    /// Adds the platform I/O described by `platform_io_builder` to this module,
    /// named by `implicit_name` and attributed to the caller's source location,
    /// returning the resulting bundle expression.
    /// Delegates to [`PlatformIOBuilder::add_platform_io`].
    #[track_caller]
    pub fn add_platform_io(
        &self,
        implicit_name: ImplicitName<'_>,
        platform_io_builder: PlatformIOBuilder<'_>,
    ) -> Expr<Bundle> {
        self.add_platform_io_with_loc(
            implicit_name.0,
            SourceLocation::caller(),
            platform_io_builder,
        )
    }
#[track_caller]
pub fn run<T: BundleType>(
name: &str,
module_kind: ModuleKind,
@ -2743,6 +2765,22 @@ impl<T: Type> ModuleIO<T> {
source_location,
}
}
    /// Converts a type-erased `ModuleIO<CanonicalType>` back into a typed
    /// `ModuleIO<T>`. Only the type is converted (via `T::from_canonical`);
    /// the name, bundle field, `id`, and source location are preserved as-is.
    pub fn from_canonical(canonical_module_io: ModuleIO<CanonicalType>) -> Self {
        // exhaustive destructure: adding a field to `ModuleIO` forces updating this
        let ModuleIO {
            containing_module_name,
            bundle_field,
            id,
            ty,
            source_location,
        } = canonical_module_io;
        Self {
            containing_module_name,
            bundle_field,
            id,
            ty: T::from_canonical(ty),
            source_location,
        }
    }
pub fn bundle_field(&self) -> BundleField {
self.bundle_field
}

View file

@ -1802,6 +1802,7 @@ impl_run_pass_clone!([] ExternModuleParameter);
impl_run_pass_clone!([] SIntValue);
impl_run_pass_clone!([] std::ops::Range<usize>);
impl_run_pass_clone!([] UIntValue);
impl_run_pass_clone!([] crate::vendor::xilinx::XilinxAnnotation);
impl_run_pass_copy!([] BlackBoxInlineAnnotation);
impl_run_pass_copy!([] BlackBoxPathAnnotation);
impl_run_pass_copy!([] bool);
@ -1817,8 +1818,6 @@ impl_run_pass_copy!([] UInt);
impl_run_pass_copy!([] usize);
impl_run_pass_copy!([] FormalKind);
impl_run_pass_copy!([] PhantomConst);
impl_run_pass_copy!([] crate::build::vendor::xilinx::XdcIOStandardAnnotation);
impl_run_pass_copy!([] crate::build::vendor::xilinx::XdcLocationAnnotation);
macro_rules! impl_run_pass_for_struct {
(
@ -2219,8 +2218,7 @@ impl_run_pass_for_enum! {
BlackBoxPath(v),
DocString(v),
CustomFirrtl(v),
XdcLocation(v),
XdcIOStandard(v),
Xilinx(v),
}
}

View file

@ -10,7 +10,7 @@ use crate::{
},
hdl,
int::UInt,
intern::{Intern, Interned, Memoize},
intern::{Intern, InternSlice, Interned, Memoize},
memory::{DynPortType, Mem, MemPort},
module::{
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtIf, StmtMatch, StmtWire,
@ -620,7 +620,7 @@ fn match_int_tag(
block,
Block {
memories: Default::default(),
stmts: [Stmt::from(retval)][..].intern(),
stmts: [Stmt::from(retval)].intern_slice(),
},
],
};

View file

@ -7,7 +7,6 @@ use crate::{
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
},
array::ArrayType,
build::vendor::xilinx::{XdcIOStandardAnnotation, XdcLocationAnnotation},
bundle::{Bundle, BundleField, BundleType},
clock::Clock,
enum_::{Enum, EnumType, EnumVariant},
@ -34,6 +33,9 @@ use crate::{
sim::{ExternModuleSimulation, value::DynSimOnly},
source_location::SourceLocation,
ty::{CanonicalType, Type},
vendor::xilinx::{
XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation,
},
wire::Wire,
};
use num_bigint::{BigInt, BigUint};

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,62 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{intern::Intern, prelude::*};
use ordered_float::NotNan;
use serde::{Deserialize, Serialize};
/// Compile-time properties of a [`ClockInput`], carried in its `PhantomConst` field.
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ClockInputProperties {
    // clock frequency; presumably in Hz — NOTE(review): unit not stated here, confirm
    pub frequency: NotNan<f64>,
}
/// A clock input peripheral: the clock signal itself plus its compile-time
/// [`ClockInputProperties`] (currently just the frequency).
#[hdl(no_runtime_generics, no_static)]
pub struct ClockInput {
    /// The clock signal.
    pub clk: Clock,
    /// Compile-time properties of this clock input.
    pub properties: PhantomConst<ClockInputProperties>,
}
impl ClockInput {
    /// Creates a `ClockInput` with the given clock `frequency`.
    ///
    /// # Panics
    /// Panics if `frequency` is not a finite, strictly positive number
    /// (NaN, infinities, zero, and negative values are all rejected).
    #[track_caller]
    pub fn new(frequency: f64) -> Self {
        let valid = frequency.is_finite() && frequency > 0.0;
        assert!(valid, "invalid clock frequency: {frequency}");
        let properties = ClockInputProperties {
            frequency: NotNan::new(frequency).expect("just checked"),
        };
        Self {
            clk: Clock,
            properties: PhantomConst::new(properties.intern_sized()),
        }
    }
    /// Returns the clock frequency this input was constructed with.
    pub fn frequency(self) -> f64 {
        self.properties.get().frequency.into_inner()
    }
}
/// A single LED, with one drive signal.
#[hdl]
pub struct Led {
    /// Drive signal for the LED.
    pub on: Bool,
}
/// An RGB LED, with one drive signal per color channel.
#[hdl]
pub struct RgbLed {
    /// Red channel.
    pub r: Bool,
    /// Green channel.
    pub g: Bool,
    /// Blue channel.
    pub b: Bool,
}
#[hdl]
/// UART, used as an output from the FPGA
pub struct Uart {
    /// transmit from the FPGA's perspective
    pub tx: Bool,
    /// receive from the FPGA's perspective
    #[hdl(flip)]
    pub rx: Bool,
}

View file

@ -28,6 +28,7 @@ pub use crate::{
memory, memory_array, memory_with_init, reg_builder, wire,
},
phantom_const::PhantomConst,
platform::{DynPlatform, Platform, PlatformIOBuilder, peripherals},
reg::Reg,
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
sim::{

View file

@ -12,7 +12,9 @@ use crate::{
},
},
int::BoolOrIntType,
intern::{Intern, Interned, InternedCompare, PtrEqWithTypeId, SupportsPtrEqWithTypeId},
intern::{
Intern, InternSlice, Interned, InternedCompare, PtrEqWithTypeId, SupportsPtrEqWithTypeId,
},
module::{
ModuleIO,
transform::visit::{Fold, Folder, Visit, Visitor},
@ -262,7 +264,7 @@ impl_trace_decl! {
}),
Instance(TraceInstance {
fn children(self) -> _ {
[self.instance_io.into(), self.module.into()][..].intern()
[self.instance_io.into(), self.module.into()].intern_slice()
}
name: Interned<str>,
instance_io: TraceBundle,
@ -282,7 +284,7 @@ impl_trace_decl! {
}),
MemPort(TraceMemPort {
fn children(self) -> _ {
[self.bundle.into()][..].intern()
[self.bundle.into()].intern_slice()
}
name: Interned<str>,
bundle: TraceBundle,
@ -290,7 +292,7 @@ impl_trace_decl! {
}),
Wire(TraceWire {
fn children(self) -> _ {
[*self.child][..].intern()
[*self.child].intern_slice()
}
name: Interned<str>,
child: Interned<TraceDecl>,
@ -298,7 +300,7 @@ impl_trace_decl! {
}),
Reg(TraceReg {
fn children(self) -> _ {
[*self.child][..].intern()
[*self.child].intern_slice()
}
name: Interned<str>,
child: Interned<TraceDecl>,
@ -306,7 +308,7 @@ impl_trace_decl! {
}),
ModuleIO(TraceModuleIO {
fn children(self) -> _ {
[*self.child][..].intern()
[*self.child].intern_slice()
}
name: Interned<str>,
child: Interned<TraceDecl>,

View file

@ -14,7 +14,7 @@ use crate::{
},
},
int::BoolOrIntType,
intern::{Intern, Interned, Memoize},
intern::{Intern, InternSlice, Interned, Memoize},
memory::PortKind,
module::{
AnnotatedModuleIO, Block, ExternModuleBody, Id, InstantiatedModule, ModuleBody, NameId,
@ -3950,8 +3950,8 @@ impl Compiler {
[Cond {
body: CondBody::IfTrue { cond },
source_location: reg.source_location(),
}][..]
.intern(),
}]
.intern_slice(),
lhs,
init,
reg.source_location(),

View file

@ -1,202 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{intern::Interned, util::job_server::AcquiredJob};
use std::{
any::Any,
fmt,
iter::FusedIterator,
sync::{Arc, Mutex},
};
/// Marker trait for peripherals a [`Target`] provides.
pub trait Peripheral: Any + Send + Sync + fmt::Debug {}
/// A named tool that can be run as part of a build.
pub trait Tool: Any + Send + Sync + fmt::Debug {
    /// The tool's name.
    fn name(&self) -> Interned<str>;
    /// Runs the tool, using `acquired_job` for job-server accounting.
    fn run(&self, acquired_job: &mut AcquiredJob);
}
/// A build target, exposing the peripherals it provides.
pub trait Target: Any + Send + Sync + fmt::Debug {
    /// The target's name — used as its key in the global targets registry.
    fn name(&self) -> Interned<str>;
    /// The peripherals this target provides.
    fn peripherals(&self) -> Interned<[Interned<dyn Peripheral>]>;
}
/// Registry of targets stored as a name-sorted, name-deduplicated vec of
/// `(name, target)` pairs — kept sorted so `TargetsSnapshot::get` can binary-search.
#[derive(Clone)]
struct TargetsMap(Vec<(Interned<str>, Interned<dyn Target>)>);
impl TargetsMap {
    /// Sorts by name and removes entries with duplicate names.
    /// The sort is stable, so for duplicate names the earlier entry wins.
    fn sort(&mut self) {
        self.0.sort_by(|(k1, _), (k2, _)| str::cmp(k1, k2));
        self.0.dedup_by_key(|(k, _)| *k);
    }
    /// Builds a map from an arbitrary-order `(name, target)` vec.
    fn from_unsorted_vec(unsorted_vec: Vec<(Interned<str>, Interned<dyn Target>)>) -> Self {
        let mut retval = Self(unsorted_vec);
        retval.sort();
        retval
    }
    /// Appends `additional` entries, then restores the sorted/deduped invariant.
    fn extend_from_unsorted_slice(&mut self, additional: &[(Interned<str>, Interned<dyn Target>)]) {
        self.0.extend_from_slice(additional);
        self.sort();
    }
}
impl Default for TargetsMap {
    fn default() -> Self {
        Self::from_unsorted_vec(vec![
            // TODO: add default targets here
        ])
    }
}
/// Grants `f` access to the process-global targets registry while holding its lock.
/// `None` means the registry has not been initialized yet.
fn access_targets<F: FnOnce(&mut Option<Arc<TargetsMap>>) -> R, R>(f: F) -> R {
    static TARGETS: Mutex<Option<Arc<TargetsMap>>> = Mutex::new(None);
    let mut targets_lock = TARGETS.lock().expect("shouldn't be poisoned");
    f(&mut targets_lock)
}
/// Registers additional targets in the global registry, keyed by `Target::name`.
/// The iterator and the `name()` calls are evaluated before the lock is taken.
pub fn add_targets<I: IntoIterator<Item = Interned<dyn Target>>>(additional: I) {
    // run iterator and target methods outside of lock
    let additional = Vec::from_iter(additional.into_iter().map(|v| (v.name(), v)));
    access_targets(|targets| {
        Arc::make_mut(targets.get_or_insert_default()).extend_from_unsorted_slice(&additional);
    });
}
/// Returns a snapshot of the currently-registered targets, initializing the
/// global registry to its default contents on first access.
pub fn targets() -> TargetsSnapshot {
    access_targets(|targets| {
        let targets = targets.get_or_insert_with(Arc::default).clone();
        TargetsSnapshot { targets }
    })
}
/// A cheap, immutable snapshot of the global targets registry.
///
/// Cloning only clones the inner `Arc`; later `add_targets` calls do not
/// affect an existing snapshot.
#[derive(Clone)]
pub struct TargetsSnapshot {
    targets: Arc<TargetsMap>,
}
impl TargetsSnapshot {
    /// Looks up a target by name via binary search (the map is kept sorted by name).
    pub fn get(&self, key: &str) -> Option<Interned<dyn Target>> {
        let index = self
            .targets
            .0
            .binary_search_by_key(&key, |(k, _v)| k)
            .ok()?;
        Some(self.targets.0[index].1)
    }
    /// Iterates over `(name, target)` pairs in name order.
    pub fn iter(&self) -> TargetsIter {
        self.into_iter()
    }
    /// The number of targets in this snapshot.
    pub fn len(&self) -> usize {
        self.targets.0.len()
    }
    /// Whether this snapshot contains no targets.
    /// Added to pair with `len` (`clippy::len_without_is_empty`).
    pub fn is_empty(&self) -> bool {
        self.targets.0.is_empty()
    }
}
impl fmt::Debug for TargetsSnapshot {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("TargetsSnapshot ")?;
        f.debug_map().entries(self).finish()
    }
}
impl IntoIterator for &'_ mut TargetsSnapshot {
    type Item = (Interned<str>, Interned<dyn Target>);
    type IntoIter = TargetsIter;
    fn into_iter(self) -> Self::IntoIter {
        // cloning a snapshot only clones its inner `Arc`, so this is cheap
        self.clone().into_iter()
    }
}
impl IntoIterator for &'_ TargetsSnapshot {
    type Item = (Interned<str>, Interned<dyn Target>);
    type IntoIter = TargetsIter;
    fn into_iter(self) -> Self::IntoIter {
        // cloning a snapshot only clones its inner `Arc`, so this is cheap
        self.clone().into_iter()
    }
}
impl IntoIterator for TargetsSnapshot {
    type Item = (Interned<str>, Interned<dyn Target>);
    type IntoIter = TargetsIter;
    fn into_iter(self) -> Self::IntoIter {
        // iterate by index into the shared map, in its sorted order
        TargetsIter {
            indexes: 0..self.targets.0.len(),
            targets: self.targets,
        }
    }
}
/// Iterator over a [`TargetsSnapshot`]'s `(name, target)` pairs.
///
/// Holds an `Arc` to the map and a range of indices; all iterator methods
/// delegate to the `Range<usize>` so `size_hint`/`count`/`nth` stay exact and O(1).
#[derive(Clone)]
pub struct TargetsIter {
    targets: Arc<TargetsMap>,
    indexes: std::ops::Range<usize>,
}
impl fmt::Debug for TargetsIter {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("TargetsIter ")?;
        // cloning is cheap (Arc + Range), so debug-printing doesn't consume self
        f.debug_map().entries(self.clone()).finish()
    }
}
impl Iterator for TargetsIter {
    type Item = (Interned<str>, Interned<dyn Target>);
    fn next(&mut self) -> Option<Self::Item> {
        Some(self.targets.0[self.indexes.next()?])
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.indexes.size_hint()
    }
    fn count(self) -> usize {
        self.indexes.len()
    }
    fn last(mut self) -> Option<Self::Item> {
        self.next_back()
    }
    fn nth(&mut self, n: usize) -> Option<Self::Item> {
        Some(self.targets.0[self.indexes.nth(n)?])
    }
    fn fold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
        self.indexes
            .fold(init, move |retval, index| f(retval, self.targets.0[index]))
    }
}
impl FusedIterator for TargetsIter {}
impl DoubleEndedIterator for TargetsIter {
    fn next_back(&mut self) -> Option<Self::Item> {
        Some(self.targets.0[self.indexes.next_back()?])
    }
    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
        Some(self.targets.0[self.indexes.nth_back(n)?])
    }
    fn rfold<B, F: FnMut(B, Self::Item) -> B>(self, init: B, mut f: F) -> B {
        self.indexes
            .rfold(init, move |retval, index| f(retval, self.targets.0[index]))
    }
}
impl ExactSizeIterator for TargetsIter {
    fn len(&self) -> usize {
        self.indexes.len()
    }
}

View file

@ -2,8 +2,8 @@
// See Notices.txt for copyright information
use crate::{
build::{
BaseJobArgs, BaseJobKind, JobArgsAndDependencies, JobKindAndArgs, JobParams, NoArgs,
RunBuild,
BaseJobArgs, BaseJobKind, GlobalParams, JobArgsAndDependencies, JobKindAndArgs, JobParams,
NoArgs, RunBuild,
external::{ExternalCommandArgs, ExternalCommandJobKind},
firrtl::{FirrtlArgs, FirrtlJobKind},
formal::{Formal, FormalAdditionalArgs, FormalArgs, FormalMode, WriteSbyFileJobKind},
@ -14,7 +14,6 @@ use crate::{
module::Module,
util::HashMap,
};
use eyre::eyre;
use serde::Deserialize;
use std::{
fmt::Write,
@ -107,12 +106,7 @@ fn make_assert_formal_args(
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
let args = JobKindAndArgs {
kind: BaseJobKind,
args: BaseJobArgs::from_output_dir_and_env(
get_assert_formal_target_path(&test_name)
.into_os_string()
.into_string()
.map_err(|_| eyre!("path is not valid UTF-8"))?,
),
args: BaseJobArgs::from_output_dir_and_env(get_assert_formal_target_path(&test_name), None),
};
let dependencies = JobArgsAndDependencies {
args,
@ -174,9 +168,9 @@ pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
solver,
export_options,
)?
.run(
|NoArgs {}| Ok(JobParams::new(module, APP_NAME)),
clap::Command::new(APP_NAME), // not actually used, so we can use an arbitrary value
.run_without_platform(
|NoArgs {}| Ok(JobParams::new(module)),
&GlobalParams::new(None, APP_NAME),
)
}

View file

@ -33,15 +33,15 @@ pub use const_cmp::{
#[doc(inline)]
pub use scoped_ref::ScopedRef;
pub(crate) use misc::chain;
#[doc(inline)]
pub use misc::{
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, serialize_to_json_ascii,
serialize_to_json_ascii_pretty, serialize_to_json_ascii_pretty_with_indent, slice_range,
try_slice_range,
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, os_str_strip_prefix,
os_str_strip_suffix, serialize_to_json_ascii, serialize_to_json_ascii_pretty,
serialize_to_json_ascii_pretty_with_indent, slice_range, try_slice_range,
};
pub(crate) use misc::{InternedStrCompareAsStr, chain};
pub mod job_server;
pub mod prefix_sum;

View file

@ -4,6 +4,7 @@ use crate::intern::{Intern, Interned};
use bitvec::{bits, order::Lsb0, slice::BitSlice, view::BitView};
use std::{
cell::Cell,
ffi::OsStr,
fmt::{self, Debug, Write},
io,
ops::{Bound, Range, RangeBounds},
@ -564,3 +565,50 @@ pub fn serialize_to_json_ascii_pretty_with_indent<T: serde::Serialize + ?Sized>(
serde_json::ser::PrettyFormatter::with_indent(indent.as_bytes()),
)
}
/// Strips a UTF-8 `prefix` from the front of `os_str`.
///
/// Returns the remainder as an [`OsStr`], or [`None`] when `os_str` does not
/// begin with `prefix`.
pub fn os_str_strip_prefix<'a>(os_str: &'a OsStr, prefix: impl AsRef<str>) -> Option<&'a OsStr> {
    let rest = os_str
        .as_encoded_bytes()
        .strip_prefix(prefix.as_ref().as_bytes())?;
    // Safety: only a valid UTF-8 prefix was removed, so `rest` still begins
    // on a valid OsStr encoding boundary.
    Some(unsafe { OsStr::from_encoded_bytes_unchecked(rest) })
}
/// Strips a UTF-8 `suffix` from the end of `os_str`.
///
/// Returns the remainder as an [`OsStr`], or [`None`] when `os_str` does not
/// end with `suffix`.
pub fn os_str_strip_suffix<'a>(os_str: &'a OsStr, suffix: impl AsRef<str>) -> Option<&'a OsStr> {
    let rest = os_str
        .as_encoded_bytes()
        .strip_suffix(suffix.as_ref().as_bytes())?;
    // Safety: only a valid UTF-8 suffix was removed, so `rest` still ends
    // on a valid OsStr encoding boundary.
    Some(unsafe { OsStr::from_encoded_bytes_unchecked(rest) })
}
/// Newtype over [`Interned<str>`] whose `Ord`/`PartialOrd` and
/// `Borrow<str>` go through the underlying string contents, so it can be
/// used as a string-ordered key.
#[derive(Copy, Clone, PartialEq, Eq)]
pub(crate) struct InternedStrCompareAsStr(pub(crate) Interned<str>);

impl fmt::Debug for InternedStrCompareAsStr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // delegate to the inner value's Debug
        fmt::Debug::fmt(&self.0, f)
    }
}

impl Ord for InternedStrCompareAsStr {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        // compare as &str, not by whatever ordering Interned itself has
        let lhs: &str = &self.0;
        let rhs: &str = &other.0;
        lhs.cmp(rhs)
    }
}

impl PartialOrd for InternedStrCompareAsStr {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(Ord::cmp(self, other))
    }
}

impl std::borrow::Borrow<str> for InternedStrCompareAsStr {
    fn borrow(&self) -> &str {
        &self.0
    }
}

View file

@ -6,3 +6,7 @@ pub mod xilinx;
/// Job kinds contributed by built-in vendor support (currently Xilinx only).
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
    xilinx::built_in_job_kinds()
}
/// Platforms contributed by built-in vendor support (currently Xilinx only).
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = crate::platform::DynPlatform> {
    xilinx::built_in_platforms()
}

207
crates/fayalite/src/vendor/xilinx.rs vendored Normal file
View file

@ -0,0 +1,207 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
annotations::make_annotation_enum,
build::{GlobalParams, ToArgs, WriteArgs},
intern::Interned,
prelude::{DynPlatform, Platform},
};
use clap::ValueEnum;
use ordered_float::NotNan;
use serde::{Deserialize, Serialize};
use std::fmt;
pub mod arty_a7;
pub mod primitives;
pub mod yosys_nextpnr_prjxray;
// Annotation attaching an XDC `IOSTANDARD` value (e.g. "LVCMOS33") to a port.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcIOStandardAnnotation {
    // The I/O standard string, e.g. "LVCMOS33".
    pub value: Interned<str>,
}
// Annotation attaching an XDC `LOC` pin/site location (e.g. "E3") to a port.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcLocationAnnotation {
    // Package pin or site name, e.g. "E3".
    pub location: Interned<str>,
}
// Annotation corresponding to an XDC `create_clock` timing constraint on the
// annotated clock signal.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct XdcCreateClockAnnotation {
    /// clock period in nanoseconds
    pub period: NotNan<f64>,
}
// Registers the Xilinx-specific annotations as a single annotation enum so
// they plug into the crate-wide annotation machinery.
make_annotation_enum! {
    #[non_exhaustive]
    pub enum XilinxAnnotation {
        XdcIOStandard(XdcIOStandardAnnotation),
        XdcLocation(XdcLocationAnnotation),
        XdcCreateClock(XdcCreateClockAnnotation),
    }
}
// Command-line arguments shared by Xilinx-targeted jobs.
// (plain `//` comments on purpose: `///` would become clap help text)
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
pub struct XilinxArgs {
    // Target device; when omitted, `require_device` falls back to the
    // platform's `Device` aspect.
    #[arg(long)]
    pub device: Option<Device>,
}
impl XilinxArgs {
    /// Resolves the target [`Device`]: an explicit `--device` argument wins,
    /// otherwise the `Device` aspect of the selected `platform` is used.
    ///
    /// Returns a clap `MissingRequiredArgument` error when neither source
    /// supplies a device.
    pub fn require_device(
        &self,
        platform: Option<&DynPlatform>,
        global_params: &GlobalParams,
    ) -> clap::error::Result<Device> {
        let from_platform = || {
            platform
                .and_then(|platform| platform.aspects().get_single_by_type::<Device>().copied())
        };
        self.device.or_else(from_platform).ok_or_else(|| {
            global_params.clap_error(
                clap::error::ErrorKind::MissingRequiredArgument,
                "missing --device option",
            )
        })
    }
}
impl ToArgs for XilinxArgs {
    /// Re-emits these arguments in command-line form; writes nothing when no
    /// device was selected.
    fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
        let Some(device) = self.device else { return };
        args.write_long_option_eq("device", device.as_str());
    }
}
// Generates the device enum plus its clap, serde, and accessor impls from a
// table of (canonical name, prjxray part/device/family) strings per device.
macro_rules! make_device_enum {
    ($vis:vis enum $Device:ident {
        $(
            #[
                name = $name:literal,
                xray_part = $xray_part:literal,
                xray_device = $xray_device:literal,
                xray_family = $xray_family:literal,
            ]
            $variant:ident,
        )*
    }) => {
        // The canonical name is the clap value name; the prjxray part string
        // is also accepted as an alias on the command line.
        #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, ValueEnum)]
        $vis enum $Device {
            $(
                #[value(name = $name, alias = $xray_part)]
                $variant,
            )*
        }
        impl $Device {
            // Canonical device name, e.g. "xc7a35ticsg324-1L".
            $vis fn as_str(self) -> &'static str {
                match self {
                    $(Self::$variant => $name,)*
                }
            }
            // Part string in the spelling prjxray expects.
            $vis fn xray_part(self) -> &'static str {
                match self {
                    $(Self::$variant => $xray_part,)*
                }
            }
            // Device string in the spelling prjxray expects.
            $vis fn xray_device(self) -> &'static str {
                match self {
                    $(Self::$variant => $xray_device,)*
                }
            }
            // Device family in the spelling prjxray expects, e.g. "artix7".
            $vis fn xray_family(self) -> &'static str {
                match self {
                    $(Self::$variant => $xray_family,)*
                }
            }
        }
        // serde visitor that reuses clap's `ValueEnum::from_str` so serde
        // accepts exactly the same spellings as the command line.
        struct DeviceVisitor;
        impl<'de> serde::de::Visitor<'de> for DeviceVisitor {
            type Value = $Device;
            fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.write_str("a Xilinx device string")
            }
            fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // second argument `false` selects case-sensitive matching
                match $Device::from_str(v, false) {
                    Ok(v) => Ok(v),
                    Err(_) => Err(E::invalid_value(serde::de::Unexpected::Str(v), &self)),
                }
            }
            fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
            where
                E: serde::de::Error,
            {
                // non-UTF-8 byte strings can never name a device
                match str::from_utf8(v).ok().and_then(|v| $Device::from_str(v, false).ok()) {
                    Some(v) => Ok(v),
                    None => Err(E::invalid_value(serde::de::Unexpected::Bytes(v), &self)),
                }
            }
        }
        impl<'de> Deserialize<'de> for $Device {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                deserializer.deserialize_string(DeviceVisitor)
            }
        }
        // Serializes as the canonical name string (round-trips through the
        // visitor above).
        impl Serialize for $Device {
            fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
            where
                S: serde::Serializer,
            {
                self.as_str().serialize(serializer)
            }
        }
    };
}
// Supported devices. The canonical `name` carries the full speed/temperature
// grade suffix ("-1L"); the `xray_*` strings use the plainer spellings that
// the prjxray database uses.
make_device_enum! {
    pub enum Device {
        #[
            name = "xc7a35ticsg324-1L",
            xray_part = "xc7a35tcsg324-1",
            xray_device = "xc7a35t",
            xray_family = "artix7",
        ]
        Xc7a35ticsg324_1l,
        #[
            name = "xc7a100ticsg324-1L",
            xray_part = "xc7a100tcsg324-1",
            xray_device = "xc7a100t",
            xray_family = "artix7",
        ]
        Xc7a100ticsg324_1l,
    }
}
impl fmt::Display for Device {
    /// Displays the canonical device name (same string as [`Device::as_str`]).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = self.as_str();
        // write_str (not str's Display) keeps the original formatting
        // behavior: width/fill flags are ignored.
        f.write_str(name)
    }
}
/// All Xilinx-provided job kinds: the Arty A7 board jobs followed by the
/// yosys/nextpnr/prjxray toolchain jobs.
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
    let from_arty_a7 = arty_a7::built_in_job_kinds().into_iter();
    let from_toolchain = yosys_nextpnr_prjxray::built_in_job_kinds();
    from_arty_a7.chain(from_toolchain)
}
/// All Xilinx-provided platforms: the Arty A7 boards followed by any
/// platforms contributed by the yosys/nextpnr/prjxray flow.
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = crate::platform::DynPlatform> {
    let from_arty_a7 = arty_a7::built_in_platforms().into_iter();
    let from_toolchain = yosys_nextpnr_prjxray::built_in_platforms();
    from_arty_a7.chain(from_toolchain)
}

View file

@ -0,0 +1,341 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
intern::{Intern, Interned},
module::{instance_with_loc, wire_with_loc},
platform::{
DynPlatform, Peripheral, PeripheralRef, Peripherals, PeripheralsBuilderFactory,
PeripheralsBuilderFinished, Platform, PlatformAspectSet,
peripherals::{ClockInput, Led, RgbLed, Uart},
},
prelude::*,
vendor::xilinx::{
Device, XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation,
primitives::{self, BUFGCE, STARTUPE2_default_inputs},
},
};
use ordered_float::NotNan;
use std::sync::OnceLock;
// Generates the Arty A7 platform enum: one variant per supported board
// revision, each tied to a board-name string and an FPGA `Device`, with a
// lazily-initialized, cached `PlatformAspectSet` per variant.
macro_rules! arty_a7_platform {
    (
        $vis:vis enum $ArtyA7Platform:ident {
            $(#[name = $name:literal, device = $device:ident]
            $Variant:ident,)*
        }
    ) => {
        #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
        #[non_exhaustive]
        $vis enum $ArtyA7Platform {
            $($Variant,)*
        }
        impl $ArtyA7Platform {
            $vis const VARIANTS: &'static [Self] = &[$(Self::$Variant,)*];
            // FPGA device fitted on this board variant.
            $vis fn device(self) -> Device {
                match self {
                    $(Self::$Variant => Device::$device,)*
                }
            }
            // Board name, e.g. "arty-a7-35t".
            $vis const fn as_str(self) -> &'static str {
                match self {
                    $(Self::$Variant => $name,)*
                }
            }
            // Aspect sets are built once and cached: each match arm expands
            // to its own `static` OnceLock, so every variant gets its own
            // lazily-initialized set.
            fn get_aspects(self) -> &'static PlatformAspectSet {
                match self {
                    $(Self::$Variant => {
                        static ASPECTS_SET: OnceLock<PlatformAspectSet> = OnceLock::new();
                        ASPECTS_SET.get_or_init(|| self.make_aspects())
                    })*
                }
            }
        }
    };
}
// The two supported Arty A7 board revisions, differing only in FPGA size.
arty_a7_platform! {
    pub enum ArtyA7Platform {
        #[name = "arty-a7-35t", device = Xc7a35ticsg324_1l]
        ArtyA7_35T,
        #[name = "arty-a7-100t", device = Xc7a100ticsg324_1l]
        ArtyA7_100T,
    }
}
// Board peripherals exposed to designs targeting the Arty A7.
// Names follow the board's signal names (clk100, rst, ld0..ld7, uart).
#[derive(Debug)]
pub struct ArtyA7Peripherals {
    // 100 MHz board clock input.
    clk100: Peripheral<ClockInput>,
    // Raw reset input.
    rst: Peripheral<Reset>,
    // Reset input after the 2-FF synchronizer (see `reset_sync` below).
    rst_sync: Peripheral<SyncReset>,
    // RGB LEDs LD0-LD3.
    ld0: Peripheral<RgbLed>,
    ld1: Peripheral<RgbLed>,
    ld2: Peripheral<RgbLed>,
    ld3: Peripheral<RgbLed>,
    // Single-color LEDs LD4-LD7.
    ld4: Peripheral<Led>,
    ld5: Peripheral<Led>,
    ld6: Peripheral<Led>,
    ld7: Peripheral<Led>,
    uart: Peripheral<Uart>,
    // TODO: add rest of peripherals when we need them
}
impl Peripherals for ArtyA7Peripherals {
    // Flattens every peripheral into `peripherals`. The exhaustive
    // destructuring is deliberate: adding a field without appending it here
    // becomes a compile error.
    fn append_peripherals<'a>(&'a self, peripherals: &mut Vec<PeripheralRef<'a, CanonicalType>>) {
        let Self {
            clk100,
            rst,
            rst_sync,
            ld0,
            ld1,
            ld2,
            ld3,
            ld4,
            ld5,
            ld6,
            ld7,
            uart,
        } = self;
        clk100.append_peripherals(peripherals);
        rst.append_peripherals(peripherals);
        rst_sync.append_peripherals(peripherals);
        ld0.append_peripherals(peripherals);
        ld1.append_peripherals(peripherals);
        ld2.append_peripherals(peripherals);
        ld3.append_peripherals(peripherals);
        ld4.append_peripherals(peripherals);
        ld5.append_peripherals(peripherals);
        ld6.append_peripherals(peripherals);
        ld7.append_peripherals(peripherals);
        uart.append_peripherals(peripherals);
    }
}
impl ArtyA7Platform {
    /// Builds this board variant's aspect set; currently it carries only the
    /// FPGA [`Device`].
    fn make_aspects(self) -> PlatformAspectSet {
        let mut aspects = PlatformAspectSet::new();
        aspects.insert_new(self.device());
        aspects
    }
}
// Extern module wrapping an inline-Verilog two-flop reset synchronizer:
// `out` asserts asynchronously whenever `inp` is high (FDPE async preset,
// INIT=1 so reset starts asserted) and deasserts synchronously two `clk`
// edges after `inp` falls. The ASYNC_REG attribute tells the Xilinx tools
// these registers synchronize an asynchronous signal.
#[hdl_module(extern)]
fn reset_sync() {
    #[hdl]
    let clk: Clock = m.input();
    #[hdl]
    let inp: Bool = m.input();
    #[hdl]
    let out: SyncReset = m.output();
    m.annotate_module(BlackBoxInlineAnnotation {
        path: "fayalite_arty_a7_reset_sync.v".intern(),
        text: r#"module __fayalite_arty_a7_reset_sync(input clk, input inp, output out);
wire reset_0_out;
(* ASYNC_REG = "TRUE" *)
FDPE #(
    .INIT(1'b1)
) reset_0 (
    .Q(reset_0_out),
    .C(clk),
    .CE(1'b1),
    .PRE(inp),
    .D(1'b0)
);
(* ASYNC_REG = "TRUE" *)
FDPE #(
    .INIT(1'b1)
) reset_1 (
    .Q(out),
    .C(clk),
    .CE(1'b1),
    .PRE(inp),
    .D(reset_0_out)
);
endmodule
"#
        .intern(),
    });
    m.verilog_name("__fayalite_arty_a7_reset_sync");
}
impl Platform for ArtyA7Platform {
    type Peripherals = ArtyA7Peripherals;
    // Platform name, e.g. "arty-a7-35t".
    fn name(&self) -> Interned<str> {
        self.as_str().intern()
    }
    // Registers every board peripheral with the builder up front; whatever
    // the design leaves unused is tied off in
    // `add_peripherals_in_wrapper_module` below.
    fn new_peripherals<'builder>(
        &self,
        builder_factory: PeripheralsBuilderFactory<'builder>,
    ) -> (Self::Peripherals, PeripheralsBuilderFinished<'builder>) {
        let mut builder = builder_factory.builder();
        (
            ArtyA7Peripherals {
                // 100 MHz board oscillator
                clk100: builder.input_peripheral("clk100", ClockInput::new(100e6)),
                rst: builder.input_peripheral("rst", Reset),
                rst_sync: builder.input_peripheral("rst_sync", SyncReset),
                ld0: builder.output_peripheral("ld0", RgbLed),
                ld1: builder.output_peripheral("ld1", RgbLed),
                ld2: builder.output_peripheral("ld2", RgbLed),
                ld3: builder.output_peripheral("ld3", RgbLed),
                ld4: builder.output_peripheral("ld4", Led),
                ld5: builder.output_peripheral("ld5", Led),
                ld6: builder.output_peripheral("ld6", Led),
                ld7: builder.output_peripheral("ld7", Led),
                uart: builder.output_peripheral("uart", Uart),
            },
            builder.finish(),
        )
    }
    fn source_location(&self) -> SourceLocation {
        SourceLocation::builtin()
    }
    // Elaborates the board wrapper: creates physical pins with XDC
    // LOC/IOSTANDARD annotations, inserts I/O buffers, sets up the gated
    // clock and synchronized reset, and connects everything to the
    // peripherals the design actually uses.
    fn add_peripherals_in_wrapper_module(&self, m: &ModuleBuilder, peripherals: Self::Peripherals) {
        let ArtyA7Peripherals {
            clk100,
            rst,
            rst_sync,
            ld0,
            ld1,
            ld2,
            ld3,
            ld4,
            ld5,
            ld6,
            ld7,
            uart,
        } = peripherals;
        // input pin -> IBUF -> (optionally inverted) value
        let make_buffered_input = |name: &str, location: &str, io_standard: &str, invert: bool| {
            let pin = m.input_with_loc(name, SourceLocation::builtin(), Bool);
            annotate(
                pin,
                XdcLocationAnnotation {
                    location: location.intern(),
                },
            );
            annotate(
                pin,
                XdcIOStandardAnnotation {
                    value: io_standard.intern(),
                },
            );
            let buf = instance_with_loc(
                &format!("{name}_buf"),
                primitives::IBUF(),
                SourceLocation::builtin(),
            );
            connect(buf.I, pin);
            if invert { !buf.O } else { buf.O }
        };
        // returned value drives an OBUFT whose tristate is permanently
        // enabled (T = 0), feeding the output pin
        let make_buffered_output = |name: &str, location: &str, io_standard: &str| {
            let pin = m.output_with_loc(name, SourceLocation::builtin(), Bool);
            annotate(
                pin,
                XdcLocationAnnotation {
                    location: location.intern(),
                },
            );
            annotate(
                pin,
                XdcIOStandardAnnotation {
                    value: io_standard.intern(),
                },
            );
            let buf = instance_with_loc(
                &format!("{name}_buf"),
                primitives::OBUFT(),
                SourceLocation::builtin(),
            );
            connect(pin, buf.O);
            connect(buf.T, false);
            buf.I
        };
        // create_clock period in ns, derived from the peripheral's frequency
        let clock_annotation = XdcCreateClockAnnotation {
            period: NotNan::new(1e9 / clk100.ty().frequency()).expect("known to be valid"),
        };
        // clk100 pin E3, gated through a BUFGCE enabled by STARTUPE2's EOS
        // (end-of-startup) output so the fabric clock only runs after
        // configuration completes
        let clk100_buf = make_buffered_input("clk100", "E3", "LVCMOS33", false);
        let startup = instance_with_loc(
            "startup",
            STARTUPE2_default_inputs(),
            SourceLocation::builtin(),
        );
        let clk100_sync = instance_with_loc("clk100_sync", BUFGCE(), SourceLocation::builtin());
        connect(clk100_sync.CE, startup.EOS);
        connect(clk100_sync.I, clk100_buf);
        let clk100_out = wire_with_loc("clk100_out", SourceLocation::builtin(), Clock);
        connect(clk100_out, clk100_sync.O);
        annotate(clk100_out, clock_annotation);
        // keep the named wire so the create_clock constraint has a stable
        // target
        annotate(clk100_out, DontTouchAnnotation);
        if let Some(clk100) = clk100.into_used() {
            connect(clk100.instance_io_field().clk, clk100_out);
        }
        // reset button on C2, inverted at the pin (NOTE(review): presumably
        // the board's reset is active-low -- confirm against the schematic),
        // then run through the 2-FF synchronizer clocked by clk100
        let rst_value = {
            let rst_buf = make_buffered_input("rst", "C2", "LVCMOS33", true);
            let rst_sync = instance_with_loc("rst_sync", reset_sync(), SourceLocation::builtin());
            connect(rst_sync.clk, clk100_sync.O);
            connect(rst_sync.inp, rst_buf);
            rst_sync.out
        };
        if let Some(rst) = rst.into_used() {
            connect(rst.instance_io_field(), rst_value.to_reset());
        }
        if let Some(rst_sync) = rst_sync.into_used() {
            connect(rst_sync.instance_io_field(), rst_value);
        }
        // RGB LEDs LD0-LD3 with their (r, g, b) pin locations; unused LEDs
        // are driven low (off)
        let rgb_leds = [
            (ld0, ("G6", "F6", "E1")),
            (ld1, ("G3", "J4", "G4")),
            (ld2, ("J3", "J2", "H4")),
            (ld3, ("K1", "H6", "K2")),
        ];
        for (rgb_led, (r_loc, g_loc, b_loc)) in rgb_leds {
            let r = make_buffered_output(&format!("{}_r", rgb_led.name()), r_loc, "LVCMOS33");
            let g = make_buffered_output(&format!("{}_g", rgb_led.name()), g_loc, "LVCMOS33");
            let b = make_buffered_output(&format!("{}_b", rgb_led.name()), b_loc, "LVCMOS33");
            if let Some(rgb_led) = rgb_led.into_used() {
                connect(r, rgb_led.instance_io_field().r);
                connect(g, rgb_led.instance_io_field().g);
                connect(b, rgb_led.instance_io_field().b);
            } else {
                connect(r, false);
                connect(g, false);
                connect(b, false);
            }
        }
        // single-color LEDs LD4-LD7; unused LEDs are driven low (off)
        let leds = [(ld4, "H5"), (ld5, "J5"), (ld6, "T9"), (ld7, "T10")];
        for (led, loc) in leds {
            let o = make_buffered_output(&led.name(), loc, "LVCMOS33");
            if let Some(led) = led.into_used() {
                connect(o, led.instance_io_field().on);
            } else {
                connect(o, false);
            }
        }
        // UART on D10 (tx) / A9 (rx); when unused, tx is held high (idle)
        let uart_tx = make_buffered_output("uart_tx", "D10", "LVCMOS33");
        let uart_rx = make_buffered_input("uart_rx", "A9", "LVCMOS33", false);
        if let Some(uart) = uart.into_used() {
            connect(uart_tx, uart.instance_io_field().tx);
            connect(uart.instance_io_field().rx, uart_rx);
        } else {
            connect(uart_tx, true); // idle
        }
    }
    fn aspects(&self) -> PlatformAspectSet {
        self.get_aspects().clone()
    }
}
/// The Arty A7 support currently contributes no job kinds of its own.
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = crate::build::DynJobKind> {
    []
}
/// One [`DynPlatform`] per supported Arty A7 board variant.
pub(crate) fn built_in_platforms() -> impl IntoIterator<Item = DynPlatform> {
    ArtyA7Platform::VARIANTS
        .iter()
        .copied()
        .map(DynPlatform::new)
}

View file

@ -0,0 +1,50 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
#![allow(non_snake_case)]
use crate::prelude::*;
// Xilinx IBUF input-buffer primitive: pad input `I`, fabric output `O`.
#[hdl_module(extern)]
pub fn IBUF() {
    m.verilog_name("IBUF");
    #[hdl]
    let O: Bool = m.output();
    #[hdl]
    let I: Bool = m.input();
}
// Xilinx OBUFT tristate output-buffer primitive: drives pad `O` from `I`
// when the active-low tristate control `T` is low.
#[hdl_module(extern)]
pub fn OBUFT() {
    m.verilog_name("OBUFT");
    #[hdl]
    let O: Bool = m.output();
    #[hdl]
    let I: Bool = m.input();
    #[hdl]
    let T: Bool = m.input();
}
// Xilinx BUFGCE global clock buffer with clock enable: clock output `O`
// driven from input `I`, gated by `CE`.
#[hdl_module(extern)]
pub fn BUFGCE() {
    m.verilog_name("BUFGCE");
    #[hdl]
    let O: Clock = m.output();
    #[hdl]
    let CE: Bool = m.input();
    #[hdl]
    let I: Bool = m.input();
}
// Xilinx STARTUPE2 configuration/startup block with all inputs left at
// their defaults (none are declared here); only the output ports used by
// this crate are exposed. `EOS` is the end-of-startup indicator.
#[hdl_module(extern)]
pub fn STARTUPE2_default_inputs() {
    m.verilog_name("STARTUPE2");
    #[hdl]
    let CFGCLK: Clock = m.output();
    #[hdl]
    let CFGMCLK: Clock = m.output();
    #[hdl]
    let EOS: Bool = m.output();
    #[hdl]
    let PREQ: Bool = m.output();
}

File diff suppressed because it is too large Load diff

View file

@ -6,6 +6,7 @@ use fayalite::{
int::{UIntInRange, UIntInRangeInclusive},
intern::Intern,
module::transform::simplify_enums::SimplifyEnumsKind,
platform::PlatformIOBuilder,
prelude::*,
reset::ResetType,
ty::StaticType,
@ -4631,3 +4632,55 @@ circuit check_uint_in_range:
",
};
}
// Test module whose entire I/O comes from the platform: every port
// registered on `platform_io_builder` becomes a port of this module via
// `add_platform_io`.
#[hdl_module(outline_generated)]
pub fn check_platform_io(platform_io_builder: PlatformIOBuilder<'_>) {
    #[hdl]
    let io = m.add_platform_io(platform_io_builder);
}
#[cfg(todo)]
#[test]
fn test_platform_io() {
let _n = SourceLocation::normalize_files_for_tests();
let m = check_platform_io(todo!());
dbg!(m);
#[rustfmt::skip] // work around https://github.com/rust-lang/rustfmt/issues/6161
assert_export_firrtl! {
m =>
"/test/check_platform_io.fir": r"FIRRTL version 3.2.0
circuit check_platform_io:
type Ty0 = {value: UInt<0>, range: {}}
type Ty1 = {value: UInt<1>, range: {}}
type Ty2 = {value: UInt<2>, range: {}}
type Ty3 = {value: UInt<2>, range: {}}
type Ty4 = {value: UInt<3>, range: {}}
type Ty5 = {value: UInt<3>, range: {}}
type Ty6 = {value: UInt<4>, range: {}}
type Ty7 = {value: UInt<0>, range: {}}
type Ty8 = {value: UInt<1>, range: {}}
type Ty9 = {value: UInt<2>, range: {}}
type Ty10 = {value: UInt<2>, range: {}}
type Ty11 = {value: UInt<3>, range: {}}
type Ty12 = {value: UInt<3>, range: {}}
type Ty13 = {value: UInt<4>, range: {}}
type Ty14 = {value: UInt<4>, range: {}}
module check_platform_io: @[module-XXXXXXXXXX.rs 1:1]
input i_0_to_1: Ty0 @[module-XXXXXXXXXX.rs 2:1]
input i_0_to_2: Ty1 @[module-XXXXXXXXXX.rs 3:1]
input i_0_to_3: Ty2 @[module-XXXXXXXXXX.rs 4:1]
input i_0_to_4: Ty3 @[module-XXXXXXXXXX.rs 5:1]
input i_0_to_7: Ty4 @[module-XXXXXXXXXX.rs 6:1]
input i_0_to_8: Ty5 @[module-XXXXXXXXXX.rs 7:1]
input i_0_to_9: Ty6 @[module-XXXXXXXXXX.rs 8:1]
input i_0_through_0: Ty7 @[module-XXXXXXXXXX.rs 9:1]
input i_0_through_1: Ty8 @[module-XXXXXXXXXX.rs 10:1]
input i_0_through_2: Ty9 @[module-XXXXXXXXXX.rs 11:1]
input i_0_through_3: Ty10 @[module-XXXXXXXXXX.rs 12:1]
input i_0_through_4: Ty11 @[module-XXXXXXXXXX.rs 13:1]
input i_0_through_7: Ty12 @[module-XXXXXXXXXX.rs 14:1]
input i_0_through_8: Ty13 @[module-XXXXXXXXXX.rs 15:1]
input i_0_through_9: Ty14 @[module-XXXXXXXXXX.rs 16:1]
",
};
}

View file

@ -11,4 +11,20 @@ pub fn my_module(a: i32, m: u32, (m, _): (i32, u32)) {
let o: UInt<8> = m.output();
}
#[hdl_module]
pub fn my_module2(platform_io_builder: PlatformIOBuilder<'_>) {
    #[hdl]
    let a: UInt<8> = m.input(); // expected error: plain input mixed with add_platform_io
    #[hdl]
    let b: UInt<8> = m.output(); // expected error: plain output mixed with add_platform_io
    #[hdl]
    let io = m.add_platform_io(platform_io_builder);
    #[hdl]
    let c: UInt<8> = m.input(); // expected error: plain input mixed with add_platform_io
    #[hdl]
    let d: UInt<8> = m.output(); // expected error: plain output mixed with add_platform_io
    #[hdl]
    let io = m.add_platform_io(platform_io_builder); // expected error: second add_platform_io
}
fn main() {} // UI test: only the emitted diagnostics are checked, nothing runs

View file

@ -1,17 +1,47 @@
error: name conflicts with implicit `m: &mut ModuleBuilder<_>`
error: name conflicts with implicit `m: &ModuleBuilder`
--> tests/ui/module.rs:7:26
|
7 | pub fn my_module(a: i32, m: u32, (m, _): (i32, u32)) {
| ^
error: name conflicts with implicit `m: &mut ModuleBuilder<_>`
error: name conflicts with implicit `m: &ModuleBuilder`
--> tests/ui/module.rs:7:35
|
7 | pub fn my_module(a: i32, m: u32, (m, _): (i32, u32)) {
| ^
error: name conflicts with implicit `m: &mut ModuleBuilder<_>`
error: name conflicts with implicit `m: &ModuleBuilder`
--> tests/ui/module.rs:9:9
|
9 | let m: UInt<8> = m.input();
| ^
error: can't have other inputs/outputs in a module using m.add_platform_io()
--> tests/ui/module.rs:17:24
|
17 | let a: UInt<8> = m.input();
| ^^^^^
error: can't have other inputs/outputs in a module using m.add_platform_io()
--> tests/ui/module.rs:19:24
|
19 | let b: UInt<8> = m.output();
| ^^^^^^
error: can't have other inputs/outputs in a module using m.add_platform_io()
--> tests/ui/module.rs:23:24
|
23 | let c: UInt<8> = m.input();
| ^^^^^
error: can't have other inputs/outputs in a module using m.add_platform_io()
--> tests/ui/module.rs:25:24
|
25 | let d: UInt<8> = m.output();
| ^^^^^^
error: can't use m.add_platform_io() more than once in a single module
--> tests/ui/module.rs:27:16
|
27 | let io = m.add_platform_io(platform_io_builder);
| ^^^^^^^^^^^^^^^

View file

@ -156,7 +156,7 @@ note: required by a bound in `intern_sized`
|
| pub trait Intern: Any + Send + Sync {
| ^^^^ required by this bound in `Intern::intern_sized`
| fn intern(&self) -> Interned<Self>;
...
| fn intern_sized(self) -> Interned<Self>
| ------------ required by a bound in this associated function
help: consider dereferencing here
@ -188,7 +188,7 @@ note: required by a bound in `intern_sized`
|
| pub trait Intern: Any + Send + Sync {
| ^^^^ required by this bound in `Intern::intern_sized`
| fn intern(&self) -> Interned<Self>;
...
| fn intern_sized(self) -> Interned<Self>
| ------------ required by a bound in this associated function
help: consider dereferencing here
@ -255,7 +255,7 @@ note: required by a bound in `intern_sized`
|
| pub trait Intern: Any + Send + Sync {
| ^^^^ required by this bound in `Intern::intern_sized`
| fn intern(&self) -> Interned<Self>;
...
| fn intern_sized(self) -> Interned<Self>
| ------------ required by a bound in this associated function
help: consider dereferencing here

View file

@ -1177,8 +1177,7 @@
"BlackBoxPath": "Visible",
"DocString": "Visible",
"CustomFirrtl": "Visible",
"XdcLocation": "Visible",
"XdcIOStandard": "Visible"
"Xilinx": "Visible"
}
},
"DontTouchAnnotation": {
@ -1216,6 +1215,14 @@
"$kind": "Opaque"
}
},
"XilinxAnnotation": {
"data": {
"$kind": "Enum",
"XdcLocation": "Visible",
"XdcIOStandard": "Visible",
"XdcCreateClock": "Visible"
}
},
"XdcLocationAnnotation": {
"data": {
"$kind": "Opaque"
@ -1226,6 +1233,11 @@
"$kind": "Opaque"
}
},
"XdcCreateClockAnnotation": {
"data": {
"$kind": "Opaque"
}
},
"Target": {
"data": {
"$kind": "Enum",