Compare commits


242 commits

Author SHA1 Message Date
edcc5927a5
don't cache external job failures if they could be caused by the user killing processes
All checks were successful
/ test (pull_request) Successful in 4m51s
/ test (push) Successful in 5m30s
2025-10-24 02:27:20 -07:00
7dc4417874
add test_many_memories so we catch if memories are iterated in an inconsistent order like in 838bd469ce
All checks were successful
/ test (pull_request) Successful in 4m44s
/ test (push) Successful in 5m22s
2025-10-24 01:40:30 -07:00
838bd469ce
change SimulationImpl::trace_memories to a BTreeMap for consistent iteration order 2025-10-24 00:53:13 -07:00
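A minimal sketch of the motivation (generic std types, not fayalite's actual trace_memories field): a HashMap's iteration order can vary between runs, while a BTreeMap always iterates keys in sorted order, so anything derived from the iteration is reproducible.

    use std::collections::BTreeMap;

    fn main() {
        // Keys come back in sorted order regardless of insertion order,
        // so output built by iterating is the same on every run.
        let mut memories = BTreeMap::new();
        memories.insert("mem_b", 2);
        memories.insert("mem_a", 1);
        let order: Vec<_> = memories.keys().copied().collect();
        assert_eq!(order, ["mem_a", "mem_b"]);
    }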
b6e4cd0614
move FormalMode to crate::testing and add to prelude 2025-10-24 00:14:04 -07:00
3e5b2f126a
make UIntInRange[Inclusive][Type] castable from/to any UInt<N> and add methods to get bit_width, start, and end 2025-10-23 23:52:41 -07:00
040cefea21
add tx_only_uart example to readme
All checks were successful
/ test (pull_request) Successful in 4m43s
/ test (push) Successful in 5m21s
2025-10-22 20:31:25 -07:00
3267cb38c4
build tx_only_uart in CI
All checks were successful
/ test (pull_request) Successful in 4m41s
2025-10-22 20:12:08 -07:00
b3cc28e2b6
add transmit-only UART example
All checks were successful
/ test (pull_request) Successful in 4m33s
2025-10-22 20:11:02 -07:00
26840daf13
arty_a7: add divided clocks as available input peripherals so you're not stuck with 100MHz 2025-10-22 20:11:02 -07:00
4d9e8d3b47
Add building blinky example to the readme
All checks were successful
/ test (pull_request) Successful in 4m31s
/ test (push) Successful in 5m9s
2025-10-21 23:00:16 -07:00
c6feea6d51
properly handle all XilinxAnnotations; this makes nextpnr-xilinx pick up the clock frequency properly
All checks were successful
/ test (pull_request) Successful in 4m34s
/ test (push) Successful in 5m9s
2025-10-21 22:24:02 -07:00
409992961c
switch to using verilog for reset synchronizer so we can use attributes on FDPE instances 2025-10-21 22:24:02 -07:00
2bdc8a7c72
WIP adding xdc create_clock -- nextpnr-xilinx currently ignores it
All checks were successful
/ test (pull_request) Successful in 4m35s
2025-10-19 23:13:28 -07:00
477a1f2d29
Add peripherals and Arty A7 platforms -- blinky works correctly now on arty-a7-100t! 2025-10-19 23:13:28 -07:00
4d54f903be
move vendor module to top level 2025-10-17 15:00:19 -07:00
3f5dd61e46
WIP adding Platform
All checks were successful
/ test (pull_request) Successful in 4m28s
2025-10-17 05:55:22 -07:00
def406ab52
group all xilinx annotations together
All checks were successful
/ test (pull_request) Successful in 4m23s
2025-10-16 04:53:58 -07:00
a565be1b09
do some clean up
All checks were successful
/ test (pull_request) Successful in 4m24s
2025-10-16 04:32:56 -07:00
676c1e3b7d
WIP adding annotations for generating the .xdc file for yosys-nextpnr-prjxray
All checks were successful
/ test (pull_request) Successful in 4m27s
2025-10-15 04:29:00 -07:00
169be960f8
generate Arty A7 100T .bit file for blinky example in CI 2025-10-15 04:29:00 -07:00
2b52799f5c
try building .bit file 2025-10-15 04:29:00 -07:00
35f98f3229
fix redirects 2025-10-15 04:29:00 -07:00
8a63ea89d0
WIP adding yosys-nextpnr-xray xilinx fpga toolchain -- blinky works on arty a7 100t (except for inverted reset) 2025-10-15 04:29:00 -07:00
84c5978eaf
WIP build Xilinx FPGA dependencies in CI 2025-10-15 04:29:00 -07:00
42e3179a60
change cache directory name to be fayalite-specific 2025-10-15 04:29:00 -07:00
53ae3ff670
mark create-unix-shell-script as incomplete in CLI 2025-10-15 04:29:00 -07:00
7af9abfb6f
switch to using new crate::build system 2025-10-15 04:29:00 -07:00
aacd05378f
WIP converting from cli.rs to build/*.rs 2025-10-15 04:29:00 -07:00
908ccef674
added automatically-added dependencies; added caching for external jobs 2025-10-15 04:29:00 -07:00
057670c12a
WIP adding FPGA support -- build module should be complete 2025-10-15 04:29:00 -07:00
f8ac78abd6
Remove extraneous #[automatically_derived] annotations that are causing warnings as reported by Tobias
All checks were successful
/ deps (pull_request) Successful in 20s
/ test (pull_request) Successful in 4m54s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m24s
2025-10-15 04:17:47 -07:00
64ec6c0dcc
switch to use server's new actions org
All checks were successful
/ deps (pull_request) Successful in 12m59s
/ test (pull_request) Successful in 4m58s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m29s
2025-10-09 23:48:17 -07:00
c06ef56482
add NLnet grant 2024-12-324 to readme
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 4m48s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m19s
2025-09-08 23:08:25 -07:00
db9b1c202c
add simulator support for sim-only values
All checks were successful
/ deps (pull_request) Successful in 17s
/ test (pull_request) Successful in 4m59s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m30s
2025-09-08 22:19:43 -07:00
d3dd66cbf0
add rust-src component in CI for consistent error messages 2025-09-08 22:18:10 -07:00
b5b1ee866c
converted to using get_state_part_kinds! 2025-09-05 19:10:06 -07:00
f0e3aef061
add get_state_part_kinds! macro 2025-09-05 19:07:07 -07:00
6d36698adf
move public paths of sim::{Compiled,Compiler} to sim::compiler 2025-08-26 19:23:21 -07:00
e7e831cf00
split out simulator compiler into a separate module 2025-08-26 19:17:21 -07:00
4008c311bf
format code after switching to edition 2024
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 4m58s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m23s
2025-08-24 16:35:21 -07:00
ef85d11327
try to get actions to run
All checks were successful
/ deps (pull_request) Successful in 12m11s
/ test (pull_request) Successful in 5m6s
2025-08-24 16:14:03 -07:00
ae7c4be9dc
remove get_many_mut since it was stabilized in std as get_disjoint_mut
Some checks failed
/ deps (pull_request) Has been cancelled
/ test (pull_request) Has been cancelled
2025-08-24 15:53:21 -07:00
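For reference, a minimal use of the std replacement the commit points to, slice::get_disjoint_mut (this is the standard-library API, not the removed project helper):

    fn main() {
        let mut v = [10, 20, 30];
        // Borrow two non-overlapping elements mutably at once; overlapping
        // indices would return an Err instead of aliasing.
        let [a, c] = v.get_disjoint_mut([0, 2]).expect("indices are disjoint");
        *a += 1;
        *c += 1;
        assert_eq!(v, [11, 20, 31]);
    }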
65f9ab32f4
switch to edition 2024 2025-08-24 15:53:21 -07:00
67e66ac3bd
upgrade to rust 1.89.0 2025-08-24 15:53:21 -07:00
668e714dc9
actually test always_zero hasher
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 4m58s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m32s
2025-04-09 21:11:09 -07:00
88323a8c16
run some tests with always_zero hasher
All checks were successful
/ deps (pull_request) Successful in 17s
/ test (pull_request) Successful in 5m1s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m36s
2025-04-09 21:03:57 -07:00
91e1b619e8
switch to petgraph 0.8.1 now that my PR was merged and released to crates.io
All checks were successful
/ deps (pull_request) Successful in 18s
/ test (pull_request) Successful in 4m25s
2025-04-09 20:48:40 -07:00
e2d2d4110b
upgrade hashbrown to 0.15.2 2025-04-09 20:33:21 -07:00
b1f9706e4e
add custom hasher for testing 2025-04-09 20:27:22 -07:00
4eda4366c8
check types in debug mode in impl Debug for Expr, helping to catch bugs 2025-04-09 20:23:19 -07:00
122c08d3cf
add fake which for miri 2025-04-09 20:21:43 -07:00
b08a747e20
switch to using type aliases for HashMap/HashSet to allow easily switching hashers 2025-04-09 20:17:46 -07:00
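A sketch of the general pattern (alias names are hypothetical, not fayalite's): routing every map and set through crate-local aliases means swapping the hasher, e.g. for the always_zero test hasher mentioned above, is a one-line change.

    use std::collections::hash_map::RandomState;

    // Change this one alias (e.g. to an always-zero BuildHasher for tests)
    // and every HashMap/HashSet built on it switches hashers with it.
    type DefaultBuildHasher = RandomState;
    type HashMap<K, V> = std::collections::HashMap<K, V, DefaultBuildHasher>;
    type HashSet<T> = std::collections::HashSet<T, DefaultBuildHasher>;

    fn main() {
        let mut m: HashMap<&str, u32> = HashMap::default();
        m.insert("answer", 42);
        assert_eq!(m["answer"], 42);
        let s: HashSet<u32> = HashSet::default();
        assert!(s.is_empty());
    }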
e0c9939147
add test that SimValue can't be interned, since its PartialEq may ignore types 2025-04-09 19:55:09 -07:00
07725ab489
switch interning to use HashTable rather than HashMap 2025-04-09 19:30:02 -07:00
36f1b9bbb6
add derive(Debug) to all types that are interned 2025-04-09 19:24:08 -07:00
9a1b047d2f
change TypeIdMap to not use any unsafe code 2025-04-09 16:25:56 -07:00
5967e812a2
fix [SU]IntValue's PartialEq for interning
All checks were successful
/ deps (pull_request) Successful in 17s
/ test (pull_request) Successful in 4m23s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m52s
different widths must make values compare unequal; otherwise interning
will e.g. substitute a 0x0_u8 for a 0x0_u2
2025-04-08 21:57:56 -07:00
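A hypothetical sketch of the invariant this fix enforces (field names assumed, not the real [SU]IntValue layout): interning deduplicates by Eq and Hash, so width has to participate in equality, not just the numeric value.

    // If eq compared only `value`, 0x0_u8 and 0x0_u2 would be interned as
    // the same object and one could be substituted for the other.
    #[derive(Debug)]
    struct UIntValue {
        width: u32,
        value: u128,
    }

    impl PartialEq for UIntValue {
        fn eq(&self, other: &Self) -> bool {
            self.width == other.width && self.value == other.value
        }
    }
    impl Eq for UIntValue {}

    fn main() {
        let a = UIntValue { width: 8, value: 0 };
        let b = UIntValue { width: 2, value: 0 };
        assert_ne!(a, b); // different widths never compare equal
    }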
001fd31451
add UIntInRange[Inclusive][Type]
All checks were successful
/ deps (pull_request) Successful in 18s
/ test (pull_request) Successful in 4m21s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m7s
2025-04-07 18:27:54 -07:00
57aae7b7fb
implement [de]serializing BaseTypes, SimValues, and support PhantomConst<T> in #[hdl] struct S<T>
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 4m16s
/ deps (push) Successful in 12s
/ test (push) Successful in 4m46s
2025-04-04 01:04:26 -07:00
6929352be7
re-export bitvec and add types useful for simulation to the prelude
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 4m12s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m40s
2025-04-03 16:01:39 -07:00
62058dc141
fix cargo doc warnings -- convert urls to auto links
All checks were successful
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 4m19s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m7s
2025-04-01 22:22:54 -07:00
c4b6a0fee6
add support for #[hdl(sim)] enum_ty.Variant(value) and #[hdl(sim)] EnumTy::Variant(value) and non-sim variants too
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 4m16s
2025-04-01 22:16:47 -07:00
9092e45447
fix #[hdl(sim)] match on enums
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 4m12s
2025-03-30 01:25:07 -07:00
a40eaaa2da
expand SimValue support
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 4m11s
2025-03-30 00:55:38 -07:00
5028401a5a
change SimValue to contain and deref to a value and not just contain bits 2025-03-27 23:44:36 -07:00
e0f978fbb6
silence unused m variable warning in #[hdl_module] with an empty body. 2025-03-27 23:17:28 -07:00
ec3a61513b
simulator read/write types must be passive 2025-03-27 23:03:44 -07:00
fdc73b5f3b
add ripple counter test to test simulating alternating circuits and extern modules
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 4m19s
/ deps (push) Successful in 12s
/ test (push) Successful in 4m26s
2025-03-25 18:56:26 -07:00
a115585d5a
simulator: allow external module generators to wait for value changes and/or clock edges
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 4m2s
2025-03-25 18:26:48 -07:00
ab9ff4f2db
simplify setting an extern module simulation
All checks were successful
/ deps (pull_request) Successful in 12m46s
/ test (pull_request) Successful in 4m8s
2025-03-21 17:08:29 -07:00
d1bd176b28
implement simulation of extern modules
All checks were successful
/ deps (pull_request) Successful in 11m39s
/ test (pull_request) Successful in 3m55s
2025-03-21 01:47:14 -07:00
920d8d875f
add some missing #[track_caller] 2025-03-19 17:10:51 -07:00
d453755bb2
add ExprPartialEq/ExprPartialOrd impls for PhantomConst
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 3m45s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m10s
2025-03-10 19:40:03 -07:00
450e1004b6
fix using fayalite as a dependency
All checks were successful
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 3m51s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m14s
2025-03-09 23:14:14 -07:00
c0c5b550bc
add PhantomConst
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 3m51s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m15s
2025-03-09 21:03:47 -07:00
2fa0ea6192
make FillInDefaultedGenerics work with Sizes and not just Types 2025-03-09 20:59:21 -07:00
bd75fdfefd
add efficient prefix-sums and reductions
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 3m40s
/ deps (push) Successful in 14s
/ test (push) Successful in 4m5s
2025-03-02 23:04:17 -08:00
50c86e18dc
add Expr<ArrayType<T, Len>>: IntoIterator and Expr<Array<T>>: FromIterator<T>
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 3m40s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m5s
2025-03-02 18:02:34 -08:00
60734cc9d1
switch CI to use mirrors
All checks were successful
/ deps (pull_request) Successful in 12m27s
/ test (pull_request) Successful in 4m3s
/ deps (push) Successful in 14s
/ test (push) Successful in 4m14s
2025-03-02 17:43:29 -08:00
3458c21f44
add #[hdl(cmp_eq)] to implement HdlPartialEq automatically
All checks were successful
/ deps (pull_request) Successful in 19s
/ test (pull_request) Successful in 3m38s
/ deps (push) Successful in 15s
/ test (push) Successful in 4m10s
2025-02-16 20:48:16 -08:00
43797db36e
sort custom keywords 2025-02-16 20:46:54 -08:00
cdd84953d0
support unknown trait bounds in type parameters
All checks were successful
/ deps (pull_request) Successful in 19s
/ test (pull_request) Successful in 3m34s
/ deps (push) Successful in 14s
/ test (push) Successful in 3m58s
2025-02-13 18:35:30 -08:00
86a1bb46be
add #[hdl] let destructuring and, while at it, tuple patterns
All checks were successful
/ deps (pull_request) Successful in 11m32s
/ test (pull_request) Successful in 3m44s
/ deps (push) Successful in 15s
/ test (push) Successful in 4m9s
2025-02-10 22:49:41 -08:00
209d5b5fe1
fix broken doc links 2025-02-10 22:49:16 -08:00
d4ea826051
sim: fix "label address not set" bug when the last Assignment is conditional
All checks were successful
/ deps (pull_request) Successful in 23s
/ test (pull_request) Successful in 3m32s
/ deps (push) Successful in 15s
/ test (push) Successful in 3m58s
2025-01-15 19:04:40 -08:00
404a2ee043
tests/sim: add test_array_rw
All checks were successful
/ deps (pull_request) Successful in 19s
/ test (pull_request) Successful in 3m34s
/ deps (push) Successful in 15s
/ test (push) Successful in 4m2s
2025-01-12 21:38:59 -08:00
e3a2ccd41c
properly handle duplicate names in vcd
All checks were successful
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 3m43s
/ deps (push) Successful in 15s
/ test (push) Successful in 4m8s
2025-01-09 22:52:22 -08:00
3771cea78e
Gather the FIFO debug ports in a bundle
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 3m35s
/ deps (push) Successful in 16s
/ test (push) Successful in 3m59s
2024-12-29 13:17:24 -03:00
dcf865caec
Add assertions and debug ports in order for the FIFO to pass induction
As with some proofs involving memories, it is necessary to add more ports to
the queue interface in order to sync state. These changes are gated on the
test environment, so normal use is not affected.

Since some speedup is achieved, use the saved time to test with a deeper
FIFO.
2024-12-29 13:12:58 -03:00
31d01046a8
Initial queue formal proof based on one-entry FIFO equivalence
For now, only check that the basic properties work in bounded model check
mode, leaving the induction proof for later.

Partially replace the previously existing proof.

Remove earlier assumptions and bounds that don't apply for this proof.

Use parameterized types instead of hard-coded types.
2024-12-29 13:04:01 -03:00
c16726cee6
fix #[hdl]/#[hdl_module] attributes getting the wrong hygiene when processing #[cfg]s
All checks were successful
/ deps (pull_request) Successful in 17s
/ test (pull_request) Successful in 5m13s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m42s
2024-12-29 00:48:15 -08:00
b63676d0ca
add test for cfgs
All checks were successful
/ deps (pull_request) Successful in 18s
/ test (pull_request) Successful in 5m23s
/ deps (push) Successful in 14s
/ test (push) Successful in 6m5s
2024-12-28 23:39:50 -08:00
7005fa3330
implement handling #[cfg] and #[cfg_attr] in proc macro inputs 2024-12-28 23:39:08 -08:00
2ab8428062
upgrade syn version 2024-12-28 23:39:08 -08:00
9b06019bf5
make sim::Compiler not print things to stdout unless you ask for it
All checks were successful
/ deps (pull_request) Successful in 16s
/ test (pull_request) Successful in 5m12s
/ deps (push) Successful in 16s
/ test (push) Successful in 5m18s
2024-12-18 21:15:09 -08:00
36bad52978
sim: fix sim.write to struct
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 5m16s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m14s
2024-12-18 20:50:50 -08:00
21c73051ec
sim: add SimValue and reading/writing more than just a scalar
All checks were successful
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m14s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m12s
2024-12-18 01:39:35 -08:00
304d8da0e8
Merge remote-tracking branch 'origin/master' into adding-simulator
All checks were successful
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m24s
/ deps (push) Successful in 16s
/ test (push) Successful in 5m46s
2024-12-13 15:06:45 -08:00
2af38de900
add more memory tests
Some checks failed
/ deps (push) Successful in 19s
/ test (push) Has been cancelled
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m21s
2024-12-13 15:04:48 -08:00
c756aeec70
tests/sim: add test for memory rw port
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 5m20s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 6m32s
2024-12-12 20:50:41 -08:00
903ca1bf30
sim: simple memory test works!
All checks were successful
/ deps (push) Successful in 17s
/ test (push) Successful in 5m20s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m24s
2024-12-12 19:47:57 -08:00
8d030ac65d
sim/interpreter: add addresses to instruction listing
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 5m20s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m19s
2024-12-12 16:25:38 -08:00
562c479b62
sim/interpreter: fix StatePartLayout name in debug output 2024-12-12 15:06:17 -08:00
393f78a14d
sim: add WIP memory test
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 5m16s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m19s
2024-12-11 23:28:15 -08:00
8616ee4737
tests/sim: test_enums works!
All checks were successful
/ deps (push) Successful in 17s
/ test (push) Successful in 5m18s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m20s
2024-12-11 00:01:04 -08:00
5087f16099
sim: fix assignments graph by properly including conditions as assignment inputs 2024-12-11 00:00:21 -08:00
6b31e6d515
sim: add .dot output for Assignments graph for debugging
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 5m20s
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 5m21s
2024-12-10 23:40:33 -08:00
564ccb30bc
sim/vcd: fix variable identifiers to follow verilog rules 2024-12-10 23:39:17 -08:00
ca759168ff
tests/sim: add WIP test for enums 2024-12-10 23:37:26 -08:00
e4cf66adf8
sim: implement memories, still needs testing
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 5m15s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m20s
2024-12-09 23:03:01 -08:00
cd0dd7b7ee
change memory write latency to NonZeroUsize to match read latency being usize 2024-12-09 23:01:40 -08:00
Cesar Strauss
2e7d685dc7 add module exercising formal verification of memories
All checks were successful
/ deps (pull_request) Successful in 11m25s
/ test (pull_request) Successful in 4m47s
/ deps (push) Successful in 14s
/ test (push) Successful in 5m12s
2024-12-08 17:13:26 -03:00
9654167ca3
sim: WIP working on memory
All checks were successful
/ deps (push) Successful in 17s
/ test (push) Successful in 5m18s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m18s
2024-12-06 15:53:34 -08:00
3ed7827485
sim: WIP adding memory support
All checks were successful
/ deps (push) Successful in 16s
/ test (push) Successful in 5m28s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m24s
2024-12-05 21:35:23 -08:00
e504cfebfe
add BoolOrIntType::copy_bits_from_bigint_wrapping and take BigInt arguments by reference
All checks were successful
/ deps (push) Successful in 17s
/ test (push) Successful in 5m21s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m19s
2024-12-05 20:32:15 -08:00
9f42cab471
change to version 0.3.0 for breaking change 2024-12-05 20:26:28 -08:00
259bee39c2
tests/sim: split expected output text into separate files
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 5m16s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m22s
2024-12-05 18:17:13 -08:00
643816d5b5
vcd: handle enums with fields
All checks were successful
/ deps (push) Successful in 16s
/ test (push) Successful in 5m17s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 5m23s
2024-12-04 21:03:29 -08:00
42afd2da0e
sim: implement enums (except for connecting unequal enum types)
Some checks failed
/ deps (push) Successful in 18s
/ test (push) Has been cancelled
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m24s
2024-12-04 20:58:39 -08:00
15bc304bb6
impl ToExpr for TargetBase 2024-12-04 20:57:44 -08:00
4422157db8
WIP adding enums to simulator
All checks were successful
/ deps (push) Successful in 23s
/ test (push) Successful in 5m17s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 5m19s
2024-12-02 21:06:23 -08:00
d3f52292a1
test doc tests in CI
All checks were successful
/ deps (push) Successful in 16s
/ test (push) Successful in 5m16s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 5m18s
2024-12-01 20:21:26 -08:00
fd45465d35
sim: add support for registers
All checks were successful
/ deps (push) Successful in 19s
/ test (push) Successful in 5m1s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m0s
2024-12-01 20:14:13 -08:00
5e0548db26
vcd: single bit signals have no spaces in their value changes 2024-12-01 20:12:43 -08:00
12b3ba57f1
add some ExprCastTo supertraits to ResetType to make generic code easier 2024-12-01 20:10:25 -08:00
965fe53077
deduce_resets: show more debugging info on assertion failure 2024-12-01 20:09:17 -08:00
3abba7f9eb
simulating circuits with deduced resets works
All checks were successful
/ deps (push) Successful in 15s
/ test (push) Successful in 4m58s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 4m58s
2024-11-27 23:52:07 -08:00
6446b71afd
deduce_resets works!
All checks were successful
/ deps (push) Successful in 18s
/ test (push) Successful in 4m56s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 5m0s
2024-11-27 23:24:11 -08:00
d36cf92d7f
make ToReset generic over the reset type 2024-11-27 23:19:55 -08:00
d744d85c66
working on deduce_resets
All checks were successful
/ deps (push) Successful in 16s
/ test (push) Successful in 4m57s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 5m3s
2024-11-27 01:31:18 -08:00
358cdd10c8
add more expr casts 2024-11-27 01:30:28 -08:00
9128a84284
Merge remote-tracking branch 'origin/master' into adding-simulator
All checks were successful
/ deps (push) Successful in 15s
/ test (push) Successful in 4m55s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 4m58s
2024-11-26 21:28:22 -08:00
546010739a
working on deduce_resets
Some checks failed
/ deps (push) Successful in 15s
/ test (push) Has been cancelled
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 5m1s
2024-11-26 21:26:56 -08:00
9b5f1218fd
make ClockDomain and Reg generic over reset type
All checks were successful
/ deps (push) Successful in 15s
/ test (push) Successful in 4m57s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 4m55s
2024-11-26 20:47:03 -08:00
89d84551f8
add ResetType to the list of recognized type bounds 2024-11-26 18:52:03 -08:00
c45624e3c2
Fix SInt::for_value not accounting for sign bit for positive values
All checks were successful
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 4m42s
/ deps (push) Successful in 13s
/ test (push) Successful in 4m39s
Fixes: #4
2024-11-26 16:26:29 -08:00
7851bf545c
working on deduce_resets.rs
All checks were successful
/ deps (push) Successful in 19s
/ test (push) Successful in 4m53s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 4m52s
2024-11-26 00:07:11 -08:00
3e3da53bd2
working on deduce_resets
All checks were successful
/ deps (push) Successful in 16s
/ test (push) Successful in 4m54s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 4m53s
2024-11-25 00:01:02 -08:00
fa50930ff8
update petgraph dependency to include UnionFind::new_set() 2024-11-25 00:00:26 -08:00
9516fe03a1
increase rust version in CI too
All checks were successful
/ deps (push) Successful in 15s
/ test (push) Successful in 4m57s
/ deps (pull_request) Successful in 14s
/ test (pull_request) Successful in 5m1s
2024-11-24 14:46:25 -08:00
52ab134673
increase rust version to support omitting match arms with uninhabited types
Some checks failed
/ deps (push) Successful in 17s
/ test (push) Failing after 1m18s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Failing after 1m17s
2024-11-24 14:41:39 -08:00
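A minimal example of the language feature being referenced (assuming this is the exhaustiveness change in recent Rust that lets arms for uninhabited variants be omitted):

    use std::convert::Infallible;

    fn unwrap_never_fails(r: Result<u32, Infallible>) -> u32 {
        // The Err arm can be omitted because Infallible has no values,
        // so the match is still exhaustive on newer Rust versions.
        match r {
            Ok(x) => x,
        }
    }

    fn main() {
        assert_eq!(unwrap_never_fails(Ok(7)), 7);
    }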
698b8adc23
working on deduce_resets pass
Some checks failed
/ deps (push) Successful in 16s
/ test (push) Failing after 1m30s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Failing after 1m32s
2024-11-24 14:39:32 -08:00
59be3bd645
WIP working on implementing deduce_resets pass
Some checks failed
/ deps (push) Successful in 18s
/ test (push) Failing after 1m31s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Failing after 1m31s
2024-11-24 03:44:31 -08:00
913baa37e9
WIP adding deduce_resets pass
All checks were successful
/ deps (push) Successful in 19s
/ test (push) Successful in 4m46s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 4m45s
2024-11-22 16:07:18 -08:00
11ddbc43c7
writing VCD for combinatorial circuits works!
All checks were successful
/ deps (push) Successful in 15s
/ test (push) Successful in 4m46s
/ deps (pull_request) Successful in 13s
/ test (pull_request) Successful in 4m45s
2024-11-20 22:53:54 -08:00
c4b5d00419
WIP adding VCD output 2024-11-20 22:53:54 -08:00
09aa9fbc78
wire up simulator trace writing interface 2024-11-20 22:53:54 -08:00
288a6b71b9
WIP adding VCD output 2024-11-20 22:53:54 -08:00
0095570f19
simple combinatorial simulation works! 2024-11-20 22:53:54 -08:00
f54e55a143
Simulation::settle_step() works for simple modules 2024-11-20 22:53:54 -08:00
a6e40839ac
simulator WIP: use petgraph for topological sort over assignments 2024-11-20 22:53:54 -08:00
3106a6fff6
working on simulator... 2024-11-20 22:53:54 -08:00
f338f37d3e
working on simulator 2024-11-20 22:53:54 -08:00
277d3e0d4d
working on simulator 2024-11-20 22:53:54 -08:00
b288d6f8f2
add missing copyright headers 2024-11-20 22:53:54 -08:00
479d59b287
WIP implementing simulator 2024-11-20 22:53:54 -08:00
6f904148c4
WIP adding simulator 2024-11-20 22:53:54 -08:00
3ea0d98924
always write formal cache json
All checks were successful
/ deps (push) Successful in 15s
/ test (push) Successful in 4m37s
2024-11-20 22:51:40 -08:00
Cesar Strauss
c1f1a8b749 Add test module exercising formal verification.
All checks were successful
/ deps (pull_request) Successful in 15s
/ test (pull_request) Successful in 4m47s
/ deps (push) Successful in 13s
/ test (push) Successful in 5m16s
2024-11-20 18:29:39 -03:00
3d5d8c54b6
add repository to cache key
All checks were successful
/ deps (push) Successful in 10m39s
/ test (push) Successful in 5m14s
2024-10-30 20:55:02 -07:00
ee15fd2b94
support #[hdl] type aliases
All checks were successful
/ deps (push) Successful in 11m28s
/ test (push) Successful in 4m40s
2024-10-30 20:47:10 -07:00
20cf0abbcc
fix using #[hdl] types like S<{ 1 + 2 }> 2024-10-30 20:46:11 -07:00
5bd0de48b7
change to version 0.2.1 2024-10-30 19:36:05 -07:00
0c9c48a066
split out deps into separate workflow with better caching using deps.yml from cpu.git
All checks were successful
/ deps (push) Successful in 21s
/ test (push) Successful in 19m32s
2024-10-17 21:05:18 -07:00
cb17913004
limit sby to one thread each since it seems not to respect job count in parallel mode
All checks were successful
/ test (push) Successful in 46m32s
2024-10-15 21:32:38 -07:00
42effd1132
switch to using a make job server for managing test parallelism
Some checks failed
/ test (push) Failing after 39m16s
2024-10-15 20:32:33 -07:00
3d0f95cfe5
formal: add workaround for wires disappearing because yosys optimizes them out
All checks were successful
/ test (push) Successful in 39m13s
2024-10-15 01:48:48 -07:00
3939ce2360
add Bundle and Enum to prelude
All checks were successful
/ test (push) Successful in 39m33s
2024-10-14 17:47:58 -07:00
d0229fbcfb
get #[hdl] struct S<A: KnownSize, B: KnownSize> to work
All checks were successful
/ test (push) Successful in 39m2s
2024-10-11 17:30:49 -07:00
4909724995
add more thorough checks that bounds are properly handled on #[hdl] structs
All checks were successful
/ test (push) Successful in 37m37s
2024-10-10 23:34:46 -07:00
d0694cbd52
add disabled test for #[hdl] struct S4<W: KnownSize, W2: KnownSize> which type errors
Some checks failed
/ test (push) Has been cancelled
2024-10-10 22:58:15 -07:00
1a2149b040
silence warnings for field names that start with _
All checks were successful
/ test (push) Successful in 37m30s
2024-10-10 20:53:29 -07:00
59cef3f398
add PhantomData as a hdl bundle
Some checks failed
/ test (push) Has been cancelled
2024-10-10 20:48:09 -07:00
bf907c3872
cache results of formal proofs
All checks were successful
/ test (push) Successful in 39m54s
2024-10-07 23:31:24 -07:00
99180eb3b4
fix clippy lints in generated code
All checks were successful
/ test (push) Successful in 37m34s
2024-10-07 22:06:59 -07:00
017c14a2f1
don't use #[allow(..., reason = "...")] since that's not stable yet on rust 1.80.1 2024-10-07 22:06:59 -07:00
ed1aea41f3
clean up some clippy warnings
Some checks failed
/ test (push) Failing after 3m41s
2024-10-07 21:49:18 -07:00
f12322aa2a
remove interning contexts 2024-10-07 21:33:56 -07:00
44ca1a607a
remove unused AGCContext 2024-10-07 21:23:13 -07:00
30b9a5e48d
change NameId to have an opaque Id so output firrtl doesn't depend on how many modules of the same name were ever created
All checks were successful
/ test (push) Successful in 39m6s
2024-10-07 19:06:01 -07:00
eed0afc6ab
add some utility From<Interned<T>> impls 2024-10-07 19:05:20 -07:00
aec383c0af
try to fix ccache
All checks were successful
/ test (push) Successful in 1h19m11s
2024-10-06 20:57:42 -07:00
f403eed7c0
only run tests once, since they are quite slow
Some checks failed
/ test (push) Has been cancelled
2024-10-06 20:08:39 -07:00
2e8b73d2fc
rename fire/fire_data to firing/firing_data 2024-10-06 19:04:48 -07:00
e05c368688
change register names to end in _reg by convention 2024-10-06 18:50:09 -07:00
ec77559e2b
fix cache action name
All checks were successful
/ test (push) Successful in 1h44m43s
2024-10-04 17:10:06 -07:00
b7f1101164
reduce parallelism to fit within the number of available cpus even when running sby in prove mode (which likes to run 2 smt solvers in parallel)
Some checks failed
/ test (push) Has been cancelled
2024-10-04 17:03:51 -07:00
487af07154
yosys build runs out of memory
Some checks failed
/ test (push) Failing after 1h8m27s
2024-10-04 01:03:17 -07:00
c0d4de56a9
try to make yosys build faster
Some checks failed
/ test (push) Failing after 24m17s
2024-10-03 23:40:44 -07:00
9f154e6b96
try caching ccache manually
Some checks are pending
/ test (push) Waiting to run
2024-10-03 23:36:39 -07:00
0d54b9a2a9
queue formal proof passes!
Some checks are pending
/ test (push) Has started running
2024-10-03 23:07:14 -07:00
343805f80b
fix #[hdl] to work with unusual identifier hygiene from macros 2024-10-03 23:04:14 -07:00
15a28aa7a7
install python3-click -- needed by symbiyosys
All checks were successful
/ test (push) Successful in 33m5s
2024-10-03 01:44:06 -07:00
4084a70485
switch default solver to z3 2024-10-03 01:43:46 -07:00
3e2fb9b94f
WIP getting queue formal to pass -- passes for capacity <= 2
Some checks failed
/ test (push) Has been cancelled
2024-10-03 01:08:01 -07:00
bc26fe32fd
add ccache and clean up deps
Some checks failed
/ test (push) Has been cancelled
2024-10-03 01:01:06 -07:00
eb65bec26e
add yosys deps
Some checks failed
/ test (push) Has been cancelled
2024-10-03 00:44:04 -07:00
4497f09ea0
fix wrong build steps
Some checks failed
/ test (push) Failing after 1m2s
2024-10-03 00:39:18 -07:00
1c63a441a9
add needed tools to CI
Some checks failed
/ test (push) Failing after 1m5s
2024-10-03 00:35:43 -07:00
0cf01600b3
add mod formal and move assert/assume/cover stuff to it 2024-10-01 19:56:17 -07:00
f3d6528f5b
make annotations easier to use 2024-10-01 19:54:17 -07:00
f35d88d2bb
remove unused valueless.rs 2024-10-01 18:41:41 -07:00
e8c393f3bb
sort pub mod items 2024-10-01 18:40:52 -07:00
d0b406d288
add more annotation kinds
All checks were successful
/ test (push) Successful in 4m45s
2024-10-01 18:33:32 -07:00
2a25dd9d7b
fix annotations getting lost 2024-10-01 18:31:44 -07:00
6e0b6c000d
remove stray debugging prints 2024-10-01 18:30:46 -07:00
d089095667
change default to --simplify-enums=replace-with-bundle-of-uints
All checks were successful
/ test (push) Successful in 4m42s
2024-10-01 00:07:48 -07:00
9d66fcc548
improve ExportOptions support in assert_export_firrtl! 2024-10-01 00:05:39 -07:00
186488a82e
remove FIXME now that simplify_enums is fixed 2024-09-30 23:31:45 -07:00
edcea1adc3
add firrtl comments when connecting expressions with different types
All checks were successful
/ test (push) Successful in 4m44s
2024-09-30 22:33:27 -07:00
30a38bc8da
fix simplify_enums to properly handle nested enums and connects with different types 2024-09-30 22:31:16 -07:00
1e2831da47
add validation of connects and matches when validating module
this is useful for catching errors in transformation passes
2024-09-30 21:20:35 -07:00
d2ba313f0f
fix simplify_memories trying to connect Bool with UInt 2024-09-30 21:19:20 -07:00
04752c5037
add test for connect_any with nested enums with different-sized variant bodies
All checks were successful
/ test (push) Successful in 4m42s
simplify_enums is currently broken in that case
2024-09-25 21:55:52 -07:00
e661aeab11
add WIP formal proof for queue()
All checks were successful
/ test (push) Successful in 5m27s
2024-09-25 02:00:06 -07:00
5fc7dbd6e9
add assert_formal helper for running formal proofs in rust tests 2024-09-25 02:00:06 -07:00
45dbb554d0
add formal subcommand 2024-09-25 02:00:06 -07:00
bb860d54cc
add command line options for selecting which transforms to apply when generating firrtl 2024-09-25 02:00:06 -07:00
efc3a539ed
support redirecting subprocesses' stdout/stderr to print!() so it gets captured for rust tests 2024-09-25 02:00:06 -07:00
f32c0a7863
switch to #[derive(Parser)] instead of #[derive(Args)] 2024-09-25 01:28:11 -07:00
4ff01690a7
clean up deps and move missed deps to workspace 2024-09-25 01:22:35 -07:00
28aad19bf5
add assert/assume/cover
All checks were successful
/ test (push) Successful in 4m33s
2024-09-23 19:10:51 -07:00
716c65edcd
add WIP version of queue()
All checks were successful
/ test (push) Successful in 4m36s
2024-09-22 18:59:12 -07:00
f6146048d1
add memory::splat_mask to generate mask types from a Bool 2024-09-22 18:57:30 -07:00
a701f99fd6
add repeat() 2024-09-22 18:56:26 -07:00
78edfc97b2
split int::IntCmp into expr::HdlPartialEq and expr::HdlPartialOrd
All checks were successful
/ test (push) Successful in 4m32s
2024-09-22 17:28:46 -07:00
9ad4ec0f39
add ty.uninit()
All checks were successful
/ test (push) Successful in 4m30s
2024-09-22 17:26:23 -07:00
8449854cac
add ToExpr for usize/isize/NonZero<T>
All checks were successful
/ test (push) Successful in 4m32s
2024-09-22 17:19:58 -07:00
790bb15408
remove reset_default from proc-macro; it was forgotten when reset_default was removed from RegBuilder 2024-09-22 16:03:20 -07:00
bdbc6d89bd
add check-copyright to CI
All checks were successful
/ test (push) Successful in 4m33s
2024-09-22 15:30:53 -07:00
10ae95fac1
add missing copyright headers 2024-09-22 15:30:05 -07:00
053391b010
add script for checking copyright headers 2024-09-22 15:29:28 -07:00
51ce7b079e
add ReadyValid<T>
All checks were successful
/ test (push) Successful in 4m26s
2024-09-20 19:11:30 -07:00
ff269e5def
add utility functions on HdlOption, inspired by Option's API 2024-09-20 18:49:12 -07:00
df55a514e4
add support for incomplete_wire -- a wire that you can supply the type of later 2024-09-20 18:46:56 -07:00
ff94dda922
support #[hdl] on functions -- enables #[hdl] usage in function body 2024-09-20 18:42:24 -07:00
a8c804ef4a
some final cleanups
All checks were successful
/ test (push) Successful in 5m43s
2024-09-19 23:52:32 -07:00
2d293ae87b
#[hdl] match works! 2024-09-19 23:51:54 -07:00
9887d70f41
fix handling of const and size type generics when generating Index impls
All checks were successful
/ test (push) Successful in 5m1s
2024-09-19 18:45:04 -07:00
2c1afd1cd6
const generics on hdl_module work!
All checks were successful
/ test (push) Successful in 4m54s
2024-09-17 15:39:23 -07:00
76ea7f82c3
WIP adding const generics
Some checks failed
/ test (push) Failing after 1m3s
2024-09-16 16:47:10 -07:00
5835b995a9
WIP: use HdlOption[the_type_var] or UInt[123 + n] for creating types
All checks were successful
/ test (push) Successful in 4m56s
2024-08-21 22:27:21 -07:00
163 changed files with 94154 additions and 15671 deletions


@ -1,19 +1,25 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
on: [push, pull_request]
jobs:
test:
runs-on: debian-12
container:
image: git.libre-chip.org/libre-chip/fayalite-deps:latest
steps:
- uses: https://code.forgejo.org/actions/checkout@v3
- uses: actions/checkout@v3
with:
fetch-depth: 0
- run: |
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y --default-toolchain 1.79.0
source "$HOME/.cargo/env"
echo "$PATH" >> "$GITHUB_PATH"
- uses: https://github.com/Swatinem/rust-cache@v2
scripts/check-copyright.sh
- uses: https://git.libre-chip.org/mirrors/rust-cache@v2
with:
save-if: ${{ github.ref == 'refs/heads/master' }}
- run: cargo test
- run: cargo test --features=unstable-doc
- run: cargo build --tests --features=unstable-doc
- run: cargo test --doc --features=unstable-doc
- run: cargo doc --features=unstable-doc
- run: FAYALITE_TEST_HASHER=always_zero cargo test --test=module --features=unstable-doc,unstable-test-hasher
- run: cargo run --example blinky yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/blinky-out
- run: cargo run --example tx_only_uart yosys-nextpnr-xray --platform=arty-a7-100t --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db -o target/tx_only_uart-out

4
.gitignore vendored

@ -1,2 +1,4 @@
# SPDX-License-Identifier: LGPL-3.0-or-later
# See Notices.txt for copyright information
/target
.vscode
.vscode

376
Cargo.lock generated

@ -1,18 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "ahash"
version = "0.8.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
version = 4
[[package]]
name = "allocator-api2"
@ -37,9 +25,9 @@ dependencies = [
[[package]]
name = "anstyle"
version = "1.0.7"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b"
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
[[package]]
name = "anstyle-parse"
@ -56,7 +44,7 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391"
dependencies = [
"windows-sys",
"windows-sys 0.52.0",
]
[[package]]
@ -66,9 +54,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19"
dependencies = [
"anstyle",
"windows-sys",
"windows-sys 0.52.0",
]
[[package]]
name = "arrayref"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb"
[[package]]
name = "arrayvec"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "autocfg"
version = "1.1.0"
@ -81,6 +81,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf"
[[package]]
name = "base64"
version = "0.22.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
[[package]]
name = "basic-toml"
version = "0.1.8"
@ -109,6 +115,20 @@ dependencies = [
"wyz",
]
[[package]]
name = "blake3"
version = "1.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d82033247fd8e890df8f740e407ad4d038debb9eb1f40533fffb32e7d17dc6f7"
dependencies = [
"arrayref",
"arrayvec",
"cc",
"cfg-if",
"constant_time_eq",
"serde",
]
[[package]]
name = "block-buffer"
version = "0.10.4"
@ -118,6 +138,15 @@ dependencies = [
"generic-array",
]
[[package]]
name = "cc"
version = "1.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1"
dependencies = [
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.0"
@ -126,9 +155,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "clap"
version = "4.5.9"
version = "4.5.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462"
checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae"
dependencies = [
"clap_builder",
"clap_derive",
@ -136,9 +165,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.9"
version = "4.5.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942"
checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9"
dependencies = [
"anstream",
"anstyle",
@ -147,10 +176,19 @@ dependencies = [
]
[[package]]
name = "clap_derive"
version = "4.5.8"
name = "clap_complete"
version = "4.5.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085"
checksum = "75bf0b32ad2e152de789bb635ea4d3078f6b838ad7974143e99b99f45a04af4a"
dependencies = [
"clap",
]
[[package]]
name = "clap_derive"
version = "4.5.47"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c"
dependencies = [
"heck",
"proc-macro2",
@ -160,9 +198,9 @@ dependencies = [
[[package]]
name = "clap_lex"
version = "0.7.1"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70"
checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
[[package]]
name = "colorchoice"
@ -170,6 +208,12 @@ version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422"
[[package]]
name = "constant_time_eq"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c74b8349d32d297c9134b8c88677813a227df8f779daa29bfc29c183fe3dca6"
[[package]]
name = "cpufeatures"
version = "0.2.12"
@ -189,6 +233,27 @@ dependencies = [
"typenum",
]
[[package]]
name = "ctor"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "derive_destructure2"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64b697ac90ff296f0fc031ee5a61c7ac31fb9fff50e3fb32873b09223613fc0c"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "digest"
version = "0.10.7"
@ -218,7 +283,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245"
dependencies = [
"libc",
"windows-sys",
"windows-sys 0.52.0",
]
[[package]]
@ -239,32 +304,41 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fayalite"
version = "0.2.0"
version = "0.3.0"
dependencies = [
"base64",
"bitvec",
"blake3",
"clap",
"clap_complete",
"ctor",
"eyre",
"fayalite-proc-macros",
"fayalite-visit-gen",
"hashbrown",
"jobslot",
"num-bigint",
"num-traits",
"ordered-float",
"petgraph",
"serde",
"serde_json",
"tempfile",
"trybuild",
"vec_map",
"which",
]
[[package]]
name = "fayalite-proc-macros"
version = "0.2.0"
version = "0.3.0"
dependencies = [
"fayalite-proc-macros-impl",
]
[[package]]
name = "fayalite-proc-macros-impl"
version = "0.2.0"
version = "0.3.0"
dependencies = [
"base16ct",
"num-bigint",
@ -278,7 +352,7 @@ dependencies = [
[[package]]
name = "fayalite-visit-gen"
version = "0.2.0"
version = "0.3.0"
dependencies = [
"indexmap",
"prettyplease",
@ -290,6 +364,18 @@ dependencies = [
"thiserror",
]
[[package]]
name = "fixedbitset"
version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99"
[[package]]
name = "foldhash"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
[[package]]
name = "funty"
version = "2.0.0"
@ -306,6 +392,18 @@ dependencies = [
"version_check",
]
[[package]]
name = "getrandom"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
dependencies = [
"cfg-if",
"libc",
"r-efi",
"wasi",
]
[[package]]
name = "glob"
version = "0.3.1"
@ -314,12 +412,13 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
[[package]]
name = "hashbrown"
version = "0.14.3"
version = "0.15.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604"
checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
dependencies = [
"ahash",
"allocator-api2",
"equivalent",
"foldhash",
]
[[package]]
@ -334,7 +433,7 @@ version = "0.5.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5"
dependencies = [
"windows-sys",
"windows-sys 0.52.0",
]
[[package]]
@ -345,9 +444,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
[[package]]
name = "indexmap"
version = "2.2.6"
version = "2.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26"
checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e"
dependencies = [
"equivalent",
"hashbrown",
@ -367,10 +466,24 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c"
[[package]]
name = "libc"
version = "0.2.153"
name = "jobslot"
version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd"
checksum = "58715c67c327da7f1558708348d68c207fd54900c4ae0529e29305d04d795b8c"
dependencies = [
"cfg-if",
"derive_destructure2",
"getrandom",
"libc",
"scopeguard",
"windows-sys 0.61.2",
]
[[package]]
name = "libc"
version = "0.2.176"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174"
[[package]]
name = "linux-raw-sys"
@ -380,11 +493,10 @@ checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c"
[[package]]
name = "num-bigint"
version = "0.4.4"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0"
checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9"
dependencies = [
"autocfg",
"num-integer",
"num-traits",
]
@ -413,6 +525,29 @@ version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "ordered-float"
version = "5.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4779c6901a562440c3786d08192c6fbda7c1c2060edd10006b05ee35d10f2d"
dependencies = [
"num-traits",
"rand",
"serde",
]
[[package]]
name = "petgraph"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a98c6720655620a521dcc722d0ad66cd8afd5d86e34a89ef691c50b7b24de06"
dependencies = [
"fixedbitset",
"hashbrown",
"indexmap",
"serde",
]
[[package]]
name = "prettyplease"
version = "0.2.20"
@ -425,9 +560,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.83"
version = "1.0.92"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0"
dependencies = [
"unicode-ident",
]
@ -441,12 +576,37 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "r-efi"
version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "radium"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09"
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"rand_core",
"serde",
]
[[package]]
name = "rand_core"
version = "0.6.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
dependencies = [
"serde",
]
[[package]]
name = "rustix"
version = "0.38.31"
@ -457,7 +617,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys",
"windows-sys 0.52.0",
]
[[package]]
@ -466,6 +626,12 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1"
[[package]]
name = "scopeguard"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "serde"
version = "1.0.202"
@ -509,6 +675,12 @@ dependencies = [
"digest",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "strsim"
version = "0.11.1"
@ -517,9 +689,9 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.66"
version = "2.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
checksum = "9c786062daee0d6db1132800e623df74274a0a87322d8e183338e01b3d98d058"
dependencies = [
"proc-macro2",
"quote",
@ -541,7 +713,7 @@ dependencies = [
"cfg-if",
"fastrand",
"rustix",
"windows-sys",
"windows-sys 0.52.0",
]
[[package]]
@ -606,12 +778,36 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "vec_map"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
[[package]]
name = "version_check"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
[[package]]
name = "wasi"
version = "0.14.7+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c"
dependencies = [
"wasip2",
]
[[package]]
name = "wasip2"
version = "1.0.1+wasi-0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "which"
version = "6.0.1"
@ -655,6 +851,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-link"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.52.0"
@ -665,14 +867,24 @@ dependencies = [
]
[[package]]
name = "windows-targets"
version = "0.52.4"
name = "windows-sys"
version = "0.61.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b"
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
dependencies = [
"windows-link",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
@ -681,45 +893,51 @@ dependencies = [
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9"
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675"
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
[[package]]
name = "windows_i686_gnu"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3"
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
[[package]]
name = "windows_i686_gnullvm"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
[[package]]
name = "windows_i686_msvc"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02"
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03"
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177"
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.4"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "winsafe"
@ -727,6 +945,12 @@ version = "0.0.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904"
[[package]]
name = "wit-bindgen"
version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "wyz"
version = "0.5.1"
@ -735,23 +959,3 @@ checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed"
dependencies = [
"tap",
]
[[package]]
name = "zerocopy"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.7.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6"
dependencies = [
"proc-macro2",
"quote",
"syn",
]

View file

@ -5,31 +5,42 @@ resolver = "2"
members = ["crates/*"]
[workspace.package]
version = "0.2.0"
version = "0.3.0"
license = "LGPL-3.0-or-later"
edition = "2021"
edition = "2024"
repository = "https://git.libre-chip.org/libre-chip/fayalite"
keywords = ["hdl", "hardware", "semiconductors", "firrtl", "fpga"]
categories = ["simulation", "development-tools", "compilers"]
rust-version = "1.79"
rust-version = "1.89.0"
[workspace.dependencies]
fayalite-proc-macros = { version = "=0.2.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.2.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.2.0", path = "crates/fayalite-visit-gen" }
fayalite-proc-macros = { version = "=0.3.0", path = "crates/fayalite-proc-macros" }
fayalite-proc-macros-impl = { version = "=0.3.0", path = "crates/fayalite-proc-macros-impl" }
fayalite-visit-gen = { version = "=0.3.0", path = "crates/fayalite-visit-gen" }
base16ct = "0.2.0"
base64 = "0.22.1"
bitvec = { version = "1.0.1", features = ["serde"] }
hashbrown = "0.14.3"
indexmap = { version = "2.2.6", features = ["serde"] }
num-bigint = "0.4.4"
blake3 = { version = "1.5.4", features = ["serde"] }
clap = { version = "4.5.9", features = ["derive", "env", "string"] }
clap_complete = "4.5.58"
ctor = "0.2.8"
eyre = "0.6.12"
hashbrown = "0.15.2"
indexmap = { version = "2.5.0", features = ["serde"] }
jobslot = "0.2.23"
num-bigint = "0.4.6"
num-traits = "0.2.16"
ordered-float = { version = "5.1.0", features = ["serde"] }
petgraph = "0.8.1"
prettyplease = "0.2.20"
proc-macro2 = "1.0.83"
quote = "1.0.36"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = { version = "1.0.117", features = ["preserve_order"] }
sha2 = "0.10.8"
syn = { version = "2.0.66", features = ["full", "fold", "visit", "extra-traits"] }
syn = { version = "2.0.93", features = ["full", "fold", "visit", "extra-traits"] }
tempfile = "3.10.1"
thiserror = "1.0.61"
trybuild = "1.0"
vec_map = "0.8.2"
which = "6.0.1"

View file

@ -1,5 +1,84 @@
<!--
SPDX-License-Identifier: LGPL-3.0-or-later
See Notices.txt for copyright information
-->
# Fayalite
Fayalite is a library for designing digital hardware -- a hardware description language (HDL) embedded in the Rust programming language. Fayalite's semantics are based on [FIRRTL] as interpreted by [LLVM CIRCT](https://circt.llvm.org/docs/Dialects/FIRRTL/FIRRTLAnnotations/).
[FIRRTL]: https://github.com/chipsalliance/firrtl-spec
# Building the [Blinky example] for the Arty A7 100T on Linux
[Blinky example]: crates/fayalite/examples/blinky.rs
This uses the container image containing all the external programs and files that Fayalite needs to build for FPGAs; the sources for the container image are at https://git.libre-chip.org/libre-chip/fayalite-deps
Steps:
Install podman (or docker).
Run:
```bash
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example blinky yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/blinky-out
```
To actually program the FPGA, you'll need to install [openFPGALoader] on your host OS:
[openFPGALoader]: https://github.com/trabucayre/openFPGALoader
On Debian 12:
```bash
sudo apt update && sudo apt install openfpgaloader
```
Then program the FPGA:
```bash
sudo openFPGALoader --board arty_a7_100t target/blinky-out/blinky.bit
```
This will program the FPGA but leave the Flash chip unmodified, so the FPGA will revert when the board is power-cycled.
To also program the Flash, so the design persists when the board is power-cycled:
```bash
sudo openFPGALoader --board arty_a7_100t -f target/blinky-out/blinky.bit
```
# Building the [Transmit-only UART example] for the Arty A7 100T on Linux
[Transmit-only UART example]: crates/fayalite/examples/tx_only_uart.rs
Follow the steps above for building the Blinky example, but replace `blinky` with `tx_only_uart`, as sketched below.
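For reference, here is a minimal sketch of the substituted commands, assuming the same container setup as for Blinky; the `target/tx_only_uart-out` directory and the `tx_only_uart.bit` filename are assumptions patterned on the Blinky output, not taken verbatim from the repository:
```bash
# Build the bitstream inside the dependency container (same flags as the Blinky build).
podman run --rm --security-opt label=disable --volume="$(pwd):$(pwd)" -w="$(pwd)" -it git.libre-chip.org/libre-chip/fayalite-deps:latest cargo run --example tx_only_uart yosys-nextpnr-xray --nextpnr-xilinx-chipdb-dir /opt/fayalite-deps/nextpnr-xilinx/xilinx --prjxray-db-dir /opt/fayalite-deps/prjxray-db --platform arty-a7-100t -o target/tx_only_uart-out

# Program the FPGA (volatile; add -f to also write the Flash).
sudo openFPGALoader --board arty_a7_100t target/tx_only_uart-out/tx_only_uart.bit
```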
View the output using [tio](https://github.com/tio/tio), which you can install on Debian using `apt`.
Find the correct USB device:
```bash
sudo tio --list
```
You want the device with a name like the one below (note the `if01` suffix; `if00` is presumably the JTAG port):
`/dev/serial/by-id/usb-Digilent_Digilent_USB_Device_210319B4A51E-if01-port0`
Connect to the serial port:
```bash
sudo tio -b115200 /dev/serial/by-id/put-your-device-id-here
```
You'll see (repeating endlessly):
```text
Hello World from Fayalite!!!
Hello World from Fayalite!!!
Hello World from Fayalite!!!
```
Press Ctrl+T then `q` to exit tio.
# Funding
## NLnet Grants
* [Libre-Chip CPU with proof of No Spectre bugs](https://nlnet.nl/project/Libre-Chip-proof/) 2024-12-324 [(progress)](https://git.libre-chip.org/libre-chip/grant-tracking/src/branch/master/nlnet-2024-12-324/progress.md)
This project was funded through the [NGI0 Commons Fund](https://nlnet.nl/commonsfund), a fund established by [NLnet](https://nlnet.nl/) with financial support from the European Commission's [Next Generation Internet](https://ngi.eu) programme, under the aegis of [DG Communications Networks, Content and Technology](https://commission.europa.eu/about-european-commission/departments-and-executive-agencies/communications-networks-content-and-technology_en) under grant agreement &numero; [101135429](https://cordis.europa.eu/project/id/101135429). Additional funding is made available by the [Swiss State Secretariat for Education, Research and Innovation](https://www.sbfi.admin.ch/sbfi/en/home.html) (SERI).

View file

@ -13,11 +13,11 @@ rust-version.workspace = true
version.workspace = true
[dependencies]
base16ct = { workspace = true }
num-bigint = { workspace = true }
prettyplease = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
sha2 = { workspace = true }
syn = { workspace = true }
tempfile = { workspace = true }
base16ct.workspace = true
num-bigint.workspace = true
prettyplease.workspace = true
proc-macro2.workspace = true
quote.workspace = true
sha2.workspace = true
syn.workspace = true
tempfile.workspace = true

View file

@ -220,29 +220,36 @@ forward_fold!(syn::ExprArray => fold_expr_array);
forward_fold!(syn::ExprCall => fold_expr_call);
forward_fold!(syn::ExprIf => fold_expr_if);
forward_fold!(syn::ExprMatch => fold_expr_match);
forward_fold!(syn::ExprMethodCall => fold_expr_method_call);
forward_fold!(syn::ExprPath => fold_expr_path);
forward_fold!(syn::ExprRepeat => fold_expr_repeat);
forward_fold!(syn::ExprStruct => fold_expr_struct);
forward_fold!(syn::ExprTuple => fold_expr_tuple);
forward_fold!(syn::FieldPat => fold_field_pat);
forward_fold!(syn::Ident => fold_ident);
forward_fold!(syn::Member => fold_member);
forward_fold!(syn::Path => fold_path);
forward_fold!(syn::Type => fold_type);
forward_fold!(syn::TypePath => fold_type_path);
forward_fold!(syn::WherePredicate => fold_where_predicate);
no_op_fold!(proc_macro2::Span);
no_op_fold!(syn::parse::Nothing);
no_op_fold!(syn::token::Brace);
no_op_fold!(syn::token::Bracket);
no_op_fold!(syn::token::Group);
no_op_fold!(syn::token::Paren);
no_op_fold!(syn::Token![_]);
no_op_fold!(syn::Token![,]);
no_op_fold!(syn::Token![;]);
no_op_fold!(syn::Token![:]);
no_op_fold!(syn::Token![::]);
no_op_fold!(syn::Token![..]);
no_op_fold!(syn::Token![.]);
no_op_fold!(syn::Token![#]);
no_op_fold!(syn::Token![<]);
no_op_fold!(syn::Token![=]);
no_op_fold!(syn::Token![=>]);
no_op_fold!(syn::Token![>]);
no_op_fold!(syn::Token![|]);
no_op_fold!(syn::Token![enum]);
no_op_fold!(syn::Token![extern]);
@ -251,3 +258,4 @@ no_op_fold!(syn::Token![mut]);
no_op_fold!(syn::Token![static]);
no_op_fold!(syn::Token![struct]);
no_op_fold!(syn::Token![where]);
no_op_fold!(usize);

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,139 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
Errors, HdlAttr,
hdl_type_common::{
ItemOptions, MakeHdlTypeExpr, MaybeParsed, ParsedGenerics, ParsedType, TypesParser,
get_target,
},
kw,
};
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{Attribute, Generics, Ident, ItemType, Token, Type, Visibility, parse_quote_spanned};
#[derive(Clone, Debug)]
pub(crate) struct ParsedTypeAlias {
pub(crate) attrs: Vec<Attribute>,
pub(crate) options: HdlAttr<ItemOptions, kw::hdl>,
pub(crate) vis: Visibility,
pub(crate) type_token: Token![type],
pub(crate) ident: Ident,
pub(crate) generics: MaybeParsed<ParsedGenerics, Generics>,
pub(crate) eq_token: Token![=],
pub(crate) ty: MaybeParsed<ParsedType, Type>,
pub(crate) semi_token: Token![;],
}
impl ParsedTypeAlias {
fn parse(item: ItemType) -> syn::Result<Self> {
let ItemType {
mut attrs,
vis,
type_token,
ident,
mut generics,
eq_token,
ty,
semi_token,
} = item;
let mut errors = Errors::new();
let mut options = errors
.unwrap_or_default(HdlAttr::<ItemOptions, kw::hdl>::parse_and_take_attr(
&mut attrs,
))
.unwrap_or_default();
errors.ok(options.body.validate());
let ItemOptions {
outline_generated: _,
target: _,
custom_bounds,
no_static,
no_runtime_generics: _,
cmp_eq,
} = options.body;
if let Some((no_static,)) = no_static {
errors.error(no_static, "no_static is not valid on type aliases");
}
if let Some((cmp_eq,)) = cmp_eq {
errors.error(cmp_eq, "cmp_eq is not valid on type aliases");
}
let generics = if custom_bounds.is_some() {
MaybeParsed::Unrecognized(generics)
} else if let Some(generics) = errors.ok(ParsedGenerics::parse(&mut generics)) {
MaybeParsed::Parsed(generics)
} else {
MaybeParsed::Unrecognized(generics)
};
let ty = TypesParser::maybe_run(generics.as_ref(), *ty, &mut errors);
errors.finish()?;
Ok(Self {
attrs,
options,
vis,
type_token,
ident,
generics,
eq_token,
ty,
semi_token,
})
}
}
impl ToTokens for ParsedTypeAlias {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
attrs,
options,
vis,
type_token,
ident,
generics,
eq_token,
ty,
semi_token,
} = self;
let ItemOptions {
outline_generated: _,
target,
custom_bounds: _,
no_static: _,
no_runtime_generics,
cmp_eq: _,
} = &options.body;
let target = get_target(target, ident);
let mut type_attrs = attrs.clone();
type_attrs.push(parse_quote_spanned! {ident.span()=>
#[allow(type_alias_bounds)]
});
ItemType {
attrs: type_attrs,
vis: vis.clone(),
type_token: *type_token,
ident: ident.clone(),
generics: generics.into(),
eq_token: *eq_token,
ty: Box::new(ty.clone().into()),
semi_token: *semi_token,
}
.to_tokens(tokens);
if let (MaybeParsed::Parsed(generics), MaybeParsed::Parsed(ty), None) =
(generics, ty, no_runtime_generics)
{
generics.make_runtime_generics(tokens, vis, ident, &target, |context| {
ty.make_hdl_type_expr(context)
})
}
}
}
pub(crate) fn hdl_type_alias_impl(item: ItemType) -> syn::Result<TokenStream> {
let item = ParsedTypeAlias::parse(item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = item.to_token_stream();
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "hdl-type-alias-");
}
Ok(contents)
}

File diff suppressed because it is too large

View file

@ -2,26 +2,46 @@
// See Notices.txt for copyright information
#![cfg_attr(test, recursion_limit = "512")]
use proc_macro2::{Span, TokenStream};
use quote::{quote, ToTokens};
use std::io::{ErrorKind, Write};
use quote::{ToTokens, quote};
use std::{
collections::{HashMap, hash_map::Entry},
io::{ErrorKind, Write},
};
use syn::{
bracketed, parenthesized,
AttrStyle, Attribute, Error, Ident, Item, ItemFn, LitBool, LitStr, Meta, Token, bracketed,
ext::IdentExt,
parenthesized,
parse::{Parse, ParseStream, Parser},
parse_quote,
punctuated::Pair,
AttrStyle, Attribute, Error, Item, Token,
punctuated::{Pair, Punctuated},
spanned::Spanned,
token::{Bracket, Paren},
};
mod fold;
mod hdl_bundle;
mod hdl_enum;
mod hdl_type_alias;
mod hdl_type_common;
mod module;
mod value_derive_common;
mod value_derive_enum;
mod value_derive_struct;
mod process_cfg;
pub(crate) trait CustomToken:
Copy
+ Spanned
+ ToTokens
+ std::fmt::Debug
+ Eq
+ std::hash::Hash
+ Default
+ quote::IdentFragment
+ Parse
{
const IDENT_STR: &'static str;
}
mod kw {
pub(crate) use syn::token::{
Enum as enum_, Extern as extern_, Static as static_, Struct as struct_, Where as where_,
};
pub(crate) use syn::token::Extern as extern_;
macro_rules! custom_keyword {
($kw:ident) => {
@ -38,13 +58,27 @@ mod kw {
}
crate::fold::no_op_fold!($kw);
impl crate::CustomToken for $kw {
const IDENT_STR: &'static str = stringify!($kw);
}
};
}
custom_keyword!(__evaluated_cfgs);
custom_keyword!(add_platform_io);
custom_keyword!(all);
custom_keyword!(any);
custom_keyword!(cfg);
custom_keyword!(cfg_attr);
custom_keyword!(clock_domain);
custom_keyword!(cmp_eq);
custom_keyword!(connect_inexact);
custom_keyword!(custom_bounds);
custom_keyword!(flip);
custom_keyword!(hdl);
custom_keyword!(hdl_module);
custom_keyword!(incomplete_wire);
custom_keyword!(input);
custom_keyword!(instance);
custom_keyword!(m);
@ -52,11 +86,14 @@ mod kw {
custom_keyword!(memory_array);
custom_keyword!(memory_with_init);
custom_keyword!(no_reset);
custom_keyword!(no_runtime_generics);
custom_keyword!(no_static);
custom_keyword!(not);
custom_keyword!(outline_generated);
custom_keyword!(output);
custom_keyword!(reg_builder);
custom_keyword!(reset);
custom_keyword!(reset_default);
custom_keyword!(sim);
custom_keyword!(skip);
custom_keyword!(target);
custom_keyword!(wire);
@ -65,34 +102,34 @@ mod kw {
type Pound = Token![#]; // work around https://github.com/rust-lang/rust/issues/50676
#[derive(Clone, Debug)]
pub(crate) struct HdlAttr<T> {
pub(crate) struct HdlAttr<T, KW> {
pub(crate) pound_token: Pound,
pub(crate) style: AttrStyle,
pub(crate) bracket_token: syn::token::Bracket,
pub(crate) hdl: kw::hdl,
pub(crate) kw: KW,
pub(crate) paren_token: Option<syn::token::Paren>,
pub(crate) body: T,
}
crate::fold::impl_fold! {
struct HdlAttr<T,> {
struct HdlAttr<T, KW,> {
pound_token: Pound,
style: AttrStyle,
bracket_token: syn::token::Bracket,
hdl: kw::hdl,
kw: KW,
paren_token: Option<syn::token::Paren>,
body: T,
}
}
#[allow(dead_code)]
impl<T> HdlAttr<T> {
pub(crate) fn split_body(self) -> (HdlAttr<()>, T) {
impl<T, KW> HdlAttr<T, KW> {
pub(crate) fn split_body(self) -> (HdlAttr<(), KW>, T) {
let Self {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body,
} = self;
@ -101,19 +138,19 @@ impl<T> HdlAttr<T> {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body: (),
},
body,
)
}
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2> {
pub(crate) fn replace_body<T2>(self, body: T2) -> HdlAttr<T2, KW> {
let Self {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body: _,
} = self;
@ -121,17 +158,20 @@ impl<T> HdlAttr<T> {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body,
}
}
pub(crate) fn as_ref(&self) -> HdlAttr<&T> {
pub(crate) fn as_ref(&self) -> HdlAttr<&T, KW>
where
KW: Clone,
{
let Self {
pound_token,
style,
bracket_token,
hdl,
ref kw,
paren_token,
ref body,
} = *self;
@ -139,17 +179,20 @@ impl<T> HdlAttr<T> {
pound_token,
style,
bracket_token,
hdl,
kw: kw.clone(),
paren_token,
body,
}
}
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(self, f: F) -> Result<HdlAttr<R>, E> {
pub(crate) fn try_map<R, E, F: FnOnce(T) -> Result<R, E>>(
self,
f: F,
) -> Result<HdlAttr<R, KW>, E> {
let Self {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body,
} = self;
@ -157,17 +200,17 @@ impl<T> HdlAttr<T> {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body: f(body)?,
})
}
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R> {
pub(crate) fn map<R, F: FnOnce(T) -> R>(self, f: F) -> HdlAttr<R, KW> {
let Self {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body,
} = self;
@ -175,7 +218,7 @@ impl<T> HdlAttr<T> {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body: f(body),
}
@ -183,31 +226,32 @@ impl<T> HdlAttr<T> {
fn to_attr(&self) -> Attribute
where
T: ToTokens,
KW: ToTokens,
{
parse_quote! { #self }
}
}
impl<T: Default> Default for HdlAttr<T> {
impl<T: Default, KW: Default> Default for HdlAttr<T, KW> {
fn default() -> Self {
T::default().into()
}
}
impl<T> From<T> for HdlAttr<T> {
impl<T, KW: Default> From<T> for HdlAttr<T, KW> {
fn from(body: T) -> Self {
HdlAttr {
pound_token: Default::default(),
style: AttrStyle::Outer,
bracket_token: Default::default(),
hdl: Default::default(),
kw: Default::default(),
paren_token: Default::default(),
body,
}
}
}
impl<T: ToTokens> ToTokens for HdlAttr<T> {
impl<T: ToTokens, KW: ToTokens + Spanned> ToTokens for HdlAttr<T, KW> {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.pound_token.to_tokens(tokens);
match self.style {
@ -215,7 +259,7 @@ impl<T: ToTokens> ToTokens for HdlAttr<T> {
AttrStyle::Outer => {}
};
self.bracket_token.surround(tokens, |tokens| {
self.hdl.to_tokens(tokens);
self.kw.to_tokens(tokens);
match self.paren_token {
Some(paren_token) => {
paren_token.surround(tokens, |tokens| self.body.to_tokens(tokens))
@ -223,7 +267,7 @@ impl<T: ToTokens> ToTokens for HdlAttr<T> {
None => {
let body = self.body.to_token_stream();
if !body.is_empty() {
syn::token::Paren(self.hdl.span)
syn::token::Paren(self.kw.span())
.surround(tokens, |tokens| tokens.extend([body]));
}
}
@ -232,18 +276,24 @@ impl<T: ToTokens> ToTokens for HdlAttr<T> {
}
}
fn is_hdl_attr(attr: &Attribute) -> bool {
attr.path().is_ident("hdl")
fn is_hdl_attr<KW: CustomToken>(attr: &Attribute) -> bool {
attr.path().is_ident(KW::IDENT_STR)
}
impl<T: Parse> HdlAttr<T> {
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>> {
impl<T: Parse, KW: Parse> HdlAttr<T, KW> {
fn parse_and_take_attr(attrs: &mut Vec<Attribute>) -> syn::Result<Option<Self>>
where
KW: ToTokens,
{
let mut retval = None;
let mut errors = Errors::new();
attrs.retain(|attr| {
if is_hdl_attr(attr) {
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
if retval.is_some() {
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
errors.push(Error::new_spanned(
attr,
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
));
}
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
false
@ -254,13 +304,19 @@ impl<T: Parse> HdlAttr<T> {
errors.finish()?;
Ok(retval)
}
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>> {
fn parse_and_leave_attr(attrs: &[Attribute]) -> syn::Result<Option<Self>>
where
KW: ToTokens,
{
let mut retval = None;
let mut errors = Errors::new();
for attr in attrs {
if is_hdl_attr(attr) {
if let Ok(kw) = syn::parse2::<KW>(attr.path().to_token_stream()) {
if retval.is_some() {
errors.push(Error::new_spanned(attr, "more than one #[hdl] attribute"));
errors.push(Error::new_spanned(
attr,
format_args!("more than one #[{}] attribute", kw.to_token_stream()),
));
}
errors.unwrap_or_default(Self::parse_attr(attr).map(|v| retval = Some(v)));
}
@ -281,7 +337,7 @@ impl<T: Parse> HdlAttr<T> {
) -> syn::Result<Self> {
let bracket_content;
let bracket_token = bracketed!(bracket_content in input);
let hdl = bracket_content.parse()?;
let kw = bracket_content.parse()?;
let paren_content;
let body;
let paren_token;
@ -302,7 +358,7 @@ impl<T: Parse> HdlAttr<T> {
pound_token,
style,
bracket_token,
hdl,
kw,
paren_token,
body,
})
@ -519,6 +575,26 @@ macro_rules! impl_extra_traits_for_options {
) => {
impl Copy for $option_enum_name {}
impl PartialEq for $option_enum_name {
fn eq(&self, other: &Self) -> bool {
self.cmp(other).is_eq()
}
}
impl Eq for $option_enum_name {}
impl PartialOrd for $option_enum_name {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for $option_enum_name {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.variant().cmp(&other.variant())
}
}
impl quote::IdentFragment for $option_enum_name {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
let _ = f;
@ -554,6 +630,66 @@ pub(crate) use impl_extra_traits_for_options;
macro_rules! options {
(
#[options = $options_name:ident]
$($tt:tt)*
) => {
crate::options! {
#[options = $options_name, punct = syn::Token![,], allow_duplicates = false]
$($tt)*
}
};
(
#[options = $options_name:ident, punct = $Punct:ty, allow_duplicates = true]
$(#[$($enum_meta:tt)*])*
$enum_vis:vis enum $option_enum_name:ident {
$($Variant:ident($key:ident $(, $value:ty)?),)*
}
) => {
crate::options! {
#[options = $options_name, punct = $Punct, allow_duplicates = (true)]
$(#[$($enum_meta)*])*
$enum_vis enum $option_enum_name {
$($Variant($key $(, $value)?),)*
}
}
impl Extend<$option_enum_name> for $options_name {
fn extend<T: IntoIterator<Item = $option_enum_name>>(&mut self, iter: T) {
iter.into_iter().for_each(|v| match v {
$($option_enum_name::$Variant(v) => {
self.$key = Some(v);
})*
});
}
}
impl FromIterator<$option_enum_name> for $options_name {
fn from_iter<T: IntoIterator<Item = $option_enum_name>>(iter: T) -> Self {
let mut retval = Self::default();
retval.extend(iter);
retval
}
}
impl Extend<$options_name> for $options_name {
fn extend<T: IntoIterator<Item = $options_name>>(&mut self, iter: T) {
iter.into_iter().for_each(|v| {
$(if let Some(v) = v.$key {
self.$key = Some(v);
})*
});
}
}
impl FromIterator<$options_name> for $options_name {
fn from_iter<T: IntoIterator<Item = $options_name>>(iter: T) -> Self {
let mut retval = Self::default();
retval.extend(iter);
retval
}
}
};
(
#[options = $options_name:ident, punct = $Punct:ty, allow_duplicates = $allow_duplicates:expr]
$(#[$($enum_meta:tt)*])*
$enum_vis:vis enum $option_enum_name:ident {
$($Variant:ident($key:ident $(, $value:ty)?),)*
@ -567,8 +703,11 @@ macro_rules! options {
}
#[derive(Clone, Debug, Default)]
#[allow(non_snake_case)]
$enum_vis struct $options_name {
$($enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,)*
$(
$enum_vis $key: Option<(crate::kw::$key, $(syn::token::Paren, $value)?)>,
)*
}
crate::fold::impl_fold! {
@ -577,6 +716,43 @@ macro_rules! options {
}
}
const _: () = {
#[derive(Clone, Debug)]
$enum_vis struct Iter($enum_vis $options_name);
impl IntoIterator for $options_name {
type Item = $option_enum_name;
type IntoIter = Iter;
fn into_iter(self) -> Self::IntoIter {
Iter(self)
}
}
impl Iterator for Iter {
type Item = $option_enum_name;
fn next(&mut self) -> Option<Self::Item> {
$(
if let Some(value) = self.0.$key.take() {
return Some($option_enum_name::$Variant(value));
}
)*
None
}
#[allow(unused_mut, unused_variables)]
fn fold<B, F: FnMut(B, Self::Item) -> B>(mut self, mut init: B, mut f: F) -> B {
$(
if let Some(value) = self.0.$key.take() {
init = f(init, $option_enum_name::$Variant(value));
}
)*
init
}
}
};
impl syn::parse::Parse for $options_name {
fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
#![allow(unused_mut, unused_variables, unreachable_code)]
@ -585,7 +761,7 @@ macro_rules! options {
let old_input = input.fork();
match input.parse::<$option_enum_name>()? {
$($option_enum_name::$Variant(v) => {
if retval.$key.replace(v).is_some() {
if retval.$key.replace(v).is_some() && !$allow_duplicates {
return Err(old_input.error(concat!("duplicate ", stringify!($key), " option")));
}
})*
@ -593,7 +769,7 @@ macro_rules! options {
if input.is_empty() {
break;
}
input.parse::<syn::Token![,]>()?;
input.parse::<$Punct>()?;
}
Ok(retval)
}
@ -602,7 +778,7 @@ macro_rules! options {
impl quote::ToTokens for $options_name {
#[allow(unused_mut, unused_variables, unused_assignments)]
fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
let mut separator: Option<syn::Token![,]> = None;
let mut separator: Option<$Punct> = None;
$(if let Some(v) = &self.$key {
separator.to_tokens(tokens);
separator = Some(Default::default());
@ -673,9 +849,24 @@ macro_rules! options {
}
}
}
impl $option_enum_name {
#[allow(dead_code)]
fn variant(&self) -> usize {
#[repr(usize)]
enum Variant {
$($Variant,)*
__Last, // so it doesn't complain about zero-variant enums
}
match *self {
$(Self::$Variant(..) => Variant::$Variant as usize,)*
}
}
}
};
}
use crate::hdl_type_alias::hdl_type_alias_impl;
pub(crate) use options;
pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStream {
@ -686,6 +877,15 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr
.suffix(".tmp.rs")
.tempfile_in(out_dir)
.unwrap();
struct PrintOnPanic<'a>(&'a TokenStream);
impl Drop for PrintOnPanic<'_> {
fn drop(&mut self) {
if std::thread::panicking() {
println!("{}", self.0);
}
}
}
let _print_on_panic = PrintOnPanic(&contents);
let contents = prettyplease::unparse(&parse_quote! { #contents });
let hash = <sha2::Sha256 as sha2::Digest>::digest(&contents);
let hash = base16ct::HexDisplay(&hash[..5]);
@ -706,25 +906,372 @@ pub(crate) fn outline_generated(contents: TokenStream, prefix: &str) -> TokenStr
}
}
pub fn module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
let options = syn::parse2::<module::ConfigOptions>(attr)?;
let options = HdlAttr::from(options);
let func = syn::parse2::<module::ModuleFn>(quote! { #options #item })?;
fn hdl_module_impl(item: ItemFn) -> syn::Result<TokenStream> {
let func = module::ModuleFn::parse_from_fn(item)?;
let options = func.config_options();
let mut contents = func.generate();
if options.body.outline_generated.is_some() {
if options.outline_generated.is_some() {
contents = outline_generated(contents, "module-");
}
Ok(contents)
}
pub fn value_derive(item: TokenStream) -> syn::Result<TokenStream> {
let item = syn::parse2::<Item>(item)?;
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) enum CfgExpr {
Option {
ident: Ident,
value: Option<(Token![=], LitStr)>,
},
All {
all: kw::all,
paren: Paren,
exprs: Punctuated<CfgExpr, Token![,]>,
},
Any {
any: kw::any,
paren: Paren,
exprs: Punctuated<CfgExpr, Token![,]>,
},
Not {
not: kw::not,
paren: Paren,
expr: Box<CfgExpr>,
trailing_comma: Option<Token![,]>,
},
}
impl Parse for CfgExpr {
fn parse(input: ParseStream) -> syn::Result<Self> {
match input.cursor().ident() {
Some((_, cursor)) if cursor.eof() => {
return Ok(CfgExpr::Option {
ident: input.call(Ident::parse_any)?,
value: None,
});
}
_ => {}
}
if input.peek(Ident::peek_any) && input.peek2(Token![=]) {
return Ok(CfgExpr::Option {
ident: input.call(Ident::parse_any)?,
value: Some((input.parse()?, input.parse()?)),
});
}
let contents;
if input.peek(kw::all) {
Ok(CfgExpr::All {
all: input.parse()?,
paren: parenthesized!(contents in input),
exprs: contents.call(Punctuated::parse_terminated)?,
})
} else if input.peek(kw::any) {
Ok(CfgExpr::Any {
any: input.parse()?,
paren: parenthesized!(contents in input),
exprs: contents.call(Punctuated::parse_terminated)?,
})
} else if input.peek(kw::not) {
Ok(CfgExpr::Not {
not: input.parse()?,
paren: parenthesized!(contents in input),
expr: contents.parse()?,
trailing_comma: contents.parse()?,
})
} else {
Err(input.error("expected cfg-pattern"))
}
}
}
impl ToTokens for CfgExpr {
fn to_tokens(&self, tokens: &mut TokenStream) {
match self {
CfgExpr::Option { ident, value } => {
ident.to_tokens(tokens);
if let Some((eq, value)) = value {
eq.to_tokens(tokens);
value.to_tokens(tokens);
}
}
CfgExpr::All { all, paren, exprs } => {
all.to_tokens(tokens);
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
}
CfgExpr::Any { any, paren, exprs } => {
any.to_tokens(tokens);
paren.surround(tokens, |tokens| exprs.to_tokens(tokens));
}
CfgExpr::Not {
not,
paren,
expr,
trailing_comma,
} => {
not.to_tokens(tokens);
paren.surround(tokens, |tokens| {
expr.to_tokens(tokens);
trailing_comma.to_tokens(tokens);
});
}
}
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct Cfg {
cfg: kw::cfg,
paren: Paren,
expr: CfgExpr,
trailing_comma: Option<Token![,]>,
}
impl Cfg {
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
syn::parse2(meta.to_token_stream())
}
}
impl ToTokens for Cfg {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
cfg,
paren,
expr,
trailing_comma,
} = self;
cfg.to_tokens(tokens);
paren.surround(tokens, |tokens| {
expr.to_tokens(tokens);
trailing_comma.to_tokens(tokens);
});
}
}
impl Parse for Cfg {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
Ok(Self {
cfg: input.parse()?,
paren: parenthesized!(contents in input),
expr: contents.parse()?,
trailing_comma: contents.parse()?,
})
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub(crate) struct CfgAttr {
cfg_attr: kw::cfg_attr,
paren: Paren,
expr: CfgExpr,
comma: Token![,],
attrs: Punctuated<Meta, Token![,]>,
}
impl CfgAttr {
pub(crate) fn to_cfg(&self) -> Cfg {
Cfg {
cfg: kw::cfg(self.cfg_attr.span),
paren: self.paren,
expr: self.expr.clone(),
trailing_comma: None,
}
}
fn parse_meta(meta: &Meta) -> syn::Result<Self> {
syn::parse2(meta.to_token_stream())
}
}
impl Parse for CfgAttr {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
Ok(Self {
cfg_attr: input.parse()?,
paren: parenthesized!(contents in input),
expr: contents.parse()?,
comma: contents.parse()?,
attrs: contents.call(Punctuated::parse_terminated)?,
})
}
}
pub(crate) struct CfgAndValue {
cfg: Cfg,
eq_token: Token![=],
value: LitBool,
}
impl Parse for CfgAndValue {
fn parse(input: ParseStream) -> syn::Result<Self> {
Ok(Self {
cfg: input.parse()?,
eq_token: input.parse()?,
value: input.parse()?,
})
}
}
pub(crate) struct Cfgs<T> {
pub(crate) bracket: Bracket,
pub(crate) cfgs_map: HashMap<Cfg, T>,
pub(crate) cfgs_list: Vec<Cfg>,
}
impl<T> Default for Cfgs<T> {
fn default() -> Self {
Self {
bracket: Default::default(),
cfgs_map: Default::default(),
cfgs_list: Default::default(),
}
}
}
impl<T> Cfgs<T> {
fn insert_cfg(&mut self, cfg: Cfg, value: T) {
match self.cfgs_map.entry(cfg) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
self.cfgs_list.push(entry.key().clone());
entry.insert(value);
}
}
}
}
impl Parse for Cfgs<bool> {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
let bracket = bracketed!(contents in input);
let mut cfgs_map = HashMap::new();
let mut cfgs_list = Vec::new();
for CfgAndValue {
cfg,
eq_token,
value,
} in contents.call(Punctuated::<CfgAndValue, Token![,]>::parse_terminated)?
{
let _ = eq_token;
match cfgs_map.entry(cfg) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
cfgs_list.push(entry.key().clone());
entry.insert(value.value);
}
}
}
Ok(Self {
bracket,
cfgs_map,
cfgs_list,
})
}
}
impl Parse for Cfgs<()> {
fn parse(input: ParseStream) -> syn::Result<Self> {
let contents;
let bracket = bracketed!(contents in input);
let mut cfgs_map = HashMap::new();
let mut cfgs_list = Vec::new();
for cfg in contents.call(Punctuated::<Cfg, Token![,]>::parse_terminated)? {
match cfgs_map.entry(cfg) {
Entry::Occupied(_) => {}
Entry::Vacant(entry) => {
cfgs_list.push(entry.key().clone());
entry.insert(());
}
}
}
Ok(Self {
bracket,
cfgs_map,
cfgs_list,
})
}
}
impl ToTokens for Cfgs<()> {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
bracket,
cfgs_map: _,
cfgs_list,
} = self;
bracket.surround(tokens, |tokens| {
for cfg in cfgs_list {
cfg.to_tokens(tokens);
<Token![,]>::default().to_tokens(tokens);
}
});
}
}
fn hdl_main(
kw: impl CustomToken,
attr: TokenStream,
item: TokenStream,
) -> syn::Result<TokenStream> {
fn parse_evaluated_cfgs_attr<R>(
input: ParseStream,
parse_inner: impl FnOnce(ParseStream) -> syn::Result<R>,
) -> syn::Result<R> {
let _: Token![#] = input.parse()?;
let bracket_content;
bracketed!(bracket_content in input);
let _: kw::__evaluated_cfgs = bracket_content.parse()?;
let paren_content;
parenthesized!(paren_content in bracket_content);
parse_inner(&paren_content)
}
let (evaluated_cfgs, item): (_, TokenStream) = Parser::parse2(
|input: ParseStream| {
let peek = input.fork();
if parse_evaluated_cfgs_attr(&peek, |_| Ok(())).is_ok() {
let evaluated_cfgs = parse_evaluated_cfgs_attr(input, Cfgs::<bool>::parse)?;
Ok((Some(evaluated_cfgs), input.parse()?))
} else {
Ok((None, input.parse()?))
}
},
item,
)?;
let cfgs = if let Some(cfgs) = evaluated_cfgs {
cfgs
} else {
let cfgs = process_cfg::collect_cfgs(syn::parse2(item.clone())?)?;
if cfgs.cfgs_list.is_empty() {
Cfgs::default()
} else {
return Ok(quote! {
::fayalite::__cfg_expansion_helper! {
[]
#cfgs
{#[::fayalite::#kw(#attr)]} { #item }
}
});
}
};
let item = syn::parse2(quote! { #[#kw(#attr)] #item })?;
let Some(item) = process_cfg::process_cfgs(item, cfgs)? else {
return Ok(TokenStream::new());
};
match item {
Item::Enum(item) => value_derive_enum::value_derive_enum(item),
Item::Struct(item) => value_derive_struct::value_derive_struct(item),
Item::Enum(item) => hdl_enum::hdl_enum(item),
Item::Struct(item) => hdl_bundle::hdl_bundle(item),
Item::Fn(item) => hdl_module_impl(item),
Item::Type(item) => hdl_type_alias_impl(item),
_ => Err(syn::Error::new(
Span::call_site(),
"derive(Value) can only be used on structs or enums",
"top-level #[hdl] can only be used on structs, enums, type aliases, or functions",
)),
}
}
pub fn hdl_module(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
hdl_main(kw::hdl_module::default(), attr, item)
}
pub fn hdl_attr(attr: TokenStream, item: TokenStream) -> syn::Result<TokenStream> {
hdl_main(kw::hdl::default(), attr, item)
}

View file

@ -1,19 +1,20 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
is_hdl_attr,
module::transform_body::{HdlLet, HdlLetKindIO},
options, Errors, HdlAttr, PairsIterExt,
Errors, HdlAttr, PairsIterExt,
hdl_type_common::{ParsedGenerics, SplitForImpl},
kw,
module::transform_body::{HdlLet, HdlLetKindIO, ModuleIOOrAddPlatformIO},
options,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned, ToTokens};
use quote::{ToTokens, format_ident, quote, quote_spanned};
use std::collections::HashSet;
use syn::{
parse::{Parse, ParseStream},
parse_quote,
visit::{visit_pat, Visit},
Attribute, Block, ConstParam, Error, FnArg, GenericParam, Generics, Ident, ItemFn, ItemStruct,
LifetimeParam, ReturnType, Signature, TypeParam, Visibility, WhereClause, WherePredicate,
parse_quote,
visit::{Visit, visit_pat},
};
mod transform_body;
@ -38,7 +39,7 @@ pub(crate) fn check_name_conflicts_with_module_builder(name: &Ident) -> syn::Res
if name == "m" {
Err(Error::new_spanned(
name,
"name conflicts with implicit `m: &mut ModuleBuilder<_>`",
"name conflicts with implicit `m: &ModuleBuilder`",
))
} else {
Ok(())
@ -57,26 +58,39 @@ impl Visit<'_> for CheckNameConflictsWithModuleBuilderVisitor<'_> {
}
}
fn retain_struct_attrs<F: FnMut(&Attribute) -> bool>(item: &mut ItemStruct, mut f: F) {
item.attrs.retain(&mut f);
for field in item.fields.iter_mut() {
field.attrs.retain(&mut f);
}
}
pub(crate) type ModuleIO = HdlLet<HdlLetKindIO>;
pub(crate) struct ModuleFn {
struct ModuleFnModule {
attrs: Vec<Attribute>,
config_options: HdlAttr<ConfigOptions>,
config_options: HdlAttr<ConfigOptions, kw::hdl_module>,
module_kind: ModuleKind,
vis: Visibility,
sig: Signature,
block: Box<Block>,
io: Vec<ModuleIO>,
struct_generics: Generics,
struct_generics: Option<ParsedGenerics>,
the_struct: TokenStream,
}
enum ModuleFnImpl {
Module(ModuleFnModule),
Fn {
attrs: Vec<Attribute>,
config_options: HdlAttr<ConfigOptions, kw::hdl>,
vis: Visibility,
sig: Signature,
block: Box<Block>,
},
}
options! {
pub(crate) enum HdlOrHdlModule {
Hdl(hdl),
HdlModule(hdl_module),
}
}
pub(crate) struct ModuleFn(ModuleFnImpl);
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub(crate) enum ModuleKind {
Extern,
@ -96,14 +110,25 @@ impl Visit<'_> for ContainsSkippedIdent<'_> {
}
}
impl Parse for ModuleFn {
fn parse(input: ParseStream) -> syn::Result<Self> {
impl ModuleFn {
pub(crate) fn config_options(&self) -> ConfigOptions {
let (ModuleFnImpl::Module(ModuleFnModule {
config_options: HdlAttr { body, .. },
..
})
| ModuleFnImpl::Fn {
config_options: HdlAttr { body, .. },
..
}) = &self.0;
body.clone()
}
pub(crate) fn parse_from_fn(item: ItemFn) -> syn::Result<Self> {
let ItemFn {
mut attrs,
vis,
mut sig,
block,
} = input.parse()?;
} = item;
let Signature {
ref constness,
ref asyncness,
@ -118,43 +143,60 @@ impl Parse for ModuleFn {
ref output,
} = sig;
let mut errors = Errors::new();
let config_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
.unwrap_or_default();
let Some(mut config_options) =
errors.unwrap_or_default(
HdlAttr::<ConfigOptions, HdlOrHdlModule>::parse_and_take_attr(&mut attrs),
)
else {
errors.error(sig.ident, "missing #[hdl] or #[hdl_module] attribute");
errors.finish()?;
unreachable!();
};
let ConfigOptions {
outline_generated: _,
extern_,
} = config_options.body;
let module_kind = match extern_ {
Some(_) => ModuleKind::Extern,
None => ModuleKind::Normal,
let module_kind = match (config_options.kw, extern_) {
(HdlOrHdlModule::Hdl(_), None) => None,
(HdlOrHdlModule::Hdl(_), Some(extern2)) => {
config_options.body.extern_ = None;
errors.error(
extern2.0,
"extern can only be used as #[hdl_module(extern)]",
);
None
}
(HdlOrHdlModule::HdlModule(_), None) => Some(ModuleKind::Normal),
(HdlOrHdlModule::HdlModule(_), Some(_)) => Some(ModuleKind::Extern),
};
for fn_arg in inputs {
match fn_arg {
FnArg::Receiver(_) => {
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
}
FnArg::Typed(fn_arg) => {
visit_pat(
&mut CheckNameConflictsWithModuleBuilderVisitor {
errors: &mut errors,
},
&fn_arg.pat,
);
if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
for fn_arg in inputs {
match fn_arg {
FnArg::Receiver(_) => {
errors.push(syn::Error::new_spanned(fn_arg, "self not allowed here"));
}
FnArg::Typed(fn_arg) => {
visit_pat(
&mut CheckNameConflictsWithModuleBuilderVisitor {
errors: &mut errors,
},
&fn_arg.pat,
);
}
}
}
}
if let Some(constness) = constness {
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
}
if let Some(asyncness) = asyncness {
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
}
if let Some(unsafety) = unsafety {
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
}
if let Some(abi) = abi {
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
if let Some(constness) = constness {
errors.push(syn::Error::new_spanned(constness, "const not allowed here"));
}
if let Some(asyncness) = asyncness {
errors.push(syn::Error::new_spanned(asyncness, "async not allowed here"));
}
if let Some(unsafety) = unsafety {
errors.push(syn::Error::new_spanned(unsafety, "unsafe not allowed here"));
}
if let Some(abi) = abi {
errors.push(syn::Error::new_spanned(abi, "extern not allowed here"));
}
}
let mut skipped_idents = HashSet::new();
let struct_generic_params = generics
@ -162,14 +204,17 @@ impl Parse for ModuleFn {
.pairs_mut()
.filter_map_pair_value_mut(|v| match v {
GenericParam::Lifetime(LifetimeParam { attrs, .. }) => {
errors
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs));
errors.unwrap_or_default(
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
);
None
}
GenericParam::Type(TypeParam { attrs, ident, .. })
| GenericParam::Const(ConstParam { attrs, ident, .. }) => {
if errors
.unwrap_or_default(HdlAttr::<crate::kw::skip>::parse_and_take_attr(attrs))
.unwrap_or_default(
HdlAttr::<crate::kw::skip, kw::hdl>::parse_and_take_attr(attrs),
)
.is_some()
{
skipped_idents.insert(ident.clone());
@ -183,6 +228,7 @@ impl Parse for ModuleFn {
let struct_where_clause = generics
.where_clause
.as_mut()
.filter(|_| matches!(config_options.kw, HdlOrHdlModule::HdlModule(_)))
.map(|where_clause| WhereClause {
where_token: where_clause.where_token,
predicates: where_clause
@ -205,49 +251,173 @@ impl Parse for ModuleFn {
})
.collect(),
});
let struct_generics = Generics {
lt_token: generics.lt_token,
params: struct_generic_params,
gt_token: generics.gt_token,
where_clause: struct_where_clause,
let struct_generics = if let HdlOrHdlModule::HdlModule(_) = config_options.kw {
let mut struct_generics = Generics {
lt_token: generics.lt_token,
params: struct_generic_params,
gt_token: generics.gt_token,
where_clause: struct_where_clause,
};
if let Some(variadic) = variadic {
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
}
if !matches!(output, ReturnType::Default) {
errors.push(syn::Error::new_spanned(
output,
"return type not allowed here",
));
}
errors.ok(ParsedGenerics::parse(&mut struct_generics))
} else {
Some(ParsedGenerics::default())
};
if let Some(variadic) = variadic {
errors.push(syn::Error::new_spanned(variadic, "... not allowed here"));
}
if !matches!(output, ReturnType::Default) {
errors.push(syn::Error::new_spanned(
output,
"return type not allowed here",
));
}
let body_results = errors.ok(transform_body::transform_body(module_kind, block));
let body_results = struct_generics.as_ref().and_then(|struct_generics| {
errors.ok(transform_body::transform_body(
module_kind,
block,
struct_generics,
))
});
errors.finish()?;
let struct_generics = struct_generics.unwrap();
let (block, io) = body_results.unwrap();
Ok(Self {
let config_options = match config_options {
HdlAttr {
pound_token,
style,
bracket_token,
kw: HdlOrHdlModule::Hdl((kw,)),
paren_token,
body,
} => {
debug_assert!(matches!(io, ModuleIOOrAddPlatformIO::ModuleIO(v) if v.is_empty()));
return Ok(Self(ModuleFnImpl::Fn {
attrs,
config_options: HdlAttr {
pound_token,
style,
bracket_token,
kw,
paren_token,
body,
},
vis,
sig,
block,
}));
}
HdlAttr {
pound_token,
style,
bracket_token,
kw: HdlOrHdlModule::HdlModule((kw,)),
paren_token,
body,
} => HdlAttr {
pound_token,
style,
bracket_token,
kw,
paren_token,
body,
},
};
let io = match io {
ModuleIOOrAddPlatformIO::ModuleIO(io) => io,
ModuleIOOrAddPlatformIO::AddPlatformIO => {
return Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
attrs,
config_options,
module_kind: module_kind.unwrap(),
vis,
sig,
block,
struct_generics: None,
the_struct: TokenStream::new(),
})));
}
};
let (_struct_impl_generics, _struct_type_generics, struct_where_clause) =
struct_generics.split_for_impl();
let struct_where_clause: Option<WhereClause> = parse_quote! { #struct_where_clause };
if let Some(struct_where_clause) = &struct_where_clause {
sig.generics
.where_clause
.get_or_insert_with(|| WhereClause {
where_token: struct_where_clause.where_token,
predicates: Default::default(),
})
.predicates
.extend(struct_where_clause.predicates.clone());
}
let fn_name = &sig.ident;
let io_flips = io
.iter()
.map(|io| match io.kind.kind {
ModuleIOKind::Input((input,)) => quote_spanned! {input.span=>
#[hdl(flip)]
},
ModuleIOKind::Output(_) => quote! {},
})
.collect::<Vec<_>>();
let io_types = io.iter().map(|io| &io.kind.ty).collect::<Vec<_>>();
let io_names = io.iter().map(|io| &io.name).collect::<Vec<_>>();
let the_struct: ItemStruct = parse_quote! {
#[allow(non_camel_case_types)]
#[hdl(no_runtime_generics, no_static)]
#vis struct #fn_name #struct_generics #struct_where_clause {
#(
#io_flips
#vis #io_names: #io_types,)*
}
};
let the_struct = crate::hdl_bundle::hdl_bundle(the_struct)?;
Ok(Self(ModuleFnImpl::Module(ModuleFnModule {
attrs,
config_options,
module_kind,
module_kind: module_kind.unwrap(),
vis,
sig,
block,
io,
struct_generics,
})
struct_generics: Some(struct_generics),
the_struct,
})))
}
}
impl ModuleFn {
pub(crate) fn generate(self) -> TokenStream {
let Self {
let ModuleFnModule {
attrs,
config_options,
module_kind,
vis,
sig,
block,
io,
mut block,
struct_generics,
} = self;
the_struct,
} = match self.0 {
ModuleFnImpl::Module(v) => v,
ModuleFnImpl::Fn {
attrs,
config_options,
vis,
sig,
block,
} => {
let ConfigOptions {
outline_generated: _,
extern_: _,
} = config_options.body;
return ItemFn {
attrs,
vis,
sig,
block,
}
.into_token_stream();
}
};
let ConfigOptions {
outline_generated: _,
extern_: _,
@ -273,18 +443,28 @@ impl ModuleFn {
});
name
}));
let module_kind_ty = match module_kind {
ModuleKind::Extern => quote! { ::fayalite::module::ExternModule },
ModuleKind::Normal => quote! { ::fayalite::module::NormalModule },
let module_kind_value = match module_kind {
ModuleKind::Extern => quote! { ::fayalite::module::ModuleKind::Extern },
ModuleKind::Normal => quote! { ::fayalite::module::ModuleKind::Normal },
};
let fn_name = &outer_sig.ident;
let (_struct_impl_generics, struct_type_generics, struct_where_clause) =
struct_generics.split_for_impl();
let struct_ty = quote! {#fn_name #struct_type_generics};
let struct_ty = match struct_generics {
Some(struct_generics) => {
let (_struct_impl_generics, struct_type_generics, _struct_where_clause) =
struct_generics.split_for_impl();
quote! {#fn_name #struct_type_generics}
}
None => quote! {::fayalite::bundle::Bundle},
};
body_sig.ident = parse_quote! {__body};
body_sig.inputs.insert(
body_sig
.inputs
.insert(0, parse_quote! { m: &::fayalite::module::ModuleBuilder });
block.stmts.insert(
0,
parse_quote! {m: &mut ::fayalite::module::ModuleBuilder<#struct_ty, #module_kind_ty>},
parse_quote! {
let _ = m;
},
);
let body_fn = ItemFn {
attrs: vec![],
@ -294,50 +474,26 @@ impl ModuleFn {
};
outer_sig.output =
parse_quote! {-> ::fayalite::intern::Interned<::fayalite::module::Module<#struct_ty>>};
let io_flips = io
.iter()
.map(|io| match io.kind.kind {
ModuleIOKind::Input((input,)) => quote_spanned! {input.span=>
#[hdl(flip)]
},
ModuleIOKind::Output(_) => quote! {},
})
.collect::<Vec<_>>();
let io_types = io.iter().map(|io| &io.kind.ty).collect::<Vec<_>>();
let io_names = io.iter().map(|io| &io.name).collect::<Vec<_>>();
let fn_name_str = fn_name.to_string();
let (_, body_type_generics, _) = body_fn.sig.generics.split_for_impl();
let body_turbofish_type_generics = body_type_generics.as_turbofish();
let block = parse_quote! {{
#body_fn
::fayalite::module::ModuleBuilder::run(#fn_name_str, |m| __body #body_turbofish_type_generics(m, #(#param_names,)*))
}};
let static_type = io.iter().all(|io| io.kind.ty_expr.is_none());
let struct_options = if static_type {
quote! { #[hdl(static)] }
let body_lambda = if param_names.is_empty() {
quote! {
__body #body_turbofish_type_generics
}
} else {
quote! {}
};
let the_struct: ItemStruct = parse_quote! {
#[derive(::fayalite::__std::clone::Clone,
::fayalite::__std::hash::Hash,
::fayalite::__std::cmp::PartialEq,
::fayalite::__std::cmp::Eq,
::fayalite::__std::fmt::Debug)]
#[allow(non_camel_case_types)]
#struct_options
#vis struct #fn_name #struct_generics #struct_where_clause {
#(
#io_flips
#vis #io_names: #io_types,)*
quote! {
|m| __body #body_turbofish_type_generics(m, #(#param_names,)*)
}
};
let mut struct_without_hdl_attrs = the_struct.clone();
let mut struct_without_derives = the_struct;
retain_struct_attrs(&mut struct_without_hdl_attrs, |attr| !is_hdl_attr(attr));
retain_struct_attrs(&mut struct_without_derives, |attr| {
!attr.path().is_ident("derive")
});
let block = parse_quote! {{
#body_fn
::fayalite::module::ModuleBuilder::run(
#fn_name_str,
#module_kind_value,
#body_lambda,
)
}};
let outer_fn = ItemFn {
attrs,
vis,
@ -345,10 +501,7 @@ impl ModuleFn {
block,
};
let mut retval = outer_fn.into_token_stream();
struct_without_hdl_attrs.to_tokens(&mut retval);
retval.extend(
crate::value_derive_struct::value_derive_struct(struct_without_derives).unwrap(),
);
retval.extend(the_struct);
retval
}
}

File diff suppressed because it is too large

View file

@ -1,540 +1,273 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{module::transform_body::Visitor, options, Errors, HdlAttr, PairsIterExt};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote_spanned, ToTokens, TokenStreamExt};
use crate::{
HdlAttr, kw,
module::transform_body::{
ExprOptions, Visitor,
expand_match::{EnumPath, parse_enum_path},
},
};
use quote::{format_ident, quote_spanned};
use std::mem;
use syn::{
parse::Nothing,
parse_quote, parse_quote_spanned,
punctuated::{Pair, Punctuated},
spanned::Spanned,
token::{Brace, Paren},
Attribute, Expr, ExprArray, ExprCall, ExprGroup, ExprPath, ExprRepeat, ExprStruct, ExprTuple,
FieldValue, Ident, Index, Member, Path, PathArguments, PathSegment, Token, TypePath,
Expr, ExprArray, ExprCall, ExprGroup, ExprMethodCall, ExprParen, ExprPath, ExprRepeat,
ExprStruct, ExprTuple, FieldValue, Token, TypePath, parse_quote_spanned,
punctuated::Punctuated, spanned::Spanned, token::Paren,
};
options! {
#[options = AggregateLiteralOptions]
#[no_ident_fragment]
pub(crate) enum AggregateLiteralOption {
Struct(struct_),
Enum(enum_),
}
}
#[derive(Clone, Debug)]
pub(crate) struct StructOrEnumPath {
pub(crate) ty: TypePath,
pub(crate) variant: Option<(TypePath, Ident)>,
}
#[derive(Debug, Copy, Clone)]
pub(crate) struct SingleSegmentVariant {
pub(crate) name: &'static str,
pub(crate) make_type_path: fn(Span, &PathArguments) -> Path,
}
impl StructOrEnumPath {
pub(crate) const SINGLE_SEGMENT_VARIANTS: &'static [SingleSegmentVariant] = {
fn make_option_type_path(span: Span, arguments: &PathArguments) -> Path {
let arguments = if arguments.is_none() {
quote_spanned! {span=>
<_>
}
} else {
arguments.to_token_stream()
};
parse_quote_spanned! {span=>
::fayalite::__std::option::Option #arguments
}
}
fn make_result_type_path(span: Span, arguments: &PathArguments) -> Path {
let arguments = if arguments.is_none() {
quote_spanned! {span=>
<_, _>
}
} else {
arguments.to_token_stream()
};
parse_quote_spanned! {span=>
::fayalite::__std::result::Result #arguments
}
}
&[
SingleSegmentVariant {
name: "Some",
make_type_path: make_option_type_path,
},
SingleSegmentVariant {
name: "None",
make_type_path: make_option_type_path,
},
SingleSegmentVariant {
name: "Ok",
make_type_path: make_result_type_path,
},
SingleSegmentVariant {
name: "Err",
make_type_path: make_result_type_path,
},
]
};
pub(crate) fn new(
errors: &mut Errors,
path: TypePath,
options: &AggregateLiteralOptions,
) -> Result<Self, ()> {
let Path {
leading_colon,
segments,
} = &path.path;
let qself_position = path.qself.as_ref().map(|qself| qself.position).unwrap_or(0);
let variant_name = if qself_position < segments.len() {
Some(segments.last().unwrap().ident.clone())
} else {
None
};
let enum_type = 'guess_enum_type: {
if options.enum_.is_some() {
if let Some((struct_,)) = options.struct_ {
errors.error(
struct_,
"can't specify both #[hdl(enum)] and #[hdl(struct)]",
);
}
break 'guess_enum_type Some(None);
}
if options.struct_.is_some() {
break 'guess_enum_type None;
}
if path.qself.is_none() && leading_colon.is_none() && segments.len() == 1 {
let PathSegment { ident, arguments } = &segments[0];
for &SingleSegmentVariant {
name,
make_type_path,
} in Self::SINGLE_SEGMENT_VARIANTS
{
if ident == name {
break 'guess_enum_type Some(Some(TypePath {
qself: None,
path: make_type_path(ident.span(), arguments),
}));
}
}
}
if segments.len() == qself_position + 2
&& segments[qself_position + 1].arguments.is_none()
&& (path.qself.is_some()
|| segments[qself_position].ident.to_string().as_bytes()[0]
.is_ascii_uppercase())
{
let mut ty = path.clone();
ty.path.segments.pop();
ty.path.segments.pop_punct();
break 'guess_enum_type Some(Some(ty));
}
None
};
if let Some(enum_type) = enum_type {
let ty = if let Some(enum_type) = enum_type {
enum_type
} else {
if qself_position >= segments.len() {
errors.error(path, "#[hdl]: can't figure out enum's type");
return Err(());
}
let mut ty = path.clone();
ty.path.segments.pop();
ty.path.segments.pop_punct();
ty
};
let Some(variant_name) = variant_name else {
errors.error(path, "#[hdl]: can't figure out enum's variant name");
return Err(());
};
Ok(Self {
ty,
variant: Some((path, variant_name)),
})
} else {
Ok(Self {
ty: path,
variant: None,
})
}
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum BraceOrParen {
Brace(Brace),
Paren(Paren),
}
impl BraceOrParen {
pub(crate) fn surround(self, tokens: &mut TokenStream, f: impl FnOnce(&mut TokenStream)) {
match self {
BraceOrParen::Brace(v) => v.surround(tokens, f),
BraceOrParen::Paren(v) => v.surround(tokens, f),
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct StructOrEnumLiteralField {
pub(crate) attrs: Vec<Attribute>,
pub(crate) member: Member,
pub(crate) colon_token: Option<Token![:]>,
pub(crate) expr: Expr,
}
#[derive(Debug, Clone)]
pub(crate) struct StructOrEnumLiteral {
pub(crate) attrs: Vec<Attribute>,
pub(crate) path: TypePath,
pub(crate) brace_or_paren: BraceOrParen,
pub(crate) fields: Punctuated<StructOrEnumLiteralField, Token![,]>,
pub(crate) dot2_token: Option<Token![..]>,
pub(crate) rest: Option<Box<Expr>>,
}
impl StructOrEnumLiteral {
pub(crate) fn map_field_exprs(self, mut f: impl FnMut(Expr) -> Expr) -> Self {
self.map_fields(|mut field| {
field.expr = f(field.expr);
field
})
}
pub(crate) fn map_fields(
self,
f: impl FnMut(StructOrEnumLiteralField) -> StructOrEnumLiteralField,
) -> Self {
let Self {
attrs,
path,
brace_or_paren,
fields,
dot2_token,
rest,
} = self;
let fields = fields.into_pairs().map_pair_value(f).collect();
Self {
attrs,
path,
brace_or_paren,
fields,
dot2_token,
rest,
}
}
}
impl From<ExprStruct> for StructOrEnumLiteral {
fn from(value: ExprStruct) -> Self {
let ExprStruct {
attrs,
qself,
path,
brace_token,
fields,
dot2_token,
rest,
} = value;
Self {
attrs,
path: TypePath { qself, path },
brace_or_paren: BraceOrParen::Brace(brace_token),
fields: fields
.into_pairs()
.map_pair_value(
|FieldValue {
attrs,
member,
colon_token,
expr,
}| StructOrEnumLiteralField {
attrs,
member,
colon_token,
expr,
},
)
.collect(),
dot2_token,
rest,
}
}
}
fn expr_to_member(expr: &Expr) -> Option<Member> {
syn::parse2(expr.to_token_stream()).ok()
}
impl ToTokens for StructOrEnumLiteral {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
attrs,
path,
brace_or_paren,
fields,
dot2_token,
rest,
} = self;
tokens.append_all(attrs);
path.to_tokens(tokens);
brace_or_paren.surround(tokens, |tokens| {
match brace_or_paren {
BraceOrParen::Brace(_) => {
for (
StructOrEnumLiteralField {
attrs,
member,
mut colon_token,
expr,
},
comma,
) in fields.pairs().map(|v| v.into_tuple())
{
tokens.append_all(attrs);
if Some(member) != expr_to_member(expr).as_ref() {
colon_token = Some(<Token![:]>::default());
}
member.to_tokens(tokens);
colon_token.to_tokens(tokens);
expr.to_tokens(tokens);
comma.to_tokens(tokens);
}
}
BraceOrParen::Paren(_) => {
for (
StructOrEnumLiteralField {
attrs,
member: _,
colon_token: _,
expr,
},
comma,
) in fields.pairs().map(|v| v.into_tuple())
{
tokens.append_all(attrs);
expr.to_tokens(tokens);
comma.to_tokens(tokens);
}
}
}
if let Some(rest) = rest {
dot2_token.unwrap_or_default().to_tokens(tokens);
rest.to_tokens(tokens);
}
});
}
}
impl Visitor {
impl Visitor<'_> {
pub(crate) fn process_hdl_array(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_array: ExprArray,
) -> Expr {
let ExprOptions { sim } = hdl_attr.body;
let span = hdl_attr.kw.span;
if sim.is_some() {
for elem in &mut expr_array.elems {
*elem = parse_quote_spanned! {elem.span()=>
::fayalite::sim::value::ToSimValue::to_sim_value(&(#elem))
};
}
parse_quote_spanned! {span=>
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_array)
}
} else {
for elem in &mut expr_array.elems {
*elem = parse_quote_spanned! {elem.span()=>
::fayalite::expr::ToExpr::to_expr(&(#elem))
};
}
parse_quote_spanned! {span=>
::fayalite::expr::ToExpr::to_expr(&#expr_array)
}
}
}
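// Rough sketch of the expansion, with hypothetical locals `a` and `b`:
// `#[hdl] [a, b]` becomes approximately
//     ::fayalite::expr::ToExpr::to_expr(&[
//         ::fayalite::expr::ToExpr::to_expr(&(a)),
//         ::fayalite::expr::ToExpr::to_expr(&(b)),
//     ])
// while the `sim` option routes each element through `ToSimValue::to_sim_value`
// and wraps the whole array in `ToSimValue::into_sim_value` instead.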
pub(crate) fn process_hdl_repeat(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_repeat: ExprRepeat,
) -> Expr {
let repeated_value = &expr_repeat.expr;
let ExprOptions { sim } = hdl_attr.body;
let span = hdl_attr.kw.span;
if sim.is_some() {
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
::fayalite::sim::value::ToSimValue::to_sim_value(&(#repeated_value))
};
parse_quote_spanned! {span=>
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_repeat)
}
} else {
*expr_repeat.expr = parse_quote_spanned! {repeated_value.span()=>
::fayalite::expr::ToExpr::to_expr(&(#repeated_value))
};
parse_quote_spanned! {span=>
::fayalite::expr::ToExpr::to_expr(&#expr_repeat)
}
}
}
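// Rough sketch, with a hypothetical local `x`: `#[hdl] [x; 4]` becomes approximately
//     ::fayalite::expr::ToExpr::to_expr(&[::fayalite::expr::ToExpr::to_expr(&(x)); 4])
// with the `sim` option substituting the `ToSimValue` equivalents as in the array case.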
pub(crate) fn process_struct_enum(
&mut self,
hdl_attr: HdlAttr<AggregateLiteralOptions>,
mut literal: StructOrEnumLiteral,
) -> Expr {
let span = hdl_attr.hdl.span;
if let Some(rest) = literal.rest.take() {
self.errors
.error(rest, "#[hdl] struct functional update syntax not supported");
}
let mut next_var = 0usize;
let mut new_var = || -> Ident {
let retval = format_ident!("__v{}", next_var, span = span);
next_var += 1;
retval
};
let infallible_var = new_var();
let retval_var = new_var();
let mut lets = vec![];
let mut build_steps = vec![];
let literal = literal.map_field_exprs(|expr| {
let field_var = new_var();
lets.push(quote_spanned! {span=>
let #field_var = ::fayalite::expr::ToExpr::to_expr(&#expr);
});
parse_quote! { #field_var }
});
let Ok(StructOrEnumPath { ty, variant }) =
StructOrEnumPath::new(&mut self.errors, literal.path.clone(), &hdl_attr.body)
else {
return parse_quote_spanned! {span=>
{}
};
};
for StructOrEnumLiteralField {
attrs: _,
member,
colon_token: _,
expr,
} in literal.fields.iter()
{
let field_fn = format_ident!("field_{}", member);
build_steps.push(quote_spanned! {span=>
let #retval_var = #retval_var.#field_fn(#expr);
});
}
let check_literal = literal.map_field_exprs(|expr| {
parse_quote_spanned! {span=>
::fayalite::expr::value_from_expr_type(#expr, #infallible_var)
}
});
let make_expr_fn = if let Some((_variant_path, variant_ident)) = &variant {
let variant_fn = format_ident!("variant_{}", variant_ident);
build_steps.push(quote_spanned! {span=>
let #retval_var = #retval_var.#variant_fn();
});
quote_spanned! {span=>
::fayalite::expr::make_enum_expr
}
} else {
build_steps.push(quote_spanned! {span=>
let #retval_var = #retval_var.build();
});
quote_spanned! {span=>
::fayalite::expr::make_bundle_expr
}
};
let variant_or_type =
variant.map_or_else(|| ty.clone(), |(variant_path, _variant_ident)| variant_path);
parse_quote_spanned! {span=>
{
#(#lets)*
#make_expr_fn::<#ty>(|#infallible_var| {
let #retval_var = #check_literal;
#[allow(unreachable_code)]
match #retval_var {
#variant_or_type { .. } => #retval_var,
#[allow(unreachable_patterns)]
_ => match #infallible_var {},
}
}, |#retval_var| {
#(#build_steps)*
#retval_var
})
}
}
}
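// Rough shape of the generated block for a hypothetical literal `MyStruct { a, b }`:
// each field expression is first bound to a fresh `__v*` variable via
// `ToExpr::to_expr`, then `make_bundle_expr::<MyStruct>` (or `make_enum_expr` for an
// enum variant) is invoked with one closure that checks the literal against the
// target type via `value_from_expr_type` and a second closure that chains the
// `field_*`/`variant_*` builder calls and finishes with `.build()`.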
pub(crate) fn process_hdl_struct(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_struct: ExprStruct,
) -> Expr {
let name_span = expr_struct.path.segments.last().unwrap().ident.span();
let ExprOptions { sim } = hdl_attr.body;
if sim.is_some() {
let ty_path = TypePath {
qself: expr_struct.qself.take(),
path: expr_struct.path,
};
expr_struct.path = parse_quote_spanned! {name_span=>
__SimValue::<#ty_path>
};
for field in &mut expr_struct.fields {
let expr = &field.expr;
field.expr = parse_quote_spanned! {field.member.span()=>
::fayalite::sim::value::ToSimValue::to_sim_value(&(#expr))
};
}
return parse_quote_spanned! {name_span=>
{
type __SimValue<T> = <T as ::fayalite::ty::Type>::SimValue;
let value: ::fayalite::sim::value::SimValue<#ty_path> = ::fayalite::sim::value::ToSimValue::into_sim_value(#expr_struct);
value
}
};
}
let builder_ident = format_ident!("__builder", span = name_span);
let empty_builder = if expr_struct.qself.is_some()
|| expr_struct
.path
.segments
.iter()
.any(|seg| !seg.arguments.is_none())
{
let ty = TypePath {
qself: expr_struct.qself,
path: expr_struct.path,
};
let builder_ty = quote_spanned! {name_span=>
<#ty as ::fayalite::bundle::BundleType>::Builder
};
quote_spanned! {name_span=>
<#builder_ty as ::fayalite::__std::default::Default>::default()
}
} else {
let path = ExprPath {
attrs: vec![],
qself: expr_struct.qself,
path: expr_struct.path,
};
quote_spanned! {name_span=>
#path::__bundle_builder()
}
};
let field_calls = Vec::from_iter(expr_struct.fields.iter().map(
|FieldValue {
attrs: _,
member,
colon_token: _,
expr,
}| {
let field_fn = format_ident!("field_{}", member);
quote_spanned! {member.span()=>
let #builder_ident = #builder_ident.#field_fn(#expr);
}
},
));
parse_quote_spanned! {name_span=>
{
let #builder_ident = #empty_builder;
#(#field_calls)*
::fayalite::expr::ToExpr::to_expr(&#builder_ident)
}
}
}
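// Rough sketch of the builder-based expansion for a hypothetical `MyBundle` with a
// field `a`: `#[hdl] MyBundle { a: x }` becomes approximately
//     {
//         let __builder = MyBundle::__bundle_builder();
//         let __builder = __builder.field_a(x);
//         ::fayalite::expr::ToExpr::to_expr(&__builder)
//     }
// generic or qualified paths start from the `Default` impl of
// `<MyBundle<...> as ::fayalite::bundle::BundleType>::Builder` instead, and the `sim`
// option builds a `SimValue` through `ToSimValue::into_sim_value`.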
pub(crate) fn process_hdl_tuple(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_tuple: ExprTuple,
) -> Expr {
let ExprOptions { sim } = hdl_attr.body;
if sim.is_some() {
for element in &mut expr_tuple.elems {
*element = parse_quote_spanned! {element.span()=>
&(#element)
};
}
parse_quote_spanned! {expr_tuple.span()=>
::fayalite::sim::value::ToSimValue::into_sim_value(#expr_tuple)
}
} else {
parse_quote_spanned! {expr_tuple.span()=>
::fayalite::expr::ToExpr::to_expr(&#expr_tuple)
}
}
}
pub(crate) fn process_hdl_path(
&mut self,
hdl_attr: HdlAttr<Nothing>,
expr_path: ExprPath,
) -> Expr {
self.require_normal_module(hdl_attr);
parse_quote_spanned! {expr_path.span()=>
::fayalite::expr::ToExpr::to_expr(&#expr_path)
}
}
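// Rough sketches for the two cases above, with hypothetical `a`, `b`, and `p`:
// `#[hdl] (a, b)` without `sim` becomes `::fayalite::expr::ToExpr::to_expr(&(a, b))`,
// with `sim` it becomes
//     ::fayalite::sim::value::ToSimValue::into_sim_value((&(a), &(b)))
// and `#[hdl] p` on a plain path simply wraps it as
//     ::fayalite::expr::ToExpr::to_expr(&p)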
pub(crate) fn process_hdl_call(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_call: ExprCall,
) -> Expr {
let span = hdl_attr.kw.span;
let mut func = &mut *expr_call.func;
let EnumPath {
variant_path: _,
enum_path,
variant_name,
} = loop {
match func {
Expr::Group(ExprGroup { expr, .. }) | Expr::Paren(ExprParen { expr, .. }) => {
func = &mut **expr;
}
Expr::Path(_) => {
let Expr::Path(ExprPath { attrs, qself, path }) =
mem::replace(func, Expr::PLACEHOLDER)
else {
unreachable!();
};
match parse_enum_path(TypePath { qself, path }) {
Ok(path) => break path,
Err(path) => {
self.errors.error(&path, "unsupported enum variant path");
let TypePath { qself, path } = path;
*func = ExprPath { attrs, qself, path }.into();
return expr_call.into();
}
}
}
_ => {
self.errors.error(
&expr_call.func,
"#[hdl] function call -- function must be a possibly-parenthesized path",
);
return expr_call.into();
}
}
};
self.process_hdl_method_call(
hdl_attr,
ExprMethodCall {
attrs: expr_call.attrs,
receiver: parse_quote_spanned! {span=>
<#enum_path as ::fayalite::ty::StaticType>::TYPE
},
dot_token: Token![.](span),
method: variant_name,
turbofish: None,
paren_token: expr_call.paren_token,
args: expr_call.args,
},
)
}
pub(crate) fn process_hdl_method_call(
&mut self,
hdl_attr: HdlAttr<ExprOptions, kw::hdl>,
mut expr_method_call: ExprMethodCall,
) -> Expr {
let ExprOptions { sim } = hdl_attr.body;
let span = hdl_attr.kw.span;
// remove any number of groups and up to one paren
let mut receiver = &mut *expr_method_call.receiver;
let mut has_group = false;
let receiver = loop {
match receiver {
Expr::Group(ExprGroup { expr, .. }) => {
has_group = true;
receiver = expr;
}
Expr::Paren(ExprParen { expr, .. }) => break &mut **expr,
receiver @ Expr::Path(_) => break receiver,
_ => {
if !has_group {
self.errors.error(
&expr_method_call.receiver,
"#[hdl] on a method call needs parenthesized receiver",
);
}
break &mut *expr_method_call.receiver;
}
}
};
let func = if sim.is_some() {
parse_quote_spanned! {span=>
::fayalite::enum_::enum_type_to_sim_builder
}
} else {
parse_quote_spanned! {span=>
::fayalite::enum_::assert_is_enum_type
}
};
*expr_method_call.receiver = ExprCall {
attrs: vec![],
func,
paren_token: Paren(span),
args: Punctuated::from_iter([mem::replace(receiver, Expr::PLACEHOLDER)]),
}
.into();
expr_method_call.into()
}
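// Rough sketch of the enum-variant rewrite, with a hypothetical `MyEnum::Foo(x)`:
// `#[hdl] MyEnum::Foo(x)` is first turned into the method call
//     <MyEnum as ::fayalite::ty::StaticType>::TYPE.Foo(x)
// and `process_hdl_method_call` then wraps the receiver in `assert_is_enum_type(...)`
// (or `enum_type_to_sim_builder(...)` when `sim` is set) so the variant constructor
// is resolved on the enum type value.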
}

File diff suppressed because it is too large


@@ -1,761 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{fold::impl_fold, kw, Errors, HdlAttr};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote, quote_spanned, ToTokens};
use std::collections::{BTreeMap, HashMap, HashSet};
use syn::{
fold::{fold_generics, Fold},
parse::{Parse, ParseStream},
parse_quote, parse_quote_spanned,
punctuated::Punctuated,
spanned::Spanned,
token::{Brace, Paren, Where},
Block, ConstParam, Expr, Field, Fields, FieldsNamed, FieldsUnnamed, GenericParam, Generics,
Ident, Index, ItemImpl, Lifetime, LifetimeParam, Member, Path, Token, Type, TypeParam,
TypePath, Visibility, WhereClause, WherePredicate,
};
#[derive(Clone, Debug)]
pub(crate) struct Bounds(pub(crate) Punctuated<WherePredicate, Token![,]>);
impl_fold! {
struct Bounds<>(Punctuated<WherePredicate, Token![,]>);
}
impl Parse for Bounds {
fn parse(input: ParseStream) -> syn::Result<Self> {
Ok(Bounds(Punctuated::parse_terminated(input)?))
}
}
impl From<Option<WhereClause>> for Bounds {
fn from(value: Option<WhereClause>) -> Self {
Self(value.map_or_else(Punctuated::new, |v| v.predicates))
}
}
impl ToTokens for Bounds {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.0.to_tokens(tokens)
}
}
#[derive(Debug, Clone)]
pub(crate) struct ParsedField<O> {
pub(crate) options: HdlAttr<O>,
pub(crate) vis: Visibility,
pub(crate) name: Member,
pub(crate) ty: Type,
}
impl<O> ParsedField<O> {
pub(crate) fn var_name(&self) -> Ident {
format_ident!("__v_{}", self.name)
}
}
pub(crate) fn get_field_name(
index: usize,
name: Option<Ident>,
ty_span: impl FnOnce() -> Span,
) -> Member {
match name {
Some(name) => Member::Named(name),
None => Member::Unnamed(Index {
index: index as _,
span: ty_span(),
}),
}
}
pub(crate) fn get_field_names(fields: &Fields) -> impl Iterator<Item = Member> + '_ {
fields
.iter()
.enumerate()
.map(|(index, field)| get_field_name(index, field.ident.clone(), || field.ty.span()))
}
impl<O: Parse + Default> ParsedField<O> {
pub(crate) fn parse_fields(
errors: &mut Errors,
fields: &mut Fields,
in_enum: bool,
) -> (FieldsKind, Vec<ParsedField<O>>) {
let mut unit_fields = Punctuated::new();
let (fields_kind, fields) = match fields {
Fields::Named(fields) => (FieldsKind::Named(fields.brace_token), &mut fields.named),
Fields::Unnamed(fields) => {
(FieldsKind::Unnamed(fields.paren_token), &mut fields.unnamed)
}
Fields::Unit => (FieldsKind::Unit, &mut unit_fields),
};
let fields = fields
.iter_mut()
.enumerate()
.map(|(index, field)| {
let options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut field.attrs))
.unwrap_or_default();
let name = get_field_name(index, field.ident.clone(), || field.ty.span());
if in_enum && !matches!(field.vis, Visibility::Inherited) {
errors.error(&field.vis, "field visibility not allowed in enums");
}
ParsedField {
options,
vis: field.vis.clone(),
name,
ty: field.ty.clone(),
}
})
.collect();
(fields_kind, fields)
}
}
#[derive(Copy, Clone, Debug)]
pub(crate) enum FieldsKind {
Unit,
Named(Brace),
Unnamed(Paren),
}
impl FieldsKind {
pub(crate) fn into_fields_named(
brace_token: Brace,
fields: impl IntoIterator<Item = syn::Field>,
) -> Fields {
Fields::Named(FieldsNamed {
brace_token,
named: Punctuated::from_iter(fields),
})
}
pub(crate) fn into_fields_unnamed(
paren_token: Paren,
fields: impl IntoIterator<Item = syn::Field>,
) -> Fields {
Fields::Unnamed(FieldsUnnamed {
paren_token,
unnamed: Punctuated::from_iter(fields),
})
}
pub(crate) fn into_fields(self, fields: impl IntoIterator<Item = syn::Field>) -> Fields {
match self {
FieldsKind::Unit => {
let mut fields = fields.into_iter().peekable();
let Some(first_field) = fields.peek() else {
return Fields::Unit;
};
if first_field.ident.is_some() {
Self::into_fields_named(Default::default(), fields)
} else {
Self::into_fields_unnamed(Default::default(), fields)
}
}
FieldsKind::Named(brace_token) => Self::into_fields_named(brace_token, fields),
FieldsKind::Unnamed(paren_token) => Self::into_fields_unnamed(paren_token, fields),
}
}
}
pub(crate) fn get_target(target: &Option<(kw::target, Paren, Path)>, item_ident: &Ident) -> Path {
match target {
Some((_, _, target)) => target.clone(),
None => item_ident.clone().into(),
}
}
pub(crate) struct ValueDeriveGenerics {
pub(crate) generics: Generics,
pub(crate) static_type_generics: Generics,
}
impl ValueDeriveGenerics {
pub(crate) fn get(mut generics: Generics, where_: &Option<(Where, Paren, Bounds)>) -> Self {
let mut static_type_generics = generics.clone();
if let Some((_, _, bounds)) = where_ {
generics
.make_where_clause()
.predicates
.extend(bounds.0.iter().cloned());
static_type_generics
.where_clause
.clone_from(&generics.where_clause);
} else {
let type_params = Vec::from_iter(generics.type_params().map(|v| v.ident.clone()));
let predicates = &mut generics.make_where_clause().predicates;
let static_type_predicates = &mut static_type_generics.make_where_clause().predicates;
for type_param in type_params {
predicates.push(parse_quote! {#type_param: ::fayalite::ty::Value<Type: ::fayalite::ty::Type<Value = #type_param>>});
static_type_predicates
.push(parse_quote! {#type_param: ::fayalite::ty::StaticValue});
}
}
Self {
generics,
static_type_generics,
}
}
}
pub(crate) fn derive_clone_hash_eq_partialeq_for_struct<Name: ToTokens>(
the_struct_ident: &Ident,
generics: &Generics,
field_names: &[Name],
) -> TokenStream {
let (impl_generics, type_generics, where_clause) = generics.split_for_impl();
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::__std::clone::Clone for #the_struct_ident #type_generics
#where_clause
{
fn clone(&self) -> Self {
Self {
#(#field_names: ::fayalite::__std::clone::Clone::clone(&self.#field_names),)*
}
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::__std::hash::Hash for #the_struct_ident #type_generics
#where_clause
{
#[allow(unused_variables)]
fn hash<__H: ::fayalite::__std::hash::Hasher>(&self, hasher: &mut __H) {
#(::fayalite::__std::hash::Hash::hash(&self.#field_names, hasher);)*
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::__std::cmp::Eq for #the_struct_ident #type_generics
#where_clause
{
}
#[automatically_derived]
impl #impl_generics ::fayalite::__std::cmp::PartialEq for #the_struct_ident #type_generics
#where_clause
{
#[allow(unused_variables)]
#[allow(clippy::nonminimal_bool)]
fn eq(&self, other: &Self) -> ::fayalite::__std::primitive::bool {
true #(&& ::fayalite::__std::cmp::PartialEq::eq(
&self.#field_names,
&other.#field_names,
))*
}
}
}
}
pub(crate) fn append_field(fields: &mut Fields, mut field: Field) -> Member {
let ident = field.ident.clone().expect("ident is supplied");
match fields {
Fields::Named(FieldsNamed { named, .. }) => {
named.push(field);
Member::Named(ident)
}
Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {
field.ident = None;
field.colon_token = None;
let index = unnamed.len();
unnamed.push(field);
Member::Unnamed(index.into())
}
Fields::Unit => {
*fields = Fields::Named(FieldsNamed {
brace_token: Default::default(),
named: Punctuated::from_iter([field]),
});
Member::Named(ident)
}
}
}
#[derive(Clone, Debug)]
pub(crate) struct BuilderField {
pub(crate) names: HashSet<Member>,
pub(crate) mapped_value: Expr,
pub(crate) mapped_type: Type,
pub(crate) where_clause: Option<WhereClause>,
pub(crate) builder_field_name: Ident,
pub(crate) type_param: Ident,
}
#[derive(Debug)]
pub(crate) struct Builder {
struct_name: Ident,
vis: Visibility,
fields: BTreeMap<String, BuilderField>,
}
#[derive(Debug)]
pub(crate) struct BuilderWithFields {
struct_name: Ident,
vis: Visibility,
phantom_type_param: Ident,
phantom_type_field: Ident,
fields: Vec<(String, BuilderField)>,
}
impl Builder {
pub(crate) fn new(struct_name: Ident, vis: Visibility) -> Self {
Self {
struct_name,
vis,
fields: BTreeMap::new(),
}
}
pub(crate) fn insert_field(
&mut self,
name: Member,
map_value: impl FnOnce(&Ident) -> Expr,
map_type: impl FnOnce(&Ident) -> Type,
where_clause: impl FnOnce(&Ident) -> Option<WhereClause>,
) {
self.fields
.entry(name.to_token_stream().to_string())
.or_insert_with_key(|name| {
let builder_field_name =
format_ident!("field_{}", name, span = self.struct_name.span());
let type_param = format_ident!("__T_{}", name, span = self.struct_name.span());
BuilderField {
names: HashSet::new(),
mapped_value: map_value(&builder_field_name),
mapped_type: map_type(&type_param),
where_clause: where_clause(&type_param),
builder_field_name,
type_param,
}
})
.names
.insert(name);
}
pub(crate) fn finish_filling_in_fields(self) -> BuilderWithFields {
let Self {
struct_name,
vis,
fields,
} = self;
let fields = Vec::from_iter(fields);
BuilderWithFields {
phantom_type_param: Ident::new("__Phantom", struct_name.span()),
phantom_type_field: Ident::new("__phantom", struct_name.span()),
struct_name,
vis,
fields,
}
}
}
impl BuilderWithFields {
pub(crate) fn get_field(&self, name: &Member) -> Option<(usize, &BuilderField)> {
let index = self
.fields
.binary_search_by_key(&&*name.to_token_stream().to_string(), |v| &*v.0)
.ok()?;
Some((index, &self.fields[index].1))
}
pub(crate) fn ty(
&self,
specified_fields: impl IntoIterator<Item = (Member, Type)>,
phantom_type: Option<&Type>,
other_fields_are_any_type: bool,
) -> TypePath {
let Self {
struct_name,
vis: _,
phantom_type_param,
phantom_type_field: _,
fields,
} = self;
let span = struct_name.span();
let mut arguments =
Vec::from_iter(fields.iter().map(|(_, BuilderField { type_param, .. })| {
if other_fields_are_any_type {
parse_quote_spanned! {span=>
#type_param
}
} else {
parse_quote_spanned! {span=>
()
}
}
}));
for (name, ty) in specified_fields {
let Some((index, _)) = self.get_field(&name) else {
panic!("field not found: {}", name.to_token_stream());
};
arguments[index] = ty;
}
let phantom_type_param = phantom_type.is_none().then_some(phantom_type_param);
parse_quote_spanned! {span=>
#struct_name::<#phantom_type_param #phantom_type #(, #arguments)*>
}
}
pub(crate) fn append_generics(
&self,
specified_fields: impl IntoIterator<Item = Member>,
has_phantom_type_param: bool,
other_fields_are_any_type: bool,
generics: &mut Generics,
) {
let Self {
struct_name: _,
vis: _,
phantom_type_param,
phantom_type_field: _,
fields,
} = self;
if has_phantom_type_param {
generics.params.push(GenericParam::from(TypeParam::from(
phantom_type_param.clone(),
)));
}
if !other_fields_are_any_type {
return;
}
let mut type_params = Vec::from_iter(
fields
.iter()
.map(|(_, BuilderField { type_param, .. })| Some(type_param)),
);
for name in specified_fields {
let Some((index, _)) = self.get_field(&name) else {
panic!("field not found: {}", name.to_token_stream());
};
type_params[index] = None;
}
generics.params.extend(
type_params
.into_iter()
.filter_map(|v| Some(GenericParam::from(TypeParam::from(v?.clone())))),
);
}
pub(crate) fn make_build_method(
&self,
build_fn_name: &Ident,
specified_fields: impl IntoIterator<Item = (Member, Type)>,
generics: &Generics,
phantom_type: &Type,
return_ty: &Type,
mut body: Block,
) -> ItemImpl {
let Self {
struct_name,
vis,
phantom_type_param: _,
phantom_type_field,
fields,
} = self;
let span = struct_name.span();
let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name));
let (impl_generics, _type_generics, where_clause) = generics.split_for_impl();
let empty_arg = parse_quote_spanned! {span=>
()
};
let mut ty_arguments = vec![empty_arg; fields.len()];
let empty_field_pat = quote_spanned! {span=>
: _
};
let mut field_pats = vec![Some(empty_field_pat); fields.len()];
for (name, ty) in specified_fields {
let Some((index, _)) = self.get_field(&name) else {
panic!("field not found: {}", name.to_token_stream());
};
ty_arguments[index] = ty;
field_pats[index] = None;
}
body.stmts.insert(
0,
parse_quote_spanned! {span=>
let Self {
#(#field_names #field_pats,)*
#phantom_type_field: _,
} = self;
},
);
parse_quote_spanned! {span=>
#[automatically_derived]
impl #impl_generics #struct_name<#phantom_type #(, #ty_arguments)*>
#where_clause
{
#[allow(non_snake_case, dead_code)]
#vis fn #build_fn_name(self) -> #return_ty
#body
}
}
}
}
impl ToTokens for BuilderWithFields {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
struct_name,
vis,
phantom_type_param,
phantom_type_field,
fields,
} = self;
let span = struct_name.span();
let mut any_generics = Generics::default();
self.append_generics([], true, true, &mut any_generics);
let empty_ty = self.ty([], None, false);
let field_names = Vec::from_iter(fields.iter().map(|v| &v.1.builder_field_name));
let field_type_params = Vec::from_iter(fields.iter().map(|v| &v.1.type_param));
quote_spanned! {span=>
#[allow(non_camel_case_types)]
#[non_exhaustive]
#vis struct #struct_name #any_generics {
#(#field_names: #field_type_params,)*
#phantom_type_field: ::fayalite::__std::marker::PhantomData<#phantom_type_param>,
}
#[automatically_derived]
impl<#phantom_type_param> #empty_ty {
fn new() -> Self {
Self {
#(#field_names: (),)*
#phantom_type_field: ::fayalite::__std::marker::PhantomData,
}
}
}
}
.to_tokens(tokens);
for (field_index, (_, field)) in self.fields.iter().enumerate() {
let initial_fields = &fields[..field_index];
let final_fields = &fields[field_index..][1..];
let initial_type_params =
Vec::from_iter(initial_fields.iter().map(|v| &v.1.type_param));
let final_type_params = Vec::from_iter(final_fields.iter().map(|v| &v.1.type_param));
let initial_field_names =
Vec::from_iter(initial_fields.iter().map(|v| &v.1.builder_field_name));
let final_field_names =
Vec::from_iter(final_fields.iter().map(|v| &v.1.builder_field_name));
let BuilderField {
names: _,
mapped_value,
mapped_type,
where_clause,
builder_field_name,
type_param,
} = field;
quote_spanned! {span=>
#[automatically_derived]
#[allow(non_camel_case_types, dead_code)]
impl<#phantom_type_param #(, #initial_type_params)* #(, #final_type_params)*>
#struct_name<
#phantom_type_param,
#(#initial_type_params,)*
(), #(#final_type_params,)*
>
{
#vis fn #builder_field_name<#type_param>(
self,
#builder_field_name: #type_param,
) -> #struct_name<
#phantom_type_param,
#(#initial_type_params,)*
#mapped_type,
#(#final_type_params,)*
>
#where_clause
{
let Self {
#(#initial_field_names,)*
#builder_field_name: (),
#(#final_field_names,)*
#phantom_type_field: _,
} = self;
let #builder_field_name = #mapped_value;
#struct_name {
#(#field_names,)*
#phantom_type_field: ::fayalite::__std::marker::PhantomData,
}
}
}
}
.to_tokens(tokens);
}
}
}
pub(crate) struct MapIdents {
pub(crate) map: HashMap<Ident, Ident>,
}
impl Fold for &MapIdents {
fn fold_ident(&mut self, i: Ident) -> Ident {
self.map.get(&i).cloned().unwrap_or(i)
}
}
pub(crate) struct DupGenerics<M> {
pub(crate) combined: Generics,
pub(crate) maps: M,
}
pub(crate) fn merge_punctuated<T, P: Default>(
target: &mut Punctuated<T, P>,
source: Punctuated<T, P>,
make_punct: impl FnOnce() -> P,
) {
if source.is_empty() {
return;
}
if target.is_empty() {
*target = source;
return;
}
if !target.trailing_punct() {
target.push_punct(make_punct());
}
target.extend(source.into_pairs());
}
pub(crate) fn merge_generics(target: &mut Generics, source: Generics) {
let Generics {
lt_token,
params,
gt_token,
where_clause,
} = source;
let span = lt_token.map(|v| v.span).unwrap_or_else(|| params.span());
target.lt_token = target.lt_token.or(lt_token);
merge_punctuated(&mut target.params, params, || Token![,](span));
target.gt_token = target.gt_token.or(gt_token);
if let Some(where_clause) = where_clause {
if let Some(target_where_clause) = &mut target.where_clause {
let WhereClause {
where_token,
predicates,
} = where_clause;
let span = where_token.span;
target_where_clause.where_token = where_token;
merge_punctuated(&mut target_where_clause.predicates, predicates, || {
Token![,](span)
});
} else {
target.where_clause = Some(where_clause);
}
}
}
impl DupGenerics<Vec<MapIdents>> {
pub(crate) fn new_dyn(generics: &Generics, count: usize) -> Self {
let mut maps = Vec::from_iter((0..count).map(|_| MapIdents {
map: HashMap::new(),
}));
for param in &generics.params {
let (GenericParam::Lifetime(LifetimeParam {
lifetime: Lifetime { ident, .. },
..
})
| GenericParam::Type(TypeParam { ident, .. })
| GenericParam::Const(ConstParam { ident, .. })) = param;
for (i, map_idents) in maps.iter_mut().enumerate() {
map_idents
.map
.insert(ident.clone(), format_ident!("__{}_{}", ident, i));
}
}
let mut combined = Generics::default();
for map_idents in maps.iter() {
merge_generics(
&mut combined,
fold_generics(&mut { map_idents }, generics.clone()),
);
}
Self { combined, maps }
}
}
impl<const COUNT: usize> DupGenerics<[MapIdents; COUNT]> {
pub(crate) fn new(generics: &Generics) -> Self {
let DupGenerics { combined, maps } = DupGenerics::new_dyn(generics, COUNT);
Self {
combined,
maps: maps.try_into().ok().unwrap(),
}
}
}
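// Rough illustration, assuming hypothetical generics `<T: Clone>`: `DupGenerics::new`
// with `COUNT = 2` yields combined generics `<__T_0: Clone, __T_1: Clone>` plus one
// `MapIdents` per copy (mapping `T` to `__T_0` and `T` to `__T_1` respectively),
// which `make_connect_impl` below uses for the `connect_inexact` case.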
pub(crate) fn add_where_predicate(
target: &mut Generics,
span: Span,
where_predicate: WherePredicate,
) {
let WhereClause {
where_token: _,
predicates,
} = target.where_clause.get_or_insert_with(|| WhereClause {
where_token: Token![where](span),
predicates: Punctuated::new(),
});
if !predicates.empty_or_trailing() {
predicates.push_punct(Token![,](span));
}
predicates.push_value(where_predicate);
}
pub(crate) fn make_connect_impl(
connect_inexact: Option<(crate::kw::connect_inexact,)>,
generics: &Generics,
ty_ident: &Ident,
field_types: impl IntoIterator<Item = Type>,
) -> TokenStream {
let span = ty_ident.span();
let impl_generics;
let combined_generics;
let where_clause;
let lhs_generics;
let lhs_type_generics;
let rhs_generics;
let rhs_type_generics;
if connect_inexact.is_some() {
let DupGenerics {
mut combined,
maps: [lhs_map, rhs_map],
} = DupGenerics::new(generics);
for field_type in field_types {
let lhs_type = (&lhs_map).fold_type(field_type.clone());
let rhs_type = (&rhs_map).fold_type(field_type);
add_where_predicate(
&mut combined,
span,
parse_quote_spanned! {span=>
#lhs_type: ::fayalite::ty::Connect<#rhs_type>
},
);
}
combined_generics = combined;
(impl_generics, _, where_clause) = combined_generics.split_for_impl();
lhs_generics = (&lhs_map).fold_generics(generics.clone());
(_, lhs_type_generics, _) = lhs_generics.split_for_impl();
rhs_generics = (&rhs_map).fold_generics(generics.clone());
(_, rhs_type_generics, _) = rhs_generics.split_for_impl();
} else {
let mut generics = generics.clone();
for field_type in field_types {
add_where_predicate(
&mut generics,
span,
parse_quote_spanned! {span=>
#field_type: ::fayalite::ty::Connect<#field_type>
},
);
}
combined_generics = generics;
(impl_generics, lhs_type_generics, where_clause) = combined_generics.split_for_impl();
rhs_type_generics = lhs_type_generics.clone();
}
quote_spanned! {span=>
#[automatically_derived]
#[allow(non_camel_case_types)]
impl #impl_generics ::fayalite::ty::Connect<#ty_ident #rhs_type_generics>
for #ty_ident #lhs_type_generics
#where_clause
{
}
}
}
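// Rough sketch of the generated impl for a hypothetical `Foo<T>` whose single field
// type is `T`: without `connect_inexact` this expands to approximately
//     impl<T> ::fayalite::ty::Connect<Foo<T>> for Foo<T>
//     where T: ::fayalite::ty::Connect<T> {}
// and with `connect_inexact` the generics are duplicated so that
//     impl<__T_0, __T_1> ::fayalite::ty::Connect<Foo<__T_1>> for Foo<__T_0>
//     where __T_0: ::fayalite::ty::Connect<__T_1> {}
// is produced instead.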


@@ -1,975 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
value_derive_common::{
append_field, derive_clone_hash_eq_partialeq_for_struct, get_field_names, get_target,
make_connect_impl, Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics,
},
value_derive_struct::{self, ParsedStruct, ParsedStructNames, StructOptions},
Errors, HdlAttr,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned, ToTokens};
use syn::{
parse_quote, parse_quote_spanned, punctuated::Punctuated, spanned::Spanned, token::Brace,
Field, FieldMutability, Fields, FieldsNamed, Generics, Ident, Index, ItemEnum, ItemStruct,
Member, Path, Token, Type, Variant, Visibility,
};
crate::options! {
#[options = EnumOptions]
enum EnumOption {
OutlineGenerated(outline_generated),
ConnectInexact(connect_inexact),
Bounds(where_, Bounds),
Target(target, Path),
}
}
crate::options! {
#[options = VariantOptions]
enum VariantOption {}
}
crate::options! {
#[options = FieldOptions]
enum FieldOption {}
}
enum VariantValue {
None,
Direct {
value_type: Type,
},
Struct {
value_struct: ItemStruct,
parsed_struct: ParsedStruct,
},
}
impl VariantValue {
fn is_none(&self) -> bool {
matches!(self, Self::None)
}
fn value_ty(&self) -> Option<Type> {
match self {
VariantValue::None => None,
VariantValue::Direct { value_type } => Some(value_type.clone()),
VariantValue::Struct { value_struct, .. } => {
let (_, type_generics, _) = value_struct.generics.split_for_impl();
let ident = &value_struct.ident;
Some(parse_quote! { #ident #type_generics })
}
}
}
}
struct ParsedVariant {
options: HdlAttr<VariantOptions>,
ident: Ident,
fields_kind: FieldsKind,
fields: Vec<ParsedField<FieldOptions>>,
value: VariantValue,
}
impl ParsedVariant {
fn parse(
errors: &mut Errors,
variant: Variant,
enum_options: &EnumOptions,
enum_vis: &Visibility,
enum_ident: &Ident,
enum_generics: &Generics,
) -> Self {
let target = get_target(&enum_options.target, enum_ident);
let Variant {
mut attrs,
ident,
fields,
discriminant,
} = variant;
if let Some((eq, _)) = discriminant {
errors.error(eq, "#[derive(Value)]: discriminants not allowed");
}
let variant_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
.unwrap_or_default();
let (fields_kind, parsed_fields) =
ParsedField::parse_fields(errors, &mut fields.clone(), true);
let value = match (&fields_kind, &*parsed_fields) {
(FieldsKind::Unit, _) => VariantValue::None,
(
FieldsKind::Unnamed(_),
[ParsedField {
options,
vis: _,
name: Member::Unnamed(Index { index: 0, span: _ }),
ty,
}],
) => {
let FieldOptions {} = options.body;
VariantValue::Direct {
value_type: ty.clone(),
}
}
_ => {
let variant_value_struct_ident =
format_ident!("__{}__{}", enum_ident, ident, span = ident.span());
let variant_type_struct_ident =
format_ident!("__{}__{}__Type", enum_ident, ident, span = ident.span());
let mut value_struct_fields = fields.clone();
let (_, type_generics, _) = enum_generics.split_for_impl();
append_field(
&mut value_struct_fields,
Field {
attrs: vec![HdlAttr::from(value_derive_struct::FieldOptions {
flip: None,
skip: Some(Default::default()),
})
.to_attr()],
vis: enum_vis.clone(),
mutability: FieldMutability::None,
ident: Some(Ident::new("__phantom", ident.span())),
colon_token: None,
ty: parse_quote_spanned! {ident.span()=>
::fayalite::__std::marker::PhantomData<#target #type_generics>
},
},
);
let (value_struct_fields_kind, value_struct_parsed_fields) =
ParsedField::parse_fields(errors, &mut value_struct_fields, false);
let value_struct = ItemStruct {
attrs: vec![parse_quote! { #[allow(non_camel_case_types)] }],
vis: enum_vis.clone(),
struct_token: Token![struct](ident.span()),
ident: variant_value_struct_ident.clone(),
generics: enum_generics.clone(),
fields: value_struct_fields,
semi_token: None,
};
VariantValue::Struct {
value_struct,
parsed_struct: ParsedStruct {
options: StructOptions {
outline_generated: None,
static_: Some(Default::default()),
where_: Some((
Default::default(),
Default::default(),
ValueDeriveGenerics::get(
enum_generics.clone(),
&enum_options.where_,
)
.static_type_generics
.where_clause
.into(),
)),
target: None,
connect_inexact: enum_options.connect_inexact,
}
.into(),
vis: enum_vis.clone(),
struct_token: Default::default(),
generics: enum_generics.clone(),
fields_kind: value_struct_fields_kind,
fields: value_struct_parsed_fields,
semi_token: None, // it will fill in the semicolon if needed
skip_check_fields: true,
names: ParsedStructNames {
ident: variant_value_struct_ident.clone(),
type_struct_debug_ident: Some(format!("{enum_ident}::{ident}::Type")),
type_struct_ident: variant_type_struct_ident,
match_variant_ident: None,
builder_struct_ident: None,
mask_match_variant_ident: None,
mask_type_ident: None,
mask_type_debug_ident: Some(format!(
"AsMask<{enum_ident}::{ident}>::Type"
)),
mask_value_ident: None,
mask_value_debug_ident: Some(format!("AsMask<{enum_ident}::{ident}>")),
mask_builder_struct_ident: None,
},
},
}
}
};
ParsedVariant {
options: variant_options,
ident,
fields_kind,
fields: parsed_fields,
value,
}
}
}
struct ParsedEnum {
options: HdlAttr<EnumOptions>,
vis: Visibility,
enum_token: Token![enum],
ident: Ident,
generics: Generics,
brace_token: Brace,
variants: Vec<ParsedVariant>,
}
impl ParsedEnum {
fn parse(item: ItemEnum) -> syn::Result<Self> {
let ItemEnum {
mut attrs,
vis,
enum_token,
ident,
generics,
brace_token,
variants,
} = item;
let mut errors = Errors::new();
let enum_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(&mut attrs))
.unwrap_or_default();
let variants = variants
.into_iter()
.map(|variant| {
ParsedVariant::parse(
&mut errors,
variant,
&enum_options.body,
&vis,
&ident,
&generics,
)
})
.collect();
errors.finish()?;
Ok(ParsedEnum {
options: enum_options,
vis,
enum_token,
ident,
generics,
brace_token,
variants,
})
}
}
impl ToTokens for ParsedEnum {
fn to_tokens(&self, tokens: &mut TokenStream) {
let Self {
options,
vis,
enum_token,
ident: enum_ident,
generics: enum_generics,
brace_token,
variants,
} = self;
let EnumOptions {
outline_generated: _,
connect_inexact,
where_,
target,
} = &options.body;
let target = get_target(target, enum_ident);
let ValueDeriveGenerics {
generics: _,
static_type_generics,
} = ValueDeriveGenerics::get(enum_generics.clone(), where_);
let (static_type_impl_generics, static_type_type_generics, static_type_where_clause) =
static_type_generics.split_for_impl();
let type_struct_ident = format_ident!("__{}__Type", enum_ident);
let mut field_checks = vec![];
let mut make_type_struct_variant_type = |variant: &ParsedVariant| {
let VariantOptions {} = variant.options.body;
let (value_struct, parsed_struct) = match &variant.value {
VariantValue::None => {
return None;
}
VariantValue::Direct { value_type } => {
field_checks.push(quote_spanned! {value_type.span()=>
__check_field::<#value_type>();
});
return Some(parse_quote! { <#value_type as ::fayalite::expr::ToExpr>::Type });
}
VariantValue::Struct {
value_struct,
parsed_struct,
} => (value_struct, parsed_struct),
};
value_struct.to_tokens(tokens);
parsed_struct.to_tokens(tokens);
let mut field_names = Vec::from_iter(get_field_names(&value_struct.fields));
derive_clone_hash_eq_partialeq_for_struct(
&value_struct.ident,
&static_type_generics,
&field_names,
)
.to_tokens(tokens);
field_names = Vec::from_iter(
field_names
.into_iter()
.zip(parsed_struct.fields.iter())
.filter_map(|(member, field)| {
field.options.body.skip.is_none().then_some(member)
}),
);
let field_name_strs =
Vec::from_iter(field_names.iter().map(|v| v.to_token_stream().to_string()));
let debug_ident = format!("{enum_ident}::{}", variant.ident);
let debug_body = match variant.fields_kind {
FieldsKind::Unit => quote! {
f.debug_struct(#debug_ident).finish()
},
FieldsKind::Named(_) => quote! {
f.debug_struct(#debug_ident)
#(.field(#field_name_strs, &self.#field_names))*
.finish()
},
FieldsKind::Unnamed(_) => quote! {
f.debug_tuple(#debug_ident)#(.field(&self.#field_names))*.finish()
},
};
let value_struct_ident = &value_struct.ident;
quote! {
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::__std::fmt::Debug
for #value_struct_ident #static_type_type_generics
#static_type_where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_body
}
}
}
.to_tokens(tokens);
Some(parse_quote! {
<
#value_struct_ident #static_type_type_generics
as ::fayalite::expr::ToExpr
>::Type
})
};
let type_struct_variants = Punctuated::from_iter(variants.iter().filter_map(|variant| {
let VariantOptions {} = variant.options.body;
Some(Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: Some(variant.ident.clone()),
colon_token: None, // it will fill in the colon if needed
ty: make_type_struct_variant_type(variant)?,
})
}));
let type_struct = ItemStruct {
attrs: vec![
parse_quote! {#[allow(non_camel_case_types)]},
parse_quote! {#[allow(non_snake_case)]},
],
vis: vis.clone(),
struct_token: Token![struct](enum_token.span),
ident: type_struct_ident,
generics: static_type_generics.clone(),
fields: Fields::Named(FieldsNamed {
brace_token: *brace_token,
named: type_struct_variants,
}),
semi_token: None,
};
let type_struct_ident = &type_struct.ident;
let type_struct_debug_ident = format!("{enum_ident}::Type");
type_struct.to_tokens(tokens);
let non_empty_variant_names = Vec::from_iter(
variants
.iter()
.filter(|v| !v.value.is_none())
.map(|v| v.ident.clone()),
);
let non_empty_variant_name_strs =
Vec::from_iter(non_empty_variant_names.iter().map(|v| v.to_string()));
let debug_type_body = quote! {
f.debug_struct(#type_struct_debug_ident)
#(.field(#non_empty_variant_name_strs, &self.#non_empty_variant_names))*
.finish()
};
derive_clone_hash_eq_partialeq_for_struct(
type_struct_ident,
&static_type_generics,
&non_empty_variant_names,
)
.to_tokens(tokens);
let variant_names = Vec::from_iter(variants.iter().map(|v| &v.ident));
let variant_name_strs = Vec::from_iter(variant_names.iter().map(|v| v.to_string()));
let (variant_field_pats, variant_to_canonical_values): (Vec<_>, Vec<_>) = variants
.iter()
.map(|v| {
let field_names: Vec<_> = v.fields.iter().map(|field| &field.name).collect();
let var_names: Vec<_> = v.fields.iter().map(|field| field.var_name()).collect();
let field_pats = quote! {
#(#field_names: #var_names,)*
};
let to_canonical_value = match &v.value {
VariantValue::None => quote! { ::fayalite::__std::option::Option::None },
VariantValue::Direct { .. } => {
debug_assert_eq!(var_names.len(), 1);
quote! {
::fayalite::__std::option::Option::Some(
::fayalite::ty::DynValueTrait::to_canonical_dyn(#(#var_names)*),
)
}
}
VariantValue::Struct {
value_struct,
parsed_struct,
} => {
let value_struct_ident = &value_struct.ident;
let phantom_field_name = &parsed_struct
.fields
.last()
.expect("missing phantom field")
.name;
let type_generics = static_type_type_generics.as_turbofish();
quote! {
::fayalite::__std::option::Option::Some(
::fayalite::ty::DynValueTrait::to_canonical_dyn(
&#value_struct_ident #type_generics {
#(#field_names:
::fayalite::__std::clone::Clone::clone(#var_names),)*
#phantom_field_name: ::fayalite::__std::marker::PhantomData,
},
),
)
}
}
};
(field_pats, to_canonical_value)
})
.unzip();
let mut match_enum_variants = Punctuated::new();
let mut match_enum_debug_arms = vec![];
let mut match_enum_arms = vec![];
let mut variant_vars = vec![];
let mut from_canonical_type_variant_lets = vec![];
let mut non_empty_variant_vars = vec![];
let mut enum_type_variants = vec![];
let mut enum_type_variants_hint = vec![];
let match_enum_ident = format_ident!("__{}__MatchEnum", enum_ident);
let mut builder = Builder::new(format_ident!("__{}__Builder", enum_ident), vis.clone());
for variant in variants.iter() {
for field in variant.fields.iter() {
builder.insert_field(
field.name.clone(),
|v| {
parse_quote_spanned! {v.span()=>
::fayalite::expr::ToExpr::to_expr(&#v)
}
},
|t| {
parse_quote_spanned! {t.span()=>
::fayalite::expr::Expr<<
<#t as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::Value>
}
},
|t| {
parse_quote_spanned! {t.span()=>
where
#t: ::fayalite::expr::ToExpr,
}
},
);
}
}
let builder = builder.finish_filling_in_fields();
builder.to_tokens(tokens);
for (variant_index, variant) in variants.iter().enumerate() {
let variant_var = format_ident!("__v_{}", variant.ident);
let variant_name = &variant.ident;
let variant_name_str = variant.ident.to_string();
match_enum_variants.push(Variant {
attrs: vec![],
ident: variant.ident.clone(),
fields: variant.fields_kind.into_fields(variant.fields.iter().map(
|ParsedField {
options,
vis,
name,
ty,
}| {
let FieldOptions {} = options.body;
Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: if let Member::Named(name) = name {
Some(name.clone())
} else {
None
},
colon_token: None,
ty: parse_quote! { ::fayalite::expr::Expr<#ty> },
}
},
)),
discriminant: None,
});
let match_enum_field_names = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty: _,
}| {
let FieldOptions {} = options.body;
name
},
));
let match_enum_field_name_strs = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty: _,
}| {
let FieldOptions {} = options.body;
name.to_token_stream().to_string()
},
));
let match_enum_debug_vars = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty: _,
}| {
let FieldOptions {} = options.body;
format_ident!("__v_{}", name)
},
));
match_enum_debug_arms.push(match variant.fields_kind {
FieldsKind::Unit | FieldsKind::Named(_) => quote! {
Self::#variant_name {
#(#match_enum_field_names: ref #match_enum_debug_vars,)*
} => f.debug_struct(#variant_name_str)
#(.field(#match_enum_field_name_strs, #match_enum_debug_vars))*
.finish(),
},
FieldsKind::Unnamed(_) => quote! {
Self::#variant_name(
#(ref #match_enum_debug_vars,)*
) => f.debug_tuple(#variant_name_str)
#(.field(#match_enum_debug_vars))*
.finish(),
},
});
if let Some(value_ty) = variant.value.value_ty() {
from_canonical_type_variant_lets.push(quote! {
let #variant_var =
#variant_var.from_canonical_type_helper_has_value(#variant_name_str);
});
non_empty_variant_vars.push(variant_var.clone());
enum_type_variants.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::Some(
::fayalite::ty::DynType::canonical_dyn(&self.#variant_name),
),
}
});
enum_type_variants_hint.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::Some(
::fayalite::bundle::TypeHint::<
<#value_ty as ::fayalite::expr::ToExpr>::Type,
>::intern_dyn(),
),
}
});
} else {
from_canonical_type_variant_lets.push(quote! {
#variant_var.from_canonical_type_helper_no_value(#variant_name_str);
});
enum_type_variants.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::None,
}
});
enum_type_variants_hint.push(quote! {
::fayalite::enum_::VariantType {
name: ::fayalite::intern::Intern::intern(#variant_name_str),
ty: ::fayalite::__std::option::Option::None,
}
});
}
variant_vars.push(variant_var);
match_enum_arms.push(match &variant.value {
VariantValue::None => quote! {
#variant_index => #match_enum_ident::#variant_name,
},
VariantValue::Direct { value_type } => quote! {
#variant_index => #match_enum_ident::#variant_name {
#(#match_enum_field_names)*: ::fayalite::expr::ToExpr::to_expr(
&__variant_access.downcast_unchecked::<
<#value_type as ::fayalite::expr::ToExpr>::Type>(),
),
},
},
VariantValue::Struct {
value_struct: ItemStruct { ident, .. },
..
} => quote! {
#variant_index => {
let __variant_access = ::fayalite::expr::ToExpr::to_expr(
&__variant_access.downcast_unchecked::<<
#ident #static_type_type_generics
as ::fayalite::expr::ToExpr
>::Type>(),
);
#match_enum_ident::#variant_name {
#(#match_enum_field_names:
(*__variant_access).#match_enum_field_names,)*
}
},
},
});
let builder_field_and_types = Vec::from_iter(variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty,
}| {
let FieldOptions {} = options.body;
(name, ty)
},
));
let builder_field_vars = Vec::from_iter(
builder_field_and_types
.iter()
.map(|(name, _)| &builder.get_field(name).unwrap().1.builder_field_name),
);
let build_body = match &variant.value {
VariantValue::None => parse_quote! {
{
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::EnumLiteral::<
#type_struct_ident #static_type_type_generics
>::new_unchecked(
::fayalite::__std::option::Option::None,
#variant_index,
::fayalite::ty::StaticType::static_type(),
),
)
}
},
VariantValue::Direct { value_type: _ } => parse_quote! {
{
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::EnumLiteral::<
#type_struct_ident #static_type_type_generics
>::new_unchecked(
::fayalite::__std::option::Option::Some(
#(#builder_field_vars)*.to_canonical_dyn(),
),
#variant_index,
::fayalite::ty::StaticType::static_type(),
),
)
}
},
VariantValue::Struct {
parsed_struct:
ParsedStruct {
names:
ParsedStructNames {
type_struct_ident: field_type_struct_ident,
..
},
..
},
..
} => parse_quote! {
{
let __builder = <
#field_type_struct_ident #static_type_type_generics
as ::fayalite::bundle::BundleType
>::builder();
#(let __builder = __builder.#builder_field_vars(#builder_field_vars);)*
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::EnumLiteral::<
#type_struct_ident #static_type_type_generics
>::new_unchecked(
::fayalite::__std::option::Option::Some(
__builder.build().to_canonical_dyn(),
),
#variant_index,
::fayalite::ty::StaticType::static_type(),
),
)
}
},
};
builder
.make_build_method(
&format_ident!("variant_{}", variant_name),
variant.fields.iter().map(
|ParsedField {
options,
vis: _,
name,
ty,
}| {
let FieldOptions {} = options.body;
(name.clone(), parse_quote! { ::fayalite::expr::Expr<#ty> })
},
),
&static_type_generics,
&parse_quote! {#type_struct_ident #static_type_type_generics},
&parse_quote! { ::fayalite::expr::Expr<#target #static_type_type_generics> },
build_body,
)
.to_tokens(tokens);
}
let match_enum = ItemEnum {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
enum_token: *enum_token,
ident: match_enum_ident,
generics: static_type_generics.clone(),
brace_token: *brace_token,
variants: match_enum_variants,
};
let match_enum_ident = &match_enum.ident;
match_enum.to_tokens(tokens);
make_connect_impl(
*connect_inexact,
&static_type_generics,
type_struct_ident,
variants.iter().flat_map(|variant| {
variant.fields.iter().map(|field| {
let ty = &field.ty;
parse_quote_spanned! {field.name.span()=>
<#ty as ::fayalite::expr::ToExpr>::Type
}
})
}),
)
.to_tokens(tokens);
let variant_count = variants.len();
let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false);
quote! {
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::__std::fmt::Debug
for #match_enum_ident #static_type_type_generics
#static_type_where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
match *self {
#(#match_enum_debug_arms)*
}
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::StaticType
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
fn static_type() -> Self {
Self {
#(#non_empty_variant_names: ::fayalite::ty::StaticType::static_type(),)*
}
}
}
fn __check_field<T: ::fayalite::ty::Value>()
where
<T as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = T>,
{}
fn __check_fields #static_type_impl_generics(_: #target #static_type_type_generics)
#static_type_where_clause
{
#(#field_checks)*
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::__std::fmt::Debug
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_type_body
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::Connect<::fayalite::type_deduction::UndeducedType>
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::Type
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
type CanonicalType = ::fayalite::enum_::DynEnumType;
type Value = #target #static_type_type_generics;
type CanonicalValue = ::fayalite::enum_::DynEnum;
type MaskType = ::fayalite::int::UIntType<1>;
type MaskValue = ::fayalite::int::UInt<1>;
type MatchVariant = #match_enum_ident #static_type_type_generics;
type MatchActiveScope = ::fayalite::module::Scope;
type MatchVariantAndInactiveScope =
::fayalite::enum_::EnumMatchVariantAndInactiveScope<Self>;
type MatchVariantsIter = ::fayalite::enum_::EnumMatchVariantsIter<Self>;
fn match_variants<IO: ::fayalite::bundle::BundleValue>(
this: ::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>,
module_builder: &mut ::fayalite::module::ModuleBuilder<
IO,
::fayalite::module::NormalModule,
>,
source_location: ::fayalite::source_location::SourceLocation,
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter
where
<IO as ::fayalite::expr::ToExpr>::Type:
::fayalite::bundle::BundleType<Value = IO>,
{
module_builder.enum_match_variants_helper(this, source_location)
}
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
::fayalite::int::UIntType::new()
}
fn canonical(&self) -> <Self as ::fayalite::ty::Type>::CanonicalType {
let variants = ::fayalite::enum_::EnumType::variants(self);
::fayalite::enum_::DynEnumType::new(variants)
}
fn source_location(&self) -> ::fayalite::source_location::SourceLocation {
::fayalite::source_location::SourceLocation::caller()
}
fn type_enum(&self) -> ::fayalite::ty::TypeEnum {
::fayalite::ty::TypeEnum::EnumType(::fayalite::ty::Type::canonical(self))
}
#[allow(non_snake_case)]
fn from_canonical_type(t: <Self as ::fayalite::ty::Type>::CanonicalType) -> Self {
let [#(#variant_vars),*] = *::fayalite::enum_::EnumType::variants(&t) else {
::fayalite::__std::panic!("wrong number of variants");
};
#(#from_canonical_type_variant_lets)*
Self {
#(#non_empty_variant_names: #non_empty_variant_vars,)*
}
}
}
#[automatically_derived]
#[allow(clippy::init_numbered_fields)]
impl #static_type_impl_generics ::fayalite::enum_::EnumType
for #type_struct_ident #static_type_type_generics
#static_type_where_clause
{
type Builder = #empty_builder_ty;
fn match_activate_scope(
v: <Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
) -> (
<Self as ::fayalite::ty::Type>::MatchVariant,
<Self as ::fayalite::ty::Type>::MatchActiveScope,
) {
let (__variant_access, __scope) = v.activate();
(
match ::fayalite::expr::ops::VariantAccess::variant_index(
&*__variant_access,
) {
#(#match_enum_arms)*
#variant_count.. => ::fayalite::__std::panic!("invalid variant index"),
},
__scope,
)
}
fn builder() -> <Self as ::fayalite::enum_::EnumType>::Builder {
#empty_builder_ty::new()
}
fn variants(&self) -> ::fayalite::intern::Interned<[::fayalite::enum_::VariantType<
::fayalite::intern::Interned<dyn ::fayalite::ty::DynCanonicalType>,
>]> {
::fayalite::intern::Intern::intern(&[#(#enum_type_variants,)*][..])
}
fn variants_hint() -> ::fayalite::enum_::VariantsHint {
::fayalite::enum_::VariantsHint::new([#(#enum_type_variants_hint,)*], false)
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::expr::ToExpr
for #target #static_type_type_generics
#static_type_where_clause
{
type Type = #type_struct_ident #static_type_type_generics;
fn ty(&self) -> <Self as ::fayalite::expr::ToExpr>::Type {
::fayalite::ty::StaticType::static_type()
}
fn to_expr(&self) -> ::fayalite::expr::Expr<Self> {
::fayalite::expr::Expr::from_value(self)
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::ty::Value
for #target #static_type_type_generics
#static_type_where_clause
{
fn to_canonical(&self) -> <
<Self as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::CanonicalValue
{
let __ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self));
match self {
#(Self::#variant_names { #variant_field_pats } => {
::fayalite::enum_::DynEnum::new_by_name(
__ty,
::fayalite::intern::Intern::intern(#variant_name_strs),
#variant_to_canonical_values,
)
})*
}
}
}
#[automatically_derived]
impl #static_type_impl_generics ::fayalite::enum_::EnumValue
for #target #static_type_type_generics
#static_type_where_clause
{
}
}
.to_tokens(tokens);
}
}
pub(crate) fn value_derive_enum(item: ItemEnum) -> syn::Result<TokenStream> {
let item = ParsedEnum::parse(item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = quote! {
const _: () = {
#item
};
};
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "value-enum-");
}
Ok(contents)
}


@@ -1,771 +0,0 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
value_derive_common::{
append_field, derive_clone_hash_eq_partialeq_for_struct, get_target, make_connect_impl,
Bounds, Builder, FieldsKind, ParsedField, ValueDeriveGenerics,
},
Errors, HdlAttr,
};
use proc_macro2::TokenStream;
use quote::{format_ident, quote, quote_spanned, ToTokens};
use syn::{
parse_quote, parse_quote_spanned, spanned::Spanned, FieldMutability, Generics, Ident,
ItemStruct, Member, Path, Token, Visibility,
};
crate::options! {
#[options = StructOptions]
pub(crate) enum StructOption {
OutlineGenerated(outline_generated),
Static(static_),
ConnectInexact(connect_inexact),
Bounds(where_, Bounds),
Target(target, Path),
}
}
crate::options! {
#[options = FieldOptions]
pub(crate) enum FieldOption {
Flip(flip),
Skip(skip),
}
}
pub(crate) struct ParsedStructNames<I, S> {
pub(crate) ident: Ident,
pub(crate) type_struct_debug_ident: S,
pub(crate) type_struct_ident: Ident,
pub(crate) match_variant_ident: I,
pub(crate) builder_struct_ident: I,
pub(crate) mask_match_variant_ident: I,
pub(crate) mask_type_ident: I,
pub(crate) mask_type_debug_ident: S,
pub(crate) mask_value_ident: I,
pub(crate) mask_value_debug_ident: S,
pub(crate) mask_builder_struct_ident: I,
}
pub(crate) struct ParsedStruct {
pub(crate) options: HdlAttr<StructOptions>,
pub(crate) vis: Visibility,
pub(crate) struct_token: Token![struct],
pub(crate) generics: Generics,
pub(crate) fields_kind: FieldsKind,
pub(crate) fields: Vec<ParsedField<FieldOptions>>,
pub(crate) semi_token: Option<Token![;]>,
pub(crate) skip_check_fields: bool,
pub(crate) names: ParsedStructNames<Option<Ident>, Option<String>>,
}
impl ParsedStruct {
pub(crate) fn parse(item: &mut ItemStruct) -> syn::Result<Self> {
let ItemStruct {
attrs,
vis,
struct_token,
ident,
generics,
fields,
semi_token,
} = item;
let mut errors = Errors::new();
let struct_options = errors
.unwrap_or_default(HdlAttr::parse_and_take_attr(attrs))
.unwrap_or_default();
let (fields_kind, fields) = ParsedField::parse_fields(&mut errors, fields, false);
errors.finish()?;
Ok(ParsedStruct {
options: struct_options,
vis: vis.clone(),
struct_token: *struct_token,
generics: generics.clone(),
fields_kind,
fields,
semi_token: *semi_token,
skip_check_fields: false,
names: ParsedStructNames {
ident: ident.clone(),
type_struct_debug_ident: None,
type_struct_ident: format_ident!("__{}__Type", ident),
match_variant_ident: None,
builder_struct_ident: None,
mask_match_variant_ident: None,
mask_type_ident: None,
mask_type_debug_ident: None,
mask_value_ident: None,
mask_value_debug_ident: None,
mask_builder_struct_ident: None,
},
})
}
pub(crate) fn write_body(
&self,
target: Path,
names: ParsedStructNames<&Ident, &String>,
is_for_mask: bool,
tokens: &mut TokenStream,
) {
let Self {
options,
vis,
struct_token,
generics,
fields_kind,
fields,
semi_token,
skip_check_fields,
names: _,
} = self;
let skip_check_fields = *skip_check_fields || is_for_mask;
let ParsedStructNames {
ident: struct_ident,
type_struct_debug_ident,
type_struct_ident,
match_variant_ident,
builder_struct_ident,
mask_match_variant_ident: _,
mask_type_ident,
mask_type_debug_ident: _,
mask_value_ident,
mask_value_debug_ident,
mask_builder_struct_ident: _,
} = names;
let StructOptions {
outline_generated: _,
where_,
target: _,
static_,
connect_inexact,
} = &options.body;
let ValueDeriveGenerics {
generics,
static_type_generics,
} = ValueDeriveGenerics::get(generics.clone(), where_);
let (impl_generics, type_generics, where_clause) = generics.split_for_impl();
let unskipped_fields = fields
.iter()
.filter(|field| field.options.body.skip.is_none());
let _field_names = Vec::from_iter(fields.iter().map(|field| field.name.clone()));
let unskipped_field_names =
Vec::from_iter(unskipped_fields.clone().map(|field| field.name.clone()));
let unskipped_field_name_strs = Vec::from_iter(
unskipped_field_names
.iter()
.map(|field_name| field_name.to_token_stream().to_string()),
);
let unskipped_field_vars = Vec::from_iter(
unskipped_field_names
.iter()
.map(|field_name| format_ident!("__v_{}", field_name)),
);
let unskipped_field_flips = Vec::from_iter(
unskipped_fields
.clone()
.map(|field| field.options.body.flip.is_some()),
);
let mut any_fields_skipped = false;
let type_fields = Vec::from_iter(fields.iter().filter_map(|field| {
let ParsedField {
options,
vis,
name,
ty,
} = field;
let FieldOptions { flip: _, skip } = &options.body;
if skip.is_some() {
any_fields_skipped = true;
return None;
}
let ty = if is_for_mask {
parse_quote! { ::fayalite::ty::AsMask<#ty> }
} else {
ty.to_token_stream()
};
Some(syn::Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: match name.clone() {
Member::Named(name) => Some(name),
Member::Unnamed(_) => None,
},
colon_token: None,
ty: parse_quote! { <#ty as ::fayalite::expr::ToExpr>::Type },
})
}));
let field_types = Vec::from_iter(type_fields.iter().map(|field| field.ty.clone()));
let match_variant_fields = Vec::from_iter(fields.iter().zip(&type_fields).map(
|(parsed_field, type_field)| {
let field_ty = &parsed_field.ty;
syn::Field {
ty: parse_quote! { ::fayalite::expr::Expr<#field_ty> },
..type_field.clone()
}
},
));
let mask_value_fields = Vec::from_iter(fields.iter().zip(&type_fields).map(
|(parsed_field, type_field)| {
let field_ty = &parsed_field.ty;
syn::Field {
ty: parse_quote! { ::fayalite::ty::AsMask<#field_ty> },
..type_field.clone()
}
},
));
let mut type_struct_fields = fields_kind.into_fields(type_fields);
let mut match_variant_fields = fields_kind.into_fields(match_variant_fields);
let mut mask_value_fields = fields_kind.into_fields(mask_value_fields);
let phantom_data_field_name = any_fields_skipped.then(|| {
let phantom_data_field_name = Ident::new("__phantom_data", type_struct_ident.span());
let member = append_field(
&mut type_struct_fields,
syn::Field {
attrs: vec![],
vis: vis.clone(),
mutability: FieldMutability::None,
ident: Some(phantom_data_field_name.clone()),
colon_token: None,
ty: parse_quote_spanned! {type_struct_ident.span()=>
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
},
},
);
append_field(
&mut match_variant_fields,
syn::Field {
attrs: vec![],
vis: Visibility::Inherited,
mutability: FieldMutability::None,
ident: Some(phantom_data_field_name.clone()),
colon_token: None,
ty: parse_quote_spanned! {type_struct_ident.span()=>
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
},
},
);
append_field(
&mut mask_value_fields,
syn::Field {
attrs: vec![],
vis: Visibility::Inherited,
mutability: FieldMutability::None,
ident: Some(phantom_data_field_name),
colon_token: None,
ty: parse_quote_spanned! {type_struct_ident.span()=>
::fayalite::__std::marker::PhantomData<#struct_ident #type_generics>
},
},
);
member
});
let phantom_data_field_name_slice = phantom_data_field_name.as_slice();
let type_struct = ItemStruct {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
struct_token: *struct_token,
ident: type_struct_ident.clone(),
generics: generics.clone(),
fields: type_struct_fields,
semi_token: *semi_token,
};
type_struct.to_tokens(tokens);
let match_variant_struct = ItemStruct {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
struct_token: *struct_token,
ident: match_variant_ident.clone(),
generics: generics.clone(),
fields: match_variant_fields,
semi_token: *semi_token,
};
match_variant_struct.to_tokens(tokens);
let mask_type_body = if is_for_mask {
quote! {
::fayalite::__std::clone::Clone::clone(self)
}
} else {
let mask_value_struct = ItemStruct {
attrs: vec![parse_quote! {#[allow(non_camel_case_types)]}],
vis: vis.clone(),
struct_token: *struct_token,
ident: mask_value_ident.clone(),
generics: generics.clone(),
fields: mask_value_fields,
semi_token: *semi_token,
};
mask_value_struct.to_tokens(tokens);
let debug_mask_value_body = match fields_kind {
FieldsKind::Unit => quote! {
f.debug_struct(#mask_value_debug_ident).finish()
},
FieldsKind::Named(_) => quote! {
f.debug_struct(#mask_value_debug_ident)
#(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))*
.finish()
},
FieldsKind::Unnamed(_) => quote! {
f.debug_tuple(#mask_value_debug_ident)
#(.field(&self.#unskipped_field_names))*
.finish()
},
};
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::__std::fmt::Debug
for #mask_value_ident #type_generics
#where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_mask_value_body
}
}
}
.to_tokens(tokens);
quote! {
#mask_type_ident {
#(#unskipped_field_names:
::fayalite::ty::Type::mask_type(&self.#unskipped_field_names),)*
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
}
}
};
let debug_type_body = match fields_kind {
FieldsKind::Unit => quote! {
f.debug_struct(#type_struct_debug_ident).finish()
},
FieldsKind::Named(_) => quote! {
f.debug_struct(#type_struct_debug_ident)
#(.field(#unskipped_field_name_strs, &self.#unskipped_field_names))*
.finish()
},
FieldsKind::Unnamed(_) => quote! {
f.debug_tuple(#type_struct_debug_ident)
#(.field(&self.#unskipped_field_names))*
.finish()
},
};
for the_struct_ident in [&type_struct_ident, match_variant_ident]
.into_iter()
.chain(is_for_mask.then_some(mask_value_ident))
{
derive_clone_hash_eq_partialeq_for_struct(
the_struct_ident,
&generics,
&Vec::from_iter(
unskipped_field_names
.iter()
.cloned()
.chain(phantom_data_field_name.clone()),
),
)
.to_tokens(tokens);
}
let check_v = format_ident!("__v");
let field_checks = Vec::from_iter(fields.iter().map(|ParsedField { ty, name, .. }| {
quote_spanned! {ty.span()=>
__check_field(#check_v.#name);
}
}));
if static_.is_some() {
let (impl_generics, type_generics, where_clause) =
static_type_generics.split_for_impl();
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::ty::StaticType for #type_struct_ident #type_generics
#where_clause
{
fn static_type() -> Self {
Self {
#(#unskipped_field_names: ::fayalite::ty::StaticType::static_type(),)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
}
}
}
}
.to_tokens(tokens);
}
if !skip_check_fields {
quote! {
fn __check_field<T: ::fayalite::ty::Value>(_v: T)
where
<T as ::fayalite::expr::ToExpr>::Type: ::fayalite::ty::Type<Value = T>,
{}
fn __check_fields #impl_generics(#check_v: #target #type_generics)
#where_clause
{
#(#field_checks)*
}
}
.to_tokens(tokens);
}
let mut builder = Builder::new(builder_struct_ident.clone(), vis.clone());
for field in unskipped_fields.clone() {
builder.insert_field(
field.name.clone(),
|v| {
parse_quote_spanned! {v.span()=>
::fayalite::expr::ToExpr::to_expr(&#v)
}
},
|t| {
parse_quote_spanned! {t.span()=>
::fayalite::expr::Expr<<
<#t as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::Value>
}
},
|t| {
parse_quote_spanned! {t.span()=>
where
#t: ::fayalite::expr::ToExpr,
}
},
);
}
let builder = builder.finish_filling_in_fields();
builder.to_tokens(tokens);
let build_type_fields =
Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| {
let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name;
quote_spanned! {struct_ident.span()=>
#name: ::fayalite::expr::ToExpr::ty(&#builder_field_name)
}
}));
let build_expr_fields =
Vec::from_iter(unskipped_fields.clone().map(|ParsedField { name, .. }| {
let builder_field_name = &builder.get_field(name).unwrap().1.builder_field_name;
quote_spanned! {struct_ident.span()=>
#builder_field_name.to_canonical_dyn()
}
}));
let build_specified_fields = unskipped_fields.clone().map(
|ParsedField {
options: _,
vis: _,
name,
ty,
}| {
let ty = if is_for_mask {
parse_quote_spanned! {name.span()=>
::fayalite::expr::Expr<::fayalite::ty::AsMask<#ty>>
}
} else {
parse_quote_spanned! {name.span()=>
::fayalite::expr::Expr<#ty>
}
};
(name.clone(), ty)
},
);
let build_body = parse_quote_spanned! {struct_ident.span()=>
{
::fayalite::expr::ToExpr::to_expr(
&::fayalite::expr::ops::BundleLiteral::new_unchecked(
::fayalite::intern::Intern::intern(&[#(
#build_expr_fields,
)*][..]),
#type_struct_ident {
#(#build_type_fields,)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
},
),
)
}
};
builder
.make_build_method(
&Ident::new("build", struct_ident.span()),
build_specified_fields,
&generics,
&parse_quote_spanned! {struct_ident.span()=>
#type_struct_ident #type_generics
},
&parse_quote_spanned! {struct_ident.span()=>
::fayalite::expr::Expr<#target #type_generics>
},
build_body,
)
.to_tokens(tokens);
make_connect_impl(
*connect_inexact,
&generics,
&type_struct_ident,
unskipped_fields.clone().map(|field| {
let ty = &field.ty;
parse_quote_spanned! {field.name.span()=>
<#ty as ::fayalite::expr::ToExpr>::Type
}
}),
)
.to_tokens(tokens);
let empty_builder_ty = builder.ty([], Some(&parse_quote! { Self }), false);
quote! {
#[automatically_derived]
impl #impl_generics ::fayalite::__std::fmt::Debug for #type_struct_ident #type_generics
#where_clause
{
fn fmt(
&self,
f: &mut ::fayalite::__std::fmt::Formatter<'_>,
) -> ::fayalite::__std::fmt::Result {
#debug_type_body
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::Connect<::fayalite::type_deduction::UndeducedType>
for #type_struct_ident #type_generics
#where_clause
{}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::Type for #type_struct_ident #type_generics
#where_clause
{
type CanonicalType = ::fayalite::bundle::DynBundleType;
type Value = #target #type_generics;
type CanonicalValue = ::fayalite::bundle::DynBundle;
type MaskType = #mask_type_ident #type_generics;
type MaskValue = #mask_value_ident #type_generics;
type MatchVariant = #match_variant_ident #type_generics;
type MatchActiveScope = ();
type MatchVariantAndInactiveScope = ::fayalite::ty::MatchVariantWithoutScope<
#match_variant_ident #type_generics,
>;
type MatchVariantsIter = ::fayalite::__std::iter::Once<
<Self as ::fayalite::ty::Type>::MatchVariantAndInactiveScope,
>;
#[allow(unused_variables)]
fn match_variants<IO: ::fayalite::bundle::BundleValue>(
this: ::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>,
module_builder: &mut ::fayalite::module::ModuleBuilder<
IO,
::fayalite::module::NormalModule,
>,
source_location: ::fayalite::source_location::SourceLocation,
) -> <Self as ::fayalite::ty::Type>::MatchVariantsIter
where
<IO as ::fayalite::expr::ToExpr>::Type:
::fayalite::bundle::BundleType<Value = IO>,
{
::fayalite::__std::iter::once(::fayalite::ty::MatchVariantWithoutScope(
#match_variant_ident {
#(#unskipped_field_names: this.field(#unskipped_field_name_strs),)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
},
))
}
fn mask_type(&self) -> <Self as ::fayalite::ty::Type>::MaskType {
#mask_type_body
}
fn canonical(&self) -> <Self as ::fayalite::ty::Type>::CanonicalType {
let fields = ::fayalite::bundle::BundleType::fields(self);
::fayalite::bundle::DynBundleType::new(fields)
}
fn source_location(&self) -> ::fayalite::source_location::SourceLocation {
::fayalite::source_location::SourceLocation::caller()
}
fn type_enum(&self) -> ::fayalite::ty::TypeEnum {
::fayalite::ty::TypeEnum::BundleType(::fayalite::ty::Type::canonical(self))
}
fn from_canonical_type(t: <Self as ::fayalite::ty::Type>::CanonicalType) -> Self {
let [#(#unskipped_field_vars),*] = *::fayalite::bundle::BundleType::fields(&t)
else {
::fayalite::__std::panic!("wrong number of fields");
};
Self {
#(#unskipped_field_names: #unskipped_field_vars.from_canonical_type_helper(
#unskipped_field_name_strs,
#unskipped_field_flips,
),)*
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
}
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::TypeWithDeref for #type_struct_ident #type_generics
#where_clause
{
#[allow(unused_variables)]
fn expr_deref(this: &::fayalite::expr::Expr<<Self as ::fayalite::ty::Type>::Value>)
-> &<Self as ::fayalite::ty::Type>::MatchVariant
{
::fayalite::intern::Interned::<_>::into_inner(
::fayalite::intern::Intern::intern_sized(#match_variant_ident {
#(#unskipped_field_names: this.field(#unskipped_field_name_strs),)*
#(#phantom_data_field_name_slice:
::fayalite::__std::marker::PhantomData,)*
}),
)
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::bundle::BundleType for #type_struct_ident #type_generics
#where_clause
{
type Builder = #empty_builder_ty;
fn builder() -> <Self as ::fayalite::bundle::BundleType>::Builder {
#empty_builder_ty::new()
}
fn fields(&self) -> ::fayalite::intern::Interned<
[::fayalite::bundle::FieldType<::fayalite::intern::Interned<
dyn ::fayalite::ty::DynCanonicalType,
>>],
>
{
::fayalite::intern::Intern::intern(&[#(
::fayalite::bundle::FieldType {
name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs),
flipped: #unskipped_field_flips,
ty: ::fayalite::ty::DynType::canonical_dyn(
&self.#unskipped_field_names,
),
},
)*][..])
}
fn fields_hint() -> ::fayalite::bundle::FieldsHint {
::fayalite::bundle::FieldsHint::new([#(
::fayalite::bundle::FieldType {
name: ::fayalite::intern::Intern::intern(#unskipped_field_name_strs),
flipped: #unskipped_field_flips,
ty: ::fayalite::bundle::TypeHint::<#field_types>::intern_dyn(),
},
)*], false)
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::expr::ToExpr for #target #type_generics
#where_clause
{
type Type = #type_struct_ident #type_generics;
fn ty(&self) -> <Self as ::fayalite::expr::ToExpr>::Type {
#type_struct_ident {
#(#unskipped_field_names: ::fayalite::expr::ToExpr::ty(
&self.#unskipped_field_names,
),)*
#(#phantom_data_field_name_slice: ::fayalite::__std::marker::PhantomData,)*
}
}
fn to_expr(&self) -> ::fayalite::expr::Expr<Self> {
::fayalite::expr::Expr::from_value(self)
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::ty::Value for #target #type_generics
#where_clause
{
fn to_canonical(&self) -> <
<Self as ::fayalite::expr::ToExpr>::Type
as ::fayalite::ty::Type
>::CanonicalValue
{
let ty = ::fayalite::ty::Type::canonical(&::fayalite::expr::ToExpr::ty(self));
::fayalite::bundle::DynBundle::new(ty, ::fayalite::__std::sync::Arc::new([
#(::fayalite::ty::DynValueTrait::to_canonical_dyn(
&self.#unskipped_field_names,
),)*
]))
}
}
#[automatically_derived]
impl #impl_generics ::fayalite::bundle::BundleValue for #target #type_generics
#where_clause
{
}
}
.to_tokens(tokens);
}
}
impl ToTokens for ParsedStruct {
fn to_tokens(&self, tokens: &mut TokenStream) {
let ParsedStructNames {
ident: struct_ident,
type_struct_debug_ident,
type_struct_ident,
match_variant_ident,
builder_struct_ident,
mask_match_variant_ident,
mask_type_ident,
mask_type_debug_ident,
mask_value_ident,
mask_value_debug_ident,
mask_builder_struct_ident,
} = &self.names;
macro_rules! unwrap_or_set {
($(let $var:ident =? $fallback_value:expr;)*) => {
$(let $var = $var.clone().unwrap_or_else(|| $fallback_value);)*
};
}
unwrap_or_set! {
let type_struct_debug_ident =? format!("{struct_ident}::Type");
let match_variant_ident =? format_ident!("__{}__MatchVariant", struct_ident);
let builder_struct_ident =? format_ident!("__{}__Builder", struct_ident);
let mask_match_variant_ident =? format_ident!("__AsMask__{}__MatchVariant", struct_ident);
let mask_type_ident =? format_ident!("__AsMask__{}__Type", struct_ident);
let mask_type_debug_ident =? format!("AsMask<{struct_ident}>::Type");
let mask_value_ident =? format_ident!("__AsMask__{}", struct_ident);
let mask_value_debug_ident =? format!("AsMask<{struct_ident}>");
let mask_builder_struct_ident =? format_ident!("__AsMask__{}__Builder", struct_ident);
}
let target = get_target(&self.options.body.target, struct_ident);
let names = ParsedStructNames {
ident: struct_ident.clone(),
type_struct_debug_ident: &type_struct_debug_ident,
type_struct_ident: type_struct_ident.clone(),
match_variant_ident: &match_variant_ident,
builder_struct_ident: &builder_struct_ident,
mask_match_variant_ident: &mask_match_variant_ident,
mask_type_ident: &mask_type_ident,
mask_type_debug_ident: &mask_type_debug_ident,
mask_value_ident: &mask_value_ident,
mask_value_debug_ident: &mask_value_debug_ident,
mask_builder_struct_ident: &mask_builder_struct_ident,
};
self.write_body(target, names, false, tokens);
let mask_names = ParsedStructNames {
ident: mask_value_ident.clone(),
type_struct_debug_ident: &mask_type_debug_ident,
type_struct_ident: mask_type_ident.clone(),
match_variant_ident: &mask_match_variant_ident,
builder_struct_ident: &mask_builder_struct_ident,
mask_match_variant_ident: &mask_match_variant_ident,
mask_type_ident: &mask_type_ident,
mask_type_debug_ident: &mask_type_debug_ident,
mask_value_ident: &mask_value_ident,
mask_value_debug_ident: &mask_value_debug_ident,
mask_builder_struct_ident: &mask_builder_struct_ident,
};
self.write_body(mask_value_ident.clone().into(), mask_names, true, tokens);
}
}
pub(crate) fn value_derive_struct(mut item: ItemStruct) -> syn::Result<TokenStream> {
let item = ParsedStruct::parse(&mut item)?;
let outline_generated = item.options.body.outline_generated;
let mut contents = quote! {
const _: () = {
#item
};
};
if outline_generated.is_some() {
contents = crate::outline_generated(contents, "value-struct-");
}
Ok(contents)
}
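
The generated code in both derive paths above is wrapped in `const _: () = { ... };`. This is a standard proc-macro scoping trick: helper items declared inside the unnamed const can't collide with names in the caller's scope, while any trait or inherent impls generated there still apply to the user's type everywhere. A minimal standalone sketch of the same pattern (the type and helper names are made up for illustration):

```rust
// Sketch of the `const _: () = { ... }` scoping trick used by the derives above.
struct MyType;

const _: () = {
    // this helper is invisible outside the unnamed const, so it can't
    // collide with anything the user defines
    fn helper() -> u32 {
        42
    }

    // but the impl still takes effect for `MyType` everywhere
    impl MyType {
        pub fn value() -> u32 {
            helper()
        }
    }
};

fn main() {
    assert_eq!(MyType::value(), 42);
}
```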

View file

@ -16,4 +16,4 @@ version.workspace = true
proc-macro = true
[dependencies]
fayalite-proc-macros-impl = { workspace = true }
fayalite-proc-macros-impl.workspace = true

View file

@ -2,7 +2,7 @@
// See Notices.txt for copyright information
//! proc macros for `fayalite`
//!
//! see `fayalite::hdl_module` and `fayalite::ty::Value` for docs
//! see `fayalite::hdl_module` and `fayalite::hdl` for docs
// intentionally not documented here, see `fayalite::hdl_module` for docs
#[proc_macro_attribute]
@ -10,16 +10,19 @@ pub fn hdl_module(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
match fayalite_proc_macros_impl::module(attr.into(), item.into()) {
match fayalite_proc_macros_impl::hdl_module(attr.into(), item.into()) {
Ok(retval) => retval.into(),
Err(err) => err.into_compile_error().into(),
}
}
// intentionally not documented here, see `fayalite::ty::Value` for docs
#[proc_macro_derive(Value, attributes(hdl))]
pub fn value_derive(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
match fayalite_proc_macros_impl::value_derive(item.into()) {
// intentionally not documented here, see `fayalite::hdl` for docs
#[proc_macro_attribute]
pub fn hdl(
attr: proc_macro::TokenStream,
item: proc_macro::TokenStream,
) -> proc_macro::TokenStream {
match fayalite_proc_macros_impl::hdl_attr(attr.into(), item.into()) {
Ok(retval) => retval.into(),
Err(err) => err.into_compile_error().into(),
}

View file

@ -13,11 +13,11 @@ rust-version.workspace = true
version.workspace = true
[dependencies]
indexmap = { workspace = true }
prettyplease = { workspace = true }
proc-macro2 = { workspace = true }
quote = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
syn = { workspace = true }
thiserror = { workspace = true }
indexmap.workspace = true
prettyplease.workspace = true
proc-macro2.workspace = true
quote.workspace = true
serde.workspace = true
serde_json.workspace = true
syn.workspace = true
thiserror.workspace = true

View file

@ -1,7 +1,7 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote, ToTokens};
use quote::{ToTokens, format_ident, quote};
use std::{collections::BTreeMap, fs};
use syn::{fold::Fold, parse_quote};

View file

@ -14,25 +14,36 @@ rust-version.workspace = true
version.workspace = true
[dependencies]
bitvec = { workspace = true }
hashbrown = { workspace = true }
num-bigint = { workspace = true }
num-traits = { workspace = true }
fayalite-proc-macros = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
clap = { version = "4.5.9", features = ["derive", "env"] }
eyre = "0.6.12"
which = "6.0.1"
base64.workspace = true
bitvec.workspace = true
blake3.workspace = true
clap.workspace = true
clap_complete.workspace = true
ctor.workspace = true
eyre.workspace = true
fayalite-proc-macros.workspace = true
hashbrown.workspace = true
jobslot.workspace = true
num-bigint.workspace = true
num-traits.workspace = true
ordered-float.workspace = true
petgraph.workspace = true
serde_json.workspace = true
serde.workspace = true
tempfile.workspace = true
vec_map.workspace = true
which.workspace = true
[dev-dependencies]
trybuild = { workspace = true }
trybuild.workspace = true
serde = { workspace = true, features = ["rc"] }
[build-dependencies]
fayalite-visit-gen = { workspace = true }
fayalite-visit-gen.workspace = true
[features]
unstable-doc = []
unstable-test-hasher = []
[package.metadata.docs.rs]
features = ["unstable-doc"]

View file

@ -4,6 +4,10 @@ use fayalite_visit_gen::parse_and_generate;
use std::{env, fs, path::Path};
fn main() {
println!("cargo::rustc-check-cfg=cfg(todo)");
println!("cargo::rustc-check-cfg=cfg(cfg_false_for_tests)");
println!("cargo::rustc-check-cfg=cfg(cfg_true_for_tests)");
println!("cargo::rustc-cfg=cfg_true_for_tests");
let path = "visit_types.json";
println!("cargo::rerun-if-changed={path}");
println!("cargo::rerun-if-changed=build.rs");

View file

@ -1,51 +1,64 @@
use clap::Parser;
use fayalite::{
cli,
clock::{Clock, ClockDomain},
hdl_module,
int::{DynUInt, DynUIntType, IntCmp, IntTypeTrait, UInt},
reset::{SyncReset, ToReset},
};
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use fayalite::prelude::*;
#[hdl_module]
fn blinky(clock_frequency: u64) {
#[hdl]
let clk: Clock = m.input();
#[hdl]
let rst: SyncReset = m.input();
fn blinky(platform_io_builder: PlatformIOBuilder<'_>) {
let clk_input =
platform_io_builder.peripherals_with_type::<peripherals::ClockInput>()[0].use_peripheral();
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
let cd = #[hdl]
ClockDomain {
clk,
rst: rst.to_reset(),
clk: clk_input.clk,
rst,
};
let max_value = clock_frequency / 2 - 1;
let int_ty = DynUIntType::range_inclusive(0..=max_value);
let max_value = (Expr::ty(clk_input).frequency() / 2.0).round_ties_even() as u64 - 1;
let int_ty = UInt::range_inclusive(0..=max_value);
#[hdl]
let counter: DynUInt = m.reg_builder().clock_domain(cd).reset(int_ty.literal(0));
let counter_reg: UInt = reg_builder().clock_domain(cd).reset(0u8.cast_to(int_ty));
#[hdl]
let output_reg: UInt<1> = m.reg_builder().clock_domain(cd).reset_default();
let output_reg: Bool = reg_builder().clock_domain(cd).reset(false);
#[hdl]
if counter.cmp_eq(max_value) {
m.connect_any(counter, 0u8);
m.connect(output_reg, !output_reg);
let rgb_output_reg = reg_builder().clock_domain(cd).reset(
#[hdl]
peripherals::RgbLed {
r: false,
g: false,
b: false,
},
);
#[hdl]
if counter_reg.cmp_eq(max_value) {
connect_any(counter_reg, 0u8);
connect(output_reg, !output_reg);
connect(rgb_output_reg.r, !rgb_output_reg.r);
#[hdl]
if rgb_output_reg.r {
connect(rgb_output_reg.g, !rgb_output_reg.g);
#[hdl]
if rgb_output_reg.g {
connect(rgb_output_reg.b, !rgb_output_reg.b);
}
}
} else {
m.connect_any(counter, counter + 1_hdl_u1);
connect_any(counter_reg, counter_reg + 1_hdl_u1);
}
for led in platform_io_builder.peripherals_with_type::<peripherals::Led>() {
if let Ok(led) = led.try_use_peripheral() {
connect(led.on, output_reg);
}
}
for rgb_led in platform_io_builder.peripherals_with_type::<peripherals::RgbLed>() {
if let Ok(rgb_led) = rgb_led.try_use_peripheral() {
connect(rgb_led, rgb_output_reg);
}
}
#[hdl]
let led: UInt<1> = m.output();
m.connect(led, output_reg);
let io = m.add_platform_io(platform_io_builder);
}
#[derive(Parser)]
struct Cli {
/// clock frequency in hertz
#[arg(long, default_value = "1000000", value_parser = clap::value_parser!(u64).range(2..))]
clock_frequency: u64,
#[command(subcommand)]
cli: cli::Cli,
}
fn main() -> cli::Result {
let cli = Cli::parse();
cli.cli.run(blinky(cli.clock_frequency))
fn main() {
<BuildCli>::main("blinky", |_, platform, _| {
Ok(JobParams::new(platform.wrap_main_module(blinky)))
});
}
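
A quick sanity check of the counter sizing in the rewritten blinky: the register counts from 0 up to `max_value` and toggles `output_reg` on each wrap, so the wrap period is half the blink period. A small worked example, assuming a 100 MHz clock input (the frequency is an assumption for illustration; the formula is the one used in the module body above):

```rust
// Worked example of blinky's counter sizing, assuming a 100 MHz clock.
fn main() {
    let frequency = 100.0e6_f64;
    // same computation as in the module body above
    let max_value = (frequency / 2.0).round_ties_even() as u64 - 1;
    assert_eq!(max_value, 49_999_999);

    // the counter wraps every `max_value + 1` cycles, i.e. every 0.5 s at
    // 100 MHz, and each wrap toggles the LED, giving a 1 Hz blink
    let toggles_per_second = frequency / (max_value + 1) as f64;
    assert_eq!(toggles_per_second, 2.0);
}
```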

View file

@ -0,0 +1,188 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use clap::builder::TypedValueParser;
use fayalite::{
build::{ToArgs, WriteArgs},
platform::PeripheralRef,
prelude::*,
};
use ordered_float::NotNan;
fn pick_clock<'a>(
platform_io_builder: &PlatformIOBuilder<'a>,
) -> PeripheralRef<'a, peripherals::ClockInput> {
let mut clks = platform_io_builder.peripherals_with_type::<peripherals::ClockInput>();
clks.sort_by_key(|clk| {
// sort clocks by preference; a smaller return value means higher preference
let mut frequency = clk.ty().frequency();
let priority;
if frequency < 10e6 {
frequency = -frequency; // prefer bigger frequencies
priority = 1;
} else if frequency > 50e6 {
// prefer smaller frequencies
priority = 2; // least preferred
} else {
priority = 0; // most preferred
frequency = (frequency - 25e6).abs(); // prefer closer to 25MHz
}
(priority, NotNan::new(frequency).expect("should be valid"))
});
clks[0]
}
#[hdl_module]
fn tx_only_uart(
platform_io_builder: PlatformIOBuilder<'_>,
divisor: f64,
message: impl AsRef<[u8]>,
) {
let message = message.as_ref();
let clk_input = pick_clock(&platform_io_builder).use_peripheral();
let rst = platform_io_builder.peripherals_with_type::<Reset>()[0].use_peripheral();
let cd = #[hdl]
ClockDomain {
clk: clk_input.clk,
rst,
};
let numerator = 1u128 << 16;
let denominator = (divisor * numerator as f64).round() as u128;
#[hdl]
let remainder_reg: UInt<128> = reg_builder().clock_domain(cd).reset(0u128);
#[hdl]
let sum: UInt<128> = wire();
connect_any(sum, remainder_reg + numerator);
#[hdl]
let tick_reg = reg_builder().clock_domain(cd).reset(false);
connect(tick_reg, false);
#[hdl]
let next_remainder: UInt<128> = wire();
connect(remainder_reg, next_remainder);
#[hdl]
if sum.cmp_ge(denominator) {
connect_any(next_remainder, sum - denominator);
connect(tick_reg, true);
} else {
connect(next_remainder, sum);
}
#[hdl]
let uart_state_reg = reg_builder().clock_domain(cd).reset(0_hdl_u4);
#[hdl]
let next_uart_state: UInt<4> = wire();
connect_any(next_uart_state, uart_state_reg + 1u8);
#[hdl]
let message_mem: Array<UInt<8>> = wire(Array[UInt::new_static()][message.len()]);
for (message, message_mem) in message.iter().zip(message_mem) {
connect(message_mem, *message);
}
#[hdl]
let addr_reg: UInt<32> = reg_builder().clock_domain(cd).reset(0u32);
#[hdl]
let next_addr: UInt<32> = wire();
connect(next_addr, addr_reg);
#[hdl]
let tx = reg_builder().clock_domain(cd).reset(true);
#[hdl]
let tx_bits: Array<Bool, 10> = wire();
connect(tx_bits[0], false); // start bit
connect(tx_bits[9], true); // stop bit
for i in 0..8 {
connect(tx_bits[i + 1], message_mem[addr_reg][i]); // data bits
}
connect(tx, tx_bits[uart_state_reg]);
#[hdl]
if uart_state_reg.cmp_eq(Expr::ty(tx_bits).len() - 1) {
connect(next_uart_state, 0_hdl_u4);
let next_addr_val = addr_reg + 1u8;
#[hdl]
if next_addr_val.cmp_lt(message.len()) {
connect_any(next_addr, next_addr_val);
} else {
connect(next_addr, 0u32);
}
}
#[hdl]
if tick_reg {
connect(uart_state_reg, next_uart_state);
connect(addr_reg, next_addr);
}
for uart in platform_io_builder.peripherals_with_type::<peripherals::Uart>() {
connect(uart.use_peripheral().tx, tx);
}
#[hdl]
let io = m.add_platform_io(platform_io_builder);
}
fn parse_baud_rate(
v: impl AsRef<str>,
) -> Result<NotNan<f64>, Box<dyn std::error::Error + Send + Sync>> {
let retval: NotNan<f64> = v
.as_ref()
.parse()
.map_err(|_| "invalid baud rate, must be a finite positive floating-point value")?;
if *retval > 0.0 && retval.is_finite() {
Ok(retval)
} else {
Err("baud rate must be finite and positive".into())
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, clap::Args)]
pub struct ExtraArgs {
#[arg(long, value_parser = clap::builder::StringValueParser::new().try_map(parse_baud_rate), default_value = "115200")]
pub baud_rate: NotNan<f64>,
#[arg(long, default_value = "Hello World from Fayalite!!!\r\n", value_parser = clap::builder::NonEmptyStringValueParser::new())]
pub message: String,
}
impl ToArgs for ExtraArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self { baud_rate, message } = self;
args.write_display_arg(format_args!("--baud-rate={baud_rate}"));
args.write_long_option_eq("message", message);
}
}
fn main() {
type Cli = BuildCli<ExtraArgs>;
Cli::main(
"tx_only_uart",
|_, platform, ExtraArgs { baud_rate, message }| {
Ok(JobParams::new(platform.try_wrap_main_module(|io| {
let clk = pick_clock(&io).ty();
let divisor = clk.frequency() / *baud_rate;
let baud_rate_error = |msg| {
<Cli as clap::CommandFactory>::command()
.error(clap::error::ErrorKind::ValueValidation, msg)
};
const HUGE_DIVISOR: f64 = u64::MAX as f64;
match divisor {
divisor if !divisor.is_finite() => {
return Err(baud_rate_error("bad baud rate"));
}
HUGE_DIVISOR.. => return Err(baud_rate_error("baud rate is too small")),
4.0.. => {}
_ => return Err(baud_rate_error("baud rate is too large")),
}
Ok(tx_only_uart(io, divisor, message))
})?))
},
);
}
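
The baud-rate generator in `tx_only_uart` is a fixed-point fractional divider: every clock cycle it adds `numerator` (2^16) to a remainder and emits a tick whenever the running sum reaches `denominator` = round(`divisor` * 2^16), so ticks occur once every `divisor` cycles on average, quantized to 1/65536 of a cycle. A small software model of the same loop for checking the math -- the helper name and the 100 MHz clock are assumptions for illustration; 115200 baud is the example's default:

```rust
// Software model of tx_only_uart's tick generator.
fn count_ticks(divisor: f64, cycles: u64) -> u64 {
    let numerator: u128 = 1 << 16;
    let denominator = (divisor * numerator as f64).round() as u128;
    let mut remainder: u128 = 0;
    let mut ticks = 0;
    for _ in 0..cycles {
        let sum = remainder + numerator;
        if sum >= denominator {
            remainder = sum - denominator;
            ticks += 1;
        } else {
            remainder = sum;
        }
    }
    ticks
}

fn main() {
    // a 100 MHz clock at 115200 baud gives divisor ~= 868.06; over one
    // second's worth of cycles we expect roughly 115200 ticks
    let divisor = 100.0e6 / 115200.0;
    println!("ticks in 1 s: {}", count_ticks(divisor, 100_000_000));
}
```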

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
#![doc = include_str!("../README.md")]
//!
@ -10,7 +12,7 @@
//! function to add inputs/outputs and other components to that module.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt};
//! # use fayalite::prelude::*;
//! #
//! #[hdl_module]
//! pub fn example_module() {
@ -18,7 +20,7 @@
//! let an_input: UInt<10> = m.input(); // create an input that is a 10-bit unsigned integer
//! #[hdl]
//! let some_output: UInt<10> = m.output();
//! m.connect(some_output, an_input); // assigns the value of `an_input` to `some_output`
//! connect(some_output, an_input); // assigns the value of `an_input` to `some_output`
//! }
//! ```

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Fayalite Modules
//!
//! The [`#[hdl_module]`][`crate::hdl_module`] attribute is applied to a Rust

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! These are for when you want to use modules written in
//! some other language, such as Verilog.
//!
@ -11,8 +13,6 @@
//! * [`parameter_raw_verilog()`][`ModuleBuilder::parameter_raw_verilog`]
//! * [`parameter()`][`ModuleBuilder::parameter`]
//!
//! These use the [`ExternModule`][`crate::module::ExternModule`] tag type.
//!
//! [inputs/outputs]: crate::_docs::modules::module_bodies::hdl_let_statements::inputs_outputs
#[allow(unused)]

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Module Function Bodies
//!
//! The `#[hdl_module]` attribute lets you have statements/expressions with `#[hdl]` annotations

View file

@ -1,24 +1,26 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl]` Array Expressions
//!
//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][Array] expression:
//! `#[hdl]` can be used on Array Expressions to construct an [`Array<[T; N]>`][type@Array] expression:
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, array::Array};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let v: UInt<8> = m.input();
//! #[hdl]
//! let w: Array<[UInt<8>; 4]> = m.wire();
//! m.connect(
//! let w: Array<UInt<8>, 4> = wire();
//! connect(
//! w,
//! #[hdl]
//! [4_hdl_u8, v, 3_hdl_u8, (v + 7_hdl_u8).cast()] // you can make an array like this
//! [4_hdl_u8, v, 3_hdl_u8, (v + 7_hdl_u8).cast_to_static()] // you can make an array like this
//! );
//! m.connect(
//! connect(
//! w,
//! #[hdl]
//! [(v + 1_hdl_u8).cast(); 4] // or you can make an array repeat like this
//! [(v + 1_hdl_u8).cast_to_static(); 4] // or you can make an array repeat like this
//! );
//! # }
//! ```

View file

@ -1,10 +1,11 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl] if` Statements
//!
//! `#[hdl] if` statements behave similarly to Rust `if` statements, except they end up as muxes
//! and similar selection logic in the final hardware instead of being run when the fayalite module is being created.
//!
//! The condition of an `#[hdl] if` statement must have type [`UInt<1>`] or [`DynUInt`] with
//! `width() == 1` or be an [expression][Expr] of one of those types.
//! The condition of an `#[hdl] if` statement must have type [`Expr<Bool>`][Bool].
//!
//! `#[hdl] if` statements' bodies must evaluate to type `()` for now.
//!
@ -14,7 +15,4 @@
//! [match]: super::hdl_match_statements
#[allow(unused)]
use crate::{
expr::Expr,
int::{DynUInt, UInt},
};
use crate::int::Bool;

View file

@ -1,5 +1,8 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ## `#[hdl] let` statements
pub mod destructuring;
pub mod inputs_outputs;
pub mod instances;
pub mod memories;

View file

@ -0,0 +1,33 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Destructuring Let
//!
//! You can use `#[hdl] let` to destructure types, similarly to Rust `let` statements with non-trivial patterns.
//!
//! `#[hdl] let` statements can only match one level of struct/tuple pattern for now,
//! e.g. you can match with the pattern `MyStruct { a, b }`, but not `MyStruct { a, b: Struct2 { v } }`.
//!
//! ```
//! # use fayalite::prelude::*;
//! #[hdl]
//! struct MyStruct {
//! a: UInt<8>,
//! b: Bool,
//! }
//!
//! #[hdl_module]
//! fn my_module() {
//! #[hdl]
//! let my_input: MyStruct = m.input();
//! #[hdl]
//! let my_output: UInt<8> = m.output();
//! #[hdl]
//! let MyStruct { a, b } = my_input;
//! #[hdl]
//! if b {
//! connect(my_output, a);
//! } else {
//! connect(my_output, 0_hdl_u8);
//! }
//! }
//! ```

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Inputs/Outputs
//!
//! Inputs/Outputs create a Rust variable with type [`Expr<T>`] where `T` is the type of the input/output.
@ -6,14 +8,14 @@
//! so you should read it.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, expr::Expr, array::Array};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let my_input: UInt<10> = m.input();
//! let _: Expr<UInt<10>> = my_input; // my_input has type Expr<UInt<10>>
//! #[hdl]
//! let my_output: Array<[UInt<10>; 3]> = m.output();
//! let my_output: Array<UInt<10>, 3> = m.output();
//! # }
//! ```
//!

View file

@ -1,18 +1,20 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Module Instances
//!
//! Module instances are kinda like the hardware equivalent of calling a function;
//! you can create them like so:
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, array::Array};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let my_instance = m.instance(some_module());
//! let my_instance = instance(some_module());
//! // now you can use `my_instance`'s inputs/outputs like so:
//! #[hdl]
//! let v: UInt<3> = m.input();
//! m.connect(my_instance.a, v);
//! connect(my_instance.a, v);
//! #[hdl_module]
//! fn some_module() {
//! #[hdl]

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Memories
//!
//! Memories are optimized for storing large amounts of data.
@ -7,12 +9,12 @@
//!
//! There are several different ways to create a memory:
//!
//! ## using [`ModuleBuilder::memory()`]
//! ## using [`memory()`]
//!
//! This way you have to set the [`depth`][`MemBuilder::depth`] separately.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, clock::ClockDomain};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! // first, we need some IO
@ -25,45 +27,45 @@
//!
//! // now create the memory
//! #[hdl]
//! let mut my_memory = m.memory();
//! let mut my_memory = memory();
//! my_memory.depth(256); // the memory has 256 elements
//!
//! let read_port = my_memory.new_read_port();
//!
//! // connect up the read port
//! m.connect_any(read_port.addr, read_addr);
//! m.connect(read_port.en, 1_hdl_u1);
//! m.connect(read_port.clk, cd.clk);
//! m.connect(read_data, read_port.data);
//! connect_any(read_port.addr, read_addr);
//! connect(read_port.en, true);
//! connect(read_port.clk, cd.clk);
//! connect(read_data, read_port.data);
//!
//! // we need more IO for the write port
//! #[hdl]
//! let write_addr: UInt<8> = m.input();
//! #[hdl]
//! let do_write: UInt<1> = m.input();
//! let do_write: Bool = m.input();
//! #[hdl]
//! let write_data: UInt<8> = m.input();
//!
//! let write_port = my_memory.new_write_port();
//!
//! m.connect_any(write_port.addr, write_addr);
//! m.connect(write_port.en, do_write);
//! m.connect(write_port.clk, cd.clk);
//! m.connect(write_port.data, write_port.data);
//! m.connect(write_port.mask, 1_hdl_u1);
//! connect_any(write_port.addr, write_addr);
//! connect(write_port.en, do_write);
//! connect(write_port.clk, cd.clk);
//! connect(write_port.data, write_data);
//! connect(write_port.mask, true);
//! # }
//! ```
//!
//! ## using [`ModuleBuilder::memory_array()`]
//! ## using [`memory_array()`]
//!
//! This allows you to specify the memory's underlying array type directly.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, memory::MemBuilder};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let mut my_memory: MemBuilder<[UInt<8>; 256]> = m.memory_array();
//! let mut my_memory: MemBuilder<UInt<8>, ConstUsize<256>> = memory_array();
//!
//! let read_port = my_memory.new_read_port();
//! // ...
@ -72,25 +74,22 @@
//! # }
//! ```
//!
//! ## using [`ModuleBuilder::memory_with_init()`]
//! ## using [`memory_with_init()`]
//!
//! This allows you to deduce the memory's array type from the data used to initialize the memory.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! # #[hdl]
//! # let read_addr: UInt<2> = m.input();
//! #[hdl]
//! let mut my_memory = m.memory_with_init(
//! #[hdl]
//! [0x12_hdl_u8, 0x34_hdl_u8, 0x56_hdl_u8, 0x78_hdl_u8],
//! );
//! let mut my_memory = memory_with_init([0x12_hdl_u8, 0x34_hdl_u8, 0x56_hdl_u8, 0x78_hdl_u8]);
//!
//! let read_port = my_memory.new_read_port();
//! // note that `read_addr` is `UInt<2>` since the memory only has 4 elements
//! m.connect_any(read_port.addr, read_addr);
//! connect_any(read_port.addr, read_addr);
//! // ...
//! let write_port = my_memory.new_write_port();
//! // ...
@ -98,4 +97,4 @@
//! ```
#[allow(unused)]
use crate::{memory::MemBuilder, module::ModuleBuilder};
use crate::prelude::*;

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Registers
//!
//! Registers are memory devices that will change their state only on a clock
@ -7,20 +9,23 @@
//!
//! Registers follow [connection semantics], which are unlike assignments in software, so you should read it.
//!
//! By convention, register names end in `_reg` -- this helps you tell whether connecting to a value
//! takes effect immediately (a wire) or only on the next clock edge (a register).
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, array::Array, clock::ClockDomain};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! # #[hdl]
//! # let v: UInt<1> = m.input();
//! # let v: Bool = m.input();
//! #[hdl]
//! let cd: ClockDomain = m.input();
//! #[hdl]
//! let my_register: UInt<8> = m.reg_builder().clock_domain(cd).reset(8_hdl_u8);
//! let my_reg: UInt<8> = reg_builder().clock_domain(cd).reset(8_hdl_u8);
//! #[hdl]
//! if v {
//! // my_register is only changed when both `v` is set and `cd`'s clock edge occurs.
//! m.connect(my_register, 0x45_hdl_u8);
//! // my_reg is only changed when both `v` is set and `cd`'s clock edge occurs.
//! connect(my_reg, 0x45_hdl_u8);
//! }
//! # }
//! ```

View file

@ -1,27 +1,29 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! ### Wires
//!
//! Wires are kinda like variables, but unlike registers,
//! they have no memory (they're combinatorial).
//! You must [connect][`ModuleBuilder::connect`] to all wires, so they have a defined value.
//! You must [connect] to all wires so that they have a defined value.
//!
//! Wires create a Rust variable with type [`Expr<T>`] where `T` is the type of the wire.
//!
//! Wires follow [connection semantics], which are unlike assignments in software, so you should read it.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, array::Array, clock::ClockDomain};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! # #[hdl]
//! # let v: UInt<1> = m.input();
//! # let v: Bool = m.input();
//! #[hdl]
//! let my_wire: UInt<8> = m.wire();
//! let my_wire: UInt<8> = wire();
//! #[hdl]
//! if v {
//! m.connect(my_wire, 0x45_hdl_u8);
//! connect(my_wire, 0x45_hdl_u8);
//! } else {
//! // wires must be connected to under all conditions
//! m.connect(my_wire, 0x23_hdl_u8);
//! connect(my_wire, 0x23_hdl_u8);
//! }
//! # }
//! ```
@ -29,4 +31,4 @@
//! [connection semantics]: crate::_docs::semantics::connection_semantics
#[allow(unused)]
use crate::{expr::Expr, module::ModuleBuilder};
use crate::prelude::*;

View file

@ -1,9 +1,10 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `_hdl`-suffixed literals
//!
//! You can have integer literals with an arbitrary number of bits like so:
//!
//! `_hdl`-suffixed literals have type [`Expr<UInt<N>>`] or [`Expr<SInt<N>>`]
//! ... which are basically just [`UInt<N>`] or [`SInt<N>`] converted to an expression.
//! `_hdl`-suffixed literals have type [`Expr<UInt<N>>`] or [`Expr<SInt<N>>`].
//!
//! ```
//! # #[fayalite::hdl_module]

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl] match` Statements
//!
//! `#[hdl] match` statements behave similarly to Rust `match` statements, except they end up as muxes
@ -5,5 +7,5 @@
//!
//! `#[hdl] match` statements' bodies must evaluate to type `()` for now.
//!
//! `#[hdl] match` statements can only match one level of struct/enum pattern for now,
//! e.g. you can match with the pattern `Some(v)`, but not `Some(Some(_))`.
//! `#[hdl] match` statements can only match one level of struct/tuple/enum pattern for now,
//! e.g. you can match with the pattern `HdlSome(v)`, but not `HdlSome(HdlSome(_))`.
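
Since this hunk only states the one-level restriction, here is a rough sketch of what such a match might look like, reusing the `MyEnum` defined in the struct/variant expressions doc further down. This is an illustration written against the prelude-based API used in the other examples, not code taken from the diff; the module name is made up, and it assumes patterns use the ordinary `::` path syntax even though `#[hdl]` expressions construct variants as `MyEnum.B(...)`:

```rust
// Rough sketch (not from the diff): a one-level `#[hdl] match`.
use fayalite::prelude::*;

#[hdl]
pub enum MyEnum {
    A,
    B(UInt<32>),
}

#[hdl_module]
fn match_example() {
    #[hdl]
    let input: MyEnum = m.input();
    #[hdl]
    let output: UInt<32> = m.output();
    #[hdl]
    match input {
        MyEnum::A => {
            connect(output, 0_hdl_u32);
        }
        MyEnum::B(v) => {
            connect(output, v);
        }
    }
}
```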

View file

@ -1,28 +1,27 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # `#[hdl]` Struct/Variant Expressions
//!
//! Note: Structs are also known as [Bundles] when used in Fayalite; the Bundle name comes from [FIRRTL].
//!
//! [Bundles]: crate::bundle::BundleValue
//! [Bundles]: crate::bundle::BundleType
//! [FIRRTL]: https://github.com/chipsalliance/firrtl-spec
//!
//! `#[hdl]` can be used on Struct/Variant Expressions to construct a value of that
//! struct/variant's type. They can also be used on tuples.
//! `#[hdl]` can be used on Struct Expressions to construct a value of that
//! struct's type. They can also be used on tuples.
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt, array::Array, ty::Value};
//! #[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
//! #[hdl(static)]
//! # use fayalite::prelude::*;
//! #[hdl]
//! pub struct MyStruct {
//! pub a: UInt<8>,
//! pub b: UInt<16>,
//! }
//!
//! #[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
//! #[hdl]
//! pub enum MyEnum {
//! A,
//! B {
//! v: UInt<32>,
//! },
//! B(UInt<32>),
//! }
//!
//! # #[hdl_module]
@ -30,8 +29,8 @@
//! #[hdl]
//! let v: UInt<8> = m.input();
//! #[hdl]
//! let my_struct: MyStruct = m.wire();
//! m.connect(
//! let my_struct: MyStruct = wire();
//! connect(
//! my_struct,
//! #[hdl]
//! MyStruct {
@ -40,15 +39,14 @@
//! },
//! );
//! #[hdl]
//! let my_enum: MyEnum = m.wire();
//! m.connect(
//! let my_enum: MyEnum = wire();
//! connect(
//! my_enum,
//! #[hdl]
//! MyEnum::B { v: 12345678_hdl_u32 },
//! MyEnum.B(12345678_hdl_u32),
//! );
//! #[hdl]
//! let some_tuple: (UInt<4>, UInt<12>) = m.wire();
//! m.connect(
//! let some_tuple: (UInt<4>, UInt<12>) = wire();
//! connect(
//! some_tuple,
//! #[hdl]
//! (12_hdl_u4, 3421_hdl_u12),
@ -57,4 +55,4 @@
//! ```
#[allow(unused)]
use crate::array::Array;
use crate::prelude::*;

View file

@ -1,6 +1,6 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Normal Modules
//!
//! These use the [`NormalModule`][`crate::module::NormalModule`] tag type.
//!
//! See also: [Extern Modules][`super::extern_module`]
//! See also: [Module Bodies][`super::module_bodies`]

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Fayalite Semantics
//!
//! Fayalite's semantics are based on [FIRRTL]. Due to their significance, some of the semantics are also documented here.

View file

@ -1,3 +1,5 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! # Connection Semantics
//!
//! Fayalite's connection semantics are unlike assignments in software, so be careful!
@ -20,62 +22,60 @@
//! Connection Semantics Example:
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let a: UInt<8> = m.wire();
//! let a: UInt<8> = wire();
//! #[hdl]
//! let b: UInt<8> = m.output();
//!
//! // doesn't actually affect anything, since `a` is completely overwritten later
//! m.connect(a, 5_hdl_u8);
//! connect(a, 5_hdl_u8);
//!
//! // here `a` has value `7` since the last connection assigns
//! // `7` to `a`, so `b` has value `7` too.
//! m.connect(b, a);
//! connect(b, a);
//!
//! // this is the last `connect` to `a`, so this `connect` determines `a`'s value
//! m.connect(a, 7_hdl_u8);
//! connect(a, 7_hdl_u8);
//! # }
//! ```
//!
//! # Conditional Connection Semantics
//!
//! ```
//! # use fayalite::{hdl_module, int::UInt};
//! # use fayalite::prelude::*;
//! # #[hdl_module]
//! # fn module() {
//! #[hdl]
//! let cond: UInt<1> = m.input();
//! let cond: Bool = m.input();
//! #[hdl]
//! let a: UInt<8> = m.wire();
//! let a: UInt<8> = wire();
//! #[hdl]
//! let b: UInt<8> = m.output();
//!
//! // this is the last `connect` to `a` when `cond` is `0`
//! m.connect(a, 5_hdl_u8);
//! connect(a, 5_hdl_u8);
//!
//! // here `a` has value `7` if `cond` is `1` since the last connection assigns
//! // `7` to `a`, so `b` has value `7` too, otherwise `a` (and therefore `b`)
//! // have value `5` since then the connection assigning `7` is in a
//! // conditional block where the condition doesn't hold.
//! m.connect(b, a);
//! connect(b, a);
//!
//! #[hdl]
//! if cond {
//! // this is the last `connect` to `a` when `cond` is `1`
//! m.connect(a, 7_hdl_u8);
//! connect(a, 7_hdl_u8);
//! }
//! # }
//! ```
//!
//! [conditional block]: self#conditional-connection-semantics
//! [`connect()`]: ModuleBuilder::connect
//! [`connect_any()`]: ModuleBuilder::connect_any
//! [wire]: crate::_docs::modules::module_bodies::hdl_let_statements::wires
//! [if]: crate::_docs::modules::module_bodies::hdl_if_statements
//! [FIRRTL]: https://github.com/chipsalliance/firrtl-spec
#[allow(unused)]
use crate::module::ModuleBuilder;
use crate::prelude::*;

View file

@ -1,17 +1,18 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
expr::Target,
expr::target::Target,
intern::{Intern, Interned},
};
use serde::{Deserialize, Serialize};
use std::{
fmt,
hash::{Hash, Hasher},
iter::FusedIterator,
ops::Deref,
};
#[derive(Clone)]
#[derive(Clone, Debug)]
struct CustomFirrtlAnnotationFieldsImpl {
value: serde_json::Map<String, serde_json::Value>,
serialized: Interned<str>,
@ -118,11 +119,109 @@ pub struct CustomFirrtlAnnotation {
pub additional_fields: CustomFirrtlAnnotationFields,
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum Annotation {
DontTouch,
CustomFirrtl(CustomFirrtlAnnotation),
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct DontTouchAnnotation;
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct SVAttributeAnnotation {
pub text: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct BlackBoxInlineAnnotation {
pub path: Interned<str>,
pub text: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct BlackBoxPathAnnotation {
pub path: Interned<str>,
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, Serialize, Deserialize)]
pub struct DocStringAnnotation {
pub text: Interned<str>,
}
macro_rules! make_annotation_enum {
(
#[$non_exhaustive:ident]
$(#[$meta:meta])*
$vis:vis enum $AnnotationEnum:ident {
$($Variant:ident($T:ty),)*
}
) => {
crate::annotations::make_annotation_enum!(@require_non_exhaustive $non_exhaustive);
#[$non_exhaustive]
$(#[$meta])*
#[derive(Clone, PartialEq, Eq, Hash)]
$vis enum $AnnotationEnum {
$($Variant($T),)*
}
impl std::fmt::Debug for $AnnotationEnum {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
$(Self::$Variant(v) => v.fmt(f),)*
}
}
}
$(impl From<$T> for crate::annotations::Annotation {
fn from(v: $T) -> Self {
$AnnotationEnum::$Variant(v).into()
}
}
impl crate::annotations::IntoAnnotations for $T {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[self.into()]
}
}
impl crate::annotations::IntoAnnotations for &'_ $T {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[crate::annotations::Annotation::from(self.clone())]
}
}
impl crate::annotations::IntoAnnotations for &'_ mut $T {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[crate::annotations::Annotation::from(self.clone())]
}
}
impl crate::annotations::IntoAnnotations for Box<$T> {
type IntoAnnotations = [crate::annotations::Annotation; 1];
fn into_annotations(self) -> Self::IntoAnnotations {
[crate::annotations::Annotation::from(*self)]
}
})*
};
(@require_non_exhaustive non_exhaustive) => {};
}
pub(crate) use make_annotation_enum;
make_annotation_enum! {
#[non_exhaustive]
pub enum Annotation {
DontTouch(DontTouchAnnotation),
SVAttribute(SVAttributeAnnotation),
BlackBoxInline(BlackBoxInlineAnnotation),
BlackBoxPath(BlackBoxPathAnnotation),
DocString(DocStringAnnotation),
CustomFirrtl(CustomFirrtlAnnotation),
Xilinx(crate::vendor::xilinx::XilinxAnnotation),
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
@ -187,10 +286,68 @@ impl IntoAnnotations for &'_ mut Annotation {
}
}
impl<T: IntoIterator<Item = Annotation>> IntoAnnotations for T {
type IntoAnnotations = Self;
pub struct IterIntoAnnotations<T: Iterator<Item: IntoAnnotations>> {
outer: T,
inner: Option<<<T::Item as IntoAnnotations>::IntoAnnotations as IntoIterator>::IntoIter>,
}
fn into_annotations(self) -> Self::IntoAnnotations {
self
impl<T: Iterator<Item: IntoAnnotations>> Iterator for IterIntoAnnotations<T> {
type Item = Annotation;
fn next(&mut self) -> Option<Self::Item> {
loop {
if let Some(inner) = &mut self.inner {
let Some(retval) = inner.next() else {
self.inner = None;
continue;
};
return Some(retval);
} else {
self.inner = Some(self.outer.next()?.into_annotations().into_iter());
}
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
if let (0, Some(0)) = self.outer.size_hint() {
self.inner
.as_ref()
.map(|v| v.size_hint())
.unwrap_or((0, Some(0)))
} else {
(
self.inner.as_ref().map(|v| v.size_hint().0).unwrap_or(0),
None,
)
}
}
fn fold<B, F>(self, init: B, f: F) -> B
where
Self: Sized,
F: FnMut(B, Self::Item) -> B,
{
self.inner
.into_iter()
.chain(self.outer.map(|v| v.into_annotations().into_iter()))
.flatten()
.fold(init, f)
}
}
impl<
T: FusedIterator<Item: IntoAnnotations<IntoAnnotations: IntoIterator<IntoIter: FusedIterator>>>,
> FusedIterator for IterIntoAnnotations<T>
{
}
impl<T: IntoIterator<Item: IntoAnnotations>> IntoAnnotations for T {
type IntoAnnotations = IterIntoAnnotations<T::IntoIter>;
fn into_annotations(self) -> Self::IntoAnnotations {
IterIntoAnnotations {
outer: self.into_iter(),
inner: None,
}
}
}
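
The new blanket impl replaces `IntoAnnotations for T: IntoIterator<Item = Annotation>` with one accepting any iterable whose items are themselves `IntoAnnotations`, flattening them through `IterIntoAnnotations`. A rough usage sketch -- it assumes these types are reachable through `fayalite::annotations`, and that `Annotation` itself implements `IntoAnnotations` as the neighbouring impls suggest:

```rust
// Rough sketch: a list of annotation values flattened into `Annotation`s.
use fayalite::{
    annotations::{Annotation, DocStringAnnotation, DontTouchAnnotation, IntoAnnotations},
    intern::Intern,
};

fn main() {
    let annotations: Vec<Annotation> = [
        Annotation::from(DontTouchAnnotation),
        Annotation::from(DocStringAnnotation {
            text: "keep this wire".intern(),
        }),
    ]
    .into_annotations() // the blanket impl kicks in for the array
    .collect();
    assert_eq!(annotations.len(), 2);
}
```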

File diff suppressed because it is too large

crates/fayalite/src/build.rs (new file, 2803 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,128 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
BaseJob, BaseJobKind, CommandParams, DynJobKind, GlobalParams, JobAndDependencies,
JobArgsAndDependencies, JobItem, JobItemName, JobKind, JobKindAndDependencies, JobParams,
ToArgs, WriteArgs,
},
firrtl::{ExportOptions, FileBackend},
intern::{Intern, InternSlice, Interned},
util::job_server::AcquiredJob,
};
use clap::Args;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)]
pub struct FirrtlJobKind;
#[derive(Args, Debug, Clone, Hash, PartialEq, Eq)]
#[group(id = "Firrtl")]
#[non_exhaustive]
pub struct FirrtlArgs {
#[command(flatten)]
pub export_options: ExportOptions,
}
impl ToArgs for FirrtlArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self { export_options } = self;
export_options.to_args(args);
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct Firrtl {
base: BaseJob,
export_options: ExportOptions,
}
impl Firrtl {
fn make_firrtl_file_backend(&self) -> FileBackend {
FileBackend {
dir_path: PathBuf::from(&*self.base.output_dir()),
top_fir_file_stem: Some(self.base.file_stem().into()),
circuit_name: None,
}
}
pub fn firrtl_file(&self) -> Interned<Path> {
self.base.file_with_ext("fir")
}
}
impl JobKind for FirrtlJobKind {
type Args = FirrtlArgs;
type Job = Firrtl;
type Dependencies = JobKindAndDependencies<BaseJobKind>;
fn dependencies(self) -> Self::Dependencies {
JobKindAndDependencies::new(BaseJobKind)
}
fn args_to_jobs(
args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.args_to_jobs_simple(
params,
global_params,
|_kind, FirrtlArgs { export_options }, dependencies| {
Ok(Firrtl {
base: dependencies.get_job::<BaseJob, _>().clone(),
export_options,
})
},
)
}
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.base.output_dir(),
}]
.intern_slice()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.firrtl_file(),
}]
.intern_slice()
}
fn name(self) -> Interned<str> {
"firrtl".intern()
}
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
None
}
fn run(
self,
job: &Self::Job,
inputs: &[JobItem],
params: &JobParams,
_global_params: &GlobalParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
let [JobItem::Path { path: input_path }] = *inputs else {
panic!("wrong inputs, expected a single `Path`");
};
assert_eq!(input_path, job.base.output_dir());
crate::firrtl::export(
job.make_firrtl_file_backend(),
params.main_module(),
job.export_options,
)?;
Ok(vec![JobItem::Path {
path: job.firrtl_file(),
}])
}
}
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
[DynJobKind::new(FirrtlJobKind)]
}

View file

@ -0,0 +1,388 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GlobalParams,
JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem, JobItemName, JobKind,
JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
external::{
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
},
verilog::{UnadjustedVerilog, VerilogDialect, VerilogJob, VerilogJobKind},
},
intern::{Intern, InternSlice, Interned},
module::NameId,
testing::FormalMode,
util::job_server::AcquiredJob,
};
use clap::Args;
use eyre::Context;
use serde::{Deserialize, Serialize};
use std::{
ffi::{OsStr, OsString},
fmt::{self, Write},
path::Path,
};
#[derive(Args, Clone, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct FormalArgs {
#[arg(long = "sby-extra-arg", value_name = "ARG")]
pub sby_extra_args: Vec<OsString>,
#[arg(long, default_value_t)]
pub formal_mode: FormalMode,
#[arg(long, default_value_t = Self::DEFAULT_DEPTH)]
pub formal_depth: u64,
#[arg(long, default_value = Self::DEFAULT_SOLVER)]
pub formal_solver: String,
#[arg(long = "smtbmc-extra-arg", value_name = "ARG")]
pub smtbmc_extra_args: Vec<OsString>,
}
impl FormalArgs {
pub const DEFAULT_DEPTH: u64 = 20;
pub const DEFAULT_SOLVER: &'static str = "z3";
}
impl ToArgs for FormalArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self {
sby_extra_args,
formal_mode,
formal_depth,
formal_solver,
smtbmc_extra_args,
} = self;
for arg in sby_extra_args {
args.write_long_option_eq("sby-extra-arg", arg);
}
args.write_display_args([
format_args!("--formal-mode={formal_mode}"),
format_args!("--formal-depth={formal_depth}"),
format_args!("--formal-solver={formal_solver}"),
]);
for arg in smtbmc_extra_args {
args.write_long_option_eq("smtbmc-extra-arg", arg);
}
}
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct WriteSbyFileJobKind;
#[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub struct WriteSbyFileJob {
sby_extra_args: Interned<[Interned<OsStr>]>,
formal_mode: FormalMode,
formal_depth: u64,
formal_solver: Interned<str>,
smtbmc_extra_args: Interned<[Interned<OsStr>]>,
sby_file: Interned<Path>,
output_dir: Interned<Path>,
main_verilog_file: Interned<Path>,
}
impl WriteSbyFileJob {
pub fn sby_extra_args(&self) -> Interned<[Interned<OsStr>]> {
self.sby_extra_args
}
pub fn formal_mode(&self) -> FormalMode {
self.formal_mode
}
pub fn formal_depth(&self) -> u64 {
self.formal_depth
}
pub fn formal_solver(&self) -> Interned<str> {
self.formal_solver
}
pub fn smtbmc_extra_args(&self) -> Interned<[Interned<OsStr>]> {
self.smtbmc_extra_args
}
pub fn sby_file(&self) -> Interned<Path> {
self.sby_file
}
pub fn output_dir(&self) -> Interned<Path> {
self.output_dir
}
pub fn main_verilog_file(&self) -> Interned<Path> {
self.main_verilog_file
}
fn write_sby(
&self,
output: &mut OsString,
additional_files: &[Interned<Path>],
main_module_name_id: NameId,
) -> eyre::Result<()> {
let Self {
sby_extra_args: _,
formal_mode,
formal_depth,
formal_solver,
smtbmc_extra_args,
sby_file: _,
output_dir: _,
main_verilog_file,
} = self;
write!(
output,
"[options]\n\
mode {formal_mode}\n\
depth {formal_depth}\n\
wait on\n\
\n\
[engines]\n\
smtbmc {formal_solver} -- --"
)
.expect("writing to OsString can't fail");
for i in smtbmc_extra_args {
output.push(" ");
output.push(i);
}
output.push(
"\n\
\n\
[script]\n",
);
for verilog_file in VerilogJob::all_verilog_files(*main_verilog_file, additional_files)? {
output.push("read_verilog -sv -formal \"");
output.push(verilog_file);
output.push("\"\n");
}
let circuit_name = crate::firrtl::get_circuit_name(main_module_name_id);
// workaround for wires disappearing -- set `keep` on all wires
writeln!(
output,
"hierarchy -top {circuit_name}\n\
proc\n\
setattr -set keep 1 w:\\*\n\
prep",
)
.expect("writing to OsString can't fail");
Ok(())
}
}
impl JobKind for WriteSbyFileJobKind {
type Args = FormalArgs;
type Job = WriteSbyFileJob;
type Dependencies = JobKindAndDependencies<VerilogJobKind>;
fn dependencies(self) -> Self::Dependencies {
Default::default()
}
fn args_to_jobs(
mut args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.dependencies
.dependencies
.args
.args
.additional_args
.verilog_dialect
.get_or_insert(VerilogDialect::Yosys);
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
let FormalArgs {
sby_extra_args,
formal_mode,
formal_depth,
formal_solver,
smtbmc_extra_args,
} = args;
let base_job = dependencies.get_job::<BaseJob, _>();
Ok(WriteSbyFileJob {
sby_extra_args: sby_extra_args.into_iter().map(Interned::from).collect(),
formal_mode,
formal_depth,
formal_solver: formal_solver.intern_deref(),
smtbmc_extra_args: smtbmc_extra_args.into_iter().map(Interned::from).collect(),
sby_file: base_job.file_with_ext("sby"),
output_dir: base_job.output_dir(),
main_verilog_file: dependencies.get_job::<VerilogJob, _>().main_verilog_file(),
})
})
}
fn inputs(self, _job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::DynamicPaths {
source_job_name: VerilogJobKind.name(),
}]
.intern_slice()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path { path: job.sby_file }].intern_slice()
}
fn name(self) -> Interned<str> {
"write-sby-file".intern()
}
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
None
}
fn run(
self,
job: &Self::Job,
inputs: &[JobItem],
params: &JobParams,
_global_params: &GlobalParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
let [additional_files] = inputs else {
unreachable!();
};
let additional_files = VerilogJob::unwrap_additional_files(additional_files);
let mut contents = OsString::new();
job.write_sby(
&mut contents,
additional_files,
params.main_module().name_id(),
)?;
let path = job.sby_file;
std::fs::write(path, contents.as_encoded_bytes())
.wrap_err_with(|| format!("writing {path:?} failed"))?;
Ok(vec![JobItem::Path { path }])
}
fn subcommand_hidden(self) -> bool {
true
}
}
#[derive(Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
pub struct Formal {
#[serde(flatten)]
write_sby_file: WriteSbyFileJob,
sby_file_name: Interned<OsStr>,
}
impl fmt::Debug for Formal {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
write_sby_file:
WriteSbyFileJob {
sby_extra_args,
formal_mode,
formal_depth,
formal_solver,
smtbmc_extra_args,
sby_file,
output_dir: _,
main_verilog_file,
},
sby_file_name,
} = self;
f.debug_struct("Formal")
.field("sby_extra_args", sby_extra_args)
.field("formal_mode", formal_mode)
.field("formal_depth", formal_depth)
.field("formal_solver", formal_solver)
.field("smtbmc_extra_args", smtbmc_extra_args)
.field("sby_file", sby_file)
.field("sby_file_name", sby_file_name)
.field("main_verilog_file", main_verilog_file)
.finish_non_exhaustive()
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
pub struct Symbiyosys;
impl ExternalProgramTrait for Symbiyosys {
fn default_program_name() -> Interned<str> {
"sby".intern()
}
}
#[derive(Clone, Hash, PartialEq, Eq, Debug, Args)]
pub struct FormalAdditionalArgs {}
impl ToArgs for FormalAdditionalArgs {
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
let Self {} = self;
}
}
impl ExternalCommand for Formal {
type AdditionalArgs = FormalAdditionalArgs;
type AdditionalJobData = Formal;
type BaseJobPosition = GetJobPositionDependencies<
GetJobPositionDependencies<
GetJobPositionDependencies<<UnadjustedVerilog as ExternalCommand>::BaseJobPosition>,
>,
>;
type Dependencies = JobKindAndDependencies<WriteSbyFileJobKind>;
type ExternalProgram = Symbiyosys;
fn dependencies() -> Self::Dependencies {
Default::default()
}
fn args_to_jobs(
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<(
Self::AdditionalJobData,
<Self::Dependencies as JobDependencies>::JobsAndKinds,
)> {
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
let FormalAdditionalArgs {} = args.additional_args;
let write_sby_file = dependencies.get_job::<WriteSbyFileJob, _>().clone();
Ok(Formal {
sby_file_name: write_sby_file
.sby_file()
.interned_file_name()
.expect("known to have file name"),
write_sby_file,
})
})
}
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
[
JobItemName::Path {
path: job.additional_job_data().write_sby_file.sby_file(),
},
JobItemName::Path {
path: job.additional_job_data().write_sby_file.main_verilog_file(),
},
JobItemName::DynamicPaths {
source_job_name: VerilogJobKind.name(),
},
]
.intern_slice()
}
fn output_paths(_job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
Interned::default()
}
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
// args.write_str_arg("-j1"); // sby seems not to respect job count in parallel mode
args.write_arg("-f");
args.write_interned_arg(job.additional_job_data().sby_file_name);
args.write_interned_args(job.additional_job_data().write_sby_file.sby_extra_args());
}
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
Some(job.output_dir())
}
fn job_kind_name() -> Interned<str> {
"formal".intern()
}
}
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
[
DynJobKind::new(WriteSbyFileJobKind),
DynJobKind::new(ExternalCommandJobKind::<Formal>::new()),
]
}
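
For orientation, this is roughly the `.sby` file that `write_sby` above produces with the default depth (20) and solver (`z3`), no extra smtbmc arguments, and a single Verilog file. The mode line, module name, and file path are assumptions (they depend on `FormalMode`'s defaults and on the verilog job), shown here as a Rust constant so it stays in the diff's language:

```rust
// Illustrative sketch only, not taken from this diff; the mode line depends on
// `FormalMode`'s Default/Display impls and the path comes from the verilog job.
const EXAMPLE_SBY: &str = r#"[options]
mode bmc
depth 20
wait on

[engines]
smtbmc z3 -- --

[script]
read_verilog -sv -formal "/abs/path/to/top.v"
hierarchy -top top
proc
setattr -set keep 1 w:\*
prep
"#;
```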

View file

@@ -0,0 +1,847 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
DynJob, GlobalParams, JobItem, JobItemName, JobParams, program_name_for_internal_jobs,
},
intern::Interned,
platform::DynPlatform,
util::{HashMap, HashSet, job_server::AcquiredJob},
};
use eyre::{ContextCompat, eyre};
use petgraph::{
algo::{DfsSpace, kosaraju_scc, toposort},
graph::DiGraph,
visit::{GraphBase, Visitable},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error, ser::SerializeSeq};
use std::{
cell::OnceCell,
collections::{BTreeMap, BTreeSet, VecDeque},
convert::Infallible,
ffi::OsStr,
fmt::{self, Write},
panic,
rc::Rc,
str::Utf8Error,
sync::mpsc,
thread::{self, ScopedJoinHandle},
};
macro_rules! write_str {
($s:expr, $($rest:tt)*) => {
write!($s, $($rest)*).expect("String::write_fmt can't fail")
};
}
#[derive(Clone, Debug)]
enum JobGraphNode {
Job(DynJob),
Item {
#[allow(dead_code, reason = "name used for debugging")]
name: JobItemName,
source_job: Option<DynJob>,
},
}
type JobGraphInner = DiGraph<JobGraphNode, ()>;
#[derive(Clone, Default)]
pub struct JobGraph {
jobs: HashMap<DynJob, <JobGraphInner as GraphBase>::NodeId>,
items: HashMap<JobItemName, <JobGraphInner as GraphBase>::NodeId>,
graph: JobGraphInner,
topological_order: Vec<<JobGraphInner as GraphBase>::NodeId>,
space: DfsSpace<<JobGraphInner as GraphBase>::NodeId, <JobGraphInner as Visitable>::Map>,
}
impl fmt::Debug for JobGraph {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
jobs: _,
items: _,
graph,
topological_order,
space: _,
} = self;
f.debug_struct("JobGraph")
.field("graph", graph)
.field("topological_order", topological_order)
.finish_non_exhaustive()
}
}
#[derive(Clone, Debug)]
pub enum JobGraphError {
CycleError {
job: DynJob,
output: JobItemName,
},
MultipleJobsCreateSameOutput {
output_item: JobItemName,
existing_job: DynJob,
new_job: DynJob,
},
}
impl std::error::Error for JobGraphError {}
impl fmt::Display for JobGraphError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::CycleError { job, output } => write!(
f,
"job can't be added to job graph because it would introduce a cyclic dependency through this job output:\n\
{output:?}\n\
job:\n{job:?}",
),
JobGraphError::MultipleJobsCreateSameOutput {
output_item,
existing_job,
new_job,
} => write!(
f,
"job can't be added to job graph because the new job has an output that is also produced by an existing job.\n\
conflicting output:\n\
{output_item:?}\n\
existing job:\n\
{existing_job:?}\n\
new job:\n\
{new_job:?}",
),
}
}
}
#[derive(Copy, Clone, Debug)]
enum EscapeForUnixShellState {
DollarSingleQuote,
SingleQuote,
Unquoted,
}
#[derive(Clone)]
pub struct EscapeForUnixShell<'a> {
state: EscapeForUnixShellState,
prefix: [u8; 3],
bytes: &'a [u8],
}
impl<'a> fmt::Debug for EscapeForUnixShell<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl<'a> fmt::Display for EscapeForUnixShell<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for c in self.clone() {
f.write_char(c)?;
}
Ok(())
}
}
impl<'a> EscapeForUnixShell<'a> {
pub fn new(s: &'a (impl ?Sized + AsRef<OsStr>)) -> Self {
Self::from_bytes(s.as_ref().as_encoded_bytes())
}
fn make_prefix(bytes: &[u8]) -> [u8; 3] {
let mut prefix = [0; 3];
prefix[..bytes.len()].copy_from_slice(bytes);
prefix
}
pub fn from_bytes(bytes: &'a [u8]) -> Self {
let mut needs_single_quote = bytes.is_empty();
for &b in bytes {
match b {
b'!' | b'\'' | b'\"' | b' ' => needs_single_quote = true,
0..0x20 | 0x7F.. => {
return Self {
state: EscapeForUnixShellState::DollarSingleQuote,
prefix: Self::make_prefix(b"$'"),
bytes,
};
}
_ => {}
}
}
if needs_single_quote {
Self {
state: EscapeForUnixShellState::SingleQuote,
prefix: Self::make_prefix(b"'"),
bytes,
}
} else {
Self {
state: EscapeForUnixShellState::Unquoted,
prefix: Self::make_prefix(b""),
bytes,
}
}
}
}
impl Iterator for EscapeForUnixShell<'_> {
type Item = char;
fn next(&mut self) -> Option<Self::Item> {
match &mut self.prefix {
[0, 0, 0] => {}
[0, 0, v] | // find first
[0, v, _] | // non-zero byte
[v, _, _] => {
let retval = *v as char;
*v = 0;
return Some(retval);
}
}
let Some(&next_byte) = self.bytes.split_off_first() else {
return match self.state {
EscapeForUnixShellState::DollarSingleQuote
| EscapeForUnixShellState::SingleQuote => {
self.state = EscapeForUnixShellState::Unquoted;
Some('\'')
}
EscapeForUnixShellState::Unquoted => None,
};
};
match self.state {
EscapeForUnixShellState::DollarSingleQuote => match next_byte {
b'\'' | b'\\' => {
self.prefix = Self::make_prefix(&[next_byte]);
Some('\\')
}
b'\t' => {
self.prefix = Self::make_prefix(b"t");
Some('\\')
}
b'\n' => {
self.prefix = Self::make_prefix(b"n");
Some('\\')
}
b'\r' => {
self.prefix = Self::make_prefix(b"r");
Some('\\')
}
0x20..=0x7E => Some(next_byte as char),
_ => {
self.prefix = [
b'x',
char::from_digit(next_byte as u32 >> 4, 0x10).expect("known to be in range")
as u8,
char::from_digit(next_byte as u32 & 0xF, 0x10)
.expect("known to be in range") as u8,
];
Some('\\')
}
},
EscapeForUnixShellState::SingleQuote => {
if next_byte == b'\'' {
self.prefix = Self::make_prefix(b"\\''");
Some('\'')
} else {
Some(next_byte as char)
}
}
EscapeForUnixShellState::Unquoted => match next_byte {
b' ' | b'!' | b'"' | b'#' | b'$' | b'&' | b'\'' | b'(' | b')' | b'*' | b','
| b';' | b'<' | b'>' | b'?' | b'[' | b'\\' | b']' | b'^' | b'`' | b'{' | b'|'
| b'}' | b'~' => {
self.prefix = Self::make_prefix(&[next_byte]);
Some('\\')
}
_ => Some(next_byte as char),
},
}
}
}
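
To make the three escaping states above concrete, here are doctest-style checks of the output the iterator logic implies; the import path is an assumption, and the expected strings are derived from the code rather than taken from this diff:

```rust
use fayalite::build::EscapeForUnixShell; // assumed re-export path

#[test]
fn escape_for_unix_shell_examples() {
    // no special characters: left unquoted
    assert_eq!(EscapeForUnixShell::new("abc").to_string(), "abc");
    // spaces, quotes, or `!`: wrapped in single quotes
    assert_eq!(EscapeForUnixShell::new("hello world").to_string(), "'hello world'");
    // an embedded single quote becomes the classic '\'' sequence
    assert_eq!(EscapeForUnixShell::new("it's").to_string(), r"'it'\''s'");
    // control characters switch to $'...' with backslash escapes
    assert_eq!(EscapeForUnixShell::new("a\nb").to_string(), r"$'a\nb'");
}
```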
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
#[non_exhaustive]
pub enum UnixMakefileEscapeKind {
NonRecipe,
RecipeWithoutShellEscaping,
RecipeWithShellEscaping,
}
#[derive(Copy, Clone)]
pub struct EscapeForUnixMakefile<'a> {
s: &'a OsStr,
kind: UnixMakefileEscapeKind,
}
impl<'a> fmt::Debug for EscapeForUnixMakefile<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl<'a> fmt::Display for EscapeForUnixMakefile<'a> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.do_write(
f,
fmt::Write::write_str,
fmt::Write::write_char,
|_, _| Ok(()),
|_| unreachable!("already checked that the input causes no UTF-8 errors"),
)
}
}
impl<'a> EscapeForUnixMakefile<'a> {
fn do_write<S: ?Sized, E>(
&self,
state: &mut S,
write_str: impl Fn(&mut S, &str) -> Result<(), E>,
write_char: impl Fn(&mut S, char) -> Result<(), E>,
add_variable: impl Fn(&mut S, &'static str) -> Result<(), E>,
utf8_error: impl Fn(Utf8Error) -> E,
) -> Result<(), E> {
let escape_recipe_char = |c| match c {
'$' => write_str(state, "$$"),
'\0'..='\x1F' | '\x7F' => {
panic!("can't escape a control character for Unix Makefile: {c:?}");
}
_ => write_char(state, c),
};
match self.kind {
UnixMakefileEscapeKind::NonRecipe => str::from_utf8(self.s.as_encoded_bytes())
.map_err(&utf8_error)?
.chars()
.try_for_each(|c| match c {
'=' => {
add_variable(state, "EQUALS = =")?;
write_str(state, "$(EQUALS)")
}
';' => panic!("can't escape a semicolon (;) for Unix Makefile"),
'$' => write_str(state, "$$"),
'\\' | ' ' | '#' | ':' | '%' | '*' | '?' | '[' | ']' | '~' => {
write_char(state, '\\')?;
write_char(state, c)
}
'\0'..='\x1F' | '\x7F' => {
panic!("can't escape a control character for Unix Makefile: {c:?}");
}
_ => write_char(state, c),
}),
UnixMakefileEscapeKind::RecipeWithoutShellEscaping => {
str::from_utf8(self.s.as_encoded_bytes())
.map_err(&utf8_error)?
.chars()
.try_for_each(escape_recipe_char)
}
UnixMakefileEscapeKind::RecipeWithShellEscaping => {
EscapeForUnixShell::new(self.s).try_for_each(escape_recipe_char)
}
}
}
pub fn new(
s: &'a (impl ?Sized + AsRef<OsStr>),
kind: UnixMakefileEscapeKind,
needed_variables: &mut BTreeSet<&'static str>,
) -> Result<Self, Utf8Error> {
let s = s.as_ref();
let retval = Self { s, kind };
retval.do_write(
needed_variables,
|_, _| Ok(()),
|_, _| Ok(()),
|needed_variables, variable| {
needed_variables.insert(variable);
Ok(())
},
|e| e,
)?;
Ok(retval)
}
}
impl JobGraph {
pub fn new() -> Self {
Self::default()
}
fn try_add_item_node(
&mut self,
name: JobItemName,
new_source_job: Option<DynJob>,
new_nodes: &mut HashSet<<JobGraphInner as GraphBase>::NodeId>,
) -> Result<<JobGraphInner as GraphBase>::NodeId, JobGraphError> {
use hashbrown::hash_map::Entry;
match self.items.entry(name) {
Entry::Occupied(item_entry) => {
let node_id = *item_entry.get();
let JobGraphNode::Item {
name: _,
source_job,
} = &mut self.graph[node_id]
else {
unreachable!("known to be an item");
};
if let Some(new_source_job) = new_source_job {
if let Some(source_job) = source_job {
return Err(JobGraphError::MultipleJobsCreateSameOutput {
output_item: item_entry.key().clone(),
existing_job: source_job.clone(),
new_job: new_source_job,
});
} else {
*source_job = Some(new_source_job);
}
}
Ok(node_id)
}
Entry::Vacant(item_entry) => {
let node_id = self.graph.add_node(JobGraphNode::Item {
name,
source_job: new_source_job,
});
new_nodes.insert(node_id);
item_entry.insert(node_id);
Ok(node_id)
}
}
}
pub fn try_add_jobs<I: IntoIterator<Item = DynJob>>(
&mut self,
jobs: I,
) -> Result<(), JobGraphError> {
use hashbrown::hash_map::Entry;
let jobs = jobs.into_iter();
struct RemoveNewNodesOnError<'a> {
this: &'a mut JobGraph,
new_nodes: HashSet<<JobGraphInner as GraphBase>::NodeId>,
}
impl Drop for RemoveNewNodesOnError<'_> {
fn drop(&mut self) {
for node in self.new_nodes.drain() {
self.this.graph.remove_node(node);
}
}
}
let mut remove_new_nodes_on_error = RemoveNewNodesOnError {
this: self,
new_nodes: HashSet::with_capacity_and_hasher(jobs.size_hint().0, Default::default()),
};
let new_nodes = &mut remove_new_nodes_on_error.new_nodes;
let this = &mut *remove_new_nodes_on_error.this;
for job in jobs {
let Entry::Vacant(job_entry) = this.jobs.entry(job.clone()) else {
continue;
};
let job_node_id = this
.graph
.add_node(JobGraphNode::Job(job_entry.key().clone()));
new_nodes.insert(job_node_id);
job_entry.insert(job_node_id);
for name in job.outputs() {
let item_node_id = this.try_add_item_node(name, Some(job.clone()), new_nodes)?;
this.graph.add_edge(job_node_id, item_node_id, ());
}
for name in job.inputs() {
let item_node_id = this.try_add_item_node(name, None, new_nodes)?;
this.graph.add_edge(item_node_id, job_node_id, ());
}
}
match toposort(&this.graph, Some(&mut this.space)) {
Ok(v) => {
this.topological_order = v;
// no need to remove any of the new nodes on drop since we didn't encounter any errors
remove_new_nodes_on_error.new_nodes.clear();
Ok(())
}
Err(_) => {
// there's at least one cycle, find one!
let cycle = kosaraju_scc(&this.graph)
.into_iter()
.find_map(|scc| {
if scc.len() <= 1 {
// can't be a cycle since our graph is bipartite --
// jobs only connect to items, never jobs to jobs or items to items
None
} else {
Some(scc)
}
})
.expect("we know there's a cycle");
let cycle_set = HashSet::from_iter(cycle.iter().copied());
let job = cycle
.into_iter()
.find_map(|node_id| {
if let JobGraphNode::Job(job) = &this.graph[node_id] {
Some(job.clone())
} else {
None
}
})
.expect("a job must be part of the cycle");
let output = job
.outputs()
.into_iter()
.find(|output| cycle_set.contains(&this.items[output]))
.expect("an output must be part of the cycle");
Err(JobGraphError::CycleError { job, output })
}
}
}
#[track_caller]
pub fn add_jobs<I: IntoIterator<Item = DynJob>>(&mut self, jobs: I) {
match self.try_add_jobs(jobs) {
Ok(()) => {}
Err(e) => panic!("error: {e}"),
}
}
pub fn to_unix_makefile(
&self,
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> Result<String, Utf8Error> {
self.to_unix_makefile_with_internal_program_prefix(
&[program_name_for_internal_jobs()],
platform,
extra_args,
)
}
pub fn to_unix_makefile_with_internal_program_prefix(
&self,
internal_program_prefix: &[Interned<OsStr>],
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> Result<String, Utf8Error> {
let mut retval = String::new();
let mut needed_variables = BTreeSet::new();
let mut phony_targets = BTreeSet::new();
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
let outputs = job.outputs();
if outputs.is_empty() {
retval.push_str(":");
} else {
for output in job.outputs() {
match output {
JobItemName::Path { path } => {
write_str!(
retval,
"{} ",
EscapeForUnixMakefile::new(
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)?
);
}
JobItemName::DynamicPaths { source_job_name } => {
write_str!(
retval,
"{} ",
EscapeForUnixMakefile::new(
&source_job_name,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)?
);
phony_targets.insert(Interned::into_inner(source_job_name));
}
}
}
if outputs.len() == 1 {
retval.push_str(":");
} else {
retval.push_str("&:");
}
}
for input in job.inputs() {
match input {
JobItemName::Path { path } => {
write_str!(
retval,
" {}",
EscapeForUnixMakefile::new(
&str::from_utf8(path.as_os_str().as_encoded_bytes())?,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)?
);
}
JobItemName::DynamicPaths { source_job_name } => {
write_str!(
retval,
" {}",
EscapeForUnixMakefile::new(
&source_job_name,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)?
);
phony_targets.insert(Interned::into_inner(source_job_name));
}
}
}
retval.push_str("\n\t");
job.command_params_with_internal_program_prefix(
internal_program_prefix,
platform,
extra_args,
)
.to_unix_shell_line(&mut retval, |arg, output| {
write_str!(
output,
"{}",
EscapeForUnixMakefile::new(
arg,
UnixMakefileEscapeKind::RecipeWithShellEscaping,
&mut needed_variables
)?
);
Ok(())
})?;
retval.push_str("\n\n");
}
if !phony_targets.is_empty() {
retval.push_str("\n.PHONY:");
for phony_target in phony_targets {
write_str!(
retval,
" {}",
EscapeForUnixMakefile::new(
phony_target,
UnixMakefileEscapeKind::NonRecipe,
&mut needed_variables
)?
);
}
retval.push_str("\n");
}
if !needed_variables.is_empty() {
retval.insert_str(
0,
&String::from_iter(needed_variables.into_iter().map(|v| format!("{v}\n"))),
);
}
Ok(retval)
}
pub fn to_unix_shell_script(
&self,
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> String {
self.to_unix_shell_script_with_internal_program_prefix(
&[program_name_for_internal_jobs()],
platform,
extra_args,
)
}
pub fn to_unix_shell_script_with_internal_program_prefix(
&self,
internal_program_prefix: &[Interned<OsStr>],
platform: Option<&DynPlatform>,
extra_args: &[Interned<OsStr>],
) -> String {
let mut retval = String::from(
"#!/bin/sh\n\
set -ex\n",
);
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
let Ok(()) = job
.command_params_with_internal_program_prefix(
internal_program_prefix,
platform,
extra_args,
)
.to_unix_shell_line(&mut retval, |arg, output| -> Result<(), Infallible> {
write_str!(output, "{}", EscapeForUnixShell::new(&arg));
Ok(())
});
retval.push_str("\n");
}
retval
}
pub fn run(&self, params: &JobParams, global_params: &GlobalParams) -> eyre::Result<()> {
// use scope to auto-join threads on errors
thread::scope(|scope| {
struct WaitingJobState {
job_node_id: <JobGraphInner as GraphBase>::NodeId,
job: DynJob,
inputs: BTreeMap<JobItemName, OnceCell<JobItem>>,
}
let mut ready_jobs = VecDeque::new();
let mut item_name_to_waiting_jobs_map = HashMap::<_, Vec<_>>::default();
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
let waiting_job = WaitingJobState {
job_node_id: node_id,
job: job.clone(),
inputs: job
.inputs()
.iter()
.map(|&name| (name, OnceCell::new()))
.collect(),
};
if waiting_job.inputs.is_empty() {
ready_jobs.push_back(waiting_job);
} else {
let waiting_job = Rc::new(waiting_job);
for &input_item in waiting_job.inputs.keys() {
item_name_to_waiting_jobs_map
.entry(input_item)
.or_default()
.push(waiting_job.clone());
}
}
}
struct RunningJob<'scope> {
job: DynJob,
thread: ScopedJoinHandle<'scope, eyre::Result<Vec<JobItem>>>,
}
let mut running_jobs = HashMap::default();
let (finished_jobs_sender, finished_jobs_receiver) = mpsc::channel();
loop {
while let Some(finished_job) = finished_jobs_receiver.try_recv().ok() {
let Some(RunningJob { job, thread }) = running_jobs.remove(&finished_job)
else {
unreachable!();
};
let output_items = thread.join().map_err(panic::resume_unwind)??;
assert!(
output_items.iter().map(JobItem::name).eq(job.outputs()),
"job's run() method returned the wrong output items:\n\
output items:\n\
{output_items:?}\n\
expected outputs:\n\
{:?}\n\
job:\n\
{job:?}",
job.outputs(),
);
for output_item in output_items {
for waiting_job in item_name_to_waiting_jobs_map
.remove(&output_item.name())
.unwrap_or_default()
{
let Ok(()) =
waiting_job.inputs[&output_item.name()].set(output_item.clone())
else {
unreachable!();
};
if let Some(waiting_job) = Rc::into_inner(waiting_job) {
ready_jobs.push_back(waiting_job);
}
}
}
}
if let Some(WaitingJobState {
job_node_id,
job,
inputs,
}) = ready_jobs.pop_front()
{
struct RunningJobInThread<'a> {
job_node_id: <JobGraphInner as GraphBase>::NodeId,
job: DynJob,
inputs: Vec<JobItem>,
params: &'a JobParams,
global_params: &'a GlobalParams,
acquired_job: AcquiredJob,
finished_jobs_sender: mpsc::Sender<<JobGraphInner as GraphBase>::NodeId>,
}
impl RunningJobInThread<'_> {
fn run(mut self) -> eyre::Result<Vec<JobItem>> {
self.job.run(
&self.inputs,
self.params,
self.global_params,
&mut self.acquired_job,
)
}
}
impl Drop for RunningJobInThread<'_> {
fn drop(&mut self) {
let _ = self.finished_jobs_sender.send(self.job_node_id);
}
}
let name = job.kind().name();
let running_job_in_thread = RunningJobInThread {
job_node_id,
job: job.clone(),
inputs: Result::from_iter(job.inputs().iter().map(|input_name| {
inputs.get(input_name).and_then(|v| v.get().cloned()).wrap_err_with(|| {
eyre!("failed when trying to run job {name}: nothing provided the input item: {input_name:?}")
})
}))?,
params,
global_params,
acquired_job: AcquiredJob::acquire()?,
finished_jobs_sender: finished_jobs_sender.clone(),
};
running_jobs.insert(
job_node_id,
RunningJob {
job,
thread: thread::Builder::new()
.name(format!("job:{name}"))
.spawn_scoped(scope, move || running_job_in_thread.run())
.expect("failed to spawn thread for job"),
},
);
}
if running_jobs.is_empty() {
assert!(item_name_to_waiting_jobs_map.is_empty());
assert!(ready_jobs.is_empty());
return Ok(());
}
}
})
}
}
impl Extend<DynJob> for JobGraph {
#[track_caller]
fn extend<T: IntoIterator<Item = DynJob>>(&mut self, iter: T) {
self.add_jobs(iter);
}
}
impl FromIterator<DynJob> for JobGraph {
#[track_caller]
fn from_iter<T: IntoIterator<Item = DynJob>>(iter: T) -> Self {
let mut retval = Self::new();
retval.add_jobs(iter);
retval
}
}
impl Serialize for JobGraph {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut serializer = serializer.serialize_seq(Some(self.jobs.len()))?;
for &node_id in &self.topological_order {
let JobGraphNode::Job(job) = &self.graph[node_id] else {
continue;
};
serializer.serialize_element(job)?;
}
serializer.end()
}
}
impl<'de> Deserialize<'de> for JobGraph {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let jobs = Vec::<DynJob>::deserialize(deserializer)?;
let mut retval = JobGraph::new();
retval.try_add_jobs(jobs).map_err(D::Error::custom)?;
Ok(retval)
}
}
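
A minimal usage sketch for the graph API above; the module paths and the way the `DynJob`s are obtained are assumptions:

```rust
use std::ffi::OsStr;
use fayalite::build::{DynJob, JobGraph}; // assumed re-export paths
use fayalite::intern::Interned;

// Collect jobs into a graph (this panics via `add_jobs` if the jobs have a
// dependency cycle or two jobs produce the same output), then emit a Makefile
// and a shell script describing the same build.
fn emit_build_files(jobs: Vec<DynJob>) -> Result<(String, String), std::str::Utf8Error> {
    let graph: JobGraph = jobs.into_iter().collect();
    let extra_args: &[Interned<OsStr>] = &[];
    let makefile = graph.to_unix_makefile(None, extra_args)?;
    let script = graph.to_unix_shell_script(None, extra_args);
    Ok((makefile, script))
}
```

Running the jobs directly instead goes through `JobGraph::run`, which spawns one scoped thread per ready job and feeds each finished job's output items to the jobs waiting on them.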

View file

@@ -0,0 +1,313 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{DynJobKind, JobKind, built_in_job_kinds},
intern::Interned,
util::InternedStrCompareAsStr,
};
use std::{
collections::BTreeMap,
fmt,
sync::{Arc, OnceLock, RwLock, RwLockWriteGuard},
};
impl DynJobKind {
pub fn registry() -> JobKindRegistrySnapshot {
JobKindRegistrySnapshot(JobKindRegistry::get())
}
#[track_caller]
pub fn register(self) {
JobKindRegistry::register(JobKindRegistry::lock(), self);
}
}
#[derive(Clone, Debug)]
struct JobKindRegistry {
job_kinds: BTreeMap<InternedStrCompareAsStr, DynJobKind>,
}
enum JobKindRegisterError {
SameName {
name: InternedStrCompareAsStr,
old_job_kind: DynJobKind,
new_job_kind: DynJobKind,
},
}
impl fmt::Display for JobKindRegisterError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::SameName {
name,
old_job_kind,
new_job_kind,
} => write!(
f,
"two different `JobKind` can't share the same name:\n\
{name:?}\n\
old job kind:\n\
{old_job_kind:?}\n\
new job kind:\n\
{new_job_kind:?}",
),
}
}
}
trait JobKindRegistryRegisterLock {
type Locked;
fn lock(self) -> Self::Locked;
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry;
}
impl JobKindRegistryRegisterLock for &'static RwLock<Arc<JobKindRegistry>> {
type Locked = RwLockWriteGuard<'static, Arc<JobKindRegistry>>;
fn lock(self) -> Self::Locked {
self.write().expect("shouldn't be poisoned")
}
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
Arc::make_mut(locked)
}
}
impl JobKindRegistryRegisterLock for &'_ mut JobKindRegistry {
type Locked = Self;
fn lock(self) -> Self::Locked {
self
}
fn make_mut(locked: &mut Self::Locked) -> &mut JobKindRegistry {
locked
}
}
impl JobKindRegistry {
fn lock() -> &'static RwLock<Arc<Self>> {
static REGISTRY: OnceLock<RwLock<Arc<JobKindRegistry>>> = OnceLock::new();
REGISTRY.get_or_init(Default::default)
}
fn try_register<L: JobKindRegistryRegisterLock>(
lock: L,
job_kind: DynJobKind,
) -> Result<(), JobKindRegisterError> {
use std::collections::btree_map::Entry;
let name = InternedStrCompareAsStr(job_kind.name());
// run user code only outside of lock
let mut locked = lock.lock();
let this = L::make_mut(&mut locked);
let result = match this.job_kinds.entry(name) {
Entry::Occupied(entry) => Err(JobKindRegisterError::SameName {
name,
old_job_kind: entry.get().clone(),
new_job_kind: job_kind,
}),
Entry::Vacant(entry) => {
entry.insert(job_kind);
Ok(())
}
};
drop(locked);
// outside of lock now, so we can test if it's the same DynJobKind
match result {
Err(JobKindRegisterError::SameName {
name: _,
old_job_kind,
new_job_kind,
}) if old_job_kind == new_job_kind => Ok(()),
result => result,
}
}
#[track_caller]
fn register<L: JobKindRegistryRegisterLock>(lock: L, job_kind: DynJobKind) {
match Self::try_register(lock, job_kind) {
Err(e) => panic!("{e}"),
Ok(()) => {}
}
}
fn get() -> Arc<Self> {
Self::lock().read().expect("shouldn't be poisoned").clone()
}
}
impl Default for JobKindRegistry {
fn default() -> Self {
let mut retval = Self {
job_kinds: BTreeMap::new(),
};
for job_kind in built_in_job_kinds() {
Self::register(&mut retval, job_kind);
}
retval
}
}
#[derive(Clone, Debug)]
pub struct JobKindRegistrySnapshot(Arc<JobKindRegistry>);
impl JobKindRegistrySnapshot {
pub fn get() -> Self {
JobKindRegistrySnapshot(JobKindRegistry::get())
}
pub fn get_by_name<'a>(&'a self, name: &str) -> Option<&'a DynJobKind> {
self.0.job_kinds.get(name)
}
pub fn iter_with_names(&self) -> JobKindRegistryIterWithNames<'_> {
JobKindRegistryIterWithNames(self.0.job_kinds.iter())
}
pub fn iter(&self) -> JobKindRegistryIter<'_> {
JobKindRegistryIter(self.0.job_kinds.values())
}
}
impl<'a> IntoIterator for &'a JobKindRegistrySnapshot {
type Item = &'a DynJobKind;
type IntoIter = JobKindRegistryIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a> IntoIterator for &'a mut JobKindRegistrySnapshot {
type Item = &'a DynJobKind;
type IntoIter = JobKindRegistryIter<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
#[derive(Clone, Debug)]
pub struct JobKindRegistryIter<'a>(
std::collections::btree_map::Values<'a, InternedStrCompareAsStr, DynJobKind>,
);
impl<'a> Iterator for JobKindRegistryIter<'a> {
type Item = &'a DynJobKind;
fn next(&mut self) -> Option<Self::Item> {
self.0.next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
fn count(self) -> usize
where
Self: Sized,
{
self.0.count()
}
fn last(self) -> Option<Self::Item> {
self.0.last()
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.0.nth(n)
}
fn fold<B, F>(self, init: B, f: F) -> B
where
F: FnMut(B, Self::Item) -> B,
{
self.0.fold(init, f)
}
}
impl<'a> std::iter::FusedIterator for JobKindRegistryIter<'a> {}
impl<'a> ExactSizeIterator for JobKindRegistryIter<'a> {}
impl<'a> DoubleEndedIterator for JobKindRegistryIter<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.0.next_back()
}
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
self.0.nth_back(n)
}
fn rfold<B, F>(self, init: B, f: F) -> B
where
F: FnMut(B, Self::Item) -> B,
{
self.0.rfold(init, f)
}
}
#[derive(Clone, Debug)]
pub struct JobKindRegistryIterWithNames<'a>(
std::collections::btree_map::Iter<'a, InternedStrCompareAsStr, DynJobKind>,
);
impl<'a> Iterator for JobKindRegistryIterWithNames<'a> {
type Item = (Interned<str>, &'a DynJobKind);
fn next(&mut self) -> Option<Self::Item> {
self.0.next().map(|(name, job_kind)| (name.0, job_kind))
}
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
fn count(self) -> usize
where
Self: Sized,
{
self.0.count()
}
fn last(self) -> Option<Self::Item> {
self.0.last().map(|(name, job_kind)| (name.0, job_kind))
}
fn nth(&mut self, n: usize) -> Option<Self::Item> {
self.0.nth(n).map(|(name, job_kind)| (name.0, job_kind))
}
fn fold<B, F>(self, init: B, f: F) -> B
where
F: FnMut(B, Self::Item) -> B,
{
self.0
.map(|(name, job_kind)| (name.0, job_kind))
.fold(init, f)
}
}
impl<'a> std::iter::FusedIterator for JobKindRegistryIterWithNames<'a> {}
impl<'a> ExactSizeIterator for JobKindRegistryIterWithNames<'a> {}
impl<'a> DoubleEndedIterator for JobKindRegistryIterWithNames<'a> {
fn next_back(&mut self) -> Option<Self::Item> {
self.0
.next_back()
.map(|(name, job_kind)| (name.0, job_kind))
}
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
self.0
.nth_back(n)
.map(|(name, job_kind)| (name.0, job_kind))
}
fn rfold<B, F>(self, init: B, f: F) -> B
where
F: FnMut(B, Self::Item) -> B,
{
self.0
.map(|(name, job_kind)| (name.0, job_kind))
.rfold(init, f)
}
}
#[track_caller]
pub fn register_job_kind<K: JobKind>(kind: K) {
DynJobKind::new(kind).register();
}
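
A small sketch of reading the registry above; the re-export paths are assumptions:

```rust
use fayalite::build::DynJobKind; // assumed re-export path
use fayalite::intern::Interned;

// Take a snapshot of the registry and list the names of all registered kinds.
// Built-in kinds ("firrtl", "verilog", "formal", ...) are added by
// `JobKindRegistry::default()`, and `register_job_kind` adds custom ones.
fn registered_job_kind_names() -> Vec<Interned<str>> {
    let registry = DynJobKind::registry();
    registry.iter_with_names().map(|(name, _kind)| name).collect()
}
```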

View file

@@ -0,0 +1,418 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
BaseJob, CommandParams, DynJobKind, GetJobPositionDependencies, GetJobPositionJob,
GlobalParams, JobAndDependencies, JobArgsAndDependencies, JobDependencies, JobItem,
JobItemName, JobKind, JobKindAndDependencies, JobParams, ToArgs, WriteArgs,
external::{
ExternalCommand, ExternalCommandJob, ExternalCommandJobKind, ExternalProgramTrait,
},
firrtl::{Firrtl, FirrtlJobKind},
},
intern::{Intern, InternSlice, Interned},
util::job_server::AcquiredJob,
};
use clap::Args;
use eyre::{Context, bail};
use serde::{Deserialize, Serialize};
use std::{
ffi::{OsStr, OsString},
fmt, mem,
path::Path,
};
/// based on [LLVM Circt's recommended lowering options][lowering-options]
///
/// [lowering-options]: https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target
#[derive(clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[non_exhaustive]
pub enum VerilogDialect {
Questa,
Spyglass,
Verilator,
Vivado,
Yosys,
}
impl fmt::Display for VerilogDialect {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.as_str())
}
}
impl VerilogDialect {
pub fn as_str(self) -> &'static str {
match self {
VerilogDialect::Questa => "questa",
VerilogDialect::Spyglass => "spyglass",
VerilogDialect::Verilator => "verilator",
VerilogDialect::Vivado => "vivado",
VerilogDialect::Yosys => "yosys",
}
}
pub fn firtool_extra_args(self) -> &'static [&'static str] {
match self {
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
VerilogDialect::Spyglass => {
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
}
VerilogDialect::Verilator => &[
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
],
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
VerilogDialect::Yosys => {
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
}
}
}
}
#[derive(Args, Debug, Clone, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct UnadjustedVerilogArgs {
#[arg(long = "firtool-extra-arg", value_name = "ARG")]
pub firtool_extra_args: Vec<OsString>,
/// adapt the generated Verilog for a particular toolchain
#[arg(long)]
pub verilog_dialect: Option<VerilogDialect>,
#[arg(long)]
pub verilog_debug: bool,
}
impl ToArgs for UnadjustedVerilogArgs {
fn to_args(&self, args: &mut (impl WriteArgs + ?Sized)) {
let Self {
ref firtool_extra_args,
verilog_dialect,
verilog_debug,
} = *self;
for arg in firtool_extra_args {
args.write_long_option_eq("firtool-extra-arg", arg);
}
if let Some(verilog_dialect) = verilog_dialect {
args.write_long_option_eq("verilog-dialect", verilog_dialect.as_str());
}
if verilog_debug {
args.write_arg("--verilog-debug");
}
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
pub struct Firtool;
impl ExternalProgramTrait for Firtool {
fn default_program_name() -> Interned<str> {
"firtool".intern()
}
}
#[derive(Clone, PartialEq, Eq, Hash, Debug, Deserialize, Serialize)]
pub struct UnadjustedVerilog {
firrtl_file: Interned<Path>,
firrtl_file_name: Interned<OsStr>,
unadjusted_verilog_file: Interned<Path>,
unadjusted_verilog_file_name: Interned<OsStr>,
firtool_extra_args: Interned<[Interned<OsStr>]>,
verilog_dialect: Option<VerilogDialect>,
verilog_debug: bool,
}
impl UnadjustedVerilog {
pub fn firrtl_file(&self) -> Interned<Path> {
self.firrtl_file
}
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
self.unadjusted_verilog_file
}
pub fn firtool_extra_args(&self) -> Interned<[Interned<OsStr>]> {
self.firtool_extra_args
}
pub fn verilog_dialect(&self) -> Option<VerilogDialect> {
self.verilog_dialect
}
pub fn verilog_debug(&self) -> bool {
self.verilog_debug
}
}
impl ExternalCommand for UnadjustedVerilog {
type AdditionalArgs = UnadjustedVerilogArgs;
type AdditionalJobData = UnadjustedVerilog;
type BaseJobPosition = GetJobPositionDependencies<GetJobPositionJob>;
type Dependencies = JobKindAndDependencies<FirrtlJobKind>;
type ExternalProgram = Firtool;
fn dependencies() -> Self::Dependencies {
Default::default()
}
fn args_to_jobs(
args: JobArgsAndDependencies<ExternalCommandJobKind<Self>>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<(
Self::AdditionalJobData,
<Self::Dependencies as JobDependencies>::JobsAndKinds,
)> {
args.args_to_jobs_external_simple(params, global_params, |args, dependencies| {
let UnadjustedVerilogArgs {
firtool_extra_args,
verilog_dialect,
verilog_debug,
} = args.additional_args;
let unadjusted_verilog_file = dependencies
.dependencies
.job
.job
.file_with_ext("unadjusted.v");
let firrtl_job = dependencies.get_job::<Firrtl, _>();
Ok(UnadjustedVerilog {
firrtl_file: firrtl_job.firrtl_file(),
firrtl_file_name: firrtl_job
.firrtl_file()
.interned_file_name()
.expect("known to have file name"),
unadjusted_verilog_file,
unadjusted_verilog_file_name: unadjusted_verilog_file
.interned_file_name()
.expect("known to have file name"),
firtool_extra_args: firtool_extra_args.into_iter().map(Interned::from).collect(),
verilog_dialect,
verilog_debug,
})
})
}
fn inputs(job: &ExternalCommandJob<Self>) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.additional_job_data().firrtl_file,
}]
.intern_slice()
}
fn output_paths(job: &ExternalCommandJob<Self>) -> Interned<[Interned<Path>]> {
[job.additional_job_data().unadjusted_verilog_file].intern_slice()
}
fn command_line_args<W: ?Sized + WriteArgs>(job: &ExternalCommandJob<Self>, args: &mut W) {
let UnadjustedVerilog {
firrtl_file: _,
firrtl_file_name,
unadjusted_verilog_file: _,
unadjusted_verilog_file_name,
firtool_extra_args,
verilog_dialect,
verilog_debug,
} = *job.additional_job_data();
args.write_interned_arg(firrtl_file_name);
args.write_arg("-o");
args.write_interned_arg(unadjusted_verilog_file_name);
if verilog_debug {
args.write_args(["-g", "--preserve-values=all"]);
}
if let Some(dialect) = verilog_dialect {
args.write_args(dialect.firtool_extra_args().iter().copied());
}
args.write_interned_args(firtool_extra_args);
}
fn current_dir(job: &ExternalCommandJob<Self>) -> Option<Interned<Path>> {
Some(job.output_dir())
}
fn job_kind_name() -> Interned<str> {
"unadjusted-verilog".intern()
}
fn subcommand_hidden() -> bool {
true
}
fn run_even_if_cached_arg_name() -> Interned<str> {
"firtool-run-even-if-cached".intern()
}
}
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct VerilogJobKind;
#[derive(Clone, Debug, PartialEq, Eq, Hash, Args)]
#[non_exhaustive]
pub struct VerilogJobArgs {}
impl ToArgs for VerilogJobArgs {
fn to_args(&self, _args: &mut (impl WriteArgs + ?Sized)) {
let Self {} = self;
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct VerilogJob {
output_dir: Interned<Path>,
unadjusted_verilog_file: Interned<Path>,
main_verilog_file: Interned<Path>,
}
impl VerilogJob {
pub fn output_dir(&self) -> Interned<Path> {
self.output_dir
}
pub fn unadjusted_verilog_file(&self) -> Interned<Path> {
self.unadjusted_verilog_file
}
pub fn main_verilog_file(&self) -> Interned<Path> {
self.main_verilog_file
}
#[track_caller]
pub fn unwrap_additional_files(additional_files: &JobItem) -> &[Interned<Path>] {
match additional_files {
JobItem::DynamicPaths {
paths,
source_job_name,
} if *source_job_name == VerilogJobKind.name() => paths,
v => panic!("expected VerilogJob's additional files JobItem: {v:?}"),
}
}
pub fn all_verilog_files(
main_verilog_file: Interned<Path>,
additional_files: &[Interned<Path>],
) -> eyre::Result<Interned<[Interned<Path>]>> {
let mut retval = Vec::with_capacity(additional_files.len().saturating_add(1));
for verilog_file in [main_verilog_file].iter().chain(additional_files) {
if !["v", "sv"]
.iter()
.any(|extension| verilog_file.extension() == Some(extension.as_ref()))
{
continue;
}
let verilog_file = std::path::absolute(verilog_file).wrap_err_with(|| {
format!("converting {verilog_file:?} to an absolute path failed")
})?;
if verilog_file
.as_os_str()
.as_encoded_bytes()
.iter()
.any(|&ch| (ch != b' ' && ch != b'\t' && ch.is_ascii_whitespace()) || ch == b'"')
{
bail!("verilog file path contains characters that aren't permitted");
}
retval.push(verilog_file.intern_deref());
}
Ok(retval.intern_slice())
}
}
impl JobKind for VerilogJobKind {
type Args = VerilogJobArgs;
type Job = VerilogJob;
type Dependencies = JobKindAndDependencies<ExternalCommandJobKind<UnadjustedVerilog>>;
fn dependencies(self) -> Self::Dependencies {
Default::default()
}
fn args_to_jobs(
args: JobArgsAndDependencies<Self>,
params: &JobParams,
global_params: &GlobalParams,
) -> eyre::Result<JobAndDependencies<Self>> {
args.args_to_jobs_simple(params, global_params, |_kind, args, dependencies| {
let VerilogJobArgs {} = args;
let base_job = dependencies.get_job::<BaseJob, _>();
Ok(VerilogJob {
output_dir: base_job.output_dir(),
unadjusted_verilog_file: dependencies
.job
.job
.additional_job_data()
.unadjusted_verilog_file(),
main_verilog_file: base_job.file_with_ext("v"),
})
})
}
fn inputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[JobItemName::Path {
path: job.unadjusted_verilog_file,
}]
.intern_slice()
}
fn outputs(self, job: &Self::Job) -> Interned<[JobItemName]> {
[
JobItemName::Path {
path: job.main_verilog_file,
},
JobItemName::DynamicPaths {
source_job_name: self.name(),
},
]
.intern_slice()
}
fn name(self) -> Interned<str> {
"verilog".intern()
}
fn external_command_params(self, _job: &Self::Job) -> Option<CommandParams> {
None
}
fn run(
self,
job: &Self::Job,
inputs: &[JobItem],
_params: &JobParams,
_global_params: &GlobalParams,
_acquired_job: &mut AcquiredJob,
) -> eyre::Result<Vec<JobItem>> {
assert!(inputs.iter().map(JobItem::name).eq(self.inputs(job)));
let input = std::fs::read_to_string(job.unadjusted_verilog_file())?;
let file_separator_prefix = "\n// ----- 8< ----- FILE \"";
let file_separator_suffix = "\" ----- 8< -----\n\n";
let mut input = &*input;
let main_verilog_file = job.main_verilog_file();
let mut file_name = Some(main_verilog_file);
let mut additional_outputs = Vec::new();
loop {
let (chunk, next_file_name) = if let Some((chunk, rest)) =
input.split_once(file_separator_prefix)
{
let Some((next_file_name, rest)) = rest.split_once(file_separator_suffix) else {
bail!(
"parsing firtool's output failed: found {file_separator_prefix:?} but no {file_separator_suffix:?}"
);
};
input = rest;
let next_file_name = job.output_dir.join(next_file_name).intern_deref();
additional_outputs.push(next_file_name);
(chunk, Some(next_file_name))
} else {
(mem::take(&mut input), None)
};
let Some(file_name) = mem::replace(&mut file_name, next_file_name) else {
break;
};
std::fs::write(&file_name, chunk)?;
}
Ok(vec![
JobItem::Path {
path: main_verilog_file,
},
JobItem::DynamicPaths {
paths: additional_outputs,
source_job_name: self.name(),
},
])
}
}
pub(crate) fn built_in_job_kinds() -> impl IntoIterator<Item = DynJobKind> {
[
DynJobKind::new(ExternalCommandJobKind::<UnadjustedVerilog>::new()),
DynJobKind::new(VerilogJobKind),
]
}
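
To illustrate the splitting logic in `VerilogJobKind::run` above: the job expects firtool's combined output in the shape below, writes the first chunk to `<file_stem>.v`, and writes each later chunk to the named file inside the output directory. The file name and module contents here are made up:

```rust
// Illustrative input only; the "----- 8< -----" separator lines are the ones
// `run` searches for when splitting firtool's output into individual files.
const EXAMPLE_FIRTOOL_OUTPUT: &str = r#"module top();
endmodule

// ----- 8< ----- FILE "extra_bindings.sv" ----- 8< -----

module helper();
endmodule
"#;
```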

File diff suppressed because it is too large

View file

@@ -1,326 +0,0 @@
use crate::{
bundle::{BundleType, BundleValue, DynBundle},
firrtl,
intern::Interned,
module::Module,
};
use clap::{
builder::{OsStringValueParser, TypedValueParser},
Args, Parser, Subcommand, ValueEnum, ValueHint,
};
use eyre::{eyre, Report};
use std::{error, ffi::OsString, fmt, io, path::PathBuf, process};
pub type Result<T = (), E = CliError> = std::result::Result<T, E>;
pub struct CliError(Report);
impl fmt::Debug for CliError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl fmt::Display for CliError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl error::Error for CliError {}
impl From<io::Error> for CliError {
fn from(value: io::Error) -> Self {
CliError(Report::new(value))
}
}
pub trait RunPhase<Arg> {
type Output;
fn run(&self, arg: Arg) -> Result<Self::Output>;
}
#[derive(Args, Debug)]
#[non_exhaustive]
pub struct BaseArgs {
/// the directory to put the generated main output file and associated files in
#[arg(short, long, value_hint = ValueHint::DirPath)]
pub output: PathBuf,
/// the stem of the generated main output file, e.g. to get foo.v, pass --file-stem=foo
#[arg(long)]
pub file_stem: Option<String>,
}
impl BaseArgs {
pub fn to_firrtl_file_backend(&self) -> firrtl::FileBackend {
firrtl::FileBackend {
dir_path: self.output.clone(),
top_fir_file_stem: self.file_stem.clone(),
}
}
}
#[derive(Args, Debug)]
#[non_exhaustive]
pub struct FirrtlArgs {
#[command(flatten)]
pub base: BaseArgs,
}
#[derive(Debug)]
#[non_exhaustive]
pub struct FirrtlOutput {
pub file_stem: String,
}
impl FirrtlOutput {
pub fn firrtl_file(&self, args: &FirrtlArgs) -> PathBuf {
let mut retval = args.base.output.join(&self.file_stem);
retval.set_extension("fir");
retval
}
}
impl FirrtlArgs {
fn run_impl(&self, top_module: Module<DynBundle>) -> Result<FirrtlOutput> {
let firrtl::FileBackend {
top_fir_file_stem, ..
} = firrtl::export(self.base.to_firrtl_file_backend(), &top_module)?;
Ok(FirrtlOutput {
file_stem: top_fir_file_stem.expect(
"export is known to set the file stem from the circuit name if not provided",
),
})
}
}
impl<T: BundleValue> RunPhase<Module<T>> for FirrtlArgs
where
T::Type: BundleType<Value = T>,
{
type Output = FirrtlOutput;
fn run(&self, top_module: Module<T>) -> Result<Self::Output> {
self.run_impl(top_module.canonical())
}
}
impl<T: BundleValue> RunPhase<Interned<Module<T>>> for FirrtlArgs
where
T::Type: BundleType<Value = T>,
{
type Output = FirrtlOutput;
fn run(&self, top_module: Interned<Module<T>>) -> Result<Self::Output> {
self.run(*top_module)
}
}
/// based on [LLVM Circt's recommended lowering options
/// ](https://circt.llvm.org/docs/VerilogGeneration/#recommended-loweringoptions-by-target)
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum VerilogDialect {
Questa,
Spyglass,
Verilator,
Vivado,
Yosys,
}
impl VerilogDialect {
pub fn firtool_extra_args(self) -> &'static [&'static str] {
match self {
VerilogDialect::Questa => &["--lowering-options=emitWireInPorts"],
VerilogDialect::Spyglass => {
&["--lowering-options=explicitBitcast,disallowExpressionInliningInPorts"]
}
VerilogDialect::Verilator => &[
"--lowering-options=locationInfoStyle=wrapInAtSquareBracket,disallowLocalVariables",
],
VerilogDialect::Vivado => &["--lowering-options=mitigateVivadoArrayIndexConstPropBug"],
VerilogDialect::Yosys => {
&["--lowering-options=disallowLocalVariables,disallowPackedArrays"]
}
}
}
}
#[derive(Args, Debug)]
#[non_exhaustive]
pub struct VerilogArgs {
#[command(flatten)]
pub firrtl: FirrtlArgs,
#[arg(
long,
default_value = "firtool",
env = "FIRTOOL",
value_hint = ValueHint::CommandName,
value_parser = OsStringValueParser::new().try_map(which::which)
)]
pub firtool: PathBuf,
#[arg(long)]
pub firtool_extra_args: Vec<OsString>,
/// adapt the generated Verilog for a particular toolchain
#[arg(long)]
pub verilog_dialect: Option<VerilogDialect>,
}
#[derive(Debug)]
#[non_exhaustive]
pub struct VerilogOutput {
pub firrtl: FirrtlOutput,
}
impl VerilogOutput {
pub fn verilog_file(&self, args: &VerilogArgs) -> PathBuf {
let mut retval = args.firrtl.base.output.join(&self.firrtl.file_stem);
retval.set_extension("v");
retval
}
}
impl VerilogArgs {
fn run_impl(&self, firrtl_output: FirrtlOutput) -> Result<VerilogOutput> {
let output = VerilogOutput {
firrtl: firrtl_output,
};
let mut cmd = process::Command::new(&self.firtool);
cmd.arg(output.firrtl.firrtl_file(&self.firrtl));
cmd.arg("-o");
cmd.arg(output.verilog_file(self));
if let Some(dialect) = self.verilog_dialect {
cmd.args(dialect.firtool_extra_args());
}
cmd.args(&self.firtool_extra_args);
cmd.current_dir(&self.firrtl.base.output);
let status = cmd.status()?;
if status.success() {
Ok(output)
} else {
Err(CliError(eyre!(
"running {} failed: {status}",
self.firtool.display()
)))
}
}
}
impl<Arg> RunPhase<Arg> for VerilogArgs
where
FirrtlArgs: RunPhase<Arg, Output = FirrtlOutput>,
{
type Output = VerilogOutput;
fn run(&self, arg: Arg) -> Result<Self::Output> {
let firrtl_output = self.firrtl.run(arg)?;
self.run_impl(firrtl_output)
}
}
#[derive(Subcommand, Debug)]
enum CliCommand {
/// Generate FIRRTL
Firrtl(FirrtlArgs),
/// Generate Verilog
Verilog(VerilogArgs),
}
/// a simple CLI
///
/// Use like:
///
/// ```no_run
/// # use fayalite::hdl_module;
/// # #[hdl_module]
/// # fn my_module() {}
/// use fayalite::cli;
///
/// fn main() -> cli::Result {
/// cli::Cli::parse().run(my_module())
/// }
/// ```
///
/// You can also use it with a larger [`clap`]-based CLI like so:
///
/// ```no_run
/// # use fayalite::hdl_module;
/// # #[hdl_module]
/// # fn my_module() {}
/// use clap::{Subcommand, Parser};
/// use fayalite::cli;
///
/// #[derive(Subcommand)]
/// pub enum Cmd {
/// #[command(flatten)]
/// Fayalite(cli::Cli),
/// MySpecialCommand {
/// #[arg(long)]
/// foo: bool,
/// },
/// }
///
/// #[derive(Parser)]
/// pub struct Cli {
/// #[command(subcommand)]
/// cmd: Cmd, // or just use cli::Cli directly if you don't need more subcommands
/// }
///
/// fn main() -> cli::Result {
/// match Cli::parse().cmd {
/// Cmd::Fayalite(v) => v.run(my_module())?,
/// Cmd::MySpecialCommand { foo } => println!("special: foo={foo}"),
/// }
/// Ok(())
/// }
/// ```
#[derive(Parser, Debug)]
// clear things that would be crate-specific
#[command(name = "Fayalite Simple CLI", about = None, long_about = None)]
pub struct Cli {
#[command(subcommand)]
subcommand: CliCommand,
}
impl clap::Subcommand for Cli {
fn augment_subcommands(cmd: clap::Command) -> clap::Command {
CliCommand::augment_subcommands(cmd)
}
fn augment_subcommands_for_update(cmd: clap::Command) -> clap::Command {
CliCommand::augment_subcommands_for_update(cmd)
}
fn has_subcommand(name: &str) -> bool {
CliCommand::has_subcommand(name)
}
}
impl<T> RunPhase<T> for Cli
where
FirrtlArgs: RunPhase<T, Output = FirrtlOutput>,
{
type Output = ();
fn run(&self, arg: T) -> Result<Self::Output> {
match &self.subcommand {
CliCommand::Firrtl(c) => {
c.run(arg)?;
}
CliCommand::Verilog(c) => {
c.run(arg)?;
}
}
Ok(())
}
}
impl Cli {
/// forwards to [`clap::Parser::parse()`] so you don't have to import [`clap::Parser`]
pub fn parse() -> Self {
clap::Parser::parse()
}
/// forwards to [`RunPhase::run()`] so you don't have to import [`RunPhase`]
pub fn run<T>(&self, top_module: T) -> Result<()>
where
Self: RunPhase<T, Output = ()>,
{
RunPhase::run(self, top_module)
}
}

View file

@@ -2,114 +2,92 @@
// See Notices.txt for copyright information
use crate::{
expr::{Expr, ToExpr},
int::{UInt, UIntType},
intern::Interned,
reset::Reset,
hdl,
int::Bool,
reset::{Reset, ResetType},
source_location::SourceLocation,
ty::{
impl_match_values_as_self, CanonicalType, CanonicalTypeKind, CanonicalValue, Connect,
DynCanonicalType, StaticType, Type, TypeEnum, Value, ValueEnum,
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
},
type_deduction::{HitUndeducedType, UndeducedType},
util::interned_bit,
};
use bitvec::slice::BitSlice;
use bitvec::{bits, order::Lsb0};
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct ClockType;
pub struct Clock;
impl ClockType {
pub const fn new() -> Self {
Self
}
}
impl Type for Clock {
type BaseType = Clock;
type MaskType = Bool;
type SimValue = bool;
impl Connect<UndeducedType> for ClockType {}
impl Type for ClockType {
type Value = Clock;
type CanonicalType = ClockType;
type CanonicalValue = Clock;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
Bool
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::Clock(*self)
fn canonical(&self) -> CanonicalType {
CanonicalType::Clock(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
fn from_canonical(canonical_type: CanonicalType) -> Self {
let CanonicalType::Clock(retval) = canonical_type else {
panic!("expected Clock");
};
retval
}
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
opaque.bits()[0]
}
fn sim_value_clone_from_opaque(
&self,
value: &mut Self::SimValue,
opaque: OpaqueSimValueSlice<'_>,
) {
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
*value = opaque.bits()[0];
}
fn sim_value_to_opaque<'w>(
&self,
value: &Self::SimValue,
writer: OpaqueSimValueWriter<'w>,
) -> OpaqueSimValueWritten<'w> {
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
[bits![0], bits![1]][*value as usize],
))
}
}
impl Connect<Self> for ClockType {}
impl CanonicalType for ClockType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::Clock;
}
impl StaticType for ClockType {
fn static_type() -> Self {
Self
impl Clock {
pub fn type_properties(self) -> TypeProperties {
Self::TYPE_PROPERTIES
}
pub fn can_connect(self, _rhs: Self) -> bool {
true
}
}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct Clock(pub bool);
impl ToExpr for Clock {
type Type = ClockType;
fn ty(&self) -> Self::Type {
ClockType
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
impl Value for Clock {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
Ok(interned_bit(this.0))
}
}
impl CanonicalValue for Clock {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::Clock(*this)
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
Ok(interned_bit(this.0))
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Value)]
#[hdl(static, outline_generated)]
pub struct ClockDomain {
pub clk: Clock,
pub rst: Reset,
impl StaticType for Clock {
const TYPE: Self = Self;
const MASK_TYPE: Self::MaskType = Bool;
const TYPE_PROPERTIES: TypeProperties = TypeProperties {
is_passive: true,
is_storable: false,
is_castable_from_bits: true,
bit_width: 1,
sim_only_values_len: 0,
};
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
}
pub trait ToClock {
@@ -140,10 +118,10 @@ impl ToClock for Expr<Clock> {
}
}
impl ToClock for Clock {
fn to_clock(&self) -> Expr<Clock> {
self.to_expr()
}
#[hdl]
pub struct ClockDomain<R: ResetType = Reset> {
pub clk: Clock,
pub rst: R,
}
impl ToClock for bool {
@@ -151,9 +129,3 @@ impl ToClock for bool {
self.to_expr().to_clock()
}
}
impl ToClock for UInt<1> {
fn to_clock(&self) -> Expr<Clock> {
self.to_expr().to_clock()
}
}
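
A hedged sketch using the reworked types above: ClockDomain is now generic over the reset type, and a plain Bool level is turned into a Clock via ToClock. The body assumes m.input(), connect(), and the prelude exports used elsewhere in this diff:

use fayalite::prelude::*;

#[hdl_module]
fn clock_bridge() {
    #[hdl]
    let raw_clk: Bool = m.input();
    #[hdl]
    let raw_rst: SyncReset = m.input();
    #[hdl]
    let cd: ClockDomain<SyncReset> = m.output();
    // drive the domain's clock from the Bool level and forward the reset
    connect(cd.clk, raw_clk.to_clock());
    connect(cd.rst, raw_rst);
}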

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,489 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
array::Array,
bundle::{Bundle, BundleField},
expr::{Expr, Flow, ToExpr},
intern::{Intern, Interned},
memory::{DynPortType, MemPort},
module::{Instance, ModuleIO, TargetName},
reg::Reg,
reset::{AsyncReset, Reset, ResetType, ResetTypeDispatch, SyncReset},
source_location::SourceLocation,
ty::{CanonicalType, Type},
wire::Wire,
};
use std::fmt;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathBundleField {
pub name: Interned<str>,
}
impl fmt::Display for TargetPathBundleField {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, ".{}", self.name)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathArrayElement {
pub index: usize,
}
impl fmt::Display for TargetPathArrayElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[{}]", self.index)
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetPathDynArrayElement {}
impl fmt::Display for TargetPathDynArrayElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[<dyn>]")
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum TargetPathElement {
BundleField(TargetPathBundleField),
ArrayElement(TargetPathArrayElement),
DynArrayElement(TargetPathDynArrayElement),
}
impl From<TargetPathBundleField> for TargetPathElement {
fn from(value: TargetPathBundleField) -> Self {
Self::BundleField(value)
}
}
impl From<TargetPathArrayElement> for TargetPathElement {
fn from(value: TargetPathArrayElement) -> Self {
Self::ArrayElement(value)
}
}
impl From<TargetPathDynArrayElement> for TargetPathElement {
fn from(value: TargetPathDynArrayElement) -> Self {
Self::DynArrayElement(value)
}
}
impl fmt::Display for TargetPathElement {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::BundleField(v) => v.fmt(f),
Self::ArrayElement(v) => v.fmt(f),
Self::DynArrayElement(v) => v.fmt(f),
}
}
}
impl TargetPathElement {
pub fn canonical_ty(&self, parent: Interned<Target>) -> CanonicalType {
match self {
&Self::BundleField(TargetPathBundleField { name }) => {
let parent_ty = Bundle::from_canonical(parent.canonical_ty());
let field = parent_ty
.field_by_name(name)
.expect("field name is known to be a valid field of parent type");
field.ty
}
&Self::ArrayElement(TargetPathArrayElement { index }) => {
let parent_ty = Array::<CanonicalType>::from_canonical(parent.canonical_ty());
assert!(index < parent_ty.len());
parent_ty.element()
}
Self::DynArrayElement(_) => {
let parent_ty = Array::<CanonicalType>::from_canonical(parent.canonical_ty());
parent_ty.element()
}
}
}
pub fn flow(&self, parent: Interned<Target>) -> Flow {
match self {
Self::BundleField(v) => {
let parent_ty = Bundle::from_canonical(parent.canonical_ty());
let field = parent_ty
.field_by_name(v.name)
.expect("field name is known to be a valid field of parent type");
parent.flow().flip_if(field.flipped)
}
Self::ArrayElement(_) => parent.flow(),
Self::DynArrayElement(_) => parent.flow(),
}
}
pub fn is_static(&self) -> bool {
match self {
Self::BundleField(_) | Self::ArrayElement(_) => true,
Self::DynArrayElement(_) => false,
}
}
}
macro_rules! impl_target_base {
(
$(#[$enum_meta:meta])*
$enum_vis:vis enum $TargetBase:ident {
$(
$(#[from = $from:ident])?
#[is = $is_fn:ident]
#[to = $to_fn:ident]
$(#[$variant_meta:meta])*
$Variant:ident($VariantTy:ty),
)*
}
) => {
$(#[$enum_meta])*
$enum_vis enum $TargetBase {
$(
$(#[$variant_meta])*
$Variant($VariantTy),
)*
}
impl fmt::Debug for $TargetBase {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
$(Self::$Variant(v) => v.fmt(f),)*
}
}
}
$($(
impl From<$VariantTy> for $TargetBase {
fn $from(value: $VariantTy) -> Self {
Self::$Variant(value)
}
}
impl From<$VariantTy> for Target {
fn $from(value: $VariantTy) -> Self {
$TargetBase::$Variant(value).into()
}
}
)*)?
impl $TargetBase {
$(
$enum_vis fn $is_fn(&self) -> bool {
self.$to_fn().is_some()
}
$enum_vis fn $to_fn(&self) -> Option<&$VariantTy> {
if let Self::$Variant(retval) = self {
Some(retval)
} else {
None
}
}
)*
$enum_vis fn must_connect_to(&self) -> bool {
match self {
$(Self::$Variant(v) => v.must_connect_to(),)*
}
}
$enum_vis fn flow(&self) -> Flow {
match self {
$(Self::$Variant(v) => v.flow(),)*
}
}
$enum_vis fn source_location(&self) -> SourceLocation {
match self {
$(Self::$Variant(v) => v.source_location(),)*
}
}
}
impl ToExpr for $TargetBase {
type Type = CanonicalType;
fn to_expr(&self) -> Expr<Self::Type> {
match self {
$(Self::$Variant(v) => Expr::canonical(v.to_expr()),)*
}
}
}
};
}
impl_target_base! {
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum TargetBase {
#[from = from]
#[is = is_module_io]
#[to = module_io]
ModuleIO(ModuleIO<CanonicalType>),
#[from = from]
#[is = is_mem_port]
#[to = mem_port]
MemPort(MemPort<DynPortType>),
#[is = is_reg]
#[to = reg]
Reg(Reg<CanonicalType, Reset>),
#[is = is_reg_sync]
#[to = reg_sync]
RegSync(Reg<CanonicalType, SyncReset>),
#[is = is_reg_async]
#[to = reg_async]
RegAsync(Reg<CanonicalType, AsyncReset>),
#[from = from]
#[is = is_wire]
#[to = wire]
Wire(Wire<CanonicalType>),
#[from = from]
#[is = is_instance]
#[to = instance]
Instance(Instance<Bundle>),
}
}
impl<R: ResetType> From<Reg<CanonicalType, R>> for TargetBase {
fn from(value: Reg<CanonicalType, R>) -> Self {
struct Dispatch;
impl ResetTypeDispatch for Dispatch {
type Input<T: ResetType> = Reg<CanonicalType, T>;
type Output<T: ResetType> = TargetBase;
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset> {
TargetBase::Reg(input)
}
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset> {
TargetBase::RegSync(input)
}
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset> {
TargetBase::RegAsync(input)
}
}
R::dispatch(value, Dispatch)
}
}
impl<R: ResetType> From<Reg<CanonicalType, R>> for Target {
fn from(value: Reg<CanonicalType, R>) -> Self {
TargetBase::from(value).into()
}
}
impl fmt::Display for TargetBase {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}", self.target_name())
}
}
impl TargetBase {
pub fn target_name(&self) -> TargetName {
match self {
TargetBase::ModuleIO(v) => TargetName(v.scoped_name(), None),
TargetBase::MemPort(v) => TargetName(v.mem_name(), Some(v.port_name())),
TargetBase::Reg(v) => TargetName(v.scoped_name(), None),
TargetBase::RegSync(v) => TargetName(v.scoped_name(), None),
TargetBase::RegAsync(v) => TargetName(v.scoped_name(), None),
TargetBase::Wire(v) => TargetName(v.scoped_name(), None),
TargetBase::Instance(v) => TargetName(v.scoped_name(), None),
}
}
pub fn canonical_ty(&self) -> CanonicalType {
match self {
TargetBase::ModuleIO(v) => v.ty(),
TargetBase::MemPort(v) => v.ty().canonical(),
TargetBase::Reg(v) => v.ty(),
TargetBase::RegSync(v) => v.ty(),
TargetBase::RegAsync(v) => v.ty(),
TargetBase::Wire(v) => v.ty(),
TargetBase::Instance(v) => v.ty().canonical(),
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct TargetChild {
parent: Interned<Target>,
path_element: Interned<TargetPathElement>,
canonical_ty: CanonicalType,
flow: Flow,
}
impl fmt::Debug for TargetChild {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
parent,
path_element,
canonical_ty: _,
flow: _,
} = self;
parent.fmt(f)?;
fmt::Display::fmt(path_element, f)
}
}
impl fmt::Display for TargetChild {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
parent,
path_element,
canonical_ty: _,
flow: _,
} = self;
parent.fmt(f)?;
path_element.fmt(f)
}
}
impl TargetChild {
pub fn new(parent: Interned<Target>, path_element: Interned<TargetPathElement>) -> Self {
Self {
parent,
path_element,
canonical_ty: path_element.canonical_ty(parent),
flow: path_element.flow(parent),
}
}
pub fn parent(self) -> Interned<Target> {
self.parent
}
pub fn path_element(self) -> Interned<TargetPathElement> {
self.path_element
}
pub fn canonical_ty(self) -> CanonicalType {
self.canonical_ty
}
pub fn flow(self) -> Flow {
self.flow
}
pub fn bundle_field(self) -> Option<BundleField> {
if let TargetPathElement::BundleField(TargetPathBundleField { name }) = *self.path_element {
let parent_ty = Bundle::from_canonical(self.parent.canonical_ty());
Some(
parent_ty
.field_by_name(name)
.expect("field name known to be a valid field of parent"),
)
} else {
None
}
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum Target {
Base(Interned<TargetBase>),
Child(TargetChild),
}
impl From<TargetBase> for Target {
fn from(value: TargetBase) -> Self {
Self::Base(Intern::intern_sized(value))
}
}
impl From<TargetChild> for Target {
fn from(value: TargetChild) -> Self {
Self::Child(value)
}
}
impl From<Interned<TargetBase>> for Target {
fn from(value: Interned<TargetBase>) -> Self {
Self::Base(value)
}
}
impl Target {
pub fn base(&self) -> Interned<TargetBase> {
let mut target = self;
loop {
match target {
Self::Base(v) => break *v,
Self::Child(v) => target = &v.parent,
}
}
}
pub fn child(&self) -> Option<TargetChild> {
match *self {
Target::Base(_) => None,
Target::Child(v) => Some(v),
}
}
pub fn is_static(&self) -> bool {
let mut target = self;
loop {
match target {
Self::Base(_) => return true,
Self::Child(v) if !v.path_element().is_static() => return false,
Self::Child(v) => target = &v.parent,
}
}
}
#[must_use]
pub fn join(&self, path_element: Interned<TargetPathElement>) -> Self {
TargetChild::new(self.intern(), path_element).into()
}
pub fn flow(&self) -> Flow {
match self {
Self::Base(v) => v.flow(),
Self::Child(v) => v.flow(),
}
}
pub fn canonical_ty(&self) -> CanonicalType {
match self {
Target::Base(v) => v.canonical_ty(),
Target::Child(v) => v.canonical_ty(),
}
}
}
impl fmt::Display for Target {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Base(v) => v.fmt(f),
Self::Child(v) => v.fmt(f),
}
}
}
impl fmt::Debug for Target {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Base(v) => v.fmt(f),
Self::Child(v) => v.fmt(f),
}
}
}
pub trait GetTarget {
fn target(&self) -> Option<Interned<Target>>;
}
impl GetTarget for bool {
fn target(&self) -> Option<Interned<Target>> {
None
}
}
impl<T: ?Sized + GetTarget + Send + Sync + 'static> GetTarget for Interned<T> {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
impl<T: ?Sized + GetTarget> GetTarget for &'_ T {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
impl<T: ?Sized + GetTarget> GetTarget for &'_ mut T {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
impl<T: ?Sized + GetTarget> GetTarget for Box<T> {
fn target(&self) -> Option<Interned<Target>> {
T::target(self)
}
}
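
A small hedged sketch of how the path elements render; it exercises only the Display impls above and assumes the types are imported from wherever this new file ends up (the module path isn't visible in this view):

use fayalite::intern::Intern;

let field = TargetPathElement::from(TargetPathBundleField { name: "data".intern() });
let elem = TargetPathElement::from(TargetPathArrayElement { index: 3 });
// Display writes ".<name>" for bundle fields and "[<index>]" for array elements
assert_eq!(field.to_string(), ".data");
assert_eq!(elem.to_string(), "[3]");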

File diff suppressed because it is too large


@@ -0,0 +1,247 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
int::BoolOrIntType,
intern::{Intern, Interned, Memoize},
prelude::*,
};
use std::sync::OnceLock;
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub enum FormalKind {
Assert,
Assume,
Cover,
}
impl FormalKind {
pub fn as_str(self) -> &'static str {
match self {
Self::Assert => "assert",
Self::Assume => "assume",
Self::Cover => "cover",
}
}
}
#[track_caller]
pub fn formal_stmt_with_enable_and_loc(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
crate::module::add_stmt_formal(crate::module::StmtFormal {
kind,
clk,
pred,
en: en & !formal_reset().cast_to_static::<Bool>(),
text: text.intern(),
source_location,
});
}
#[track_caller]
pub fn formal_stmt_with_enable(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
) {
formal_stmt_with_enable_and_loc(kind, clk, pred, en, text, SourceLocation::caller());
}
#[track_caller]
pub fn formal_stmt_with_loc(
kind: FormalKind,
clk: Expr<Clock>,
pred: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_enable_and_loc(kind, clk, pred, true.to_expr(), text, source_location);
}
#[track_caller]
pub fn formal_stmt(kind: FormalKind, clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
formal_stmt_with_loc(kind, clk, pred, text, SourceLocation::caller());
}
macro_rules! make_formal {
($kind:ident, $formal_stmt_with_enable_and_loc:ident, $formal_stmt_with_enable:ident, $formal_stmt_with_loc:ident, $formal_stmt:ident) => {
#[track_caller]
pub fn $formal_stmt_with_enable_and_loc(
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_enable_and_loc(
FormalKind::$kind,
clk,
pred,
en,
text,
source_location,
);
}
#[track_caller]
pub fn $formal_stmt_with_enable(
clk: Expr<Clock>,
pred: Expr<Bool>,
en: Expr<Bool>,
text: &str,
) {
formal_stmt_with_enable(FormalKind::$kind, clk, pred, en, text);
}
#[track_caller]
pub fn $formal_stmt_with_loc(
clk: Expr<Clock>,
pred: Expr<Bool>,
text: &str,
source_location: SourceLocation,
) {
formal_stmt_with_loc(FormalKind::$kind, clk, pred, text, source_location);
}
#[track_caller]
pub fn $formal_stmt(clk: Expr<Clock>, pred: Expr<Bool>, text: &str) {
formal_stmt(FormalKind::$kind, clk, pred, text);
}
};
}
make_formal!(
Assert,
hdl_assert_with_enable_and_loc,
hdl_assert_with_enable,
hdl_assert_with_loc,
hdl_assert
);
make_formal!(
Assume,
hdl_assume_with_enable_and_loc,
hdl_assume_with_enable,
hdl_assume_with_loc,
hdl_assume
);
make_formal!(
Cover,
hdl_cover_with_enable_and_loc,
hdl_cover_with_enable,
hdl_cover_with_loc,
hdl_cover
);
pub trait MakeFormalExpr: Type {}
impl<T: Type> MakeFormalExpr for T {}
#[hdl]
pub fn formal_global_clock() -> Expr<Clock> {
#[hdl_module(extern)]
fn formal_global_clock() {
#[hdl]
let clk: Clock = m.output();
m.annotate_module(BlackBoxInlineAnnotation {
path: "fayalite_formal_global_clock.v".intern(),
text: r"module __fayalite_formal_global_clock(output clk);
(* gclk *)
reg clk;
endmodule
"
.intern(),
});
m.verilog_name("__fayalite_formal_global_clock");
}
#[hdl]
let formal_global_clock = instance(formal_global_clock());
formal_global_clock.clk
}
#[hdl]
pub fn formal_reset() -> Expr<SyncReset> {
#[hdl_module(extern)]
fn formal_reset() {
#[hdl]
let rst: SyncReset = m.output();
m.annotate_module(BlackBoxInlineAnnotation {
path: "fayalite_formal_reset.v".intern(),
text: r"module __fayalite_formal_reset(output rst);
assign rst = $initstate;
endmodule
"
.intern(),
});
m.verilog_name("__fayalite_formal_reset");
}
static MOD: OnceLock<Interned<Module<formal_reset>>> = OnceLock::new();
#[hdl]
let formal_reset = instance(*MOD.get_or_init(formal_reset));
formal_reset.rst
}
macro_rules! make_any_const_fn {
($ident:ident, $verilog_attribute:literal) => {
#[hdl]
pub fn $ident<T: BoolOrIntType>(ty: T) -> Expr<T> {
#[hdl_module(extern)]
pub(super) fn $ident<T: BoolOrIntType>(ty: T) {
#[hdl]
let out: T = m.output(ty);
let width = ty.width();
let verilog_bitslice = if width == 1 {
String::new()
} else {
format!(" [{}:0]", width - 1)
};
m.annotate_module(BlackBoxInlineAnnotation {
path: Intern::intern_owned(format!(
"fayalite_{}_{width}.v",
stringify!($ident),
)),
text: Intern::intern_owned(format!(
r"module __fayalite_{}_{width}(output{verilog_bitslice} out);
(* {} *)
reg{verilog_bitslice} out;
endmodule
",
stringify!($ident),
$verilog_attribute,
)),
});
m.verilog_name(format!("__fayalite_{}_{width}", stringify!($ident)));
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct TheMemoize<T>(T);
impl<T: BoolOrIntType> Memoize for TheMemoize<T> {
type Input = ();
type InputOwned = ();
type Output = Option<Interned<Module<$ident<T>>>>;
fn inner(self, _input: &Self::Input) -> Self::Output {
if self.0.width() == 0 {
None
} else {
Some($ident(self.0))
}
}
}
let Some(module) = TheMemoize(ty).get_owned(()) else {
return 0_hdl_u0.cast_bits_to(ty);
};
#[hdl]
let $ident = instance(module);
$ident.out
}
};
}
make_any_const_fn!(any_const, "anyconst");
make_any_const_fn!(any_seq, "anyseq");
make_any_const_fn!(all_const, "allconst");
make_any_const_fn!(all_seq, "allseq");
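
A hedged sketch of the generated helpers inside a module body, following the (clk, pred, en, text) signatures above; m.input() and the prelude are assumed, and any_seq(Bool) hands the solver a free one-bit input:

use fayalite::formal::{any_seq, hdl_assert_with_enable, hdl_assume_with_enable, hdl_cover};
use fayalite::prelude::*;

#[hdl_module]
fn formal_example() {
    #[hdl]
    let clk: Clock = m.input();
    #[hdl]
    let en: Bool = m.input();
    // a value the solver may pick freshly every cycle
    let noise = any_seq(Bool);
    hdl_assume_with_enable(clk, noise, en, "noise is high whenever en is high");
    hdl_assert_with_enable(clk, noise, en, "follows directly from the assumption");
    hdl_cover(clk, en & noise, "both signals high is reachable");
}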

File diff suppressed because it is too large


@@ -0,0 +1,656 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
bundle::{Bundle, BundleField, BundleType, BundleTypePropertiesBuilder, NoBuilder},
expr::{
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd,
ops::{ExprCastTo, ExprPartialEq, ExprPartialOrd},
},
int::{Bool, DynSize, KnownSize, Size, SizeType, UInt, UIntType},
intern::{Intern, InternSlice, Interned},
phantom_const::PhantomConst,
sim::value::{SimValue, SimValuePartialEq, ToSimValueWithType},
source_location::SourceLocation,
ty::{
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
StaticType, Type, TypeProperties, impl_match_variant_as_self,
},
};
use bitvec::{order::Lsb0, view::BitView};
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{Error, Visitor, value::UsizeDeserializer},
};
use std::{fmt, marker::PhantomData, ops::Index};
const UINT_IN_RANGE_TYPE_FIELD_NAMES: [&'static str; 2] = ["value", "range"];
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct UIntInRangeMaskType {
value: Bool,
range: PhantomConstRangeMaskType,
}
impl Type for UIntInRangeMaskType {
type BaseType = Bundle;
type MaskType = Self;
type SimValue = bool;
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
*self
}
fn canonical(&self) -> CanonicalType {
CanonicalType::Bundle(Bundle::new(self.fields()))
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let fields = Bundle::from_canonical(canonical_type).fields();
let [
BundleField {
name: value_name,
flipped: false,
ty: value,
},
BundleField {
name: range_name,
flipped: false,
ty: range,
},
] = *fields
else {
panic!("expected UIntInRangeMaskType");
};
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
let value = Bool::from_canonical(value);
let range = PhantomConstRangeMaskType::from_canonical(range);
Self { value, range }
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
Bool.sim_value_from_opaque(opaque)
}
fn sim_value_clone_from_opaque(
&self,
value: &mut Self::SimValue,
opaque: OpaqueSimValueSlice<'_>,
) {
Bool.sim_value_clone_from_opaque(value, opaque);
}
fn sim_value_to_opaque<'w>(
&self,
value: &Self::SimValue,
writer: OpaqueSimValueWriter<'w>,
) -> OpaqueSimValueWritten<'w> {
Bool.sim_value_to_opaque(value, writer)
}
}
impl BundleType for UIntInRangeMaskType {
type Builder = NoBuilder;
type FilledBuilder = Expr<UIntInRangeMaskType>;
fn fields(&self) -> Interned<[BundleField]> {
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
let Self { value, range } = self;
[
BundleField {
name: value_name.intern(),
flipped: false,
ty: value.canonical(),
},
BundleField {
name: range_name.intern(),
flipped: false,
ty: range.canonical(),
},
]
.intern_slice()
}
}
impl StaticType for UIntInRangeMaskType {
const TYPE: Self = Self {
value: Bool,
range: PhantomConstRangeMaskType::TYPE,
};
const MASK_TYPE: Self::MaskType = Self::TYPE;
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
.field(false, Bool::TYPE_PROPERTIES)
.field(false, PhantomConstRangeMaskType::TYPE_PROPERTIES)
.finish();
const MASK_TYPE_PROPERTIES: TypeProperties = Self::TYPE_PROPERTIES;
}
impl ToSimValueWithType<UIntInRangeMaskType> for bool {
fn to_sim_value_with_type(&self, ty: UIntInRangeMaskType) -> SimValue<UIntInRangeMaskType> {
SimValue::from_value(ty, *self)
}
}
impl ExprCastTo<Bool> for UIntInRangeMaskType {
fn cast_to(src: Expr<Self>, to_type: Bool) -> Expr<Bool> {
src.cast_to_bits().cast_to(to_type)
}
}
impl ExprCastTo<UIntInRangeMaskType> for Bool {
fn cast_to(src: Expr<Self>, to_type: UIntInRangeMaskType) -> Expr<UIntInRangeMaskType> {
src.cast_to_static::<UInt<1>>().cast_bits_to(to_type)
}
}
impl ExprPartialEq<Self> for UIntInRangeMaskType {
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
}
}
impl SimValuePartialEq<Self> for UIntInRangeMaskType {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
**this == **other
}
}
type PhantomConstRangeMaskType = <PhantomConst<SerdeRange<DynSize, DynSize>> as Type>::MaskType;
#[derive(Default, Copy, Clone, Debug)]
struct RangeParseError;
macro_rules! define_uint_in_range_type {
(
$UIntInRange:ident,
$UIntInRangeType:ident,
$UIntInRangeTypeWithoutGenerics:ident,
$UIntInRangeTypeWithStart:ident,
$SerdeRange:ident,
$range_operator_str:literal,
|$uint_range_usize_start:ident, $uint_range_usize_end:ident| $uint_range_usize:expr,
) => {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct $SerdeRange<Start: Size, End: Size> {
start: Start::SizeType,
end: End::SizeType,
}
impl<Start: KnownSize, End: KnownSize> Default for $SerdeRange<Start, End> {
fn default() -> Self {
Self {
start: Start::SIZE,
end: End::SIZE,
}
}
}
impl std::str::FromStr for $SerdeRange<DynSize, DynSize> {
type Err = RangeParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let Some((start, end)) = s.split_once($range_operator_str) else {
return Err(RangeParseError);
};
if start.is_empty()
|| start.bytes().any(|b| !b.is_ascii_digit())
|| end.is_empty()
|| end.bytes().any(|b| !b.is_ascii_digit())
{
return Err(RangeParseError);
}
let start = start.parse().map_err(|_| RangeParseError)?;
let end = end.parse().map_err(|_| RangeParseError)?;
let retval = Self { start, end };
if retval.is_empty() {
Err(RangeParseError)
} else {
Ok(retval)
}
}
}
impl<Start: Size, End: Size> fmt::Display for $SerdeRange<Start, End> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self { start, end } = *self;
write!(
f,
"{}{}{}",
Start::as_usize(start),
$range_operator_str,
End::as_usize(end),
)
}
}
impl<Start: Size, End: Size> Serialize for $SerdeRange<Start, End> {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.collect_str(self)
}
}
impl<'de, Start: Size, End: Size> Deserialize<'de> for $SerdeRange<Start, End> {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
struct SerdeRangeVisitor<Start: Size, End: Size>(PhantomData<(Start, End)>);
impl<'de, Start: Size, End: Size> Visitor<'de> for SerdeRangeVisitor<Start, End> {
type Value = $SerdeRange<Start, End>;
fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("a string with format \"")?;
if let Some(start) = Start::KNOWN_VALUE {
write!(f, "{start}")?;
} else {
f.write_str("<int>")?;
};
f.write_str($range_operator_str)?;
if let Some(end) = End::KNOWN_VALUE {
write!(f, "{end}")?;
} else {
f.write_str("<int>")?;
};
f.write_str("\" that is a non-empty range")
}
fn visit_str<E: Error>(self, v: &str) -> Result<Self::Value, E> {
let $SerdeRange::<DynSize, DynSize> { start, end } =
v.parse().map_err(|_| {
Error::invalid_value(serde::de::Unexpected::Str(v), &self)
})?;
let start =
Start::SizeType::deserialize(UsizeDeserializer::<E>::new(start))?;
let end = End::SizeType::deserialize(UsizeDeserializer::<E>::new(end))?;
Ok($SerdeRange { start, end })
}
fn visit_bytes<E: Error>(self, v: &[u8]) -> Result<Self::Value, E> {
match std::str::from_utf8(v) {
Ok(v) => self.visit_str(v),
Err(_) => {
Err(Error::invalid_value(serde::de::Unexpected::Bytes(v), &self))
}
}
}
}
deserializer.deserialize_str(SerdeRangeVisitor(PhantomData))
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct $UIntInRangeType<Start: Size, End: Size> {
value: UInt,
range: PhantomConst<$SerdeRange<Start, End>>,
}
impl<Start: Size, End: Size> $UIntInRangeType<Start, End> {
fn from_phantom_const_range(range: PhantomConst<$SerdeRange<Start, End>>) -> Self {
let $SerdeRange { start, end } = *range.get();
let $uint_range_usize_start = Start::as_usize(start);
let $uint_range_usize_end = End::as_usize(end);
Self {
value: $uint_range_usize,
range,
}
}
pub fn new(start: Start::SizeType, end: End::SizeType) -> Self {
Self::from_phantom_const_range(PhantomConst::new(
$SerdeRange { start, end }.intern_sized(),
))
}
pub fn bit_width(self) -> usize {
self.value.width()
}
pub fn start(self) -> Start::SizeType {
self.range.get().start
}
pub fn end(self) -> End::SizeType {
self.range.get().end
}
}
impl<Start: Size, End: Size> fmt::Debug for $UIntInRangeType<Start, End> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self { value, range } = self;
let $SerdeRange { start, end } = *range.get();
f.debug_struct(&format!(
"{}<{}, {}>",
stringify!($UIntInRange),
Start::as_usize(start),
End::as_usize(end),
))
.field("value", value)
.finish_non_exhaustive()
}
}
impl<Start: Size, End: Size> Type for $UIntInRangeType<Start, End> {
type BaseType = Bundle;
type MaskType = UIntInRangeMaskType;
type SimValue = usize;
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntInRangeMaskType::TYPE
}
fn canonical(&self) -> CanonicalType {
CanonicalType::Bundle(Bundle::new(self.fields()))
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let fields = Bundle::from_canonical(canonical_type).fields();
let [
BundleField {
name: value_name,
flipped: false,
ty: value,
},
BundleField {
name: range_name,
flipped: false,
ty: range,
},
] = *fields
else {
panic!("expected {}", stringify!($UIntInRange));
};
assert_eq!([&*value_name, &*range_name], UINT_IN_RANGE_TYPE_FIELD_NAMES);
let value = UInt::from_canonical(value);
let range = PhantomConst::<$SerdeRange<Start, End>>::from_canonical(range);
let retval = Self::from_phantom_const_range(range);
assert_eq!(retval, Self { value, range });
retval
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
assert_eq!(opaque.size(), self.value.type_properties().size());
let mut retval = 0usize;
retval.view_bits_mut::<Lsb0>()[..opaque.bit_width()]
.clone_from_bitslice(opaque.bits());
retval
}
fn sim_value_clone_from_opaque(
&self,
value: &mut Self::SimValue,
opaque: OpaqueSimValueSlice<'_>,
) {
*value = self.sim_value_from_opaque(opaque);
}
fn sim_value_to_opaque<'w>(
&self,
value: &Self::SimValue,
writer: OpaqueSimValueWriter<'w>,
) -> OpaqueSimValueWritten<'w> {
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
&value.view_bits::<Lsb0>()[..self.value.width()],
))
}
}
impl<Start: Size, End: Size> BundleType for $UIntInRangeType<Start, End> {
type Builder = NoBuilder;
type FilledBuilder = Expr<Self>;
fn fields(&self) -> Interned<[BundleField]> {
let [value_name, range_name] = UINT_IN_RANGE_TYPE_FIELD_NAMES;
let Self { value, range } = self;
[
BundleField {
name: value_name.intern(),
flipped: false,
ty: value.canonical(),
},
BundleField {
name: range_name.intern(),
flipped: false,
ty: range.canonical(),
},
]
.intern_slice()
}
}
impl<Start: KnownSize, End: KnownSize> Default for $UIntInRangeType<Start, End> {
fn default() -> Self {
Self::TYPE
}
}
impl<Start: KnownSize, End: KnownSize> StaticType for $UIntInRangeType<Start, End> {
const TYPE: Self = {
let $uint_range_usize_start = Start::VALUE;
let $uint_range_usize_end = End::VALUE;
Self {
value: $uint_range_usize,
range: PhantomConst::<$SerdeRange<Start, End>>::TYPE,
}
};
const MASK_TYPE: Self::MaskType = UIntInRangeMaskType::TYPE;
const TYPE_PROPERTIES: TypeProperties = BundleTypePropertiesBuilder::new()
.field(false, Self::TYPE.value.type_properties_dyn())
.field(
false,
PhantomConst::<$SerdeRange<Start, End>>::TYPE_PROPERTIES,
)
.finish();
const MASK_TYPE_PROPERTIES: TypeProperties = UIntInRangeMaskType::TYPE_PROPERTIES;
}
impl<Start: Size, End: Size> ToSimValueWithType<$UIntInRangeType<Start, End>> for usize {
fn to_sim_value_with_type(
&self,
ty: $UIntInRangeType<Start, End>,
) -> SimValue<$UIntInRangeType<Start, End>> {
SimValue::from_value(ty, *self)
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct $UIntInRangeTypeWithoutGenerics;
#[allow(non_upper_case_globals)]
pub const $UIntInRangeType: $UIntInRangeTypeWithoutGenerics =
$UIntInRangeTypeWithoutGenerics;
impl<StartSize: SizeType> Index<StartSize> for $UIntInRangeTypeWithoutGenerics {
type Output = $UIntInRangeTypeWithStart<StartSize::Size>;
fn index(&self, start: StartSize) -> &Self::Output {
Interned::into_inner($UIntInRangeTypeWithStart(start).intern_sized())
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct $UIntInRangeTypeWithStart<Start: Size>(Start::SizeType);
impl<Start: Size, EndSize: SizeType<Size = End>, End: Size<SizeType = EndSize>>
Index<EndSize> for $UIntInRangeTypeWithStart<Start>
{
type Output = $UIntInRangeType<Start, End>;
fn index(&self, end: EndSize) -> &Self::Output {
Interned::into_inner($UIntInRangeType::new(self.0, end).intern_sized())
}
}
impl<Start: Size, End: Size, Width: Size> ExprCastTo<UIntType<Width>>
for $UIntInRangeType<Start, End>
{
fn cast_to(src: Expr<Self>, to_type: UIntType<Width>) -> Expr<UIntType<Width>> {
src.cast_to_bits().cast_to(to_type)
}
}
impl<Start: Size, End: Size, Width: Size> ExprCastTo<$UIntInRangeType<Start, End>>
for UIntType<Width>
{
fn cast_to(
src: Expr<Self>,
to_type: $UIntInRangeType<Start, End>,
) -> Expr<$UIntInRangeType<Start, End>> {
src.cast_to(to_type.value).cast_bits_to(to_type)
}
}
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
ExprPartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
for $UIntInRangeType<LhsStart, LhsEnd>
{
fn cmp_eq(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_eq(rhs.cast_to_bits())
}
fn cmp_ne(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ne(rhs.cast_to_bits())
}
}
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
ExprPartialOrd<$UIntInRangeType<RhsStart, RhsEnd>>
for $UIntInRangeType<LhsStart, LhsEnd>
{
fn cmp_lt(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_lt(rhs.cast_to_bits())
}
fn cmp_le(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_le(rhs.cast_to_bits())
}
fn cmp_gt(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_gt(rhs.cast_to_bits())
}
fn cmp_ge(
lhs: Expr<Self>,
rhs: Expr<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ge(rhs.cast_to_bits())
}
}
impl<LhsStart: Size, LhsEnd: Size, RhsStart: Size, RhsEnd: Size>
SimValuePartialEq<$UIntInRangeType<RhsStart, RhsEnd>>
for $UIntInRangeType<LhsStart, LhsEnd>
{
fn sim_value_eq(
this: &SimValue<Self>,
other: &SimValue<$UIntInRangeType<RhsStart, RhsEnd>>,
) -> bool {
**this == **other
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<UIntType<Width>>
for $UIntInRangeType<Start, End>
{
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_eq(rhs)
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ne(rhs)
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialEq<$UIntInRangeType<Start, End>>
for UIntType<Width>
{
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_eq(rhs.cast_to_bits())
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_ne(rhs.cast_to_bits())
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<UIntType<Width>>
for $UIntInRangeType<Start, End>
{
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_lt(rhs)
}
fn cmp_le(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_le(rhs)
}
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_gt(rhs)
}
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<UIntType<Width>>) -> Expr<Bool> {
lhs.cast_to_bits().cmp_ge(rhs)
}
}
impl<Start: Size, End: Size, Width: Size> ExprPartialOrd<$UIntInRangeType<Start, End>>
for UIntType<Width>
{
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_lt(rhs.cast_to_bits())
}
fn cmp_le(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_le(rhs.cast_to_bits())
}
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_gt(rhs.cast_to_bits())
}
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<$UIntInRangeType<Start, End>>) -> Expr<Bool> {
lhs.cmp_ge(rhs.cast_to_bits())
}
}
};
}
define_uint_in_range_type! {
UIntInRange,
UIntInRangeType,
UIntInRangeTypeWithoutGenerics,
UIntInRangeTypeWithStart,
SerdeRange,
"..",
|start, end| UInt::range_usize(start..end),
}
define_uint_in_range_type! {
UIntInRangeInclusive,
UIntInRangeInclusiveType,
UIntInRangeInclusiveTypeWithoutGenerics,
UIntInRangeInclusiveTypeWithStart,
SerdeRangeInclusive,
"..=",
|start, end| UInt::range_inclusive_usize(start..=end),
}
impl SerdeRange<DynSize, DynSize> {
fn is_empty(self) -> bool {
self.start >= self.end
}
}
impl SerdeRangeInclusive<DynSize, DynSize> {
fn is_empty(self) -> bool {
self.start > self.end
}
}
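
A hedged sketch of the new bit_width/start/end accessors on a dynamically-sized range, assuming UIntInRangeType and DynSize are reachable through the prelude; the UIntInRangeType[start][end] indexing sugar defined above builds the same type:

use fayalite::prelude::*;

// 0..10 holds the values 0 through 9, so 4 bits suffice
let ty: UIntInRangeType<DynSize, DynSize> = UIntInRangeType::new(0, 10);
assert_eq!(ty.bit_width(), 4);
assert_eq!((ty.start(), ty.end()), (0, 10));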

File diff suppressed because it is too large


@@ -1,10 +1,8 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use hashbrown::HashMap;
use std::{
any::{Any, TypeId},
hash::{BuildHasher, Hasher},
ptr::NonNull,
sync::RwLock,
};
@@ -75,59 +73,36 @@ impl BuildHasher for TypeIdBuildHasher {
}
}
struct Value(NonNull<dyn Any + Send + Sync>);
impl Value {
unsafe fn get_transmute_lifetime<'b>(&self) -> &'b (dyn Any + Send + Sync) {
unsafe { &*self.0.as_ptr() }
}
fn new(v: Box<dyn Any + Send + Sync>) -> Self {
unsafe { Self(NonNull::new_unchecked(Box::into_raw(v))) }
}
}
unsafe impl Send for Value {}
unsafe impl Sync for Value {}
impl Drop for Value {
fn drop(&mut self) {
unsafe { std::ptr::drop_in_place(self.0.as_ptr()) }
}
}
pub struct TypeIdMap(RwLock<HashMap<TypeId, Value, TypeIdBuildHasher>>);
pub(crate) struct TypeIdMap(
RwLock<hashbrown::HashMap<TypeId, &'static (dyn Any + Send + Sync), TypeIdBuildHasher>>,
);
impl TypeIdMap {
pub const fn new() -> Self {
Self(RwLock::new(HashMap::with_hasher(TypeIdBuildHasher)))
pub(crate) const fn new() -> Self {
Self(RwLock::new(hashbrown::HashMap::with_hasher(
TypeIdBuildHasher,
)))
}
#[cold]
unsafe fn insert_slow(
fn insert_slow(
&self,
type_id: TypeId,
make: fn() -> Box<dyn Any + Sync + Send>,
) -> &(dyn Any + Sync + Send) {
let value = Value::new(make());
) -> &'static (dyn Any + Sync + Send) {
let value = Box::leak(make());
let mut write_guard = self.0.write().unwrap();
unsafe {
write_guard
.entry(type_id)
.or_insert(value)
.get_transmute_lifetime()
}
*write_guard.entry(type_id).or_insert(value)
}
pub fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
pub(crate) fn get_or_insert_default<T: Sized + Any + Send + Sync + Default>(&self) -> &T {
let type_id = TypeId::of::<T>();
let read_guard = self.0.read().unwrap();
let retval = read_guard
.get(&type_id)
.map(|v| unsafe { Value::get_transmute_lifetime(v) });
let retval = read_guard.get(&type_id).map(|v| *v);
drop(read_guard);
let retval = match retval {
Some(retval) => retval,
None => unsafe { self.insert_slow(type_id, move || Box::new(T::default())) },
None => self.insert_slow(type_id, move || Box::new(T::default())),
};
unsafe { &*(retval as *const dyn Any as *const T) }
retval.downcast_ref().expect("known to have correct TypeId")
}
}
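
A crate-internal sketch with hypothetical names (GLOBALS, FooRegistry): after this change each type gets a single Box::leak'd default instance per process, handed out as &'static:

static GLOBALS: TypeIdMap = TypeIdMap::new();

#[derive(Default)]
struct FooRegistry {
    // per-process shared state looked up by TypeId
}

fn foo_registry() -> &'static FooRegistry {
    GLOBALS.get_or_insert_default::<FooRegistry>()
}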


@@ -11,8 +11,60 @@ extern crate self as fayalite;
#[doc(hidden)]
pub use std as __std;
#[doc(hidden)]
#[macro_export]
macro_rules! __cfg_expansion_helper {
(
[
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
]
[
$cfg:ident($($expr:tt)*),
$($unevaluated_cfgs:ident($($unevaluated_exprs:tt)*),)*
]
// pass as tt so we get right span for attribute
$after_evaluation_attr:tt $after_evaluation_body:tt
) => {
#[$cfg($($expr)*)]
$crate::__cfg_expansion_helper! {
[
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
$cfg($($expr)*) = true,
]
[
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
]
$after_evaluation_attr $after_evaluation_body
}
#[$cfg(not($($expr)*))]
$crate::__cfg_expansion_helper! {
[
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
$cfg($($expr)*) = false,
]
[
$($unevaluated_cfgs($($unevaluated_exprs)*),)*
]
$after_evaluation_attr $after_evaluation_body
}
};
(
[
$($evaluated_cfgs:ident($($evaluated_exprs:tt)*) = $evaluated_results:ident,)*
]
[]
// don't use #[...] so we get right span for `#` and `[]` of attribute
{$($after_evaluation_attr:tt)*} {$($after_evaluation_body:tt)*}
) => {
$($after_evaluation_attr)*
#[__evaluated_cfgs([
$($evaluated_cfgs($($evaluated_exprs)*) = $evaluated_results,)*
])]
$($after_evaluation_body)*
};
}
#[doc(inline)]
#[doc(alias = "hdl")]
/// The `#[hdl_module]` attribute is applied to a Rust function so that that function creates
/// a [`Module`][`::fayalite::module::Module`] when called.
/// In the function body it will implicitly create a
@@ -21,26 +73,40 @@ pub use std as __std;
/// See [Fayalite Modules][crate::_docs::modules]
pub use fayalite_proc_macros::hdl_module;
#[doc(inline)]
pub use fayalite_proc_macros::hdl;
pub use bitvec;
/// struct used as a placeholder when applying defaults
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct __;
#[cfg(feature = "unstable-doc")]
pub mod _docs;
pub mod annotations;
pub mod array;
pub mod build;
pub mod bundle;
pub mod cli;
pub mod clock;
pub mod enum_;
pub mod expr;
pub mod firrtl;
pub mod formal;
pub mod int;
pub mod intern;
pub mod memory;
pub mod module;
pub mod phantom_const;
pub mod platform;
pub mod prelude;
pub mod reg;
pub mod reset;
pub mod sim;
pub mod source_location;
pub mod testing;
pub mod ty;
pub mod type_deduction;
pub mod util;
pub mod valueless;
pub mod vendor;
pub mod wire;
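
A hedged sketch of the #[hdl_module] attribute documented above, modeled on the crate's blinky/UART examples; connect(), the implicit m builder, and the ! operator on Expr<Bool> are assumed from the prelude and the rest of this diff:

use fayalite::prelude::*;

#[hdl_module]
fn inverter() {
    #[hdl]
    let din: Bool = m.input();
    #[hdl]
    let dout: Bool = m.output();
    connect(dout, !din);
}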


@@ -4,16 +4,16 @@
#![allow(clippy::multiple_bound_locations)]
use crate::{
annotations::{Annotation, IntoAnnotations, TargetedAnnotation},
array::{Array, ArrayType, ArrayTypeTrait, ValueArrayOrSlice},
bundle::{BundleType, BundleValue, DynBundle, DynBundleType},
clock::{Clock, ClockType},
expr::{Expr, ExprEnum, ExprTrait, Flow, ToExpr},
int::{DynUInt, DynUIntType, UInt, UIntType},
array::{Array, ArrayType},
bundle::{Bundle, BundleType},
clock::Clock,
expr::{Expr, Flow, ToExpr, ToLiteralBits, ops::BundleLiteral, repeat},
hdl,
int::{Bool, DynSize, Size, UInt, UIntType},
intern::{Intern, Interned},
module::ScopedNameId,
source_location::SourceLocation,
ty::{AsMask, DynCanonicalType, DynCanonicalValue, DynType, Type, Value},
type_deduction::HitUndeducedType,
ty::{AsMask, CanonicalType, Type},
util::DebugAsDisplay,
};
use bitvec::slice::BitSlice;
@@ -21,37 +21,37 @@ use std::{
cell::RefCell,
fmt,
hash::{Hash, Hasher},
num::NonZeroU32,
marker::PhantomData,
num::NonZeroUsize,
rc::Rc,
};
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ReadStruct<Element> {
pub addr: DynUInt,
pub en: UInt<1>,
#[hdl]
pub struct ReadStruct<Element, AddrWidth: Size> {
pub addr: UIntType<AddrWidth>,
pub en: Bool,
pub clk: Clock,
#[hdl(flip)]
pub data: Element,
}
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub struct WriteStruct<Element: Value> {
pub addr: DynUInt,
pub en: UInt<1>,
#[hdl]
pub struct WriteStruct<Element, AddrWidth: Size> {
pub addr: UIntType<AddrWidth>,
pub en: Bool,
pub clk: Clock,
pub data: Element,
pub mask: AsMask<Element>,
}
#[allow(clippy::multiple_bound_locations)]
#[derive(Value, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ReadWriteStruct<Element: Value> {
pub addr: DynUInt,
pub en: UInt<1>,
#[hdl]
pub struct ReadWriteStruct<Element, AddrWidth: Size> {
pub addr: UIntType<AddrWidth>,
pub en: Bool,
pub clk: Clock,
#[hdl(flip)]
pub rdata: Element,
pub wmode: UInt<1>,
pub wmode: Bool,
pub wdata: Element,
pub wmask: AsMask<Element>,
}
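
The same #[hdl] bundle pattern, with #[hdl(flip)] marking the field that flows the opposite way, also works for user-defined bundles; a minimal hedged sketch with a made-up StreamIO type:

use fayalite::prelude::*;

#[hdl]
pub struct StreamIO<T> {
    pub data: T,
    pub valid: Bool,
    #[hdl(flip)]
    pub ready: Bool,
}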
@@ -64,11 +64,10 @@ pub trait PortType:
sealed::Sealed + Clone + Eq + Hash + fmt::Debug + Send + Sync + 'static
{
type PortKindTy: Copy + Eq + Hash + fmt::Debug + Send + Sync + 'static;
type PortType: BundleType<Value = Self::PortValue>;
type PortValue: BundleValue<Type = Self::PortType>;
type Port: BundleType;
fn port_kind(port_kind: Self::PortKindTy) -> PortKind;
fn from_port_kind(port_kind: PortKind) -> Self::PortKindTy;
fn port_ty(port: &MemPort<Self>) -> Self::PortType;
fn port_ty(port: &MemPort<Self>) -> Self::Port;
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -80,8 +79,7 @@ impl sealed::Sealed for DynPortType {}
impl PortType for DynPortType {
type PortKindTy = PortKind;
type PortType = DynBundleType;
type PortValue = DynBundle;
type Port = Bundle;
fn port_kind(port_kind: Self::PortKindTy) -> PortKind {
port_kind
@@ -91,41 +89,38 @@ impl PortType for DynPortType {
port_kind
}
fn port_ty(port: &MemPort<Self>) -> Self::PortType {
match port.port_kind {
PortKind::ReadOnly => MemPort::<ReadStruct<DynCanonicalValue>>::from_canonical(*port)
.ty()
.canonical(),
PortKind::WriteOnly => MemPort::<WriteStruct<DynCanonicalValue>>::from_canonical(*port)
.ty()
.canonical(),
PortKind::ReadWrite => {
MemPort::<ReadWriteStruct<DynCanonicalValue>>::from_canonical(*port)
fn port_ty(port: &MemPort<Self>) -> Self::Port {
Bundle::new(match port.port_kind {
PortKind::ReadOnly => {
MemPort::<ReadStruct<CanonicalType, DynSize>>::from_canonical(*port)
.ty()
.canonical()
.fields()
}
}
PortKind::WriteOnly => {
MemPort::<WriteStruct<CanonicalType, DynSize>>::from_canonical(*port)
.ty()
.fields()
}
PortKind::ReadWrite => {
MemPort::<ReadWriteStruct<CanonicalType, DynSize>>::from_canonical(*port)
.ty()
.fields()
}
})
}
}
pub trait PortStruct:
BundleValue
+ sealed::Sealed
+ PortType<PortKindTy = (), PortType = <Self as ToExpr>::Type, PortValue = Self>
where
Self::Type: BundleType<Value = Self>,
{
type Element: Value<Type = Self::ElementType>;
type ElementType: Type<Value = Self::Element>;
pub trait PortStruct: BundleType + sealed::Sealed + PortType<PortKindTy = (), Port = Self> {
type Element: Type;
const PORT_KIND: PortKind;
fn addr(this: Expr<Self>) -> Expr<DynUInt>;
fn en(this: Expr<Self>) -> Expr<UInt<1>>;
fn addr(this: Expr<Self>) -> Expr<UInt>;
fn en(this: Expr<Self>) -> Expr<Bool>;
fn clk(this: Expr<Self>) -> Expr<Clock>;
fn rdata(this: Expr<Self>) -> Option<Expr<Self::Element>>;
fn wdata(this: Expr<Self>) -> Option<Expr<Self::Element>>;
fn wmask(this: Expr<Self>) -> Option<Expr<AsMask<Self::Element>>>;
fn wmode(this: Expr<Self>) -> Expr<UInt<1>>;
fn wmode(this: Expr<Self>) -> Expr<Bool>;
}
macro_rules! impl_port_struct {
@@ -138,19 +133,11 @@ macro_rules! impl_port_struct {
$($body:tt)*
}
) => {
impl<$Element: Value> sealed::Sealed for $Struct<$Element>
where
$Element::Type: Type<Value = $Element>,
{
}
impl<$Element: Type> sealed::Sealed for $Struct<$Element, DynSize> {}
impl<$Element: Value> PortType for $Struct<$Element>
where
$Element::Type: Type<Value = $Element>,
{
impl<$Element: Type> PortType for $Struct<$Element, DynSize> {
type PortKindTy = ();
type PortType = <Self as ToExpr>::Type;
type PortValue = Self;
type Port = Self;
fn port_kind(_port_kind: Self::PortKindTy) -> PortKind {
Self::PORT_KIND
@@ -165,18 +152,14 @@ macro_rules! impl_port_struct {
}
}
impl<$Element: Value> PortStruct for $Struct<$Element>
where
$Element::Type: Type<Value = $Element>,
{
impl<$Element: Type> PortStruct for $Struct<$Element, DynSize> {
type Element = $Element;
type ElementType = $Element::Type;
fn addr(this: Expr<Self>) -> Expr<DynUInt> {
fn addr(this: Expr<Self>) -> Expr<UInt> {
this.addr
}
fn en(this: Expr<Self>) -> Expr<UInt<1>> {
fn en(this: Expr<Self>) -> Expr<Bool> {
this.en
}
@@ -191,14 +174,9 @@ impl_port_struct! {
impl_port_struct! {
impl<Element> _ for ReadStruct {
fn port_ty(port: &MemPort<Self>) -> <MemPort<Self> as ToExpr>::Type {
type T<V> = <V as ToExpr>::Type;
T::<Self> {
addr: port.addr_type,
en: UIntType::new(),
clk: ClockType,
data: Element::Type::from_dyn_canonical_type(port.mem_element_type),
}
fn port_ty(port: &MemPort<Self>) -> Self {
let element = Element::from_canonical(port.mem_element_type);
ReadStruct[element][port.addr_type.width()]
}
const PORT_KIND: PortKind = PortKind::ReadOnly;
@@ -215,7 +193,7 @@ impl_port_struct! {
None
}
fn wmode(_this: Expr<Self>) -> Expr<UInt<1>> {
fn wmode(_this: Expr<Self>) -> Expr<Bool> {
false.to_expr()
}
}
@@ -223,16 +201,9 @@ impl_port_struct! {
impl_port_struct! {
impl<Element> _ for WriteStruct {
fn port_ty(port: &MemPort<Self>) -> <MemPort<Self> as ToExpr>::Type {
type T<V> = <V as ToExpr>::Type;
let element_ty = Element::Type::from_dyn_canonical_type(port.mem_element_type);
T::<Self> {
addr: port.addr_type,
en: UIntType::new(),
clk: ClockType,
mask: element_ty.mask_type(),
data: element_ty,
}
fn port_ty(port: &MemPort<Self>) -> Self {
let element = Element::from_canonical(port.mem_element_type);
WriteStruct[element][port.addr_type.width()]
}
const PORT_KIND: PortKind = PortKind::WriteOnly;
@@ -249,7 +220,7 @@ impl_port_struct! {
Some(this.mask)
}
fn wmode(_this: Expr<Self>) -> Expr<UInt<1>> {
fn wmode(_this: Expr<Self>) -> Expr<Bool> {
true.to_expr()
}
}
@@ -257,18 +228,9 @@ impl_port_struct! {
impl_port_struct! {
impl<Element> _ for ReadWriteStruct {
fn port_ty(port: &MemPort<Self>) -> <MemPort<Self> as ToExpr>::Type {
type T<V> = <V as ToExpr>::Type;
let element_ty = Element::Type::from_dyn_canonical_type(port.mem_element_type);
T::<Self> {
addr: port.addr_type,
en: UIntType::new(),
clk: ClockType,
rdata: element_ty.clone(),
wmode: UIntType::new(),
wmask: element_ty.mask_type(),
wdata: element_ty,
}
fn port_ty(port: &MemPort<Self>) -> Self {
let element = Element::from_canonical(port.mem_element_type);
ReadWriteStruct[element][port.addr_type.width()]
}
const PORT_KIND: PortKind = PortKind::ReadWrite;
@@ -285,7 +247,7 @@ impl_port_struct! {
Some(this.wmask)
}
fn wmode(this: Expr<Self>) -> Expr<UInt<1>> {
fn wmode(this: Expr<Self>) -> Expr<Bool> {
this.wmode
}
}
@@ -383,42 +345,31 @@ pub struct MemPort<T: PortType> {
source_location: SourceLocation,
port_kind: T::PortKindTy,
port_index: usize,
addr_type: DynUIntType,
mem_element_type: Interned<dyn DynCanonicalType>,
addr_type: UInt,
mem_element_type: CanonicalType,
}
impl<T: PortType> fmt::Debug for MemPort<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
mem_name,
source_location: _,
port_kind: _,
port_index: _,
addr_type,
mem_element_type,
} = self;
f.debug_struct("MemPort")
.field("mem_name", mem_name)
.field("port_name", &self.port_name())
.field("addr_type", addr_type)
.field("mem_element_type", mem_element_type)
.finish_non_exhaustive()
}
}
impl<T: PortType> ToExpr for MemPort<T> {
type Type = T::PortType;
fn ty(&self) -> Self::Type {
T::port_ty(self)
}
fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
Expr::new_unchecked(self.expr_enum())
f.write_str("MemPort(")?;
self.mem_name.fmt(f)?;
f.write_str(".")?;
self.port_name().fmt(f)?;
f.write_str(": ")?;
match self.port_kind() {
PortKind::ReadOnly => f.write_str("ReadStruct<")?,
PortKind::WriteOnly => f.write_str("WriteStruct<")?,
PortKind::ReadWrite => f.write_str("ReadWriteStruct<")?,
}
self.mem_element_type.fmt(f)?;
f.write_str(">)")
}
}
impl<T: PortType> MemPort<T> {
pub fn ty(&self) -> T::Port {
T::port_ty(self)
}
pub fn source_location(&self) -> SourceLocation {
self.source_location
}
@@ -437,10 +388,10 @@ impl<T: PortType> MemPort<T> {
index: self.port_index,
}
}
pub fn mem_element_type(&self) -> Interned<dyn DynCanonicalType> {
pub fn mem_element_type(&self) -> CanonicalType {
self.mem_element_type
}
pub fn addr_type(&self) -> DynUIntType {
pub fn addr_type(&self) -> UInt {
self.addr_type
}
pub fn canonical(&self) -> MemPort<DynPortType> {
@@ -464,7 +415,6 @@ impl<T: PortType> MemPort<T> {
pub fn from_canonical(port: MemPort<DynPortType>) -> Self
where
T: PortStruct,
T::Type: BundleType<Value = T>,
{
let MemPort {
mem_name,
@@ -489,11 +439,11 @@ impl<T: PortType> MemPort<T> {
mem_name: ScopedNameId,
source_location: SourceLocation,
port_name: PortName,
addr_type: DynUIntType,
mem_element_type: Interned<dyn DynCanonicalType>,
addr_type: UInt,
mem_element_type: CanonicalType,
) -> Self {
assert!(
mem_element_type.is_storable().unwrap_or(true),
mem_element_type.is_storable(),
"memory element type must be a storable type"
);
Self {
@@ -520,25 +470,25 @@ pub enum ReadUnderWrite {
Undefined,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
struct MemImpl<T: ArrayTypeTrait, P> {
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MemImpl<Element: Type, Len: Size, P> {
scoped_name: ScopedNameId,
source_location: SourceLocation,
array_type: T,
array_type: ArrayType<Element, Len>,
initial_value: Option<Interned<BitSlice>>,
ports: P,
read_latency: usize,
write_latency: NonZeroU32,
write_latency: NonZeroUsize,
read_under_write: ReadUnderWrite,
port_annotations: Interned<[TargetedAnnotation]>,
mem_annotations: Interned<[Annotation]>,
}
pub struct Mem<VA: ValueArrayOrSlice + ?Sized>(
Interned<MemImpl<ArrayType<VA>, Interned<[Interned<MemPort<DynPortType>>]>>>,
pub struct Mem<Element: Type = CanonicalType, Len: Size = DynSize>(
Interned<MemImpl<Element, Len, Interned<[MemPort<DynPortType>]>>>,
);
struct PortsDebug<'a>(&'a [Interned<MemPort<DynPortType>>]);
struct PortsDebug<'a>(&'a [MemPort<DynPortType>]);
impl fmt::Debug for PortsDebug<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -552,7 +502,7 @@ impl fmt::Debug for PortsDebug<'_> {
}
}
impl<VA: ValueArrayOrSlice + ?Sized> fmt::Debug for Mem<VA> {
impl<Element: Type, Len: Size> fmt::Debug for Mem<Element, Len> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let MemImpl {
scoped_name,
@@ -569,7 +519,12 @@ impl<VA: ValueArrayOrSlice + ?Sized> fmt::Debug for Mem<VA> {
f.debug_struct("Mem")
.field("name", scoped_name)
.field("array_type", array_type)
.field("initial_value", initial_value)
.field(
"initial_value",
&initial_value.as_ref().map(|initial_value| {
DebugMemoryData::from_bit_slice(*array_type, initial_value)
}),
)
.field("read_latency", read_latency)
.field("write_latency", write_latency)
.field("read_under_write", read_under_write)
@@ -580,54 +535,54 @@ impl<VA: ValueArrayOrSlice + ?Sized> fmt::Debug for Mem<VA> {
}
}
impl<VA: ValueArrayOrSlice + ?Sized> Hash for Mem<VA> {
impl<Element: Type, Len: Size> Hash for Mem<Element, Len> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
impl<VA: ValueArrayOrSlice + ?Sized> Eq for Mem<VA> {}
impl<Element: Type, Len: Size> Eq for Mem<Element, Len> {}
impl<VA: ValueArrayOrSlice + ?Sized> PartialEq for Mem<VA> {
impl<Element: Type, Len: Size> PartialEq for Mem<Element, Len> {
fn eq(&self, other: &Self) -> bool {
self.0 == other.0
}
}
impl<VA: ValueArrayOrSlice + ?Sized> Copy for Mem<VA> {}
impl<Element: Type, Len: Size> Copy for Mem<Element, Len> {}
impl<VA: ValueArrayOrSlice + ?Sized> Clone for Mem<VA> {
impl<Element: Type, Len: Size> Clone for Mem<Element, Len> {
fn clone(&self) -> Self {
*self
}
}
impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
impl<Element: Type, Len: Size> Mem<Element, Len> {
#[allow(clippy::too_many_arguments)]
#[track_caller]
pub fn new_unchecked(
scoped_name: ScopedNameId,
source_location: SourceLocation,
array_type: ArrayType<VA>,
array_type: ArrayType<Element, Len>,
initial_value: Option<Interned<BitSlice>>,
ports: Interned<[Interned<MemPort<DynPortType>>]>,
ports: Interned<[MemPort<DynPortType>]>,
read_latency: usize,
write_latency: NonZeroU32,
write_latency: NonZeroUsize,
read_under_write: ReadUnderWrite,
port_annotations: Interned<[TargetedAnnotation]>,
mem_annotations: Interned<[Annotation]>,
) -> Self {
if let Some(initial_value) = initial_value {
MemBuilder::<VA>::check_initial_value_bit_slice(
MemBuilder::<Element, Len>::check_initial_value_bit_slice(
array_type.element(),
Some(array_type.len()),
initial_value,
);
}
let addr_width = memory_addr_width(array_type.len());
let expected_mem_element_type = array_type.element().canonical_dyn();
let expected_mem_element_type = array_type.element().canonical();
assert!(
expected_mem_element_type.is_storable().unwrap_or(true),
expected_mem_element_type.is_storable(),
"memory element type must be a storable type"
);
for (index, port) in ports.iter().enumerate() {
@@ -638,7 +593,7 @@ impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
port_index,
addr_type,
mem_element_type,
} = **port;
} = *port;
assert_eq!(mem_name, scoped_name, "memory name must match with ports");
assert_eq!(
port_index, index,
@@ -660,7 +615,7 @@ impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
};
assert_eq!(
Some(port),
ports.get(port.port_index).map(|v| &**v),
ports.get(port.port_index),
"port on memory must match annotation's target base"
);
}
@@ -683,19 +638,19 @@ impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
pub fn source_location(self) -> SourceLocation {
self.0.source_location
}
pub fn array_type(self) -> ArrayType<VA> {
self.0.array_type.clone()
pub fn array_type(self) -> ArrayType<Element, Len> {
self.0.array_type
}
pub fn initial_value(self) -> Option<Interned<BitSlice>> {
self.0.initial_value
}
pub fn ports(self) -> Interned<[Interned<MemPort<DynPortType>>]> {
pub fn ports(self) -> Interned<[MemPort<DynPortType>]> {
self.0.ports
}
pub fn read_latency(self) -> usize {
self.0.read_latency
}
pub fn write_latency(self) -> NonZeroU32 {
pub fn write_latency(self) -> NonZeroUsize {
self.0.write_latency
}
pub fn read_under_write(self) -> ReadUnderWrite {
@@ -707,7 +662,7 @@ impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
pub fn mem_annotations(self) -> Interned<[Annotation]> {
self.0.mem_annotations
}
pub fn canonical(self) -> Mem<[DynCanonicalValue]> {
pub fn canonical(self) -> Mem {
let MemImpl {
scoped_name,
source_location,
@@ -720,7 +675,7 @@ impl<VA: ValueArrayOrSlice + ?Sized> Mem<VA> {
port_annotations,
mem_annotations,
} = *self.0;
let array_type = array_type.canonical();
let array_type = array_type.as_dyn_array();
Mem(Intern::intern_sized(MemImpl {
scoped_name,
source_location,
@@ -752,23 +707,23 @@ impl<T: fmt::Debug> fmt::Debug for MaybeSpecified<T> {
pub(crate) struct MemBuilderTarget {
pub(crate) scoped_name: ScopedNameId,
pub(crate) source_location: SourceLocation,
pub(crate) mem_element_type: Interned<dyn DynCanonicalType>,
pub(crate) mem_element_type: CanonicalType,
pub(crate) depth: Option<usize>,
pub(crate) initial_value: Option<Interned<BitSlice>>,
pub(crate) ports: Vec<Interned<MemPort<DynPortType>>>,
pub(crate) ports: Vec<MemPort<DynPortType>>,
pub(crate) read_latency: usize,
pub(crate) write_latency: NonZeroU32,
pub(crate) write_latency: NonZeroUsize,
pub(crate) read_under_write: ReadUnderWrite,
pub(crate) port_annotations: Vec<TargetedAnnotation>,
pub(crate) mem_annotations: Vec<Annotation>,
}
impl MemBuilderTarget {
pub(crate) fn make_memory(&self) -> Option<Mem<[DynCanonicalValue]>> {
pub(crate) fn make_memory(&self) -> Option<Mem> {
Some(Mem::new_unchecked(
self.scoped_name,
self.source_location,
ArrayType::new_slice(self.mem_element_type, self.depth?),
ArrayType::new_dyn(self.mem_element_type, self.depth?),
self.initial_value,
Intern::intern(&self.ports),
self.read_latency,
@@ -818,16 +773,18 @@ impl fmt::Debug for MemBuilderTarget {
}
}
pub struct MemBuilder<VA: ValueArrayOrSlice + ?Sized> {
mem_element_type: VA::ElementType,
pub struct MemBuilder<Element: Type, Len: Size = DynSize> {
mem_element_type: Element,
target: Rc<RefCell<MemBuilderTarget>>,
_phantom: PhantomData<Len>,
}
impl<VA: ValueArrayOrSlice + ?Sized> fmt::Debug for MemBuilder<VA> {
impl<Element: Type, Len: Size> fmt::Debug for MemBuilder<Element, Len> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
mem_element_type,
target,
_phantom: _,
} = &self;
target.borrow().debug_fmt("MemBuilder", mem_element_type, f)
}
@ -839,29 +796,28 @@ pub fn memory_addr_width(depth: usize) -> usize {
.map_or(usize::BITS, usize::ilog2) as usize
}
impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
impl<Element: Type, Len: Size> MemBuilder<Element, Len> {
#[track_caller]
fn check_initial_value_bit_slice(
mem_element_type: &VA::ElementType,
mem_element_type: Element,
depth: Option<usize>,
initial_value: Interned<BitSlice>,
) -> Interned<BitSlice> {
let element_bit_width = mem_element_type.canonical().bit_width();
if let Some(depth) = depth {
if let Ok(mem_element_bit_width) = mem_element_type.bit_width() {
let expected_len = depth.checked_mul(mem_element_bit_width).expect(
"memory must be small enough that its initializer bit length fits in usize",
);
assert_eq!(
expected_len,
initial_value.len(),
"Mem's initializer bit length doesn't match the expected value",
);
}
let expected_len = depth.checked_mul(element_bit_width).expect(
"memory must be small enough that its initializer bit length fits in usize",
);
assert_eq!(
expected_len,
initial_value.len(),
"Mem's initializer bit length doesn't match the expected value",
);
}
assert!(
initial_value
.len()
.checked_rem(mem_element_type.bit_width().unwrap_or(1))
.checked_rem(element_bit_width)
.unwrap_or(initial_value.len())
== 0,
"Mem's initializer bit length must be a multiple of the element type's bit width",
@ -870,14 +826,14 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
}
#[track_caller]
fn check_initial_value_expr(
mem_element_type: &VA::ElementType,
mem_element_type: &Element,
depth: Option<usize>,
initial_value: Expr<Array<[DynCanonicalValue]>>,
initial_value: Expr<Array>,
) -> Interned<BitSlice> {
let initial_value_ty = initial_value.canonical_type();
let initial_value_ty = Expr::ty(initial_value);
assert_eq!(
*mem_element_type,
<VA::ElementType as DynType>::from_dyn_canonical_type(*initial_value_ty.element()),
Element::from_canonical(initial_value_ty.element()),
"Mem's element type must match initializer's element type",
);
if let Some(depth) = depth {
@ -890,18 +846,9 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
let Ok(retval) = initial_value.to_literal_bits() else {
panic!("Mem's initializer must be convertible to literal bits");
};
let retval = match retval {
Ok(retval) => retval,
Err(HitUndeducedType { .. }) => {
todo!(
"Mem's initializer contains undeduced types, \
you can work around this by using only hdl(static) types"
)
}
};
debug_assert_eq!(
Ok(retval.len()),
initial_value_ty.bit_width(),
retval.len(),
initial_value_ty.type_properties().bit_width,
"initial value produced wrong literal bits length"
);
retval
@ -910,22 +857,22 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
pub(crate) fn new(
scoped_name: ScopedNameId,
source_location: SourceLocation,
mem_element_type: VA::ElementType,
mem_element_type: Element,
) -> (Self, Rc<RefCell<MemBuilderTarget>>) {
let canonical_mem_element_type = mem_element_type.canonical_dyn();
let canonical_mem_element_type = mem_element_type.canonical();
assert!(
canonical_mem_element_type.is_storable().unwrap_or(true),
canonical_mem_element_type.is_storable(),
"memory element type must be a storable type"
);
let target = Rc::new(RefCell::new(MemBuilderTarget {
scoped_name,
source_location,
mem_element_type: canonical_mem_element_type,
depth: VA::FIXED_LEN_TYPE.map(VA::len_from_len_type),
depth: Len::KNOWN_VALUE,
initial_value: None,
ports: vec![],
read_latency: 0,
write_latency: NonZeroU32::new(1).unwrap(),
write_latency: NonZeroUsize::new(1).unwrap(),
read_under_write: ReadUnderWrite::Old,
port_annotations: vec![],
mem_annotations: vec![],
@ -934,6 +881,7 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
Self {
mem_element_type,
target: Rc::clone(&target),
_phantom: PhantomData,
},
target,
)
@ -943,19 +891,19 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
&mut self,
source_location: SourceLocation,
port_kind: PortKind,
) -> Interned<MemPort<DynPortType>> {
) -> MemPort<DynPortType> {
let mut target = self.target.borrow_mut();
let Some(depth) = target.depth else {
panic!("MemBuilder::depth must be called before adding ports");
};
let port = Intern::intern_sized(MemPort {
let port = MemPort {
mem_name: target.scoped_name,
source_location,
port_kind,
port_index: target.ports.len(),
addr_type: DynUIntType::new(memory_addr_width(depth)),
addr_type: UInt::new(memory_addr_width(depth)),
mem_element_type: target.mem_element_type,
});
};
target.ports.push(port);
port
}
@ -964,50 +912,53 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
&mut self,
source_location: SourceLocation,
kind: PortKind,
) -> Expr<DynBundle> {
Expr::new_unchecked(ExprEnum::MemPort(self.new_port_impl(source_location, kind)))
) -> Expr<Bundle> {
self.new_port_impl(source_location, kind).to_expr()
}
#[track_caller]
pub fn new_port(&mut self, kind: PortKind) -> Expr<DynBundle> {
pub fn new_port(&mut self, kind: PortKind) -> Expr<Bundle> {
self.new_port_with_loc(SourceLocation::caller(), kind)
}
#[track_caller]
pub fn new_read_port_with_loc(
&mut self,
source_location: SourceLocation,
) -> Expr<ReadStruct<VA::Element>> {
Expr::new_unchecked(ExprEnum::MemPort(
self.new_port_impl(source_location, PortKind::ReadOnly),
))
) -> Expr<ReadStruct<Element, DynSize>> {
Expr::from_bundle(
self.new_port_impl(source_location, PortKind::ReadOnly)
.to_expr(),
)
}
#[track_caller]
pub fn new_read_port(&mut self) -> Expr<ReadStruct<VA::Element>> {
pub fn new_read_port(&mut self) -> Expr<ReadStruct<Element, DynSize>> {
self.new_read_port_with_loc(SourceLocation::caller())
}
#[track_caller]
pub fn new_write_port_with_loc(
&mut self,
source_location: SourceLocation,
) -> Expr<WriteStruct<VA::Element>> {
Expr::new_unchecked(ExprEnum::MemPort(
self.new_port_impl(source_location, PortKind::WriteOnly),
))
) -> Expr<WriteStruct<Element, DynSize>> {
Expr::from_bundle(
self.new_port_impl(source_location, PortKind::WriteOnly)
.to_expr(),
)
}
#[track_caller]
pub fn new_write_port(&mut self) -> Expr<WriteStruct<VA::Element>> {
pub fn new_write_port(&mut self) -> Expr<WriteStruct<Element, DynSize>> {
self.new_write_port_with_loc(SourceLocation::caller())
}
#[track_caller]
pub fn new_rw_port_with_loc(
&mut self,
source_location: SourceLocation,
) -> Expr<ReadWriteStruct<VA::Element>> {
Expr::new_unchecked(ExprEnum::MemPort(
self.new_port_impl(source_location, PortKind::ReadWrite),
))
) -> Expr<ReadWriteStruct<Element, DynSize>> {
Expr::from_bundle(
self.new_port_impl(source_location, PortKind::ReadWrite)
.to_expr(),
)
}
#[track_caller]
pub fn new_rw_port(&mut self) -> Expr<ReadWriteStruct<VA::Element>> {
pub fn new_rw_port(&mut self) -> Expr<ReadWriteStruct<Element, DynSize>> {
self.new_rw_port_with_loc(SourceLocation::caller())
}
pub fn scoped_name(&self) -> ScopedNameId {
@ -1016,7 +967,7 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
pub fn source_location(&self) -> SourceLocation {
self.target.borrow().source_location
}
pub fn get_mem_element_type(&self) -> &VA::ElementType {
pub fn get_mem_element_type(&self) -> &Element {
&self.mem_element_type
}
#[allow(clippy::result_unit_err)]
@ -1039,28 +990,28 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
target.depth = Some(depth);
}
#[allow(clippy::result_unit_err)]
pub fn get_array_type(&self) -> Result<ArrayType<VA>, ()> {
Ok(ArrayType::new_with_len(
self.mem_element_type.clone(),
self.get_depth()?,
pub fn get_array_type(&self) -> Result<ArrayType<Element, Len>, ()> {
Ok(ArrayType::new(
self.mem_element_type,
Len::from_usize(self.get_depth()?),
))
}
pub fn get_initial_value(&self) -> Option<Interned<BitSlice>> {
self.target.borrow().initial_value
}
#[track_caller]
pub fn initial_value(&mut self, initial_value: impl ToExpr<Type = ArrayType<VA>>) {
pub fn initial_value(&mut self, initial_value: impl ToExpr<Type = ArrayType<Element, Len>>) {
let mut target = self.target.borrow_mut();
if target.initial_value.is_some() {
panic!("can't set Mem's initial value more than once");
}
let initial_value = initial_value.to_expr().canonical();
let initial_value = Expr::as_dyn_array(initial_value.to_expr());
target.initial_value = Some(Self::check_initial_value_expr(
&self.mem_element_type,
target.depth,
initial_value,
));
target.depth = Some(initial_value.ty().len());
target.depth = Some(Expr::ty(initial_value).len());
}
#[track_caller]
pub fn initial_value_bit_slice(&mut self, initial_value: Interned<BitSlice>) {
@ -1069,14 +1020,13 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
panic!("can't set Mem's initial value more than once");
}
target.initial_value = Some(Self::check_initial_value_bit_slice(
&self.mem_element_type,
self.mem_element_type,
target.depth,
initial_value,
));
if let Ok(element_bit_width) = self.mem_element_type.bit_width() {
if element_bit_width != 0 {
target.depth = Some(initial_value.len() / element_bit_width);
}
let element_bit_width = self.mem_element_type.canonical().bit_width();
if element_bit_width != 0 {
target.depth = Some(initial_value.len() / element_bit_width);
}
}
pub fn get_read_latency(&self) -> usize {
@ -1085,10 +1035,10 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
pub fn read_latency(&mut self, read_latency: usize) {
self.target.borrow_mut().read_latency = read_latency;
}
pub fn get_write_latency(&self) -> NonZeroU32 {
pub fn get_write_latency(&self) -> NonZeroUsize {
self.target.borrow().write_latency
}
pub fn write_latency(&mut self, write_latency: NonZeroU32) {
pub fn write_latency(&mut self, write_latency: NonZeroUsize) {
self.target.borrow_mut().write_latency = write_latency;
}
pub fn get_read_under_write(&self) -> ReadUnderWrite {
@ -1105,3 +1055,92 @@ impl<VA: ValueArrayOrSlice + ?Sized> MemBuilder<VA> {
.extend(annotations.into_annotations());
}
}
pub fn splat_mask<T: Type>(ty: T, value: Expr<Bool>) -> Expr<AsMask<T>> {
let canonical_ty = ty.canonical();
match canonical_ty {
CanonicalType::UInt(_)
| CanonicalType::SInt(_)
| CanonicalType::Bool(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_)
| CanonicalType::Clock(_)
| CanonicalType::Enum(_)
| CanonicalType::DynSimOnly(_) => Expr::from_canonical(Expr::canonical(value)),
CanonicalType::Array(array) => Expr::from_canonical(Expr::canonical(repeat(
splat_mask(array.element(), value),
array.len(),
))),
CanonicalType::Bundle(bundle) => Expr::from_canonical(Expr::canonical(
BundleLiteral::new(
bundle.mask_type(),
bundle
.fields()
.iter()
.map(|field| splat_mask(field.ty, value))
.collect(),
)
.to_expr(),
)),
CanonicalType::PhantomConst(_) => Expr::from_canonical(Expr::canonical(().to_expr())),
}
}
pub trait DebugMemoryDataGetElement {
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice;
}
impl<'a, F: ?Sized + Fn(usize, Array) -> &'a BitSlice> DebugMemoryDataGetElement for &'a F {
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
self(element_index, array_type)
}
}
#[derive(Clone)]
pub struct DebugMemoryData<GetElement: DebugMemoryDataGetElement> {
pub array_type: Array,
pub get_element: GetElement,
}
impl DebugMemoryDataGetElement for &'_ BitSlice {
fn get_element(&self, element_index: usize, array_type: Array) -> &BitSlice {
assert!(element_index < array_type.len());
let stride = array_type.element().bit_width();
let start = element_index
.checked_mul(stride)
.expect("memory is too big");
let end = start.checked_add(stride).expect("memory is too big");
&self[start..end]
}
}
impl<'a> DebugMemoryData<&'a BitSlice> {
pub fn from_bit_slice<T: Type, Depth: Size>(
array_type: ArrayType<T, Depth>,
bit_slice: &'a BitSlice,
) -> Self {
let array_type = array_type.as_dyn_array();
assert_eq!(bit_slice.len(), array_type.type_properties().bit_width);
Self {
array_type,
get_element: bit_slice,
}
}
}
impl<GetElement: DebugMemoryDataGetElement> fmt::Debug for DebugMemoryData<GetElement> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.array_type.len() == 0 {
return f.write_str("[]");
}
writeln!(f, "[\n // len = {:#x}", self.array_type.len())?;
for element_index in 0..self.array_type.len() {
let element = crate::util::BitSliceWriteWithBase(
self.get_element.get_element(element_index, self.array_type),
);
writeln!(f, " [{element_index:#x}]: {element:#x},")?;
}
f.write_str("]")
}
}

File diff suppressed because it is too large

@ -1,5 +1,6 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub mod deduce_resets;
pub mod simplify_enums;
pub mod simplify_memories;
pub mod visit;

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -2,25 +2,24 @@
// See Notices.txt for copyright information
use crate::{
annotations::TargetedAnnotation,
array::{Array, ArrayType, ValueArrayOrSlice},
bundle::{BundleType, BundleValue, DynBundle},
expr::{Expr, ExprEnum, ToExpr},
int::{DynSInt, DynSIntType, DynUInt, DynUIntType},
array::Array,
bundle::{Bundle, BundleType},
expr::{CastBitsTo, CastToBits, Expr, ExprEnum, ToExpr},
int::{Bool, SInt, Size, UInt},
intern::{Intern, Interned},
memory::{Mem, MemPort, PortType},
module::{
Block, Id, Module, NameId, ScopedNameId, Stmt, StmtConnect, StmtWire,
transform::visit::{Fold, Folder},
Block, Module, NameId, NameIdGen, ScopedNameId, Stmt, StmtConnect, StmtWire,
},
source_location::SourceLocation,
ty::{DynCanonicalValue, DynType, Type, TypeEnum},
type_deduction::HitUndeducedType,
util::MakeMutSlice,
ty::{CanonicalType, Type},
util::{HashMap, MakeMutSlice},
wire::Wire,
};
use bitvec::{slice::BitSlice, vec::BitVec};
use hashbrown::HashMap;
use std::{
convert::Infallible,
fmt::Write,
ops::{Deref, DerefMut},
rc::Rc,
@ -28,26 +27,31 @@ use std::{
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
enum SingleType {
UInt(DynUIntType),
SInt(DynSIntType),
UIntArray(ArrayType<[DynUInt]>),
SIntArray(ArrayType<[DynSInt]>),
UInt(UInt),
SInt(SInt),
Bool(Bool),
UIntArray(Array<UInt>),
SIntArray(Array<SInt>),
BoolArray(Array<Bool>),
}
impl SingleType {
fn is_array_type(self, array_type: ArrayType<[DynCanonicalValue]>) -> bool {
fn is_array_type(self, array_type: Array) -> bool {
match self {
SingleType::UInt(_) | SingleType::SInt(_) => false,
SingleType::UIntArray(ty) => ty.canonical() == array_type,
SingleType::SIntArray(ty) => ty.canonical() == array_type,
SingleType::UInt(_) | SingleType::SInt(_) | SingleType::Bool(_) => false,
SingleType::UIntArray(ty) => ty.as_dyn_array() == array_type,
SingleType::SIntArray(ty) => ty.as_dyn_array() == array_type,
SingleType::BoolArray(ty) => ty.as_dyn_array() == array_type,
}
}
fn array_len(self) -> usize {
match self {
SingleType::UInt(_ty) => 1,
SingleType::SInt(_ty) => 1,
SingleType::Bool(_ty) => 1,
SingleType::UIntArray(ty) => ty.len(),
SingleType::SIntArray(ty) => ty.len(),
SingleType::BoolArray(ty) => ty.len(),
}
}
}
@ -57,8 +61,9 @@ enum MemSplit {
Bundle {
fields: Rc<[MemSplit]>,
},
PhantomConst,
Single {
output_mem: Option<Mem<[DynCanonicalValue]>>,
output_mem: Option<Mem>,
element_type: SingleType,
unchanged_element_type: bool,
},
@ -71,6 +76,7 @@ impl MemSplit {
fn mark_changed_element_type(self) -> Self {
match self {
MemSplit::Bundle { fields: _ } => self,
MemSplit::PhantomConst => self,
MemSplit::Single {
output_mem,
element_type,
@ -83,17 +89,18 @@ impl MemSplit {
MemSplit::Array { elements: _ } => self,
}
}
fn new(element_type: TypeEnum) -> Result<Self, HitUndeducedType> {
Ok(match element_type {
TypeEnum::BundleType(bundle_ty) => MemSplit::Bundle {
fn new(element_type: CanonicalType) -> Self {
match element_type {
CanonicalType::Bundle(bundle_ty) => MemSplit::Bundle {
fields: bundle_ty
.fields()
.into_iter()
.map(|field| Ok(Self::new(field.ty.type_enum())?.mark_changed_element_type()))
.collect::<Result<_, _>>()?,
.map(|field| Self::new(field.ty).mark_changed_element_type())
.collect(),
},
TypeEnum::ArrayType(ty) => {
let element = MemSplit::new(ty.element().type_enum())?;
CanonicalType::PhantomConst(_) => MemSplit::PhantomConst,
CanonicalType::Array(ty) => {
let element = MemSplit::new(ty.element());
if let Self::Single {
output_mem: _,
element_type,
@ -103,7 +110,7 @@ impl MemSplit {
match element_type {
SingleType::UInt(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::UIntArray(ArrayType::new_slice(
element_type: SingleType::UIntArray(Array::new_dyn(
element_type,
ty.len(),
)),
@ -111,7 +118,15 @@ impl MemSplit {
},
SingleType::SInt(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::SIntArray(ArrayType::new_slice(
element_type: SingleType::SIntArray(Array::new_dyn(
element_type,
ty.len(),
)),
unchanged_element_type,
},
SingleType::Bool(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::BoolArray(Array::new_dyn(
element_type,
ty.len(),
)),
@ -119,8 +134,8 @@ impl MemSplit {
},
SingleType::UIntArray(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::UIntArray(ArrayType::new_slice(
*element_type.element(),
element_type: SingleType::UIntArray(Array::new_dyn(
element_type.element(),
ty.len()
.checked_mul(element_type.len())
.expect("memory element type can't be too big"),
@ -129,8 +144,18 @@ impl MemSplit {
},
SingleType::SIntArray(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::SIntArray(ArrayType::new_slice(
*element_type.element(),
element_type: SingleType::SIntArray(Array::new_dyn(
element_type.element(),
ty.len()
.checked_mul(element_type.len())
.expect("memory element type can't be too big"),
)),
unchanged_element_type: false,
},
SingleType::BoolArray(element_type) => Self::Single {
output_mem: None,
element_type: SingleType::BoolArray(Array::new_dyn(
element_type.element(),
ty.len()
.checked_mul(element_type.len())
.expect("memory element type can't be too big"),
@ -145,27 +170,32 @@ impl MemSplit {
}
}
}
TypeEnum::UInt(ty) => Self::Single {
CanonicalType::UInt(ty) => Self::Single {
output_mem: None,
element_type: SingleType::UInt(ty),
unchanged_element_type: true,
},
TypeEnum::SInt(ty) => Self::Single {
CanonicalType::SInt(ty) => Self::Single {
output_mem: None,
element_type: SingleType::SInt(ty),
unchanged_element_type: true,
},
TypeEnum::EnumType(ty) => Self::Single {
CanonicalType::Bool(ty) => Self::Single {
output_mem: None,
element_type: SingleType::UInt(DynUIntType::new(ty.bit_width()?)),
element_type: SingleType::Bool(ty),
unchanged_element_type: true,
},
CanonicalType::Enum(ty) => Self::Single {
output_mem: None,
element_type: SingleType::UInt(UInt::new_dyn(ty.type_properties().bit_width)),
unchanged_element_type: false,
},
TypeEnum::Clock(_)
| TypeEnum::AsyncReset(_)
| TypeEnum::SyncReset(_)
| TypeEnum::Reset(_) => unreachable!("memory element type is a storable type"),
TypeEnum::Deduce(_) => return Err(HitUndeducedType),
})
CanonicalType::Clock(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
}
}
}
@ -174,9 +204,9 @@ struct MemState {
}
struct SplitState<'a> {
wire_rdata: Box<[Option<Expr<DynCanonicalValue>>]>,
wire_wdata: Box<[Option<Expr<DynCanonicalValue>>]>,
wire_wmask: Box<[Option<Expr<DynCanonicalValue>>]>,
wire_rdata: Box<[Option<Expr<CanonicalType>>]>,
wire_wdata: Box<[Option<Expr<CanonicalType>>]>,
wire_wmask: Box<[Option<Expr<CanonicalType>>]>,
initial_value: Option<Box<[&'a BitSlice]>>,
}
@ -224,7 +254,7 @@ impl<'a> SplitStateStack<'a> {
}
fn push_map(
&mut self,
mut wire_map: impl FnMut(Expr<DynCanonicalValue>) -> Expr<DynCanonicalValue>,
mut wire_map: impl FnMut(Expr<CanonicalType>) -> Expr<CanonicalType>,
mut initial_value_element_map: impl FnMut(&BitSlice) -> &BitSlice,
) {
let top_index = self.top_index + 1;
@ -261,10 +291,10 @@ impl<'a> SplitStateStack<'a> {
struct SplitMemState<'a, 'b> {
module_state: &'a mut ModuleState,
input_mem: Mem<[DynCanonicalValue]>,
output_mems: &'a mut Vec<Mem<[DynCanonicalValue]>>,
input_mem: Mem,
output_mems: &'a mut Vec<Mem>,
output_stmts: &'a mut Vec<Stmt>,
element_type: TypeEnum,
element_type: CanonicalType,
split: &'a mut MemSplit,
mem_name_path: &'a mut String,
split_state_stack: &'a mut SplitStateStack<'b>,
@ -272,11 +302,11 @@ struct SplitMemState<'a, 'b> {
}
impl SplitMemState<'_, '_> {
fn split_mem(self) -> Result<(), HitUndeducedType> {
fn split_mem(self) {
let outer_mem_name_path_len = self.mem_name_path.len();
match self.split {
MemSplit::Bundle { fields } => {
let TypeEnum::BundleType(bundle_type) = self.element_type else {
let CanonicalType::Bundle(bundle_type) = self.element_type else {
unreachable!();
};
for ((field, field_offset), split) in bundle_type
@ -288,10 +318,15 @@ impl SplitMemState<'_, '_> {
self.mem_name_path.truncate(outer_mem_name_path_len);
self.mem_name_path.push('_');
self.mem_name_path.push_str(&field.name);
let field_ty_bit_width = field.ty.bit_width()?;
let field_ty_bit_width = field.ty.bit_width();
self.split_state_stack.push_map(
|e: Expr<DynCanonicalValue>| e.with_type::<DynBundle>().field(&field.name),
|e: Expr<CanonicalType>| {
Expr::field(Expr::<Bundle>::from_canonical(e), &field.name)
},
|initial_value_element| {
let Some(field_offset) = field_offset.only_bit_width() else {
todo!("memory containing sim-only values");
};
&initial_value_element[field_offset..][..field_ty_bit_width]
},
);
@ -300,16 +335,17 @@ impl SplitMemState<'_, '_> {
input_mem: self.input_mem,
output_mems: self.output_mems,
output_stmts: self.output_stmts,
element_type: field.ty.type_enum(),
element_type: field.ty,
split,
mem_name_path: self.mem_name_path,
split_state_stack: self.split_state_stack,
mem_state: self.mem_state,
}
.split_mem()?;
.split_mem();
self.split_state_stack.pop();
}
}
MemSplit::PhantomConst => {}
MemSplit::Single {
output_mem,
element_type: single_type,
@ -322,14 +358,14 @@ impl SplitMemState<'_, '_> {
*single_type,
self.mem_name_path,
self.split_state_stack.top(),
)?;
);
for (port, wire) in new_mem
.ports()
.into_iter()
.zip(self.mem_state.replacement_ports.iter())
{
let port_expr = port.to_expr();
let wire_expr = Expr::<DynBundle>::new_unchecked(*wire);
let wire_expr = Expr::<Bundle>::from_canonical(wire.to_expr());
for name in [
Some("addr"),
Some("clk"),
@ -341,8 +377,8 @@ impl SplitMemState<'_, '_> {
};
self.output_stmts.push(
StmtConnect {
lhs: port_expr.field(name),
rhs: wire_expr.field(name),
lhs: Expr::field(port_expr, name),
rhs: Expr::field(wire_expr, name),
source_location: port.source_location(),
}
.into(),
@ -353,18 +389,16 @@ impl SplitMemState<'_, '_> {
self.output_mems.push(new_mem);
}
MemSplit::Array { elements } => {
let TypeEnum::ArrayType(array_type) = self.element_type else {
let CanonicalType::Array(array_type) = self.element_type else {
unreachable!();
};
let element_type = array_type.element().type_enum();
let element_bit_width = array_type.element().bit_width()?;
let element_type = array_type.element();
let element_bit_width = element_type.bit_width();
for (index, split) in elements.make_mut_slice().iter_mut().enumerate() {
self.mem_name_path.truncate(outer_mem_name_path_len);
write!(self.mem_name_path, "_{index}").unwrap();
self.split_state_stack.push_map(
|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()[index]
},
|e| Expr::<Array>::from_canonical(e)[index],
|initial_value_element| {
&initial_value_element[index * element_bit_width..][..element_bit_width]
},
@ -380,18 +414,16 @@ impl SplitMemState<'_, '_> {
split_state_stack: self.split_state_stack,
mem_state: self.mem_state,
}
.split_mem()?;
.split_mem();
self.split_state_stack.pop();
}
}
}
Ok(())
}
}
struct ModuleState {
output_module: Option<Interned<Module<DynBundle>>>,
name_id_gen: NameIdGen,
output_module: Option<Interned<Module<Bundle>>>,
memories: HashMap<ScopedNameId, MemState>,
}
@ -399,18 +431,18 @@ impl ModuleState {
#[allow(clippy::too_many_arguments)]
fn connect_split_mem_port_arrays(
output_stmts: &mut Vec<Stmt>,
input_array_types: &[ArrayType<[DynCanonicalValue]>],
input_array_types: &[Array],
memory_element_array_range_start: usize,
memory_element_array_range_len: usize,
wire_rdata: Option<Expr<DynCanonicalValue>>,
wire_wdata: Option<Expr<DynCanonicalValue>>,
wire_wmask: Option<Expr<DynCanonicalValue>>,
port_rdata: Option<Expr<Array<[DynCanonicalValue]>>>,
port_wdata: Option<Expr<Array<[DynCanonicalValue]>>>,
port_wmask: Option<Expr<Array<[DynCanonicalValue]>>>,
connect_rdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<DynCanonicalValue>, Expr<DynCanonicalValue>),
connect_wdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<DynCanonicalValue>, Expr<DynCanonicalValue>),
connect_wmask: impl Copy + Fn(&mut Vec<Stmt>, Expr<DynCanonicalValue>, Expr<DynCanonicalValue>),
wire_rdata: Option<Expr<CanonicalType>>,
wire_wdata: Option<Expr<CanonicalType>>,
wire_wmask: Option<Expr<CanonicalType>>,
port_rdata: Option<Expr<Array>>,
port_wdata: Option<Expr<Array>>,
port_wmask: Option<Expr<Array>>,
connect_rdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<CanonicalType>, Expr<CanonicalType>),
connect_wdata: impl Copy + Fn(&mut Vec<Stmt>, Expr<CanonicalType>, Expr<CanonicalType>),
connect_wmask: impl Copy + Fn(&mut Vec<Stmt>, Expr<CanonicalType>, Expr<CanonicalType>),
) {
let Some((input_array_type, input_array_types_rest)) = input_array_types.split_first()
else {
@ -432,8 +464,7 @@ impl ModuleState {
assert_eq!(memory_element_array_range_len % input_array_type.len(), 0);
let chunk_size = memory_element_array_range_len / input_array_type.len();
for index in 0..input_array_type.len() {
let map =
|e: Expr<DynCanonicalValue>| e.with_type::<Array<[DynCanonicalValue]>>()[index];
let map = |e| Expr::<Array>::from_canonical(e)[index];
let wire_rdata = wire_rdata.map(map);
let wire_wdata = wire_wdata.map(map);
let wire_wmask = wire_wmask.map(map);
@ -458,20 +489,20 @@ impl ModuleState {
fn connect_split_mem_port(
&mut self,
output_stmts: &mut Vec<Stmt>,
mut input_element_type: TypeEnum,
mut input_element_type: CanonicalType,
single_type: SingleType,
source_location: SourceLocation,
wire_rdata: Option<Expr<DynCanonicalValue>>,
wire_wdata: Option<Expr<DynCanonicalValue>>,
wire_wmask: Option<Expr<DynCanonicalValue>>,
port_rdata: Option<Expr<DynCanonicalValue>>,
port_wdata: Option<Expr<DynCanonicalValue>>,
port_wmask: Option<Expr<DynCanonicalValue>>,
) -> Result<(), HitUndeducedType> {
wire_rdata: Option<Expr<CanonicalType>>,
wire_wdata: Option<Expr<CanonicalType>>,
wire_wmask: Option<Expr<CanonicalType>>,
port_rdata: Option<Expr<CanonicalType>>,
port_wdata: Option<Expr<CanonicalType>>,
port_wmask: Option<Expr<CanonicalType>>,
) {
let mut input_array_types = vec![];
let connect_read = |output_stmts: &mut Vec<Stmt>,
wire_read: Expr<DynCanonicalValue>,
port_read: Expr<DynCanonicalValue>| {
wire_read: Expr<CanonicalType>,
port_read: Expr<CanonicalType>| {
output_stmts.push(
StmtConnect {
lhs: wire_read,
@ -482,8 +513,8 @@ impl ModuleState {
);
};
let connect_write = |output_stmts: &mut Vec<Stmt>,
wire_write: Expr<DynCanonicalValue>,
port_write: Expr<DynCanonicalValue>| {
wire_write: Expr<CanonicalType>,
port_write: Expr<CanonicalType>| {
output_stmts.push(
StmtConnect {
lhs: port_write,
@ -493,32 +524,34 @@ impl ModuleState {
.into(),
);
};
let connect_read_enum =
|output_stmts: &mut Vec<Stmt>,
wire_read: Expr<DynCanonicalValue>,
port_read: Expr<DynCanonicalValue>| {
connect_read(
output_stmts,
wire_read,
port_read
.with_type::<DynUInt>()
.cast_bits_to(wire_read.ty()),
);
};
let connect_read_enum = |output_stmts: &mut Vec<Stmt>,
wire_read: Expr<CanonicalType>,
port_read: Expr<CanonicalType>| {
connect_read(
output_stmts,
wire_read,
Expr::<UInt>::from_canonical(port_read).cast_bits_to(Expr::ty(wire_read)),
);
};
let connect_write_enum =
|output_stmts: &mut Vec<Stmt>,
wire_write: Expr<DynCanonicalValue>,
port_write: Expr<DynCanonicalValue>| {
wire_write: Expr<CanonicalType>,
port_write: Expr<CanonicalType>| {
connect_write(
output_stmts,
wire_write.cast_to_bits().to_canonical_dyn(),
Expr::canonical(wire_write.cast_to_bits()),
port_write,
);
};
loop {
match input_element_type {
TypeEnum::BundleType(_) => unreachable!("bundle types are always split"),
TypeEnum::EnumType(_)
CanonicalType::Bundle(_) => {
unreachable!("bundle types are always split")
}
CanonicalType::PhantomConst(_) => {
unreachable!("PhantomConst are always removed")
}
CanonicalType::Enum(_)
if input_array_types
.first()
.map(|&v| single_type.is_array_type(v))
@ -534,7 +567,7 @@ impl ModuleState {
connect_write(output_stmts, wire_wmask, port_wmask);
}
}
TypeEnum::EnumType(_) => Self::connect_split_mem_port_arrays(
CanonicalType::Enum(_) => Self::connect_split_mem_port_arrays(
output_stmts,
&input_array_types,
0,
@ -542,25 +575,19 @@ impl ModuleState {
wire_rdata,
wire_wdata,
wire_wmask,
port_rdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wmask.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_rdata.map(Expr::from_canonical),
port_wdata.map(Expr::from_canonical),
port_wmask.map(Expr::from_canonical),
connect_read_enum,
connect_write_enum,
connect_write_enum,
connect_write,
),
TypeEnum::ArrayType(array_type) => {
CanonicalType::Array(array_type) => {
input_array_types.push(array_type);
input_element_type = array_type.element().type_enum();
input_element_type = array_type.element();
continue;
}
TypeEnum::UInt(_) | TypeEnum::SInt(_)
CanonicalType::UInt(_) | CanonicalType::SInt(_) | CanonicalType::Bool(_)
if input_array_types
.first()
.map(|&v| single_type.is_array_type(v))
@ -576,62 +603,57 @@ impl ModuleState {
connect_write(output_stmts, wire_wmask, port_wmask);
}
}
TypeEnum::UInt(_) | TypeEnum::SInt(_) => Self::connect_split_mem_port_arrays(
output_stmts,
&input_array_types,
0,
single_type.array_len(),
wire_rdata,
wire_wdata,
wire_wmask,
port_rdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wdata.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
port_wmask.map(|e: Expr<DynCanonicalValue>| {
e.with_type::<Array<[DynCanonicalValue]>>()
}),
connect_read,
connect_write,
connect_write,
),
TypeEnum::Clock(_)
| TypeEnum::AsyncReset(_)
| TypeEnum::SyncReset(_)
| TypeEnum::Reset(_) => unreachable!("memory element type is a storable type"),
TypeEnum::Deduce(_) => return Err(HitUndeducedType),
CanonicalType::UInt(_) | CanonicalType::SInt(_) | CanonicalType::Bool(_) => {
Self::connect_split_mem_port_arrays(
output_stmts,
&input_array_types,
0,
single_type.array_len(),
wire_rdata,
wire_wdata,
wire_wmask,
port_rdata.map(Expr::from_canonical),
port_wdata.map(Expr::from_canonical),
port_wmask.map(Expr::from_canonical),
connect_read,
connect_write,
connect_write,
)
}
CanonicalType::Clock(_)
| CanonicalType::AsyncReset(_)
| CanonicalType::SyncReset(_)
| CanonicalType::Reset(_) => unreachable!("memory element type is a storable type"),
CanonicalType::DynSimOnly(_) => todo!("memory containing sim-only values"),
}
break;
}
Ok(())
}
fn create_split_mem(
&mut self,
input_mem: Mem<[DynCanonicalValue]>,
input_mem: Mem,
output_stmts: &mut Vec<Stmt>,
input_element_type: TypeEnum,
input_element_type: CanonicalType,
single_type: SingleType,
mem_name_path: &str,
split_state: &SplitState<'_>,
) -> Result<Mem<[DynCanonicalValue]>, HitUndeducedType> {
let mem_name = self.name_id_gen.gen(Intern::intern_owned(format!(
"{}{mem_name_path}",
input_mem.scoped_name().1 .0
)));
) -> Mem {
let mem_name = NameId(
Intern::intern_owned(format!("{}{mem_name_path}", input_mem.scoped_name().1.0)),
Id::new(),
);
let mem_name = ScopedNameId(input_mem.scoped_name().0, mem_name);
let output_element_type = match single_type {
SingleType::UInt(ty) => ty.canonical_dyn(),
SingleType::SInt(ty) => ty.canonical_dyn(),
SingleType::UIntArray(ty) => ty.canonical_dyn(),
SingleType::SIntArray(ty) => ty.canonical_dyn(),
SingleType::UInt(ty) => ty.canonical(),
SingleType::SInt(ty) => ty.canonical(),
SingleType::Bool(ty) => ty.canonical(),
SingleType::UIntArray(ty) => ty.canonical(),
SingleType::SIntArray(ty) => ty.canonical(),
SingleType::BoolArray(ty) => ty.canonical(),
};
let output_array_type =
ArrayType::new_slice(output_element_type, input_mem.array_type().len());
let output_array_bit_width = output_array_type.bit_width()?;
let output_array_type = Array::new_dyn(output_element_type, input_mem.array_type().len());
let initial_value = split_state.initial_value.as_ref().map(|initial_value| {
let mut bits = BitVec::with_capacity(output_array_bit_width);
let mut bits = BitVec::with_capacity(output_array_type.type_properties().bit_width);
for element in initial_value.iter() {
bits.extend_from_bitslice(element);
}
@ -648,7 +670,6 @@ impl ModuleState {
port.addr_type(),
output_element_type,
)
.intern_sized()
})
.collect();
let output_mem = Mem::new_unchecked(
@ -681,15 +702,15 @@ impl ModuleState {
let port_rdata = port
.port_kind()
.rdata_name()
.map(|name| port_expr.field(name));
.map(|name| Expr::field(port_expr, name));
let port_wdata = port
.port_kind()
.wdata_name()
.map(|name| port_expr.field(name));
.map(|name| Expr::field(port_expr, name));
let port_wmask = port
.port_kind()
.wmask_name()
.map(|name| port_expr.field(name));
.map(|name| Expr::field(port_expr, name));
self.connect_split_mem_port(
output_stmts,
input_element_type,
@ -701,18 +722,18 @@ impl ModuleState {
port_rdata,
port_wdata,
port_wmask,
)?;
);
}
Ok(output_mem)
output_mem
}
fn process_mem(
&mut self,
input_mem: Mem<[DynCanonicalValue]>,
output_mems: &mut Vec<Mem<[DynCanonicalValue]>>,
input_mem: Mem,
output_mems: &mut Vec<Mem>,
output_stmts: &mut Vec<Stmt>,
) -> Result<(), HitUndeducedType> {
let element_type = input_mem.array_type().element().type_enum();
let mut split = MemSplit::new(element_type)?;
) {
let element_type = input_mem.array_type().element();
let mut split = MemSplit::new(element_type);
let mem_state = match split {
MemSplit::Single {
ref mut output_mem,
@ -735,7 +756,8 @@ impl ModuleState {
..
}
| MemSplit::Bundle { .. }
| MemSplit::Array { .. } => {
| MemSplit::Array { .. }
| MemSplit::PhantomConst => {
let mut replacement_ports = Vec::with_capacity(input_mem.ports().len());
let mut wire_port_rdata = Vec::with_capacity(input_mem.ports().len());
let mut wire_port_wdata = Vec::with_capacity(input_mem.ports().len());
@ -744,38 +766,39 @@ impl ModuleState {
let port_ty = port.ty();
let NameId(mem_name, _) = input_mem.scoped_name().1;
let port_name = port.port_name();
let wire_name = self
.name_id_gen
.gen(Intern::intern_owned(format!("{mem_name}_{port_name}")));
let wire_name = NameId(
Intern::intern_owned(format!("{mem_name}_{port_name}")),
Id::new(),
);
let wire = Wire::new_unchecked(
ScopedNameId(input_mem.scoped_name().0, wire_name),
port.source_location(),
port_ty,
);
let wire_expr = wire.to_expr();
let canonical_wire = wire.to_dyn_canonical_wire();
let canonical_wire = wire.canonical();
output_stmts.push(
StmtWire {
annotations: Default::default(),
wire: canonical_wire.clone(),
wire: canonical_wire,
}
.into(),
);
replacement_ports.push(ExprEnum::Wire(canonical_wire.intern_sized()));
replacement_ports.push(ExprEnum::Wire(canonical_wire));
wire_port_rdata.push(
port.port_kind()
.rdata_name()
.map(|name| wire_expr.field(name)),
.map(|name| Expr::field(wire_expr, name)),
);
wire_port_wdata.push(
port.port_kind()
.wdata_name()
.map(|name| wire_expr.field(name)),
.map(|name| Expr::field(wire_expr, name)),
);
wire_port_wmask.push(
port.port_kind()
.wmask_name()
.map(|name| wire_expr.field(name)),
.map(|name| Expr::field(wire_expr, name)),
);
}
let mem_state = MemState {
@ -810,19 +833,18 @@ impl ModuleState {
),
mem_state: &mem_state,
}
.split_mem()?;
.split_mem();
mem_state
}
};
self.memories.insert(input_mem.scoped_name(), mem_state);
Ok(())
}
}
#[derive(Default)]
struct State {
modules: HashMap<Interned<Module<DynBundle>>, ModuleState>,
current_module: Option<Interned<Module<DynBundle>>>,
modules: HashMap<Interned<Module<Bundle>>, ModuleState>,
current_module: Option<Interned<Module<Bundle>>>,
}
impl State {
@ -834,11 +856,11 @@ impl State {
struct PushedState<'a> {
state: &'a mut State,
old_module: Option<Interned<Module<DynBundle>>>,
old_module: Option<Interned<Module<Bundle>>>,
}
impl<'a> PushedState<'a> {
fn push_module(state: &'a mut State, module: Interned<Module<DynBundle>>) -> Self {
fn push_module(state: &'a mut State, module: Interned<Module<Bundle>>) -> Self {
let old_module = state.current_module.replace(module);
Self { state, old_module }
}
@ -864,12 +886,9 @@ impl DerefMut for PushedState<'_> {
}
impl Folder for State {
type Error = HitUndeducedType;
type Error = Infallible;
fn fold_module<T: BundleValue>(&mut self, v: Module<T>) -> Result<Module<T>, Self::Error>
where
T::Type: BundleType<Value = T>,
{
fn fold_module<T: BundleType>(&mut self, v: Module<T>) -> Result<Module<T>, Self::Error> {
let module: Interned<_> = v.canonical().intern_sized();
if let Some(module_state) = self.modules.get(&module) {
return Ok(Module::from_canonical(
@ -882,8 +901,7 @@ impl Folder for State {
module,
ModuleState {
output_module: None,
name_id_gen: NameIdGen::for_module(*module),
memories: HashMap::new(),
memories: HashMap::default(),
},
);
let mut this = PushedState::push_module(self, module);
@ -892,10 +910,10 @@ impl Folder for State {
Ok(Module::from_canonical(*module))
}
fn fold_mem<VA: ValueArrayOrSlice + ?Sized>(
fn fold_mem<Element: Type, Len: Size>(
&mut self,
_v: Mem<VA>,
) -> Result<Mem<VA>, Self::Error> {
_v: Mem<Element, Len>,
) -> Result<Mem<Element, Len>, Self::Error> {
unreachable!()
}
@ -908,11 +926,13 @@ impl Folder for State {
let mut output_stmts = vec![];
let module_state = self.module_state();
for input_mem in input_mems {
module_state.process_mem(input_mem, &mut output_mems, &mut output_stmts)?;
}
for stmt in input_stmts {
output_stmts.push(stmt.fold(self)?);
module_state.process_mem(input_mem, &mut output_mems, &mut output_stmts);
}
output_stmts.extend(
input_stmts
.into_iter()
.map(|stmt| stmt.fold(self).unwrap_or_else(|v| match v {})),
);
Ok(Block {
memories: Intern::intern_owned(output_mems),
stmts: Intern::intern_owned(output_stmts),
@ -937,8 +957,8 @@ impl Folder for State {
}
}
pub fn simplify_memories(
module: Interned<Module<DynBundle>>,
) -> Result<Interned<Module<DynBundle>>, HitUndeducedType> {
module.fold(&mut State::default())
pub fn simplify_memories(module: Interned<Module<Bundle>>) -> Interned<Module<Bundle>> {
module
.fold(&mut State::default())
.unwrap_or_else(|v| match v {})
}


@ -2,33 +2,40 @@
// See Notices.txt for copyright information
#![allow(clippy::multiple_bound_locations)]
use crate::{
annotations::{Annotation, CustomFirrtlAnnotation, TargetedAnnotation},
array::{Array, ArrayType, ArrayTypeTrait, ValueArrayOrSlice},
bundle::{BundleType, BundleValue, DynBundle, DynBundleType, FieldType},
clock::{Clock, ClockType},
enum_::{DynEnum, DynEnumType, EnumType, EnumValue, VariantType},
annotations::{
Annotation, BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation, TargetedAnnotation,
},
array::ArrayType,
bundle::{Bundle, BundleField, BundleType},
clock::Clock,
enum_::{Enum, EnumType, EnumVariant},
expr::{
ops, Expr, ExprEnum, Literal, Target, TargetBase, TargetChild, TargetPathArrayElement,
TargetPathBundleField, TargetPathDynArrayElement, TargetPathElement, ToExpr,
},
int::{
DynInt, DynIntType, DynSInt, DynSIntType, DynUInt, DynUIntType, IntType, IntTypeTrait,
StaticOrDynIntType,
Expr, ExprEnum, ops,
target::{
Target, TargetBase, TargetChild, TargetPathArrayElement, TargetPathBundleField,
TargetPathDynArrayElement, TargetPathElement,
},
},
formal::FormalKind,
int::{Bool, SIntType, SIntValue, Size, UIntType, UIntValue},
intern::{Intern, Interned},
memory::{Mem, MemPort, PortKind, PortName, PortType, ReadUnderWrite},
module::{
AnnotatedModuleIO, Block, BlockId, ExternModuleBody, ExternModuleParameter,
ExternModuleParameterValue, Instance, Module, ModuleBody, ModuleIO, NameId,
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtIf, StmtInstance,
StmtMatch, StmtReg, StmtWire,
NormalModuleBody, ScopedNameId, Stmt, StmtConnect, StmtDeclaration, StmtFormal, StmtIf,
StmtInstance, StmtMatch, StmtReg, StmtWire,
},
phantom_const::PhantomConst,
reg::Reg,
reset::{AsyncReset, AsyncResetType, Reset, ResetType, SyncReset, SyncResetType},
reset::{AsyncReset, Reset, ResetType, SyncReset},
sim::{ExternModuleSimulation, value::DynSimOnly},
source_location::SourceLocation,
ty::{DynCanonicalType, DynCanonicalValue, DynType, Type, TypeEnum, Value, ValueEnum},
type_deduction::UndeducedType,
util::{ConstBool, GenericConstBool},
ty::{CanonicalType, Type},
vendor::xilinx::{
XdcCreateClockAnnotation, XdcIOStandardAnnotation, XdcLocationAnnotation, XilinxAnnotation,
},
wire::Wire,
};
use num_bigint::{BigInt, BigUint};
@ -474,7 +481,4 @@ impl<T: ?Sized + Visit<State>, State: ?Sized + Visitor> Visit<State> for &'_ mut
}
}
type InternedDynType = Interned<dyn DynType>;
type InternedDynCanonicalType = Interned<dyn DynCanonicalType>;
include!(concat!(env!("OUT_DIR"), "/visit.rs"));


@ -0,0 +1,417 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
expr::{
Expr, ToExpr,
ops::{ExprPartialEq, ExprPartialOrd},
},
int::Bool,
intern::{Intern, Interned, InternedCompare, LazyInterned, LazyInternedTrait, Memoize},
sim::value::{SimValue, SimValuePartialEq, ToSimValue, ToSimValueWithType},
source_location::SourceLocation,
ty::{
CanonicalType, OpaqueSimValueSlice, OpaqueSimValueWriter, OpaqueSimValueWritten,
StaticType, Type, TypeProperties, impl_match_variant_as_self,
serde_impls::{SerdeCanonicalType, SerdePhantomConst},
},
};
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{DeserializeOwned, Error},
};
use std::{
any::Any,
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
ops::Index,
};
#[derive(Clone)]
pub struct PhantomConstCanonicalValue {
parsed: serde_json::Value,
serialized: Interned<str>,
}
impl PhantomConstCanonicalValue {
pub fn from_json_value(parsed: serde_json::Value) -> Self {
let serialized = Intern::intern_owned(
serde_json::to_string(&parsed)
.expect("conversion from json value to text shouldn't fail"),
);
Self { parsed, serialized }
}
pub fn as_json_value(&self) -> &serde_json::Value {
&self.parsed
}
pub fn as_str(&self) -> Interned<str> {
self.serialized
}
}
impl fmt::Debug for PhantomConstCanonicalValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.serialized)
}
}
impl fmt::Display for PhantomConstCanonicalValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(&self.serialized)
}
}
impl PartialEq for PhantomConstCanonicalValue {
fn eq(&self, other: &Self) -> bool {
self.serialized == other.serialized
}
}
impl Eq for PhantomConstCanonicalValue {}
impl Hash for PhantomConstCanonicalValue {
fn hash<H: Hasher>(&self, state: &mut H) {
self.serialized.hash(state);
}
}
impl Serialize for PhantomConstCanonicalValue {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
self.parsed.serialize(serializer)
}
}
impl<'de> Deserialize<'de> for PhantomConstCanonicalValue {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
Ok(Self::from_json_value(serde_json::Value::deserialize(
deserializer,
)?))
}
}
pub trait PhantomConstValue: Intern + InternedCompare + Serialize + fmt::Debug {
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
where
D: serde::Deserializer<'de>;
}
impl<T> PhantomConstValue for T
where
T: ?Sized + Intern + InternedCompare + Serialize + fmt::Debug,
Interned<T>: DeserializeOwned,
{
fn deserialize_value<'de, D>(deserializer: D) -> Result<Interned<Self>, D::Error>
where
D: serde::Deserializer<'de>,
{
<Interned<T> as Deserialize<'de>>::deserialize(deserializer)
}
}
/// Wrapper type that allows any Rust value to be smuggled as a HDL [`Type`].
/// This only works for values that can be [serialized][Serialize] to and [deserialized][Deserialize] from [JSON][serde_json].
pub struct PhantomConst<T: ?Sized + PhantomConstValue = PhantomConstCanonicalValue> {
value: LazyInterned<T>,
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
pub struct PhantomConstWithoutGenerics;
#[allow(non_upper_case_globals)]
pub const PhantomConst: PhantomConstWithoutGenerics = PhantomConstWithoutGenerics;
impl<T: Type + PhantomConstValue> Index<T> for PhantomConstWithoutGenerics {
type Output = PhantomConst<T>;
fn index(&self, value: T) -> &Self::Output {
Interned::into_inner(PhantomConst::new(value.intern()).intern_sized())
}
}
impl<T: ?Sized + PhantomConstValue> fmt::Debug for PhantomConst<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("PhantomConst").field(&self.get()).finish()
}
}
impl<T: ?Sized + PhantomConstValue> Clone for PhantomConst<T> {
fn clone(&self) -> Self {
*self
}
}
impl<T: ?Sized + PhantomConstValue> Copy for PhantomConst<T> {}
impl<T: ?Sized + PhantomConstValue> PartialEq for PhantomConst<T> {
fn eq(&self, other: &Self) -> bool {
self.get() == other.get()
}
}
impl<T: ?Sized + PhantomConstValue> Eq for PhantomConst<T> {}
impl<T: ?Sized + PhantomConstValue> Hash for PhantomConst<T> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.get().hash(state);
}
}
struct PhantomConstCanonicalMemoize<T: ?Sized, const IS_FROM_CANONICAL: bool>(PhantomData<T>);
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Copy
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Clone
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
fn clone(&self) -> Self {
*self
}
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Eq
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> PartialEq
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
fn eq(&self, _other: &Self) -> bool {
true
}
}
impl<T: ?Sized, const IS_FROM_CANONICAL: bool> Hash
for PhantomConstCanonicalMemoize<T, IS_FROM_CANONICAL>
{
fn hash<H: Hasher>(&self, _state: &mut H) {}
}
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, false> {
type Input = Interned<T>;
type InputOwned = Interned<T>;
type Output = Interned<PhantomConstCanonicalValue>;
fn inner(self, input: &Self::Input) -> Self::Output {
Intern::intern_sized(PhantomConstCanonicalValue::from_json_value(
serde_json::to_value(input)
.expect("serialization failed when constructing a canonical PhantomConst"),
))
}
}
impl<T: ?Sized + PhantomConstValue> Memoize for PhantomConstCanonicalMemoize<T, true> {
type Input = Interned<PhantomConstCanonicalValue>;
type InputOwned = Interned<PhantomConstCanonicalValue>;
type Output = Interned<T>;
fn inner(self, input: &Self::Input) -> Self::Output {
PhantomConstValue::deserialize_value(input.as_json_value())
.expect("deserialization failed ")
}
}
impl<T: ?Sized + PhantomConstValue> PhantomConst<T> {
pub fn new(value: Interned<T>) -> Self {
Self {
value: LazyInterned::Interned(value),
}
}
pub const fn new_lazy(v: &'static dyn LazyInternedTrait<T>) -> Self {
Self {
value: LazyInterned::new_lazy(v),
}
}
pub fn get(self) -> Interned<T> {
self.value.interned()
}
pub fn type_properties(self) -> TypeProperties {
<()>::TYPE_PROPERTIES
}
pub fn can_connect(self, other: Self) -> bool {
self == other
}
pub fn canonical_phantom_const(self) -> PhantomConst {
if let Some(&retval) = <dyn Any>::downcast_ref::<PhantomConst>(&self) {
return retval;
}
<PhantomConst>::new(
PhantomConstCanonicalMemoize::<T, false>(PhantomData).get_owned(self.get()),
)
}
pub fn from_canonical_phantom_const(canonical_type: PhantomConst) -> Self {
if let Some(&retval) = <dyn Any>::downcast_ref::<Self>(&canonical_type) {
return retval;
}
Self::new(
PhantomConstCanonicalMemoize::<T, true>(PhantomData).get_owned(canonical_type.get()),
)
}
}
impl<T: ?Sized + PhantomConstValue> Type for PhantomConst<T> {
type BaseType = PhantomConst;
type MaskType = ();
type SimValue = PhantomConst<T>;
impl_match_variant_as_self!();
fn mask_type(&self) -> Self::MaskType {
()
}
fn canonical(&self) -> CanonicalType {
CanonicalType::PhantomConst(self.canonical_phantom_const())
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let CanonicalType::PhantomConst(phantom_const) = canonical_type else {
panic!("expected PhantomConst");
};
Self::from_canonical_phantom_const(phantom_const)
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
assert!(opaque.is_empty());
*self
}
fn sim_value_clone_from_opaque(
&self,
value: &mut Self::SimValue,
opaque: OpaqueSimValueSlice<'_>,
) {
assert!(opaque.is_empty());
assert_eq!(*value, *self);
}
fn sim_value_to_opaque<'w>(
&self,
value: &Self::SimValue,
writer: OpaqueSimValueWriter<'w>,
) -> OpaqueSimValueWritten<'w> {
assert_eq!(*value, *self);
writer.fill_cloned_from_slice(OpaqueSimValueSlice::empty())
}
}
impl<T: ?Sized + PhantomConstValue> Default for PhantomConst<T>
where
Interned<T>: Default,
{
fn default() -> Self {
Self::TYPE
}
}
impl<T: ?Sized + PhantomConstValue> StaticType for PhantomConst<T>
where
Interned<T>: Default,
{
const TYPE: Self = PhantomConst {
value: LazyInterned::new_lazy(&Interned::<T>::default),
};
const MASK_TYPE: Self::MaskType = ();
const TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
const MASK_TYPE_PROPERTIES: TypeProperties = <()>::TYPE_PROPERTIES;
}
type SerdeType<T> = SerdeCanonicalType<CanonicalType, SerdePhantomConst<Interned<T>>>;
impl<T: ?Sized + PhantomConstValue> Serialize for PhantomConst<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
SerdeType::<T>::PhantomConst(SerdePhantomConst(self.get())).serialize(serializer)
}
}
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for PhantomConst<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
match SerdeType::<T>::deserialize(deserializer)? {
SerdeCanonicalType::PhantomConst(SerdePhantomConst(value)) => Ok(Self::new(value)),
ty => Err(Error::invalid_value(
serde::de::Unexpected::Other(ty.as_serde_unexpected_str()),
&"a PhantomConst",
)),
}
}
}
impl<T: ?Sized + PhantomConstValue> ExprPartialEq<Self> for PhantomConst<T> {
fn cmp_eq(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
true.to_expr()
}
fn cmp_ne(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
false.to_expr()
}
}
impl<T: ?Sized + PhantomConstValue> ExprPartialOrd<Self> for PhantomConst<T> {
fn cmp_lt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
false.to_expr()
}
fn cmp_le(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
true.to_expr()
}
fn cmp_gt(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
false.to_expr()
}
fn cmp_ge(lhs: Expr<Self>, rhs: Expr<Self>) -> Expr<Bool> {
assert_eq!(Expr::ty(lhs), Expr::ty(rhs));
true.to_expr()
}
}
impl<T: ?Sized + PhantomConstValue> SimValuePartialEq<Self> for PhantomConst<T> {
fn sim_value_eq(this: &SimValue<Self>, other: &SimValue<Self>) -> bool {
assert_eq!(SimValue::ty(this), SimValue::ty(other));
true
}
}
impl<T: ?Sized + PhantomConstValue> ToSimValue for PhantomConst<T> {
type Type = PhantomConst<T>;
fn to_sim_value(&self) -> SimValue<Self::Type> {
SimValue::from_value(*self, *self)
}
}
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<PhantomConst<T>> for PhantomConst<T> {
fn to_sim_value_with_type(&self, ty: PhantomConst<T>) -> SimValue<PhantomConst<T>> {
SimValue::from_value(ty, *self)
}
}
impl<T: ?Sized + PhantomConstValue> ToSimValueWithType<CanonicalType> for PhantomConst<T> {
fn to_sim_value_with_type(&self, ty: CanonicalType) -> SimValue<CanonicalType> {
SimValue::into_canonical(SimValue::from_value(Self::from_canonical(ty), *self))
}
}
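A minimal usage sketch of the new PhantomConst type (an editor's illustration, not part of the commit); it mirrors the ClockInputProperties usage added elsewhere in this compare and assumes ordered_float::NotNan plus the fayalite prelude are in scope:

// wrap an interned, JSON-serializable value so it can ride along in the type system
let props = PhantomConst::new(
    ClockInputProperties {
        frequency: NotNan::new(1.0e8).expect("finite"),
    }
    .intern_sized(),
);
// the wrapped value can be read back out of the interner
assert_eq!(props.get().frequency.into_inner(), 1.0e8);
// PhantomConst carries no bits at runtime and only connects to an equal value
assert!(props.can_connect(props));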

File diff suppressed because it is too large

@ -0,0 +1,62 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{intern::Intern, prelude::*};
use ordered_float::NotNan;
use serde::{Deserialize, Serialize};
#[derive(Clone, PartialEq, Eq, Hash, Debug, Serialize, Deserialize)]
#[non_exhaustive]
pub struct ClockInputProperties {
pub frequency: NotNan<f64>,
}
#[hdl(no_runtime_generics, no_static)]
pub struct ClockInput {
pub clk: Clock,
pub properties: PhantomConst<ClockInputProperties>,
}
impl ClockInput {
#[track_caller]
pub fn new(frequency: f64) -> Self {
assert!(
frequency > 0.0 && frequency.is_finite(),
"invalid clock frequency: {frequency}"
);
Self {
clk: Clock,
properties: PhantomConst::new(
ClockInputProperties {
frequency: NotNan::new(frequency).expect("just checked"),
}
.intern_sized(),
),
}
}
pub fn frequency(self) -> f64 {
self.properties.get().frequency.into_inner()
}
}
#[hdl]
pub struct Led {
pub on: Bool,
}
#[hdl]
pub struct RgbLed {
pub r: Bool,
pub g: Bool,
pub b: Bool,
}
#[hdl]
/// UART, used as an output from the FPGA
pub struct Uart {
/// transmit from the FPGA's perspective
pub tx: Bool,
/// receive from the FPGA's perspective
#[hdl(flip)]
pub rx: Bool,
}
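A short usage sketch for the new peripheral types (an editor's illustration, using only the items defined above): ClockInput::new rejects non-finite or non-positive frequencies, so a 100 MHz board clock input is declared as:

// panics if the frequency is zero, negative, NaN, or infinite
let clk_in = ClockInput::new(100_000_000.0);
assert_eq!(clk_in.frequency(), 100_000_000.0);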


@ -0,0 +1,45 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub use crate::{
__,
annotations::{
BlackBoxInlineAnnotation, BlackBoxPathAnnotation, CustomFirrtlAnnotation,
DocStringAnnotation, DontTouchAnnotation, SVAttributeAnnotation,
},
array::{Array, ArrayType},
build::{BuildCli, JobParams, RunBuild},
bundle::Bundle,
clock::{Clock, ClockDomain, ToClock},
enum_::{Enum, HdlNone, HdlOption, HdlSome},
expr::{
CastBitsTo, CastTo, CastToBits, Expr, HdlPartialEq, HdlPartialOrd, MakeUninitExpr,
ReduceBits, ToExpr, repeat,
},
formal::{
MakeFormalExpr, all_const, all_seq, any_const, any_seq, formal_global_clock, formal_reset,
hdl_assert, hdl_assert_with_enable, hdl_assume, hdl_assume_with_enable, hdl_cover,
hdl_cover_with_enable,
},
hdl, hdl_module,
int::{Bool, DynSize, KnownSize, SInt, SIntType, SIntValue, Size, UInt, UIntType, UIntValue},
memory::{Mem, MemBuilder, ReadUnderWrite},
module::{
Instance, Module, ModuleBuilder, annotate, connect, connect_any, incomplete_wire, instance,
memory, memory_array, memory_with_init, reg_builder, wire,
},
phantom_const::PhantomConst,
platform::{DynPlatform, Platform, PlatformIOBuilder, peripherals},
reg::Reg,
reset::{AsyncReset, Reset, SyncReset, ToAsyncReset, ToReset, ToSyncReset},
sim::{
ExternModuleSimulationState, Simulation,
time::{SimDuration, SimInstant},
value::{SimOnly, SimOnlyValue, SimValue, ToSimValue, ToSimValueWithType},
},
source_location::SourceLocation,
testing::{FormalMode, assert_formal},
ty::{AsMask, CanonicalType, Type},
util::{ConstUsize, GenericConstUsize},
wire::Wire,
};
pub use bitvec::{slice::BitSlice, vec::BitVec};


@ -2,24 +2,25 @@
// See Notices.txt for copyright information
use crate::{
clock::ClockDomain,
expr::{Expr, ExprTrait, Flow, ToExpr},
expr::{Expr, Flow},
intern::Interned,
module::{NameId, ScopedNameId},
reset::{Reset, ResetType},
source_location::SourceLocation,
ty::{DynCanonicalType, DynType, Type},
ty::{CanonicalType, Type},
};
use std::fmt;
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct Reg<T: Type> {
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Reg<T: Type, R: ResetType = Reset> {
name: ScopedNameId,
source_location: SourceLocation,
ty: T,
clock_domain: Expr<ClockDomain>,
init: Option<Expr<T::Value>>,
clock_domain: Expr<ClockDomain<R>>,
init: Option<Expr<T>>,
}
impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
impl<T: Type + fmt::Debug, R: ResetType> fmt::Debug for Reg<T, R> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Self {
name,
@ -37,20 +38,8 @@ impl<T: Type + fmt::Debug> fmt::Debug for Reg<T> {
}
}
impl<T: Type> ToExpr for Reg<T> {
type Type = T;
fn ty(&self) -> Self::Type {
self.ty.clone()
}
fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
Expr::new_unchecked(self.expr_enum())
}
}
impl<T: Type> Reg<T> {
pub fn canonical(&self) -> Reg<T::CanonicalType> {
impl<T: Type, R: ResetType> Reg<T, R> {
pub fn canonical(&self) -> Reg<CanonicalType, R> {
let Self {
name,
source_location,
@ -66,52 +55,20 @@ impl<T: Type> Reg<T> {
init: init.map(Expr::canonical),
}
}
pub fn to_dyn_reg(&self) -> Reg<Interned<dyn DynType>> {
let Self {
name,
source_location,
ref ty,
clock_domain,
init,
} = *self;
Reg {
name,
source_location,
ty: ty.to_dyn(),
clock_domain,
init: init.map(Expr::to_dyn),
}
}
pub fn to_dyn_canonical_reg(&self) -> Reg<Interned<dyn DynCanonicalType>> {
let Self {
name,
source_location,
ref ty,
clock_domain,
init,
} = *self;
Reg {
name,
source_location,
ty: ty.canonical_dyn(),
clock_domain,
init: init.map(Expr::to_canonical_dyn),
}
}
#[track_caller]
pub fn new_unchecked(
scoped_name: ScopedNameId,
source_location: SourceLocation,
ty: T,
clock_domain: Expr<ClockDomain>,
init: Option<Expr<T::Value>>,
clock_domain: Expr<ClockDomain<R>>,
init: Option<Expr<T>>,
) -> Self {
assert!(
ty.is_storable().unwrap_or(true),
ty.canonical().is_storable(),
"register type must be a storable type"
);
if let Some(init) = init {
assert_eq!(ty, init.ty(), "register's type must match init type");
assert_eq!(ty, Expr::ty(init), "register's type must match init type");
}
Self {
name: scoped_name,
@ -121,6 +78,9 @@ impl<T: Type> Reg<T> {
init,
}
}
pub fn ty(&self) -> T {
self.ty
}
pub fn source_location(&self) -> SourceLocation {
self.source_location
}
@ -139,10 +99,10 @@ impl<T: Type> Reg<T> {
pub fn scoped_name(&self) -> ScopedNameId {
self.name
}
pub fn clock_domain(&self) -> Expr<ClockDomain> {
pub fn clock_domain(&self) -> Expr<ClockDomain<R>> {
self.clock_domain
}
pub fn init(&self) -> Option<Expr<T::Value>> {
pub fn init(&self) -> Option<Expr<T>> {
self.init
}
pub fn flow(&self) -> Flow {


@ -1,366 +1,183 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
expr::{Expr, ToExpr},
int::{UInt, UIntType},
intern::Interned,
clock::Clock,
expr::{Expr, ToExpr, ops},
int::{Bool, SInt, UInt},
source_location::SourceLocation,
ty::{
impl_match_values_as_self, CanonicalType, CanonicalTypeKind, CanonicalValue, Connect,
DynCanonicalType, StaticType, Type, TypeEnum, Value, ValueEnum,
CanonicalType, OpaqueSimValueSize, OpaqueSimValueSlice, OpaqueSimValueWriter,
OpaqueSimValueWritten, StaticType, Type, TypeProperties, impl_match_variant_as_self,
},
type_deduction::{HitUndeducedType, UndeducedType},
util::interned_bit,
};
use bitvec::slice::BitSlice;
use bitvec::{bits, order::Lsb0};
pub trait ResetTypeTrait: CanonicalType + StaticType<MaskType = UIntType<1>> {}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct AsyncResetType;
impl AsyncResetType {
pub const fn new() -> Self {
Self
}
mod sealed {
pub trait ResetTypeSealed {}
}
impl Connect<UndeducedType> for AsyncResetType {}
impl Type for AsyncResetType {
type Value = AsyncReset;
type CanonicalType = AsyncResetType;
type CanonicalValue = AsyncReset;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::AsyncReset(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
pub trait ResetType:
StaticType<MaskType = Bool>
+ sealed::ResetTypeSealed
+ ops::ExprCastTo<Bool>
+ ops::ExprCastTo<Reset>
+ ops::ExprCastTo<SyncReset>
+ ops::ExprCastTo<AsyncReset>
+ ops::ExprCastTo<Clock>
+ ops::ExprCastTo<UInt<1>>
+ ops::ExprCastTo<SInt<1>>
+ ops::ExprCastTo<UInt>
+ ops::ExprCastTo<SInt>
{
fn dispatch<D: ResetTypeDispatch>(input: D::Input<Self>, dispatch: D) -> D::Output<Self>;
}
impl Connect<Self> for AsyncResetType {}
pub trait ResetTypeDispatch: Sized {
type Input<T: ResetType>;
type Output<T: ResetType>;
impl CanonicalType for AsyncResetType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::AsyncReset;
fn reset(self, input: Self::Input<Reset>) -> Self::Output<Reset>;
fn sync_reset(self, input: Self::Input<SyncReset>) -> Self::Output<SyncReset>;
fn async_reset(self, input: Self::Input<AsyncReset>) -> Self::Output<AsyncReset>;
}
impl StaticType for AsyncResetType {
fn static_type() -> Self {
Self
}
}
macro_rules! reset_type {
($name:ident, $(#[$impl_trait:ident])? $Trait:ident::$trait_fn:ident, $is_castable_from_bits:literal, $dispatch_fn:ident) => {
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct $name;
impl ResetTypeTrait for AsyncResetType {}
impl Type for $name {
type BaseType = $name;
type MaskType = Bool;
type SimValue = bool;
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug)]
pub struct AsyncReset(pub bool);
impl_match_variant_as_self!();
impl ToExpr for AsyncReset {
type Type = AsyncResetType;
fn mask_type(&self) -> Self::MaskType {
Bool
}
fn ty(&self) -> Self::Type {
AsyncResetType
}
fn canonical(&self) -> CanonicalType {
CanonicalType::$name(*self)
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
fn source_location() -> SourceLocation {
SourceLocation::builtin()
}
impl Value for AsyncReset {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
Ok(interned_bit(this.0))
}
}
fn from_canonical(canonical_type: CanonicalType) -> Self {
let CanonicalType::$name(retval) = canonical_type else {
panic!("expected {}", stringify!($name));
};
retval
}
impl CanonicalValue for AsyncReset {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::AsyncReset(*this)
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
Ok(interned_bit(this.0))
}
}
fn sim_value_from_opaque(&self, opaque: OpaqueSimValueSlice<'_>) -> Self::SimValue {
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
opaque.bits()[0]
}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct SyncResetType;
fn sim_value_clone_from_opaque(
&self,
value: &mut Self::SimValue,
opaque: OpaqueSimValueSlice<'_>,
) {
assert_eq!(opaque.size(), OpaqueSimValueSize::from_bit_width(1));
*value = opaque.bits()[0];
}
impl SyncResetType {
pub const fn new() -> Self {
Self
}
}
impl Connect<UndeducedType> for SyncResetType {}
impl Type for SyncResetType {
type CanonicalType = SyncResetType;
type Value = SyncReset;
type CanonicalValue = SyncReset;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::SyncReset(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
}
impl Connect<Self> for SyncResetType {}
impl CanonicalType for SyncResetType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::SyncReset;
}
impl StaticType for SyncResetType {
fn static_type() -> Self {
Self
}
}
impl ResetTypeTrait for SyncResetType {}
#[derive(Copy, Clone, Eq, PartialEq, Hash, Debug, Default)]
pub struct SyncReset(pub bool);
impl ToExpr for SyncReset {
type Type = SyncResetType;
fn ty(&self) -> Self::Type {
SyncResetType
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
impl Value for SyncReset {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
Ok(interned_bit(this.0))
}
}
impl CanonicalValue for SyncReset {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::SyncReset(*this)
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
Ok(interned_bit(this.0))
}
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, Default)]
pub struct ResetType;
impl ResetType {
pub const fn new() -> Self {
Self
}
}
impl Connect<UndeducedType> for ResetType {}
impl Type for ResetType {
type Value = Reset;
type CanonicalType = ResetType;
type CanonicalValue = Reset;
type MaskType = UIntType<1>;
type MaskValue = UInt<1>;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
UIntType::new()
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
SourceLocation::builtin()
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::Reset(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
fn as_dyn_canonical_type_impl(this: &Self) -> Option<&dyn DynCanonicalType> {
Some(this)
}
}
impl Connect<Self> for ResetType {}
impl CanonicalType for ResetType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::Reset;
}
impl StaticType for ResetType {
fn static_type() -> Self {
Self
}
}
impl ResetTypeTrait for ResetType {}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum Reset {}
impl ToExpr for Reset {
type Type = ResetType;
fn ty(&self) -> Self::Type {
match *self {}
}
fn to_expr(&self) -> Expr<Self> {
Expr::from_value(self)
}
}
impl Value for Reset {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
match *this {}
}
}
impl CanonicalValue for Reset {
fn value_enum_impl(this: &Self) -> ValueEnum {
ValueEnum::Reset(*this)
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
match *this {}
}
}
macro_rules! make_to_reset {
(
$(#[from_value($from_value_ty:ty)])*
$vis:vis trait $Trait:ident {
fn $fn:ident(&self) -> Expr<$T:ty>;
fn sim_value_to_opaque<'w>(
&self,
value: &Self::SimValue,
writer: OpaqueSimValueWriter<'w>,
) -> OpaqueSimValueWritten<'w> {
assert_eq!(writer.size(), OpaqueSimValueSize::from_bit_width(1));
writer.fill_cloned_from_slice(OpaqueSimValueSlice::from_bitslice(
[bits![0], bits![1]][*value as usize],
))
}
}
) => {
$vis trait $Trait {
fn $fn(&self) -> Expr<$T>;
impl $name {
pub fn type_properties(self) -> TypeProperties {
Self::TYPE_PROPERTIES
}
pub fn can_connect(self, _rhs: Self) -> bool {
true
}
}
impl StaticType for $name {
const TYPE: Self = Self;
const MASK_TYPE: Self::MaskType = Bool;
const TYPE_PROPERTIES: TypeProperties = TypeProperties {
is_passive: true,
is_storable: false,
is_castable_from_bits: $is_castable_from_bits,
bit_width: 1,
sim_only_values_len: 0,
};
const MASK_TYPE_PROPERTIES: TypeProperties = Bool::TYPE_PROPERTIES;
}
impl sealed::ResetTypeSealed for $name {}
impl ResetType for $name {
fn dispatch<D: ResetTypeDispatch>(
input: D::Input<Self>,
dispatch: D,
) -> D::Output<Self> {
dispatch.$dispatch_fn(input)
}
}
pub trait $Trait {
fn $trait_fn(&self) -> Expr<$name>;
}
impl<T: ?Sized + $Trait> $Trait for &'_ T {
fn $fn(&self) -> Expr<$T> {
(**self).$fn()
fn $trait_fn(&self) -> Expr<$name> {
(**self).$trait_fn()
}
}
impl<T: ?Sized + $Trait> $Trait for &'_ mut T {
fn $fn(&self) -> Expr<$T> {
(**self).$fn()
fn $trait_fn(&self) -> Expr<$name> {
(**self).$trait_fn()
}
}
impl<T: ?Sized + $Trait> $Trait for Box<T> {
fn $fn(&self) -> Expr<$T> {
(**self).$fn()
fn $trait_fn(&self) -> Expr<$name> {
(**self).$trait_fn()
}
}
impl $Trait for Expr<$T> {
fn $fn(&self) -> Expr<$T> {
$($impl_trait $Trait for Expr<$name> {
fn $trait_fn(&self) -> Expr<$name> {
*self
}
}
impl $Trait for $T {
fn $fn(&self) -> Expr<$T> {
self.to_expr()
}
}
$(impl $Trait for $from_value_ty {
fn $fn(&self) -> Expr<$T> {
self.to_expr().$fn()
}
})*
})?
};
}
make_to_reset! {
#[from_value(SyncReset)]
#[from_value(AsyncReset)]
pub trait ToReset {
fn to_reset(&self) -> Expr<Reset>;
reset_type!(AsyncReset, #[impl] ToAsyncReset::to_async_reset, true, async_reset);
reset_type!(SyncReset, #[impl] ToSyncReset::to_sync_reset, true, sync_reset);
reset_type!(
Reset,
ToReset::to_reset,
false, // Reset is not castable from bits because we don't know if it's async or sync
reset
);
impl ToSyncReset for bool {
fn to_sync_reset(&self) -> Expr<SyncReset> {
self.to_expr().to_sync_reset()
}
}
make_to_reset! {
#[from_value(bool)]
#[from_value(UInt<1>)]
pub trait ToAsyncReset {
fn to_async_reset(&self) -> Expr<AsyncReset>;
}
}
make_to_reset! {
#[from_value(bool)]
#[from_value(UInt<1>)]
pub trait ToSyncReset {
fn to_sync_reset(&self) -> Expr<SyncReset>;
impl ToAsyncReset for bool {
fn to_async_reset(&self) -> Expr<AsyncReset> {
self.to_expr().to_async_reset()
}
}
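
Note: the reset_type! macro above generates `Reset`, `SyncReset`, and `AsyncReset` from one template, and `ResetTypeDispatch` is how generic code branches on which of the three it was instantiated with. A minimal sketch of a dispatcher, assuming `ResetType` and `ResetTypeDispatch` are reachable under `fayalite::reset`:

use fayalite::reset::{ResetType, ResetTypeDispatch, SyncReset};

struct ResetKindName;

impl ResetTypeDispatch for ResetKindName {
    type Input<T: ResetType> = ();
    type Output<T: ResetType> = &'static str;
    fn reset(self, (): ()) -> &'static str {
        "Reset"
    }
    fn sync_reset(self, (): ()) -> &'static str {
        "SyncReset"
    }
    fn async_reset(self, (): ()) -> &'static str {
        "AsyncReset"
    }
}

fn kind_name<R: ResetType>() -> &'static str {
    // dispatches to exactly one of the three methods above, picked by `R`
    R::dispatch((), ResetKindName)
}

fn main() {
    assert_eq!(kind_name::<SyncReset>(), "SyncReset");
}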

crates/fayalite/src/sim.rs (new file, 3038 lines)

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -0,0 +1,397 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use std::{
fmt,
ops::{Add, AddAssign, Sub, SubAssign},
time::Duration,
};
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct SimInstant {
time_since_start: SimDuration,
}
impl SimInstant {
pub const fn checked_add(self, duration: SimDuration) -> Option<Self> {
let Some(time_since_start) = self.time_since_start.checked_add(duration) else {
return None;
};
Some(SimInstant { time_since_start })
}
pub const fn checked_duration_since(self, earlier: Self) -> Option<SimDuration> {
self.time_since_start.checked_sub(earlier.time_since_start)
}
pub const fn checked_sub(self, duration: SimDuration) -> Option<Self> {
let Some(time_since_start) = self.time_since_start.checked_sub(duration) else {
return None;
};
Some(SimInstant { time_since_start })
}
#[track_caller]
pub const fn duration_since(self, earlier: Self) -> SimDuration {
let Some(retval) = self.checked_duration_since(earlier) else {
panic!(
"tried to compute the duration since a later time -- durations can't be negative"
);
};
retval
}
pub const fn saturating_duration_since(self, earlier: Self) -> SimDuration {
let Some(retval) = self.checked_duration_since(earlier) else {
return SimDuration::ZERO;
};
retval
}
}
impl Add<SimDuration> for SimInstant {
type Output = SimInstant;
#[track_caller]
fn add(mut self, rhs: SimDuration) -> Self::Output {
self += rhs;
self
}
}
impl AddAssign<SimDuration> for SimInstant {
#[track_caller]
fn add_assign(&mut self, rhs: SimDuration) {
self.time_since_start += rhs;
}
}
impl Add<SimInstant> for SimDuration {
type Output = SimInstant;
#[track_caller]
fn add(self, rhs: SimInstant) -> Self::Output {
rhs.add(self)
}
}
impl Sub for SimInstant {
type Output = SimDuration;
#[track_caller]
fn sub(self, rhs: SimInstant) -> Self::Output {
self.duration_since(rhs)
}
}
impl Sub<SimDuration> for SimInstant {
type Output = SimInstant;
#[track_caller]
fn sub(self, rhs: SimDuration) -> Self::Output {
let Some(retval) = self.checked_sub(rhs) else {
panic!("SimInstant underflow");
};
retval
}
}
impl SubAssign<SimDuration> for SimInstant {
#[track_caller]
fn sub_assign(&mut self, rhs: SimDuration) {
*self = *self - rhs;
}
}
impl SimInstant {
pub const START: SimInstant = SimInstant {
time_since_start: SimDuration::ZERO,
};
}
impl fmt::Debug for SimInstant {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.time_since_start.fmt(f)
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct SimDuration {
attos: u128,
}
impl AddAssign for SimDuration {
#[track_caller]
fn add_assign(&mut self, rhs: SimDuration) {
*self = *self + rhs;
}
}
impl Add for SimDuration {
type Output = SimDuration;
#[track_caller]
fn add(self, rhs: SimDuration) -> Self::Output {
SimDuration {
attos: self
.attos
.checked_add(rhs.attos)
.expect("overflow adding durations"),
}
}
}
impl Sub for SimDuration {
type Output = Self;
#[track_caller]
fn sub(self, rhs: Self) -> Self::Output {
SimDuration {
attos: self
.attos
.checked_add(rhs.attos)
.expect("underflow subtracting durations -- durations can't be negative"),
}
}
}
impl SubAssign for SimDuration {
#[track_caller]
fn sub_assign(&mut self, rhs: Self) {
*self = *self - rhs;
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Default)]
pub struct SimDurationParts {
pub attos: u16,
pub femtos: u16,
pub picos: u16,
pub nanos: u16,
pub micros: u16,
pub millis: u16,
pub secs: u128,
}
macro_rules! impl_duration_units {
(
$(
#[unit_const = $UNIT:ident, from_units = $from_units:ident, as_units = $as_units:ident, units = $units:ident, suffix = $suffix:literal]
const $log10_units_per_sec:ident: u32 = $log10_units_per_sec_value:expr;
)*
) => {
impl SimDuration {
$(
const $log10_units_per_sec: u32 = $log10_units_per_sec_value;
pub const fn $from_units($units: u128) -> Self {
Self::from_units_helper::<{ Self::$log10_units_per_sec }>($units)
}
pub const fn $as_units(self) -> u128 {
self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }
}
)*
pub const fn to_parts(mut self) -> SimDurationParts {
$(
let $units = self.attos / const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
self.attos %= const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
)*
SimDurationParts {
$($units: $units as _,)*
}
}
pub const fn from_parts_checked(parts: SimDurationParts) -> Option<Self> {
let attos = 0u128;
$(
let Some(product) = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) }.checked_mul(parts.$units as u128) else {
return None;
};
let Some(attos) = attos.checked_add(product) else {
return None;
};
)*
Some(Self {
attos,
})
}
}
impl fmt::Debug for SimDuration {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let ilog10_attos = match self.attos.checked_ilog10() {
Some(v) => v,
None => Self::LOG10_ATTOS_PER_SEC,
};
let (suffix, int, fraction, fraction_digits) =
match Self::LOG10_ATTOS_PER_SEC.saturating_sub(ilog10_attos) {
$(
..=Self::$log10_units_per_sec => {
let divisor = const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) };
(
$suffix,
self.attos / divisor,
self.attos % divisor,
(Self::LOG10_ATTOS_PER_SEC - Self::$log10_units_per_sec) as usize,
)
},
)*
_ => unreachable!(),
};
write!(f, "{int}")?;
if fraction != 0 {
write!(f, ".{fraction:0fraction_digits$}")?;
}
write!(f, " {suffix}")
}
}
#[cfg(test)]
#[test]
fn test_duration_debug() {
$(
assert_eq!(
format!("{:?}", SimDuration::$from_units(123)),
concat!("123 ", $suffix)
);
assert_eq!(
format!("{:?}", SimDuration::$from_units(1)),
concat!("1 ", $suffix),
);
let mut v = SimDuration::$from_units(1);
if v.attos < 1 << 53 {
v.attos += 1;
assert_eq!(
format!("{v:?}"),
format!("{} {}", v.attos as f64 / 10.0f64.powf((SimDuration::LOG10_ATTOS_PER_SEC - SimDuration::$log10_units_per_sec) as f64), $suffix),
"1 {} + 1 as == {} as", $suffix, v.attos,
);
}
)*
}
};
}
impl_duration_units! {
#[unit_const = SECOND, from_units = from_secs, as_units = as_secs, units = secs, suffix = "s"]
const LOG10_SECS_PER_SEC: u32 = 0;
#[unit_const = MILLISECOND, from_units = from_millis, as_units = as_millis, units = millis, suffix = "ms"]
const LOG10_MILLIS_PER_SEC: u32 = 3;
#[unit_const = MICROSECOND, from_units = from_micros, as_units = as_micros, units = micros, suffix = "μs"]
const LOG10_MICROS_PER_SEC: u32 = 6;
#[unit_const = NANOSECOND, from_units = from_nanos, as_units = as_nanos, units = nanos, suffix = "ns"]
const LOG10_NANOS_PER_SEC: u32 = 9;
#[unit_const = PICOSECOND, from_units = from_picos, as_units = as_picos, units = picos, suffix = "ps"]
const LOG10_PICOS_PER_SEC: u32 = 12;
#[unit_const = FEMTOSECOND, from_units = from_femtos, as_units = as_femtos, units = femtos, suffix = "fs"]
const LOG10_FEMTOS_PER_SEC: u32 = 15;
#[unit_const = ATTOSECOND, from_units = from_attos, as_units = as_attos, units = attos, suffix = "as"]
const LOG10_ATTOS_PER_SEC: u32 = 18;
}
impl SimDuration {
const fn from_units_helper<const UNITS_PER_SEC: u32>(units: u128) -> Self {
let Some(attos) =
units.checked_mul(const { 10u128.pow(Self::LOG10_ATTOS_PER_SEC - UNITS_PER_SEC) })
else {
panic!("duration too big");
};
Self { attos }
}
pub const ZERO: SimDuration = SimDuration::from_secs(0);
pub const fn from_parts(parts: SimDurationParts) -> Self {
match Self::from_parts_checked(parts) {
Some(v) => v,
None => panic!("duration too big"),
}
}
pub const fn abs_diff(self, other: Self) -> Self {
Self {
attos: self.attos.abs_diff(other.attos),
}
}
pub const fn checked_add(self, rhs: Self) -> Option<Self> {
let Some(attos) = self.attos.checked_add(rhs.attos) else {
return None;
};
Some(Self { attos })
}
pub const fn checked_sub(self, rhs: Self) -> Option<Self> {
let Some(attos) = self.attos.checked_sub(rhs.attos) else {
return None;
};
Some(Self { attos })
}
pub const fn is_zero(self) -> bool {
self.attos == 0
}
pub const fn saturating_add(self, rhs: Self) -> Self {
Self {
attos: self.attos.saturating_add(rhs.attos),
}
}
pub const fn saturating_sub(self, rhs: Self) -> Self {
Self {
attos: self.attos.saturating_sub(rhs.attos),
}
}
pub const fn checked_ilog10(self) -> Option<i32> {
let Some(ilog10_attos) = self.attos.checked_ilog10() else {
return None;
};
Some(ilog10_attos as i32 - Self::LOG10_ATTOS_PER_SEC as i32)
}
#[track_caller]
pub const fn ilog10(self) -> i32 {
let Some(retval) = self.checked_ilog10() else {
panic!("tried to take the ilog10 of 0");
};
retval
}
pub const fn checked_pow10(log10: i32, underflow_is_zero: bool) -> Option<Self> {
let Some(log10) = Self::LOG10_ATTOS_PER_SEC.checked_add_signed(log10) else {
return if log10 < 0 && underflow_is_zero {
Some(Self::ZERO)
} else {
None
};
};
let Some(attos) = 10u128.checked_pow(log10) else {
return None;
};
Some(Self { attos })
}
#[track_caller]
pub const fn pow10(log10: i32) -> Self {
let Some(retval) = Self::checked_pow10(log10, true) else {
panic!("pow10 overflowed");
};
retval
}
pub const fn is_power_of_ten(self) -> bool {
const TEN: u128 = 10;
const NUMBER_OF_POWERS_OF_TEN: usize = {
let mut n = 0;
while let Some(_) = TEN.checked_pow(n as u32) {
n += 1;
}
n
};
const POWERS_OF_TEN: [u128; NUMBER_OF_POWERS_OF_TEN] = {
let mut retval = [0; NUMBER_OF_POWERS_OF_TEN];
let mut i = 0;
while i < NUMBER_OF_POWERS_OF_TEN {
retval[i] = TEN.pow(i as u32);
i += 1;
}
retval
};
let mut i = 0;
while i < NUMBER_OF_POWERS_OF_TEN {
if self.attos == POWERS_OF_TEN[i] {
return true;
}
i += 1;
}
false
}
}
impl From<Duration> for SimDuration {
fn from(duration: Duration) -> Self {
Self::from_nanos(duration.as_nanos())
}
}
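
Note: simulation time is stored as an integer attosecond count, so the arithmetic below is exact; the Debug impl picks the largest unit whose integer part is nonzero. A minimal usage sketch of the API added above (paths as re-exported through the prelude):

use fayalite::sim::time::{SimDuration, SimInstant};

fn main() {
    let t0 = SimInstant::START;
    let t1 = t0 + SimDuration::from_micros(1) + SimDuration::from_nanos(500);
    assert_eq!(t1 - t0, SimDuration::from_nanos(1_500));

    // `to_parts` splits a duration into per-unit fields
    let parts = SimDuration::from_nanos(1_500).to_parts();
    assert_eq!((parts.micros, parts.nanos), (1, 500));

    // Debug formats with the largest unit whose integer part is nonzero
    assert_eq!(format!("{:?}", SimDuration::from_micros(123)), "123 μs");
}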

File diff suppressed because it is too large


@ -0,0 +1,304 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
//! `unsafe` parts of [`DynSimOnlyValue`]
use serde::{Serialize, de::DeserializeOwned};
use std::{
any::{self, TypeId},
fmt,
hash::{Hash, Hasher},
marker::PhantomData,
mem::ManuallyDrop,
rc::Rc,
};
pub trait SimOnlyValueTrait:
'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default
{
}
impl<T: 'static + Eq + Hash + fmt::Debug + Serialize + DeserializeOwned + Clone + Default>
SimOnlyValueTrait for T
{
}
/// Safety: `type_id_dyn` must return `TypeId::of::<T>()` where `Self = SimOnly<T>`
unsafe trait DynSimOnlyTrait: 'static + Send + Sync {
fn type_id_dyn(&self) -> TypeId;
fn type_name(&self) -> &'static str;
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait>;
fn deserialize_from_json_string(
&self,
json_str: &str,
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>>;
}
/// Safety: `type_id_dyn` is implemented correctly
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyTrait for SimOnly<T> {
fn type_id_dyn(&self) -> TypeId {
TypeId::of::<T>()
}
fn type_name(&self) -> &'static str {
any::type_name::<T>()
}
fn default_value(&self) -> Rc<dyn DynSimOnlyValueTrait> {
Rc::new(T::default())
}
fn deserialize_from_json_string(
&self,
json_str: &str,
) -> serde_json::Result<Rc<dyn DynSimOnlyValueTrait>> {
Ok(Rc::<T>::new(serde_json::from_str(json_str)?))
}
}
/// Safety:
/// * `type_id_dyn()` must return `TypeId::of::<Self>()`.
/// * `ty().type_id()` must return `TypeId::of::<Self>()`.
unsafe trait DynSimOnlyValueTrait: 'static + fmt::Debug {
fn type_id_dyn(&self) -> TypeId;
fn ty(&self) -> DynSimOnly;
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool;
fn serialize_to_json_string(&self) -> serde_json::Result<String>;
fn hash_dyn(&self, state: &mut dyn Hasher);
}
impl dyn DynSimOnlyValueTrait {
fn is<T: SimOnlyValueTrait>(&self) -> bool {
Self::type_id_dyn(self) == TypeId::of::<T>()
}
fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
if Self::is::<T>(self) {
// Safety: checked that `Self` is really `T`
Some(unsafe { &*(self as *const Self as *const T) })
} else {
None
}
}
fn downcast_rc<T: SimOnlyValueTrait>(self: Rc<Self>) -> Result<Rc<T>, Rc<Self>> {
if Self::is::<T>(&*self) {
// Safety: checked that `Self` is really `T`
Ok(unsafe { Rc::from_raw(Rc::into_raw(self) as *const T) })
} else {
Err(self)
}
}
}
/// Safety:
/// * `type_id_dyn()` returns `TypeId::of::<Self>()`.
/// * `ty().type_id()` returns `TypeId::of::<Self>()`.
unsafe impl<T: SimOnlyValueTrait> DynSimOnlyValueTrait for T {
fn type_id_dyn(&self) -> TypeId {
TypeId::of::<T>()
}
fn ty(&self) -> DynSimOnly {
DynSimOnly::of::<T>()
}
fn eq_dyn(&self, other: &dyn DynSimOnlyValueTrait) -> bool {
other.downcast_ref::<T>().is_some_and(|other| self == other)
}
fn serialize_to_json_string(&self) -> serde_json::Result<String> {
serde_json::to_string(self)
}
fn hash_dyn(&self, mut state: &mut dyn Hasher) {
self.hash(&mut state);
}
}
#[derive(Copy, Clone)]
pub struct DynSimOnly {
ty: &'static dyn DynSimOnlyTrait,
}
impl DynSimOnly {
pub const fn of<T: SimOnlyValueTrait>() -> Self {
Self {
ty: &const { SimOnly::<T>::new() },
}
}
pub fn type_id(self) -> TypeId {
self.ty.type_id_dyn()
}
pub fn type_name(self) -> &'static str {
self.ty.type_name()
}
pub fn is<T: SimOnlyValueTrait>(self) -> bool {
self.type_id() == TypeId::of::<T>()
}
pub fn downcast<T: SimOnlyValueTrait>(self) -> Option<SimOnly<T>> {
self.is::<T>().then_some(SimOnly::default())
}
pub fn deserialize_from_json_string(
self,
json_str: &str,
) -> serde_json::Result<DynSimOnlyValue> {
self.ty
.deserialize_from_json_string(json_str)
.map(DynSimOnlyValue)
}
pub fn default_value(self) -> DynSimOnlyValue {
DynSimOnlyValue(self.ty.default_value())
}
}
impl PartialEq for DynSimOnly {
fn eq(&self, other: &Self) -> bool {
Self::type_id(*self) == Self::type_id(*other)
}
}
impl Eq for DynSimOnly {}
impl Hash for DynSimOnly {
fn hash<H: Hasher>(&self, state: &mut H) {
Self::type_id(*self).hash(state);
}
}
impl fmt::Debug for DynSimOnly {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "SimOnly<{}>", self.ty.type_name())
}
}
impl<T: SimOnlyValueTrait> From<SimOnly<T>> for DynSimOnly {
fn from(value: SimOnly<T>) -> Self {
let SimOnly(PhantomData) = value;
Self::of::<T>()
}
}
/// the [`Type`][Type] for a value that can only be used in a Fayalite simulation; it can't be converted to FIRRTL
///
/// [Type]: crate::ty::Type
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct SimOnly<T: SimOnlyValueTrait>(PhantomData<fn(T) -> T>);
impl<T: SimOnlyValueTrait> fmt::Debug for SimOnly<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
DynSimOnly::of::<T>().fmt(f)
}
}
impl<T: SimOnlyValueTrait> SimOnly<T> {
pub const fn new() -> Self {
Self(PhantomData)
}
}
impl<T: SimOnlyValueTrait> Copy for SimOnly<T> {}
impl<T: SimOnlyValueTrait> Default for SimOnly<T> {
fn default() -> Self {
Self::new()
}
}
/// a value that can only be used in a Fayalite simulation; it can't be converted to FIRRTL
#[derive(Clone, Eq, PartialEq, Hash, Default, PartialOrd, Ord)]
pub struct SimOnlyValue<T: SimOnlyValueTrait>(Rc<T>);
impl<T: SimOnlyValueTrait> SimOnlyValue<T> {
pub fn with_dyn_ref<F: FnOnce(&DynSimOnlyValue) -> R, R>(&self, f: F) -> R {
// Safety: creating a copied `Rc<T>` is safe as long as the copy isn't dropped and isn't changed
// to point somewhere else; `f` can't change `dyn_ref` because it's only given a shared reference.
let dyn_ref =
unsafe { ManuallyDrop::new(DynSimOnlyValue(Rc::<T>::from_raw(Rc::as_ptr(&self.0)))) };
f(&dyn_ref)
}
pub fn from_rc(v: Rc<T>) -> Self {
Self(v)
}
pub fn new(v: T) -> Self {
Self(Rc::new(v))
}
pub fn into_inner(this: Self) -> Rc<T> {
this.0
}
pub fn inner_mut(this: &mut Self) -> &mut Rc<T> {
&mut this.0
}
pub fn inner(this: &Self) -> &Rc<T> {
&this.0
}
pub fn into_dyn(this: Self) -> DynSimOnlyValue {
DynSimOnlyValue::from(this)
}
}
impl<T: SimOnlyValueTrait> std::ops::Deref for SimOnlyValue<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<T: SimOnlyValueTrait> std::ops::DerefMut for SimOnlyValue<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
Rc::make_mut(&mut self.0)
}
}
#[derive(Clone)]
pub struct DynSimOnlyValue(Rc<dyn DynSimOnlyValueTrait>);
impl fmt::Debug for DynSimOnlyValue {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
<dyn DynSimOnlyValueTrait as fmt::Debug>::fmt(&*self.0, f)
}
}
impl PartialEq for DynSimOnlyValue {
fn eq(&self, other: &Self) -> bool {
DynSimOnlyValueTrait::eq_dyn(&*self.0, &*other.0)
}
}
impl Eq for DynSimOnlyValue {}
impl Hash for DynSimOnlyValue {
fn hash<H: Hasher>(&self, state: &mut H) {
DynSimOnlyValueTrait::hash_dyn(&*self.0, state);
}
}
impl<T: SimOnlyValueTrait> From<SimOnlyValue<T>> for DynSimOnlyValue {
fn from(value: SimOnlyValue<T>) -> Self {
Self(value.0)
}
}
impl DynSimOnlyValue {
pub fn ty(&self) -> DynSimOnly {
self.0.ty()
}
pub fn type_id(&self) -> TypeId {
self.0.type_id_dyn()
}
pub fn is<T: SimOnlyValueTrait>(&self) -> bool {
self.0.is::<T>()
}
pub fn downcast<T: SimOnlyValueTrait>(self) -> Result<SimOnlyValue<T>, DynSimOnlyValue> {
match <dyn DynSimOnlyValueTrait>::downcast_rc(self.0) {
Ok(v) => Ok(SimOnlyValue(v)),
Err(v) => Err(Self(v)),
}
}
pub fn downcast_ref<T: SimOnlyValueTrait>(&self) -> Option<&T> {
<dyn DynSimOnlyValueTrait>::downcast_ref(&*self.0)
}
pub fn serialize_to_json_string(&self) -> serde_json::Result<String> {
self.0.serialize_to_json_string()
}
}
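
Note: the point of the `unsafe` plumbing above is a small type-erasure layer: any `'static + Eq + Hash + Debug + Serialize + DeserializeOwned + Clone + Default` type can be wrapped, erased to `DynSimOnlyValue`, and recovered through `TypeId`-checked downcasts. A minimal sketch, assuming both wrappers are exported from `fayalite::sim::value`:

use fayalite::sim::value::{DynSimOnlyValue, SimOnlyValue};
use serde::{Deserialize, Serialize};

#[derive(Clone, Default, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct Counter {
    count: u32,
}

fn main() {
    let value = SimOnlyValue::new(Counter { count: 3 });

    // erase the concrete type ...
    let erased: DynSimOnlyValue = SimOnlyValue::into_dyn(value);

    // ... and get it back only if the `TypeId` matches
    assert_eq!(erased.downcast_ref::<Counter>(), Some(&Counter { count: 3 }));
    assert!(erased.downcast::<String>().is_err());
}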

File diff suppressed because it is too large


@ -2,9 +2,8 @@
// See Notices.txt for copyright information
use crate::{
intern::{Intern, Interned},
util::DebugAsDisplay,
util::{DebugAsDisplay, HashMap},
};
use hashbrown::HashMap;
use std::{cell::RefCell, fmt, num::NonZeroUsize, panic, path::Path};
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -97,7 +96,7 @@ impl NormalizeFilesForTestsState {
fn new() -> Self {
Self {
test_position: panic::Location::caller(),
file_pattern_matches: HashMap::new(),
file_pattern_matches: HashMap::default(),
}
}
}
@ -143,7 +142,7 @@ impl From<&'_ panic::Location<'_>> for SourceLocation {
map.entry_ref(file)
.or_insert_with(|| NormalizedFileForTestState {
file_name_id: NonZeroUsize::new(len + 1).unwrap(),
positions_map: HashMap::new(),
positions_map: HashMap::default(),
});
file_str = m.generate_file_name(file_state.file_name_id);
file = &file_str;


@ -0,0 +1,224 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
build::{
BaseJobArgs, BaseJobKind, GlobalParams, JobArgsAndDependencies, JobKindAndArgs, JobParams,
NoArgs, RunBuild,
external::{ExternalCommandArgs, ExternalCommandJobKind},
firrtl::{FirrtlArgs, FirrtlJobKind},
formal::{Formal, FormalAdditionalArgs, FormalArgs, WriteSbyFileJobKind},
verilog::{UnadjustedVerilogArgs, VerilogJobArgs, VerilogJobKind},
},
bundle::BundleType,
firrtl::ExportOptions,
module::Module,
util::HashMap,
};
use serde::{Deserialize, Serialize};
use std::{
fmt::{self, Write},
path::{Path, PathBuf},
process::Command,
sync::{Mutex, OnceLock},
};
#[derive(
clap::ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Hash, Default, Deserialize, Serialize,
)]
#[non_exhaustive]
pub enum FormalMode {
#[default]
BMC,
Prove,
Live,
Cover,
}
impl FormalMode {
pub fn as_str(self) -> &'static str {
match self {
FormalMode::BMC => "bmc",
FormalMode::Prove => "prove",
FormalMode::Live => "live",
FormalMode::Cover => "cover",
}
}
}
impl fmt::Display for FormalMode {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str(self.as_str())
}
}
#[derive(Deserialize)]
struct CargoMetadata {
target_directory: String,
}
fn get_cargo_target_dir() -> &'static Path {
static RETVAL: OnceLock<PathBuf> = OnceLock::new();
RETVAL.get_or_init(|| {
let output = Command::new(
std::env::var_os("CARGO")
.as_deref()
.unwrap_or("cargo".as_ref()),
)
.arg("metadata")
.output()
.expect("can't run `cargo metadata`");
if !output.status.success() {
panic!(
"can't run `cargo metadata`:\n{}\nexited with status: {}",
String::from_utf8_lossy(&output.stderr),
output.status
);
}
let CargoMetadata { target_directory } =
serde_json::from_slice(&output.stdout).expect("can't parse output of `cargo metadata`");
PathBuf::from(target_directory)
})
}
#[track_caller]
fn get_assert_formal_target_path(test_name: &dyn std::fmt::Display) -> PathBuf {
static DIRS: Mutex<Option<HashMap<String, u64>>> = Mutex::new(None);
let test_name = test_name.to_string();
// don't use line/column numbers since that constantly changes as you edit tests
let file = std::panic::Location::caller().file();
// simple reproducible hash
let simple_hash = file.bytes().chain(test_name.bytes()).fold(
((file.len() as u32) << 16).wrapping_add(test_name.len() as u32),
|mut h, b| {
h = h.wrapping_mul(0xaa0d184b);
h ^= h.rotate_right(5);
h ^= h.rotate_right(13);
h.wrapping_add(b as u32)
},
);
let mut dir = String::with_capacity(64);
for ch in Path::new(file)
.file_stem()
.unwrap_or_default()
.to_str()
.unwrap()
.chars()
.chain(['-'])
.chain(test_name.chars())
{
dir.push(match ch {
ch if ch.is_alphanumeric() => ch,
'_' | '-' | '+' | '.' | ',' | ' ' => ch,
_ => '_',
});
}
write!(dir, "-{simple_hash:08x}").unwrap();
let index = *DIRS
.lock()
.unwrap()
.get_or_insert_with(HashMap::default)
.entry_ref(&dir)
.and_modify(|v| *v += 1)
.or_insert(0);
write!(dir, "-{index}").unwrap();
get_cargo_target_dir()
.join("fayalite_assert_formal")
.join(dir)
}
fn make_assert_formal_args(
test_name: &dyn std::fmt::Display,
formal_mode: FormalMode,
formal_depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) -> eyre::Result<JobArgsAndDependencies<ExternalCommandJobKind<Formal>>> {
let args = JobKindAndArgs {
kind: BaseJobKind,
args: BaseJobArgs::from_output_dir_and_env(get_assert_formal_target_path(&test_name), None),
};
let dependencies = JobArgsAndDependencies {
args,
dependencies: (),
};
let args = JobKindAndArgs {
kind: FirrtlJobKind,
args: FirrtlArgs { export_options },
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: ExternalCommandJobKind::new(),
args: ExternalCommandArgs::resolve_program_path(
None,
UnadjustedVerilogArgs {
firtool_extra_args: vec![],
verilog_dialect: None,
verilog_debug: true,
},
)?,
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: VerilogJobKind,
args: VerilogJobArgs {},
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: WriteSbyFileJobKind,
args: FormalArgs {
sby_extra_args: vec![],
formal_mode,
formal_depth,
formal_solver: solver.unwrap_or(FormalArgs::DEFAULT_SOLVER).into(),
smtbmc_extra_args: vec![],
},
};
let dependencies = JobArgsAndDependencies { args, dependencies };
let args = JobKindAndArgs {
kind: ExternalCommandJobKind::new(),
args: ExternalCommandArgs::resolve_program_path(None, FormalAdditionalArgs {})?,
};
Ok(JobArgsAndDependencies { args, dependencies })
}
pub fn try_assert_formal<M: AsRef<Module<T>>, T: BundleType>(
test_name: impl std::fmt::Display,
module: M,
formal_mode: FormalMode,
formal_depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) -> eyre::Result<()> {
const APP_NAME: &'static str = "fayalite::testing::assert_formal";
make_assert_formal_args(
&test_name,
formal_mode,
formal_depth,
solver,
export_options,
)?
.run_without_platform(
|NoArgs {}| Ok(JobParams::new(module)),
&GlobalParams::new(None, APP_NAME),
)
}
#[track_caller]
pub fn assert_formal<M: AsRef<Module<T>>, T: BundleType>(
test_name: impl std::fmt::Display,
module: M,
formal_mode: FormalMode,
formal_depth: u64,
solver: Option<&str>,
export_options: ExportOptions,
) {
try_assert_formal(
test_name,
module,
formal_mode,
formal_depth,
solver,
export_options,
)
.expect("testing::assert_formal() failed");
}
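
Note: `assert_formal` is the test-facing entry point; it assembles the firrtl, verilog, and sby jobs shown above, runs them under target/fayalite_assert_formal/<sanitized test name>-<hash>-<index>, and panics if the proof fails. A hedged usage sketch (`my_module()` is a hypothetical `#[hdl_module]` under test, and `ExportOptions` is assumed to implement `Default`):

use fayalite::{firrtl::ExportOptions, prelude::*};

#[test]
fn my_module_bmc() {
    assert_formal(
        "my_module_bmc",          // test name, used to derive the output directory
        my_module(),              // hypothetical module constructor, not shown here
        FormalMode::BMC,          // or Prove / Live / Cover
        20,                       // formal depth (BMC bound)
        None,                     // None selects FormalArgs::DEFAULT_SOLVER
        ExportOptions::default(), // assumed Default impl
    );
}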

File diff suppressed because it is too large


@ -0,0 +1,135 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::{
array::Array,
bundle::{Bundle, BundleType},
clock::Clock,
enum_::{Enum, EnumType},
int::{Bool, SInt, UInt},
intern::Interned,
phantom_const::{PhantomConstCanonicalValue, PhantomConstValue},
prelude::PhantomConst,
reset::{AsyncReset, Reset, SyncReset},
sim::value::DynSimOnly,
ty::{BaseType, CanonicalType},
};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub(crate) struct SerdePhantomConst<T>(pub T);
impl<T: ?Sized + PhantomConstValue> Serialize for SerdePhantomConst<Interned<T>> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.0.serialize(serializer)
}
}
impl<'de, T: ?Sized + PhantomConstValue> Deserialize<'de> for SerdePhantomConst<Interned<T>> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
T::deserialize_value(deserializer).map(Self)
}
}
#[derive(Serialize, Deserialize)]
#[serde(rename = "CanonicalType")]
pub(crate) enum SerdeCanonicalType<
ArrayElement = CanonicalType,
ThePhantomConst = SerdePhantomConst<Interned<PhantomConstCanonicalValue>>,
> {
UInt {
width: usize,
},
SInt {
width: usize,
},
Bool,
Array {
element: ArrayElement,
len: usize,
},
Enum {
variants: Interned<[crate::enum_::EnumVariant]>,
},
Bundle {
fields: Interned<[crate::bundle::BundleField]>,
},
AsyncReset,
SyncReset,
Reset,
Clock,
PhantomConst(ThePhantomConst),
DynSimOnly(DynSimOnly),
}
impl<ArrayElement, PhantomConstInner> SerdeCanonicalType<ArrayElement, PhantomConstInner> {
pub(crate) fn as_serde_unexpected_str(&self) -> &'static str {
match self {
Self::UInt { .. } => "a UInt",
Self::SInt { .. } => "a SInt",
Self::Bool => "a Bool",
Self::Array { .. } => "an Array",
Self::Enum { .. } => "an Enum",
Self::Bundle { .. } => "a Bundle",
Self::AsyncReset => "an AsyncReset",
Self::SyncReset => "a SyncReset",
Self::Reset => "a Reset",
Self::Clock => "a Clock",
Self::PhantomConst(_) => "a PhantomConst",
Self::DynSimOnly(_) => "a SimOnlyValue",
}
}
}
impl<T: BaseType> From<T> for SerdeCanonicalType {
fn from(ty: T) -> Self {
let ty: CanonicalType = ty.into();
match ty {
CanonicalType::UInt(ty) => Self::UInt { width: ty.width() },
CanonicalType::SInt(ty) => Self::SInt { width: ty.width() },
CanonicalType::Bool(Bool {}) => Self::Bool,
CanonicalType::Array(ty) => Self::Array {
element: ty.element(),
len: ty.len(),
},
CanonicalType::Enum(ty) => Self::Enum {
variants: ty.variants(),
},
CanonicalType::Bundle(ty) => Self::Bundle {
fields: ty.fields(),
},
CanonicalType::AsyncReset(AsyncReset {}) => Self::AsyncReset,
CanonicalType::SyncReset(SyncReset {}) => Self::SyncReset,
CanonicalType::Reset(Reset {}) => Self::Reset,
CanonicalType::Clock(Clock {}) => Self::Clock,
CanonicalType::PhantomConst(ty) => Self::PhantomConst(SerdePhantomConst(ty.get())),
CanonicalType::DynSimOnly(ty) => Self::DynSimOnly(ty),
}
}
}
impl From<SerdeCanonicalType> for CanonicalType {
fn from(ty: SerdeCanonicalType) -> Self {
match ty {
SerdeCanonicalType::UInt { width } => Self::UInt(UInt::new(width)),
SerdeCanonicalType::SInt { width } => Self::SInt(SInt::new(width)),
SerdeCanonicalType::Bool => Self::Bool(Bool),
SerdeCanonicalType::Array { element, len } => Self::Array(Array::new(element, len)),
SerdeCanonicalType::Enum { variants } => Self::Enum(Enum::new(variants)),
SerdeCanonicalType::Bundle { fields } => Self::Bundle(Bundle::new(fields)),
SerdeCanonicalType::AsyncReset => Self::AsyncReset(AsyncReset),
SerdeCanonicalType::SyncReset => Self::SyncReset(SyncReset),
SerdeCanonicalType::Reset => Self::Reset(Reset),
SerdeCanonicalType::Clock => Self::Clock(Clock),
SerdeCanonicalType::PhantomConst(value) => {
Self::PhantomConst(PhantomConst::new(value.0))
}
SerdeCanonicalType::DynSimOnly(value) => Self::DynSimOnly(value),
}
}
}
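
Note: `SerdeCanonicalType` is a crate-private mirror of `CanonicalType`, and with no tag attribute its variants use serde's default externally tagged representation. A standalone sketch (deliberately not the crate's own types) of what that representation looks like in JSON:

use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug, PartialEq)]
enum Ty {
    UInt { width: usize },
    Bool,
    Array { element: Box<Ty>, len: usize },
}

fn main() {
    let ty = Ty::Array {
        element: Box::new(Ty::UInt { width: 8 }),
        len: 4,
    };
    let json = serde_json::to_string(&ty).unwrap();
    // externally tagged: the variant name wraps its fields
    assert_eq!(json, r#"{"Array":{"element":{"UInt":{"width":8}},"len":4}}"#);
    assert_eq!(serde_json::from_str::<Ty>(&json).unwrap(), ty);
}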


@ -1,136 +0,0 @@
use crate::{
expr::{Expr, ToExpr},
intern::Interned,
source_location::SourceLocation,
ty::{
impl_match_values_as_self, CanonicalType, CanonicalTypeKind, CanonicalValue, Connect, Type,
TypeEnum, Value, ValueEnum,
},
};
use bitvec::prelude::BitSlice;
use std::{
fmt,
sync::atomic::{AtomicU64, Ordering},
};
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub struct HitUndeducedType;
impl HitUndeducedType {
/// if either input is [`Ok(false)`][Ok], return [`Ok(false)`][Ok],
/// otherwise if either input is [`Err(_)`][Err], return [`Err(_)`][Err],
/// otherwise return [`Ok(true)`][Ok].
pub fn reduce_and(l: Result<bool, Self>, r: Result<bool, Self>) -> Result<bool, Self> {
match (l, r) {
(Ok(false), _) | (_, Ok(false)) => Ok(false),
(Err(e), _) | (_, Err(e)) => Err(e),
(Ok(true), Ok(true)) => Ok(true),
}
}
}
impl fmt::Display for HitUndeducedType {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("encountered a not-yet-deduced type")
}
}
impl std::error::Error for HitUndeducedType {}
#[derive(Copy, Debug, Clone, PartialEq, Eq, Hash)]
pub struct UndeducedType {
id: u64,
mask_ty_id: u64,
source_location: SourceLocation,
}
impl UndeducedType {
#[track_caller]
pub fn new() -> Self {
Self::new_with_loc(SourceLocation::caller())
}
pub fn new_with_loc(source_location: SourceLocation) -> Self {
static NEXT_ID: AtomicU64 = AtomicU64::new(0);
let id = NEXT_ID.fetch_add(2, Ordering::Relaxed);
let mask_ty_id = id + 1;
Self {
id,
mask_ty_id,
source_location,
}
}
pub fn id(self) -> u64 {
self.id
}
}
impl<T> Connect<T> for UndeducedType {}
impl Type for UndeducedType {
type CanonicalType = UndeducedType;
type Value = Deduce;
type CanonicalValue = Deduce;
type MaskType = UndeducedType;
type MaskValue = Deduce;
impl_match_values_as_self!();
fn mask_type(&self) -> Self::MaskType {
Self {
id: self.mask_ty_id,
mask_ty_id: self.mask_ty_id,
source_location: self.source_location,
}
}
fn canonical(&self) -> Self::CanonicalType {
*self
}
fn source_location(&self) -> SourceLocation {
self.source_location
}
fn type_enum(&self) -> TypeEnum {
TypeEnum::Deduce(*self)
}
fn from_canonical_type(t: Self::CanonicalType) -> Self {
t
}
}
impl CanonicalType for UndeducedType {
const CANONICAL_TYPE_KIND: CanonicalTypeKind = CanonicalTypeKind::Deduce;
}
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Deduce {}
impl ToExpr for Deduce {
type Type = UndeducedType;
fn ty(&self) -> Self::Type {
match *self {}
}
fn to_expr(&self) -> Expr<<Self::Type as Type>::Value> {
match *self {}
}
}
impl Value for Deduce {
fn to_canonical(&self) -> <Self::Type as Type>::CanonicalValue {
*self
}
}
impl CanonicalValue for Deduce {
fn value_enum_impl(this: &Self) -> ValueEnum {
match *this {}
}
fn to_bits_impl(this: &Self) -> Result<Interned<BitSlice>, HitUndeducedType> {
match *this {}
}
}


@ -1,12 +1,28 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
pub(crate) mod alternating_cell;
mod const_bool;
mod const_cmp;
mod const_usize;
mod misc;
mod scoped_ref;
pub(crate) mod streaming_read_utf8;
mod test_hasher;
// allow easily switching the hasher crate-wide for testing
#[cfg(feature = "unstable-test-hasher")]
pub type DefaultBuildHasher = test_hasher::DefaultBuildHasher;
#[cfg(not(feature = "unstable-test-hasher"))]
pub(crate) type DefaultBuildHasher = hashbrown::DefaultHashBuilder;
pub(crate) type HashMap<K, V> = hashbrown::HashMap<K, V, DefaultBuildHasher>;
pub(crate) type HashSet<T> = hashbrown::HashSet<T, DefaultBuildHasher>;
#[doc(inline)]
pub use const_bool::{ConstBool, ConstBoolDispatch, ConstBoolDispatchTag, GenericConstBool};
#[doc(inline)]
pub use const_usize::{ConstUsize, GenericConstUsize};
#[doc(inline)]
pub use const_cmp::{
@ -14,7 +30,19 @@ pub use const_cmp::{
const_usize_cmp,
};
#[doc(inline)]
pub use scoped_ref::ScopedRef;
#[doc(inline)]
pub use misc::{
interned_bit, iter_eq_by, BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice,
BitSliceWriteWithBase, DebugAsDisplay, DebugAsRawString, MakeMutSlice, RcWriter,
SerdeJsonEscapeIf, SerdeJsonEscapeIfFormatter, SerdeJsonEscapeIfTest,
SerdeJsonEscapeIfTestResult, interned_bit, iter_eq_by, os_str_strip_prefix,
os_str_strip_suffix, serialize_to_json_ascii, serialize_to_json_ascii_pretty,
serialize_to_json_ascii_pretty_with_indent, slice_range, try_slice_range,
};
pub(crate) use misc::{InternedStrCompareAsStr, chain};
pub mod job_server;
pub mod prefix_sum;
pub mod ready_valid;
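
Note: making `DefaultBuildHasher` a crate-wide alias (swappable through the `unstable-test-hasher` feature) is why call sites elsewhere in this diff move from `HashMap::new()` to `HashMap::default()`: `new()` is only provided for the default hasher type, while `default()` works for any hasher that implements `Default`. A small standalone illustration using std's `HashMap` (hashbrown behaves the same way):

use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::BuildHasherDefault;

// a map type with a non-default hasher, analogous to crate::util::HashMap
type Map<K, V> = HashMap<K, V, BuildHasherDefault<DefaultHasher>>;

fn main() {
    // `Map::new()` would not compile: `new()` is only defined for the default
    // RandomState hasher, but `default()` only needs the hasher to be `Default`.
    let mut m: Map<&str, u32> = Map::default();
    m.insert("answer", 42);
    assert_eq!(m["answer"], 42);
}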


@ -0,0 +1,122 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use crate::util::DebugAsDisplay;
use std::{
cell::{Cell, UnsafeCell},
fmt,
};
pub(crate) trait AlternatingCellMethods {
fn unique_to_shared(&mut self);
fn shared_to_unique(&mut self);
}
#[derive(Copy, Clone, Debug)]
enum State {
Unique,
Shared,
Locked,
}
pub(crate) struct AlternatingCell<T: ?Sized> {
state: Cell<State>,
value: UnsafeCell<T>,
}
impl<T: ?Sized + fmt::Debug + AlternatingCellMethods> fmt::Debug for AlternatingCell<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("AlternatingCell")
.field(
self.try_share()
.as_ref()
.map(|v| -> &dyn fmt::Debug { v })
.unwrap_or(&DebugAsDisplay("<...>")),
)
.finish()
}
}
impl<T: ?Sized> AlternatingCell<T> {
pub(crate) const fn new_shared(value: T) -> Self
where
T: Sized,
{
Self {
state: Cell::new(State::Shared),
value: UnsafeCell::new(value),
}
}
pub(crate) const fn new_unique(value: T) -> Self
where
T: Sized,
{
Self {
state: Cell::new(State::Unique),
value: UnsafeCell::new(value),
}
}
pub(crate) fn is_unique(&self) -> bool {
matches!(self.state.get(), State::Unique)
}
pub(crate) fn is_shared(&self) -> bool {
matches!(self.state.get(), State::Shared)
}
pub(crate) fn into_inner(self) -> T
where
T: Sized,
{
self.value.into_inner()
}
pub(crate) fn try_share(&self) -> Option<&T>
where
T: AlternatingCellMethods,
{
match self.state.get() {
State::Shared => {}
State::Unique => {
struct Locked<'a>(&'a Cell<State>);
impl Drop for Locked<'_> {
fn drop(&mut self) {
self.0.set(State::Shared);
}
}
self.state.set(State::Locked);
let lock = Locked(&self.state);
// Safety: state is Locked, so nothing else will
// access value while calling unique_to_shared.
unsafe { &mut *self.value.get() }.unique_to_shared();
drop(lock);
}
State::Locked => return None,
}
// Safety: state is Shared so nothing will create any mutable
// references until the returned reference's lifetime expires.
Some(unsafe { &*self.value.get() })
}
#[track_caller]
pub(crate) fn share(&self) -> &T
where
T: AlternatingCellMethods,
{
let Some(retval) = self.try_share() else {
panic!("`share` called recursively");
};
retval
}
pub(crate) fn unique(&mut self) -> &mut T
where
T: AlternatingCellMethods,
{
match self.state.get() {
State::Shared => {
self.state.set(State::Unique);
self.value.get_mut().shared_to_unique();
}
State::Unique => {}
State::Locked => unreachable!(),
}
self.value.get_mut()
}
}
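
Note: `AlternatingCell` alternates between a Unique state (mutation allowed) and a Shared state (read-only), running the `AlternatingCellMethods` hooks on each transition; the Locked state only prevents `share()` from re-entering while `unique_to_shared()` runs. A crate-internal sketch (both items are `pub(crate)`), assuming a toy cache that must be sorted whenever it is readable:

use crate::util::alternating_cell::{AlternatingCell, AlternatingCellMethods};

struct SortedCache {
    items: Vec<u32>,
}

impl AlternatingCellMethods for SortedCache {
    fn unique_to_shared(&mut self) {
        // establish the invariant that readers rely on
        self.items.sort_unstable();
    }
    fn shared_to_unique(&mut self) {
        // nothing to undo; mutation may now break the sorted order again
    }
}

fn demo() {
    let mut cell = AlternatingCell::new_unique(SortedCache { items: vec![3, 1, 2] });
    cell.unique().items.push(0); // mutate freely in the Unique state
    // the first `share()` flips the state and runs `unique_to_shared()`
    assert_eq!(cell.share().items, [0, 1, 2, 3]);
}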


@ -1,5 +1,9 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{DeserializeOwned, Error, Unexpected},
};
use std::{fmt::Debug, hash::Hash, mem::ManuallyDrop, ptr};
mod sealed {
@ -9,7 +13,17 @@ mod sealed {
/// # Safety
/// the only implementation is `ConstBool<Self::VALUE>`
pub unsafe trait GenericConstBool:
sealed::Sealed + Copy + Ord + Hash + Default + Debug + 'static + Send + Sync
sealed::Sealed
+ Copy
+ Ord
+ Hash
+ Default
+ Debug
+ 'static
+ Send
+ Sync
+ Serialize
+ DeserializeOwned
{
const VALUE: bool;
}
@ -30,6 +44,32 @@ unsafe impl<const VALUE: bool> GenericConstBool for ConstBool<VALUE> {
const VALUE: bool = VALUE;
}
impl<const VALUE: bool> Serialize for ConstBool<VALUE> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
VALUE.serialize(serializer)
}
}
impl<'de, const VALUE: bool> Deserialize<'de> for ConstBool<VALUE> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = bool::deserialize(deserializer)?;
if value == VALUE {
Ok(ConstBool)
} else {
Err(D::Error::invalid_value(
Unexpected::Bool(value),
&if VALUE { "true" } else { "false" },
))
}
}
}
pub trait ConstBoolDispatchTag {
type Type<Select: GenericConstBool>;
}


@ -0,0 +1,69 @@
// SPDX-License-Identifier: LGPL-3.0-or-later
// See Notices.txt for copyright information
use serde::{
Deserialize, Deserializer, Serialize, Serializer,
de::{DeserializeOwned, Error, Unexpected},
};
use std::{fmt::Debug, hash::Hash};
mod sealed {
pub trait Sealed {}
}
/// the only implementation is `ConstUsize<Self::VALUE>`
pub trait GenericConstUsize:
sealed::Sealed
+ Copy
+ Ord
+ Hash
+ Default
+ Debug
+ 'static
+ Send
+ Sync
+ Serialize
+ DeserializeOwned
{
const VALUE: usize;
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
pub struct ConstUsize<const VALUE: usize>;
impl<const VALUE: usize> Debug for ConstUsize<VALUE> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("ConstUsize").field(&Self::VALUE).finish()
}
}
impl<const VALUE: usize> sealed::Sealed for ConstUsize<VALUE> {}
impl<const VALUE: usize> GenericConstUsize for ConstUsize<VALUE> {
const VALUE: usize = VALUE;
}
impl<const VALUE: usize> Serialize for ConstUsize<VALUE> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
VALUE.serialize(serializer)
}
}
impl<'de, const VALUE: usize> Deserialize<'de> for ConstUsize<VALUE> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let value = usize::deserialize(deserializer)?;
if value == VALUE {
Ok(ConstUsize)
} else {
Err(D::Error::invalid_value(
Unexpected::Unsigned(value as u64),
&&*VALUE.to_string(),
))
}
}
}
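
Note: `ConstBool<VALUE>` and `ConstUsize<VALUE>` now serialize as their plain `bool`/`usize` value and refuse to deserialize anything but that exact value, which is what lets them satisfy the new `Serialize + DeserializeOwned` bounds on `GenericConstBool`/`GenericConstUsize`. A minimal round-trip sketch using the prelude re-export (assuming `serde_json` is available on the consumer side):

use fayalite::prelude::ConstUsize;

fn main() {
    // serializes as the bare number ...
    assert_eq!(serde_json::to_string(&ConstUsize::<5>).unwrap(), "5");
    // ... and only that exact number deserializes back into the same type
    assert!(serde_json::from_str::<ConstUsize<5>>("5").is_ok());
    assert!(serde_json::from_str::<ConstUsize<5>>("6").is_err());
}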

Some files were not shown because too many files have changed in this diff