Mirror of https://github.com/YosysHQ/sby.git
Merge pull request #263 from jix/pdr-X
Support for "abc --keep-going pdr" via new "pdr -X" mode
Commit: 0c84510cef
@@ -22,6 +22,7 @@ import json, os, sys, shutil, tempfile, re
from sby_cmdline import parser_func
from sby_core import SbyConfig, SbyTask, SbyAbort, SbyTaskloop, process_filename, dress_message
from sby_jobserver import SbyJobClient, process_jobserver_environment
from sby_status import SbyStatusDb
import time, platform, click

process_jobserver_environment() # needs to be called early

@@ -55,6 +56,39 @@ autotune_config = args.autotune_config
sequential = args.sequential
jobcount = args.jobcount
init_config_file = args.init_config_file
status_show = args.status
status_reset = args.status_reset

if status_show or status_reset:
    target = workdir_prefix or workdir or sbyfile
    if not os.path.isdir(target) and target.endswith('.sby'):
        target = target[:-4]
    if not os.path.isdir(target):
        print(f"ERROR: No directory found at {target!r}.", file=sys.stderr)
        sys.exit(1)

    try:
        with open(f"{target}/status.path", "r") as status_path_file:
            status_path = f"{target}/{status_path_file.read().rstrip()}"
    except FileNotFoundError:
        status_path = f"{target}/status.sqlite"

    if not os.path.exists(status_path):
        print(f"ERROR: No status database found at {status_path!r}.", file=sys.stderr)
        sys.exit(1)

    status_db = SbyStatusDb(status_path, task=None)

    if status_show:
        status_db.print_status_summary()
        sys.exit(0)

    if status_reset:
        status_db.reset()

    status_db.db.close()
    sys.exit(0)
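For context (not part of the diff): the block above is what `sby --status <workdir-or-sbyfile>` and `sby --statusreset ...` run. A rough standalone sketch of the same lookup, assuming the module layout introduced in this commit and a hypothetical work directory name:

    import sys
    from sby_status import SbyStatusDb

    workdir = sys.argv[1]                        # e.g. "my_proj" or "my_proj_task0" (hypothetical)
    try:
        # status.path (written by start_task below) can redirect to a shared database
        with open(f"{workdir}/status.path") as f:
            status_path = f"{workdir}/{f.read().rstrip()}"
    except FileNotFoundError:
        status_path = f"{workdir}/status.sqlite"

    db = SbyStatusDb(status_path, task=None)     # task=None: no new task row is created
    db.print_status_summary()
    db.db.close()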

if sbyfile is not None:
    if os.path.isdir(sbyfile):

@@ -356,6 +390,7 @@ def start_task(taskloop, taskname):
    my_opt_tmpdir = opt_tmpdir
    my_workdir = None
    my_status_db = None

    if workdir is not None:
        my_workdir = workdir

@@ -364,10 +399,12 @@ def start_task(taskloop, taskname):
            my_workdir = workdir_prefix
        else:
            my_workdir = workdir_prefix + "_" + taskname
            my_status_db = f"../{os.path.basename(workdir_prefix)}/status.sqlite"

    if my_workdir is None and sbyfile is not None and not my_opt_tmpdir:
        my_workdir = sbyfile[:-4]
        if taskname is not None:
            my_status_db = f"../{os.path.basename(my_workdir)}/status.sqlite"
            my_workdir += "_" + taskname

    if my_workdir is not None:

@@ -399,6 +436,14 @@ def start_task(taskloop, taskname):
            with open(f"{my_workdir}/.gitignore", "w") as gitignore:
                print("*", file=gitignore)

        if my_status_db is not None:
            os.makedirs(f"{my_workdir}/{os.path.dirname(my_status_db)}", exist_ok=True)
            if os.getenv("SBY_WORKDIR_GITIGNORE"):
                with open(f"{my_workdir}/{os.path.dirname(my_status_db)}/.gitignore", "w") as gitignore:
                    print("*", file=gitignore)
            with open(f"{my_workdir}/status.path", "w") as status_path:
                print(my_status_db, file=status_path)
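For context (not part of the diff): the status.path indirection above lets all tasks of one .sby file share a single status database. Tracing the code with a hypothetical project name:

    # sbyfile = "myproj.sby", taskname = "cover", no workdir/workdir_prefix given
    my_workdir = "myproj.sby"[:-4]                                       # -> "myproj"
    my_status_db = f"../{os.path.basename(my_workdir)}/status.sqlite"    # -> "../myproj/status.sqlite"
    my_workdir += "_" + "cover"                                          # -> "myproj_cover"
    # start_task() then writes myproj_cover/status.path containing "../myproj/status.sqlite",
    # so every task of myproj.sby records into the shared myproj/status.sqlite database.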

    junit_ts_name = os.path.basename(sbyfile[:-4]) if sbyfile is not None else workdir if workdir is not None else "stdin"
    junit_tc_name = taskname if taskname is not None else "default"
@@ -378,6 +378,7 @@ class SbyAutotune:

    def run(self):
        self.task.handle_non_engine_options()
        self.task.setup_status_db(':memory:')
        self.config = self.task.autotune_config or SbyAutotuneConfig()

        if "expect" not in self.task.options:
@@ -69,6 +69,11 @@ def parser_func():
    parser.add_argument("--setup", action="store_true", dest="setupmode",
            help="set up the working directory and exit")

    parser.add_argument("--status", action="store_true", dest="status",
            help="summarize the contents of the status database")
    parser.add_argument("--statusreset", action="store_true", dest="status_reset",
            help="reset the contents of the status database")

    parser.add_argument("--init-config-file", dest="init_config_file",
            help="create a default .sby config file")
    parser.add_argument("sbyfile", metavar="<jobname>.sby | <dirname>", nargs="?",
@@ -27,6 +27,7 @@ from shutil import copyfile, copytree, rmtree
from select import select
from time import monotonic, localtime, sleep, strftime
from sby_design import SbyProperty, SbyModule, design_hierarchy
from sby_status import SbyStatusDb

all_procs_running = []
@@ -674,20 +675,41 @@ class SbySummary:
            self.engine_summaries[engine_idx] = SbyEngineSummary(engine_idx)
        return self.engine_summaries[engine_idx]

    def add_event(self, *args, **kwargs):
    def add_event(self, *args, update_status=True, **kwargs):
        event = SbySummaryEvent(*args, **kwargs)

        engine = self.engine_summary(event.engine_idx)

        if update_status:
            status_metadata = dict(source="summary_event", engine=engine.engine)

        if event.prop:
            if event.type == "$assert":
                event.prop.status = "FAIL"
                if event.path:
                    event.prop.tracefiles.append(event.path)
                    if update_status:
                        self.task.status_db.add_task_property_data(
                            event.prop,
                            "trace",
                            data=dict(path=event.path, step=event.step, **status_metadata),
                        )
        if event.prop:
            if event.type == "$cover":
                event.prop.status = "PASS"
                if event.path:
                    event.prop.tracefiles.append(event.path)

        engine = self.engine_summary(event.engine_idx)
                    if update_status:
                        self.task.status_db.add_task_property_data(
                            event.prop,
                            "trace",
                            data=dict(path=event.path, step=event.step, **status_metadata),
                        )
        if event.prop and update_status:
            self.task.status_db.set_task_property_status(
                event.prop,
                data=status_metadata
            )

        if event.trace not in engine.traces:
            engine.traces[event.trace] = SbyTraceSummary(event.trace, path=event.path, engine_case=event.engine_case)
@@ -765,8 +787,8 @@ class SbySummary:
                event = same_events[0]
                steps = sorted(e.step for e in same_events)
                if short and len(steps) > step_limit:
                    steps = [str(step) for step in steps[:step_limit]]
                    excess = len(steps) - step_limit
                    steps = [str(step) for step in steps[:step_limit]]
                    omitted_excess = True
                    steps[-1] += f" and {excess} further step{'s' if excess != 1 else ''}"
@@ -1005,6 +1027,7 @@ class SbyTask(SbyConfig):
            print("setundef -undriven -anyseq", file=f)
            print("opt -fast", file=f)
            if self.opt_witrename:
                # we need to run this a second time to handle anything added by prep
                print("rename -witness", file=f)
            print("opt_clean", file=f)
            print(f"""write_rtlil ../model/design_prep.il""", file=f)

@@ -1026,6 +1049,9 @@ class SbyTask(SbyConfig):
                print(cmd, file=f)
            # the user must designate a top module in [script]
            print("hierarchy -smtcheck", file=f)
            # we need to give flatten-preserved names before write_jny
            if self.opt_witrename:
                print("rename -witness", file=f)
            print(f"""write_jny -no-connections ../model/design.json""", file=f)
            print(f"""write_rtlil ../model/design.il""", file=f)
@@ -1041,6 +1067,10 @@ class SbyTask(SbyConfig):
            if self.design == None:
                with open(f"{self.workdir}/model/design.json") as f:
                    self.design = design_hierarchy(f)
                self.status_db.create_task_properties([
                    prop for prop in self.design.properties_by_path.values()
                    if not prop.type.assume_like
                ])

        def instance_hierarchy_error_callback(retcode):
            self.precise_prop_status = False
@@ -1186,8 +1216,13 @@ class SbyTask(SbyConfig):
        self.status = "ERROR"
        self.terminate()

    def pass_unknown_asserts(self, data):
        for prop in self.design.pass_unknown_asserts():
            self.status_db.set_task_property_status(prop, data=data)

    def update_status(self, new_status):
        assert new_status in ["PASS", "FAIL", "UNKNOWN", "ERROR"]
        self.status_db.set_task_status(new_status)

        if new_status == "UNKNOWN":
            return

@@ -1199,7 +1234,7 @@ class SbyTask(SbyConfig):
            assert self.status != "FAIL"
            self.status = "PASS"
            if self.opt_mode in ("bmc", "prove") and self.design:
                self.design.pass_unknown_asserts()
                self.pass_unknown_asserts(dict(source="task_status"))

        elif new_status == "FAIL":
            assert self.status != "PASS"
@@ -1258,6 +1293,19 @@ class SbyTask(SbyConfig):

        self.handle_bool_option("assume_early", True)

    def setup_status_db(self, status_path=None):
        if hasattr(self, 'status_db'):
            return

        if status_path is None:
            try:
                with open(f"{self.workdir}/status.path", "r") as status_path_file:
                    status_path = f"{self.workdir}/{status_path_file.read().rstrip()}"
            except FileNotFoundError:
                status_path = f"{self.workdir}/status.sqlite"

        self.status_db = SbyStatusDb(status_path, self)

    def setup_procs(self, setupmode):
        self.handle_non_engine_options()
        if self.opt_smtc is not None:

@@ -1285,6 +1333,8 @@ class SbyTask(SbyConfig):
            self.retcode = 0
            return

        self.setup_status_db()

        if self.opt_make_model is not None:
            for name in self.opt_make_model.split(","):
                self.model(name.strip())
@@ -88,6 +88,10 @@ class SbyProperty:
                return c.FAIR
            raise ValueError("Unknown property type: " + name)

        @property
        def assume_like(self):
            return self in [self.ASSUME, self.FAIR]

    name: str
    path: Tuple[str, ...]
    type: Type

@@ -171,9 +175,12 @@ class SbyDesign:
    properties_by_path: dict = field(default_factory=dict)

    def pass_unknown_asserts(self):
        updated = []
        for prop in self.hierarchy:
            if prop.type == prop.Type.ASSERT and prop.status == "UNKNOWN":
                prop.status = "PASS"
                updated.append(prop)
        return updated


def cell_path(cell):
@@ -17,32 +17,101 @@
#

import re, getopt
import json
from sby_core import SbyProc
from sby_engine_aiger import aigsmt_exit_callback
from sby_engine_aiger import aigsmt_exit_callback, aigsmt_trace_callback


def abc_getopt(args, long):
    long = set(long)
    output = []
    parsed = []
    toggles = set()
    pos = 0

    while pos < len(args):
        arg = args[pos]
        pos += 1
        if not arg.startswith('-'):
            output.append(arg)
        elif arg == '--':
            output.extend(args[pos:])
            break
        elif arg.startswith('--'):
            if '=' in arg:
                prefix, param = arg.split('=', 1)
                if prefix + "=" in long:
                    parsed.append((prefix, param))
            elif arg[2:] in long:
                parsed.append((arg, ''))
            elif arg[2:] + "=" in long:
                parsed.append((arg, args[pos]))
                pos += 1
            else:
                output.append(arg)
        elif arg.startswith('-'):
            output.append(arg)
            for c in arg[1:]:
                if 'A' <= c <= 'Z':
                    if pos < len(args):
                        output.append(args[pos])
                        pos += 1
                else:
                    toggles.symmetric_difference_update([c])

    return output, parsed, toggles
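For illustration (not part of the diff): run() below calls abc_getopt(engine[1:], ["keep-going"]). With a hypothetical argument list, the scanner splits the engine arguments like this:

    output, parsed, toggles = abc_getopt(["--keep-going", "pdr", "-vd"], ["keep-going"])

    # output  == ["pdr", "-vd"]          # non-option words and single-dash groups pass through to ABC
    # parsed  == [("--keep-going", "")]  # recognized long options with their (here empty) parameter
    # toggles == {"v", "d"}              # lower-case single-letter switches, toggled per occurrence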


def run(mode, task, engine_idx, engine):
    abc_opts, abc_command = getopt.getopt(engine[1:], "", [])
    keep_going = False

    fold_command = "fold"
    if task.opt_aigfolds:
        fold_command += " -s"

    abc_command, custom_options, toggles = abc_getopt(engine[1:], [
        "keep-going",
    ])

    if len(abc_command) == 0:
        task.error("Missing ABC command.")

    for o, a in abc_opts:
        task.error("Unexpected ABC engine options.")
    if abc_command[0].startswith('-'):
        task.error(f"Unexpected ABC engine option '{abc_command[0]}'.")

    if abc_command[0] == "bmc3":
        if mode != "bmc":
            task.error("ABC command 'bmc3' is only valid in bmc mode.")
        for o, a in custom_options:
            task.error(f"Option {o} not supported by 'abc {abc_command[0]}'")
        abc_command[0] += f" -F {task.opt_depth} -v"

    elif abc_command[0] == "sim3":
        if mode != "bmc":
            task.error("ABC command 'sim3' is only valid in bmc mode.")
        for o, a in custom_options:
            task.error(f"Option {o} not supported by 'abc {abc_command[0]}'")
        abc_command[0] += f" -F {task.opt_depth} -v"

    elif abc_command[0] == "pdr":
        if mode != "prove":
            task.error("ABC command 'pdr' is only valid in prove mode.")
        abc_command[0] += f" -v -I engine_{engine_idx}/invariants.pla"

        for o, a in custom_options:
            if o == '--keep-going':
                keep_going = True
            else:
                task.error(f"Option {o} not supported by 'abc {abc_command[0]}'")

        abc_command[0] += " -v -l"

        if keep_going:
            abc_command += ["-a", "-X", f"engine_{engine_idx}/trace_"]

        if 'd' in toggles:
            abc_command += ["-I", f"engine_{engine_idx}/invariants.pla"]
            if not task.opt_aigfolds:
                fold_command += " -s"

    else:
        task.error(f"Invalid ABC command {abc_command[0]}.")
@@ -66,21 +135,61 @@ def run(mode, task, engine_idx, engine):
        task,
        f"engine_{engine_idx}",
        task.model("aig"),
        f"""cd {task.workdir}; {task.exe_paths["abc"]} -c 'read_aiger model/design_aiger.aig; fold{
            " -s" if task.opt_aigfolds or (abc_command[0].startswith("pdr ") and "-d" in abc_command[1:]) else ""
            }; strash; {" ".join(abc_command)}; write_cex -a engine_{engine_idx}/trace.aiw'""",
        f"""cd {task.workdir}; {task.exe_paths["abc"]} -c 'read_aiger model/design_aiger.aig; {
            fold_command}; strash; {" ".join(abc_command)}; write_cex -a engine_{engine_idx}/trace.aiw'""",
        logfile=open(f"{task.workdir}/engine_{engine_idx}/logfile.txt", "w")
    )
    proc.checkretcode = True

    proc.noprintregex = re.compile(r"^\.+$")
    proc_status = None
    proc_status = "UNKNOWN"

    procs_running = 1

    aiger_props = None
    disproved = set()
    proved = set()

    def output_callback(line):
        nonlocal proc_status
        nonlocal procs_running
        nonlocal aiger_props

        match = re.match(r"^Output [0-9]+ of miter .* was asserted in frame [0-9]+.", line)
        if match: proc_status = "FAIL"
        if aiger_props is None:
            with open(f"{task.workdir}/model/design_aiger.ywa") as ywa_file:
                ywa = json.load(ywa_file)
            aiger_props = []
            for path in ywa["asserts"]:
                aiger_props.append(task.design.properties_by_path.get(tuple(path)))

        if keep_going:
            match = re.match(r"Writing CEX for output ([0-9]+) to engine_[0-9]+/(.*)\.aiw", line)
            if match:
                output = int(match[1])
                prop = aiger_props[output]
                if prop:
                    prop.status = "FAIL"
                    task.status_db.set_task_property_status(prop, data=dict(source="abc pdr", engine=f"engine_{engine_idx}"))
                disproved.add(output)
                proc_status = "FAIL"
                proc = aigsmt_trace_callback(task, engine_idx, proc_status,
                    run_aigsmt=run_aigsmt, smtbmc_vcd=smtbmc_vcd, smtbmc_append=smtbmc_append, sim_append=sim_append,
                    name=match[2],
                )
                proc.register_exit_callback(exit_callback)
                procs_running += 1
        else:
            match = re.match(r"^Output [0-9]+ of miter .* was asserted in frame [0-9]+.", line)
            if match: proc_status = "FAIL"

        match = re.match(r"^Proved output +([0-9]+) in frame +[0-9]+", line)
        if match:
            output = int(match[1])
            prop = aiger_props[output]
            if prop:
                prop.status = "PASS"
                task.status_db.set_task_property_status(prop, data=dict(source="abc pdr", engine=f"engine_{engine_idx}"))
            proved.add(output)

        match = re.match(r"^Simulation of [0-9]+ frames for [0-9]+ rounds with [0-9]+ restarts did not assert POs.", line)
        if match: proc_status = "UNKNOWN"
@@ -94,11 +203,44 @@ def run(mode, task, engine_idx, engine):
        match = re.match(r"^Property proved.", line)
        if match: proc_status = "PASS"

        if keep_going:
            match = re.match(r"^Properties: All = (\d+). Proved = (\d+). Disproved = (\d+). Undecided = (\d+).", line)
            if match:
                all_count = int(match[1])
                proved_count = int(match[2])
                disproved_count = int(match[3])
                undecided_count = int(match[4])
                if (
                    all_count != len(aiger_props) or
                    all_count != proved_count + disproved_count + undecided_count or
                    disproved_count != len(disproved) or
                    proved_count != len(proved)
                ):
                    log("WARNING: inconsistent status output")
                    proc_status = "UNKNOWN"
                elif proved_count == all_count:
                    proc_status = "PASS"
                elif disproved_count == 0:
                    proc_status = "UNKNOWN"
                else:
                    proc_status = "FAIL"

        return line

    def exit_callback(retcode):
        aigsmt_exit_callback(task, engine_idx, proc_status,
            run_aigsmt=run_aigsmt, smtbmc_vcd=smtbmc_vcd, smtbmc_append=smtbmc_append, sim_append=sim_append, )
        nonlocal procs_running
        if keep_going:
            procs_running -= 1
            if not procs_running:
                if proc_status == "FAIL" and mode == "bmc" and keep_going:
                    task.pass_unknown_asserts(dict(source="abc pdr", keep_going=True, engine=f"engine_{engine_idx}"))
                task.update_status(proc_status)
                task.summary.set_engine_status(engine_idx, proc_status)
                if proc_status != "UNKNOWN" and not keep_going:
                    task.terminate()
        else:
            aigsmt_exit_callback(task, engine_idx, proc_status,
                run_aigsmt=run_aigsmt, smtbmc_vcd=smtbmc_vcd, smtbmc_append=smtbmc_append, sim_append=sim_append)

    proc.output_callback = output_callback
    proc.register_exit_callback(exit_callback)
@@ -121,70 +121,108 @@ def run(mode, task, engine_idx, engine):


def aigsmt_exit_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_vcd, smtbmc_append, sim_append):
    if proc_status is None:
        task.error(f"engine_{engine_idx}: Could not determine engine status.")
    if proc_status is None:
        task.error(f"engine_{engine_idx}: Could not determine engine status.")

    task.update_status(proc_status)
    task.summary.set_engine_status(engine_idx, proc_status)
    task.terminate()
    task.update_status(proc_status)
    task.summary.set_engine_status(engine_idx, proc_status)
    task.terminate()
    if proc_status == "FAIL" and (not run_aigsmt or task.opt_aigsmt != "none"):
        aigsmt_trace_callback(task, engine_idx, proc_status, run_aigsmt=run_aigsmt, smtbmc_vcd=smtbmc_vcd, smtbmc_append=smtbmc_append, sim_append=sim_append)

    if proc_status == "FAIL" and (not run_aigsmt or task.opt_aigsmt != "none"):
        trace_prefix = f"engine_{engine_idx}/trace"
def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_vcd, smtbmc_append, sim_append, name="trace"):

        aiw2yw_suffix = '_aiw' if run_aigsmt else ''
    trace_prefix = f"engine_{engine_idx}/{name}"

        witness_proc = SbyProc(
            task, f"engine_{engine_idx}", [],
            f"cd {task.workdir}; {task.exe_paths['witness']} aiw2yw engine_{engine_idx}/trace.aiw model/design_aiger.ywa engine_{engine_idx}/trace{aiw2yw_suffix}.yw",
        )
        yw_proc = witness_proc
    aiw2yw_suffix = '_aiw' if run_aigsmt else ''

        if run_aigsmt:
            smtbmc_opts = []
            smtbmc_opts += ["-s", task.opt_aigsmt]
            if task.opt_tbtop is not None:
                smtbmc_opts += ["--vlogtb-top", task.opt_tbtop]
            smtbmc_opts += ["--noprogress", f"--append {smtbmc_append}"]
            if smtbmc_vcd:
                smtbmc_opts += [f"--dump-vcd {trace_prefix}.vcd"]
            smtbmc_opts += [f"--dump-yw {trace_prefix}.yw", f"--dump-vlogtb {trace_prefix}_tb.v", f"--dump-smtc {trace_prefix}.smtc"]
    witness_proc = SbyProc(
        task, f"engine_{engine_idx}", [],
        f"cd {task.workdir}; {task.exe_paths['witness']} aiw2yw engine_{engine_idx}/{name}.aiw model/design_aiger.ywa engine_{engine_idx}/{name}{aiw2yw_suffix}.yw",
    )
    final_proc = witness_proc

            proc2 = SbyProc(
                task,
                f"engine_{engine_idx}",
                [*task.model("smt2"), witness_proc],
                f"cd {task.workdir}; {task.exe_paths['smtbmc']} {' '.join(smtbmc_opts)} --yw engine_{engine_idx}/trace{aiw2yw_suffix}.yw model/design_smt2.smt2",
                logfile=open(f"{task.workdir}/engine_{engine_idx}/logfile2.txt", "w")
            )
    if run_aigsmt:
        smtbmc_opts = []
        smtbmc_opts += ["-s", task.opt_aigsmt]
        if task.opt_tbtop is not None:
            smtbmc_opts += ["--vlogtb-top", task.opt_tbtop]
        smtbmc_opts += ["--noprogress", f"--append {smtbmc_append}"]
        if smtbmc_vcd:
            smtbmc_opts += [f"--dump-vcd {trace_prefix}.vcd"]
        smtbmc_opts += [f"--dump-yw {trace_prefix}.yw", f"--dump-vlogtb {trace_prefix}_tb.v", f"--dump-smtc {trace_prefix}.smtc"]

        proc2_status = None
        proc2 = SbyProc(
            task,
            f"engine_{engine_idx}",
            [*task.model("smt2"), witness_proc],
            f"cd {task.workdir}; {task.exe_paths['smtbmc']} {' '.join(smtbmc_opts)} --yw engine_{engine_idx}/{name}{aiw2yw_suffix}.yw model/design_smt2.smt2",
            logfile=open(f"{task.workdir}/engine_{engine_idx}/logfile2.txt", "w"),
        )

            def output_callback2(line):
                nonlocal proc2_status
        proc2_status = None

                match = re.match(r"^## [0-9: ]+ Status: FAILED", line)
                if match: proc2_status = "FAIL"
        last_prop = []
        current_step = None

                match = re.match(r"^## [0-9: ]+ Status: PASSED", line)
                if match: proc2_status = "PASS"
        def output_callback2(line):
            nonlocal proc2_status
            nonlocal last_prop
            nonlocal current_step

                return line
            smt2_trans = {'\\':'/', '|':'/'}

            def exit_callback2(retcode):
                if proc2_status is None:
                    task.error(f"engine_{engine_idx}: Could not determine aigsmt status.")
                if proc2_status != "FAIL":
                    task.error(f"engine_{engine_idx}: Unexpected aigsmt status.")
                if os.path.exists(f"{task.workdir}/engine_{engine_idx}/trace.vcd"):
                    task.summary.add_event(engine_idx, trace="trace", path=f"engine_{engine_idx}/trace.vcd", type="$assert")
            match = re.match(r"^## [0-9: ]+ .* in step ([0-9]+)\.\.", line)
            if match:
                current_step = int(match[1])
                return line

            proc2.output_callback = output_callback2
            proc2.register_exit_callback(exit_callback2)
            match = re.match(r"^## [0-9: ]+ Status: FAILED", line)
            if match: proc2_status = "FAIL"

            yw_proc = proc2
            match = re.match(r"^## [0-9: ]+ Status: PASSED", line)
            if match: proc2_status = "PASS"

        if task.opt_fst or (task.opt_vcd and task.opt_vcd_sim):
            sim_witness_trace(f"engine_{engine_idx}", task, engine_idx, f"engine_{engine_idx}/trace.yw", append=sim_append, deps=[yw_proc])
            match = re.match(r"^## [0-9: ]+ Assert failed in (\S+): (\S+)(?: \((\S+)\))?", line)
            if match:
                cell_name = match[3] or match[2]
                prop = task.design.hierarchy.find_property_by_cellname(cell_name, trans_dict=smt2_trans)
                prop.status = "FAIL"
                task.status_db.set_task_property_status(prop, data=dict(source="aigsmt", engine=f"engine_{engine_idx}"))
                last_prop.append(prop)
                return line

        else:
            task.log(f"{click.style(f'engine_{engine_idx}', fg='magenta')}: Engine did not produce a counter example.")
            match = re.match(r"^## [0-9: ]+ Writing trace to VCD file: (\S+)", line)
            if match:
                tracefile = match[1]
                trace = os.path.basename(tracefile)[:-4]
                task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile)

            if match and last_prop:
                for p in last_prop:
                    task.summary.add_event(
                        engine_idx=engine_idx, trace=trace,
                        type=p.celltype, hdlname=p.hdlname, src=p.location, step=current_step)
                    p.tracefiles.append(tracefile)
                last_prop = []
                return line

            return line

        def exit_callback2(retcode):
            if proc2_status is None:
                task.error(f"engine_{engine_idx}: Could not determine aigsmt status.")
            if proc2_status != "FAIL":
                task.error(f"engine_{engine_idx}: Unexpected aigsmt status.")

        proc2.output_callback = output_callback2
        proc2.register_exit_callback(exit_callback2)

        final_proc = proc2

    if task.opt_fst or (task.opt_vcd and task.opt_vcd_sim):
        final_proc = sim_witness_trace(f"engine_{engine_idx}", task, engine_idx, f"engine_{engine_idx}/{name}.yw", append=sim_append, deps=[final_proc])
    elif not run_aigsmt:
        task.log(f"{click.style(f'engine_{engine_idx}', fg='magenta')}: Engine did not produce a counter example.")

    return final_proc
@@ -233,6 +233,7 @@ def run(mode, task, engine_idx, engine):
            cell_name = match[3] or match[2]
            prop = task.design.hierarchy.find_property_by_cellname(cell_name, trans_dict=smt2_trans)
            prop.status = "FAIL"
            task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}"))
            last_prop.append(prop)
            return line

@@ -241,6 +242,7 @@ def run(mode, task, engine_idx, engine):
            cell_name = match[2] or match[1]
            prop = task.design.hierarchy.find_property_by_cellname(cell_name, trans_dict=smt2_trans)
            prop.status = "PASS"
            task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}"))
            last_prop.append(prop)
            return line

@@ -271,6 +273,7 @@ def run(mode, task, engine_idx, engine):
            cell_name = match[2]
            prop = task.design.hierarchy.find_property_by_cellname(cell_name, trans_dict=smt2_trans)
            prop.status = "FAIL"
            task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}"))

            return line

@@ -288,10 +291,12 @@ def run(mode, task, engine_idx, engine):
    def last_exit_callback():
        if mode == "bmc" or mode == "cover":
            task.update_status(proc_status)
            if proc_status == "FAIL" and mode == "bmc" and keep_going:
                task.pass_unknown_asserts(dict(source="smtbmc", keep_going=True, engine=f"engine_{engine_idx}"))
            proc_status_lower = proc_status.lower() if proc_status == "PASS" else proc_status
            task.summary.set_engine_status(engine_idx, proc_status_lower)

            task.terminate()
            if not keep_going:
                task.terminate()

        elif mode in ["prove_basecase", "prove_induction"]:
            proc_status_lower = proc_status.lower() if proc_status == "PASS" else proc_status

@@ -321,7 +326,8 @@ def run(mode, task, engine_idx, engine):
            if task.basecase_pass and task.induction_pass:
                task.update_status("PASS")
                task.summary.append("successful proof by k-induction.")
                task.terminate()
                if not keep_going:
                    task.terminate()

        else:
            assert False
sbysrc/sby_status.py (new file, 344 lines)

@@ -0,0 +1,344 @@
from __future__ import annotations

import sqlite3
import os
import time
import json
from collections import defaultdict
from functools import wraps
from pathlib import Path
from typing import Any, Callable, TypeVar, Optional, Iterable
from sby_design import SbyProperty, pretty_path


Fn = TypeVar("Fn", bound=Callable[..., Any])


def transaction(method: Fn) -> Fn:
    @wraps(method)
    def wrapper(self: SbyStatusDb, *args: Any, **kwargs: Any) -> Any:
        if self._transaction_active:
            return method(self, *args, **kwargs)

        try:
            self.log_debug(f"begin {method.__name__!r} transaction")
            self.db.execute("begin")
            self._transaction_active = True
            result = method(self, *args, **kwargs)
            self.db.execute("commit")
            self._transaction_active = False
            self.log_debug(f"committed {method.__name__!r} transaction")
            return result
        except sqlite3.OperationalError as err:
            self.log_debug(f"failed {method.__name__!r} transaction {err}")
            self.db.rollback()
            self._transaction_active = False
        except Exception as err:
            self.log_debug(f"failed {method.__name__!r} transaction {err}")
            self.db.rollback()
            self._transaction_active = False
            raise
        try:
            self.log_debug(
                f"retrying {method.__name__!r} transaction once in immediate mode"
            )
            self.db.execute("begin immediate")
            self._transaction_active = True
            result = method(self, *args, **kwargs)
            self.db.execute("commit")
            self._transaction_active = False
            self.log_debug(f"committed {method.__name__!r} transaction")
            return result
        except Exception as err:
            self.log_debug(f"failed {method.__name__!r} transaction {err}")
            self.db.rollback()
            self._transaction_active = False
            raise

    return wrapper # type: ignore


class SbyStatusDb:
    def __init__(self, path: Path, task, timeout: float = 5.0):
        self.debug = False
        self.task = task
        self._transaction_active = False

        setup = not os.path.exists(path)

        self.db = sqlite3.connect(path, isolation_level=None, timeout=timeout)
        self.db.row_factory = sqlite3.Row
        self.db.execute("PRAGMA journal_mode=WAL")
        self.db.execute("PRAGMA synchronous=0")

        if setup:
            self._setup()

        if task is not None:
            self.task_id = self.create_task(workdir=task.workdir, mode=task.opt_mode)

    def log_debug(self, *args):
        if self.debug:
            if self.task:
                self.task.log(" ".join(str(arg) for arg in args))
            else:
                print(*args)

    @transaction
    def _setup(self):
        script = """
            CREATE TABLE task (
                id INTEGER PRIMARY KEY,
                workdir TEXT,
                mode TEXT,
                created REAL
            );
            CREATE TABLE task_status (
                id INTEGER PRIMARY KEY,
                task INTEGER,
                status TEXT,
                data TEXT,
                created REAL,
                FOREIGN KEY(task) REFERENCES task(id)
            );
            CREATE TABLE task_property (
                id INTEGER PRIMARY KEY,
                task INTEGER,
                src TEXT,
                name TEXT,
                created REAL,
                FOREIGN KEY(task) REFERENCES task(id)
            );
            CREATE TABLE task_property_status (
                id INTEGER PRIMARY KEY,
                task_property INTEGER,
                status TEXT,
                data TEXT,
                created REAL,
                FOREIGN KEY(task_property) REFERENCES task_property(id)
            );
            CREATE TABLE task_property_data (
                id INTEGER PRIMARY KEY,
                task_property INTEGER,
                kind TEXT,
                data TEXT,
                created REAL,
                FOREIGN KEY(task_property) REFERENCES task_property(id)
            );
        """
        for statement in script.split(";\n"):
            statement = statement.strip()
            if statement:
                self.db.execute(statement)
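Illustrative only (not part of the diff): the schema above can also be read directly with sqlite3, for example to list the most recent recorded status per property. The database path is an assumption (see the status.path handling earlier in this diff):

    import json, sqlite3

    db = sqlite3.connect("myproj/status.sqlite")
    db.row_factory = sqlite3.Row

    rows = db.execute("""
        SELECT p.name, p.src, s.status, MAX(s.created) AS created
        FROM task_property p
        JOIN task_property_status s ON s.task_property = p.id
        GROUP BY p.id
    """).fetchall()

    for row in rows:
        # task_property.name stores the JSON-encoded property path (see create_task_properties below);
        # with MAX(created), SQLite returns the bare columns from the newest matching row.
        print(tuple(json.loads(row["name"])), row["status"])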

    @transaction
    def create_task(self, workdir: str, mode: str) -> int:
        return self.db.execute(
            """
            INSERT INTO task (workdir, mode, created)
            VALUES (:workdir, :mode, :now)
            """,
            dict(workdir=workdir, mode=mode, now=time.time()),
        ).lastrowid

    @transaction
    def create_task_properties(
        self, properties: Iterable[SbyProperty], *, task_id: Optional[int] = None
    ):
        if task_id is None:
            task_id = self.task_id
        now = time.time()
        self.db.executemany(
            """
            INSERT INTO task_property (name, src, task, created)
            VALUES (:name, :src, :task, :now)
            """,
            [
                dict(
                    name=json.dumps(prop.path),
                    src=prop.location or "",
                    task=task_id,
                    now=now,
                )
                for prop in properties
            ],
        )

    @transaction
    def set_task_status(
        self,
        status: Optional[str] = None,
        data: Any = None,
    ):
        if status is None:
            status = property.status

        now = time.time()
        self.db.execute(
            """
            INSERT INTO task_status (
                task, status, data, created
            )
            VALUES (
                :task, :status, :data, :now
            )
            """,
            dict(
                task=self.task_id,
                status=status,
                data=json.dumps(data),
                now=now,
            ),
        )

    @transaction
    def set_task_property_status(
        self,
        property: SbyProperty,
        status: Optional[str] = None,
        data: Any = None,
    ):
        if status is None:
            status = property.status

        now = time.time()
        self.db.execute(
            """
            INSERT INTO task_property_status (
                task_property, status, data, created
            )
            VALUES (
                (SELECT id FROM task_property WHERE task = :task AND name = :name),
                :status, :data, :now
            )
            """,
            dict(
                task=self.task_id,
                name=json.dumps(property.path),
                status=status,
                data=json.dumps(data),
                now=now,
            ),
        )

    @transaction
    def add_task_property_data(self, property: SbyProperty, kind: str, data: Any):
        now = time.time()
        self.db.execute(
            """
            INSERT INTO task_property_data (
                task_property, kind, data, created
            )
            VALUES (
                (SELECT id FROM task_property WHERE task = :task AND name = :name),
                :kind, :data, :now
            )
            """,
            dict(
                task=self.task_id,
                name=json.dumps(property.path),
                kind=kind,
                data=json.dumps(data),
                now=now,
            ),
        )

    @transaction
    def all_tasks(self):
        rows = self.db.execute(
            """
            SELECT id, workdir, created FROM task
            """
        ).fetchall()

        return {row["id"]: dict(row) for row in rows}

    @transaction
    def all_task_properties(self):
        rows = self.db.execute(
            """
            SELECT id, task, src, name, created FROM task_property
            """
        ).fetchall()

        def get_result(row):
            row = dict(row)
            row["name"] = tuple(json.loads(row.get("name", "[]")))
            row["data"] = json.loads(row.get("data", "null"))
            return row

        return {row["id"]: get_result(row) for row in rows}

    @transaction
    def all_task_property_statuses(self):
        rows = self.db.execute(
            """
            SELECT id, task_property, status, data, created
            FROM task_property_status
            """
        ).fetchall()

        def get_result(row):
            row = dict(row)
            row["data"] = json.loads(row.get("data", "null"))
            return row

        return {row["id"]: get_result(row) for row in rows}

    @transaction
    def all_status_data(self):
        return (
            self.all_tasks(),
            self.all_task_properties(),
            self.all_task_property_statuses(),
        )

    @transaction
    def reset(self):
        self.db.execute("""DELETE FROM task_property_status""")
        self.db.execute("""DELETE FROM task_property_data""")
        self.db.execute("""DELETE FROM task_property""")
        self.db.execute("""DELETE FROM task_status""")
        self.db.execute("""DELETE FROM task""")

    def print_status_summary(self):
        tasks, task_properties, task_property_statuses = self.all_status_data()
        properties = defaultdict(set)

        uniquify_paths = defaultdict(dict)

        def add_status(task_property, status):

            display_name = task_property["name"]
            if display_name[-1].startswith("$"):
                counters = uniquify_paths[task_property["src"]]
                counter = counters.setdefault(display_name[-1], len(counters) + 1)
                if task_property["src"]:
                    if counter < 2:
                        path_based = f"<unnamed at {task_property['src']}>"
                    else:
                        path_based = f"<unnamed #{counter} at {task_property['src']}>"
                else:
                    path_based = f"<unnamed #{counter}>"
                display_name = (*display_name[:-1], path_based)

            properties[display_name].add(status)

        for task_property in task_properties.values():
            add_status(task_property, "UNKNOWN")

        for status in task_property_statuses.values():
            task_property = task_properties[status["task_property"]]
            add_status(task_property, status["status"])

        for display_name, statuses in sorted(properties.items()):
            print(pretty_path(display_name), combine_statuses(statuses))


def combine_statuses(statuses):
    statuses = set(statuses)

    if len(statuses) > 1:
        statuses.discard("UNKNOWN")

    return ",".join(sorted(statuses))
@@ -12,6 +12,6 @@ def line_ref(dir, filename, pattern):
        for number, line in enumerate(file, 1):
            if pattern_re.search(line):
                # Needs to match source locations for both verilog frontends
                return fr"{filename}:(?:{number}|\d+.\d+-{number}.\d+)"
                return fr"{filename}:(?:{number}|\d+\.\d+-{number}\.\d+)"

    raise RuntimeError("%s: pattern `%s` not found" % (filename, pattern))
@@ -11,21 +11,34 @@ step_5 = line_ref(workdir, src, "step 5")
step_7 = line_ref(workdir, src, "step 7")

log = open(workdir + "/logfile.txt").read()
log_per_trace = log.split("Writing trace to Yosys witness file")[:-1]

if "_abc]" not in log:
    log_per_trace = log.split("Writing trace to Yosys witness file")[:-1]
    assert len(log_per_trace) == 4
    assert re.search(r"Assert failed in test: %s \(.*\)$" % assert_0, log_per_trace[0], re.M)

    for i in range(1, 4):
        assert re.search(r"Assert failed in test: %s \(.*\) \[failed before\]$" % assert_0, log_per_trace[i], re.M)


    assert re.search(r"Assert failed in test: %s \(.*\)$" % step_3_7, log_per_trace[1], re.M)
    assert re.search(r"Assert failed in test: %s \(.*\)$" % step_5, log_per_trace[2], re.M)
    assert re.search(r"Assert failed in test: %s \(.*\) \[failed before\]$" % step_3_7, log_per_trace[3], re.M)
    assert re.search(r"Assert failed in test: %s \(.*\)$" % step_7, log_per_trace[3], re.M)

    pattern = f"Property ASSERT in test at {assert_0} failed. Trace file: engine_0/trace0.(vcd|fst)"
    assert re.search(pattern, open(f"{workdir}/{workdir}.xml").read())

log_per_trace = log.split("summary: counterexample trace")[1:]
assert len(log_per_trace) == 4

for i in range(4):
    assert re.search(r"failed assertion test\..* at %s" % assert_0, log_per_trace[i], re.M)

assert re.search(r"Assert failed in test: %s \(.*\)$" % assert_0, log_per_trace[0], re.M)
step_3_7_traces = [i for i, t in enumerate(log_per_trace) if re.search(r"failed assertion test\..* at %s" % step_3_7, t, re.M)]
step_5_traces = [i for i, t in enumerate(log_per_trace) if re.search(r"failed assertion test\..* at %s" % step_5, t, re.M)]
step_7_traces = [i for i, t in enumerate(log_per_trace) if re.search(r"failed assertion test\..* at %s" % step_7, t, re.M)]

for i in range(1, 4):
    assert re.search(r"Assert failed in test: %s \(.*\) \[failed before\]$" % assert_0, log_per_trace[i], re.M)


assert re.search(r"Assert failed in test: %s \(.*\)$" % step_3_7, log_per_trace[1], re.M)
assert re.search(r"Assert failed in test: %s \(.*\)$" % step_5, log_per_trace[2], re.M)
assert re.search(r"Assert failed in test: %s \(.*\) \[failed before\]$" % step_3_7, log_per_trace[3], re.M)
assert re.search(r"Assert failed in test: %s \(.*\)$" % step_7, log_per_trace[3], re.M)

pattern = f"Property ASSERT in test at {assert_0} failed. Trace file: engine_0/trace0.(vcd|fst)"
assert re.search(pattern, open(f"{workdir}/{workdir}.xml").read())
assert len(step_3_7_traces) == 2
assert len(step_5_traces) == 1
assert len(step_7_traces) == 1
@@ -1,6 +1,7 @@
[tasks]
bmc
prove
abc : prove

[options]
bmc: mode bmc

@@ -8,7 +9,8 @@ prove: mode prove
expect fail

[engines]
smtbmc --keep-going boolector
~abc: smtbmc --keep-going boolector
abc: abc --keep-going pdr

[script]
read -sv keepgoing_multi_step.sv
@@ -18,5 +18,6 @@ module test (
        if (counter == 7) begin
            assert(a); // step 7
        end
        assert(1);
    end
endmodule
@@ -9,7 +9,11 @@ SAFE_PATH = re.compile(r"^[a-zA-Z0-9_./\\]*$")

def collect(path):
    # don't pick up any paths that need escaping nor any sby workdirs
    if not SAFE_PATH.match(str(path)) or (path / "config.sby").exists():
    if (
        not SAFE_PATH.match(str(path))
        or (path / "config.sby").exists()
        or (path / "status.sqlite").exists()
    ):
        return

    checked_dirs.append(path)