mirror of https://github.com/YosysHQ/sby.git
Merge branch 'master' into krys/argparse_docs
commit ffa53a32c1
@@ -2,6 +2,8 @@
 import sys
 import os
 
+sys.path.append(os.path.abspath(f"{__file__}/../../../sbysrc"))
+
 project = 'YosysHQ SBY'
 author = 'YosysHQ GmbH'
 copyright = '2023 YosysHQ GmbH'
@@ -43,5 +45,3 @@ html_theme_options = {
 
 extensions = ['sphinx.ext.autosectionlabel']
 extensions += ['sphinxarg.ext']
-
-sys.path.append(os.path.abspath(f"{__file__}/../../../sbysrc"))
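The two conf.py hunks above move the sys.path extension from the bottom of the file up next to the imports, so the path setup sits with the other import-related lines before the project metadata and extension list. A minimal sketch of the resulting configuration, assembled from the context lines of both hunks (this is an illustration, not the full file):

    # Sketch of the Sphinx configuration after this commit (assembled from the
    # hunks above; other conf.py settings are omitted).
    import os
    import sys

    sys.path.append(os.path.abspath(f"{__file__}/../../../sbysrc"))

    project = 'YosysHQ SBY'
    author = 'YosysHQ GmbH'
    copyright = '2023 YosysHQ GmbH'

    extensions = ['sphinx.ext.autosectionlabel']
    extensions += ['sphinxarg.ext']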
@@ -402,7 +402,7 @@ class SbyAutotune:
 
         self.build_candidates()
         if not self.active_candidates:
-            self.error("no supported engines found for the current configuration and design")
+            self.task.error("no supported engines found for the current configuration and design")
         self.log(f"testing {len(self.active_candidates)} engine configurations...")
 
         self.start_engines()
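The only change in this hunk is routing the error through the wrapped task's error() method rather than calling error() on the autotuner itself. A small hedged sketch of that delegation shape, using placeholder classes rather than SBY's real ones:

    # Hedged illustration of delegating error reporting to a wrapped task object;
    # the classes and bodies here are placeholders, not SBY's implementation.
    class Task:
        def error(self, msg):
            raise RuntimeError(f"ERROR: {msg}")

    class Autotune:
        def __init__(self, task):
            self.task = task
            self.active_candidates = []

        def check(self):
            if not self.active_candidates:
                self.task.error("no supported engines found for the current configuration and design")

    try:
        Autotune(Task()).check()
    except RuntimeError as exc:
        print(exc)  # ERROR: no supported engines found for the current configuration and design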
@@ -1154,7 +1154,7 @@ class SbyTask(SbyConfig):
                self,
                model_name,
                self.model("aig"),
-               f"""cd {self.workdir}/model; {self.exe_paths["abc"]} -c 'read_aiger design_aiger.aig; fold; strash; write_aiger design_aiger_fold.aig'""",
+               f"""cd {self.workdir}/model; {self.exe_paths["abc"]} -c 'read_aiger design_aiger.aig; fold{" -s" if self.opt_aigfolds else ""}; strash; write_aiger design_aiger_fold.aig'""",
                logfile=open(f"{self.workdir}/model/design_aiger_fold.log", "w")
            )
            proc.checkretcode = True
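The conditional fragment only toggles a single ABC switch. The standalone snippet below shows how the f-string expands for both settings; the command text is copied from the hunk, and the option values are just the two possibilities:

    # Shows the two ABC scripts produced by the conditional fold expression above.
    for opt_aigfolds in (False, True):
        script = f"read_aiger design_aiger.aig; fold{' -s' if opt_aigfolds else ''}; strash; write_aiger design_aiger_fold.aig"
        print(opt_aigfolds, "->", script)
    # False -> read_aiger design_aiger.aig; fold; strash; write_aiger design_aiger_fold.aig
    # True  -> read_aiger design_aiger.aig; fold -s; strash; write_aiger design_aiger_fold.aig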
@@ -1236,6 +1236,7 @@ class SbyTask(SbyConfig):
        self.handle_bool_option("fst", False)
 
        self.handle_bool_option("witrename", True)
+       self.handle_bool_option("aigfolds", False)
        self.handle_bool_option("aigvmap", False)
        self.handle_bool_option("aigsyms", False)
 
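This hunk registers the new aigfolds boolean option with a default of False, next to the existing AIGER-related options. A hedged sketch of how such a boolean option handler can behave (an illustration of the pattern, not SBY's actual handle_bool_option):

    # Minimal stand-in for a boolean option registry; names and parsing rules
    # are illustrative assumptions.
    class Options:
        def __init__(self, raw):
            self.raw = dict(raw)      # e.g. text values parsed from a config file
            self.values = {}

        def handle_bool_option(self, name, default_value):
            text = self.raw.get(name)
            if text is None:
                self.values[name] = default_value
            elif text.lower() in ("on", "true", "1"):
                self.values[name] = True
            elif text.lower() in ("off", "false", "0"):
                self.values[name] = False
            else:
                raise ValueError(f"invalid value '{text}' for boolean option {name}")

    opts = Options({"aigfolds": "on"})
    opts.handle_bool_option("aigfolds", False)
    opts.handle_bool_option("aigvmap", False)
    print(opts.values)  # {'aigfolds': True, 'aigvmap': False}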
@@ -42,7 +42,7 @@ def run(mode, task, engine_idx, engine):
    elif abc_command[0] == "pdr":
        if mode != "prove":
            task.error("ABC command 'pdr' is only valid in prove mode.")
-       abc_command[0] += f" -v"
+       abc_command[0] += f" -v -I engine_{engine_idx}/invariants.pla"
 
    else:
        task.error(f"Invalid ABC command {abc_command[0]}.")
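With this change the pdr invocation also names a .pla file under the engine's work directory. The snippet below simply replays the string construction from the hunk to show the resulting command word; the engine_idx value is illustrative:

    # Replays the command construction from the hunk above.
    engine_idx = 0
    abc_command = ["pdr"]
    if abc_command[0] == "pdr":
        abc_command[0] += f" -v -I engine_{engine_idx}/invariants.pla"
    print(abc_command[0])  # pdr -v -I engine_0/invariants.pla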
@@ -66,7 +66,9 @@ def run(mode, task, engine_idx, engine):
        task,
        f"engine_{engine_idx}",
        task.model("aig"),
-       f"""cd {task.workdir}; {task.exe_paths["abc"]} -c 'read_aiger model/design_aiger.aig; fold; strash; {" ".join(abc_command)}; write_cex -a engine_{engine_idx}/trace.aiw'""",
+       f"""cd {task.workdir}; {task.exe_paths["abc"]} -c 'read_aiger model/design_aiger.aig; fold{
+           " -s" if task.opt_aigfolds or (abc_command[0].startswith("pdr ") and "-d" in abc_command[1:]) else ""
+       }; strash; {" ".join(abc_command)}; write_cex -a engine_{engine_idx}/trace.aiw'""",
        logfile=open(f"{task.workdir}/engine_{engine_idx}/logfile.txt", "w")
    )
    proc.checkretcode = True
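The multi-line f-string hides a single predicate: ABC's fold gets -s when the aigfolds option is set, or when the pdr command carries -d. A small restatement of that condition as a plain function; the function name and sample commands are illustrative:

    # Restates the condition from the f-string above as a standalone function.
    def wants_fold_s(opt_aigfolds, abc_command):
        return opt_aigfolds or (
            abc_command[0].startswith("pdr ") and "-d" in abc_command[1:]
        )

    print(wants_fold_s(False, ["pdr -v -I engine_0/invariants.pla", "-d"]))  # True
    print(wants_fold_s(False, ["bmc3 -F 100 -v"]))                           # False
    print(wants_fold_s(True,  ["bmc3 -F 100 -v"]))                           # True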
@@ -69,8 +69,6 @@ def run(mode, task, engine_idx, engine):
                task.error("smtbmc options --basecase and --induction are exclusive.")
            induction_only = True
        elif o == "--keep-going":
-           if mode not in ("bmc", "prove", "prove_basecase", "prove_induction"):
-               task.error("smtbmc option --keep-going is only supported in bmc and prove mode.")
            keep_going = True
        elif o == "--seed":
            random_seed = a
@@ -134,7 +132,8 @@ def run(mode, task, engine_idx, engine):
 
    if keep_going and mode != "prove_induction":
        smtbmc_opts.append("--keep-going")
-       trace_prefix += "%"
+       if mode != "cover":
+           trace_prefix += "%"
 
    if dumpsmt2:
        smtbmc_opts += ["--dump-smt2", trace_prefix.replace("%", "") + ".smt2"]
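Together with the previous hunk, which drops the mode restriction on --keep-going, this change keeps passing --keep-going to smtbmc but only appends the "%" placeholder to the trace prefix outside of cover mode. A simplified restatement of just the prefix handling; the function name and base prefix are illustrative:

    # Simplified restatement of the prefix handling from the hunk above.
    def make_trace_prefix(base, keep_going, mode):
        prefix = base
        if keep_going and mode != "prove_induction":
            if mode != "cover":
                prefix += "%"
        return prefix

    print(make_trace_prefix("engine_0/trace", True, "bmc"))    # engine_0/trace%
    print(make_trace_prefix("engine_0/trace", True, "cover"))  # engine_0/trace
    print(make_trace_prefix("engine_0/trace", False, "bmc"))   # engine_0/trace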
@@ -80,7 +80,16 @@ def process_jobserver_environment():
 
 def jobserver_helper(jobserver_read_fd, jobserver_write_fd, request_fd, response_fd):
     """Helper process to handle blocking jobserver pipes."""
+    def handle_sigusr1(*args):
+        # Since Python doesn't allow user code to handle EINTR anymore, we replace the
+        # jobserver fd with an fd at EOF to interrupt a blocking read in a way that
+        # cannot lose any read data
+        r, w = os.pipe()
+        os.close(w)
+        os.dup2(r, jobserver_read_fd)
+        os.close(r)
     signal.signal(signal.SIGINT, signal.SIG_IGN)
+    signal.signal(signal.SIGUSR1, handle_sigusr1)
     pending = 0
     while True:
         try:
@@ -110,6 +119,8 @@ def jobserver_helper(jobserver_read_fd, jobserver_write_fd, request_fd, response
        except BlockingIOError:
            select.select([jobserver_read_fd], [], [])
            continue
+       if not token:
+           break
 
        pending -= 1
 
@@ -240,6 +251,10 @@ class SbyJobClient:
        # Closing the request pipe singals the helper that we want to exit
        os.close(self.request_write_fd)
 
+       # Additionally we send a signal to interrupt a blocking read within the
+       # helper
+       self.helper_process.send_signal(signal.SIGUSR1)
+
        # The helper might have been in the process of sending us some tokens, which
        # we still need to return
        while True:
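The jobserver hunks combine three pieces: a SIGUSR1 handler in the helper that dup2()s an already-closed pipe over the jobserver fd, an EOF check (if not token: break) after the read, and the client sending SIGUSR1 at shutdown. The self-contained sketch below demonstrates the same fd-swap trick in a single process; it assumes a POSIX system, and it uses pthread_kill only so the demo can signal its own main thread, which the real helper/client pair does not need:

    # Standalone demo (POSIX only) of unblocking a read via dup2 to an EOF pipe.
    import os
    import signal
    import threading

    read_fd, write_fd = os.pipe()   # keep write_fd open so the read below really blocks

    def handle_sigusr1(*args):
        # Replace read_fd with a pipe that is already at EOF; the interrupted
        # read is retried by Python (PEP 475) and now returns b"" without
        # losing any data, since nothing was ever written to the new pipe.
        r, w = os.pipe()
        os.close(w)
        os.dup2(r, read_fd)
        os.close(r)

    signal.signal(signal.SIGUSR1, handle_sigusr1)

    # Deliver SIGUSR1 to the main thread once it is blocked in os.read below.
    main = threading.main_thread().ident
    threading.Timer(0.2, lambda: signal.pthread_kill(main, signal.SIGUSR1)).start()

    token = os.read(read_fd, 1)   # blocks, then returns b"" after the handler runs
    print("token:", token)        # b'' is treated as "shut down" (cf. `if not token: break`)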