Mirror of https://github.com/YosysHQ/sby.git, synced 2025-08-25 22:26:07 +00:00
Merge branch 'main' into krys/fix_status_trace
commit 5fc7b93627
9 changed files with 156 additions and 101 deletions
@@ -64,15 +64,15 @@ status_show = args.status
 status_reset = args.status_reset
 status_cancels = args.status_cancels
 task_status = args.task_status
-status_live_csv = args.livecsv
-status_show_csv = args.statuscsv
+status_live_formats = args.live_formats
+status_format = args.status_format
 status_latest = args.status_latest

 if autotune and linkmode:
     print("ERROR: --link flag currently not available with --autotune")
     sys.exit(1)

-if status_show or status_reset or task_status or status_show_csv:
+if status_show or status_reset or task_status or status_format:
     target = workdir_prefix or workdir or sbyfile
     if target is None:
         print("ERROR: Specify a .sby config file or working directory to use --status.")
@@ -104,16 +104,16 @@ if status_show or status_reset or task_status or status_show_csv:
     if status_show:
         status_db.print_status_summary(status_latest)

-    if status_show_csv:
-        status_db.print_status_summary_csv(tasknames, status_latest)
+    if status_format:
+        status_db.print_status_summary_fmt(tasknames, status_format, status_latest)

     if task_status:
         status_db.print_task_summary()

     status_db.db.close()

-    if status_live_csv:
-        print(f"WARNING: --livecsv flag found but not used.")
+    if status_live_formats:
+        print(f"WARNING: --live option found but not used.")

     sys.exit(0)
 elif status_latest:
@@ -496,7 +496,7 @@ def start_task(taskloop, taskname):
     else:
         junit_filename = "junit"

-    task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, status_cancels, taskloop, name=taskname, live_csv=status_live_csv)
+    task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, status_cancels, taskloop, name=taskname, live_formats=status_live_formats)

     for k, v in exe_paths.items():
         task.exe_paths[k] = v
@@ -29,8 +29,8 @@ def parser_func(release_version='unknown SBY version'):
             help="maximum number of processes to run in parallel")
     parser.add_argument("--sequential", action="store_true", dest="sequential",
             help="run tasks in sequence, not in parallel")
-    parser.add_argument("--livecsv", action="store_true", dest="livecsv",
-            help="print live updates of property statuses during task execution in csv format")
+    parser.add_argument("--live", action="append", choices=["csv", "jsonl"], dest="live_formats",
+            help="print live updates of property statuses during task execution, may be specified multiple times")

     parser.add_argument("--autotune", action="store_true", dest="autotune",
             help="automatically find a well performing engine and engine configuration for each task")
@@ -77,8 +77,8 @@ def parser_func(release_version='unknown SBY version'):

     parser.add_argument("--status", action="store_true", dest="status",
             help="summarize the contents of the status database")
-    parser.add_argument("--statuscsv", action="store_true", dest="statuscsv",
-            help="print the most recent status for each property in csv format")
+    parser.add_argument("--statusfmt", action="store", default="", choices=["csv", "jsonl"], dest="status_format",
+            help="print the most recent status for each property in specified format")
     parser.add_argument("--latest", action="store_true", dest="status_latest",
             help="only check statuses from the most recent run of a task")
     parser.add_argument("--statusreset", action="store_true", dest="status_reset",
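For reference, a minimal sketch (not part of this commit) of how the reworked options behave: --live uses argparse's append action, so it can be passed several times and collects its values into a list, while --statusfmt stores a single choice.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--live", action="append", choices=["csv", "jsonl"], dest="live_formats",
        help="may be specified multiple times")
parser.add_argument("--statusfmt", action="store", default="", choices=["csv", "jsonl"], dest="status_format",
        help="print the most recent status for each property in specified format")

args = parser.parse_args(["--live", "csv", "--live", "jsonl", "--statusfmt", "jsonl"])
print(args.live_formats)   # ['csv', 'jsonl']
print(args.status_format)  # 'jsonl'
# Note: when --live is never given, args.live_formats is None rather than [].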
@@ -45,12 +45,8 @@ signal.signal(signal.SIGINT, force_shutdown)
 signal.signal(signal.SIGTERM, force_shutdown)

 def process_filename(filename):
-    if filename.startswith("~/"):
-        filename = os.environ['HOME'] + filename[1:]
-
     filename = os.path.expandvars(filename)
-
-    return filename
+    return Path(filename).expanduser()

 def dress_message(workdir, logmessage):
     tm = localtime()
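A minimal sketch (not part of this commit) of what the simplified helper does: os.path.expandvars still resolves environment references, and Path.expanduser() now covers the leading '~' (including the '~user' form the old string manipulation missed); the helper also returns a Path instead of a str.

import os
from pathlib import Path

def process_filename(filename):
    # mirrors the new helper from this commit
    filename = os.path.expandvars(filename)
    return Path(filename).expanduser()

os.environ["DESIGNS"] = "rtl"
print(process_filename("$DESIGNS/top.sv"))    # rtl/top.sv
print(process_filename("~/checks/demo.sby"))  # expanded against the current user's home directory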
@@ -579,7 +575,7 @@ class SbyConfig:
                 self.error(f"sby file syntax error: '[files]' section entry expects up to 2 arguments, {len(entries)} specified")

             if len(entries) == 1:
-                self.files[os.path.basename(entries[0])] = entries[0]
+                self.files[Path(entries[0]).name] = entries[0]
             elif len(entries) == 2:
                 self.files[entries[0]] = entries[1]

@@ -913,7 +909,7 @@ class SbySummary:


 class SbyTask(SbyConfig):
-    def __init__(self, sbyconfig, workdir, early_logs, reusedir, status_cancels=False, taskloop=None, logfile=None, name=None, live_csv=False):
+    def __init__(self, sbyconfig, workdir, early_logs, reusedir, status_cancels=False, taskloop=None, logfile=None, name=None, live_formats=[]):
         super().__init__()
         self.used_options = set()
         self.models = dict()
@@ -921,7 +917,7 @@ class SbyTask(SbyConfig):
         self.reusedir = reusedir
         self.status_cancels = status_cancels
         self.name = name
-        self.live_csv = live_csv
+        self.live_formats = live_formats
         self.status = "UNKNOWN"
         self.total_time = 0
         self.expect = list()
@@ -1033,42 +1029,44 @@ class SbyTask(SbyConfig):
         raise SbyAbort(logmessage)

     def makedirs(self, path):
-        if self.reusedir and os.path.isdir(path):
+        path = Path(path)
+        if self.reusedir and path.is_dir():
             rmtree(path, ignore_errors=True)
-        if not os.path.isdir(path):
-            os.makedirs(path)
+        path.mkdir(parents=True, exist_ok=True)

     def copy_src(self, linkmode=False):
-        self.makedirs(self.workdir + "/src")
+        outdir = Path(self.workdir) / "src"
+        self.makedirs(outdir)

         for dstfile, lines in self.verbatim_files.items():
-            dstfile = self.workdir + "/src/" + dstfile
-            self.log(f"Writing '{dstfile}'.")
+            dstfile = outdir / dstfile
+            self.log(f"Writing '{dstfile.absolute()}'.")
+            dstfile.parent.mkdir(parents=True, exist_ok=True)

             with open(dstfile, "w") as f:
                 for line in lines:
                     f.write(line)

         for dstfile, srcfile in self.files.items():
-            if dstfile.startswith("/") or dstfile.startswith("../") or ("/../" in dstfile):
+            dstfile = Path(dstfile)
+            if dstfile.is_absolute() or ".." in dstfile.parts:
                 self.error(f"destination filename must be a relative path without /../: {dstfile}")
-            dstfile = self.workdir + "/src/" + dstfile
+            dstfile = outdir / dstfile

             srcfile = process_filename(srcfile)

-            basedir = os.path.dirname(dstfile)
-            if basedir != "" and not os.path.exists(basedir):
-                os.makedirs(basedir)
+            basedir = dstfile.parent
+            basedir.mkdir(parents=True, exist_ok=True)

             if linkmode:
                 verb = "Link"
             else:
                 verb = "Copy"
-            self.log(f"{verb} '{os.path.abspath(srcfile)}' to '{os.path.abspath(dstfile)}'.")
+            self.log(f"{verb} '{srcfile.absolute()}' to '{dstfile.absolute()}'.")

             if linkmode:
-                os.symlink(os.path.relpath(srcfile, basedir), dstfile)
-            elif os.path.isdir(srcfile):
+                os.symlink(srcfile.resolve(), dstfile)
+            elif srcfile.is_dir():
                 copytree(srcfile, dstfile, dirs_exist_ok=True)
             else:
                 copyfile(srcfile, dstfile)
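A minimal sketch (not part of this commit) of the new destination check; the helper name is made up for illustration, but the test mirrors the Path-based validation above:

from pathlib import Path

def dstfile_is_safe(dstfile):
    # reject absolute paths and any '..' component, as copy_src() now does
    p = Path(dstfile)
    return not (p.is_absolute() or ".." in p.parts)

print(dstfile_is_safe("a/b/c.v"))      # True
print(dstfile_is_safe("../evil.v"))    # False
print(dstfile_is_safe("a/../../x.v"))  # False
print(dstfile_is_safe("/etc/passwd"))  # False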
@@ -1097,12 +1095,12 @@ class SbyTask(SbyConfig):
             self.__dict__["opt_" + option_name] = default_value

     def make_model(self, model_name):
-        if not os.path.isdir(f"{self.workdir}/model"):
-            os.makedirs(f"{self.workdir}/model")
+        modeldir = Path(self.workdir) / "model"
+        modeldir.mkdir(exist_ok=True)

         if model_name == "prep":
-            with open(f"""{self.workdir}/model/design_prep.ys""", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / "design_prep.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print(f"""read_rtlil design.il""", file=f)
                 if not self.opt_skip_prep:
                     print("scc -select; simplemap; select -clear", file=f)
@@ -1146,7 +1144,7 @@ class SbyTask(SbyConfig):
             return [proc]

         if model_name == "base":
-            with open(f"""{self.workdir}/model/design.ys""", "w") as f:
+            with open(modeldir / "design.ys", "w") as f:
                 print(f"# running in {self.workdir}/src/", file=f)
                 for cmd in self.script:
                     print(cmd, file=f)
@@ -1168,7 +1166,7 @@ class SbyTask(SbyConfig):

             def instance_hierarchy_callback(retcode):
                 if self.design == None:
-                    with open(f"{self.workdir}/model/design.json") as f:
+                    with open(modeldir / "design.json") as f:
                         self.design = design_hierarchy(f)
                     self.status_db.create_task_properties([
                         prop for prop in self.design.properties_by_path.values()
@@ -1184,8 +1182,8 @@ class SbyTask(SbyConfig):
             return [proc]

         if re.match(r"^smt2(_syn)?(_nomem)?(_stbv|_stdt)?$", model_name):
-            with open(f"{self.workdir}/model/design_{model_name}.ys", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / f"design_{model_name}.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print(f"""read_rtlil design_prep.il""", file=f)
                 print("hierarchy -smtcheck", file=f)
                 print("delete */t:$print", file=f)
@@ -1218,8 +1216,8 @@ class SbyTask(SbyConfig):
             return [proc]

         if re.match(r"^btor(_syn)?(_nomem)?$", model_name):
-            with open(f"{self.workdir}/model/design_{model_name}.ys", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / f"design_{model_name}.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print(f"""read_rtlil design_prep.il""", file=f)
                 print("hierarchy -simcheck", file=f)
                 print("delete */t:$print", file=f)
@@ -1254,8 +1252,8 @@ class SbyTask(SbyConfig):
             return [proc]

         if model_name == "aig":
-            with open(f"{self.workdir}/model/design_aiger.ys", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / "design_aiger.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print("read_rtlil design_prep.il", file=f)
                 print("delete */t:$print", file=f)
                 print("hierarchy -simcheck", file=f)
@@ -1281,7 +1279,7 @@ class SbyTask(SbyConfig):
                 self,
                 "aig",
                 self.model("prep"),
-                f"""cd {self.workdir}/model; {self.exe_paths["yosys"]} -ql design_aiger.log design_aiger.ys"""
+                f"""cd {modeldir}; {self.exe_paths["yosys"]} -ql design_aiger.log design_aiger.ys"""
             )
             proc.checkretcode = True

@@ -1292,8 +1290,8 @@ class SbyTask(SbyConfig):
                 self,
                 model_name,
                 self.model("aig"),
-                f"""cd {self.workdir}/model; {self.exe_paths["abc"]} -c 'read_aiger design_aiger.aig; fold{" -s" if self.opt_aigfolds else ""}; strash; write_aiger design_aiger_fold.aig'""",
-                logfile=open(f"{self.workdir}/model/design_aiger_fold.log", "w")
+                f"""cd {modeldir}; {self.exe_paths["abc"]} -c 'read_aiger design_aiger.aig; fold{" -s" if self.opt_aigfolds else ""}; strash; write_aiger design_aiger_fold.aig'""",
+                logfile=open(f"{modeldir}/design_aiger_fold.log", "w")
             )
             proc.checkretcode = True

@@ -1430,7 +1428,7 @@ class SbyTask(SbyConfig):
         except FileNotFoundError:
             status_path = f"{self.workdir}/status.sqlite"

-        self.status_db = SbyStatusDb(status_path, self, live_csv=self.live_csv)
+        self.status_db = SbyStatusDb(status_path, self, live_formats=self.live_formats)

     def setup_procs(self, setupmode, linkmode=False):
         self.handle_non_engine_options()
@@ -106,10 +106,10 @@ class FileInUseError(Exception):


 class SbyStatusDb:
-    def __init__(self, path: Path, task, timeout: float = 5.0, live_csv = False):
+    def __init__(self, path: Path, task, timeout: float = 5.0, live_formats = []):
         self.debug = False
         self.task = task
-        self.live_csv = live_csv
+        self.live_formats = live_formats

         self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout)
         self.db = self.con.cursor()
@@ -250,10 +250,11 @@ class SbyStatusDb:
             ),
         )

-        if self.live_csv:
+        if self.live_formats:
             row = self.get_status_data_joined(self.db.lastrowid)
-            csvline = format_status_data_csvline(row)
-            self.task.log(f"{click.style('csv', fg='yellow')}: {csvline}")
+            for fmt in self.live_formats:
+                fmtline = format_status_data_fmtline(row, fmt)
+                self.task.log(f"{click.style(fmt, fg='yellow')}: {fmtline}")

     @transaction
     def add_task_trace(
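A minimal sketch (not part of this commit) of what the loop above produces when both formats are requested; the rendered row values here are made up, and click.style() only wraps the format tag in terminal coloring.

import click

live_formats = ["csv", "jsonl"]  # e.g. the result of --live csv --live jsonl
rendered = {
    "csv": "1.5,cover,cover,smtbmc,top.prop,top.sv:12,$cover,PASS,,7",
    "jsonl": '{"time": 1.5, "task_name": "cover", "status": "PASS"}',
}

for fmt in live_formats:
    # one log line per requested format for every status update
    print(f"{click.style(fmt, fg='yellow')}: {rendered[fmt]}")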
@@ -440,14 +441,15 @@ class SbyStatusDb:

         return {row["id"]: parse_status_data_row(row) for row in rows}

-    def print_status_summary_csv(self, tasknames: list[str], latest: bool):
+    def print_status_summary_fmt(self, tasknames: list[str], status_format: str, latest: bool):
         # get all statuses
         all_properties = self.all_status_data_joined()
         latest_task_ids = filter_latest_task_ids(self.all_tasks())

-        # print csv header
-        csvheader = format_status_data_csvline(None)
-        print(csvheader)
+        # print header
+        header = format_status_data_fmtline(None, status_format)
+        if header:
+            print(header)

         # find summary for each task/property combo
         prop_map: dict[(str, str, str), dict[str, (int, int)]] = {}
@@ -497,9 +499,8 @@ class SbyStatusDb:
                 del prop["UNKNOWN"]

             for _, row, _ in prop.values():
-                csvline = format_status_data_csvline(all_properties[row])
-                print(csvline)
-
+                line = format_status_data_fmtline(all_properties[row], status_format)
+                print(line)

 def combine_statuses(statuses):
     statuses = set(statuses)
@@ -515,9 +516,7 @@ def parse_status_data_row(raw: sqlite3.Row):
     row_dict["data"] = json.loads(row_dict.get("data") or "{}")
     return row_dict

-def format_status_data_csvline(row: dict|None) -> str:
-    if row is None:
-        csv_header = [
+fmtline_columns = [
     "time",
     "task_name",
     "mode",
@@ -529,33 +528,52 @@ def format_status_data_csvline(row: dict|None) -> str:
     "trace",
     "depth",
 ]
-        return ','.join(csv_header)
+
+def format_status_data_fmtline(row: dict|None, fmt: str = "csv") -> str:
+    if row is None:
+        data = None
+    else:
         engine = row['data'].get('engine', row['data'].get('source'))
-    try:
-        time = row['status_created'] - row['created']
-    except TypeError:
-        time = 0
         name = row['hdlname']
         depth = row['data'].get('step')
-    try:
-        trace_path = Path(row['workdir']) / row['path']
-    except TypeError:
-        trace_path = None
-
-    csv_line = [
-        round(time, 2),
-        row['task_name'],
-        row['mode'],
-        engine,
-        name or pretty_path(row['name']),
-        row['location'],
-        row['kind'],
-        row['status'] or "UNKNOWN",
-        trace_path,
-        depth,
-    ]
-    return ','.join("" if v is None else str(v) for v in csv_line)
+        data = {
+            "task_name": row['task_name'],
+            "mode": row['mode'],
+            "engine": engine,
+            "name": name or pretty_path(row['name']),
+            "location": row['location'],
+            "kind": row['kind'],
+            "status": row['status'] or "UNKNOWN",
+            "depth": depth,
+        }
+        try:
+            data["trace"] = str(Path(row['workdir']) / row['path'])
+        except TypeError:
+            pass
+        try:
+            data['time'] = round(row['status_created'] - row['created'], 2)
+        except TypeError:
+            pass
+    if fmt == "csv":
+        if data is None:
+            csv_line = fmtline_columns
+        else:
+            csv_line = [data.get(column) for column in fmtline_columns]
+        def csv_field(value):
+            if value is None:
+                return ""
+            value = str(value).replace('"', '""')
+            if any(c in value for c in '",\n'):
+                value = f'"{value}"'
+            return value
+        return ','.join(map(csv_field, csv_line))
+    elif fmt == "jsonl":
+        if data is None:
+            return ""
+        # field order
+        data = {column: data[column] for column in fmtline_columns if data.get(column)}
+        return json.dumps(data)

 def filter_latest_task_ids(all_tasks: dict[int, dict[str]]):
     latest: dict[str, int] = {}
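A minimal sketch (not part of this commit) of what the new formatter returns, assuming sby_status is importable and using a made-up row; the csv branch escapes fields containing commas or quotes, and the jsonl branch drops empty columns.

from sby_status import format_status_data_fmtline

row = {
    "task_name": "cover", "mode": "cover", "hdlname": "top.prop",
    "name": "top.prop", "location": "top.sv:12", "kind": "$cover",
    "status": "PASS", "workdir": "demo_cover", "path": "engine_0/trace0.vcd",
    "created": 100.0, "status_created": 101.5,
    "data": {"engine": "smtbmc", "step": 7},
}

print(format_status_data_fmtline(None, "csv"))   # time,task_name,mode,engine,name,location,kind,status,trace,depth
print(format_status_data_fmtline(row, "csv"))    # 1.5,cover,cover,smtbmc,top.prop,top.sv:12,$cover,PASS,demo_cover/engine_0/trace0.vcd,7
print(format_status_data_fmtline(row, "jsonl"))  # {"time": 1.5, "task_name": "cover", "mode": "cover", ...}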
tests/links/more_dirs.sby (new file, 20 lines)
@@ -0,0 +1,20 @@
+[tasks]
+link
+copy
+
+[options]
+mode prep
+
+[engines]
+btor btormc
+
+[script]
+read -noverific
+script dir/script.ys
+
+[files]
+here/dir ${WORKDIR}/../dir
+a/b/c.v prv32fmcmp.v
+
+[file here/doc]
+log foo
tests/links/more_dirs.sh (new file, 10 lines)
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -e
+if [[ $TASK == link ]]; then
+    flags="--setup --link"
+else
+    flags="--setup"
+fi
+python3 $SBY_MAIN -f $SBY_FILE $TASK $flags
+
+test -e ${WORKDIR}/src/here/dir -a -e ${WORKDIR}/src/a/b/c.v -a -e ${WORKDIR}/src/here/doc
@@ -1,10 +1,10 @@
-import os
 from pathlib import Path
 import sys

 def main():
     workdir, task = sys.argv[1:]
     src = Path(workdir) / "src"
+    count = 0
     for srcfile in src.iterdir():
         if srcfile.name == "heredoc":
             assert(not srcfile.is_symlink())
@@ -13,6 +13,11 @@ def main():
             assert(local_contents.strip() == 'log foo')
         else:
             assert(srcfile.is_symlink() == (task == "link"))
+            assert(srcfile.name != "script.ys")
+        count += 1
+    assert(count == 4)
+    script_ys = src / "dir" / "script.ys"
+    assert(script_ys.exists())

 if __name__ == "__main__":
     main()
@@ -1,6 +1,8 @@
 [tasks]
 link
 copy
+dir_implicit: dir
+dir_explicit: dir

 [options]
 mode prep
@@ -15,7 +17,9 @@ script dir/script.ys
 [files]
 ../../docs/examples/demos/picorv32.v
 prv32fmcmp.v
-dir
+~dir: dir
+dir_implicit: dir/
+dir_explicit: dir/ dir/

 [file heredoc]
 log foo
@@ -3,7 +3,7 @@ set -e
 python3 $SBY_MAIN -f $SBY_FILE $TASK

 STATUS_CSV=${WORKDIR}/status.csv
-python3 $SBY_MAIN -f $SBY_FILE $TASK --statuscsv --latest | tee $STATUS_CSV
+python3 $SBY_MAIN -f $SBY_FILE $TASK --statusfmt csv --latest | tee $STATUS_CSV

 if [[ $TASK =~ "_cover" ]]; then
     wc -l $STATUS_CSV | grep -q '6'