mirror of https://github.com/YosysHQ/sby.git synced 2025-08-26 06:36:02 +00:00

Merge branch 'main' into krys/fix_status_trace

commit 5fc7b93627
KrystalDelusion, 2025-08-05 12:55:12 +12:00 (committed by GitHub)
9 changed files with 156 additions and 101 deletions

View file

@@ -64,15 +64,15 @@ status_show = args.status
 status_reset = args.status_reset
 status_cancels = args.status_cancels
 task_status = args.task_status
-status_live_csv = args.livecsv
-status_show_csv = args.statuscsv
+status_live_formats = args.live_formats
+status_format = args.status_format
 status_latest = args.status_latest
 
 if autotune and linkmode:
     print("ERROR: --link flag currently not available with --autotune")
     sys.exit(1)
 
-if status_show or status_reset or task_status or status_show_csv:
+if status_show or status_reset or task_status or status_format:
     target = workdir_prefix or workdir or sbyfile
     if target is None:
         print("ERROR: Specify a .sby config file or working directory to use --status.")
@@ -104,16 +104,16 @@ if status_show or status_reset or task_status or status_show_csv:
     if status_show:
         status_db.print_status_summary(status_latest)
 
-    if status_show_csv:
-        status_db.print_status_summary_csv(tasknames, status_latest)
+    if status_format:
+        status_db.print_status_summary_fmt(tasknames, status_format, status_latest)
 
     if task_status:
         status_db.print_task_summary()
 
     status_db.db.close()
 
-    if status_live_csv:
-        print(f"WARNING: --livecsv flag found but not used.")
+    if status_live_formats:
+        print(f"WARNING: --live option found but not used.")
 
     sys.exit(0)
 
 elif status_latest:
@@ -496,7 +496,7 @@ def start_task(taskloop, taskname):
     else:
         junit_filename = "junit"
 
-    task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, status_cancels, taskloop, name=taskname, live_csv=status_live_csv)
+    task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, status_cancels, taskloop, name=taskname, live_formats=status_live_formats)
 
     for k, v in exe_paths.items():
         task.exe_paths[k] = v

View file

@@ -29,8 +29,8 @@ def parser_func(release_version='unknown SBY version'):
             help="maximum number of processes to run in parallel")
     parser.add_argument("--sequential", action="store_true", dest="sequential",
             help="run tasks in sequence, not in parallel")
-    parser.add_argument("--livecsv", action="store_true", dest="livecsv",
-            help="print live updates of property statuses during task execution in csv format")
+    parser.add_argument("--live", action="append", choices=["csv", "jsonl"], dest="live_formats",
+            help="print live updates of property statuses during task execution, may be specified multiple times")
     parser.add_argument("--autotune", action="store_true", dest="autotune",
             help="automatically find a well performing engine and engine configuration for each task")
@@ -77,8 +77,8 @@ def parser_func(release_version='unknown SBY version'):
     parser.add_argument("--status", action="store_true", dest="status",
            help="summarize the contents of the status database")
-    parser.add_argument("--statuscsv", action="store_true", dest="statuscsv",
-            help="print the most recent status for each property in csv format")
+    parser.add_argument("--statusfmt", action="store", default="", choices=["csv", "jsonl"], dest="status_format",
+            help="print the most recent status for each property in specified format")
     parser.add_argument("--latest", action="store_true", dest="status_latest",
            help="only check statuses from the most recent run of a task")
     parser.add_argument("--statusreset", action="store_true", dest="status_reset",
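For context on the new flag shapes, here is a minimal standalone argparse sketch (not the actual sby parser; the prog name and example argv are made up): action="append" collects repeated --live uses into a list, while --statusfmt stores a single validated choice.

import argparse

# Standalone sketch of the flag behaviour above; not the real sby parser.
parser = argparse.ArgumentParser(prog="sby-sketch")
parser.add_argument("--live", action="append", choices=["csv", "jsonl"], dest="live_formats",
        help="may be given multiple times")
parser.add_argument("--statusfmt", action="store", default="", choices=["csv", "jsonl"],
        dest="status_format")

args = parser.parse_args(["--live", "csv", "--live", "jsonl", "--statusfmt", "csv"])
assert args.live_formats == ["csv", "jsonl"]  # append accumulates a list
assert args.status_format == "csv"            # store keeps one value

# With no --live on the command line the attribute is None (falsy), which the
# `if status_live_formats:` checks above rely on.
args = parser.parse_args([])
assert args.live_formats is None and args.status_format == ""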

View file

@@ -45,12 +45,8 @@ signal.signal(signal.SIGINT, force_shutdown)
 signal.signal(signal.SIGTERM, force_shutdown)
 
 def process_filename(filename):
-    if filename.startswith("~/"):
-        filename = os.environ['HOME'] + filename[1:]
-
     filename = os.path.expandvars(filename)
-
-    return filename
+    return Path(filename).expanduser()
 
 def dress_message(workdir, logmessage):
     tm = localtime()
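The pathlib-based process_filename() keeps $VAR expansion and delegates ~ handling to Path.expanduser(). A small standalone sketch (the PROJ variable and file names are made-up examples):

import os
from pathlib import Path

# Sketch of the new process_filename() behaviour shown in the hunk above.
def process_filename(filename):
    filename = os.path.expandvars(filename)   # expands $VAR and ${VAR}
    return Path(filename).expanduser()        # expands a leading ~ or ~user

os.environ["PROJ"] = "/tmp/proj"               # made-up example variable
print(process_filename("${PROJ}/rtl/top.sv"))  # -> /tmp/proj/rtl/top.sv (a Path)
print(process_filename("~/checks.sby"))        # -> <home dir>/checks.sby

Note that the function now returns a Path rather than a str, which matches the pathlib-based copy_src() rewrite further down.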
@@ -579,7 +575,7 @@ class SbyConfig:
                 self.error(f"sby file syntax error: '[files]' section entry expects up to 2 arguments, {len(entries)} specified")
 
             if len(entries) == 1:
-                self.files[os.path.basename(entries[0])] = entries[0]
+                self.files[Path(entries[0]).name] = entries[0]
             elif len(entries) == 2:
                 self.files[entries[0]] = entries[1]
@@ -913,7 +909,7 @@ class SbySummary:
 
 class SbyTask(SbyConfig):
-    def __init__(self, sbyconfig, workdir, early_logs, reusedir, status_cancels=False, taskloop=None, logfile=None, name=None, live_csv=False):
+    def __init__(self, sbyconfig, workdir, early_logs, reusedir, status_cancels=False, taskloop=None, logfile=None, name=None, live_formats=[]):
         super().__init__()
         self.used_options = set()
         self.models = dict()
@@ -921,7 +917,7 @@ class SbyTask(SbyConfig):
         self.reusedir = reusedir
         self.status_cancels = status_cancels
         self.name = name
-        self.live_csv = live_csv
+        self.live_formats = live_formats
         self.status = "UNKNOWN"
         self.total_time = 0
         self.expect = list()
@@ -1033,42 +1029,44 @@
             raise SbyAbort(logmessage)
 
     def makedirs(self, path):
-        if self.reusedir and os.path.isdir(path):
+        path = Path(path)
+        if self.reusedir and path.is_dir():
             rmtree(path, ignore_errors=True)
-        if not os.path.isdir(path):
-            os.makedirs(path)
+        path.mkdir(parents=True, exist_ok=True)
 
     def copy_src(self, linkmode=False):
-        self.makedirs(self.workdir + "/src")
+        outdir = Path(self.workdir) / "src"
+        self.makedirs(outdir)
 
         for dstfile, lines in self.verbatim_files.items():
-            dstfile = self.workdir + "/src/" + dstfile
-            self.log(f"Writing '{dstfile}'.")
+            dstfile = outdir / dstfile
+            self.log(f"Writing '{dstfile.absolute()}'.")
+            dstfile.parent.mkdir(parents=True, exist_ok=True)
 
             with open(dstfile, "w") as f:
                 for line in lines:
                     f.write(line)
 
         for dstfile, srcfile in self.files.items():
-            if dstfile.startswith("/") or dstfile.startswith("../") or ("/../" in dstfile):
+            dstfile = Path(dstfile)
+            if dstfile.is_absolute() or ".." in dstfile.parts:
                 self.error(f"destination filename must be a relative path without /../: {dstfile}")
 
-            dstfile = self.workdir + "/src/" + dstfile
+            dstfile = outdir / dstfile
             srcfile = process_filename(srcfile)
 
-            basedir = os.path.dirname(dstfile)
-            if basedir != "" and not os.path.exists(basedir):
-                os.makedirs(basedir)
+            basedir = dstfile.parent
+            basedir.mkdir(parents=True, exist_ok=True)
 
             if linkmode:
                 verb = "Link"
             else:
                 verb = "Copy"
-            self.log(f"{verb} '{os.path.abspath(srcfile)}' to '{os.path.abspath(dstfile)}'.")
+            self.log(f"{verb} '{srcfile.absolute()}' to '{dstfile.absolute()}'.")
 
             if linkmode:
-                os.symlink(os.path.relpath(srcfile, basedir), dstfile)
-            elif os.path.isdir(srcfile):
+                os.symlink(srcfile.resolve(), dstfile)
+            elif srcfile.is_dir():
                 copytree(srcfile, dstfile, dirs_exist_ok=True)
             else:
                 copyfile(srcfile, dstfile)
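Two pathlib idioms the rewrite leans on, shown as a standalone sketch (not sby code; the example paths are made up): the absolute/".." check on destination names, and mkdir(parents=True, exist_ok=True) replacing the isdir/makedirs pair.

from pathlib import Path
import tempfile

# Destination names must stay inside the src/ tree: reject absolute paths and
# any ".." component, mirroring the check in copy_src() above.
def dst_ok(dstfile: str) -> bool:
    p = Path(dstfile)
    return not (p.is_absolute() or ".." in p.parts)

assert dst_ok("a/b/c.v")            # nested relative path: fine
assert not dst_ok("../escape.v")    # leading ..
assert not dst_ok("a/../../x.v")    # embedded ..
assert not dst_ok("/etc/passwd")    # absolute path

# mkdir(parents=True, exist_ok=True) creates missing parents and is a no-op
# when the directory already exists, so no isdir()/makedirs() dance is needed.
with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "src" / "a" / "b"
    target.mkdir(parents=True, exist_ok=True)
    target.mkdir(parents=True, exist_ok=True)  # second call is harmless
    assert target.is_dir()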
@@ -1097,12 +1095,12 @@
         self.__dict__["opt_" + option_name] = default_value
 
     def make_model(self, model_name):
-        if not os.path.isdir(f"{self.workdir}/model"):
-            os.makedirs(f"{self.workdir}/model")
+        modeldir = Path(self.workdir) / "model"
+        modeldir.mkdir(exist_ok=True)
 
         if model_name == "prep":
-            with open(f"""{self.workdir}/model/design_prep.ys""", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / "design_prep.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print(f"""read_rtlil design.il""", file=f)
                 if not self.opt_skip_prep:
                     print("scc -select; simplemap; select -clear", file=f)
@@ -1146,7 +1144,7 @@
             return [proc]
 
         if model_name == "base":
-            with open(f"""{self.workdir}/model/design.ys""", "w") as f:
+            with open(modeldir / "design.ys", "w") as f:
                 print(f"# running in {self.workdir}/src/", file=f)
                 for cmd in self.script:
                     print(cmd, file=f)
@@ -1168,7 +1166,7 @@
             def instance_hierarchy_callback(retcode):
                 if self.design == None:
-                    with open(f"{self.workdir}/model/design.json") as f:
+                    with open(modeldir / "design.json") as f:
                         self.design = design_hierarchy(f)
                     self.status_db.create_task_properties([
                         prop for prop in self.design.properties_by_path.values()
@@ -1184,8 +1182,8 @@
             return [proc]
 
         if re.match(r"^smt2(_syn)?(_nomem)?(_stbv|_stdt)?$", model_name):
-            with open(f"{self.workdir}/model/design_{model_name}.ys", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / f"design_{model_name}.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print(f"""read_rtlil design_prep.il""", file=f)
                 print("hierarchy -smtcheck", file=f)
                 print("delete */t:$print", file=f)
@@ -1218,8 +1216,8 @@
             return [proc]
 
         if re.match(r"^btor(_syn)?(_nomem)?$", model_name):
-            with open(f"{self.workdir}/model/design_{model_name}.ys", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / f"design_{model_name}.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print(f"""read_rtlil design_prep.il""", file=f)
                 print("hierarchy -simcheck", file=f)
                 print("delete */t:$print", file=f)
@@ -1254,8 +1252,8 @@
             return [proc]
 
         if model_name == "aig":
-            with open(f"{self.workdir}/model/design_aiger.ys", "w") as f:
-                print(f"# running in {self.workdir}/model/", file=f)
+            with open(modeldir / "design_aiger.ys", "w") as f:
+                print(f"# running in {modeldir}/", file=f)
                 print("read_rtlil design_prep.il", file=f)
                 print("delete */t:$print", file=f)
                 print("hierarchy -simcheck", file=f)
@@ -1281,7 +1279,7 @@
                 self,
                 "aig",
                 self.model("prep"),
-                f"""cd {self.workdir}/model; {self.exe_paths["yosys"]} -ql design_aiger.log design_aiger.ys"""
+                f"""cd {modeldir}; {self.exe_paths["yosys"]} -ql design_aiger.log design_aiger.ys"""
             )
             proc.checkretcode = True
@@ -1292,8 +1290,8 @@
                 self,
                 model_name,
                 self.model("aig"),
-                f"""cd {self.workdir}/model; {self.exe_paths["abc"]} -c 'read_aiger design_aiger.aig; fold{" -s" if self.opt_aigfolds else ""}; strash; write_aiger design_aiger_fold.aig'""",
-                logfile=open(f"{self.workdir}/model/design_aiger_fold.log", "w")
+                f"""cd {modeldir}; {self.exe_paths["abc"]} -c 'read_aiger design_aiger.aig; fold{" -s" if self.opt_aigfolds else ""}; strash; write_aiger design_aiger_fold.aig'""",
+                logfile=open(f"{modeldir}/design_aiger_fold.log", "w")
            )
            proc.checkretcode = True
@@ -1430,7 +1428,7 @@
        except FileNotFoundError:
            status_path = f"{self.workdir}/status.sqlite"
 
-        self.status_db = SbyStatusDb(status_path, self, live_csv=self.live_csv)
+        self.status_db = SbyStatusDb(status_path, self, live_formats=self.live_formats)
 
     def setup_procs(self, setupmode, linkmode=False):
         self.handle_non_engine_options()

View file

@@ -106,10 +106,10 @@ class FileInUseError(Exception):
 
 class SbyStatusDb:
-    def __init__(self, path: Path, task, timeout: float = 5.0, live_csv = False):
+    def __init__(self, path: Path, task, timeout: float = 5.0, live_formats = []):
         self.debug = False
         self.task = task
-        self.live_csv = live_csv
+        self.live_formats = live_formats
         self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout)
         self.db = self.con.cursor()
@@ -250,10 +250,11 @@
            ),
        )
 
-        if self.live_csv:
+        if self.live_formats:
            row = self.get_status_data_joined(self.db.lastrowid)
-            csvline = format_status_data_csvline(row)
-            self.task.log(f"{click.style('csv', fg='yellow')}: {csvline}")
+            for fmt in self.live_formats:
+                fmtline = format_status_data_fmtline(row, fmt)
+                self.task.log(f"{click.style(fmt, fg='yellow')}: {fmtline}")
 
     @transaction
     def add_task_trace(
@@ -440,14 +441,15 @@
         return {row["id"]: parse_status_data_row(row) for row in rows}
 
-    def print_status_summary_csv(self, tasknames: list[str], latest: bool):
+    def print_status_summary_fmt(self, tasknames: list[str], status_format: str, latest: bool):
         # get all statuses
         all_properties = self.all_status_data_joined()
         latest_task_ids = filter_latest_task_ids(self.all_tasks())
 
-        # print csv header
-        csvheader = format_status_data_csvline(None)
-        print(csvheader)
+        # print header
+        header = format_status_data_fmtline(None, status_format)
+        if header:
+            print(header)
 
         # find summary for each task/property combo
         prop_map: dict[(str, str, str), dict[str, (int, int)]] = {}
@@ -497,9 +499,8 @@
                 del prop["UNKNOWN"]
 
             for _, row, _ in prop.values():
-                csvline = format_status_data_csvline(all_properties[row])
-                print(csvline)
+                line = format_status_data_fmtline(all_properties[row], status_format)
+                print(line)
 
 def combine_statuses(statuses):
     statuses = set(statuses)
@@ -515,47 +516,64 @@ def parse_status_data_row(raw: sqlite3.Row):
     row_dict["data"] = json.loads(row_dict.get("data") or "{}")
     return row_dict
 
-def format_status_data_csvline(row: dict|None) -> str:
+fmtline_columns = [
+    "time",
+    "task_name",
+    "mode",
+    "engine",
+    "name",
+    "location",
+    "kind",
+    "status",
+    "trace",
+    "depth",
+]
+
+def format_status_data_fmtline(row: dict|None, fmt: str = "csv") -> str:
     if row is None:
-        csv_header = [
-            "time",
-            "task_name",
-            "mode",
-            "engine",
-            "name",
-            "location",
-            "kind",
-            "status",
-            "trace",
-            "depth",
-        ]
-        return ','.join(csv_header)
+        data = None
     else:
         engine = row['data'].get('engine', row['data'].get('source'))
-        try:
-            time = row['status_created'] - row['created']
-        except TypeError:
-            time = 0
         name = row['hdlname']
         depth = row['data'].get('step')
-        try:
-            trace_path = Path(row['workdir']) / row['path']
-        except TypeError:
-            trace_path = None
-        csv_line = [
-            round(time, 2),
-            row['task_name'],
-            row['mode'],
-            engine,
-            name or pretty_path(row['name']),
-            row['location'],
-            row['kind'],
-            row['status'] or "UNKNOWN",
-            trace_path,
-            depth,
-        ]
-        return ','.join("" if v is None else str(v) for v in csv_line)
+        data = {
+            "task_name": row['task_name'],
+            "mode": row['mode'],
+            "engine": engine,
+            "name": name or pretty_path(row['name']),
+            "location": row['location'],
+            "kind": row['kind'],
+            "status": row['status'] or "UNKNOWN",
+            "depth": depth,
+        }
+        try:
+            data["trace"] = str(Path(row['workdir']) / row['path'])
+        except TypeError:
+            pass
+        try:
+            data['time'] = round(row['status_created'] - row['created'], 2)
+        except TypeError:
+            pass
+    if fmt == "csv":
+        if data is None:
+            csv_line = fmtline_columns
+        else:
+            csv_line = [data.get(column) for column in fmtline_columns]
+        def csv_field(value):
+            if value is None:
+                return ""
+            value = str(value).replace('"', '""')
+            if any(c in value for c in '",\n'):
+                value = f'"{value}"'
+            return value
+        return ','.join(map(csv_field, csv_line))
+    elif fmt == "jsonl":
+        if data is None:
+            return ""
+        # field order
+        data = {column: data[column] for column in fmtline_columns if data.get(column)}
+        return json.dumps(data)
 
 def filter_latest_task_ids(all_tasks: dict[int, dict[str]]):
     latest: dict[str, int] = {}
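To make the new output concrete, a standalone illustration: the column order, CSV quoting, and JSONL field-dropping below re-implement the logic from this hunk on a hand-made data dict (the sample values are invented).

import json

# Re-implementation of the csv_field/jsonl formatting from the hunk above,
# applied to a made-up status record for illustration.
fmtline_columns = ["time", "task_name", "mode", "engine", "name",
                   "location", "kind", "status", "trace", "depth"]

def csv_field(value):
    if value is None:
        return ""
    value = str(value).replace('"', '""')
    if any(c in value for c in '",\n'):
        value = f'"{value}"'
    return value

data = {
    "time": 0.42, "task_name": "bmc", "mode": "bmc", "engine": "smtbmc boolector",
    "name": "top.assert_ok", "location": "top.sv:12", "kind": "assert",
    "status": "PASS", "trace": None, "depth": 20,
}

print(",".join(csv_field(data.get(c)) for c in fmtline_columns))
# 0.42,bmc,bmc,smtbmc boolector,top.assert_ok,top.sv:12,assert,PASS,,20

# Fields containing quotes, commas or newlines get CSV-escaped:
assert csv_field('a "quoted", field') == '"a ""quoted"", field"'

# The jsonl branch keeps the same column order and drops empty fields
# (here "trace" is None, so it is omitted):
print(json.dumps({c: data[c] for c in fmtline_columns if data.get(c)}))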

tests/links/more_dirs.sby (new file)
View file

@@ -0,0 +1,20 @@
+[tasks]
+link
+copy
+
+[options]
+mode prep
+
+[engines]
+btor btormc
+
+[script]
+read -noverific
+script dir/script.ys
+
+[files]
+here/dir ${WORKDIR}/../dir
+a/b/c.v prv32fmcmp.v
+
+[file here/doc]
+log foo

tests/links/more_dirs.sh (new file)
View file

@@ -0,0 +1,10 @@
+#!/bin/bash
+set -e
+
+if [[ $TASK == link ]]; then
+    flags="--setup --link"
+else
+    flags="--setup"
+fi
+python3 $SBY_MAIN -f $SBY_FILE $TASK $flags
+test -e ${WORKDIR}/src/here/dir -a -e ${WORKDIR}/src/a/b/c.v -a -e ${WORKDIR}/src/here/doc

View file

@@ -1,10 +1,10 @@
-import os
 from pathlib import Path
 import sys
 
 def main():
     workdir, task = sys.argv[1:]
     src = Path(workdir) / "src"
+    count = 0
     for srcfile in src.iterdir():
         if srcfile.name == "heredoc":
             assert(not srcfile.is_symlink())
@@ -13,6 +13,11 @@ def main():
             assert(local_contents.strip() == 'log foo')
         else:
             assert(srcfile.is_symlink() == (task == "link"))
+        assert(srcfile.name != "script.ys")
+        count += 1
+    assert(count == 4)
+    script_ys = src / "dir" / "script.ys"
+    assert(script_ys.exists())
 
 if __name__ == "__main__":
     main()

View file

@@ -1,6 +1,8 @@
 [tasks]
 link
 copy
+dir_implicit: dir
+dir_explicit: dir
 
 [options]
 mode prep
@@ -15,7 +17,9 @@ script dir/script.ys
 [files]
 ../../docs/examples/demos/picorv32.v
 prv32fmcmp.v
-dir
+~dir: dir
+dir_implicit: dir/
+dir_explicit: dir/ dir/
 
 [file heredoc]
 log foo

View file

@@ -3,7 +3,7 @@ set -e
 python3 $SBY_MAIN -f $SBY_FILE $TASK
 
 STATUS_CSV=${WORKDIR}/status.csv
-python3 $SBY_MAIN -f $SBY_FILE $TASK --statuscsv --latest | tee $STATUS_CSV
+python3 $SBY_MAIN -f $SBY_FILE $TASK --statusfmt csv --latest | tee $STATUS_CSV
 
 if [[ $TASK =~ "_cover" ]]; then
     wc -l $STATUS_CSV | grep -q '6'