
Merge pull request #334 from YosysHQ/krys/jsonlines

Add jsonl status format
KrystalDelusion 2025-08-01 10:45:50 +12:00 committed by GitHub
commit 32f6ac2a5a
5 changed files with 82 additions and 64 deletions

View file

@@ -64,15 +64,15 @@ status_show = args.status
status_reset = args.status_reset
status_cancels = args.status_cancels
task_status = args.task_status
status_live_csv = args.livecsv
status_show_csv = args.statuscsv
status_live_formats = args.live_formats
status_format = args.status_format
status_latest = args.status_latest
if autotune and linkmode:
print("ERROR: --link flag currently not available with --autotune")
sys.exit(1)
if status_show or status_reset or task_status or status_show_csv:
if status_show or status_reset or task_status or status_format:
target = workdir_prefix or workdir or sbyfile
if target is None:
print("ERROR: Specify a .sby config file or working directory to use --status.")
@@ -104,16 +104,16 @@ if status_show or status_reset or task_status or status_show_csv:
if status_show:
status_db.print_status_summary(status_latest)
if status_show_csv:
status_db.print_status_summary_csv(tasknames, status_latest)
if status_format:
status_db.print_status_summary_fmt(tasknames, status_format, status_latest)
if task_status:
status_db.print_task_summary()
status_db.db.close()
if status_live_csv:
print(f"WARNING: --livecsv flag found but not used.")
if status_live_formats:
print(f"WARNING: --live option found but not used.")
sys.exit(0)
elif status_latest:
@@ -496,7 +496,7 @@ def start_task(taskloop, taskname):
else:
junit_filename = "junit"
task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, status_cancels, taskloop, name=taskname, live_csv=status_live_csv)
task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, status_cancels, taskloop, name=taskname, live_formats=status_live_formats)
for k, v in exe_paths.items():
task.exe_paths[k] = v

View file

@@ -29,8 +29,8 @@ def parser_func(release_version='unknown SBY version'):
help="maximum number of processes to run in parallel")
parser.add_argument("--sequential", action="store_true", dest="sequential",
help="run tasks in sequence, not in parallel")
parser.add_argument("--livecsv", action="store_true", dest="livecsv",
help="print live updates of property statuses during task execution in csv format")
parser.add_argument("--live", action="append", choices=["csv", "jsonl"], dest="live_formats",
help="print live updates of property statuses during task execution, may be specified multiple times")
parser.add_argument("--autotune", action="store_true", dest="autotune",
help="automatically find a well performing engine and engine configuration for each task")
@@ -77,8 +77,8 @@ def parser_func(release_version='unknown SBY version'):
parser.add_argument("--status", action="store_true", dest="status",
help="summarize the contents of the status database")
parser.add_argument("--statuscsv", action="store_true", dest="statuscsv",
help="print the most recent status for each property in csv format")
parser.add_argument("--statusfmt", action="store", default="", choices=["csv", "jsonl"], dest="status_format",
help="print the most recent status for each property in specified format")
parser.add_argument("--latest", action="store_true", dest="status_latest",
help="only check statuses from the most recent run of a task")
parser.add_argument("--statusreset", action="store_true", dest="status_reset",

View file

@@ -913,7 +913,7 @@ class SbySummary:
class SbyTask(SbyConfig):
def __init__(self, sbyconfig, workdir, early_logs, reusedir, status_cancels=False, taskloop=None, logfile=None, name=None, live_csv=False):
def __init__(self, sbyconfig, workdir, early_logs, reusedir, status_cancels=False, taskloop=None, logfile=None, name=None, live_formats=[]):
super().__init__()
self.used_options = set()
self.models = dict()
@@ -921,7 +921,7 @@ class SbyTask(SbyConfig):
self.reusedir = reusedir
self.status_cancels = status_cancels
self.name = name
self.live_csv = live_csv
self.live_formats = live_formats
self.status = "UNKNOWN"
self.total_time = 0
self.expect = list()
@@ -1430,7 +1430,7 @@ class SbyTask(SbyConfig):
except FileNotFoundError:
status_path = f"{self.workdir}/status.sqlite"
self.status_db = SbyStatusDb(status_path, self, live_csv=self.live_csv)
self.status_db = SbyStatusDb(status_path, self, live_formats=self.live_formats)
def setup_procs(self, setupmode, linkmode=False):
self.handle_non_engine_options()

View file

@@ -106,10 +106,10 @@ class FileInUseError(Exception):
class SbyStatusDb:
def __init__(self, path: Path, task, timeout: float = 5.0, live_csv = False):
def __init__(self, path: Path, task, timeout: float = 5.0, live_formats = []):
self.debug = False
self.task = task
self.live_csv = live_csv
self.live_formats = live_formats
self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout)
self.db = self.con.cursor()
@@ -250,10 +250,11 @@ class SbyStatusDb:
),
)
if self.live_csv:
if self.live_formats:
row = self.get_status_data_joined(self.db.lastrowid)
csvline = format_status_data_csvline(row)
self.task.log(f"{click.style('csv', fg='yellow')}: {csvline}")
for fmt in self.live_formats:
fmtline = format_status_data_fmtline(row, fmt)
self.task.log(f"{click.style(fmt, fg='yellow')}: {fmtline}")
@transaction
def add_task_trace(
@@ -440,14 +441,15 @@ class SbyStatusDb:
return {row["id"]: parse_status_data_row(row) for row in rows}
def print_status_summary_csv(self, tasknames: list[str], latest: bool):
def print_status_summary_fmt(self, tasknames: list[str], status_format: str, latest: bool):
# get all statuses
all_properties = self.all_status_data_joined()
latest_task_ids = filter_latest_task_ids(self.all_tasks())
# print csv header
csvheader = format_status_data_csvline(None)
print(csvheader)
# print header
header = format_status_data_fmtline(None, status_format)
if header:
print(header)
# find summary for each task/property combo
prop_map: dict[(str, str), dict[str, (int, int)]] = {}
@@ -488,9 +490,8 @@ class SbyStatusDb:
del prop["UNKNOWN"]
for _, row in prop.values():
csvline = format_status_data_csvline(all_properties[row])
print(csvline)
line = format_status_data_fmtline(all_properties[row], status_format)
print(line)
def combine_statuses(statuses):
statuses = set(statuses)
@@ -506,47 +507,64 @@ def parse_status_data_row(raw: sqlite3.Row):
row_dict["data"] = json.loads(row_dict.get("data") or "{}")
return row_dict
def format_status_data_csvline(row: dict|None) -> str:
fmtline_columns = [
"time",
"task_name",
"mode",
"engine",
"name",
"location",
"kind",
"status",
"trace",
"depth",
]
def format_status_data_fmtline(row: dict|None, fmt: str = "csv") -> str:
if row is None:
csv_header = [
"time",
"task_name",
"mode",
"engine",
"name",
"location",
"kind",
"status",
"trace",
"depth",
]
return ','.join(csv_header)
data = None
else:
engine = row['data'].get('engine', row['data'].get('source'))
try:
time = row['status_created'] - row['created']
except TypeError:
time = 0
name = row['hdlname']
depth = row['data'].get('step')
try:
trace_path = Path(row['workdir']) / row['path']
except TypeError:
trace_path = None
csv_line = [
round(time, 2),
row['task_name'],
row['mode'],
engine,
name or pretty_path(row['name']),
row['location'],
row['kind'],
row['status'] or "UNKNOWN",
trace_path,
depth,
]
return ','.join("" if v is None else str(v) for v in csv_line)
data = {
"task_name": row['task_name'],
"mode": row['mode'],
"engine": engine,
"name": name or pretty_path(row['name']),
"location": row['location'],
"kind": row['kind'],
"status": row['status'] or "UNKNOWN",
"depth": depth,
}
try:
data["trace"] = str(Path(row['workdir']) / row['path'])
except TypeError:
pass
try:
data['time'] = round(row['status_created'] - row['created'], 2)
except TypeError:
pass
if fmt == "csv":
if data is None:
csv_line = fmtline_columns
else:
csv_line = [data.get(column) for column in fmtline_columns]
def csv_field(value):
if value is None:
return ""
value = str(value).replace('"', '""')
if any(c in value for c in '",\n'):
value = f'"{value}"'
return value
return ','.join(map(csv_field, csv_line))
elif fmt == "jsonl":
if data is None:
return ""
# field order
data = {column: data[column] for column in fmtline_columns if data.get(column)}
return json.dumps(data)
def filter_latest_task_ids(all_tasks: dict[int, dict[str]]):
latest: dict[str, int] = {}
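As a usage illustration, the sketch below (not part of the diff; the example row values are invented) shows what a single property status looks like in each of the two formats, mirroring the csv_field escaping and the column-ordered json.dumps call added in format_status_data_fmtline above.

import json

fmtline_columns = ["time", "task_name", "mode", "engine", "name",
                   "location", "kind", "status", "trace", "depth"]

# Invented example row, already reduced to the dict that format_status_data_fmtline builds.
data = {
    "time": 1.23, "task_name": "cover_task", "mode": "cover", "engine": "smtbmc",
    "name": "top.cover_point", "location": "top.sv:42", "kind": "$cover",
    "status": "PASS", "trace": "cover_task/engine_0/trace0.vcd", "depth": 7,
}

def csv_field(value):
    # Same escaping rule as the diff: quote any field containing '"', ',' or a newline.
    if value is None:
        return ""
    value = str(value).replace('"', '""')
    if any(c in value for c in '",\n'):
        value = f'"{value}"'
    return value

print(','.join(csv_field(data.get(c)) for c in fmtline_columns))
# -> 1.23,cover_task,cover,smtbmc,top.cover_point,top.sv:42,$cover,PASS,cover_task/engine_0/trace0.vcd,7

print(json.dumps({c: data[c] for c in fmtline_columns if data.get(c)}))
# -> {"time": 1.23, "task_name": "cover_task", ..., "depth": 7}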

View file

@@ -3,7 +3,7 @@ set -e
python3 $SBY_MAIN -f $SBY_FILE $TASK
STATUS_CSV=${WORKDIR}/status.csv
python3 $SBY_MAIN -f $SBY_FILE $TASK --statuscsv --latest | tee $STATUS_CSV
python3 $SBY_MAIN -f $SBY_FILE $TASK --statusfmt csv --latest | tee $STATUS_CSV
if [[ $TASK =~ "_cover" ]]; then
wc -l $STATUS_CSV | grep -q '6'