From 236f0ec59ca7e64c14328243346789854f38f77c Mon Sep 17 00:00:00 2001 From: KrystalDelusion <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:01 +1200 Subject: [PATCH 01/34] Revert "work around pypy bug" This reverts commit b47d2829f9b23e658d03b109f0118476bc78df1b. --- sbysrc/sby_status.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 2b63070..e4722c3 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -68,8 +68,8 @@ class SbyStatusDb: self.db = sqlite3.connect(path, isolation_level=None, timeout=timeout) self.db.row_factory = sqlite3.Row - self.db.execute("PRAGMA journal_mode=WAL").fetchone() - self.db.execute("PRAGMA synchronous=0").fetchone() + self.db.execute("PRAGMA journal_mode=WAL") + self.db.execute("PRAGMA synchronous=0") if setup: self._setup() From 03244c4f96a43a53b094f99d65ddc7b5bfb338ee Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:01 +1200 Subject: [PATCH 02/34] Use a cursor for PRAGMAs --- sbysrc/sby_status.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index e4722c3..3d5c295 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -68,8 +68,10 @@ class SbyStatusDb: self.db = sqlite3.connect(path, isolation_level=None, timeout=timeout) self.db.row_factory = sqlite3.Row - self.db.execute("PRAGMA journal_mode=WAL") - self.db.execute("PRAGMA synchronous=0") + cur = self.db.cursor() + cur.execute("PRAGMA journal_mode=WAL") + cur.execute("PRAGMA synchronous=0") + self.db.commit() if setup: self._setup() From f84e648391c76ab70c0987ecfb20ec64cd2c9968 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:01 +1200 Subject: [PATCH 03/34] Better transaction control Use context manager to handle commit/rollback. Use `sqlite3.Connection.in_transaction` property instead of rolling our own. Read-only methods don't need the transaction wrapper and we never read-update-write. 
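
For context, a minimal standalone sketch of the sqlite3 pattern this commit
switches to (not part of the patch; the table and values are made up for
illustration). `with con:` commits on a clean exit and rolls back on an
exception, `con.in_transaction` reports whether a transaction is open, and the
explicit "begin" is needed because the connection uses isolation_level=None
(autocommit):

    import sqlite3

    con = sqlite3.connect(":memory:", isolation_level=None)  # autocommit mode
    con.execute("CREATE TABLE t (x INTEGER)")

    def insert_row(value):
        if con.in_transaction:
            # already inside an outer transaction, just run the statement
            con.execute("INSERT INTO t VALUES (?)", (value,))
            return
        with con:                  # commits on success, rolls back on error
            con.execute("begin")   # required since isolation_level=None
            con.execute("INSERT INTO t VALUES (?)", (value,))

    insert_row(1)
    assert not con.in_transaction  # the context manager committed the BEGIN
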
--- sbysrc/sby_status.py | 65 ++++++++++++++++++-------------------------- 1 file changed, 26 insertions(+), 39 deletions(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 3d5c295..fb3fc82 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -17,61 +17,52 @@ Fn = TypeVar("Fn", bound=Callable[..., Any]) def transaction(method: Fn) -> Fn: @wraps(method) def wrapper(self: SbyStatusDb, *args: Any, **kwargs: Any) -> Any: - if self._transaction_active: + if self.con.in_transaction: return method(self, *args, **kwargs) try: - self.log_debug(f"begin {method.__name__!r} transaction") - self.db.execute("begin") - self._transaction_active = True - result = method(self, *args, **kwargs) - self.db.execute("commit") - self._transaction_active = False - self.log_debug(f"comitted {method.__name__!r} transaction") - return result - except sqlite3.OperationalError as err: - self.log_debug(f"failed {method.__name__!r} transaction {err}") - self.db.rollback() - self._transaction_active = False + with self.con: + self.log_debug(f"begin {method.__name__!r} transaction") + self.db.execute("begin") + result = method(self, *args, **kwargs) except Exception as err: self.log_debug(f"failed {method.__name__!r} transaction {err}") - self.db.rollback() - self._transaction_active = False - raise + if not isinstance(err, sqlite3.OperationalError): + raise + else: + self.log_debug(f"comitted {method.__name__!r} transaction") + return result + try: - self.log_debug( - f"retrying {method.__name__!r} transaction once in immediate mode" - ) - self.db.execute("begin immediate") - self._transaction_active = True - result = method(self, *args, **kwargs) - self.db.execute("commit") - self._transaction_active = False - self.log_debug(f"comitted {method.__name__!r} transaction") - return result + with self.con: + self.log_debug( + f"retrying {method.__name__!r} transaction once in immediate mode" + ) + self.db.execute("begin immediate") + result = method(self, *args, **kwargs) except Exception as err: self.log_debug(f"failed {method.__name__!r} transaction {err}") - self.db.rollback() - self._transaction_active = False raise + else: + self.log_debug(f"comitted {method.__name__!r} transaction") + return result return wrapper # type: ignore class SbyStatusDb: def __init__(self, path: Path, task, timeout: float = 5.0): - self.debug = False + self.debug = True self.task = task - self._transaction_active = False setup = not os.path.exists(path) - self.db = sqlite3.connect(path, isolation_level=None, timeout=timeout) + self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout) + self.db = self.con.cursor() self.db.row_factory = sqlite3.Row - cur = self.db.cursor() - cur.execute("PRAGMA journal_mode=WAL") - cur.execute("PRAGMA synchronous=0") - self.db.commit() + with self.con: + self.db.execute("PRAGMA journal_mode=WAL") + self.db.execute("PRAGMA synchronous=0") if setup: self._setup() @@ -245,7 +236,6 @@ class SbyStatusDb: ), ) - @transaction def all_tasks(self): rows = self.db.execute( """ @@ -255,7 +245,6 @@ class SbyStatusDb: return {row["id"]: dict(row) for row in rows} - @transaction def all_task_properties(self): rows = self.db.execute( """ @@ -271,7 +260,6 @@ class SbyStatusDb: return {row["id"]: get_result(row) for row in rows} - @transaction def all_task_property_statuses(self): rows = self.db.execute( """ @@ -287,7 +275,6 @@ class SbyStatusDb: return {row["id"]: get_result(row) for row in rows} - @transaction def all_status_data(self): return ( self.all_tasks(), From 
ad9382d46c0fd14f430b98b4310374357525e4e6 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:01 +1200 Subject: [PATCH 04/34] Re-disable db debug mode --- sbysrc/sby_status.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index fb3fc82..e154205 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -52,7 +52,7 @@ def transaction(method: Fn) -> Fn: class SbyStatusDb: def __init__(self, path: Path, task, timeout: float = 5.0): - self.debug = True + self.debug = False self.task = task setup = not os.path.exists(path) From 10040ce859fffdf603e7d2890bd9db7f5bfa4a6d Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:01 +1200 Subject: [PATCH 05/34] Test status db schema `--status` reports if the schema doesn't match. `--statusreset` is able to perform a hard reset, dropping all the existing tables and calling `_setup()`. --- sbysrc/sby.py | 10 ++-- sbysrc/sby_status.py | 106 ++++++++++++++++++++++++------------------- 2 files changed, 66 insertions(+), 50 deletions(-) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index 31835be..dc02971 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -85,12 +85,14 @@ if status_show or status_reset: status_db = SbyStatusDb(status_path, task=None) - if status_show: - status_db.print_status_summary() - sys.exit(0) - if status_reset: status_db.reset() + elif status_db.test_schema(): + print(f"ERROR: Status database does not match expected formatted. Use --statusreset to reset.") + sys.exit(1) + + if status_show: + status_db.print_status_summary() status_db.db.close() sys.exit(0) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index e154205..cb91174 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -4,6 +4,7 @@ import sqlite3 import os import time import json +import re from collections import defaultdict from functools import wraps from pathlib import Path @@ -13,6 +14,45 @@ from sby_design import SbyProperty, pretty_path Fn = TypeVar("Fn", bound=Callable[..., Any]) +SQLSCRIPT = """\ +CREATE TABLE task ( + id INTEGER PRIMARY KEY, + workdir TEXT, + mode TEXT, + created REAL +); +CREATE TABLE task_status ( + id INTEGER PRIMARY KEY, + task INTEGER, + status TEXT, + data TEXT, + created REAL, + FOREIGN KEY(task) REFERENCES task(id) +); +CREATE TABLE task_property ( + id INTEGER PRIMARY KEY, + task INTEGER, + src TEXT, + name TEXT, + created REAL, + FOREIGN KEY(task) REFERENCES task(id) +); +CREATE TABLE task_property_status ( + id INTEGER PRIMARY KEY, + task_property INTEGER, + status TEXT, + data TEXT, + created REAL, + FOREIGN KEY(task_property) REFERENCES task_property(id) +); +CREATE TABLE task_property_data ( + id INTEGER PRIMARY KEY, + task_property INTEGER, + kind TEXT, + data TEXT, + created REAL, + FOREIGN KEY(task_property) REFERENCES task_property(id) +);""" def transaction(method: Fn) -> Fn: @wraps(method) @@ -79,50 +119,17 @@ class SbyStatusDb: @transaction def _setup(self): - script = """ - CREATE TABLE task ( - id INTEGER PRIMARY KEY, - workdir TEXT, - mode TEXT, - created REAL - ); - CREATE TABLE task_status ( - id INTEGER PRIMARY KEY, - task INTEGER, - status TEXT, - data TEXT, - created REAL, - FOREIGN KEY(task) REFERENCES task(id) - ); - CREATE TABLE task_property ( - id INTEGER PRIMARY KEY, - task INTEGER, - src TEXT, - name TEXT, - created REAL, - FOREIGN KEY(task) REFERENCES task(id) - ); - CREATE TABLE 
task_property_status ( - id INTEGER PRIMARY KEY, - task_property INTEGER, - status TEXT, - data TEXT, - created REAL, - FOREIGN KEY(task_property) REFERENCES task_property(id) - ); - CREATE TABLE task_property_data ( - id INTEGER PRIMARY KEY, - task_property INTEGER, - kind TEXT, - data TEXT, - created REAL, - FOREIGN KEY(task_property) REFERENCES task_property(id) - ); - """ - for statement in script.split(";\n"): + for statement in SQLSCRIPT.split(";\n"): statement = statement.strip() if statement: self.db.execute(statement) + self.db.execute("""PRAGMA foreign_keys = ON;""") + + def test_schema(self) -> bool: + schema = self.db.execute("SELECT sql FROM sqlite_master;").fetchall() + schema_script = '\n'.join(str(sql[0] + ';') for sql in schema) + self._tables = re.findall(r"CREATE TABLE (\w+) \(", schema_script) + return schema_script != SQLSCRIPT @transaction def create_task(self, workdir: str, mode: str) -> int: @@ -284,11 +291,18 @@ class SbyStatusDb: @transaction def reset(self): - self.db.execute("""DELETE FROM task_property_status""") - self.db.execute("""DELETE FROM task_property_data""") - self.db.execute("""DELETE FROM task_property""") - self.db.execute("""DELETE FROM task_status""") - self.db.execute("""DELETE FROM task""") + hard_reset = self.test_schema() + # table names can't be parameters, so we need to use f-strings + # but it is safe to use here because it comes from the regex "\w+" + for table in self._tables: + if hard_reset: + self.log_debug(f"dropping {table}") + self.db.execute(f"DROP TABLE {table}") + else: + self.log_debug(f"clearing {table}") + self.db.execute(f"DELETE FROM {table}") + if hard_reset: + self._setup() def print_status_summary(self): tasks, task_properties, task_property_statuses = self.all_status_data() From b1d9bcbb42bd4851e64f5062e8aca807c9a08d71 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:02 +1200 Subject: [PATCH 06/34] tests: Add statusdb test Ensures that `--statusreset` doesn't break the schema. 
--- tests/statusdb/reset.sby | 14 ++++++++++++++ tests/statusdb/reset.sh | 11 +++++++++++ tests/statusdb/reset.sv | 21 +++++++++++++++++++++ 3 files changed, 46 insertions(+) create mode 100644 tests/statusdb/reset.sby create mode 100644 tests/statusdb/reset.sh create mode 100644 tests/statusdb/reset.sv diff --git a/tests/statusdb/reset.sby b/tests/statusdb/reset.sby new file mode 100644 index 0000000..aaf03c4 --- /dev/null +++ b/tests/statusdb/reset.sby @@ -0,0 +1,14 @@ +[options] +mode bmc +depth 100 +expect fail + +[engines] +smtbmc --keep-going boolector + +[script] +read -formal reset.sv +prep -top demo + +[files] +reset.sv diff --git a/tests/statusdb/reset.sh b/tests/statusdb/reset.sh new file mode 100644 index 0000000..3030056 --- /dev/null +++ b/tests/statusdb/reset.sh @@ -0,0 +1,11 @@ +#!/bin/bash +set -e + +# run task +python3 $SBY_MAIN -f $SBY_FILE $TASK +# task has 3 properties +python3 $SBY_MAIN -f $SBY_FILE --status | wc -l | grep -q '3' +# resetting task should work +python3 $SBY_MAIN -f $SBY_FILE --statusreset +# clean database should have no properties +python3 $SBY_MAIN -f $SBY_FILE --status | wc -l | grep -q '0' diff --git a/tests/statusdb/reset.sv b/tests/statusdb/reset.sv new file mode 100644 index 0000000..4514a9d --- /dev/null +++ b/tests/statusdb/reset.sv @@ -0,0 +1,21 @@ +module demo ( + input clk, + output reg [5:0] counter +); + initial counter = 0; + + always @(posedge clk) begin + if (counter == 15) + counter <= 0; + else + counter <= counter + 1; + end + +`ifdef FORMAL + always @(posedge clk) begin + assert (1); + assert (counter < 7); + assert (0); + end +`endif +endmodule From a64f29de6c6cb5e3ad8ad30ebed6bc31c168a81e Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:02 +1200 Subject: [PATCH 07/34] Add note to bad schema exception Requires python >= 3.11 (oss-cad-suite is 3.11.6, but there isn't any minimum python version given for SBY that I can find). Makes the error less opaque (even though it still has the trace), which I think is necessary given that using the status db is totally optional and otherwise using a different version of SBY with an extra db field in a directory where a previous version has run will just crash with an obscure sqlite3.OperationalError. 
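
A minimal standalone sketch of the add_note mechanism relied on here (Python
>= 3.11; the in-memory table is made up, but the error text matches the
"table ... has no column named ..." case the patch targets):

    import sqlite3

    con = sqlite3.connect(":memory:")
    con.execute("CREATE TABLE task (id INTEGER)")
    try:
        con.execute("INSERT INTO task (bogus) VALUES (1)")  # no such column
    except sqlite3.OperationalError as err:
        err.add_note("SBY status database can be reset with --statusreset")
        raise  # the note is printed alongside the original traceback
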
--- sbysrc/sby_status.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index cb91174..ec18b8d 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -69,6 +69,9 @@ def transaction(method: Fn) -> Fn: self.log_debug(f"failed {method.__name__!r} transaction {err}") if not isinstance(err, sqlite3.OperationalError): raise + if re.match("table \w+ has no column named \w+", err.args[0]): + err.add_note("SBY status database can be reset with --statusreset") + raise else: self.log_debug(f"comitted {method.__name__!r} transaction") return result From 233d5f12648d2414915eecb343088ecc7bbbb062 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:02 +1200 Subject: [PATCH 08/34] Actually use foreign key constraints --- sbysrc/sby_status.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index ec18b8d..b693cea 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -103,9 +103,9 @@ class SbyStatusDb: self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout) self.db = self.con.cursor() self.db.row_factory = sqlite3.Row - with self.con: - self.db.execute("PRAGMA journal_mode=WAL") - self.db.execute("PRAGMA synchronous=0") + self.db.execute("PRAGMA journal_mode=WAL") + self.db.execute("PRAGMA synchronous=0") + self.db.execute("PRAGMA foreign_keys=ON") if setup: self._setup() @@ -126,7 +126,6 @@ class SbyStatusDb: statement = statement.strip() if statement: self.db.execute(statement) - self.db.execute("""PRAGMA foreign_keys = ON;""") def test_schema(self) -> bool: schema = self.db.execute("SELECT sql FROM sqlite_master;").fetchall() @@ -293,7 +292,7 @@ class SbyStatusDb: ) @transaction - def reset(self): + def _reset(self): hard_reset = self.test_schema() # table names can't be parameters, so we need to use f-strings # but it is safe to use here because it comes from the regex "\w+" @@ -307,6 +306,11 @@ class SbyStatusDb: if hard_reset: self._setup() + def reset(self): + self.db.execute("PRAGMA foreign_keys=OFF") + self._reset() + self.db.execute("PRAGMA foreign_keys=ON") + def print_status_summary(self): tasks, task_properties, task_property_statuses = self.all_status_data() properties = defaultdict(set) From bd0d615b2ae396b819fc2eeee619958f6a933f35 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:02 +1200 Subject: [PATCH 09/34] Fix raw string --- sbysrc/sby_status.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index b693cea..8807086 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -69,7 +69,7 @@ def transaction(method: Fn) -> Fn: self.log_debug(f"failed {method.__name__!r} transaction {err}") if not isinstance(err, sqlite3.OperationalError): raise - if re.match("table \w+ has no column named \w+", err.args[0]): + if re.match(r"table \w+ has no column named \w+", err.args[0]): err.add_note("SBY status database can be reset with --statusreset") raise else: From 3493f2152f4ec921ac2e750f84806fb4ad4195a2 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:02 +1200 Subject: [PATCH 10/34] statusdb: Retry backoff for PRAGMAs --- sbysrc/sby_status.py | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git 
a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 8807086..3e5293f 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -103,9 +103,25 @@ class SbyStatusDb: self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout) self.db = self.con.cursor() self.db.row_factory = sqlite3.Row - self.db.execute("PRAGMA journal_mode=WAL") - self.db.execute("PRAGMA synchronous=0") - self.db.execute("PRAGMA foreign_keys=ON") + err_count = 0 + err_max = 3 + while True: + try: + self.db.execute("PRAGMA journal_mode=WAL") + self.db.execute("PRAGMA synchronous=0") + self.db.execute("PRAGMA foreign_keys=ON") + except sqlite3.OperationalError as err: + if "database is locked" not in err.args[0]: + raise + err_count += 1 + if err_count > err_max: + err.add_note(f"Failed to acquire lock after {err_count} attempts, aborting") + raise + backoff = err_count / 10.0 + self.log_debug(f"Database locked, retrying in {backoff}s") + time.sleep(backoff) + else: + break if setup: self._setup() From de59dcc9c4531c2a45fb1b4a39b1aa60959dedf6 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:44:02 +1200 Subject: [PATCH 11/34] statusdb: Safer setup Always call `_setup()`, but use `CREATE TABLE IF NOT EXISTS`. The sql schema doesn't include this, so inject it during the setup instead of adding it to the `SQLSCRIPT`. --- sbysrc/sby_status.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 3e5293f..41990b6 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -98,8 +98,6 @@ class SbyStatusDb: self.debug = False self.task = task - setup = not os.path.exists(path) - self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout) self.db = self.con.cursor() self.db.row_factory = sqlite3.Row @@ -123,8 +121,7 @@ class SbyStatusDb: else: break - if setup: - self._setup() + self._setup() if task is not None: self.task_id = self.create_task(workdir=task.workdir, mode=task.opt_mode) @@ -139,7 +136,7 @@ class SbyStatusDb: @transaction def _setup(self): for statement in SQLSCRIPT.split(";\n"): - statement = statement.strip() + statement = statement.strip().replace("CREATE TABLE", "CREATE TABLE IF NOT EXISTS") if statement: self.db.execute(statement) From f63cd46d1221ad81cc520bdb4cffec8a098428de Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:31 +1200 Subject: [PATCH 12/34] Store task name in task and statusdb --- sbysrc/sby.py | 2 +- sbysrc/sby_core.py | 3 ++- sbysrc/sby_status.py | 11 ++++++----- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index dc02971..8748ffe 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -467,7 +467,7 @@ def start_task(taskloop, taskname): else: junit_filename = "junit" - task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, taskloop) + task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, taskloop, name=taskname) for k, v in exe_paths.items(): task.exe_paths[k] = v diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index 6e14d6d..5ac0bc0 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -825,12 +825,13 @@ class SbySummary: class SbyTask(SbyConfig): - def __init__(self, sbyconfig, workdir, early_logs, reusedir, taskloop=None, logfile=None): + def __init__(self, sbyconfig, workdir, early_logs, reusedir, taskloop=None, logfile=None, name=None): super().__init__() 
self.used_options = set() self.models = dict() self.workdir = workdir self.reusedir = reusedir + self.name = name self.status = "UNKNOWN" self.total_time = 0 self.expect = list() diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 41990b6..12eb64f 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -18,6 +18,7 @@ SQLSCRIPT = """\ CREATE TABLE task ( id INTEGER PRIMARY KEY, workdir TEXT, + name TEXT, mode TEXT, created REAL ); @@ -124,7 +125,7 @@ class SbyStatusDb: self._setup() if task is not None: - self.task_id = self.create_task(workdir=task.workdir, mode=task.opt_mode) + self.task_id = self.create_task(workdir=task.workdir, name=task.name, mode=task.opt_mode) def log_debug(self, *args): if self.debug: @@ -147,13 +148,13 @@ class SbyStatusDb: return schema_script != SQLSCRIPT @transaction - def create_task(self, workdir: str, mode: str) -> int: + def create_task(self, workdir: str, name: str, mode: str) -> int: return self.db.execute( """ - INSERT INTO task (workdir, mode, created) - VALUES (:workdir, :mode, :now) + INSERT INTO task (workdir, name, mode, created) + VALUES (:workdir, :name, :mode, :now) """, - dict(workdir=workdir, mode=mode, now=time.time()), + dict(workdir=workdir, name=name, mode=mode, now=time.time()), ).lastrowid @transaction From 0367db76f563612a98131fa507655f96b0d219ec Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:31 +1200 Subject: [PATCH 13/34] Initial live csv dumping Currently unconditional, and only for aiger and smtbmc. Uses task.name from intertask cancellation. Stores `time.time()` when calling `SbyStatusDb::create_task()` in order to calculate time since start of task (consider reducing to integer seconds). --- sbysrc/sby_engine_aiger.py | 2 +- sbysrc/sby_engine_smtbmc.py | 6 +++--- sbysrc/sby_status.py | 20 +++++++++++++++++--- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/sbysrc/sby_engine_aiger.py b/sbysrc/sby_engine_aiger.py index 11e176f..02e853d 100644 --- a/sbysrc/sby_engine_aiger.py +++ b/sbysrc/sby_engine_aiger.py @@ -233,7 +233,7 @@ def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_v cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="aigsmt", engine=f"engine_{engine_idx}")) + task.status_db.set_task_property_status(prop, data=dict(source="aigsmt", engine=f"engine_{engine_idx}", step=current_step)) last_prop.append(prop) return line diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index fe0380e..0a3fdc4 100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -242,7 +242,7 @@ def run(mode, task, engine_idx, engine): cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}")) + task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step)) last_prop.append(prop) return line @@ -252,7 +252,7 @@ def run(mode, task, engine_idx, engine): cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "PASS" - task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", 
engine=f"engine_{engine_idx}")) + task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step)) last_prop.append(prop) return line @@ -284,7 +284,7 @@ def run(mode, task, engine_idx, engine): cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}")) + task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step)) return line diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 12eb64f..3b07507 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -125,7 +125,8 @@ class SbyStatusDb: self._setup() if task is not None: - self.task_id = self.create_task(workdir=task.workdir, name=task.name, mode=task.opt_mode) + self.start_time = time.time() + self.task_id = self.create_task(workdir=task.workdir, name=task.name, mode=task.opt_mode, now=self.start_time) def log_debug(self, *args): if self.debug: @@ -148,13 +149,13 @@ class SbyStatusDb: return schema_script != SQLSCRIPT @transaction - def create_task(self, workdir: str, name: str, mode: str) -> int: + def create_task(self, workdir: str, name: str, mode: str, now:float) -> int: return self.db.execute( """ INSERT INTO task (workdir, name, mode, created) VALUES (:workdir, :name, :mode, :now) """, - dict(workdir=workdir, name=name, mode=mode, now=time.time()), + dict(workdir=workdir, name=name, mode=mode, now=now), ).lastrowid @transaction @@ -237,6 +238,19 @@ class SbyStatusDb: ), ) + if True: + csv = [ + round(now - self.start_time, 2), + self.task.name, + self.task.opt_mode, + data.get("engine", "ENGINE?"), + property.hdlname, + property.location, + property.status, + data.get("step", "DEPTH?"), + ] + self.task.log(f"csv: {','.join(str(v) for v in csv)}") + @transaction def add_task_property_data(self, property: SbyProperty, kind: str, data: Any): now = time.time() From a332b017e4e3c74cc4a0bd442429ee0409775dd2 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:31 +1200 Subject: [PATCH 14/34] More depth tracking `SbyTask::update_status()` optionally takes the current step/depth, which gets used for some solvers/engines when exiting to pass unknown properties. btor engine tracks the current step, but it doesn't track/report which property triggered a CEX so it's only useful on exit. Use data source as a fallback if engine isn't provided (such as when coming from the `task_status` instead of an engine update). 
--- sbysrc/sby_core.py | 7 +++++-- sbysrc/sby_engine_btor.py | 9 ++++++++- sbysrc/sby_engine_smtbmc.py | 7 ++++--- sbysrc/sby_status.py | 2 +- 4 files changed, 18 insertions(+), 7 deletions(-) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index 5ac0bc0..08334d4 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -1226,7 +1226,7 @@ class SbyTask(SbyConfig): for prop in self.design.pass_unknown_asserts(): self.status_db.set_task_property_status(prop, data=data) - def update_status(self, new_status): + def update_status(self, new_status, step = None): assert new_status in ["PASS", "FAIL", "UNKNOWN", "ERROR"] self.status_db.set_task_status(new_status) @@ -1240,7 +1240,10 @@ class SbyTask(SbyConfig): assert self.status != "FAIL" self.status = "PASS" if self.opt_mode in ("bmc", "prove") and self.design: - self.pass_unknown_asserts(dict(source="task_status")) + data = {"source": "task_status"} + if step: + data["step"] = step + self.pass_unknown_asserts(data) elif new_status == "FAIL": assert self.status != "PASS" diff --git a/sbysrc/sby_engine_btor.py b/sbysrc/sby_engine_btor.py index a3e744e..601f4f1 100644 --- a/sbysrc/sby_engine_btor.py +++ b/sbysrc/sby_engine_btor.py @@ -77,6 +77,7 @@ def run(mode, task, engine_idx, engine): common_state.wit_file = None common_state.assert_fail = False common_state.running_procs = 0 + common_state.current_step = None def print_traces_and_terminate(): if mode == "cover": @@ -100,7 +101,7 @@ def run(mode, task, engine_idx, engine): else: task.error(f"engine_{engine_idx}: Engine terminated without status.") - task.update_status(proc_status.upper()) + task.update_status(proc_status.upper(), common_state.current_step) task.summary.set_engine_status(engine_idx, proc_status) task.terminate() @@ -205,6 +206,9 @@ def run(mode, task, engine_idx, engine): if solver_args[0] == "btormc": if "calling BMC on" in line: return line + match = re.match(r".*at bound k = (\d+).*", line) + if match: + common_state.current_step = int(match[1]) if "SATISFIABLE" in line: return line if "bad state properties at bound" in line: @@ -215,6 +219,9 @@ def run(mode, task, engine_idx, engine): return line elif solver_args[0] == "pono": + match = re.match(r".*at bound (\d+).*", line) + if match: + common_state.current_step = int(match[1]) if line == "unknown": if common_state.solver_status is None: common_state.solver_status = "unsat" diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index 0a3fdc4..34fdb8c 100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -300,10 +300,11 @@ def run(mode, task, engine_idx, engine): simple_exit_callback(retcode) def last_exit_callback(): + nonlocal current_step if mode == "bmc" or mode == "cover": - task.update_status(proc_status) + task.update_status(proc_status, current_step) if proc_status == "FAIL" and mode == "bmc" and keep_going: - task.pass_unknown_asserts(dict(source="smtbmc", keep_going=True, engine=f"engine_{engine_idx}")) + task.pass_unknown_asserts(dict(source="smtbmc", keep_going=True, engine=f"engine_{engine_idx}", step=current_step)) proc_status_lower = proc_status.lower() if proc_status == "PASS" else proc_status task.summary.set_engine_status(engine_idx, proc_status_lower) if not keep_going: @@ -335,7 +336,7 @@ def run(mode, task, engine_idx, engine): assert False if task.basecase_pass and task.induction_pass: - task.update_status("PASS") + task.update_status("PASS", current_step) task.summary.append("successful proof by k-induction.") if not keep_going: task.terminate() diff 
--git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 3b07507..f153510 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -243,7 +243,7 @@ class SbyStatusDb: round(now - self.start_time, 2), self.task.name, self.task.opt_mode, - data.get("engine", "ENGINE?"), + data.get("engine", data["source"]), property.hdlname, property.location, property.status, From e9f4f06fe90a692777ea4ace2033dc177fdf7a45 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:32 +1200 Subject: [PATCH 15/34] smtbmc updates db at each step All properties marked UNKNOWN get dumped to the db for the previous step, each time the current step is updated. --- sbysrc/sby_core.py | 5 +++++ sbysrc/sby_engine_smtbmc.py | 3 +++ 2 files changed, 8 insertions(+) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index 08334d4..033d42d 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -1222,6 +1222,11 @@ class SbyTask(SbyConfig): self.status = "ERROR" self.terminate() + def update_unknown_asserts(self, data): + for prop in self.design.hierarchy: + if prop.type == prop.Type.ASSERT and prop.status == "UNKNOWN": + self.status_db.set_task_property_status(prop, data=data) + def pass_unknown_asserts(self, data): for prop in self.design.pass_unknown_asserts(): self.status_db.set_task_property_status(prop, data=data) diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index 34fdb8c..4deedf1 100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -212,7 +212,10 @@ def run(mode, task, engine_idx, engine): match = re.match(r"^## [0-9: ]+ .* in step ([0-9]+)\.\.", line) if match: + last_step = current_step current_step = int(match[1]) + if current_step != last_step and last_step is not None: + task.update_unknown_asserts(dict(source="smtbmc", engine=f"engine_{engine_idx}", step=last_step)) return line match = re.match(r"^## [0-9: ]+ Status: FAILED", line) From e2b1e850903b99dc348a3284ac324a678b95d39d Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:32 +1200 Subject: [PATCH 16/34] Add --livecsv Gate csv dump on status updates. Format 'csv' heading in yellow. 
--- sbysrc/sby.py | 3 ++- sbysrc/sby_cmdline.py | 2 ++ sbysrc/sby_core.py | 5 +++-- sbysrc/sby_status.py | 8 +++++--- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index 8748ffe..b6dac82 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -61,6 +61,7 @@ jobcount = args.jobcount init_config_file = args.init_config_file status_show = args.status status_reset = args.status_reset +status_live_csv = args.livecsv if status_show or status_reset: target = workdir_prefix or workdir or sbyfile @@ -467,7 +468,7 @@ def start_task(taskloop, taskname): else: junit_filename = "junit" - task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, taskloop, name=taskname) + task = SbyTask(sbyconfig, my_workdir, early_logmsgs, reusedir, taskloop, name=taskname, live_csv=status_live_csv) for k, v in exe_paths.items(): task.exe_paths[k] = v diff --git a/sbysrc/sby_cmdline.py b/sbysrc/sby_cmdline.py index 812c0c5..cebbbdb 100644 --- a/sbysrc/sby_cmdline.py +++ b/sbysrc/sby_cmdline.py @@ -29,6 +29,8 @@ def parser_func(release_version='unknown SBY version'): help="maximum number of processes to run in parallel") parser.add_argument("--sequential", action="store_true", dest="sequential", help="run tasks in sequence, not in parallel") + parser.add_argument("--livecsv", action="store_true", dest="livecsv", + help="print live updates of property statuses during task execution in csv format") parser.add_argument("--autotune", action="store_true", dest="autotune", help="automatically find a well performing engine and engine configuration for each task") diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index 033d42d..2528ab6 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -825,13 +825,14 @@ class SbySummary: class SbyTask(SbyConfig): - def __init__(self, sbyconfig, workdir, early_logs, reusedir, taskloop=None, logfile=None, name=None): + def __init__(self, sbyconfig, workdir, early_logs, reusedir, taskloop=None, logfile=None, name=None, live_csv=False): super().__init__() self.used_options = set() self.models = dict() self.workdir = workdir self.reusedir = reusedir self.name = name + self.live_csv = live_csv self.status = "UNKNOWN" self.total_time = 0 self.expect = list() @@ -1321,7 +1322,7 @@ class SbyTask(SbyConfig): except FileNotFoundError: status_path = f"{self.workdir}/status.sqlite" - self.status_db = SbyStatusDb(status_path, self) + self.status_db = SbyStatusDb(status_path, self, live_csv=self.live_csv) def setup_procs(self, setupmode): self.handle_non_engine_options() diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index f153510..6a6a24a 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -4,6 +4,7 @@ import sqlite3 import os import time import json +import click import re from collections import defaultdict from functools import wraps @@ -95,9 +96,10 @@ def transaction(method: Fn) -> Fn: class SbyStatusDb: - def __init__(self, path: Path, task, timeout: float = 5.0): + def __init__(self, path: Path, task, timeout: float = 5.0, live_csv = False): self.debug = False self.task = task + self.live_csv = live_csv self.con = sqlite3.connect(path, isolation_level=None, timeout=timeout) self.db = self.con.cursor() @@ -238,7 +240,7 @@ class SbyStatusDb: ), ) - if True: + if self.live_csv: csv = [ round(now - self.start_time, 2), self.task.name, @@ -249,7 +251,7 @@ class SbyStatusDb: property.status, data.get("step", "DEPTH?"), ] - self.task.log(f"csv: {','.join(str(v) for v in csv)}") + self.task.log(f"{click.style('csv', fg='yellow')}: 
{','.join(str(v) for v in csv)}") @transaction def add_task_property_data(self, property: SbyProperty, kind: str, data: Any): From 0fa5715909a5f0a02248a411078dcd429d9fe2a2 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:32 +1200 Subject: [PATCH 17/34] Add --statuscsv Prints summary of properties for each task, selecting only a single row for each unique status/property/task. Prefers the earliest FAIL or the latest non-FAIL, using the same formatting as the `--livecsv`. --- sbysrc/sby.py | 6 ++- sbysrc/sby_cmdline.py | 2 + sbysrc/sby_status.py | 86 ++++++++++++++++++++++++++++++++++++++++++- 3 files changed, 91 insertions(+), 3 deletions(-) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index b6dac82..cdac898 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -62,8 +62,9 @@ init_config_file = args.init_config_file status_show = args.status status_reset = args.status_reset status_live_csv = args.livecsv +status_show_csv = args.statuscsv -if status_show or status_reset: +if status_show or status_reset or status_show_csv: target = workdir_prefix or workdir or sbyfile if target is None: print("ERROR: Specify a .sby config file or working directory to use --status.") @@ -95,6 +96,9 @@ if status_show or status_reset: if status_show: status_db.print_status_summary() + if status_show_csv: + status_db.print_status_summary_csv() + status_db.db.close() sys.exit(0) diff --git a/sbysrc/sby_cmdline.py b/sbysrc/sby_cmdline.py index cebbbdb..9b3e107 100644 --- a/sbysrc/sby_cmdline.py +++ b/sbysrc/sby_cmdline.py @@ -75,6 +75,8 @@ def parser_func(release_version='unknown SBY version'): parser.add_argument("--status", action="store_true", dest="status", help="summarize the contents of the status database") + parser.add_argument("--statuscsv", action="store_true", dest="statuscsv", + help="print the most recent status for each property in csv format") parser.add_argument("--statusreset", action="store_true", dest="status_reset", help="reset the contents of the status database") diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 6a6a24a..932e9ef 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -36,6 +36,7 @@ CREATE TABLE task_property ( task INTEGER, src TEXT, name TEXT, + hdlname TEXT, created REAL, FOREIGN KEY(task) REFERENCES task(id) ); @@ -169,13 +170,14 @@ class SbyStatusDb: now = time.time() self.db.executemany( """ - INSERT INTO task_property (name, src, task, created) - VALUES (:name, :src, :task, :now) + INSERT INTO task_property (name, src, hdlname, task, created) + VALUES (:name, :src, :hdlname, :task, :now) """, [ dict( name=json.dumps(prop.path), src=prop.location or "", + hdlname=prop.hdlname, task=task_id, now=now, ) @@ -374,6 +376,86 @@ class SbyStatusDb: for display_name, statuses in sorted(properties.items()): print(pretty_path(display_name), combine_statuses(statuses)) + @transaction + def all_status_data_joined(self): + rows = self.db.execute( + """ + SELECT task.name as 'task_name', task.mode, task.created, + task_property.src as 'location', task_property.hdlname, task_property_status.status, + task_property_status.data, task_property_status.created as 'status_created', + task_property_status.id + FROM task + INNER JOIN task_property ON task_property.task=task.id + INNER JOIN task_property_status ON task_property_status.task_property=task_property.id; + """ + ).fetchall() + + def get_result(row): + row = dict(row) + row["data"] = json.loads(row.get("data", "null")) + return row + + return 
{row["id"]: get_result(row) for row in rows} + + def print_status_summary_csv(self): + # get all statuses + all_properties = self.all_status_data_joined() + + # print csv header + csv_header = [ + "time", + "task_name", + "mode", + "engine", + "name", + "location", + "status", + "depth", + ] + print(','.join(csv_header)) + + # find summary for each task/property combo + prop_map: dict[(str, str), dict[str, (int, int)]] = {} + for row, prop_status in all_properties.items(): + status = prop_status['status'] + this_depth = prop_status['data'].get('step') + key = (prop_status['task_name'], prop_status['hdlname']) + try: + prop_status_map = prop_map[key] + except KeyError: + prop_map[key] = prop_status_map = {} + + # get earliest FAIL, or latest non-FAIL + current_depth = prop_status_map.get(status, (None,))[0] + if (current_depth is None or this_depth is not None and + ((status == 'FAIL' and this_depth < current_depth) or + (status != 'FAIL' and this_depth > current_depth))): + prop_status_map[status] = (this_depth, row) + + for prop in prop_map.values(): + # ignore UNKNOWNs if there are other statuses + if len(prop) > 1: + del prop["UNKNOWN"] + + for status, (depth, row) in prop.items(): + prop_status = all_properties[row] + engine = prop_status['data'].get('engine', prop_status['data']['source']) + time = prop_status['status_created'] - prop_status['created'] + name = prop_status['hdlname'] + + # print as csv + csv_line = [ + round(time, 2), + prop_status['task_name'], + prop_status['mode'], + engine, + name, + prop_status['location'], + status, + depth, + ] + print(','.join(str(v) for v in csv_line)) + def combine_statuses(statuses): statuses = set(statuses) From 4a14207b3726c3eca3d46ab101d05eb89f858fda Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:32 +1200 Subject: [PATCH 18/34] statuscsv: Better error handling --- sbysrc/sby_status.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 932e9ef..789b474 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -381,7 +381,7 @@ class SbyStatusDb: rows = self.db.execute( """ SELECT task.name as 'task_name', task.mode, task.created, - task_property.src as 'location', task_property.hdlname, task_property_status.status, + task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, task_property_status.data, task_property_status.created as 'status_created', task_property_status.id FROM task @@ -392,6 +392,7 @@ class SbyStatusDb: def get_result(row): row = dict(row) + row["name"] = json.loads(row.get("name", "null")) row["data"] = json.loads(row.get("data", "null")) return row @@ -434,7 +435,7 @@ class SbyStatusDb: for prop in prop_map.values(): # ignore UNKNOWNs if there are other statuses - if len(prop) > 1: + if len(prop) > 1 and "UNKNOWN" in prop: del prop["UNKNOWN"] for status, (depth, row) in prop.items(): @@ -442,19 +443,19 @@ class SbyStatusDb: engine = prop_status['data'].get('engine', prop_status['data']['source']) time = prop_status['status_created'] - prop_status['created'] name = prop_status['hdlname'] - + # print as csv csv_line = [ round(time, 2), prop_status['task_name'], prop_status['mode'], engine, - name, + name or pretty_path(prop_status['name']), prop_status['location'], status, depth, ] - print(','.join(str(v) for v in csv_line)) + print(','.join("N/A" if v is None else str(v) for v in csv_line)) def 
combine_statuses(statuses): From 48a5859a1ede79824ed744ecc4ef7026b1df1428 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:32 +1200 Subject: [PATCH 19/34] Add kind to csv (and database) --- sbysrc/sby_design.py | 4 ++++ sbysrc/sby_status.py | 16 ++++++++++------ 2 files changed, 14 insertions(+), 6 deletions(-) diff --git a/sbysrc/sby_design.py b/sbysrc/sby_design.py index ffb827c..234e7f8 100644 --- a/sbysrc/sby_design.py +++ b/sbysrc/sby_design.py @@ -111,6 +111,10 @@ class SbyProperty: def celltype(self): return f"${str(self.type).lower()}" + @property + def kind(self): + return str(self.type) + @property def hdlname(self): return pretty_path(self.path).rstrip() diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 789b474..bc6dc43 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -37,6 +37,7 @@ CREATE TABLE task_property ( src TEXT, name TEXT, hdlname TEXT, + kind TEXT, created REAL, FOREIGN KEY(task) REFERENCES task(id) ); @@ -170,8 +171,8 @@ class SbyStatusDb: now = time.time() self.db.executemany( """ - INSERT INTO task_property (name, src, hdlname, task, created) - VALUES (:name, :src, :hdlname, :task, :now) + INSERT INTO task_property (name, src, hdlname, task, kind, created) + VALUES (:name, :src, :hdlname, :task, :kind, :now) """, [ dict( @@ -179,6 +180,7 @@ class SbyStatusDb: src=prop.location or "", hdlname=prop.hdlname, task=task_id, + kind=prop.kind, now=now, ) for prop in properties @@ -250,8 +252,9 @@ class SbyStatusDb: data.get("engine", data["source"]), property.hdlname, property.location, + property.kind, property.status, - data.get("step", "DEPTH?"), + data.get("step", ""), ] self.task.log(f"{click.style('csv', fg='yellow')}: {','.join(str(v) for v in csv)}") @@ -296,7 +299,6 @@ class SbyStatusDb: def get_result(row): row = dict(row) row["name"] = tuple(json.loads(row.get("name", "[]"))) - row["data"] = json.loads(row.get("data", "null")) return row return {row["id"]: get_result(row) for row in rows} @@ -380,7 +382,7 @@ class SbyStatusDb: def all_status_data_joined(self): rows = self.db.execute( """ - SELECT task.name as 'task_name', task.mode, task.created, + SELECT task.name as 'task_name', task.mode, task.created, task_property.kind, task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, task_property_status.data, task_property_status.created as 'status_created', task_property_status.id @@ -410,6 +412,7 @@ class SbyStatusDb: "engine", "name", "location", + "kind", "status", "depth", ] @@ -452,10 +455,11 @@ class SbyStatusDb: engine, name or pretty_path(prop_status['name']), prop_status['location'], + prop_status['kind'], status, depth, ] - print(','.join("N/A" if v is None else str(v) for v in csv_line)) + print(','.join("" if v is None else str(v) for v in csv_line)) def combine_statuses(statuses): From 1d233405bfc227632394f224c9bc98f8de6c94aa Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:32 +1200 Subject: [PATCH 20/34] Warn on --livecsv in --status* block --- sbysrc/sby.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index cdac898..827b7cd 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -100,6 +100,10 @@ if status_show or status_reset or status_show_csv: status_db.print_status_summary_csv() status_db.db.close() + + if status_live_csv: + print(f"WARNING: --livecsv flag found but not used.") + 
sys.exit(0) From f0aca6c75edd72aff0dd7320b812288cb4b3f708 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:33 +1200 Subject: [PATCH 21/34] Add traces to database Setting task property status accepts an optional trace id for associating a prop status with a trace. Where relevant, delays adding prop status(es) to the database until the corresponding tracefile is known, similar to how tracefiles and prop statuses are linked during the summary. --- sbysrc/sby_engine_aiger.py | 9 ++++++-- sbysrc/sby_engine_btor.py | 9 +++++--- sbysrc/sby_engine_smtbmc.py | 8 +++++-- sbysrc/sby_status.py | 45 ++++++++++++++++++++++++++++++++----- 4 files changed, 59 insertions(+), 12 deletions(-) diff --git a/sbysrc/sby_engine_aiger.py b/sbysrc/sby_engine_aiger.py index 02e853d..afcf5b3 100644 --- a/sbysrc/sby_engine_aiger.py +++ b/sbysrc/sby_engine_aiger.py @@ -233,7 +233,6 @@ def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_v cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="aigsmt", engine=f"engine_{engine_idx}", step=current_step)) last_prop.append(prop) return line @@ -241,24 +240,30 @@ def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_v if match: tracefile = match[1] trace = os.path.basename(tracefile)[:-4] + trace_path = f"{task.workdir}/{tracefile}" task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile) + trace_id = task.status_db.add_task_trace(trace, trace_path) if match and last_prop: for p in last_prop: task.summary.add_event( engine_idx=engine_idx, trace=trace, type=p.celltype, hdlname=p.hdlname, src=p.location, step=current_step) - p.tracefiles.append(tracefile) + p.tracefiles.append(tracefile) + task.status_db.set_task_property_status(p, trace_id=trace_id, data=dict(source="aigsmt", engine=f"engine_{engine_idx}", step=current_step, trace_path=trace_path)) last_prop = [] return line return line def exit_callback2(retcode): + nonlocal last_prop if proc2_status is None: task.error(f"engine_{engine_idx}: Could not determine aigsmt status.") if proc2_status != "FAIL": task.error(f"engine_{engine_idx}: Unexpected aigsmt status.") + if len(last_prop): + task.error(f"engine_{engine_idx}: Found properties without trace.") proc2.output_callback = output_callback2 proc2.register_exit_callback(exit_callback2) diff --git a/sbysrc/sby_engine_btor.py b/sbysrc/sby_engine_btor.py index 601f4f1..6f348e2 100644 --- a/sbysrc/sby_engine_btor.py +++ b/sbysrc/sby_engine_btor.py @@ -118,9 +118,12 @@ def run(mode, task, engine_idx, engine): def make_exit_callback(suffix): def exit_callback2(retcode): - vcdpath = f"engine_{engine_idx}/trace{suffix}.vcd" - if os.path.exists(f"{task.workdir}/{vcdpath}"): - task.summary.add_event(engine_idx=engine_idx, trace=f'trace{suffix}', path=vcdpath, type="$cover" if mode == "cover" else "$assert") + trace = f"trace{suffix}" + vcdpath = f"engine_{engine_idx}/{trace}.vcd" + trace_path = f"{task.workdir}/{vcdpath}" + if os.path.exists(trace_path): + task.summary.add_event(engine_idx=engine_idx, trace=trace, path=vcdpath, type="$cover" if mode == "cover" else "$assert") + task.status_db.add_task_trace(trace, trace_path) common_state.running_procs -= 1 if (common_state.running_procs == 0): diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index 4deedf1..074f570 
100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -245,7 +245,6 @@ def run(mode, task, engine_idx, engine): cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step)) last_prop.append(prop) return line @@ -255,7 +254,6 @@ def run(mode, task, engine_idx, engine): cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "PASS" - task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step)) last_prop.append(prop) return line @@ -264,8 +262,10 @@ def run(mode, task, engine_idx, engine): if match: tracefile = match[1] trace = os.path.basename(tracefile)[:-4] + trace_path = f"{task.workdir}/{tracefile}" engine_case = mode.split('_')[1] if '_' in mode else None task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile, engine_case=engine_case) + trace_id = task.status_db.add_task_trace(trace, trace_path, engine_case) if match and last_prop: for p in last_prop: @@ -273,6 +273,7 @@ def run(mode, task, engine_idx, engine): engine_idx=engine_idx, trace=trace, type=p.celltype, hdlname=p.hdlname, src=p.location, step=current_step) p.tracefiles.append(tracefile) + task.status_db.set_task_property_status(p, trace_id=trace_id, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step, trace_path=trace_path)) last_prop = [] return line else: @@ -298,8 +299,11 @@ def run(mode, task, engine_idx, engine): last_exit_callback() def exit_callback(retcode): + nonlocal last_prop if proc_status is None: task.error(f"engine_{engine_idx}: Engine terminated without status.") + if len(last_prop): + task.error(f"engine_{engine_idx}: Found properties without trace.") simple_exit_callback(retcode) def last_exit_callback(): diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index bc6dc43..c356419 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -44,10 +44,12 @@ CREATE TABLE task_property ( CREATE TABLE task_property_status ( id INTEGER PRIMARY KEY, task_property INTEGER, + task_trace INTEGER, status TEXT, data TEXT, created REAL, - FOREIGN KEY(task_property) REFERENCES task_property(id) + FOREIGN KEY(task_property) REFERENCES task_property(id), + FOREIGN KEY(task_trace) REFERENCES task_trace(id) ); CREATE TABLE task_property_data ( id INTEGER PRIMARY KEY, @@ -56,6 +58,13 @@ CREATE TABLE task_property_data ( data TEXT, created REAL, FOREIGN KEY(task_property) REFERENCES task_property(id) +); +CREATE TABLE task_trace ( + id INTEGER PRIMARY KEY, + trace TEXT, + path TEXT, + engine_case TEXT, + created REAL );""" def transaction(method: Fn) -> Fn: @@ -219,6 +228,7 @@ class SbyStatusDb: self, property: SbyProperty, status: Optional[str] = None, + trace_id: Optional[int] = None, data: Any = None, ): if status is None: @@ -228,15 +238,16 @@ class SbyStatusDb: self.db.execute( """ INSERT INTO task_property_status ( - task_property, status, data, created + task_property, task_trace, status, data, created ) VALUES ( (SELECT id FROM task_property WHERE task = :task AND name = :name), - :status, :data, :now + :trace_id, :status, :data, :now ) """, dict( task=self.task_id, + trace_id=trace_id, name=json.dumps(property.path), status=status, data=json.dumps(data), @@ -254,6 +265,7 @@ class SbyStatusDb: property.location, 
property.kind, property.status, + data.get("trace_path", ""), data.get("step", ""), ] self.task.log(f"{click.style('csv', fg='yellow')}: {','.join(str(v) for v in csv)}") @@ -279,6 +291,26 @@ class SbyStatusDb: now=now, ), ) + + @transaction + def add_task_trace(self, trace: str, path: str, engine_case: Optional[str] = None): + now = time.time() + return self.db.execute( + """ + INSERT INTO task_trace ( + trace, path, engine_case, created + ) + VALUES ( + :trace, :path, :engine_case, :now + ) + """, + dict( + trace=trace, + path=path, + engine_case=engine_case, + now=now + ) + ).lastrowid def all_tasks(self): rows = self.db.execute( @@ -385,10 +417,11 @@ class SbyStatusDb: SELECT task.name as 'task_name', task.mode, task.created, task_property.kind, task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, task_property_status.data, task_property_status.created as 'status_created', - task_property_status.id + task_property_status.id, task_trace.path as 'trace_path' FROM task INNER JOIN task_property ON task_property.task=task.id - INNER JOIN task_property_status ON task_property_status.task_property=task_property.id; + INNER JOIN task_property_status ON task_property_status.task_property=task_property.id + INNER JOIN task_trace ON task_property_status.task_trace=task_trace.id; """ ).fetchall() @@ -414,6 +447,7 @@ class SbyStatusDb: "location", "kind", "status", + "trace", "depth", ] print(','.join(csv_header)) @@ -457,6 +491,7 @@ class SbyStatusDb: prop_status['location'], prop_status['kind'], status, + prop_status['trace_path'], depth, ] print(','.join("" if v is None else str(v) for v in csv_line)) From 98ef1c4182c0ad516d056414190e921a5703e68a Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:33 +1200 Subject: [PATCH 22/34] More status tracking unification - Status database only gets called from summary events instead of from engines. - More trace witnesses (.aiw and .yw) are tracked as events. - Multiple tracefiles can be included in the same trace summary, varying only by extension. These are ordered by priority so that in the logfile only a single tracefile is listed. - For engines where multiple properties can be collected for a single trace, these properties are now available for all traces until the next step. If any properties are collected but never recorded with a trace, an error is raised. - Fix formatting for events without steps (e.g. running abc with `vcd off`). - Drop task_property_data table entirely, since it is now redundant and unused. - Fix properties being skipped in all status dump if they don't have a trace. 
--- sbysrc/sby_core.py | 121 ++++++++++++++++++++++++------------ sbysrc/sby_engine_abc.py | 19 ++++-- sbysrc/sby_engine_aiger.py | 26 ++++---- sbysrc/sby_engine_btor.py | 1 - sbysrc/sby_engine_smtbmc.py | 52 ++++++++-------- sbysrc/sby_status.py | 67 +++++++++----------- 6 files changed, 164 insertions(+), 122 deletions(-) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index 2528ab6..e97c6aa 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -20,6 +20,7 @@ import os, re, sys, signal, platform, click if os.name == "posix": import resource, fcntl import subprocess +from pathlib import Path from dataclasses import dataclass, field from collections import defaultdict from typing import Optional @@ -631,6 +632,8 @@ class SbyTraceSummary: path: Optional[str] = field(default=None) engine_case: Optional[str] = field(default=None) events: dict = field(default_factory=lambda: defaultdict(lambda: defaultdict(list))) + trace_ids: dict[str, int] = field(default_factory=lambda: dict()) + last_ext: Optional[str] = field(default=None) @property def kind(self): @@ -682,42 +685,68 @@ class SbySummary: if update_status: status_metadata = dict(source="summary_event", engine=engine.engine) + if event.step: + status_metadata["step"] = event.step if event.prop: - if event.type == "$assert": + add_trace = False + if event.type is None: + event.type = event.prop.celltype + elif event.type == "$assert": event.prop.status = "FAIL" - if event.path: - event.prop.tracefiles.append(event.path) - if update_status: - self.task.status_db.add_task_property_data( - event.prop, - "trace", - data=dict(path=event.path, step=event.step, **status_metadata), - ) - if event.prop: - if event.type == "$cover": + add_trace = True + elif event.type == "$cover": event.prop.status = "PASS" - if event.path: - event.prop.tracefiles.append(event.path) - if update_status: - self.task.status_db.add_task_property_data( - event.prop, - "trace", - data=dict(path=event.path, step=event.step, **status_metadata), - ) + add_trace = True + + if event.path and add_trace: + event.prop.tracefiles.append(event.path) + + trace_path = None + if event.trace: + # get or create trace summary + try: + trace_summary = engine.traces[event.trace] + except KeyError: + trace_summary = SbyTraceSummary(event.trace, path=event.path, engine_case=event.engine_case) + engine.traces[event.trace] = trace_summary + + if event.path: + trace_path = Path(event.path) + trace_ext = trace_path.suffix + trace_summary.last_ext = trace_ext + try: + # use existing tracefile for this extension + trace_id = trace_summary.trace_ids[trace_ext] + except KeyError: + # add tracefile to database + trace_id = self.task.status_db.add_task_trace(event.trace, event.path, trace_ext[1:], event.engine_case) + trace_summary.trace_ids[trace_ext] = trace_id + elif trace_summary.path: + # use existing tracefile for last extension + trace_path = Path(trace_summary.path) + trace_ext = trace_summary.last_ext + trace_id = trace_summary.trace_ids[trace_ext] + + if event.type: + by_type = trace_summary.events[event.type] + if event.hdlname: + by_type[event.hdlname].append(event) + if event.prop and update_status: - self.task.status_db.set_task_property_status( - event.prop, - data=status_metadata - ) - - if event.trace not in engine.traces: - engine.traces[event.trace] = SbyTraceSummary(event.trace, path=event.path, engine_case=event.engine_case) - - if event.type: - by_type = engine.traces[event.trace].events[event.type] - if event.hdlname: - by_type[event.hdlname].append(event) + # update 
property status in database + if trace_path: + self.task.status_db.set_task_property_status( + event.prop, + trace_id=trace_id, + trace_path=Path(self.task.workdir, trace_path).with_suffix(trace_ext), + data=status_metadata, + ) + else: + self.task.status_db.set_task_property_status( + event.prop, + data=status_metadata, + ) def set_engine_status(self, engine_idx, status, case=None): engine_summary = self.engine_summary(engine_idx) @@ -759,10 +788,21 @@ class SbySummary: break case_suffix = f" [{trace.engine_case}]" if trace.engine_case else "" if trace.path: - if short: - yield f"{trace.kind}{case_suffix}: {self.task.workdir}/{trace.path}" - else: - yield f"{trace.kind}{case_suffix}: {trace.path}" + # print single preferred trace + preferred_exts = [".fst", ".vcd"] + if trace.last_ext not in preferred_exts: preferred_exts.append(trace.last_ext) + for ext in trace.trace_ids.keys(): + if ext not in preferred_exts: preferred_exts.append(ext) + for ext in preferred_exts: + if ext not in trace.trace_ids: + continue + if short: + path = Path(self.task.workdir) / trace.path + else: + path = Path(trace.path) + yield f"{trace.kind}{case_suffix}: {path.with_suffix(ext)}" + if short: + break else: yield f"{trace.kind}{case_suffix}: <{trace.trace}>" produced_traces = True @@ -785,15 +825,18 @@ class SbySummary: break event = same_events[0] - steps = sorted(e.step for e in same_events) + # uniquify steps and ignore events with missing steps + steps = sorted(set(e.step for e in same_events if e.step)) if short and len(steps) > step_limit: excess = len(steps) - step_limit steps = [str(step) for step in steps[:step_limit]] omitted_excess = True steps[-1] += f" and {excess} further step{'s' if excess != 1 else ''}" - steps = f"step{'s' if len(steps) > 1 else ''} {', '.join(map(str, steps))}" - yield f" {desc} {event.hdlname} at {event.src} in {steps}" + event_string = f" {desc} {hdlname} at {event.src}" + if steps: + event_string += f" step{'s' if len(steps) > 1 else ''} {', '.join(map(str, steps))}" + yield event_string if not produced_traces: yield f"{engine.engine} did not produce any traces" @@ -801,7 +844,7 @@ class SbySummary: if self.unreached_covers is None and self.task.opt_mode == 'cover' and self.task.status != "PASS" and self.task.design: self.unreached_covers = [] for prop in self.task.design.hierarchy: - if prop.type == prop.Type.COVER and prop.status == "UNKNOWN": + if prop.type == prop.Type.COVER and prop.status in ["UNKNOWN", "FAIL"]: self.unreached_covers.append(prop) if self.unreached_covers: diff --git a/sbysrc/sby_engine_abc.py b/sbysrc/sby_engine_abc.py index 3527035..d8e0189 100644 --- a/sbysrc/sby_engine_abc.py +++ b/sbysrc/sby_engine_abc.py @@ -18,6 +18,7 @@ import re, getopt import json +import os from sby_core import SbyProc from sby_engine_aiger import aigsmt_exit_callback, aigsmt_trace_callback @@ -173,18 +174,25 @@ def run(mode, task, engine_idx, engine): aiger_props.append(task.design.properties_by_path.get(tuple(path))) if keep_going: - match = re.match(r"Writing CEX for output ([0-9]+) to engine_[0-9]+/(.*)\.aiw", line) + match = re.match(r"Writing CEX for output ([0-9]+) to (engine_[0-9]+/(.*)\.aiw)", line) if match: output = int(match[1]) + tracefile = match[2] + name = match[3] + trace, _ = os.path.splitext(name) + task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile) prop = aiger_props[output] if prop: prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="abc pdr", engine=f"engine_{engine_idx}")) + 
task.summary.add_event( + engine_idx=engine_idx, trace=trace, + hdlname=prop.hdlname, src=prop.location, prop=prop, + ) disproved.add(output) proc_status = "FAIL" proc = aigsmt_trace_callback(task, engine_idx, proc_status, run_aigsmt=run_aigsmt, smtbmc_vcd=smtbmc_vcd, smtbmc_append=smtbmc_append, sim_append=sim_append, - name=match[2], + name=name, ) proc.register_exit_callback(exit_callback) procs_running += 1 @@ -198,7 +206,10 @@ def run(mode, task, engine_idx, engine): prop = aiger_props[output] if prop: prop.status = "PASS" - task.status_db.set_task_property_status(prop, data=dict(source="abc pdr", engine=f"engine_{engine_idx}")) + task.summary.add_event( + engine_idx=engine_idx, trace=None, + hdlname=prop.hdlname, src=prop.location, prop=prop, + ) proved.add(output) match = re.match(r"^Simulation of [0-9]+ frames for [0-9]+ rounds with [0-9]+ restarts did not assert POs.", line) diff --git a/sbysrc/sby_engine_aiger.py b/sbysrc/sby_engine_aiger.py index afcf5b3..72add92 100644 --- a/sbysrc/sby_engine_aiger.py +++ b/sbysrc/sby_engine_aiger.py @@ -202,11 +202,13 @@ def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_v proc2_status = None last_prop = [] + recorded_last = False current_step = None def output_callback2(line): nonlocal proc2_status nonlocal last_prop + nonlocal recorded_last nonlocal current_step smt2_trans = {'\\':'/', '|':'/'} @@ -218,6 +220,8 @@ def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_v match = re.match(r"^## [0-9: ]+ .* in step ([0-9]+)\.\.", line) if match: + last_prop = [] + recorded_last = False current_step = int(match[1]) return line @@ -236,33 +240,29 @@ def aigsmt_trace_callback(task, engine_idx, proc_status, *, run_aigsmt, smtbmc_v last_prop.append(prop) return line - match = re.match(r"^## [0-9: ]+ Writing trace to VCD file: (\S+)", line) + match = re.match(r"^## [0-9: ]+ Writing trace to (VCD|Yosys witness) file: (\S+)", line) if match: - tracefile = match[1] - trace = os.path.basename(tracefile)[:-4] - trace_path = f"{task.workdir}/{tracefile}" + tracefile = match[2] + trace, _ = os.path.splitext(os.path.basename(tracefile)) task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile) - trace_id = task.status_db.add_task_trace(trace, trace_path) - - if match and last_prop: for p in last_prop: task.summary.add_event( engine_idx=engine_idx, trace=trace, - type=p.celltype, hdlname=p.hdlname, src=p.location, step=current_step) - p.tracefiles.append(tracefile) - task.status_db.set_task_property_status(p, trace_id=trace_id, data=dict(source="aigsmt", engine=f"engine_{engine_idx}", step=current_step, trace_path=trace_path)) - last_prop = [] + type=p.celltype, hdlname=p.hdlname, src=p.location, + step=current_step, prop=p, + ) + recorded_last = True return line return line def exit_callback2(retcode): - nonlocal last_prop + nonlocal last_prop, recorded_last if proc2_status is None: task.error(f"engine_{engine_idx}: Could not determine aigsmt status.") if proc2_status != "FAIL": task.error(f"engine_{engine_idx}: Unexpected aigsmt status.") - if len(last_prop): + if len(last_prop) and not recorded_last: task.error(f"engine_{engine_idx}: Found properties without trace.") proc2.output_callback = output_callback2 diff --git a/sbysrc/sby_engine_btor.py b/sbysrc/sby_engine_btor.py index 6f348e2..9ef947e 100644 --- a/sbysrc/sby_engine_btor.py +++ b/sbysrc/sby_engine_btor.py @@ -123,7 +123,6 @@ def run(mode, task, engine_idx, engine): trace_path = f"{task.workdir}/{vcdpath}" if 
os.path.exists(trace_path): task.summary.add_event(engine_idx=engine_idx, trace=trace, path=vcdpath, type="$cover" if mode == "cover" else "$assert") - task.status_db.add_task_trace(trace, trace_path) common_state.running_procs -= 1 if (common_state.running_procs == 0): diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index 074f570..52f0459 100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -184,6 +184,7 @@ def run(mode, task, engine_idx, engine): proc_status = None last_prop = [] + recorded_last = False pending_sim = None current_step = None procs_running = 1 @@ -192,6 +193,7 @@ def run(mode, task, engine_idx, engine): def output_callback(line): nonlocal proc_status nonlocal last_prop + nonlocal recorded_last nonlocal pending_sim nonlocal current_step nonlocal procs_running @@ -212,6 +214,8 @@ def run(mode, task, engine_idx, engine): match = re.match(r"^## [0-9: ]+ .* in step ([0-9]+)\.\.", line) if match: + last_prop = [] + recorded_last = False last_step = current_step current_step = int(match[1]) if current_step != last_step and last_step is not None: @@ -257,30 +261,22 @@ def run(mode, task, engine_idx, engine): last_prop.append(prop) return line - if smtbmc_vcd and not task.opt_fst: - match = re.match(r"^## [0-9: ]+ Writing trace to VCD file: (\S+)", line) - if match: - tracefile = match[1] - trace = os.path.basename(tracefile)[:-4] - trace_path = f"{task.workdir}/{tracefile}" - engine_case = mode.split('_')[1] if '_' in mode else None - task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile, engine_case=engine_case) - trace_id = task.status_db.add_task_trace(trace, trace_path, engine_case) - - if match and last_prop: - for p in last_prop: - task.summary.add_event( - engine_idx=engine_idx, trace=trace, - type=p.celltype, hdlname=p.hdlname, src=p.location, step=current_step) - p.tracefiles.append(tracefile) - task.status_db.set_task_property_status(p, trace_id=trace_id, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step, trace_path=trace_path)) - last_prop = [] - return line - else: - match = re.match(r"^## [0-9: ]+ Writing trace to Yosys witness file: (\S+)", line) - if match: - tracefile = match[1] + match = re.match(r"^## [0-9: ]+ Writing trace to (VCD|Yosys witness) file: (\S+)", line) + if match: + tracefile = match[2] + if match[1] == "Yosys witness" and (task.opt_fst or task.opt_vcd_sim): pending_sim = tracefile + trace, _ = os.path.splitext(os.path.basename(tracefile)) + engine_case = mode.split('_')[1] if '_' in mode else None + task.summary.add_event(engine_idx=engine_idx, trace=trace, path=tracefile, engine_case=engine_case) + for p in last_prop: + task.summary.add_event( + engine_idx=engine_idx, trace=trace, + type=p.celltype, hdlname=p.hdlname, src=p.location, + step=current_step, prop=p, + ) + recorded_last = True + return line match = re.match(r"^## [0-9: ]+ Unreached cover statement at ([^:]+): (\S+)(?: \((\S+)\))?", line) if match and not failed_assert: @@ -288,7 +284,11 @@ def run(mode, task, engine_idx, engine): cell_name = match[3] or match[2] prop = task.design.hierarchy.find_property(path, cell_name, trans_dict=smt2_trans) prop.status = "FAIL" - task.status_db.set_task_property_status(prop, data=dict(source="smtbmc", engine=f"engine_{engine_idx}", step=current_step)) + task.summary.add_event( + engine_idx=engine_idx, trace=None, + hdlname=prop.hdlname, src=prop.location, + step=current_step, prop=prop, + ) return line @@ -299,10 +299,10 @@ def run(mode, task, engine_idx, 
engine): last_exit_callback() def exit_callback(retcode): - nonlocal last_prop + nonlocal last_prop, recorded_last if proc_status is None: task.error(f"engine_{engine_idx}: Engine terminated without status.") - if len(last_prop): + if len(last_prop) and not recorded_last: task.error(f"engine_{engine_idx}: Found properties without trace.") simple_exit_callback(retcode) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index c356419..50ed71a 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -51,20 +51,15 @@ CREATE TABLE task_property_status ( FOREIGN KEY(task_property) REFERENCES task_property(id), FOREIGN KEY(task_trace) REFERENCES task_trace(id) ); -CREATE TABLE task_property_data ( - id INTEGER PRIMARY KEY, - task_property INTEGER, - kind TEXT, - data TEXT, - created REAL, - FOREIGN KEY(task_property) REFERENCES task_property(id) -); CREATE TABLE task_trace ( id INTEGER PRIMARY KEY, + task INTEGER, trace TEXT, path TEXT, + kind TEXT, engine_case TEXT, - created REAL + created REAL, + FOREIGN KEY(task) REFERENCES task(id) );""" def transaction(method: Fn) -> Fn: @@ -229,6 +224,7 @@ class SbyStatusDb: property: SbyProperty, status: Optional[str] = None, trace_id: Optional[int] = None, + trace_path: str = "", data: Any = None, ): if status is None: @@ -265,49 +261,38 @@ class SbyStatusDb: property.location, property.kind, property.status, - data.get("trace_path", ""), + trace_path, data.get("step", ""), ] self.task.log(f"{click.style('csv', fg='yellow')}: {','.join(str(v) for v in csv)}") - - @transaction - def add_task_property_data(self, property: SbyProperty, kind: str, data: Any): - now = time.time() - self.db.execute( - """ - INSERT INTO task_property_data ( - task_property, kind, data, created - ) - VALUES ( - (SELECT id FROM task_property WHERE task = :task AND name = :name), - :kind, :data, :now - ) - """, - dict( - task=self.task_id, - name=json.dumps(property.path), - kind=kind, - data=json.dumps(data), - now=now, - ), - ) @transaction - def add_task_trace(self, trace: str, path: str, engine_case: Optional[str] = None): + def add_task_trace( + self, + trace: str, + path: str, + kind: str, + engine_case: Optional[str] = None, + task_id: Optional[int] = None, + ): + if task_id is None: + task_id = self.task_id now = time.time() return self.db.execute( """ INSERT INTO task_trace ( - trace, path, engine_case, created + trace, task, path, engine_case, kind, created ) VALUES ( - :trace, :path, :engine_case, :now + :trace, :task, :path, :engine_case, :kind, :now ) """, dict( trace=trace, + task=task_id, path=path, engine_case=engine_case, + kind=kind, now=now ) ).lastrowid @@ -414,14 +399,14 @@ class SbyStatusDb: def all_status_data_joined(self): rows = self.db.execute( """ - SELECT task.name as 'task_name', task.mode, task.created, task_property.kind, + SELECT task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind, task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, task_property_status.data, task_property_status.created as 'status_created', - task_property_status.id, task_trace.path as 'trace_path' + task_property_status.id, task_trace.path as 'path' FROM task INNER JOIN task_property ON task_property.task=task.id INNER JOIN task_property_status ON task_property_status.task_property=task_property.id - INNER JOIN task_trace ON task_property_status.task_trace=task_trace.id; + LEFT JOIN task_trace ON task_property_status.task_trace=task_trace.id; """ ).fetchall() @@ -480,6 +465,10 @@ 
class SbyStatusDb: engine = prop_status['data'].get('engine', prop_status['data']['source']) time = prop_status['status_created'] - prop_status['created'] name = prop_status['hdlname'] + try: + trace_path = f"{prop_status['workdir']}/{prop_status['path']}" + except KeyError: + trace_path = None # print as csv csv_line = [ @@ -491,7 +480,7 @@ class SbyStatusDb: prop_status['location'], prop_status['kind'], status, - prop_status['trace_path'], + trace_path, depth, ] print(','.join("" if v is None else str(v) for v in csv_line)) From b3f2889b9e245bfd41d79798dcbef6e878b420c9 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:33 +1200 Subject: [PATCH 23/34] CSV tidying Use the same function for csv formatting during live and status reporting. Reads back the row for live reporting to get the full JOIN data. Remove unused/unnecessary arguments for setting task property status. Drop transaction wrapper from read-only db access. --- sbysrc/sby_core.py | 20 ++--- sbysrc/sby_status.py | 189 ++++++++++++++++++++++++------------------- 2 files changed, 110 insertions(+), 99 deletions(-) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index e97c6aa..af16d02 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -702,7 +702,7 @@ class SbySummary: if event.path and add_trace: event.prop.tracefiles.append(event.path) - trace_path = None + trace_id = None if event.trace: # get or create trace summary try: @@ -724,7 +724,6 @@ class SbySummary: trace_summary.trace_ids[trace_ext] = trace_id elif trace_summary.path: # use existing tracefile for last extension - trace_path = Path(trace_summary.path) trace_ext = trace_summary.last_ext trace_id = trace_summary.trace_ids[trace_ext] @@ -735,18 +734,11 @@ class SbySummary: if event.prop and update_status: # update property status in database - if trace_path: - self.task.status_db.set_task_property_status( - event.prop, - trace_id=trace_id, - trace_path=Path(self.task.workdir, trace_path).with_suffix(trace_ext), - data=status_metadata, - ) - else: - self.task.status_db.set_task_property_status( - event.prop, - data=status_metadata, - ) + self.task.status_db.set_task_property_status( + event.prop, + trace_id=trace_id, + data=status_metadata, + ) def set_engine_status(self, engine_idx, status, case=None): engine_summary = self.engine_summary(engine_idx) diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 50ed71a..587b61a 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -222,14 +222,9 @@ class SbyStatusDb: def set_task_property_status( self, property: SbyProperty, - status: Optional[str] = None, trace_id: Optional[int] = None, - trace_path: str = "", data: Any = None, ): - if status is None: - status = property.status - now = time.time() self.db.execute( """ @@ -245,26 +240,16 @@ class SbyStatusDb: task=self.task_id, trace_id=trace_id, name=json.dumps(property.path), - status=status, + status=property.status, data=json.dumps(data), now=now, ), ) if self.live_csv: - csv = [ - round(now - self.start_time, 2), - self.task.name, - self.task.opt_mode, - data.get("engine", data["source"]), - property.hdlname, - property.location, - property.kind, - property.status, - trace_path, - data.get("step", ""), - ] - self.task.log(f"{click.style('csv', fg='yellow')}: {','.join(str(v) for v in csv)}") + row = self.get_status_data_joined(self.db.lastrowid) + csvline = format_status_data_csvline(row) + self.task.log(f"{click.style('csv', fg='yellow')}: {csvline}") @transaction 
def add_task_trace( @@ -395,34 +380,102 @@ class SbyStatusDb: for display_name, statuses in sorted(properties.items()): print(pretty_path(display_name), combine_statuses(statuses)) - @transaction + def get_status_data_joined(self, status_id: int): + row = self.db.execute( + """ + SELECT task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind, + task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, + task_property_status.data, task_property_status.created as 'status_created', + task_property_status.id, task_trace.path, task_trace.kind as trace_kind + FROM task + INNER JOIN task_property ON task_property.task=task.id + INNER JOIN task_property_status ON task_property_status.task_property=task_property.id + LEFT JOIN task_trace ON task_property_status.task_trace=task_trace.id + WHERE task_property_status.id=:status_id; + """, + dict(status_id=status_id) + ).fetchone() + return parse_status_data_row(row) + def all_status_data_joined(self): rows = self.db.execute( """ SELECT task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind, task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, task_property_status.data, task_property_status.created as 'status_created', - task_property_status.id, task_trace.path as 'path' + task_property_status.id, task_trace.path, task_trace.kind as trace_kind FROM task INNER JOIN task_property ON task_property.task=task.id INNER JOIN task_property_status ON task_property_status.task_property=task_property.id LEFT JOIN task_trace ON task_property_status.task_trace=task_trace.id; """ ).fetchall() - - def get_result(row): - row = dict(row) - row["name"] = json.loads(row.get("name", "null")) - row["data"] = json.loads(row.get("data", "null")) - return row - return {row["id"]: get_result(row) for row in rows} + return {row["id"]: parse_status_data_row(row) for row in rows} def print_status_summary_csv(self): # get all statuses all_properties = self.all_status_data_joined() # print csv header + csvheader = format_status_data_csvline(None) + print(csvheader) + + # find summary for each task/property combo + prop_map: dict[(str, str), dict[str, (int, int)]] = {} + for row, prop_status in all_properties.items(): + status = prop_status['status'] + this_depth = prop_status['data'].get('step') + this_kind = prop_status['trace_kind'] + key = (prop_status['task_name'], prop_status['hdlname']) + try: + prop_status_map = prop_map[key] + except KeyError: + prop_map[key] = prop_status_map = {} + + current_depth, _ = prop_status_map.get(status, (None, None)) + update_map = False + if current_depth is None: + update_map = True + elif this_depth is not None: + if status == 'FAIL' and this_depth < current_depth: + # earliest fail + update_map = True + elif status != 'FAIL' and this_depth > current_depth: + # latest non-FAIL + update_map = True + elif this_depth == current_depth and this_kind in ['fst', 'vcd']: + # prefer traces over witness files + update_map = True + if update_map: + prop_status_map[status] = (this_depth, row) + + for prop in prop_map.values(): + # ignore UNKNOWNs if there are other statuses + if len(prop) > 1 and "UNKNOWN" in prop: + del prop["UNKNOWN"] + + for _, row in prop.values(): + csvline = format_status_data_csvline(all_properties[row]) + print(csvline) + + +def combine_statuses(statuses): + statuses = set(statuses) + + if len(statuses) > 1: + statuses.discard("UNKNOWN") + + return ",".join(sorted(statuses)) + 
+def parse_status_data_row(raw: sqlite3.Row): + row_dict = dict(raw) + row_dict["name"] = json.loads(row_dict.get("name", "null")) + row_dict["data"] = json.loads(row_dict.get("data", "null")) + return row_dict + +def format_status_data_csvline(row: dict|None) -> str: + if row is None: csv_header = [ "time", "task_name", @@ -435,61 +488,27 @@ class SbyStatusDb: "trace", "depth", ] - print(','.join(csv_header)) + return ','.join(csv_header) + else: + engine = row['data'].get('engine', row['data']['source']) + time = row['status_created'] - row['created'] + name = row['hdlname'] + depth = row['data'].get('step') + try: + trace_path = Path(row['workdir']) / row['path'] + except TypeError: + trace_path = None - # find summary for each task/property combo - prop_map: dict[(str, str), dict[str, (int, int)]] = {} - for row, prop_status in all_properties.items(): - status = prop_status['status'] - this_depth = prop_status['data'].get('step') - key = (prop_status['task_name'], prop_status['hdlname']) - try: - prop_status_map = prop_map[key] - except KeyError: - prop_map[key] = prop_status_map = {} - - # get earliest FAIL, or latest non-FAIL - current_depth = prop_status_map.get(status, (None,))[0] - if (current_depth is None or this_depth is not None and - ((status == 'FAIL' and this_depth < current_depth) or - (status != 'FAIL' and this_depth > current_depth))): - prop_status_map[status] = (this_depth, row) - - for prop in prop_map.values(): - # ignore UNKNOWNs if there are other statuses - if len(prop) > 1 and "UNKNOWN" in prop: - del prop["UNKNOWN"] - - for status, (depth, row) in prop.items(): - prop_status = all_properties[row] - engine = prop_status['data'].get('engine', prop_status['data']['source']) - time = prop_status['status_created'] - prop_status['created'] - name = prop_status['hdlname'] - try: - trace_path = f"{prop_status['workdir']}/{prop_status['path']}" - except KeyError: - trace_path = None - - # print as csv - csv_line = [ - round(time, 2), - prop_status['task_name'], - prop_status['mode'], - engine, - name or pretty_path(prop_status['name']), - prop_status['location'], - prop_status['kind'], - status, - trace_path, - depth, - ] - print(','.join("" if v is None else str(v) for v in csv_line)) - - -def combine_statuses(statuses): - statuses = set(statuses) - - if len(statuses) > 1: - statuses.discard("UNKNOWN") - - return ",".join(sorted(statuses)) + csv_line = [ + round(time, 2), + row['task_name'], + row['mode'], + engine, + name or pretty_path(row['name']), + row['location'], + row['kind'], + row['status'], + trace_path, + depth, + ] + return ','.join("" if v is None else str(v) for v in csv_line) From 4fc23bebecf4455d03d75662384125300ce69f11 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:33 +1200 Subject: [PATCH 24/34] Fix prop.tracefiles --- sbysrc/sby_core.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index af16d02..d3ed77c 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -688,8 +688,8 @@ class SbySummary: if event.step: status_metadata["step"] = event.step + add_trace = False if event.prop: - add_trace = False if event.type is None: event.type = event.prop.celltype elif event.type == "$assert": @@ -699,10 +699,8 @@ class SbySummary: event.prop.status = "PASS" add_trace = True - if event.path and add_trace: - event.prop.tracefiles.append(event.path) - trace_id = None + trace_path = None if event.trace: # get 
or create trace summary try: @@ -724,6 +722,7 @@ class SbySummary: trace_summary.trace_ids[trace_ext] = trace_id elif trace_summary.path: # use existing tracefile for last extension + trace_path = Path(trace_summary.path) trace_ext = trace_summary.last_ext trace_id = trace_summary.trace_ids[trace_ext] @@ -740,6 +739,9 @@ class SbySummary: data=status_metadata, ) + if trace_path and add_trace: + event.prop.tracefiles.append(str(trace_path)) + def set_engine_status(self, engine_idx, status, case=None): engine_summary = self.engine_summary(engine_idx) if case is None: From 488d25b6250398e6b21c21cd30ad85cb03618063 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:33 +1200 Subject: [PATCH 25/34] Don't use induction step for depth --- sbysrc/sby_engine_smtbmc.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index 52f0459..cdd579a 100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -216,6 +216,8 @@ def run(mode, task, engine_idx, engine): if match: last_prop = [] recorded_last = False + if mode == "prove_induction": + return line last_step = current_step current_step = int(match[1]) if current_step != last_step and last_step is not None: From 2aa8d266ad09ba0cbfa8bcf7cd16e6da4e8357f6 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:33 +1200 Subject: [PATCH 26/34] Update unknown covers as well as asserts Currently only applicable to smtbmc. Also don't update assert depth during `cover` mode. --- sbysrc/sby_core.py | 7 +++++-- sbysrc/sby_engine_smtbmc.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index d3ed77c..0d4adef 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -1260,9 +1260,12 @@ class SbyTask(SbyConfig): self.status = "ERROR" self.terminate() - def update_unknown_asserts(self, data): + def update_unknown_props(self, data): for prop in self.design.hierarchy: - if prop.type == prop.Type.ASSERT and prop.status == "UNKNOWN": + if prop.status != "UNKNOWN": + continue + if ((prop.type == prop.Type.ASSERT and self.opt_mode in ["bmc", "prove"]) or + (prop.type == prop.Type.COVER and self.opt_mode == "cover")): self.status_db.set_task_property_status(prop, data=data) def pass_unknown_asserts(self, data): diff --git a/sbysrc/sby_engine_smtbmc.py b/sbysrc/sby_engine_smtbmc.py index cdd579a..0364929 100644 --- a/sbysrc/sby_engine_smtbmc.py +++ b/sbysrc/sby_engine_smtbmc.py @@ -221,7 +221,7 @@ def run(mode, task, engine_idx, engine): last_step = current_step current_step = int(match[1]) if current_step != last_step and last_step is not None: - task.update_unknown_asserts(dict(source="smtbmc", engine=f"engine_{engine_idx}", step=last_step)) + task.update_unknown_props(dict(source="smtbmc", engine=f"engine_{engine_idx}", step=last_step)) return line match = re.match(r"^## [0-9: ]+ Status: FAILED", line) From ceaeac43f72c0ca69b37d1c5e286ca70ec95b6f7 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:34 +1200 Subject: [PATCH 27/34] Use tasknames in --statuscsv Also fix fallbacks for property statuses without a `task_property_status` entry. 
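(Aside, not part of the patch itself: a rough sketch of the fallback handling mentioned above. With task_trace LEFT JOINed, a property that never received a task_property_status entry yields NULL columns, which have to map to sensible CSV values; csv_fields_with_fallbacks is an invented helper mirroring what format_status_data_csvline does in the diff below.)

    import json

    def csv_fields_with_fallbacks(row: dict) -> dict:
        # Sketch only: mirrors the fallbacks used by format_status_data_csvline.
        data = json.loads(row.get("data") or "{}")        # NULL data -> empty dict
        engine = data.get("engine", data.get("source"))   # may stay None
        try:
            elapsed = row["status_created"] - row["created"]
        except TypeError:                                  # no status row, no timestamp
            elapsed = 0
        return {
            "engine": engine,
            "time": round(elapsed, 2),
            "status": row.get("status") or "UNKNOWN",
            "depth": data.get("step"),
        }

    # A property that was registered but never given a status:
    print(csv_fields_with_fallbacks(
        {"data": None, "created": 10.0, "status_created": None, "status": None}))
    # -> {'engine': None, 'time': 0, 'status': 'UNKNOWN', 'depth': None}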
--- sbysrc/sby.py | 2 +- sbysrc/sby_status.py | 15 ++++++++++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index 827b7cd..997e134 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -97,7 +97,7 @@ if status_show or status_reset or status_show_csv: status_db.print_status_summary() if status_show_csv: - status_db.print_status_summary_csv() + status_db.print_status_summary_csv(tasknames) status_db.db.close() diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index 587b61a..a6fff5f 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -413,7 +413,7 @@ class SbyStatusDb: return {row["id"]: parse_status_data_row(row) for row in rows} - def print_status_summary_csv(self): + def print_status_summary_csv(self, tasknames): # get all statuses all_properties = self.all_status_data_joined() @@ -424,6 +424,8 @@ class SbyStatusDb: # find summary for each task/property combo prop_map: dict[(str, str), dict[str, (int, int)]] = {} for row, prop_status in all_properties.items(): + if tasknames and prop_status['task_name'] not in tasknames: + continue status = prop_status['status'] this_depth = prop_status['data'].get('step') this_kind = prop_status['trace_kind'] @@ -471,7 +473,7 @@ def combine_statuses(statuses): def parse_status_data_row(raw: sqlite3.Row): row_dict = dict(raw) row_dict["name"] = json.loads(row_dict.get("name", "null")) - row_dict["data"] = json.loads(row_dict.get("data", "null")) + row_dict["data"] = json.loads(row_dict.get("data") or "{}") return row_dict def format_status_data_csvline(row: dict|None) -> str: @@ -490,8 +492,11 @@ def format_status_data_csvline(row: dict|None) -> str: ] return ','.join(csv_header) else: - engine = row['data'].get('engine', row['data']['source']) - time = row['status_created'] - row['created'] + engine = row['data'].get('engine', row['data'].get('source')) + try: + time = row['status_created'] - row['created'] + except TypeError: + time = 0 name = row['hdlname'] depth = row['data'].get('step') try: @@ -507,7 +512,7 @@ def format_status_data_csvline(row: dict|None) -> str: name or pretty_path(row['name']), row['location'], row['kind'], - row['status'], + row['status'] or "UNKNOWN", trace_path, depth, ] From f399acc22d06342be86992d4447a119ec2b53330 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:34 +1200 Subject: [PATCH 28/34] Update unknowns on timeout --- sbysrc/sby_core.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sbysrc/sby_core.py b/sbysrc/sby_core.py index 0d4adef..c9a3ff4 100644 --- a/sbysrc/sby_core.py +++ b/sbysrc/sby_core.py @@ -1254,6 +1254,8 @@ class SbyTask(SbyConfig): proc.terminate(timeout=timeout) for proc in list(self.procs_pending): proc.terminate(timeout=timeout) + if timeout: + self.update_unknown_props(dict(source="timeout")) def proc_failed(self, proc): # proc parameter used by autotune override From 41bd894eff12e8b013f3fc3d941585ab8f39c00f Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:34 +1200 Subject: [PATCH 29/34] Test property statuses after timeout --- tests/statusdb/timeout.sby | 111 +++++++++++++++++++++++++++++++++++++ tests/statusdb/timeout.sh | 22 ++++++++ 2 files changed, 133 insertions(+) create mode 100644 tests/statusdb/timeout.sby create mode 100644 tests/statusdb/timeout.sh diff --git a/tests/statusdb/timeout.sby b/tests/statusdb/timeout.sby new file mode 100644 index 0000000..e3c8b64 --- 
/dev/null +++ b/tests/statusdb/timeout.sby @@ -0,0 +1,111 @@ +[tasks] +btor_bmc: btor bmc +pono_bmc: bmc +btor_cover: btor cover +smt_bmc: smt bmc +smt_cover: smt cover +smt_prove: smt prove +smt_fail: bmc fail +aig_bmc: bmc +aig_prove: prove +abc_bmc: bmc +abc_prove: prove +abc_fail: prove fail + +[options] +bmc: mode bmc +cover: mode cover +prove: mode prove +expect TIMEOUT +depth 10000 +timeout 1 +vcd_sim off + +[engines] +btor: btor btormc +pono_bmc: btor pono +smt: smtbmc bitwuzla +smt_fail: smtbmc --keep-going bitwuzla +aig_bmc: aiger aigbmc +aig_prove: aiger suprove +abc_bmc: abc bmc3 +abc_prove: abc pdr +abc_fail: abc --keep-going pdr + +[script] +fail: read -define FAIL=1 +read -sv timeout.sv +prep -top top + +[file timeout.sv] +module top #( + parameter WIDTH = 8 +) ( + input clk, + input load, + input [WIDTH-1:0] a, + input [WIDTH-1:0] b, + output reg [WIDTH-1:0] q, + output reg [WIDTH-1:0] r, + output reg done +); + + reg [WIDTH-1:0] a_reg = 0; + reg [WIDTH-1:0] b_reg = 1; + + initial begin + q <= 0; + r <= 0; + done <= 1; + end + + reg [WIDTH-1:0] q_step = 1; + reg [WIDTH-1:0] r_step = 1; + + // This is not how you design a good divider circuit! + always @(posedge clk) begin + if (load) begin + a_reg <= a; + b_reg <= b; + q <= 0; + r <= a; + q_step <= 1; + r_step <= b; + done <= b == 0; + end else begin + if (r_step <= r) begin + q <= q + q_step; + r <= r - r_step; + + if (!r_step[WIDTH-1]) begin + r_step <= r_step << 1; + q_step <= q_step << 1; + end + end else begin + if (!q_step[0]) begin + r_step <= r_step >> 1; + q_step <= q_step >> 1; + end else begin + done <= 1; + end + end + end + end + + always @(posedge clk) begin + assert (1); // trivial + `ifdef FAIL + assert (0); + `endif + assert (r_step == b_reg * q_step); // Helper invariant + + assert (q * b_reg + r == a_reg); // Main invariant & correct output relationship + if (done) assert (r <= b_reg - 1); // Output range + + cover (done); + cover (done && b_reg == 0); + cover (r != a_reg); + cover (r == a_reg); + cover (0); // unreachable + end +endmodule diff --git a/tests/statusdb/timeout.sh b/tests/statusdb/timeout.sh new file mode 100644 index 0000000..ac06f3d --- /dev/null +++ b/tests/statusdb/timeout.sh @@ -0,0 +1,22 @@ +#!/bin/bash +set -e +python3 $SBY_MAIN -f $SBY_FILE $TASK + +STATUS_CSV=${WORKDIR}/status.csv +python3 $SBY_MAIN -f $SBY_FILE $TASK --statuscsv | tee $STATUS_CSV + +if [[ $TASK =~ "_cover" ]]; then + wc -l $STATUS_CSV | grep -q '6' + grep "COVER" $STATUS_CSV | wc -l | grep -q '5' +elif [[ $TASK =~ "_fail" ]]; then + wc -l $STATUS_CSV | grep -q '6' + grep "ASSERT" $STATUS_CSV | wc -l | grep -q '5' + grep "FAIL" $STATUS_CSV | wc -l | grep -q '1' +else + wc -l $STATUS_CSV | grep -q '5' + grep "ASSERT" $STATUS_CSV | wc -l | grep -q '4' +fi + +if [[ $TASK == "smt_cover" ]]; then + grep "PASS" $STATUS_CSV | wc -l | grep -q '4' +fi From 4adf5e52591839fe88d88a029dec04cd3c99894a Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:34 +1200 Subject: [PATCH 30/34] timeout.sby: Increase depth CI was too fast --- tests/statusdb/timeout.sby | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/statusdb/timeout.sby b/tests/statusdb/timeout.sby index e3c8b64..2872ad6 100644 --- a/tests/statusdb/timeout.sby +++ b/tests/statusdb/timeout.sby @@ -17,7 +17,7 @@ bmc: mode bmc cover: mode cover prove: mode prove expect TIMEOUT -depth 10000 +depth 40000 timeout 1 vcd_sim off From aa2d3ed02546184361d37ed1f529d0dccfc3801e Mon Sep 17 
00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 15:47:34 +1200 Subject: [PATCH 31/34] Add and use --latest flag for statuses Should fix CI problem of running tests twice and the verific and non verific properties having different names when testing the statusdb. --- sbysrc/sby.py | 7 +++++-- sbysrc/sby_cmdline.py | 2 ++ sbysrc/sby_status.py | 18 +++++++++++++++--- tests/statusdb/timeout.sh | 2 +- 4 files changed, 23 insertions(+), 6 deletions(-) diff --git a/sbysrc/sby.py b/sbysrc/sby.py index 997e134..0deefc1 100644 --- a/sbysrc/sby.py +++ b/sbysrc/sby.py @@ -63,6 +63,7 @@ status_show = args.status status_reset = args.status_reset status_live_csv = args.livecsv status_show_csv = args.statuscsv +status_latest = args.status_latest if status_show or status_reset or status_show_csv: target = workdir_prefix or workdir or sbyfile @@ -94,10 +95,10 @@ if status_show or status_reset or status_show_csv: sys.exit(1) if status_show: - status_db.print_status_summary() + status_db.print_status_summary(status_latest) if status_show_csv: - status_db.print_status_summary_csv(tasknames) + status_db.print_status_summary_csv(tasknames, status_latest) status_db.db.close() @@ -105,6 +106,8 @@ if status_show or status_reset or status_show_csv: print(f"WARNING: --livecsv flag found but not used.") sys.exit(0) +elif status_latest: + print(f"WARNING: --latest flag found but not used.") if sbyfile is not None: diff --git a/sbysrc/sby_cmdline.py b/sbysrc/sby_cmdline.py index 9b3e107..f99c439 100644 --- a/sbysrc/sby_cmdline.py +++ b/sbysrc/sby_cmdline.py @@ -77,6 +77,8 @@ def parser_func(release_version='unknown SBY version'): help="summarize the contents of the status database") parser.add_argument("--statuscsv", action="store_true", dest="statuscsv", help="print the most recent status for each property in csv format") + parser.add_argument("--latest", action="store_true", dest="status_latest", + help="only check statuses from the most recent run of a task") parser.add_argument("--statusreset", action="store_true", dest="status_reset", help="reset the contents of the status database") diff --git a/sbysrc/sby_status.py b/sbysrc/sby_status.py index a6fff5f..26352ec 100644 --- a/sbysrc/sby_status.py +++ b/sbysrc/sby_status.py @@ -347,13 +347,16 @@ class SbyStatusDb: self._reset() self.db.execute("PRAGMA foreign_keys=ON") - def print_status_summary(self): + def print_status_summary(self, latest: bool): tasks, task_properties, task_property_statuses = self.all_status_data() + latest_task_ids = filter_latest_task_ids(tasks) properties = defaultdict(set) uniquify_paths = defaultdict(dict) def add_status(task_property, status): + if latest and task_property["task"] not in latest_task_ids: + return display_name = task_property["name"] if display_name[-1].startswith("$"): @@ -400,7 +403,7 @@ class SbyStatusDb: def all_status_data_joined(self): rows = self.db.execute( """ - SELECT task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind, + SELECT task.id as 'task_id', task.name as 'task_name', task.mode, task.workdir, task.created, task_property.kind, task_property.src as 'location', task_property.name, task_property.hdlname, task_property_status.status, task_property_status.data, task_property_status.created as 'status_created', task_property_status.id, task_trace.path, task_trace.kind as trace_kind @@ -413,9 +416,10 @@ class SbyStatusDb: return {row["id"]: parse_status_data_row(row) for row in rows} - def print_status_summary_csv(self, 
tasknames): + def print_status_summary_csv(self, tasknames: list[str], latest: bool): # get all statuses all_properties = self.all_status_data_joined() + latest_task_ids = filter_latest_task_ids(self.all_tasks()) # print csv header csvheader = format_status_data_csvline(None) @@ -426,6 +430,8 @@ class SbyStatusDb: for row, prop_status in all_properties.items(): if tasknames and prop_status['task_name'] not in tasknames: continue + if latest and prop_status['task_id'] not in latest_task_ids: + continue status = prop_status['status'] this_depth = prop_status['data'].get('step') this_kind = prop_status['trace_kind'] @@ -517,3 +523,9 @@ def format_status_data_csvline(row: dict|None) -> str: depth, ] return ','.join("" if v is None else str(v) for v in csv_line) + +def filter_latest_task_ids(all_tasks: dict[int, dict[str]]): + latest: dict[str, int] = {} + for task_id, task_dict in all_tasks.items(): + latest[task_dict["workdir"]] = task_id + return list(latest.values()) diff --git a/tests/statusdb/timeout.sh b/tests/statusdb/timeout.sh index ac06f3d..2d70133 100644 --- a/tests/statusdb/timeout.sh +++ b/tests/statusdb/timeout.sh @@ -3,7 +3,7 @@ set -e python3 $SBY_MAIN -f $SBY_FILE $TASK STATUS_CSV=${WORKDIR}/status.csv -python3 $SBY_MAIN -f $SBY_FILE $TASK --statuscsv | tee $STATUS_CSV +python3 $SBY_MAIN -f $SBY_FILE $TASK --statuscsv --latest | tee $STATUS_CSV if [[ $TASK =~ "_cover" ]]; then wc -l $STATUS_CSV | grep -q '6' From 83723696c739e5903ce51fe6b02826247eb5bc2b Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 16:04:45 +1200 Subject: [PATCH 32/34] Update failing test Each property can have more than one status, but we only need to test the last one. Also fix the warning about `\c` being an invalid escape. --- tests/statusdb/mixed.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/tests/statusdb/mixed.py b/tests/statusdb/mixed.py index 5daba53..e8995a1 100644 --- a/tests/statusdb/mixed.py +++ b/tests/statusdb/mixed.py @@ -5,7 +5,7 @@ import sys def get_prop_type(prop: str): prop = json.loads(prop or "[]") name_parts = prop[-1].split("_") - if name_parts[0] == "\check": + if name_parts[0] == "\\check": # read_verilog style # \check_cover_mixed_v... 
return name_parts[1] @@ -24,22 +24,28 @@ def main(): # custom sql to get all task property statuses for the current workdir rows = db.execute( """ - SELECT task.id, task_property.name, task_property.src, task_property_status.status + SELECT task.id, task_property.id, task_property.name, task_property.src, task_property_status.status FROM task LEFT JOIN task_property ON task_property.task=task.id LEFT JOIN task_property_status ON task_property_status.task_property=task_property.id - WHERE task.workdir=:workdir; + WHERE task.workdir=:workdir + ORDER BY task_property_status.id DESC; """, {"workdir": workdir} ).fetchall() # only check the most recent iteration of the test last_id = 0 - for row_id, _, _, _ in rows: + for row_id, _, _, _, _ in rows: if row_id > last_id: last_id = row_id - for row_id, prop, src, status in rows: + # only check the last status of a property + checked_props = set() + for row_id, prop_id, prop, src, status in rows: if row_id != last_id: continue + if prop_id in checked_props: + continue + checked_props.add(prop_id) prop_type = get_prop_type(prop) valid_status: list[None|str] = [] if workdir in ["mixed_bmc", "mixed_assert"] and prop_type == "assert": From 73c5e5cae6356e450ac6de1e533540ff78292c6f Mon Sep 17 00:00:00 2001 From: Krystine Sherwin <93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 17:10:01 +1200 Subject: [PATCH 33/34] timeout.sby: Add non-timeout equivalents Number of properties reported should be consistent whether the task times out or finishes. Currently fails `btor_fin_cover`. --- tests/statusdb/timeout.sby | 51 ++++++++++++++++++++++++++------------ 1 file changed, 35 insertions(+), 16 deletions(-) diff --git a/tests/statusdb/timeout.sby b/tests/statusdb/timeout.sby index 2872ad6..b1a0a13 100644 --- a/tests/statusdb/timeout.sby +++ b/tests/statusdb/timeout.sby @@ -1,45 +1,64 @@ [tasks] btor_bmc: btor bmc -pono_bmc: bmc +btor_fin_bmc: btor bmc fin +pono_bmc: pono bmc +pono_fin_bmc: pono bmc fin btor_cover: btor cover +btor_fin_cover: btor cover fin smt_bmc: smt bmc +smt_fin_bmc: smt bmc fin smt_cover: smt cover +smt_fin_cover: smt cover fin smt_prove: smt prove -smt_fail: bmc fail -aig_bmc: bmc -aig_prove: prove -abc_bmc: bmc -abc_prove: prove -abc_fail: prove fail +smt_fin_prove: smt prove fin +smt_fail: smtfail bmc fail +smt_fin_fail: smtfail bmc fail fin +aig_bmc: aigbmc bmc +aig_fin_bmc: aigbmc bmc fin +aig_prove: aiger prove +aig_fin_prove: aiger prove fin +abc_bmc: abcbmc bmc +abc_fin_bmc: abcbmc bmc fin +abc_prove: abc prove +abc_fin_prove: abc prove fin +abc_fail: abcfail prove fail +abc_fin_fail: abcfail prove fail fin [options] bmc: mode bmc cover: mode cover prove: mode prove +fin: +expect PASS,FAIL,UNKNOWN +depth 10 +-- +~fin: expect TIMEOUT depth 40000 timeout 1 -vcd_sim off +-- [engines] btor: btor btormc -pono_bmc: btor pono +pono: btor pono smt: smtbmc bitwuzla -smt_fail: smtbmc --keep-going bitwuzla -aig_bmc: aiger aigbmc -aig_prove: aiger suprove -abc_bmc: abc bmc3 -abc_prove: abc pdr -abc_fail: abc --keep-going pdr +smtfail: smtbmc --keep-going bitwuzla +aigbmc: aiger aigbmc +aiger: aiger suprove +abcbmc: abc bmc3 +abc: abc pdr +abcfail: abc --keep-going pdr [script] +fin: read -define WIDTH=4 +~fin: read -define WIDTH=8 fail: read -define FAIL=1 read -sv timeout.sv prep -top top [file timeout.sv] module top #( - parameter WIDTH = 8 + parameter WIDTH = `WIDTH ) ( input clk, input load, From af511945a32cc685074006fb715fc3e123948dc7 Mon Sep 17 00:00:00 2001 From: Krystine Sherwin 
<93062060+KrystalDelusion@users.noreply.github.com> Date: Tue, 8 Jul 2025 17:26:55 +1200 Subject: [PATCH 34/34] btor: Add unknown props First during the init, then again at the end. Depth information isn't available, since there may be a pending trace job for that depth which would provide a known status. --- sbysrc/sby_engine_btor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sbysrc/sby_engine_btor.py b/sbysrc/sby_engine_btor.py index 9ef947e..b938397 100644 --- a/sbysrc/sby_engine_btor.py +++ b/sbysrc/sby_engine_btor.py @@ -91,6 +91,7 @@ def run(mode, task, engine_idx, engine): proc_status = "FAIL" else: task.error(f"engine_{engine_idx}: Engine terminated without status.") + task.update_unknown_props(dict(source="btor", engine=f"engine_{engine_idx}")) else: if common_state.expected_cex == 0: proc_status = "pass" @@ -143,6 +144,7 @@ def run(mode, task, engine_idx, engine): common_state.expected_cex = int(match[1]) if common_state.produced_cex != 0: task.error(f"engine_{engine_idx}: Unexpected engine output (property count).") + task.update_unknown_props(dict(source="btor_init", engine=f"engine_{engine_idx}")) else: task.error(f"engine_{engine_idx}: BTOR solver '{solver_args[0]}' is currently not supported in cover mode.")
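(Closing aside, not part of the series: the schema built up by these patches can also be read outside of sby. The sketch below uses the same joins as all_status_data_joined(); the workdir and database filename are assumptions for the example, the real path is whatever SbyStatusDb was opened with.)

    import json, sqlite3
    from pathlib import Path

    # Path is an assumption for this sketch; point it at the database sby actually created.
    con = sqlite3.connect(Path("timeout_btor_bmc") / "status.sqlite")
    con.row_factory = sqlite3.Row

    rows = con.execute("""
        SELECT task.name AS task_name, task_property.hdlname, task_property.kind,
               task_property_status.status, task_property_status.data,
               task_trace.path AS trace_path
        FROM task
        INNER JOIN task_property ON task_property.task = task.id
        INNER JOIN task_property_status ON task_property_status.task_property = task_property.id
        LEFT JOIN task_trace ON task_property_status.task_trace = task_trace.id
    """).fetchall()

    for row in rows:
        data = json.loads(row["data"] or "{}")
        print(row["task_name"], row["hdlname"], row["kind"],
              row["status"] or "UNKNOWN", data.get("step"), row["trace_path"])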