Mirror of https://github.com/Z3Prover/z3, synced 2025-10-12 18:50:17 +00:00
Add cube tree optimization that resolves cores recursively up the path to prune the tree. Also integrate asms into the tree so they're not tracked separately (#7960)
* draft attempt at optimizing the cube tree with resolvents; not tested/run yet
* add comments
* fix bug: the resolvent needs to bubble upwards to the highest ancestor
* fix bug: the whole resolvent must be covered by the path when bubbling up
* clean up comments
* close the entire tree when a sibling resolvent is empty
* integrate asms directly into the cube tree, remove separate tracking
* try to fix bug about redundant resolutions by merging close and try_resolve_upwards into one function
* separate the logic again to avoid mutual recursion
This commit is contained in: parent c8bdbd2dc4, commit 52fd59df1b
3 changed files with 156 additions and 55 deletions
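As a rough illustration of the sibling-resolvent rule this commit introduces (a minimal, self-contained sketch with hypothetical toy types, not z3's search_tree API): two sibling nodes split on x and !x; once both are closed with cores, their parent can be closed with the resolvent (core_left ∪ core_right) \ {x, !x}, and an empty resolvent closes the whole tree.

    // toy sketch of the sibling-resolvent rule; all names here are hypothetical
    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    using lits = std::vector<std::string>;

    // union of the two cores minus the two complementary split literals
    lits sibling_resolvent(lits const& core_left, std::string const& lit_left,
                           lits const& core_right, std::string const& lit_right) {
        lits res;
        auto add = [&](lits const& core, std::string const& skip) {
            for (auto const& l : core)
                if (l != skip && std::find(res.begin(), res.end(), l) == res.end())
                    res.push_back(l);
        };
        add(core_left, lit_left);
        add(core_right, lit_right);
        return res;
    }

    int main() {
        // left child closed with core {x, a, b}, right child with core {!x, a, c}
        for (auto const& l : sibling_resolvent({"x", "a", "b"}, "x", {"!x", "a", "c"}, "!x"))
            std::cout << l << ' ';          // prints: a b c
        std::cout << '\n';
        // if the resolvent were empty, the conflict would be independent of the split
        // literal, so the whole tree could be closed (global unsat)
    }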
@@ -115,10 +115,6 @@ namespace smt {
                 b.set_unsat(m_l2g, unsat_core);
                 return;
             }
-            // report assumptions used in unsat core, so they can be used in final core
-            for (expr *e : unsat_core)
-                if (asms.contains(e))
-                    b.report_assumption_used(m_l2g, e);
 
             LOG_WORKER(1, " found unsat cube\n");
             b.backtrack(m_l2g, unsat_core, node);
@@ -260,7 +256,6 @@ namespace smt {
            vector<cube_config::literal> g_core;
            for (auto c : core) {
                expr_ref g_c(l2g(c), m);
-               if (!is_assumption(g_c))
                g_core.push_back(expr_ref(l2g(c), m));
            }
            m_search_tree.backtrack(node, g_core);
@@ -411,11 +406,6 @@ namespace smt {
            cancel_workers();
        }
 
-       void parallel::batch_manager::report_assumption_used(ast_translation &l2g, expr *assumption) {
-           std::scoped_lock lock(mux);
-           p.m_assumptions_used.insert(l2g(assumption));
-       }
-
        lbool parallel::batch_manager::get_result() const {
            if (m.limit().is_canceled())
                return l_undef; // the main context was cancelled, so we return undef.
@@ -424,11 +414,12 @@
            // means all cubes were unsat
            if (!m_search_tree.is_closed())
                throw default_exception("inconsistent end state");
-           if (!p.m_assumptions_used.empty()) {
-               // collect unsat core from assumptions used, if any --> case when all cubes were unsat, but depend on
-               // nonempty asms, so we need to add these asms to final unsat core
-               SASSERT(p.ctx.m_unsat_core.empty());
-               for (auto a : p.m_assumptions_used)
+           // case when all cubes were unsat, but depend on nonempty asms, so we need to add these asms to final unsat core
+           // these asms are stored in the cube tree, at the root node
+           if (p.ctx.m_unsat_core.empty()) {
+               SASSERT(root && root->is_closed());
+               for (auto a : m_search_tree.get_core_from_root())
                    p.ctx.m_unsat_core.push_back(a);
            }
            return l_false;
@@ -496,16 +487,12 @@
                scoped_clear(parallel &p) : p(p) {}
                ~scoped_clear() {
                    p.m_workers.reset();
-                   p.m_assumptions_used.reset();
-                   p.m_assumptions.reset();
                }
            };
            scoped_clear clear(*this);
 
            m_batch_manager.initialize();
            m_workers.reset();
-           for (auto e : asms)
-               m_assumptions.insert(e);
            scoped_limits sl(m.limit());
            flet<unsigned> _nt(ctx.m_fparams.m_threads, 1);
            SASSERT(num_threads > 1);
@@ -79,10 +79,6 @@ namespace smt {
 
            void init_parameters_state();
 
-           bool is_assumption(expr* e) const {
-               return p.m_assumptions.contains(e);
-           }
-
        public:
            batch_manager(ast_manager& m, parallel& p) : m(m), p(p), m_search_tree(expr_ref(m)) { }
 
@@ -98,7 +94,6 @@ namespace smt {
            void backtrack(ast_translation& l2g, expr_ref_vector const& core, node* n);
            void split(ast_translation& l2g, unsigned id, node* n, expr* atom);
 
-           void report_assumption_used(ast_translation& l2g, expr* assumption);
            void collect_clause(ast_translation& l2g, unsigned source_worker_id, expr* clause);
            expr_ref_vector return_shared_clauses(ast_translation& g2l, unsigned& worker_limit, unsigned worker_id);
 
@@ -162,8 +157,6 @@
 
        };
 
-       obj_hashtable<expr> m_assumptions_used; // assumptions used in unsat cores, to be used in final core
-       obj_hashtable<expr> m_assumptions; // all assumptions
        batch_manager m_batch_manager;
        scoped_ptr_vector<worker> m_workers;
 
@@ -41,6 +41,7 @@ namespace search_tree {
        literal m_literal;
        node* m_left = nullptr, * m_right = nullptr, * m_parent = nullptr;
        status m_status;
+       vector<literal> m_core;
    public:
        node(literal const& l, node* parent) :
            m_literal(l), m_parent(parent), m_status(status::open) {}
@@ -96,6 +97,13 @@
            if (m_right)
                m_right->display(out, indent + 2);
        }
+
+       bool has_core() const { return !m_core.empty(); }
+       void set_core(vector<literal> const &core) {
+           m_core = core;
+       }
+       vector<literal> const & get_core() const { return m_core; }
+       void clear_core() { m_core.clear(); }
    };
 
    template<typename Config>
@@ -131,31 +139,139 @@ namespace search_tree {
            return nullptr;
        }
 
-       void close_node(node<Config>* n) {
-           if (!n)
-               return;
-           if (n->get_status() == status::closed)
+       // Invariants:
+       // Cores labeling nodes are subsets of the literals on the path to the node and the (external) assumption literals.
+       // If a parent is open, then the one of the children is open.
+       void close_with_core(node<Config>* n, vector<literal> const &C, bool allow_resolve = true) {
+           if (!n || n->get_status() == status::closed)
                return;
+
+           n->set_core(C);
            n->set_status(status::closed);
-           close_node(n->left());
-           close_node(n->right());
-           while (n) {
-               auto p = n->parent();
-               if (!p)
+           close_with_core(n->left(), C, false);
+           close_with_core(n->right(), C, false);
+           // stop at root
+           if (!n->parent()) return;
+
+           node<Config>* p = n->parent();
+           if (!p) return; // root reached
+
+           auto is_literal_in_core = [](literal const& l, vector<literal> const& C) {
+               for (unsigned i = 0; i < C.size(); ++i)
+                   if (C[i] == l) return true;
+               return false;
+           };
+
+           // case 1: current splitting literal not in the conflict core
+           if (!is_literal_in_core(n->get_literal(), C)) {
+               close_with_core(p, C);
+           // case 2: both siblings closed -> resolve
+           } else if (allow_resolve && p->left()->get_status() == status::closed && p->right()->get_status() == status::closed) {
+               try_resolve_upwards(p);
+           }
+       }
+
+       // Given complementary sibling nodes for literals x and ¬x, sibling resolvent = (core_left ∪ core_right) \ {x, ¬x}
+       vector<literal> compute_sibling_resolvent(node<Config>* left, node<Config>* right) {
+           vector<literal> res;
+
+           if (!left->has_core() || !right->has_core()) return res;
+
+           bool are_sibling_complements = left->parent() == right->parent();
+           if (!are_sibling_complements)
+               return res;
+
+           auto &core_l = left->get_core();
+           auto &core_r = right->get_core();
+
+           auto contains = [](vector<literal> const &v, literal const &l) {
+               for (unsigned i = 0; i < v.size(); ++i)
+                   if (v[i] == l) return true;
+               return false;
+           };
+
+           auto lit_l = left->get_literal();
+           auto lit_r = right->get_literal();
+
+           // Add literals from left core, skipping lit_l
+           for (unsigned i = 0; i < core_l.size(); ++i) {
+               if (core_l[i] != lit_l && !contains(res, core_l[i]))
+                   res.push_back(core_l[i]);
+           }
+
+           // Add literals from right core, skipping lit_r
+           for (unsigned i = 0; i < core_r.size(); ++i) {
+               if (core_r[i] != lit_r && !contains(res, core_r[i]))
+                   res.push_back(core_r[i]);
+           }
+
+           return res;
+       }
+
+       void try_resolve_upwards(node<Config>* p) {
+           while (p) {
+               auto left = p->left();
+               auto right = p->right();
+               if (!left || !right) return;
+
+               // only attempt when both children are closed and each has a core
+               if (left->get_status() != status::closed || right->get_status() != status::closed) return;
+               if (!left->has_core() || !right->has_core()) return;
+
+               auto resolvent = compute_sibling_resolvent(left, right);
+
+               // empty resolvent of sibling complement (i.e. tautology) -> global UNSAT
+               if (resolvent.empty()) {
+                   close_with_core(m_root.get(), resolvent, false);
                    return;
-               if (p->get_status() != status::open)
+               }
+
+               // if p already has the same core, nothing more to do
+               if (p->has_core() && resolvent == p->get_core())
                    return;
-               if (p->left()->get_status() != status::closed)
-                   return;
-               if (p->right()->get_status() != status::closed)
-                   return;
-               p->set_status(status::closed);
-               n = p;
+               // Bubble to the highest ancestor where ALL literals in the resolvent
+               // are present somewhere on the path from that ancestor to root
+               node<Config>* candidate = p;
+               node<Config>* attach_here = p; // fallback
+               while (candidate) {
+                   bool all_found = true;
+
+                   for (auto const& r : resolvent) {
+                       bool found = false;
+                       for (node<Config>* q = candidate; q; q = q->parent()) {
+                           if (q->get_literal() == r) {
+                               found = true;
+                               break;
+                           }
+                       }
+                       if (!found) {
+                           all_found = false;
+                           break;
+                       }
+                   }
+
+                   if (all_found) {
+                       attach_here = candidate; // bubble up to this node
+                   }
+
+                   candidate = candidate->parent();
+               }
+
+               // attach the resolvent and close the subtree at attach_here
+               if (!attach_here->has_core() || attach_here->get_core() != resolvent) {
+                   close_with_core(attach_here, resolvent, false);
+               }
+
+               // continue upward from parent of attach_here
+               p = attach_here->parent();
            }
        }
 
    public:
 
        tree(literal const& null_literal) : m_null_literal(null_literal) {
            reset();
        }
@@ -176,11 +292,10 @@
        }
 
        // conflict is given by a set of literals.
-       // they are a subset of literals on the path from root to n
+       // they are subsets of the literals on the path from root to n AND the external assumption literals
        void backtrack(node<Config>* n, vector<literal> const& conflict) {
            if (conflict.empty()) {
-               close_node(m_root.get());
-               m_root->set_status(status::closed);
+               close_with_core(m_root.get(), conflict);
                return;
            }
            SASSERT(n != m_root.get());
@@ -201,9 +316,11 @@
 
        while (n) {
            if (any_of(conflict, [&](auto const& a) { return a == n->get_literal(); })) {
-               close_node(n);
+               // close the subtree under n (preserves core attached to n), and attempt to resolve upwards
+               close_with_core(n, conflict);
                return;
            }
 
            n = n->parent();
        }
        UNREACHABLE();
@@ -252,6 +369,10 @@
            return m_root->find_active_node();
        }
 
+       vector<literal> const& get_core_from_root() const {
+           return m_root->get_core();
+       }
+
        bool is_closed() const {
            return m_root->get_status() == status::closed;
        }
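For the bubble-up step in try_resolve_upwards above, a rough standalone sketch of the ancestor search (hypothetical toy types, not the search_tree::node class): starting from the resolved parent, walk upwards and keep the highest ancestor whose path to the root still contains every literal of the resolvent; the resolvent core is then attached there.

    #include <string>
    #include <vector>

    struct toy_node {
        std::string lit;            // splitting literal labeling this node
        toy_node*   parent = nullptr;
    };

    static bool on_path_to_root(toy_node* n, std::string const& l) {
        for (; n; n = n->parent)
            if (n->lit == l) return true;
        return false;
    }

    // highest ancestor of p (possibly p itself) whose path to the root covers the resolvent
    toy_node* highest_covering_ancestor(toy_node* p, std::vector<std::string> const& resolvent) {
        toy_node* attach_here = p;                    // fallback: attach at p
        for (toy_node* candidate = p; candidate; candidate = candidate->parent) {
            bool all_found = true;
            for (auto const& r : resolvent)
                if (!on_path_to_root(candidate, r)) { all_found = false; break; }
            if (all_found)
                attach_here = candidate;              // keep bubbling upwards
        }
        return attach_here;
    }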