mirror of https://github.com/Z3Prover/z3, synced 2025-04-15 13:28:47 +00:00
added option fixedpoint.spacer.print_farkas_stats to print the number of Farkas lemmas in each proof
This commit is contained in:
parent 4148ee128c
commit 56fcb8e6fd
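A quick usage sketch for the new flag, for orientation only: the option name is taken from the commit message, while the engine setting and the input file name below are illustrative assumptions about a typical Spacer invocation.

    z3 fixedpoint.engine=spacer fixedpoint.spacer.print_farkas_stats=true horn_benchmark.smt2

When the flag is enabled, the unsat-core learner walks each interpolation proof and reports on the verbose stream how many Farkas lemmas the proof contains and how many of them participate in the lowest cut, as implemented in the compute_unsat_core hunk below.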
@@ -187,6 +187,7 @@ def_module_params('fixedpoint',
     ('spacer.farkas_a_const', BOOL, True, 'if the unoptimized farkas plugin is used, use the constants from A while constructing unsat_cores'),
     ('spacer.lemma_sanity_check', BOOL, False, 'check during generalization whether lemma is actually correct'),
     ('spacer.reuse_pobs', BOOL, True, 'reuse POBs'),
+    ('spacer.print_farkas_stats', BOOL, False, 'prints for each proof how many Farkas lemmas it contains and how many of these participate in the cut'),
     ('spacer.simplify_pob', BOOL, False, 'simplify POBs by removing redundant constraints')
     ))
 
@@ -274,7 +274,7 @@ void itp_solver::get_itp_core (expr_ref_vector &core)
         simplify_bounds (core); // XXX potentially redundant
     } else {
         // new code
-        unsat_core_learner learner(m);
+        unsat_core_learner learner(m,m_print_farkas_stats);
 
         if (m_farkas_optimized) {
             if (true) // TODO: proper options
@@ -62,6 +62,7 @@ private:
    bool m_minimize_unsat_core;
    bool m_farkas_optimized;
    bool m_farkas_a_const;
+   bool m_print_farkas_stats;
 
    bool is_proxy(expr *e, app_ref &def);
    void undo_proxies_in_core(ptr_vector<expr> &v);
@@ -69,7 +70,7 @@ private:
    app* fresh_proxy();
    void elim_proxies(expr_ref_vector &v);
public:
-   itp_solver(solver &solver, bool new_unsat_core, bool minimize_unsat_core, bool farkas_optimized, bool farkas_a_const, bool split_literals = false) :
+   itp_solver(solver &solver, bool new_unsat_core, bool minimize_unsat_core, bool farkas_optimized, bool farkas_a_const, bool print_farkas_stats, bool split_literals = false) :
        m(solver.get_manager()),
        m_solver(solver),
        m_proxies(m),
@@ -83,7 +84,8 @@ public:
        m_new_unsat_core(new_unsat_core),
        m_minimize_unsat_core(minimize_unsat_core),
        m_farkas_optimized(farkas_optimized),
-       m_farkas_a_const(farkas_a_const)
+       m_farkas_a_const(farkas_a_const),
+       m_print_farkas_stats(print_farkas_stats)
    {}
 
    ~itp_solver() override {}
@@ -60,7 +60,7 @@ prop_solver::prop_solver(manager& pm, fixedpoint_params const& p, symbol const&
    m_solvers[1] = pm.mk_fresh2();
    m_fparams[1] = &pm.fparams2();
 
-   m_contexts[0] = alloc(spacer::itp_solver, *(m_solvers[0]), p.spacer_new_unsat_core(), p.spacer_minimize_unsat_core(), p.spacer_farkas_optimized(), p.spacer_farkas_a_const(), p.spacer_split_farkas_literals());
+   m_contexts[0] = alloc(spacer::itp_solver, *(m_solvers[0]), p.spacer_new_unsat_core(), p.spacer_minimize_unsat_core(), p.spacer_farkas_optimized(), p.spacer_farkas_a_const(), p.spacer_print_farkas_stats(), p.spacer_split_farkas_literals());
    m_contexts[1] = alloc(spacer::itp_solver, *(m_solvers[1]), p.spacer_new_unsat_core(), p.spacer_minimize_unsat_core(), p.spacer_farkas_optimized(), p.spacer_farkas_a_const(), p.spacer_split_farkas_literals());
 
    for (unsigned i = 0; i < 2; ++i)
@@ -130,6 +130,52 @@ void unsat_core_learner::compute_unsat_core(proof *root, expr_set& asserted_b, e
 
     // TODO: remove duplicates from unsat core?
 
+    // count both number of all Farkas lemmas and number of Farkas lemmas in the cut
+    if (m_print_farkas_stats)
+    {
+        unsigned farkas_counter = 0;
+        unsigned farkas_counter2 = 0;
+
+        ProofIteratorPostOrder it3(root, m);
+        while (it3.hasNext())
+        {
+            proof* currentNode = it3.next();
+
+            // if node is theory lemma
+            if (currentNode->get_decl_kind() == PR_TH_LEMMA)
+            {
+                func_decl* d = currentNode->get_decl();
+                symbol sym;
+                // and theory lemma is Farkas lemma
+                if (d->get_num_parameters() >= 2 && // the Farkas coefficients are saved in the parameters of step
+                    d->get_parameter(0).is_symbol(sym) && sym == "arith" && // the first two parameters are "arith", "farkas",
+                    d->get_parameter(1).is_symbol(sym) && sym == "farkas")
+                {
+                    farkas_counter++;
+
+                    // check whether farkas lemma is to be interpolated (could potentially miss farkas lemmas, which are interpolated, because we potentially don't want to use the lowest cut)
+                    bool has_no_mixed_parents = true;
+                    for (int i = 0; i < m.get_num_parents(currentNode); ++i)
+                    {
+                        proof* premise = to_app(currentNode->get_arg(i));
+                        if (is_a_marked(premise) && is_b_marked(premise))
+                        {
+                            has_no_mixed_parents = false;
+                        }
+                    }
+                    if (has_no_mixed_parents && is_a_marked(currentNode) && is_b_marked(currentNode))
+                    {
+                        farkas_counter2++;
+                    }
+
+                }
+            }
+        }
+
+        verbose_stream() << "\nThis proof contains " << farkas_counter << " Farkas lemmas. " << farkas_counter2 << " Farkas lemmas participate in the lowest cut\n";
+    }
+
+
     bool debug_proof = false;
     if(debug_proof)
     {
@@ -31,7 +31,7 @@ namespace spacer {
        typedef obj_hashtable<expr> expr_set;
 
    public:
-       unsat_core_learner(ast_manager& m) : m(m), m_unsat_core(m) {};
+       unsat_core_learner(ast_manager& m, bool print_farkas_stats = false) : m(m), m_unsat_core(m), m_print_farkas_stats(print_farkas_stats) {};
        virtual ~unsat_core_learner();
 
        ast_manager& m;
@@ -100,6 +100,8 @@ namespace spacer {
         * finalize computation of unsat-core
         */
        void finalize();
+
+       bool m_print_farkas_stats;
    };
 
}