Mirror of https://github.com/Z3Prover/z3

Fix compiler warnings

Arie Gurfinkel 2018-05-15 12:13:18 -07:00
parent 4b6921dffb
commit 7c727ee922
6 changed files with 79 additions and 74 deletions

File 1 of 6

@@ -158,6 +158,7 @@ void unsat_core_generalizer::operator()(lemma_ref &lemma)
    unsigned old_sz = lemma->get_cube().size();
    unsigned old_level = lemma->level();
+   (void)old_level;
    unsigned uses_level;
    expr_ref_vector core(m);
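Note: the added `(void)old_level;` is the usual idiom for silencing an unused-variable warning when a local is only read inside assertions that release builds compile away. A minimal sketch of the pattern, with made-up names, not the Spacer code itself:

// Minimal sketch of the unused-variable fix (hypothetical names, not Spacer code).
#include <cassert>
#include <vector>

void add_literal(std::vector<int>& cube, int lit) {
    size_t old_sz = cube.size();    // read only by the assert below
    (void)old_sz;                   // keeps -Wunused-variable quiet when NDEBUG
                                    // compiles the assert away
    cube.push_back(lit);
    assert(cube.size() == old_sz + 1);
}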

File 2 of 6

@@ -279,7 +279,7 @@ void itp_solver::get_itp_core (expr_ref_vector &core)
    else
    {
        proof_ref res(get_proof(),m);
        // new code
        if (m_old_hyp_reducer)
        {
@@ -290,10 +290,10 @@ void itp_solver::get_itp_core (expr_ref_vector &core)
                verbose_stream() << "\nStats before transformation:";
                iuc_before.print_farkas_stats();
            }
            proof_utils::reduce_hypotheses(res);
            proof_utils::permute_unit_resolution(res);
            if (m_print_farkas_stats)
            {
                iuc_proof iuc_after(m, res.get(), B);
@@ -310,13 +310,13 @@ void itp_solver::get_itp_core (expr_ref_vector &core)
                verbose_stream() << "\nStats before transformation:";
                iuc_before.print_farkas_stats();
            }
            theory_axiom_reducer ta_reducer(m);
            proof_ref pr_without_theory_lemmas = ta_reducer.reduce(res.get());
            hypothesis_reducer hyp_reducer(m);
            proof_ref pr_with_less_hypothesis = hyp_reducer.reduce(pr_without_theory_lemmas);
            if (m_print_farkas_stats)
            {
                iuc_proof iuc_after(m, pr_with_less_hypothesis.get(), B);
@@ -328,9 +328,9 @@ void itp_solver::get_itp_core (expr_ref_vector &core)
        // construct proof object with contains partition information
        iuc_proof iuc_pr(m, res, B);
        // configure learner
-       unsat_core_learner learner(m, iuc_pr, m_print_farkas_stats, m_iuc_debug_proof);
+       unsat_core_learner learner(m, iuc_pr);
        if (m_iuc_arith == 0 || m_iuc_arith > 3)
        {
@@ -352,7 +352,7 @@ void itp_solver::get_itp_core (expr_ref_vector &core)
            unsat_core_plugin_farkas_lemma_bounded* plugin_farkas_lemma_bounded = alloc(unsat_core_plugin_farkas_lemma_bounded, learner,m);
            learner.register_plugin(plugin_farkas_lemma_bounded);
        }
        if (m_iuc == 2)
        {
            unsat_core_plugin_min_cut* plugin_min_cut = alloc(unsat_core_plugin_min_cut, learner, m);

File 3 of 6

@@ -61,7 +61,6 @@ private:
    unsigned m_iuc;
    unsigned m_iuc_arith;
    bool m_print_farkas_stats;
-   bool m_iuc_debug_proof;
    bool m_old_hyp_reducer;
    bool is_proxy(expr *e, app_ref &def);
    void undo_proxies_in_core(ptr_vector<expr> &v);
@@ -69,7 +68,7 @@ private:
    app* fresh_proxy();
    void elim_proxies(expr_ref_vector &v);
public:
-   itp_solver(solver &solver, unsigned iuc, unsigned iuc_arith, bool print_farkas_stats, bool iuc_debug_proof, bool old_hyp_reducer, bool split_literals = false) :
+   itp_solver(solver &solver, unsigned iuc, unsigned iuc_arith, bool print_farkas_stats, bool old_hyp_reducer, bool split_literals = false) :
        m(solver.get_manager()),
        m_solver(solver),
        m_proxies(m),
@@ -83,7 +82,6 @@ public:
        m_iuc(iuc),
        m_iuc_arith(iuc_arith),
        m_print_farkas_stats(print_farkas_stats),
-       m_iuc_debug_proof(iuc_debug_proof),
        m_old_hyp_reducer(old_hyp_reducer)
        {}
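The deleted `m_iuc_debug_proof` member (together with the constructor parameter that fed it) was initialized but never read again, which is the kind of dead state clang reports via -Wunused-private-field. A hypothetical reduction of that warning, not taken from the Z3 sources:

// Hypothetical reduction of the unused-private-field warning (illustrative names).
class options_holder {
    bool m_debug_proof;   // clang: private field 'm_debug_proof' is not used
public:
    explicit options_holder(bool debug_proof) : m_debug_proof(debug_proof) {}
    // nothing ever reads m_debug_proof, so the field and its constructor
    // parameter can be dropped, mirroring what this diff does to itp_solver
};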

File 4 of 6

@@ -28,7 +28,7 @@ Revision History:
#include "muz/base/dl_util.h"
namespace spacer {
// arith lemmas: second parameter specifies exact type of lemma, could be "farkas", "triangle-eq", "eq-propagate", "assign-bounds", maybe also something else
bool is_arith_lemma(ast_manager& m, proof* pr)
{
@@ -44,7 +44,7 @@ namespace spacer {
    }
    return false;
}
bool is_farkas_lemma(ast_manager& m, proof* pr)
{
    if (pr->get_decl_kind() == PR_TH_LEMMA)
@@ -126,28 +126,28 @@ proof* ProofIteratorPostOrder::next()
    // open temporary dot-file
    std::string dotfile_path = "proof.dot";
    std::ofstream dotstream(dotfile_path);
    // dump dot representation to stream
    pp_proof_dot_to_stream(m, dotstream, pr, iuc_pr);
    // post process dot-file, TODO: factor this out to a different place
    pp_proof_post_process_dot(dotfile_path,dotstream);
}
void pp_proof_dot_to_stream(ast_manager& m, std::ofstream& dotstream, proof* pr, iuc_proof* iuc_pr)
{
    dotstream << "digraph proof { \n";
    std::unordered_map<unsigned, unsigned> id_to_small_id;
    unsigned counter = 0;
    ProofIteratorPostOrder it2(pr, m);
    while (it2.hasNext())
    {
        proof* currentNode = it2.next();
        SASSERT(id_to_small_id.find(currentNode->get_id()) == id_to_small_id.end());
        id_to_small_id.insert(std::make_pair(currentNode->get_id(), counter));
        std::string color = "white";
        if (iuc_pr != nullptr)
        {
@@ -169,11 +169,11 @@ proof* ProofIteratorPostOrder::next()
        params_ref p;
        p.set_uint("max_depth", 4294967295u);
        p.set_uint("min_alias_size", 4294967295u);
        std::ostringstream label_ostream;
        label_ostream << mk_pp(m.get_fact(currentNode),m,p) << "\n";
        std::string label = escape_dot(label_ostream.str());
        // compute edge-label
        std::string edge_label = "";
        if (m.get_num_parents(currentNode) == 0)
@@ -223,7 +223,7 @@ proof* ProofIteratorPostOrder::next()
            }
        }
    }
    // generate entry for node in dot-file
    dotstream << "node_" << counter << " "
              << "["
@@ -231,7 +231,7 @@ proof* ProofIteratorPostOrder::next()
              << "label=\"" << edge_label << " " << label << "\", "
              << "fillcolor=\"" << color << "\""
              << "]\n";
    // add entry for each edge to that node
    for (unsigned i = m.get_num_parents(currentNode); i > 0 ; --i)
    {
@@ -242,12 +242,12 @@ proof* ProofIteratorPostOrder::next()
                  << "node_" << counter
                  << ";\n";
    }
    ++counter;
    }
    dotstream << "\n}" << std::endl;
}
std::string escape_dot(const std::string &s)
{
    std::string res;
@@ -260,7 +260,7 @@ proof* ProofIteratorPostOrder::next()
    }
    return res;
}
void pp_proof_post_process_dot(std::string dot_filepath, std::ofstream &dotstream)
{
    // replace variables in the dotfiles with nicer descriptions (hack: hard coded replacement for now)
@@ -269,7 +269,7 @@ proof* ProofIteratorPostOrder::next()
    predicates.push_back(l1);
    std::vector<std::string> l2 = {"L2","j","m","B"};
    predicates.push_back(l2);
    for(auto& predicate : predicates)
    {
        std::string predicate_name = predicate[0];
@@ -278,17 +278,17 @@ proof* ProofIteratorPostOrder::next()
            std::string new_name = predicate[i+1];
            std::string substring0 = predicate_name + "_" + std::to_string(i) + "_0";
            std::string substringN = predicate_name + "_" + std::to_string(i) + "_n";
            std::string command0 = "sed -i '.bak' 's/" + substring0 + "/" + new_name + "/g' " + dot_filepath;
            verbose_stream() << command0 << std::endl;
            system(command0.c_str());
            std::string commandN = "sed -i '.bak' s/" + substringN + "/" + new_name + "\\'" + "/g " + dot_filepath;
            verbose_stream() << commandN << std::endl;
            system(commandN.c_str());
        }
    }
    verbose_stream() << "end of postprocessing";
}
@@ -297,20 +297,20 @@ proof* ProofIteratorPostOrder::next()
 * methods for transforming proofs
 * ====================================
 */
void theory_axiom_reducer::reset()
{
    m_cache.reset();
    m_pinned.reset();
}
proof_ref theory_axiom_reducer::reduce(proof* pr)
{
    ProofIteratorPostOrder pit(pr, m);
    while (pit.hasNext())
    {
        proof* p = pit.next();
        if (m.get_num_parents(p) == 0 && is_arith_lemma(m, p))
        {
            // we have an arith-theory-axiom and want to get rid of it
@@ -321,7 +321,7 @@ proof* ProofIteratorPostOrder::next()
                for (unsigned i = 0, sz = cls_fact->get_num_args(); i < sz; ++i)
                { cls.push_back(cls_fact->get_arg(i)); }
            } else { cls.push_back(cls_fact); }
            // 1a) create hypothesis'
            ptr_buffer<proof> hyps;
            for (unsigned i=0; i < cls.size(); ++i)
@@ -331,7 +331,7 @@ proof* ProofIteratorPostOrder::next()
                m_pinned.push_back(hyp);
                hyps.push_back(hyp);
            }
            // 1b) create farkas lemma: need to rebuild parameters since mk_th_lemma adds tid as first parameter
            unsigned num_params = p->get_decl()->get_num_parameters();
            parameter const* params = p->get_decl()->get_parameters();
@@ -339,14 +339,14 @@ proof* ProofIteratorPostOrder::next()
            for (unsigned i = 1; i < num_params; ++i) {
                parameters.push_back(params[i]);
            }
            SASSERT(params[0].is_symbol());
            family_id tid = m.mk_family_id(params[0].get_symbol());
            SASSERT(tid != null_family_id);
            proof* th_lemma = m.mk_th_lemma(tid, m.mk_false(),hyps.size(), hyps.c_ptr(), num_params-1, parameters.c_ptr());
            SASSERT(is_arith_lemma(m, th_lemma));
            // 1c) create lemma
            proof* res = m.mk_lemma(th_lemma, cls_fact);
            SASSERT(m.get_fact(res) == m.get_fact(p));
@@ -386,15 +386,14 @@ proof* ProofIteratorPostOrder::next()
            }
        }
    }
    proof* res;
-   bool found = m_cache.find(pr,res);
-   SASSERT(found);
+   VERIFY(m_cache.find(pr,res));
    DEBUG_CODE(proof_checker pc(m);
               expr_ref_vector side(m);
               SASSERT(pc.check(res, side));
               );
    return proof_ref(res,m);
}
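The replaced pair `bool found = m_cache.find(pr,res); SASSERT(found);` is a classic source of an unused-variable warning: `SASSERT` vanishes in release builds, leaving `found` written but never read, while the `find` call must still run for its side effect on `res`. `VERIFY` keeps both the evaluation and the check in every build. A simplified sketch of the two macros, assuming definitions along these lines (the real ones live in Z3's util/debug.h and differ in detail):

// Simplified sketch of the assertion macros involved (not Z3's exact definitions).
#include <cstdlib>
#include <iostream>

#ifdef NDEBUG
#  define SASSERT(e) ((void)0)   // compiled out: anything only used here becomes "unused"
#else
#  define SASSERT(e) do { if (!(e)) std::abort(); } while (0)
#endif

// VERIFY always evaluates its argument, so a side-effecting call such as
// m_cache.find(pr, res) still runs in release builds and no temporary is needed.
#define VERIFY(e) \
    do { if (!(e)) { std::cerr << "verification failed\n"; std::abort(); } } while (0)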
@@ -408,16 +407,16 @@ proof* ProofIteratorPostOrder::next()
    m_pinned_parent_hyps.reset();
    m_pinned.reset();
}
void hypothesis_reducer::compute_hypsets(proof* pr)
{
    ptr_vector<proof> todo;
    todo.push_back(pr);
    while (!todo.empty())
    {
        proof* p = todo.back();
        if (m_active_hyps.contains(p))
        {
            SASSERT(m_parent_hyps.contains(p));
@@ -427,13 +426,13 @@ proof* ProofIteratorPostOrder::next()
        else
        {
            bool existsUnvisitedParent = false;
            // add unprocessed premises to stack for DFS. If there is at least one unprocessed premise, don't compute the result
            // for p now, but wait until those unprocessed premises are processed.
            for (unsigned i = 0; i < m.get_num_parents(p); ++i) {
                SASSERT(m.is_proof(p->get_arg(i)));
                proof* premise = to_app(p->get_arg(i));
                // if we haven't visited the premise yet
                if (!m_active_hyps.contains(premise))
                {
@@ -443,7 +442,7 @@ proof* ProofIteratorPostOrder::next()
                    existsUnvisitedParent = true;
                }
            }
            // if we already visited all premises, we can visit p too
            if (!existsUnvisitedParent)
            {
@@ -453,11 +452,11 @@ proof* ProofIteratorPostOrder::next()
                proof_set* active_hyps = alloc(proof_set);
                m_pinned_active_hyps.insert(active_hyps);
                m_active_hyps.insert(p, active_hyps);
                expr_set* parent_hyps = alloc(expr_set);
                m_pinned_parent_hyps.insert(parent_hyps);
                m_parent_hyps.insert(p, parent_hyps);
                // fill both sets
                if (m.is_hypothesis(p))
                {
@@ -481,7 +480,7 @@ proof* ProofIteratorPostOrder::next()
        }
    }
}
// collect all units that are hyp-free and are used as hypotheses somewhere
// requires that m_active_hyps and m_parent_hyps have been computed
void hypothesis_reducer::collect_units(proof* pr)
@@ -510,12 +509,12 @@ proof* ProofIteratorPostOrder::next()
{
    compute_hypsets(pr);
    collect_units(pr);
    proof* res = compute_transformed_proof(pr);
    SASSERT(res != nullptr);
    proof_ref res_ref(res,m);
    reset();
    DEBUG_CODE(proof_checker pc(m);
               expr_ref_vector side(m);
@@ -523,7 +522,7 @@ proof* ProofIteratorPostOrder::next()
               );
    return res_ref;
}
proof* hypothesis_reducer::compute_transformed_proof(proof* pf)
{
    proof *res = NULL;
@@ -559,7 +558,7 @@ proof* ProofIteratorPostOrder::next()
        if (todo_sz < todo.size()) { continue; }
        else { todo.pop_back(); }
        // here the proof transformation begins
        // INV: whenever we visit p, active_hyps and parent_hyps have been computed for the args.
        if (m.is_hypothesis(p))
@@ -578,7 +577,7 @@ proof* ProofIteratorPostOrder::next()
            // compute hypsets (have not been computed in general, since the unit can be anywhere in the proof)
            compute_hypsets(proof_of_unit);
            // if the transformation doesn't create a cycle, perform it
            SASSERT(m_parent_hyps.contains(proof_of_unit));
            expr_set* parent_hyps = m_parent_hyps.find(proof_of_unit);
@@ -622,8 +621,8 @@ proof* ProofIteratorPostOrder::next()
    }
    SASSERT(res);
    m_cache.insert(p, res);
    SASSERT(m_active_hyps.contains(res));
    proof_set* active_hyps = m_active_hyps.find(res);
    if (active_hyps->empty() && m.has_fact(res) && m.is_false(m.get_fact(res)))
@@ -634,11 +633,11 @@ proof* ProofIteratorPostOrder::next()
    UNREACHABLE();
    return nullptr;
}
proof* hypothesis_reducer::mk_lemma_core(proof* premise, expr *fact)
{
    SASSERT(m.is_false(m.get_fact(premise)));
    SASSERT(m_active_hyps.contains(premise));
    proof_set* active_hyps = m_active_hyps.find(premise);
@@ -658,7 +657,7 @@ proof* ProofIteratorPostOrder::next()
        negated_hyp_fact = m.is_not(hyp_fact) ? to_app(hyp_fact)->get_arg(0) : m.mk_not(hyp_fact);
        args.push_back(negated_hyp_fact);
    }
    expr_ref lemma(m);
    if (args.size() == 1)
    {
@@ -731,7 +730,7 @@ proof* ProofIteratorPostOrder::next()
        return res;
    }
}
proof* hypothesis_reducer::mk_step_core(proof* old_step, ptr_buffer<proof>& args)
{
    // if any of the literals is false, we don't need a step
@@ -742,10 +741,10 @@ proof* ProofIteratorPostOrder::next()
            return args[i];
        }
    }
    // otherwise build step
    args.push_back(to_app(m.get_fact(old_step))); // BUG: I guess this doesn't work with quantifiers (since they are no apps)
    SASSERT(old_step->get_decl()->get_arity() == args.size());
    proof* res = m.mk_app(old_step->get_decl(), args.size(), (expr * const*)args.c_ptr());
    m_pinned.push_back(res);

File 5 of 6

@@ -60,8 +60,14 @@ prop_solver::prop_solver(manager& pm, fixedpoint_params const& p, symbol const&
    m_solvers[1] = pm.mk_fresh2();
    m_fparams[1] = &pm.fparams2();
-   m_contexts[0] = alloc(spacer::itp_solver, *(m_solvers[0]), p.spacer_iuc(), p.spacer_iuc_arith(), p.spacer_print_farkas_stats(), p.spacer_iuc_debug_proof(), p.spacer_old_hyp_reducer(), p.spacer_split_farkas_literals());
-   m_contexts[1] = alloc(spacer::itp_solver, *(m_solvers[1]), p.spacer_iuc(), p.spacer_iuc_arith(), p.spacer_print_farkas_stats(), p.spacer_iuc_debug_proof(), p.spacer_old_hyp_reducer(), p.spacer_split_farkas_literals());
+   m_contexts[0] = alloc(spacer::itp_solver, *(m_solvers[0]), p.spacer_iuc(),
+                         p.spacer_iuc_arith(),
+                         p.spacer_old_hyp_reducer(),
+                         p.spacer_split_farkas_literals());
+   m_contexts[1] = alloc(spacer::itp_solver, *(m_solvers[1]), p.spacer_iuc(),
+                         p.spacer_iuc_arith(),
+                         p.spacer_old_hyp_reducer(),
+                         p.spacer_split_farkas_literals());
    for (unsigned i = 0; i < 2; ++i)
    { m_contexts[i]->assert_expr(m_pm.get_background()); }

File 6 of 6

@@ -32,7 +32,8 @@ namespace spacer {
        typedef obj_hashtable<expr> expr_set;
    public:
-       unsat_core_learner(ast_manager& m, iuc_proof& pr, bool print_farkas_stats = false, bool iuc_debug_proof = false) : m(m), m_pr(pr), m_unsat_core(m), m_print_farkas_stats(print_farkas_stats), m_iuc_debug_proof(iuc_debug_proof) {};
+       unsat_core_learner(ast_manager& m, iuc_proof& pr) :
+           m(m), m_pr(pr), m_unsat_core(m) {};
        virtual ~unsat_core_learner();
        ast_manager& m;
@@ -60,7 +61,7 @@ namespace spacer {
        void set_closed(proof* p, bool value);
        bool is_b_open (proof *p);
        /*
         * adds a lemma to the unsat core
         */
@@ -72,7 +73,9 @@ namespace spacer {
        ptr_vector<unsat_core_plugin> m_plugins;
        ast_mark m_closed;
-       expr_ref_vector m_unsat_core; // collects the lemmas of the unsat-core, will at the end be inserted into unsat_core.
+       // collects the lemmas of the unsat-core
+       // will at the end be inserted into unsat_core.
+       expr_ref_vector m_unsat_core;
        /*
         * computes partial core for step by delegating computation to plugins
@@ -83,9 +86,7 @@ namespace spacer {
         * finalize computation of unsat-core
         */
        void finalize();
-       bool m_print_farkas_stats;
-       bool m_iuc_debug_proof;
    };
}