Mirror of https://github.com/Z3Prover/z3, synced 2025-04-10 19:27:06 +00:00.

Commit c98b853917 (parent 48e10a9e2d): speeding up Generalize and adding Lazy Propagation

@@ -98,7 +98,7 @@ namespace Duality {
     bool IsClosedFormula(const Term &t);

     Term AdjustQuantifiers(const Term &t);
-  private:
+  protected:

     void SummarizeRec(hash_set<ast> &memo, std::vector<expr> &lits, int &ops, const Term &t);
     int CountOperatorsRec(hash_set<ast> &memo, const Term &t);
@@ -309,7 +309,7 @@ private:

     LogicSolver *ls;

-  private:
+  protected:
     int nodeCount;
     int edgeCount;

@@ -324,7 +324,7 @@ private:

   public:
     model dualModel;
-  private:
+  protected:
     literals dualLabels;
     std::list<stack_entry> stack;
     std::vector<Term> axioms; // only saved here for printing purposes
@@ -829,7 +829,7 @@ private:
   */
     void ComputeProofCore();

-  private:
+  protected:

     void ClearProofCore(){
       if(proof_core)
@@ -947,6 +947,8 @@ private:

     expr SimplifyOr(std::vector<expr> &lits);

+    expr SimplifyAnd(std::vector<expr> &lits);
+
     void SetAnnotation(Node *root, const expr &t);

     void AddEdgeToSolver(Edge *edge);
@@ -959,9 +961,11 @@ private:

     expr NegateLit(const expr &f);

+    expr GetEdgeFormula(Edge *e, int persist, bool with_children, bool underapprox);
+
   };

   /** RPFP solver base class. */

   class Solver {

@@ -1044,3 +1048,54 @@ namespace std {
   }
  };
 }
+
+namespace Duality {
+  /** Caching version of RPFP. Instead of asserting constraints, returns assumption literals */
+
+  class RPFP_caching : public RPFP {
+  public:
+
+    /** appends assumption literals for edge to lits. if with_children is true,
+        includes the annotation of the edge's children.
+    */
+    void AssertEdgeCache(Edge *e, std::vector<Term> &lits, bool with_children = false);
+
+    /** appends assumption literals for node to lits */
+    void AssertNodeCache(Node *, std::vector<Term> lits);
+
+    /** check assumption lits, and return core */
+    check_result CheckCore(const std::vector<Term> &assumps, std::vector<Term> &core);
+
+    /** Clone another RPFP into this one, keeping a map */
+    void Clone(RPFP *other);
+
+    /** Get the clone of a node */
+    Node *GetNodeClone(Node *other_node);
+
+    /** Get the clone of an edge */
+    Edge *GetEdgeClone(Edge *other_edge);
+
+    /** Try to strengthen the parent of an edge */
+    void GeneralizeCache(Edge *edge);
+
+    /** Try to propagate some facts from children to parents of edge.
+        Return true on success. */
+    bool PropagateCache(Edge *edge);
+
+    /** Construct a caching RPFP using a LogicSolver */
+    RPFP_caching(LogicSolver *_ls) : RPFP(_ls) {}
+
+  protected:
+    hash_map<ast,expr> AssumptionLits;
+    hash_map<Node *, Node *> NodeCloneMap;
+    hash_map<Edge *, Edge *> EdgeCloneMap;
+
+    void GetAssumptionLits(const expr &fmla, std::vector<expr> &lits, hash_map<ast,expr> *opt_map = 0);
+
+    void GreedyReduceCache(std::vector<expr> &assumps, std::vector<expr> &core);
+
+    void FilterCore(std::vector<expr> &core, std::vector<expr> &full_core);
+
+  };
+
+}

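The class added above replaces Duality's assert/push/pop discipline with cached assumption literals. Below is a minimal sketch of how its public members are meant to be combined, assuming the internal header duality.h is available; the demo struct and its method are hypothetical and are not part of the commit.

#include <vector>
#include "duality.h"   // internal Duality header declaring RPFP and RPFP_caching

namespace Duality {
  // Hypothetical demo subclass, for illustration only.
  struct RPFP_caching_demo : public RPFP_caching {
    RPFP_caching_demo(LogicSolver *ls) : RPFP_caching(ls) {}

    // Turn one edge constraint into cached assumption literals, check under
    // those assumptions, and report whether an unsat core was obtained.
    bool EdgeHasCore(Edge *edge){
      std::vector<Term> assumps, core;
      AssertEdgeCache(edge, assumps, true);        // with_children = true
      if(assumps.empty())
        return false;                              // trivially true edge formula
      check_result res = CheckCore(assumps, core); // core is filled only on unsat
      return res == unsat && !core.empty();
    }
  };
}
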
@@ -26,6 +26,7 @@ Revision History:
 #include <stdlib.h>

 #include "duality_wrapper.h"
+#include "iz3profiling.h"

 namespace Duality {

@@ -103,6 +104,7 @@ namespace Duality {
       output_time(*pfs, it->second.t);
       (*pfs) << std::endl;
     }
+    profiling::print(os); // print the interpolation stats
   }

   void timer_start(const char *name){

@@ -519,6 +519,20 @@ namespace Duality {
       const expr &lit = args[i];
       expr atom, val;
       if(IsLiteral(lit,atom,val)){
+        if(atom.is_app() && atom.decl().get_decl_kind() == Equal)
+          if(pol ? eq(val,ctx.bool_val(true)) : eq(val,ctx.bool_val(false))){
+            expr lhs = atom.arg(0), rhs = atom.arg(1);
+            if(lhs.is_numeral())
+              std::swap(lhs,rhs);
+            if(rhs.is_numeral() && lhs.is_app()){
+              for(unsigned j = 0; j < args.size(); j++)
+                if(j != i){
+                  smemo.clear();
+                  smemo[lhs] = rhs;
+                  args[j] = SubstRec(smemo,args[j]);
+                }
+            }
+          }
         for(unsigned j = 0; j < args.size(); j++)
           if(j != i){
             smemo.clear();
@@ -711,6 +725,39 @@ namespace Duality {
 #endif


+  expr RPFP::GetEdgeFormula(Edge *e, int persist, bool with_children, bool underapprox)
+  {
+    if (e->dual.null()) {
+      timer_start("ReducedDualEdge");
+      e->dual = ReducedDualEdge(e);
+      timer_stop("ReducedDualEdge");
+      timer_start("getting children");
+      if(with_children)
+        for(unsigned i = 0; i < e->Children.size(); i++)
+          e->dual = e->dual && GetAnnotation(e->Children[i]);
+      if(underapprox){
+        std::vector<expr> cus(e->Children.size());
+        for(unsigned i = 0; i < e->Children.size(); i++)
+          cus[i] = !UnderapproxFlag(e->Children[i]) || GetUnderapprox(e->Children[i]);
+        expr cnst = conjoin(cus);
+        e->dual = e->dual && cnst;
+      }
+      timer_stop("getting children");
+      timer_start("Persisting");
+      std::list<stack_entry>::reverse_iterator it = stack.rbegin();
+      for(int i = 0; i < persist && it != stack.rend(); i++)
+        it++;
+      if(it != stack.rend())
+        it->edges.push_back(e);
+      timer_stop("Persisting");
+      //Console.WriteLine("{0}", cnst);
+    }
+    return e->dual;
+  }
+
 /** For incremental solving, asserts the constraint associated
  * with this edge in the SMT context. If this edge is removed,
  * you must pop the context accordingly. The second argument is
@@ -732,41 +779,40 @@ namespace Duality {
   {
     if(eq(e->F.Formula,ctx.bool_val(true)) && (!with_children || e->Children.empty()))
       return;
-    if (e->dual.null())
-      {
-        timer_start("ReducedDualEdge");
-        e->dual = ReducedDualEdge(e);
-        timer_stop("ReducedDualEdge");
-        timer_start("getting children");
-        if(with_children)
-          for(unsigned i = 0; i < e->Children.size(); i++)
-            e->dual = e->dual && GetAnnotation(e->Children[i]);
-        if(underapprox){
-          std::vector<expr> cus(e->Children.size());
-          for(unsigned i = 0; i < e->Children.size(); i++)
-            cus[i] = !UnderapproxFlag(e->Children[i]) || GetUnderapprox(e->Children[i]);
-          expr cnst = conjoin(cus);
-          e->dual = e->dual && cnst;
-        }
-        timer_stop("getting children");
-        timer_start("Persisting");
-        std::list<stack_entry>::reverse_iterator it = stack.rbegin();
-        for(int i = 0; i < persist && it != stack.rend(); i++)
-          it++;
-        if(it != stack.rend())
-          it->edges.push_back(e);
-#if 0
-        if(persist != 0)
-          Z3_persist_ast(ctx,e->dual,persist);
-#endif
-        timer_stop("Persisting");
-        //Console.WriteLine("{0}", cnst);
-      }
+    expr fmla = GetEdgeFormula(e, persist, with_children, underapprox);
     timer_start("solver add");
     slvr.add(e->dual);
     timer_stop("solver add");
   }

+  // caching version of above
+  void RPFP_caching::AssertEdgeCache(Edge *e, std::vector<Term> &lits, bool with_children){
+    if(eq(e->F.Formula,ctx.bool_val(true)) && (!with_children || e->Children.empty()))
+      return;
+    expr fmla = GetEdgeFormula(e, 0, with_children, false);
+    GetAssumptionLits(fmla,lits);
+  }
+
+  void RPFP_caching::GetAssumptionLits(const expr &fmla, std::vector<expr> &lits, hash_map<ast,expr> *opt_map){
+    std::vector<expr> conjs;
+    CollectConjuncts(fmla,conjs);
+    for(unsigned i = 0; i < conjs.size(); i++){
+      const expr &conj = conjs[i];
+      std::pair<ast,Term> foo(conj,expr(ctx));
+      std::pair<hash_map<ast,Term>::iterator, bool> bar = AssumptionLits.insert(foo);
+      Term &res = bar.first->second;
+      if(bar.second){
+        func_decl pred = ctx.fresh_func_decl("@alit", ctx.bool_sort());
+        res = pred();
+        slvr.add(ctx.make(Implies,res,conj));
+        // std::cout << res << ": " << conj << "\n";
+      }
+      if(opt_map)
+        (*opt_map)[res] = conj;
+      lits.push_back(res);
+    }
+  }
+
   void RPFP::ConstrainParent(Edge *parent, Node *child){
     ConstrainEdgeLocalized(parent,GetAnnotation(child));
   }

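GetAssumptionLits above implements the usual guard-literal idiom: each conjunct gets a fresh Boolean guard, the implication guard => conjunct is asserted once and kept, and later queries pass the guards as assumptions, so nothing is ever retracted and the unsat core over the guards names the conjuncts that mattered. A minimal illustration of the same idiom, assuming the public z3 C++ API (z3++.h) rather than the Duality wrapper used above; this example is editorial and not part of the commit.

#include <iostream>
#include "z3++.h"

int main(){
  z3::context c;
  z3::solver s(c);
  z3::expr x  = c.int_const("x");
  z3::expr a1 = c.bool_const("alit1");    // one fresh guard per conjunct
  z3::expr a2 = c.bool_const("alit2");
  s.add(z3::implies(a1, x > 0));          // asserted once, never popped
  s.add(z3::implies(a2, x < 0));

  z3::expr_vector assumps(c);
  assumps.push_back(a1);
  assumps.push_back(a2);
  if(s.check(assumps) == z3::unsat){
    z3::expr_vector core = s.unsat_core(); // subset of {a1,a2} actually used
    std::cout << core.size() << "\n";
  }
  return 0;
}
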
@@ -786,6 +832,53 @@ namespace Duality {
     }
   }

+  // caching version of above
+  void RPFP_caching::AssertNodeCache(Node *n, std::vector<Term> lits){
+    if (n->dual.null())
+      {
+        n->dual = GetUpperBound(n);
+        stack.back().nodes.push_back(n);
+        GetAssumptionLits(n->dual,lits);
+      }
+  }
+
+  /** Clone another RPFP into this one, keeping a map */
+  void RPFP_caching::Clone(RPFP *other){
+    for(unsigned i = 0; i < other->nodes.size(); i++)
+      NodeCloneMap[other->nodes[i]] = CloneNode(other->nodes[i]);
+    for(unsigned i = 0; i < other->edges.size(); i++){
+      Edge *edge = other->edges[i];
+      std::vector<Node *> cs;
+      for(unsigned j = 0; j < edge->Children.size(); j++)
+        // cs.push_back(NodeCloneMap[edge->Children[j]]);
+        cs.push_back(CloneNode(edge->Children[j]));
+      EdgeCloneMap[edge] = CreateEdge(NodeCloneMap[edge->Parent],edge->F,cs);
+    }
+  }
+
+  /** Get the clone of a node */
+  RPFP::Node *RPFP_caching::GetNodeClone(Node *other_node){
+    return NodeCloneMap[other_node];
+  }
+
+  /** Get the clone of an edge */
+  RPFP::Edge *RPFP_caching::GetEdgeClone(Edge *other_edge){
+    return EdgeCloneMap[other_edge];
+  }
+
+  /** check assumption lits, and return core */
+  check_result RPFP_caching::CheckCore(const std::vector<Term> &assumps, std::vector<Term> &core){
+    core.resize(assumps.size());
+    unsigned core_size;
+    check_result res = slvr.check(assumps.size(),(expr *)&assumps[0],&core_size,&core[0]);
+    if(res == unsat)
+      core.resize(core_size);
+    else
+      core.clear();
+    return res;
+  }
+

 /** Assert a constraint on an edge in the SMT context.
  */

@@ -970,6 +1063,7 @@ namespace Duality {

   check_result RPFP::Check(Node *root, std::vector<Node *> underapproxes, std::vector<Node *> *underapprox_core )
   {
+    timer_start("Check");
     ClearProofCore();
     // if (dualModel != null) dualModel.Dispose();
     check_result res;

@@ -1002,6 +1096,7 @@ namespace Duality {
     // check_result temp = slvr.check();
     }
     dualModel = slvr.get_model();
+    timer_stop("Check");
     return res;
   }

@@ -1927,10 +2022,14 @@ namespace Duality {
       for(int i = 0; i < num_args; i++)
         CollectConjuncts(f.arg(i),lits,false);
     }
-    else if(pos)
-      lits.push_back(f);
-    else
-      lits.push_back(!f);
+    else if(pos){
+      if(!eq(f,ctx.bool_val(true)))
+        lits.push_back(f);
+    }
+    else {
+      if(!eq(f,ctx.bool_val(false)))
+        lits.push_back(!f);
+    }
   }

   struct TermLt {

@@ -2253,6 +2352,45 @@ namespace Duality {
     }
   }

+  void RPFP_caching::FilterCore(std::vector<expr> &core, std::vector<expr> &full_core){
+    hash_set<ast> core_set;
+    std::copy(full_core.begin(),full_core.end(),std::inserter(core_set,core_set.begin()));
+    std::vector<expr> new_core;
+    for(unsigned i = 0; i < core.size(); i++)
+      if(core_set.find(core[i]) != core_set.end())
+        new_core.push_back(core[i]);
+    core.swap(new_core);
+  }
+
+  void RPFP_caching::GreedyReduceCache(std::vector<expr> &assumps, std::vector<expr> &core){
+    std::vector<expr> lits = assumps, full_core;
+    std::copy(core.begin(),core.end(),std::inserter(lits,lits.end()));
+
+    // verify
+    check_result res = CheckCore(lits,full_core);
+    if(res != unsat)
+      throw "should be unsat";
+    FilterCore(core,full_core);
+
+    std::vector<expr> dummy;
+    if(CheckCore(full_core,dummy) != unsat)
+      throw "should be unsat";
+
+    for(unsigned i = 0; i < core.size(); ){
+      expr temp = core[i];
+      std::swap(core[i],core.back());
+      core.pop_back();
+      lits.resize(assumps.size());
+      std::copy(core.begin(),core.end(),std::inserter(lits,lits.end()));
+      res = CheckCore(lits,full_core);
+      if(res != unsat){
+        core.push_back(temp);
+        std::swap(core[i],core.back());
+        i++;
+      }
+    }
+  }
+
   expr RPFP::NegateLit(const expr &f){
     if(f.is_app() && f.decl().get_decl_kind() == Not)
       return f.arg(0);

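GreedyReduceCache above is deletion-based core minimization: drop one literal, re-check under the remaining assumptions, and restore the literal only if the query becomes satisfiable. The following is a generic sketch of that loop, with an is_unsat callback standing in for CheckCore; it is an editorial illustration, not code from the commit.

#include <vector>
#include <utility>

// Deletion-based minimization of a set of literals, keeping the result unsat.
template <typename Lit, typename IsUnsat>
void greedy_reduce(std::vector<Lit> &core, IsUnsat is_unsat){
  for(size_t i = 0; i < core.size(); ){
    Lit dropped = core[i];
    core[i] = core.back();            // tentatively remove core[i]
    core.pop_back();
    if(!is_unsat(core)){              // literal was needed: put it back
      core.push_back(dropped);
      std::swap(core[i], core.back());
      ++i;
    }
    // otherwise the drop sticks and the element now at index i is tried next
  }
}
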
@@ -2278,6 +2416,14 @@ namespace Duality {
     return ctx.make(Or,lits);
   }

+  expr RPFP::SimplifyAnd(std::vector<expr> &lits){
+    if(lits.size() == 0)
+      return ctx.bool_val(true);
+    if(lits.size() == 1)
+      return lits[0];
+    return ctx.make(And,lits);
+  }
+
   // set up edge constraint in aux solver
   void RPFP::AddEdgeToSolver(Edge *edge){
     if(!edge->dual.null())

@@ -2321,7 +2467,7 @@ namespace Duality {
       std::vector<expr> case_lits;
       itp = StrengthenFormulaByCaseSplitting(itp, case_lits);
       SetAnnotation(node,itp);
-      node->Annotation.Formula.simplify();
+      node->Annotation.Formula = node->Annotation.Formula.simplify();
     }

     if(node->Annotation.IsEmpty()){
@@ -2330,6 +2476,7 @@ namespace Duality {
       const std::vector<expr> &theory = ls->get_axioms();
       for(unsigned i = 0; i < theory.size(); i++)
         aux_solver.add(theory[i]);
+      axioms_added = true;
     }
     else {
       std::cout << "bad in InterpolateByCase -- core:\n";

@@ -2350,6 +2497,7 @@ namespace Duality {
   }

   void RPFP::Generalize(Node *root, Node *node){
+    timer_start("Generalize");
     aux_solver.push();
     AddEdgeToSolver(node->Outgoing);
     expr fmla = GetAnnotation(node);

@@ -2359,8 +2507,103 @@ namespace Duality {
     aux_solver.pop(1);
     NegateLits(conjuncts);
     SetAnnotation(node,SimplifyOr(conjuncts));
+    timer_stop("Generalize");
   }

+
+  // caching version of above
+  void RPFP_caching::GeneralizeCache(Edge *edge){
+    timer_start("Generalize");
+    Node *node = edge->Parent;
+    std::vector<expr> assumps, core, conjuncts;
+    AssertEdgeCache(edge,assumps);
+    for(unsigned i = 0; i < edge->Children.size(); i++){
+      expr ass = GetAnnotation(edge->Children[i]);
+      std::vector<expr> clauses;
+      CollectConjuncts(ass.arg(1),clauses);
+      for(unsigned j = 0; j < clauses.size(); j++)
+        GetAssumptionLits(ass.arg(0) || clauses[j],assumps);
+    }
+    expr fmla = GetAnnotation(node);
+    assumps.push_back(fmla.arg(0).arg(0));
+    std::vector<expr> lits;
+    CollectConjuncts(!fmla.arg(1),lits);
+#if 0
+    for(unsigned i = 0; i < lits.size(); i++){
+      const expr &lit = lits[i];
+      if(lit.is_app() && lit.decl().get_decl_kind() == Equal){
+        lits[i] = ctx.make(Leq,lit.arg(0),lit.arg(1));
+        lits.push_back(ctx.make(Leq,lit.arg(1),lit.arg(0)));
+      }
+    }
+#endif
+    hash_map<ast,expr> lit_map;
+    for(unsigned i = 0; i < lits.size(); i++)
+      GetAssumptionLits(lits[i],core,&lit_map);
+    GreedyReduceCache(assumps,core);
+    for(unsigned i = 0; i < core.size(); i++)
+      conjuncts.push_back(lit_map[core[i]]);
+    NegateLits(conjuncts);
+    SetAnnotation(node,SimplifyOr(conjuncts));
+    timer_stop("Generalize");
+  }
+
+  // caching version of above
+  bool RPFP_caching::PropagateCache(Edge *edge){
+    timer_start("PropagateCache");
+    bool some = false;
+    {
+      std::vector<expr> candidates, skip;
+      Node *node = edge->Parent;
+      CollectConjuncts(node->Annotation.Formula,skip);
+      for(unsigned i = 0; i < edge->Children.size(); i++){
+        Node *child = edge->Children[i];
+        if(child->map == node->map){
+          CollectConjuncts(child->Annotation.Formula,candidates);
+          break;
+        }
+      }
+      if(candidates.empty()) goto done;
+      hash_set<ast> skip_set;
+      std::copy(skip.begin(),skip.end(),std::inserter(skip_set,skip_set.begin()));
+      std::vector<expr> new_candidates;
+      for(unsigned i = 0; i < candidates.size(); i++)
+        if(skip_set.find(candidates[i]) == skip_set.end())
+          new_candidates.push_back(candidates[i]);
+      candidates.swap(new_candidates);
+      if(candidates.empty()) goto done;
+      std::vector<expr> assumps, core, conjuncts;
+      AssertEdgeCache(edge,assumps);
+      for(unsigned i = 0; i < edge->Children.size(); i++){
+        expr ass = GetAnnotation(edge->Children[i]);
+        std::vector<expr> clauses;
+        CollectConjuncts(ass.arg(1),clauses);
+        for(unsigned j = 0; j < clauses.size(); j++)
+          GetAssumptionLits(ass.arg(0) || clauses[j],assumps);
+      }
+      for(unsigned i = 0; i < candidates.size(); i++){
+        unsigned old_size = assumps.size();
+        node->Annotation.Formula = candidates[i];
+        expr fmla = GetAnnotation(node);
+        assumps.push_back(fmla.arg(0).arg(0));
+        GetAssumptionLits(!fmla.arg(1),assumps);
+        std::vector<expr> full_core;
+        check_result res = CheckCore(assumps,full_core);
+        if(res == unsat)
+          conjuncts.push_back(candidates[i]);
+        assumps.resize(old_size);
+      }
+      if(conjuncts.empty())
+        goto done;
+      SetAnnotation(node,SimplifyAnd(conjuncts));
+      some = true;
+    }
+  done:
+    timer_stop("PropagateCache");
+    return some;
+  }
+
+
 /** Push a scope. Assertions made after Push can be undone by Pop. */

 void RPFP::Push()

@@ -44,7 +44,7 @@ Revision History:
 // #define TOP_DOWN
 // #define EFFORT_BOUNDED_STRAT
 #define SKIP_UNDERAPPROX_NODES
-
+#define USE_RPFP_CLONE

 namespace Duality {

@@ -115,8 +115,25 @@ namespace Duality {
       Report = false;
       StratifiedInlining = false;
       RecursionBound = -1;
+#ifdef USE_RPFP_CLONE
+      clone_ls = new RPFP::iZ3LogicSolver(ctx);
+      clone_rpfp = new RPFP_caching(clone_ls);
+      clone_rpfp->Clone(rpfp);
+#endif
     }
+
+    ~Duality(){
+#ifdef USE_RPFP_CLONE
+      delete clone_rpfp;
+      delete clone_ls;
+#endif
+    }
+
+#ifdef USE_RPFP_CLONE
+    RPFP::LogicSolver *clone_ls;
+    RPFP_caching *clone_rpfp;
+#endif

     typedef RPFP::Node Node;
     typedef RPFP::Edge Edge;

@@ -1309,6 +1326,7 @@ namespace Duality {
        node. */
     bool SatisfyUpperBound(Node *node){
       if(node->Bound.IsFull()) return true;
+      Propagate();
       reporter->Bound(node);
       int start_decs = rpfp->CumulativeDecisions();
       DerivationTree *dtp = new DerivationTreeSlow(this,unwinding,reporter,heuristic,FullExpand);

@@ -1412,6 +1430,70 @@ namespace Duality {
       }
     }

+    // Propagate conjuncts up the unwinding
+    void Propagate(){
+      reporter->Message("beginning propagation");
+      timer_start("Propagate");
+      std::vector<Node *> sorted_nodes = unwinding->nodes;
+      std::sort(sorted_nodes.begin(),sorted_nodes.end(),std::less<Node *>()); // sorts by sequence number
+      hash_map<Node *,std::set<expr> > facts;
+      for(unsigned i = 0; i < sorted_nodes.size(); i++){
+        Node *node = sorted_nodes[i];
+        std::set<expr> &node_facts = facts[node->map];
+        if(!(node->Outgoing && indset->Contains(node)))
+          continue;
+        std::vector<expr> conj_vec;
+        unwinding->CollectConjuncts(node->Annotation.Formula,conj_vec);
+        std::set<expr> conjs;
+        std::copy(conj_vec.begin(),conj_vec.end(),std::inserter(conjs,conjs.begin()));
+        if(!node_facts.empty()){
+          RPFP *checker = new RPFP(rpfp->ls);
+          slvr.push();
+          Node *root = checker->CloneNode(node);
+          Edge *edge = node->Outgoing;
+          // checker->AssertNode(root);
+          std::vector<Node *> cs;
+          for(unsigned j = 0; j < edge->Children.size(); j++){
+            Node *oc = edge->Children[j];
+            Node *nc = checker->CloneNode(oc);
+            nc->Annotation = oc->Annotation; // is this needed?
+            cs.push_back(nc);
+          }
+          Edge *checker_edge = checker->CreateEdge(root,edge->F,cs);
+          checker->AssertEdge(checker_edge, 0, true, false);
+          std::vector<expr> propagated;
+          for(std::set<expr> ::iterator it = node_facts.begin(), en = node_facts.end(); it != en;){
+            const expr &fact = *it;
+            if(conjs.find(fact) == conjs.end()){
+              root->Bound.Formula = fact;
+              slvr.push();
+              checker->AssertNode(root);
+              check_result res = checker->Check(root);
+              slvr.pop();
+              if(res != unsat){
+                std::set<expr> ::iterator victim = it;
+                ++it;
+                node_facts.erase(victim); // if it ain't true, nix it
+                continue;
+              }
+              propagated.push_back(fact);
+            }
+            ++it;
+          }
+          slvr.pop();
+          for(unsigned i = 0; i < propagated.size(); i++){
+            root->Annotation.Formula = propagated[i];
+            UpdateNodeToNode(node,root);
+          }
+          delete checker;
+        }
+        for(std::set<expr> ::iterator it = conjs.begin(), en = conjs.end(); it != en; ++it){
+          expr foo = *it;
+          node_facts.insert(foo);
+        }
+      }
+      timer_stop("Propagate");
+    }
+
     /** This class represents a derivation tree. */
     class DerivationTree {

@@ -1757,7 +1839,9 @@ namespace Duality {
       tree->SolveSingleNode(top,node);
       if(expansions.size() == 1 && NodeTooComplicated(node))
         SimplifyNode(node);
-      tree->Generalize(top,node);
+      else
+        node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify();
+      Generalize(node);
       if(RecordUpdate(node))
         update_count++;
     }

@@ -1791,7 +1875,14 @@ namespace Duality {
         RemoveExpansion(node);
       }
       stack.pop_back();
-      if(prev_level_used || stack.size() == 1) break;
+      if(stack.size() == 1)break;
+      if(prev_level_used){
+        Node *node = stack.back().expansions[0];
+        if(!Propagate(node)) break;
+        if(!RecordUpdate(node)) break; // shouldn't happen!
+        RemoveUpdateNodesAtCurrentLevel(); // this level is about to be deleted -- remove its children from update list
+        continue;
+      }
       RemoveUpdateNodesAtCurrentLevel(); // this level is about to be deleted -- remove its children from update list
       std::vector<Node *> &unused_ex = stack.back().expansions;
       for(unsigned i = 0; i < unused_ex.size(); i++)

@@ -1833,8 +1924,10 @@ namespace Duality {

     void SimplifyNode(Node *node){
       // have to destroy the old proof to get a new interpolant
+      timer_start("SimplifyNode");
       tree->PopPush();
       tree->InterpolateByCases(top,node);
+      timer_stop("SimplifyNode");
     }

     bool LevelUsedInProof(unsigned level){

@@ -1933,6 +2026,34 @@ namespace Duality {
         throw "can't unmap node";
       }

+      void Generalize(Node *node){
+#ifndef USE_RPFP_CLONE
+        tree->Generalize(top,node);
+#else
+        RPFP_caching *clone_rpfp = duality->clone_rpfp;
+        Edge *clone_edge = clone_rpfp->GetEdgeClone(node->Outgoing->map);
+        Node *clone_node = clone_edge->Parent;
+        clone_node->Annotation = node->Annotation;
+        for(unsigned i = 0; i < clone_edge->Children.size(); i++)
+          clone_edge->Children[i]->Annotation = node->map->Outgoing->Children[i]->Annotation;
+        clone_rpfp->GeneralizeCache(clone_edge);
+        node->Annotation = clone_node->Annotation;
+      }
+#endif
+
+      bool Propagate(Node *node){
+        RPFP_caching *clone_rpfp = duality->clone_rpfp;
+        Edge *clone_edge = clone_rpfp->GetEdgeClone(node->Outgoing->map);
+        Node *clone_node = clone_edge->Parent;
+        clone_node->Annotation = node->map->Annotation;
+        for(unsigned i = 0; i < clone_edge->Children.size(); i++)
+          clone_edge->Children[i]->Annotation = node->map->Outgoing->Children[i]->Annotation;
+        bool res = clone_rpfp->PropagateCache(clone_edge);
+        if(res)
+          node->Annotation = clone_node->Annotation;
+        return res;
+      }
+
     };

@@ -504,7 +504,10 @@ expr context::make_quant(decl_kind op, const std::vector<sort> &_sorts, const st
         add(linear_assumptions[i][j]);
     }

-    check_result res = check();
+    check_result res = unsat;
+
+    if(!m_solver->get_proof())
+      res = check();

     if(res == unsat){

@@ -1393,6 +1393,18 @@ namespace std {
  };
 }

+// to make Duality::ast usable in ordered collections
+namespace std {
+  template <>
+    class less<Duality::expr> {
+    public:
+    bool operator()(const Duality::expr &s, const Duality::expr &t) const {
+      // return s.raw() < t.raw();
+      return s.raw()->get_id() < t.raw()->get_id();
+    }
+  };
+}
+
 // to make Duality::func_decl hashable
 namespace hash_space {
   template <>

@@ -2170,7 +2170,8 @@ class iz3proof_itp_impl : public iz3proof_itp {
     for(unsigned i = 0; i < prem_cons.size(); i++){
       const ast &lit = prem_cons[i];
       if(get_term_type(lit) == LitA)
-        linear_comb(thing,coeffs[i],lit);
+        // Farkas rule seems to assume strict integer inequalities are rounded
+        linear_comb(thing,coeffs[i],lit,true /*round_off*/);
     }
     thing = simplify_ineq(thing);
     for(unsigned i = 0; i < prem_cons.size(); i++){
@@ -2195,9 +2196,9 @@ class iz3proof_itp_impl : public iz3proof_itp {

   /** Set P to P + cQ, where P and Q are linear inequalities. Assumes P is 0 <= y or 0 < y. */

-  void linear_comb(ast &P, const ast &c, const ast &Q){
+  void linear_comb(ast &P, const ast &c, const ast &Q, bool round_off = false){
     ast Qrhs;
-    bool strict = op(P) == Lt;
+    bool qstrict = false;
     if(is_not(Q)){
       ast nQ = arg(Q,0);
       switch(op(nQ)){
@@ -2209,11 +2210,11 @@ class iz3proof_itp_impl : public iz3proof_itp {
         break;
       case Geq:
         Qrhs = make(Sub,arg(nQ,1),arg(nQ,0));
-        strict = true;
+        qstrict = true;
         break;
       case Leq:
         Qrhs = make(Sub,arg(nQ,0),arg(nQ,1));
-        strict = true;
+        qstrict = true;
         break;
       default:
         throw proof_error();
@@ -2229,17 +2230,20 @@ class iz3proof_itp_impl : public iz3proof_itp {
         break;
       case Lt:
         Qrhs = make(Sub,arg(Q,1),arg(Q,0));
-        strict = true;
+        qstrict = true;
         break;
       case Gt:
         Qrhs = make(Sub,arg(Q,0),arg(Q,1));
-        strict = true;
+        qstrict = true;
        break;
       default:
         throw proof_error();
       }
     }
     Qrhs = make(Times,c,Qrhs);
+    bool pstrict = op(P) == Lt, strict = pstrict || qstrict;
+    if(pstrict && qstrict && round_off)
+      Qrhs = make(Sub,Qrhs,make_int(rational(1)));
     if(strict)
       P = make(Lt,arg(P,0),make(Plus,arg(P,1),Qrhs));
     else
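The round_off path added above is the standard integer tightening of strict inequalities. A short note on why subtracting one is sound, assuming the scaled term c*q is integer valued; this is an editorial note, not text from the commit:

% Over the integers a strict bound gains a full unit:
%   0 < y  <=>  1 <= y,   and   0 < c q  <=>  1 <= c q.
\[
  (0 < y) \;\wedge\; (0 < c\,q) \;\Longrightarrow\; 2 \le y + c\,q
  \;\Longleftrightarrow\; 0 < y + c\,q - 1,
\]
% which is exactly the unit kept by  Qrhs = make(Sub,Qrhs,make_int(rational(1)))
% when both P and cQ are strict and round_off is set.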