Mirror of https://github.com/Z3Prover/z3 (synced 2025-04-28 03:15:50 +00:00)
Merge branch 'unstable' of https://git01.codeplex.com/z3 into opt
This commit is contained in: commit c42ee3bb01
37 changed files with 2162 additions and 182 deletions
|
@ -36,12 +36,11 @@ namespace Duality {
|
|||
struct Z3User {
|
||||
|
||||
context &ctx;
|
||||
solver &slvr;
|
||||
|
||||
typedef func_decl FuncDecl;
|
||||
typedef expr Term;
|
||||
|
||||
Z3User(context &_ctx, solver &_slvr) : ctx(_ctx), slvr(_slvr){}
|
||||
Z3User(context &_ctx) : ctx(_ctx){}
|
||||
|
||||
const char *string_of_int(int n);
|
||||
|
||||
|
@ -53,6 +52,8 @@ namespace Duality {
|
|||
|
||||
Term SubstRec(hash_map<ast, Term> &memo, const Term &t);
|
||||
|
||||
Term SubstRec(hash_map<ast, Term> &memo, hash_map<func_decl, func_decl> &map, const Term &t);
|
||||
|
||||
void Strengthen(Term &x, const Term &y);
|
||||
|
||||
// return the func_decl of an app if it is uninterpreted
|
||||
|
@ -77,14 +78,14 @@ namespace Duality {
|
|||
|
||||
void Summarize(const Term &t);
|
||||
|
||||
int CumulativeDecisions();
|
||||
|
||||
int CountOperators(const Term &t);
|
||||
|
||||
Term SubstAtom(hash_map<ast, Term> &memo, const expr &t, const expr &atom, const expr &val);
|
||||
|
||||
Term RemoveRedundancy(const Term &t);
|
||||
|
||||
Term IneqToEq(const Term &t);
|
||||
|
||||
bool IsLiteral(const expr &lit, expr &atom, expr &val);
|
||||
|
||||
expr Negate(const expr &f);
|
||||
|
@ -98,7 +99,10 @@ namespace Duality {
|
|||
bool IsClosedFormula(const Term &t);
|
||||
|
||||
Term AdjustQuantifiers(const Term &t);
|
||||
private:
|
||||
|
||||
FuncDecl RenumberPred(const FuncDecl &f, int n);
|
||||
|
||||
protected:
|
||||
|
||||
void SummarizeRec(hash_set<ast> &memo, std::vector<expr> &lits, int &ops, const Term &t);
|
||||
int CountOperatorsRec(hash_set<ast> &memo, const Term &t);
|
||||
|
@ -108,6 +112,7 @@ private:
|
|||
expr ReduceAndOr(const std::vector<expr> &args, bool is_and, std::vector<expr> &res);
|
||||
expr FinishAndOr(const std::vector<expr> &args, bool is_and);
|
||||
expr PullCommonFactors(std::vector<expr> &args, bool is_and);
|
||||
Term IneqToEqRec(hash_map<ast, Term> &memo, const Term &t);
|
||||
|
||||
|
||||
};
|
||||
|
@ -256,9 +261,9 @@ private:
|
|||
}
|
||||
#endif
|
||||
|
||||
iZ3LogicSolver(context &c) : LogicSolver(c) {
|
||||
iZ3LogicSolver(context &c, bool models = true) : LogicSolver(c) {
|
||||
ctx = ictx = &c;
|
||||
slvr = islvr = new interpolating_solver(*ictx);
|
||||
slvr = islvr = new interpolating_solver(*ictx, models);
|
||||
need_goals = false;
|
||||
islvr->SetWeakInterpolants(true);
|
||||
}
|
||||
|
@ -308,8 +313,8 @@ private:
|
|||
}
|
||||
|
||||
LogicSolver *ls;
|
||||
|
||||
private:
|
||||
|
||||
protected:
|
||||
int nodeCount;
|
||||
int edgeCount;
|
||||
|
||||
|
@ -324,7 +329,7 @@ private:
|
|||
|
||||
public:
|
||||
model dualModel;
|
||||
private:
|
||||
protected:
|
||||
literals dualLabels;
|
||||
std::list<stack_entry> stack;
|
||||
std::vector<Term> axioms; // only saved here for printing purposes
|
||||
|
@ -340,7 +345,7 @@ private:
|
|||
inherit the axioms.
|
||||
*/
|
||||
|
||||
RPFP(LogicSolver *_ls) : Z3User(*(_ls->ctx), *(_ls->slvr)), dualModel(*(_ls->ctx)), aux_solver(_ls->aux_solver)
|
||||
RPFP(LogicSolver *_ls) : Z3User(*(_ls->ctx)), dualModel(*(_ls->ctx)), aux_solver(_ls->aux_solver)
|
||||
{
|
||||
ls = _ls;
|
||||
nodeCount = 0;
|
||||
|
@ -350,7 +355,7 @@ private:
|
|||
proof_core = 0;
|
||||
}
|
||||
|
||||
~RPFP();
|
||||
virtual ~RPFP();
|
||||
|
||||
/** Symbolic representation of a relational transformer */
|
||||
class Transformer
|
||||
|
@ -574,7 +579,7 @@ private:
|
|||
* you must pop the context accordingly. The second argument is
|
||||
* the number of pushes we are inside. */
|
||||
|
||||
void AssertEdge(Edge *e, int persist = 0, bool with_children = false, bool underapprox = false);
|
||||
virtual void AssertEdge(Edge *e, int persist = 0, bool with_children = false, bool underapprox = false);
|
||||
|
||||
/* Constrain an edge by the annotation of one of its children. */
|
||||
|
||||
|
@ -808,9 +813,31 @@ private:
|
|||
/** Edges of the graph. */
|
||||
std::vector<Edge *> edges;
|
||||
|
||||
/** Fuse a vector of transformers. If the total number of inputs of the transformers
    is N, then the result is an N-ary transformer whose output is the union of
    the outputs of the given transformers. That is, suppose we have a vector of transformers
    {T_i(r_i1,...,r_iN(i)) : i=1..M}. Then the result is a transformer

    F(r_11,...,r_1N(1),...,r_M1,...,r_MN(M)) =
    T_1(r_11,...,r_1N(1)) U ... U T_M(r_M1,...,r_MN(M))
*/
|
||||
|
||||
Transformer Fuse(const std::vector<Transformer *> &trs);
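A hedged illustration of the fusion described above (the transformers T1, T2 and the rpfp object are hypothetical client-side names, not part of this header): with T1(r_11, r_12) and T2(r_21), so M = 2 and N = 3, the fused transformer is F(r_11, r_12, r_21) = T1(r_11, r_12) U T2(r_21).

    std::vector<RPFP::Transformer *> trs;
    trs.push_back(&T1);
    trs.push_back(&T2);
    RPFP::Transformer F = rpfp->Fuse(trs);   // 3-ary transformer, union of T1 and T2 outputs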
|
||||
|
||||
/** Fuse edges so that each node is the output of at most one edge. This
|
||||
transformation is solution-preserving, but changes the numbering of edges in
|
||||
counterexamples.
|
||||
*/
|
||||
void FuseEdges();
|
||||
|
||||
void RemoveDeadNodes();
|
||||
|
||||
Term SubstParams(const std::vector<Term> &from,
|
||||
const std::vector<Term> &to, const Term &t);
|
||||
|
||||
Term SubstParamsNoCapture(const std::vector<Term> &from,
|
||||
const std::vector<Term> &to, const Term &t);
|
||||
|
||||
Term Localize(Edge *e, const Term &t);
|
||||
|
||||
void EvalNodeAsConstraint(Node *p, Transformer &res);
|
||||
|
@ -829,7 +856,13 @@ private:
|
|||
*/
|
||||
void ComputeProofCore();
|
||||
|
||||
private:
|
||||
int CumulativeDecisions();
|
||||
|
||||
solver &slvr(){
|
||||
return *ls->slvr;
|
||||
}
|
||||
|
||||
protected:
|
||||
|
||||
void ClearProofCore(){
|
||||
if(proof_core)
|
||||
|
@ -947,6 +980,8 @@ private:
|
|||
|
||||
expr SimplifyOr(std::vector<expr> &lits);
|
||||
|
||||
expr SimplifyAnd(std::vector<expr> &lits);
|
||||
|
||||
void SetAnnotation(Node *root, const expr &t);
|
||||
|
||||
void AddEdgeToSolver(Edge *edge);
|
||||
|
@ -959,9 +994,58 @@ private:
|
|||
|
||||
expr NegateLit(const expr &f);
|
||||
|
||||
expr GetEdgeFormula(Edge *e, int persist, bool with_children, bool underapprox);
|
||||
|
||||
bool IsVar(const expr &t);
|
||||
|
||||
void GetVarsRec(hash_set<ast> &memo, const expr &cnst, std::vector<expr> &vars);
|
||||
|
||||
expr UnhoistPullRec(hash_map<ast,expr> & memo, const expr &w, hash_map<ast,expr> & init_defs, hash_map<ast,expr> & const_params, hash_map<ast,expr> &const_params_inv, std::vector<expr> &new_params);
|
||||
|
||||
void AddParamsToTransformer(Transformer &trans, const std::vector<expr> ¶ms);
|
||||
|
||||
expr AddParamsToApp(const expr &app, const func_decl &new_decl, const std::vector<expr> ¶ms);
|
||||
|
||||
expr GetRelRec(hash_set<ast> &memo, const expr &t, const func_decl &rel);
|
||||
|
||||
expr GetRel(Edge *edge, int child_idx);
|
||||
|
||||
void GetDefs(const expr &cnst, hash_map<ast,expr> &defs);
|
||||
|
||||
void GetDefsRec(const expr &cnst, hash_map<ast,expr> &defs);
|
||||
|
||||
void AddParamsToNode(Node *node, const std::vector<expr> ¶ms);
|
||||
|
||||
void UnhoistLoop(Edge *loop_edge, Edge *init_edge);
|
||||
|
||||
void Unhoist();
|
||||
|
||||
Term ElimIteRec(hash_map<ast,expr> &memo, const Term &t, std::vector<expr> &cnsts);
|
||||
|
||||
Term ElimIte(const Term &t);
|
||||
|
||||
void MarkLiveNodes(hash_map<Node *,std::vector<Edge *> > &outgoing, hash_set<Node *> &live_nodes, Node *node);
|
||||
|
||||
virtual void slvr_add(const expr &e);
|
||||
|
||||
virtual void slvr_pop(int i);
|
||||
|
||||
virtual void slvr_push();
|
||||
|
||||
virtual check_result slvr_check(unsigned n = 0, expr * const assumptions = 0, unsigned *core_size = 0, expr *core = 0);
|
||||
|
||||
virtual lbool ls_interpolate_tree(TermTree *assumptions,
|
||||
TermTree *&interpolants,
|
||||
model &_model,
|
||||
TermTree *goals = 0,
|
||||
bool weak = false);
|
||||
|
||||
virtual bool proof_core_contains(const expr &e);
|
||||
|
||||
};
|
||||
|
||||
/** RPFP solver base class. */
|
||||
|
||||
|
||||
class Solver {
|
||||
|
||||
|
@ -1005,6 +1089,8 @@ private:
|
|||
/** Object thrown on cancellation */
|
||||
struct Canceled {};
|
||||
|
||||
/** Object thrown on incompleteness */
|
||||
struct Incompleteness {};
|
||||
};
|
||||
}
|
||||
|
||||
|
@ -1042,3 +1128,130 @@ namespace std {
|
|||
}
|
||||
};
|
||||
}
|
||||
|
||||
// #define LIMIT_STACK_WEIGHT 5
|
||||
|
||||
|
||||
namespace Duality {
|
||||
/** Caching version of RPFP. Instead of asserting constraints, returns assumption literals */
|
||||
|
||||
class RPFP_caching : public RPFP {
|
||||
public:
|
||||
|
||||
/** appends assumption literals for edge to lits. if with_children is true,
    includes the annotation of the edge's children.
*/
|
||||
void AssertEdgeCache(Edge *e, std::vector<Term> &lits, bool with_children = false);
|
||||
|
||||
/** appends assumption literals for node to lits */
|
||||
void AssertNodeCache(Node *, std::vector<Term> lits);
|
||||
|
||||
/** check assumption lits, and return core */
|
||||
check_result CheckCore(const std::vector<Term> &assumps, std::vector<Term> &core);
|
||||
|
||||
/** Clone another RPFP into this one, keeping a map */
|
||||
void Clone(RPFP *other);
|
||||
|
||||
/** Get the clone of a node */
|
||||
Node *GetNodeClone(Node *other_node);
|
||||
|
||||
/** Get the clone of an edge */
|
||||
Edge *GetEdgeClone(Edge *other_edge);
|
||||
|
||||
/** Try to strengthen the parent of an edge */
|
||||
void GeneralizeCache(Edge *edge);
|
||||
|
||||
/** Try to propagate some facts from children to parents of edge.
|
||||
Return true if success. */
|
||||
bool PropagateCache(Edge *edge);
|
||||
|
||||
/** Construct a caching RPFP using a LogicSolver */
|
||||
RPFP_caching(LogicSolver *_ls) : RPFP(_ls) {}
|
||||
|
||||
/** Constrain an edge by its child's annotation. Return
    assumption lits. */
|
||||
void ConstrainParentCache(Edge *parent, Node *child, std::vector<Term> &lits);
|
||||
|
||||
#ifdef LIMIT_STACK_WEIGHT
|
||||
virtual void AssertEdge(Edge *e, int persist = 0, bool with_children = false, bool underapprox = false);
|
||||
#endif
|
||||
|
||||
virtual ~RPFP_caching(){}
|
||||
|
||||
protected:
|
||||
hash_map<ast,expr> AssumptionLits;
|
||||
hash_map<Node *, Node *> NodeCloneMap;
|
||||
hash_map<Edge *, Edge *> EdgeCloneMap;
|
||||
std::vector<expr> alit_stack;
|
||||
std::vector<unsigned> alit_stack_sizes;
|
||||
hash_map<Edge *, uptr<LogicSolver> > edge_solvers;
|
||||
|
||||
#ifdef LIMIT_STACK_WEIGHT
|
||||
struct weight_counter {
|
||||
int val;
|
||||
weight_counter(){val = 0;}
|
||||
void swap(weight_counter &other){
|
||||
std::swap(val,other.val);
|
||||
}
|
||||
};
|
||||
|
||||
struct big_stack_entry {
|
||||
weight_counter weight_added;
|
||||
std::vector<expr> new_alits;
|
||||
std::vector<expr> alit_stack;
|
||||
std::vector<unsigned> alit_stack_sizes;
|
||||
};
|
||||
|
||||
std::vector<expr> new_alits;
|
||||
weight_counter weight_added;
|
||||
std::vector<big_stack_entry> big_stack;
|
||||
#endif
|
||||
|
||||
|
||||
|
||||
void GetAssumptionLits(const expr &fmla, std::vector<expr> &lits, hash_map<ast,expr> *opt_map = 0);
|
||||
|
||||
void GreedyReduceCache(std::vector<expr> &assumps, std::vector<expr> &core);
|
||||
|
||||
void FilterCore(std::vector<expr> &core, std::vector<expr> &full_core);
|
||||
void ConstrainEdgeLocalizedCache(Edge *e, const Term &tl, std::vector<expr> &lits);
|
||||
|
||||
virtual void slvr_add(const expr &e);
|
||||
|
||||
virtual void slvr_pop(int i);
|
||||
|
||||
virtual void slvr_push();
|
||||
|
||||
virtual check_result slvr_check(unsigned n = 0, expr * const assumptions = 0, unsigned *core_size = 0, expr *core = 0);
|
||||
|
||||
virtual lbool ls_interpolate_tree(TermTree *assumptions,
|
||||
TermTree *&interpolants,
|
||||
model &_model,
|
||||
TermTree *goals = 0,
|
||||
bool weak = false);
|
||||
|
||||
virtual bool proof_core_contains(const expr &e);
|
||||
|
||||
void GetTermTreeAssertionLiterals(TermTree *assumptions);
|
||||
|
||||
void GetTermTreeAssertionLiteralsRec(TermTree *assumptions);
|
||||
|
||||
LogicSolver *SolverForEdge(Edge *edge, bool models);
|
||||
|
||||
public:
|
||||
struct scoped_solver_for_edge {
|
||||
LogicSolver *orig_ls;
|
||||
RPFP_caching *rpfp;
|
||||
scoped_solver_for_edge(RPFP_caching *_rpfp, Edge *edge, bool models = false){
|
||||
rpfp = _rpfp;
|
||||
orig_ls = rpfp->ls;
|
||||
rpfp->ls = rpfp->SolverForEdge(edge,models);
|
||||
}
|
||||
~scoped_solver_for_edge(){
|
||||
rpfp->ls = orig_ls;
|
||||
}
|
||||
};
|
||||
|
||||
};
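A minimal usage sketch of the caching interface above, assuming an existing RPFP *src with an edge e and a context ctx (all hypothetical names): constraints are collected as assumption literals and then checked with CheckCore, which returns the subset of literals actually used.

    RPFP::LogicSolver *ls = new RPFP::iZ3LogicSolver(ctx);
    RPFP_caching *cache = new RPFP_caching(ls);
    cache->Clone(src);                         // copy nodes and edges, keeping clone maps
    RPFP::Edge *ce = cache->GetEdgeClone(e);   // clone of src edge e
    std::vector<RPFP::Term> lits;
    cache->AssertEdgeCache(ce, lits, true);    // append assumption literals instead of asserting
    std::vector<RPFP::Term> core;
    if (cache->CheckCore(lits, core) == unsat) {
        // core now holds the assumption literals needed for unsatisfiability
    }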
|
||||
|
||||
}
|
||||
|
|
|
@ -25,7 +25,14 @@ Revision History:
|
|||
#include <string.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#ifdef WIN32
|
||||
#pragma warning(disable:4996)
|
||||
#pragma warning(disable:4800)
|
||||
#pragma warning(disable:4267)
|
||||
#endif
|
||||
|
||||
#include "duality_wrapper.h"
|
||||
#include "iz3profiling.h"
|
||||
|
||||
namespace Duality {
|
||||
|
||||
|
@ -103,6 +110,7 @@ namespace Duality {
|
|||
output_time(*pfs, it->second.t);
|
||||
(*pfs) << std::endl;
|
||||
}
|
||||
profiling::print(os); // print the interpolation stats
|
||||
}
|
||||
|
||||
void timer_start(const char *name){
|
||||
|
|
File diff suppressed because it is too large
@ -19,6 +19,12 @@ Revision History:
|
|||
|
||||
--*/
|
||||
|
||||
#ifdef WIN32
|
||||
#pragma warning(disable:4996)
|
||||
#pragma warning(disable:4800)
|
||||
#pragma warning(disable:4267)
|
||||
#endif
|
||||
|
||||
#include "duality.h"
|
||||
#include "duality_profiling.h"
|
||||
|
||||
|
@ -26,6 +32,7 @@ Revision History:
|
|||
#include <set>
|
||||
#include <map>
|
||||
#include <list>
|
||||
#include <iterator>
|
||||
|
||||
// TODO: make these official options or get rid of them
|
||||
|
||||
|
@ -37,14 +44,18 @@ Revision History:
|
|||
#define MINIMIZE_CANDIDATES
|
||||
// #define MINIMIZE_CANDIDATES_HARDER
|
||||
#define BOUNDED
|
||||
#define CHECK_CANDS_FROM_IND_SET
|
||||
// #define CHECK_CANDS_FROM_IND_SET
|
||||
#define UNDERAPPROX_NODES
|
||||
#define NEW_EXPAND
|
||||
#define EARLY_EXPAND
|
||||
// #define TOP_DOWN
|
||||
// #define EFFORT_BOUNDED_STRAT
|
||||
#define SKIP_UNDERAPPROX_NODES
|
||||
|
||||
#define USE_RPFP_CLONE
|
||||
// #define KEEP_EXPANSIONS
|
||||
// #define USE_CACHING_RPFP
|
||||
// #define PROPAGATE_BEFORE_CHECK
|
||||
#define USE_NEW_GEN_CANDS
|
||||
|
||||
namespace Duality {
|
||||
|
||||
|
@ -101,7 +112,7 @@ namespace Duality {
|
|||
public:
|
||||
Duality(RPFP *_rpfp)
|
||||
: ctx(_rpfp->ctx),
|
||||
slvr(_rpfp->slvr),
|
||||
slvr(_rpfp->slvr()),
|
||||
nodes(_rpfp->nodes),
|
||||
edges(_rpfp->edges)
|
||||
{
|
||||
|
@ -115,8 +126,42 @@ namespace Duality {
|
|||
Report = false;
|
||||
StratifiedInlining = false;
|
||||
RecursionBound = -1;
|
||||
{
|
||||
scoped_no_proof no_proofs_please(ctx.m());
|
||||
#ifdef USE_RPFP_CLONE
|
||||
clone_ls = new RPFP::iZ3LogicSolver(ctx, false); // no models needed for this one
|
||||
clone_rpfp = new RPFP_caching(clone_ls);
|
||||
clone_rpfp->Clone(rpfp);
|
||||
#endif
|
||||
#ifdef USE_NEW_GEN_CANDS
|
||||
gen_cands_ls = new RPFP::iZ3LogicSolver(ctx);
|
||||
gen_cands_rpfp = new RPFP_caching(gen_cands_ls);
|
||||
gen_cands_rpfp->Clone(rpfp);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
~Duality(){
|
||||
#ifdef USE_RPFP_CLONE
|
||||
delete clone_rpfp;
|
||||
delete clone_ls;
|
||||
#endif
|
||||
#ifdef USE_NEW_GEN_CANDS
|
||||
delete gen_cands_rpfp;
|
||||
delete gen_cands_ls;
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifdef USE_RPFP_CLONE
|
||||
RPFP::LogicSolver *clone_ls;
|
||||
RPFP_caching *clone_rpfp;
|
||||
#endif
|
||||
#ifdef USE_NEW_GEN_CANDS
|
||||
RPFP::LogicSolver *gen_cands_ls;
|
||||
RPFP_caching *gen_cands_rpfp;
|
||||
#endif
|
||||
|
||||
|
||||
typedef RPFP::Node Node;
|
||||
typedef RPFP::Edge Edge;
|
||||
|
||||
|
@ -804,8 +849,10 @@ namespace Duality {
|
|||
Node *child = chs[i];
|
||||
if(TopoSort[child] < TopoSort[node->map]){
|
||||
Node *leaf = LeafMap[child];
|
||||
if(!indset->Contains(leaf))
|
||||
if(!indset->Contains(leaf)){
|
||||
node->Outgoing->F.Formula = ctx.bool_val(false); // make this a proper leaf, else bogus cex
|
||||
return node->Outgoing;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1085,7 +1132,8 @@ namespace Duality {
|
|||
void ExtractCandidateFromCex(Edge *edge, RPFP *checker, Node *root, Candidate &candidate){
|
||||
candidate.edge = edge;
|
||||
for(unsigned j = 0; j < edge->Children.size(); j++){
|
||||
Edge *lb = root->Outgoing->Children[j]->Outgoing;
|
||||
Node *node = root->Outgoing->Children[j];
|
||||
Edge *lb = node->Outgoing;
|
||||
std::vector<Node *> &insts = insts_of_node[edge->Children[j]];
|
||||
#ifndef MINIMIZE_CANDIDATES
|
||||
for(int k = insts.size()-1; k >= 0; k--)
|
||||
|
@ -1095,8 +1143,8 @@ namespace Duality {
|
|||
{
|
||||
Node *inst = insts[k];
|
||||
if(indset->Contains(inst)){
|
||||
if(checker->Empty(lb->Parent) ||
|
||||
eq(checker->Eval(lb,NodeMarker(inst)),ctx.bool_val(true))){
|
||||
if(checker->Empty(node) ||
|
||||
eq(lb ? checker->Eval(lb,NodeMarker(inst)) : checker->dualModel.eval(NodeMarker(inst)),ctx.bool_val(true))){
|
||||
candidate.Children.push_back(inst);
|
||||
goto next_child;
|
||||
}
|
||||
|
@ -1166,6 +1214,25 @@ namespace Duality {
|
|||
#endif
|
||||
|
||||
|
||||
Node *CheckerForEdgeClone(Edge *edge, RPFP_caching *checker){
|
||||
Edge *gen_cands_edge = checker->GetEdgeClone(edge);
|
||||
Node *root = gen_cands_edge->Parent;
|
||||
root->Outgoing = gen_cands_edge;
|
||||
GenNodeSolutionFromIndSet(edge->Parent, root->Bound);
|
||||
#if 0
|
||||
if(root->Bound.IsFull())
|
||||
return = 0;
|
||||
#endif
|
||||
checker->AssertNode(root);
|
||||
for(unsigned j = 0; j < edge->Children.size(); j++){
|
||||
Node *oc = edge->Children[j];
|
||||
Node *nc = gen_cands_edge->Children[j];
|
||||
GenNodeSolutionWithMarkers(oc,nc->Annotation,true);
|
||||
}
|
||||
checker->AssertEdge(gen_cands_edge,1,true);
|
||||
return root;
|
||||
}
|
||||
|
||||
/** If the current proposed solution is not inductive,
|
||||
use the induction failure to generate candidates for extension. */
|
||||
void GenCandidatesFromInductionFailure(bool full_scan = false){
|
||||
|
@ -1175,6 +1242,7 @@ namespace Duality {
|
|||
Edge *edge = edges[i];
|
||||
if(!full_scan && updated_nodes.find(edge->Parent) == updated_nodes.end())
|
||||
continue;
|
||||
#ifndef USE_NEW_GEN_CANDS
|
||||
slvr.push();
|
||||
RPFP *checker = new RPFP(rpfp->ls);
|
||||
Node *root = CheckerForEdge(edge,checker);
|
||||
|
@ -1186,6 +1254,18 @@ namespace Duality {
|
|||
}
|
||||
slvr.pop(1);
|
||||
delete checker;
|
||||
#else
|
||||
RPFP_caching::scoped_solver_for_edge(gen_cands_rpfp,edge,true /* models */);
|
||||
gen_cands_rpfp->Push();
|
||||
Node *root = CheckerForEdgeClone(edge,gen_cands_rpfp);
|
||||
if(gen_cands_rpfp->Check(root) != unsat){
|
||||
Candidate candidate;
|
||||
ExtractCandidateFromCex(edge,gen_cands_rpfp,root,candidate);
|
||||
reporter->InductionFailure(edge,candidate.Children);
|
||||
candidates.push_back(candidate);
|
||||
}
|
||||
gen_cands_rpfp->Pop(1);
|
||||
#endif
|
||||
}
|
||||
updated_nodes.clear();
|
||||
timer_stop("GenCandIndFail");
|
||||
|
@ -1309,6 +1389,9 @@ namespace Duality {
|
|||
node. */
|
||||
bool SatisfyUpperBound(Node *node){
|
||||
if(node->Bound.IsFull()) return true;
|
||||
#ifdef PROPAGATE_BEFORE_CHECK
|
||||
Propagate();
|
||||
#endif
|
||||
reporter->Bound(node);
|
||||
int start_decs = rpfp->CumulativeDecisions();
|
||||
DerivationTree *dtp = new DerivationTreeSlow(this,unwinding,reporter,heuristic,FullExpand);
|
||||
|
@ -1412,13 +1495,77 @@ namespace Duality {
|
|||
}
|
||||
}
|
||||
|
||||
// Propagate conjuncts up the unwinding
|
||||
void Propagate(){
|
||||
reporter->Message("beginning propagation");
|
||||
timer_start("Propagate");
|
||||
std::vector<Node *> sorted_nodes = unwinding->nodes;
|
||||
std::sort(sorted_nodes.begin(),sorted_nodes.end(),std::less<Node *>()); // sorts by sequence number
|
||||
hash_map<Node *,std::set<expr> > facts;
|
||||
for(unsigned i = 0; i < sorted_nodes.size(); i++){
|
||||
Node *node = sorted_nodes[i];
|
||||
std::set<expr> &node_facts = facts[node->map];
|
||||
if(!(node->Outgoing && indset->Contains(node)))
|
||||
continue;
|
||||
std::vector<expr> conj_vec;
|
||||
unwinding->CollectConjuncts(node->Annotation.Formula,conj_vec);
|
||||
std::set<expr> conjs;
|
||||
std::copy(conj_vec.begin(),conj_vec.end(),std::inserter(conjs,conjs.begin()));
|
||||
if(!node_facts.empty()){
|
||||
RPFP *checker = new RPFP(rpfp->ls);
|
||||
slvr.push();
|
||||
Node *root = checker->CloneNode(node);
|
||||
Edge *edge = node->Outgoing;
|
||||
// checker->AssertNode(root);
|
||||
std::vector<Node *> cs;
|
||||
for(unsigned j = 0; j < edge->Children.size(); j++){
|
||||
Node *oc = edge->Children[j];
|
||||
Node *nc = checker->CloneNode(oc);
|
||||
nc->Annotation = oc->Annotation; // is this needed?
|
||||
cs.push_back(nc);
|
||||
}
|
||||
Edge *checker_edge = checker->CreateEdge(root,edge->F,cs);
|
||||
checker->AssertEdge(checker_edge, 0, true, false);
|
||||
std::vector<expr> propagated;
|
||||
for(std::set<expr> ::iterator it = node_facts.begin(), en = node_facts.end(); it != en;){
|
||||
const expr &fact = *it;
|
||||
if(conjs.find(fact) == conjs.end()){
|
||||
root->Bound.Formula = fact;
|
||||
slvr.push();
|
||||
checker->AssertNode(root);
|
||||
check_result res = checker->Check(root);
|
||||
slvr.pop();
|
||||
if(res != unsat){
|
||||
std::set<expr> ::iterator victim = it;
|
||||
++it;
|
||||
node_facts.erase(victim); // if it ain't true, nix it
|
||||
continue;
|
||||
}
|
||||
propagated.push_back(fact);
|
||||
}
|
||||
++it;
|
||||
}
|
||||
slvr.pop();
|
||||
for(unsigned i = 0; i < propagated.size(); i++){
|
||||
root->Annotation.Formula = propagated[i];
|
||||
UpdateNodeToNode(node,root);
|
||||
}
|
||||
delete checker;
|
||||
}
|
||||
for(std::set<expr> ::iterator it = conjs.begin(), en = conjs.end(); it != en; ++it){
|
||||
expr foo = *it;
|
||||
node_facts.insert(foo);
|
||||
}
|
||||
}
|
||||
timer_stop("Propagate");
|
||||
}
|
||||
|
||||
/** This class represents a derivation tree. */
|
||||
class DerivationTree {
|
||||
public:
|
||||
|
||||
DerivationTree(Duality *_duality, RPFP *rpfp, Reporter *_reporter, Heuristic *_heuristic, bool _full_expand)
|
||||
: slvr(rpfp->slvr),
|
||||
: slvr(rpfp->slvr()),
|
||||
ctx(rpfp->ctx)
|
||||
{
|
||||
duality = _duality;
|
||||
|
@ -1462,7 +1609,13 @@ namespace Duality {
|
|||
constrained = _constrained;
|
||||
false_approx = true;
|
||||
timer_start("Derive");
|
||||
#ifndef USE_CACHING_RPFP
|
||||
tree = _tree ? _tree : new RPFP(rpfp->ls);
|
||||
#else
|
||||
RPFP::LogicSolver *cache_ls = new RPFP::iZ3LogicSolver(ctx);
|
||||
cache_ls->slvr->push();
|
||||
tree = _tree ? _tree : new RPFP_caching(cache_ls);
|
||||
#endif
|
||||
tree->HornClauses = rpfp->HornClauses;
|
||||
tree->Push(); // so we can clear out the solver later when finished
|
||||
top = CreateApproximatedInstance(root);
|
||||
|
@ -1474,19 +1627,28 @@ namespace Duality {
|
|||
timer_start("Pop");
|
||||
tree->Pop(1);
|
||||
timer_stop("Pop");
|
||||
#ifdef USE_CACHING_RPFP
|
||||
cache_ls->slvr->pop(1);
|
||||
delete cache_ls;
|
||||
tree->ls = rpfp->ls;
|
||||
#endif
|
||||
timer_stop("Derive");
|
||||
return res;
|
||||
}
|
||||
|
||||
#define WITH_CHILDREN
|
||||
|
||||
Node *CreateApproximatedInstance(RPFP::Node *from){
|
||||
Node *to = tree->CloneNode(from);
|
||||
to->Annotation = from->Annotation;
|
||||
void InitializeApproximatedInstance(RPFP::Node *to){
|
||||
to->Annotation = to->map->Annotation;
|
||||
#ifndef WITH_CHILDREN
|
||||
tree->CreateLowerBoundEdge(to);
|
||||
#endif
|
||||
leaves.push_back(to);
|
||||
}
|
||||
|
||||
Node *CreateApproximatedInstance(RPFP::Node *from){
|
||||
Node *to = tree->CloneNode(from);
|
||||
InitializeApproximatedInstance(to);
|
||||
return to;
|
||||
}
|
||||
|
||||
|
@ -1555,13 +1717,23 @@ namespace Duality {
|
|||
|
||||
virtual void ExpandNode(RPFP::Node *p){
|
||||
// tree->RemoveEdge(p->Outgoing);
|
||||
Edge *edge = duality->GetNodeOutgoing(p->map,last_decs);
|
||||
std::vector<RPFP::Node *> &cs = edge->Children;
|
||||
std::vector<RPFP::Node *> children(cs.size());
|
||||
for(unsigned i = 0; i < cs.size(); i++)
|
||||
children[i] = CreateApproximatedInstance(cs[i]);
|
||||
Edge *ne = tree->CreateEdge(p, p->map->Outgoing->F, children);
|
||||
ne->map = p->map->Outgoing->map;
|
||||
Edge *ne = p->Outgoing;
|
||||
if(ne) {
|
||||
// reporter->Message("Recycling edge...");
|
||||
std::vector<RPFP::Node *> &cs = ne->Children;
|
||||
for(unsigned i = 0; i < cs.size(); i++)
|
||||
InitializeApproximatedInstance(cs[i]);
|
||||
// ne->dual = expr();
|
||||
}
|
||||
else {
|
||||
Edge *edge = duality->GetNodeOutgoing(p->map,last_decs);
|
||||
std::vector<RPFP::Node *> &cs = edge->Children;
|
||||
std::vector<RPFP::Node *> children(cs.size());
|
||||
for(unsigned i = 0; i < cs.size(); i++)
|
||||
children[i] = CreateApproximatedInstance(cs[i]);
|
||||
ne = tree->CreateEdge(p, p->map->Outgoing->F, children);
|
||||
ne->map = p->map->Outgoing->map;
|
||||
}
|
||||
#ifndef WITH_CHILDREN
|
||||
tree->AssertEdge(ne); // assert the edge in the solver
|
||||
#else
|
||||
|
@ -1703,12 +1875,25 @@ namespace Duality {
|
|||
void RemoveExpansion(RPFP::Node *p){
|
||||
Edge *edge = p->Outgoing;
|
||||
Node *parent = edge->Parent;
|
||||
#ifndef KEEP_EXPANSIONS
|
||||
std::vector<RPFP::Node *> cs = edge->Children;
|
||||
tree->DeleteEdge(edge);
|
||||
for(unsigned i = 0; i < cs.size(); i++)
|
||||
tree->DeleteNode(cs[i]);
|
||||
#endif
|
||||
leaves.push_back(parent);
|
||||
}
|
||||
|
||||
// remove all the descendants of tree root (but not root itself)
|
||||
void RemoveTree(RPFP *tree, RPFP::Node *root){
|
||||
Edge *edge = root->Outgoing;
|
||||
std::vector<RPFP::Node *> cs = edge->Children;
|
||||
tree->DeleteEdge(edge);
|
||||
for(unsigned i = 0; i < cs.size(); i++){
|
||||
RemoveTree(tree,cs[i]);
|
||||
tree->DeleteNode(cs[i]);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
class DerivationTreeSlow : public DerivationTree {
|
||||
|
@ -1730,13 +1915,14 @@ namespace Duality {
|
|||
|
||||
virtual bool Build(){
|
||||
|
||||
stack.back().level = tree->slvr.get_scope_level();
|
||||
stack.back().level = tree->slvr().get_scope_level();
|
||||
bool was_sat = true;
|
||||
|
||||
while (true)
|
||||
{
|
||||
lbool res;
|
||||
|
||||
unsigned slvr_level = tree->slvr.get_scope_level();
|
||||
unsigned slvr_level = tree->slvr().get_scope_level();
|
||||
if(slvr_level != stack.back().level)
|
||||
throw "stacks out of sync!";
|
||||
|
||||
|
@ -1756,14 +1942,22 @@ namespace Duality {
|
|||
tree->SolveSingleNode(top,node);
|
||||
if(expansions.size() == 1 && NodeTooComplicated(node))
|
||||
SimplifyNode(node);
|
||||
tree->Generalize(top,node);
|
||||
else
|
||||
node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify();
|
||||
Generalize(node);
|
||||
if(RecordUpdate(node))
|
||||
update_count++;
|
||||
else
|
||||
heuristic->Update(node->map); // make it less likely to expand this node in future
|
||||
}
|
||||
if(update_count == 0)
|
||||
if(update_count == 0){
|
||||
if(was_sat)
|
||||
throw Incompleteness();
|
||||
reporter->Message("backtracked without learning");
|
||||
}
|
||||
}
|
||||
tree->ComputeProofCore(); // need to compute the proof core before popping solver
|
||||
bool propagated = false;
|
||||
while(1) {
|
||||
std::vector<Node *> &expansions = stack.back().expansions;
|
||||
bool prev_level_used = LevelUsedInProof(stack.size()-2); // need to compute this before pop
|
||||
|
@ -1787,28 +1981,42 @@ namespace Duality {
|
|||
RemoveExpansion(node);
|
||||
}
|
||||
stack.pop_back();
|
||||
if(prev_level_used || stack.size() == 1) break;
|
||||
if(stack.size() == 1)break;
|
||||
if(prev_level_used){
|
||||
Node *node = stack.back().expansions[0];
|
||||
if(!Propagate(node)) break;
|
||||
if(!RecordUpdate(node)) break; // shouldn't happen!
|
||||
RemoveUpdateNodesAtCurrentLevel(); // this level is about to be deleted -- remove its children from update list
|
||||
propagated = true;
|
||||
continue;
|
||||
}
|
||||
if(propagated) break; // propagation invalidates the proof core, so disable non-chron backtrack
|
||||
RemoveUpdateNodesAtCurrentLevel(); // this level is about to be deleted -- remove its children from update list
|
||||
std::vector<Node *> &unused_ex = stack.back().expansions;
|
||||
for(unsigned i = 0; i < unused_ex.size(); i++)
|
||||
heuristic->Update(unused_ex[i]->map); // make it less likely to expand this node in future
|
||||
}
|
||||
HandleUpdatedNodes();
|
||||
if(stack.size() == 1)
|
||||
if(stack.size() == 1){
|
||||
if(top->Outgoing)
|
||||
tree->DeleteEdge(top->Outgoing); // in case we kept the tree
|
||||
return false;
|
||||
}
|
||||
was_sat = false;
|
||||
}
|
||||
else {
|
||||
was_sat = true;
|
||||
tree->Push();
|
||||
std::vector<Node *> &expansions = stack.back().expansions;
|
||||
for(unsigned i = 0; i < expansions.size(); i++){
|
||||
tree->FixCurrentState(expansions[i]->Outgoing);
|
||||
}
|
||||
#if 0
|
||||
if(tree->slvr.check() == unsat)
|
||||
if(tree->slvr().check() == unsat)
|
||||
throw "help!";
|
||||
#endif
|
||||
stack.push_back(stack_entry());
|
||||
stack.back().level = tree->slvr.get_scope_level();
|
||||
stack.back().level = tree->slvr().get_scope_level();
|
||||
if(ExpandSomeNodes(false,1)){
|
||||
continue;
|
||||
}
|
||||
|
@ -1822,13 +2030,18 @@ namespace Duality {
|
|||
}
|
||||
|
||||
bool NodeTooComplicated(Node *node){
|
||||
int ops = tree->CountOperators(node->Annotation.Formula);
|
||||
if(ops > 10) return true;
|
||||
node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify();
|
||||
return tree->CountOperators(node->Annotation.Formula) > 3;
|
||||
}
|
||||
|
||||
void SimplifyNode(Node *node){
|
||||
// have to destroy the old proof to get a new interpolant
|
||||
timer_start("SimplifyNode");
|
||||
tree->PopPush();
|
||||
tree->InterpolateByCases(top,node);
|
||||
timer_stop("SimplifyNode");
|
||||
}
|
||||
|
||||
bool LevelUsedInProof(unsigned level){
|
||||
|
@ -1927,6 +2140,39 @@ namespace Duality {
|
|||
throw "can't unmap node";
|
||||
}
|
||||
|
||||
void Generalize(Node *node){
|
||||
#ifndef USE_RPFP_CLONE
|
||||
tree->Generalize(top,node);
|
||||
#else
|
||||
RPFP_caching *clone_rpfp = duality->clone_rpfp;
|
||||
if(!node->Outgoing->map) return;
|
||||
Edge *clone_edge = clone_rpfp->GetEdgeClone(node->Outgoing->map);
|
||||
Node *clone_node = clone_edge->Parent;
|
||||
clone_node->Annotation = node->Annotation;
|
||||
for(unsigned i = 0; i < clone_edge->Children.size(); i++)
|
||||
clone_edge->Children[i]->Annotation = node->map->Outgoing->Children[i]->Annotation;
|
||||
clone_rpfp->GeneralizeCache(clone_edge);
|
||||
node->Annotation = clone_node->Annotation;
|
||||
#endif
|
||||
}
|
||||
|
||||
bool Propagate(Node *node){
|
||||
#ifdef USE_RPFP_CLONE
|
||||
RPFP_caching *clone_rpfp = duality->clone_rpfp;
|
||||
Edge *clone_edge = clone_rpfp->GetEdgeClone(node->Outgoing->map);
|
||||
Node *clone_node = clone_edge->Parent;
|
||||
clone_node->Annotation = node->map->Annotation;
|
||||
for(unsigned i = 0; i < clone_edge->Children.size(); i++)
|
||||
clone_edge->Children[i]->Annotation = node->map->Outgoing->Children[i]->Annotation;
|
||||
bool res = clone_rpfp->PropagateCache(clone_edge);
|
||||
if(res)
|
||||
node->Annotation = clone_node->Annotation;
|
||||
return res;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
|
||||
|
@ -1948,6 +2194,11 @@ namespace Duality {
|
|||
Duality *parent;
|
||||
bool some_updates;
|
||||
|
||||
#define NO_CONJ_ON_SIMPLE_LOOPS
|
||||
#ifdef NO_CONJ_ON_SIMPLE_LOOPS
|
||||
hash_set<Node *> simple_loops;
|
||||
#endif
|
||||
|
||||
Node *&covered_by(Node *node){
|
||||
return cm[node].covered_by;
|
||||
}
|
||||
|
@ -1982,6 +2233,24 @@ namespace Duality {
|
|||
Covering(Duality *_parent){
|
||||
parent = _parent;
|
||||
some_updates = false;
|
||||
|
||||
#ifdef NO_CONJ_ON_SIMPLE_LOOPS
|
||||
hash_map<Node *,std::vector<Edge *> > outgoing;
|
||||
for(unsigned i = 0; i < parent->rpfp->edges.size(); i++)
|
||||
outgoing[parent->rpfp->edges[i]->Parent].push_back(parent->rpfp->edges[i]);
|
||||
for(unsigned i = 0; i < parent->rpfp->nodes.size(); i++){
|
||||
Node * node = parent->rpfp->nodes[i];
|
||||
std::vector<Edge *> &outs = outgoing[node];
|
||||
if(outs.size() == 2){
|
||||
for(int j = 0; j < 2; j++){
|
||||
Edge *loop_edge = outs[j];
|
||||
if(loop_edge->Children.size() == 1 && loop_edge->Children[0] == loop_edge->Parent)
|
||||
simple_loops.insert(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
bool IsCoveredRec(hash_set<Node *> &memo, Node *node){
|
||||
|
@ -2144,6 +2413,11 @@ namespace Duality {
|
|||
}
|
||||
|
||||
bool CouldCover(Node *covered, Node *covering){
|
||||
#ifdef NO_CONJ_ON_SIMPLE_LOOPS
|
||||
// For simple loops, we rely on propagation, not covering
|
||||
if(simple_loops.find(covered->map) != simple_loops.end())
|
||||
return false;
|
||||
#endif
|
||||
#ifdef UNDERAPPROX_NODES
|
||||
// if(parent->underapprox_map.find(covering) != parent->underapprox_map.end())
|
||||
// return parent->underapprox_map[covering] == covered;
|
||||
|
|
|
@ -18,6 +18,13 @@ Revision History:
|
|||
|
||||
--*/
|
||||
|
||||
#ifdef WIN32
|
||||
#pragma warning(disable:4996)
|
||||
#pragma warning(disable:4800)
|
||||
#pragma warning(disable:4267)
|
||||
#pragma warning(disable:4101)
|
||||
#endif
|
||||
|
||||
#include "duality_wrapper.h"
|
||||
#include <iostream>
|
||||
#include "smt_solver.h"
|
||||
|
@ -30,10 +37,11 @@ Revision History:
|
|||
|
||||
namespace Duality {
|
||||
|
||||
solver::solver(Duality::context& c, bool extensional) : object(c), the_model(c) {
|
||||
solver::solver(Duality::context& c, bool extensional, bool models) : object(c), the_model(c) {
|
||||
params_ref p;
|
||||
p.set_bool("proof", true); // this is currently useless
|
||||
p.set_bool("model", true);
|
||||
if(models)
|
||||
p.set_bool("model", true);
|
||||
p.set_bool("unsat_core", true);
|
||||
p.set_bool("mbqi",true);
|
||||
p.set_str("mbqi.id","itp"); // use mbqi for quantifiers in interpolants
|
||||
|
@ -44,6 +52,7 @@ namespace Duality {
|
|||
m_solver = (*sf)(m(), p, true, true, true, ::symbol::null);
|
||||
m_solver->updt_params(p); // why do we have to do this?
|
||||
canceled = false;
|
||||
m_mode = m().proof_mode();
|
||||
}
|
||||
|
||||
expr context::constant(const std::string &name, const sort &ty){
|
||||
|
@ -338,6 +347,17 @@ expr context::make_quant(decl_kind op, const std::vector<sort> &_sorts, const st
|
|||
return ctx().cook(result);
|
||||
}
|
||||
|
||||
expr expr::qe_lite(const std::set<int> &idxs, bool index_of_bound) const {
|
||||
::qe_lite qe(m());
|
||||
expr_ref result(to_expr(raw()),m());
|
||||
proof_ref pf(m());
|
||||
uint_set uis;
|
||||
for(std::set<int>::const_iterator it=idxs.begin(), en = idxs.end(); it != en; ++it)
|
||||
uis.insert(*it);
|
||||
qe(uis,index_of_bound,result);
|
||||
return ctx().cook(result);
|
||||
}
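A hedged usage sketch of the new overload (the formula f and the chosen indices are hypothetical): judging from the body above, it applies light quantifier elimination only to the variables whose indices appear in idxs, with index_of_bound selecting how those indices are interpreted.

    std::set<int> idxs;
    idxs.insert(0);
    idxs.insert(2);
    expr reduced = f.qe_lite(idxs, true);   // eliminate only the listed variables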
|
||||
|
||||
expr clone_quantifier(const expr &q, const expr &b){
|
||||
return q.ctx().cook(q.m().update_quantifier(to_quantifier(q.raw()), to_expr(b.raw())));
|
||||
}
|
||||
|
@ -362,6 +382,18 @@ expr context::make_quant(decl_kind op, const std::vector<sort> &_sorts, const st
|
|||
}
|
||||
|
||||
|
||||
unsigned func_decl::arity() const {
|
||||
return (to_func_decl(raw())->get_arity());
|
||||
}
|
||||
|
||||
sort func_decl::domain(unsigned i) const {
|
||||
return sort(ctx(),(to_func_decl(raw())->get_domain(i)));
|
||||
}
|
||||
|
||||
sort func_decl::range() const {
|
||||
return sort(ctx(),(to_func_decl(raw())->get_range()));
|
||||
}
|
||||
|
||||
func_decl context::fresh_func_decl(char const * prefix, const std::vector<sort> &domain, sort const & range){
|
||||
std::vector < ::sort * > _domain(domain.size());
|
||||
for(unsigned i = 0; i < domain.size(); i++)
|
||||
|
@ -504,7 +536,10 @@ expr context::make_quant(decl_kind op, const std::vector<sort> &_sorts, const st
|
|||
add(linear_assumptions[i][j]);
|
||||
}
|
||||
|
||||
check_result res = check();
|
||||
check_result res = unsat;
|
||||
|
||||
if(!m_solver->get_proof())
|
||||
res = check();
|
||||
|
||||
if(res == unsat){
|
||||
|
||||
|
|
|
@ -26,6 +26,7 @@ Revision History:
|
|||
#include<sstream>
|
||||
#include<vector>
|
||||
#include<list>
|
||||
#include <set>
|
||||
#include"version.h"
|
||||
#include<limits.h>
|
||||
|
||||
|
@ -50,6 +51,7 @@ Revision History:
|
|||
#include"scoped_ctrl_c.h"
|
||||
#include"cancel_eh.h"
|
||||
#include"scoped_timer.h"
|
||||
#include"scoped_proof.h"
|
||||
|
||||
namespace Duality {
|
||||
|
||||
|
@ -449,6 +451,7 @@ namespace Duality {
|
|||
bool is_datatype() const { return get_sort().is_datatype(); }
|
||||
bool is_relation() const { return get_sort().is_relation(); }
|
||||
bool is_finite_domain() const { return get_sort().is_finite_domain(); }
|
||||
bool is_true() const {return is_app() && decl().get_decl_kind() == True; }
|
||||
|
||||
bool is_numeral() const {
|
||||
return is_app() && decl().get_decl_kind() == OtherArith && m().is_unique_value(to_expr(raw()));
|
||||
|
@ -560,6 +563,8 @@ namespace Duality {
|
|||
|
||||
expr qe_lite() const;
|
||||
|
||||
expr qe_lite(const std::set<int> &idxs, bool index_of_bound) const;
|
||||
|
||||
friend expr clone_quantifier(const expr &, const expr &);
|
||||
|
||||
friend expr clone_quantifier(const expr &q, const expr &b, const std::vector<expr> &patterns);
|
||||
|
@ -718,6 +723,7 @@ namespace Duality {
|
|||
m_model = s;
|
||||
return *this;
|
||||
}
|
||||
bool null() const {return !m_model;}
|
||||
|
||||
expr eval(expr const & n, bool model_completion=true) const {
|
||||
::model * _m = m_model.get();
|
||||
|
@ -811,8 +817,9 @@ namespace Duality {
|
|||
::solver *m_solver;
|
||||
model the_model;
|
||||
bool canceled;
|
||||
proof_gen_mode m_mode;
|
||||
public:
|
||||
solver(context & c, bool extensional = false);
|
||||
solver(context & c, bool extensional = false, bool models = true);
|
||||
solver(context & c, ::solver *s):object(c),the_model(c) { m_solver = s; canceled = false;}
|
||||
solver(solver const & s):object(s), the_model(s.the_model) { m_solver = s.m_solver; canceled = false;}
|
||||
~solver() {
|
||||
|
@ -824,6 +831,7 @@ namespace Duality {
|
|||
m_ctx = s.m_ctx;
|
||||
m_solver = s.m_solver;
|
||||
the_model = s.the_model;
|
||||
m_mode = s.m_mode;
|
||||
return *this;
|
||||
}
|
||||
struct cancel_exception {};
|
||||
|
@ -832,11 +840,12 @@ namespace Duality {
|
|||
throw(cancel_exception());
|
||||
}
|
||||
// void set(params const & p) { Z3_solver_set_params(ctx(), m_solver, p); check_error(); }
|
||||
void push() { m_solver->push(); }
|
||||
void pop(unsigned n = 1) { m_solver->pop(n); }
|
||||
void push() { scoped_proof_mode spm(m(),m_mode); m_solver->push(); }
|
||||
void pop(unsigned n = 1) { scoped_proof_mode spm(m(),m_mode); m_solver->pop(n); }
|
||||
// void reset() { Z3_solver_reset(ctx(), m_solver); check_error(); }
|
||||
void add(expr const & e) { m_solver->assert_expr(e); }
|
||||
void add(expr const & e) { scoped_proof_mode spm(m(),m_mode); m_solver->assert_expr(e); }
|
||||
check_result check() {
|
||||
scoped_proof_mode spm(m(),m_mode);
|
||||
checkpoint();
|
||||
lbool r = m_solver->check_sat(0,0);
|
||||
model_ref m;
|
||||
|
@ -845,6 +854,7 @@ namespace Duality {
|
|||
return to_check_result(r);
|
||||
}
|
||||
check_result check_keep_model(unsigned n, expr * const assumptions, unsigned *core_size = 0, expr *core = 0) {
|
||||
scoped_proof_mode spm(m(),m_mode);
|
||||
model old_model(the_model);
|
||||
check_result res = check(n,assumptions,core_size,core);
|
||||
if(the_model == 0)
|
||||
|
@ -852,6 +862,7 @@ namespace Duality {
|
|||
return res;
|
||||
}
|
||||
check_result check(unsigned n, expr * const assumptions, unsigned *core_size = 0, expr *core = 0) {
|
||||
scoped_proof_mode spm(m(),m_mode);
|
||||
checkpoint();
|
||||
std::vector< ::expr *> _assumptions(n);
|
||||
for (unsigned i = 0; i < n; i++) {
|
||||
|
@ -876,6 +887,7 @@ namespace Duality {
|
|||
}
|
||||
#if 0
|
||||
check_result check(expr_vector assumptions) {
|
||||
scoped_proof_mode spm(m(),m_mode);
|
||||
unsigned n = assumptions.size();
|
||||
z3array<Z3_ast> _assumptions(n);
|
||||
for (unsigned i = 0; i < n; i++) {
|
||||
|
@ -900,17 +912,19 @@ namespace Duality {
|
|||
int get_num_decisions();
|
||||
|
||||
void cancel(){
|
||||
scoped_proof_mode spm(m(),m_mode);
|
||||
canceled = true;
|
||||
if(m_solver)
|
||||
m_solver->cancel();
|
||||
}
|
||||
|
||||
unsigned get_scope_level(){return m_solver->get_scope_level();}
|
||||
unsigned get_scope_level(){ scoped_proof_mode spm(m(),m_mode); return m_solver->get_scope_level();}
|
||||
|
||||
void show();
|
||||
void show_assertion_ids();
|
||||
|
||||
proof get_proof(){
|
||||
scoped_proof_mode spm(m(),m_mode);
|
||||
return proof(ctx(),m_solver->get_proof());
|
||||
}
|
||||
|
||||
|
@ -1294,8 +1308,8 @@ namespace Duality {
|
|||
|
||||
class interpolating_solver : public solver {
|
||||
public:
|
||||
interpolating_solver(context &ctx)
|
||||
: solver(ctx)
|
||||
interpolating_solver(context &ctx, bool models = true)
|
||||
: solver(ctx, true, models)
|
||||
{
|
||||
weak_mode = false;
|
||||
}
|
||||
|
@ -1359,6 +1373,21 @@ namespace Duality {
|
|||
typedef double clock_t;
|
||||
clock_t current_time();
|
||||
inline void output_time(std::ostream &os, clock_t time){os << time;}
|
||||
|
||||
template <class X> class uptr {
|
||||
public:
|
||||
X *ptr;
|
||||
uptr(){ptr = 0;}
|
||||
void set(X *_ptr){
|
||||
if(ptr) delete ptr;
|
||||
ptr = _ptr;
|
||||
}
|
||||
X *get(){ return ptr;}
|
||||
~uptr(){
|
||||
if(ptr) delete ptr;
|
||||
}
|
||||
};
|
||||
|
||||
};
|
||||
|
||||
// to make Duality::ast hashable
|
||||
|
@ -1393,6 +1422,18 @@ namespace std {
|
|||
};
|
||||
}
|
||||
|
||||
// to make Duality::ast usable in ordered collections
|
||||
namespace std {
|
||||
template <>
|
||||
class less<Duality::expr> {
|
||||
public:
|
||||
bool operator()(const Duality::expr &s, const Duality::expr &t) const {
|
||||
// return s.raw() < t.raw();
|
||||
return s.raw()->get_id() < t.raw()->get_id();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// to make Duality::func_decl hashable
|
||||
namespace hash_space {
|
||||
template <>
|
||||
|
@ -1425,6 +1466,5 @@ namespace std {
|
|||
};
|
||||
}
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
|
|