
Merge branch 'bvsls' of https://git01.codeplex.com/z3 into opt

Christoph M. Wintersteiger 2014-03-27 13:13:10 +00:00
commit 3ab1766588
30 changed files with 1664 additions and 838 deletions


@ -25,12 +25,12 @@ Revision History:
#include <map>
// make hash_map and hash_set available
#ifndef _WINDOWS
using namespace stl_ext;
#endif
namespace Duality {
class implicant_solver;
/* Generic operations on Z3 formulas */
struct Z3User {
@ -82,6 +82,8 @@ namespace Duality {
Term SubstAtom(hash_map<ast, Term> &memo, const expr &t, const expr &atom, const expr &val);
Term CloneQuantAndSimp(const expr &t, const expr &body);
Term RemoveRedundancy(const Term &t);
Term IneqToEq(const Term &t);
@ -102,6 +104,9 @@ namespace Duality {
FuncDecl RenumberPred(const FuncDecl &f, int n);
Term ExtractStores(hash_map<ast, Term> &memo, const Term &t, std::vector<expr> &cnstrs, hash_map<ast,expr> &renaming);
protected:
void SummarizeRec(hash_set<ast> &memo, std::vector<expr> &lits, int &ops, const Term &t);
@ -197,6 +202,9 @@ protected:
/** Is this a background constant? */
virtual bool is_constant(const func_decl &f) = 0;
/** Get the constants in the background vocabulary */
virtual hash_set<func_decl> &get_constants() = 0;
/** Assert a background axiom. */
virtual void assert_axiom(const expr &axiom) = 0;
@ -290,6 +298,11 @@ protected:
return bckg.find(f) != bckg.end();
}
/** Get the constants in the background vocabulary */
virtual hash_set<func_decl> &get_constants(){
return bckg;
}
~iZ3LogicSolver(){
// delete ictx;
delete islvr;
@ -600,6 +613,8 @@ protected:
void FixCurrentState(Edge *root);
void FixCurrentStateFull(Edge *edge, const expr &extra);
void FixCurrentStateFull(Edge *edge, const std::vector<expr> &assumps, const hash_map<ast,expr> &renaming);
/** Declare a constant in the background theory. */
@ -731,6 +746,10 @@ protected:
struct bad_format {
};
// thrown on internal error
struct Bad {
};
/** Pop a scope (see Push). Note, you cannot pop axioms. */
void Pop(int num_scopes);
@ -942,10 +961,12 @@ protected:
Term UnderapproxFormula(const Term &f, hash_set<ast> &dont_cares);
void ImplicantFullRed(hash_map<ast,int> &memo, const Term &f, std::vector<Term> &lits,
hash_set<ast> &done, hash_set<ast> &dont_cares);
hash_set<ast> &done, hash_set<ast> &dont_cares, bool extensional = true);
Term UnderapproxFullFormula(const Term &f, hash_set<ast> &dont_cares);
public:
Term UnderapproxFullFormula(const Term &f, bool extensional = true);
protected:
Term ToRuleRec(Edge *e, hash_map<ast,Term> &memo, const Term &t, std::vector<expr> &quants);
hash_map<ast,Term> resolve_ite_memo;
@ -986,6 +1007,8 @@ protected:
void AddEdgeToSolver(Edge *edge);
void AddEdgeToSolver(implicant_solver &aux_solver, Edge *edge);
void AddToProofCore(hash_set<ast> &core);
void GetGroundLitsUnderQuants(hash_set<ast> *memo, const Term &f, std::vector<Term> &res, int under);
@ -1051,13 +1074,40 @@ protected:
public:
struct Counterexample {
class Counterexample {
private:
RPFP *tree;
RPFP::Node *root;
public:
Counterexample(){
tree = 0;
root = 0;
}
Counterexample(RPFP *_tree, RPFP::Node *_root){
tree = _tree;
root = _root;
}
~Counterexample(){
if(tree) delete tree;
}
void swap(Counterexample &other){
std::swap(tree,other.tree);
std::swap(root,other.root);
}
void set(RPFP *_tree, RPFP::Node *_root){
if(tree) delete tree;
tree = _tree;
root = _root;
}
void clear(){
if(tree) delete tree;
tree = 0;
}
RPFP *get_tree() const {return tree;}
RPFP::Node *get_root() const {return root;}
private:
Counterexample &operator=(const Counterexample &);
Counterexample(const Counterexample &);
};
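As an aside, here is a minimal standalone sketch (not part of this commit; the toy Tree/Cex names are hypothetical) of the ownership rule the new Counterexample class encodes: one owner for the tree, no copies, transfer only via swap:

// Standalone sketch, not from the commit: exactly one owner deletes the tree.
#include <cstdio>
#include <algorithm>
#include <utility>

struct Tree { ~Tree() { std::puts("tree deleted"); } };

class Cex {
    Tree *tree;
    Cex(const Cex &);              // copying disabled: it would double-delete
    Cex &operator=(const Cex &);
public:
    Cex() : tree(0) {}
    explicit Cex(Tree *t) : tree(t) {}
    ~Cex() { delete tree; }
    void swap(Cex &other) { std::swap(tree, other.tree); }  // transfer ownership
    void clear() { delete tree; tree = 0; }
};

int main() {
    Cex a(new Tree), b;
    b.swap(a);    // b now owns the tree, a holds 0
    a.clear();    // deleting a null pointer is a no-op
}                 // "tree deleted" is printed exactly once, by b's destructor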
/** Solve the problem. You can optionally give an old
@ -1067,7 +1117,7 @@ protected:
virtual bool Solve() = 0;
virtual Counterexample GetCounterexample() = 0;
virtual Counterexample &GetCounterexample() = 0;
virtual bool SetOption(const std::string &option, const std::string &value) = 0;
@ -1075,7 +1125,7 @@ protected:
is chiefly useful for abstraction refinement, when we want to
solve a series of similar problems. */
virtual void LearnFrom(Counterexample &old_cex) = 0;
virtual void LearnFrom(Solver *old_solver) = 0;
virtual ~Solver(){}


@ -1,169 +0,0 @@
/*++
Copyright (c) 2011 Microsoft Corporation
Module Name:
iz3hash.h
Abstract:
Wrapper for stl hash tables
Author:
Ken McMillan (kenmcmil)
Revision History:
--*/
// pull in the headers for hash_map and hash_set
// these live in non-standard places
#ifndef IZ3_HASH_H
#define IZ3_HASH_H
//#define USE_UNORDERED_MAP
#ifdef USE_UNORDERED_MAP
#define stl_ext std
#define hash_space std
#include <unordered_map>
#include <unordered_set>
#define hash_map unordered_map
#define hash_set unordered_set
#else
#if __GNUC__ >= 3
#undef __DEPRECATED
#define stl_ext __gnu_cxx
#define hash_space stl_ext
#include <ext/hash_map>
#include <ext/hash_set>
#else
#ifdef WIN32
#define stl_ext stdext
#define hash_space std
#include <hash_map>
#include <hash_set>
#else
#define stl_ext std
#define hash_space std
#include <hash_map>
#include <hash_set>
#endif
#endif
#endif
#include <string>
// stupid STL doesn't include hash function for class string
#ifndef WIN32
namespace stl_ext {
template <>
class hash<std::string> {
stl_ext::hash<char *> H;
public:
size_t operator()(const std::string &s) const {
return H(s.c_str());
}
};
}
#endif
namespace hash_space {
template <>
class hash<std::pair<int,int> > {
public:
size_t operator()(const std::pair<int,int> &p) const {
return p.first + p.second;
}
};
}
#ifdef WIN32
template <> inline
size_t stdext::hash_value<std::pair<int,int> >(const std::pair<int,int>& p)
{ // hash _Keyval to size_t value one-to-one
return p.first + p.second;
}
#endif
namespace hash_space {
template <class T>
class hash<std::pair<T *, T *> > {
public:
size_t operator()(const std::pair<T *,T *> &p) const {
return (size_t)p.first + (size_t)p.second;
}
};
}
#if 0
template <class T> inline
size_t stdext::hash_value<std::pair<T *, T *> >(const std::pair<T *, T *>& p)
{ // hash _Keyval to size_t value one-to-one
return (size_t)p.first + (size_t)p.second;
}
#endif
#ifdef WIN32
namespace std {
template <>
class less<std::pair<int,int> > {
public:
bool operator()(const pair<int,int> &x, const pair<int,int> &y) const {
return x.first < y.first || x.first == y.first && x.second < y.second;
}
};
}
namespace std {
template <class T>
class less<std::pair<T *,T *> > {
public:
bool operator()(const pair<T *,T *> &x, const pair<T *,T *> &y) const {
return (size_t)x.first < (size_t)y.first || (size_t)x.first == (size_t)y.first && (size_t)x.second < (size_t)y.second;
}
};
}
#endif
#ifndef WIN32
namespace stl_ext {
template <class T>
class hash<T *> {
public:
size_t operator()(const T *p) const {
return (size_t) p;
}
};
}
#endif
#ifdef WIN32
template <class K, class T>
class hash_map : public stl_ext::hash_map<K,T,stl_ext::hash_compare<K,std::less<K> > > {};
template <class K>
class hash_set : public stl_ext::hash_set<K,stl_ext::hash_compare<K,std::less<K> > > {};
#endif
#endif


@ -25,7 +25,7 @@ Revision History:
#include <string.h>
#include <stdlib.h>
#ifdef WIN32
#ifdef _WINDOWS
#pragma warning(disable:4996)
#pragma warning(disable:4800)
#pragma warning(disable:4267)


@ -21,7 +21,7 @@ Revision History:
#ifdef WIN32
#ifdef _WINDOWS
#pragma warning(disable:4996)
#pragma warning(disable:4800)
#pragma warning(disable:4267)
@ -36,10 +36,6 @@ Revision History:
#include "duality.h"
#include "duality_profiling.h"
#ifndef WIN32
// #define Z3OPS
#endif
// TODO: do we need these?
#ifdef Z3OPS
@ -150,8 +146,10 @@ namespace Duality {
}
return 0;
}
if(t.is_quantifier())
return CountOperatorsRec(memo,t.body())+2; // count 2 for a quantifier
if(t.is_quantifier()){
int nbv = t.get_quantifier_num_bound();
return CountOperatorsRec(memo,t.body()) + 2 * nbv; // count 2 for each bound variable
}
return 0;
}
@ -410,6 +408,33 @@ namespace Duality {
return res;
}
Z3User::Term Z3User::ExtractStores(hash_map<ast, Term> &memo, const Term &t, std::vector<expr> &cnstrs, hash_map<ast,expr> &renaming)
{
std::pair<ast,Term> foo(t,expr(ctx));
std::pair<hash_map<ast,Term>::iterator, bool> bar = memo.insert(foo);
Term &res = bar.first->second;
if(!bar.second) return res;
if (t.is_app()) {
func_decl f = t.decl();
std::vector<Term> args;
int nargs = t.num_args();
for(int i = 0; i < nargs; i++)
args.push_back(ExtractStores(memo, t.arg(i),cnstrs,renaming));
res = f(args.size(),&args[0]);
if(f.get_decl_kind() == Store){
func_decl fresh = ctx.fresh_func_decl("@arr", res.get_sort());
expr y = fresh();
expr equ = ctx.make(Equal,y,res);
cnstrs.push_back(equ);
renaming[y] = res;
res = y;
}
}
else res = t;
return res;
}
bool Z3User::IsLiteral(const expr &lit, expr &atom, expr &val){
if(!(lit.is_quantifier() && IsClosedFormula(lit))){
if(!lit.is_app())
@ -523,6 +548,25 @@ namespace Duality {
return foo;
}
Z3User::Term Z3User::CloneQuantAndSimp(const expr &t, const expr &body){
if(t.is_quantifier_forall() && body.is_app() && body.decl().get_decl_kind() == And){
int nargs = body.num_args();
std::vector<expr> args(nargs);
for(int i = 0; i < nargs; i++)
args[i] = CloneQuantAndSimp(t, body.arg(i));
return ctx.make(And,args);
}
if(!t.is_quantifier_forall() && body.is_app() && body.decl().get_decl_kind() == Or){
int nargs = body.num_args();
std::vector<expr> args(nargs);
for(int i = 0; i < nargs; i++)
args[i] = CloneQuantAndSimp(t, body.arg(i));
return ctx.make(Or,args);
}
return clone_quantifier(t,body);
}
Z3User::Term Z3User::SubstAtom(hash_map<ast, Term> &memo, const expr &t, const expr &atom, const expr &val){
std::pair<ast,Term> foo(t,expr(ctx));
std::pair<hash_map<ast,Term>::iterator, bool> bar = memo.insert(foo);
@ -1832,7 +1876,7 @@ namespace Duality {
}
void RPFP::ImplicantFullRed(hash_map<ast,int> &memo, const Term &f, std::vector<Term> &lits,
hash_set<ast> &done, hash_set<ast> &dont_cares){
hash_set<ast> &done, hash_set<ast> &dont_cares, bool extensional){
if(done.find(f) != done.end())
return; /* already processed */
if(f.is_app()){
@ -1840,7 +1884,7 @@ namespace Duality {
decl_kind k = f.decl().get_decl_kind();
if(k == Implies || k == Iff || k == And || k == Or || k == Not){
for(int i = 0; i < nargs; i++)
ImplicantFullRed(memo,f.arg(i),lits,done,dont_cares);
ImplicantFullRed(memo,f.arg(i),lits,done,dont_cares, extensional);
goto done;
}
}
@ -1848,6 +1892,15 @@ namespace Duality {
if(dont_cares.find(f) == dont_cares.end()){
int b = SubtermTruth(memo,f);
if(b != 0 && b != 1) goto done;
if(f.is_app() && f.decl().get_decl_kind() == Equal && f.arg(0).is_array()){
if(b == 1 && !extensional){
expr x = dualModel.eval(f.arg(0)); expr y = dualModel.eval(f.arg(1));
if(!eq(x,y))
b = 0;
}
if(b == 0)
goto done;
}
expr bv = (b==1) ? f : !f;
lits.push_back(bv);
}
@ -1969,12 +2022,16 @@ namespace Duality {
return conjoin(lits);
}
RPFP::Term RPFP::UnderapproxFullFormula(const Term &f, hash_set<ast> &dont_cares){
RPFP::Term RPFP::UnderapproxFullFormula(const Term &f, bool extensional){
hash_set<ast> dont_cares;
resolve_ite_memo.clear();
timer_start("UnderapproxFormula");
/* first compute truth values of subterms */
hash_map<ast,int> memo;
hash_set<ast> done;
std::vector<Term> lits;
ImplicantFullRed(memo,f,lits,done,dont_cares);
ImplicantFullRed(memo,f,lits,done,dont_cares, extensional);
timer_stop("UnderapproxFormula");
/* return conjunction of literals */
return conjoin(lits);
}
@ -2519,22 +2576,6 @@ namespace Duality {
ConstrainEdgeLocalized(edge,eu);
}
void RPFP::FixCurrentStateFull(Edge *edge, const expr &extra){
hash_set<ast> dont_cares;
resolve_ite_memo.clear();
timer_start("UnderapproxFormula");
Term dual = edge->dual.null() ? ctx.bool_val(true) : edge->dual;
for(unsigned i = 0; i < edge->constraints.size(); i++)
dual = dual && edge->constraints[i];
// dual = dual && extra;
Term eu = UnderapproxFullFormula(dual,dont_cares);
timer_stop("UnderapproxFormula");
ConstrainEdgeLocalized(edge,eu);
}
void RPFP::GetGroundLitsUnderQuants(hash_set<ast> *memo, const Term &f, std::vector<Term> &res, int under){
if(memo[under].find(f) != memo[under].end())
return;
@ -2817,7 +2858,91 @@ namespace Duality {
return ctx.make(And,lits);
}
// set up edge constraint in aux solver
/* This is a wrapper for a solver that is intended to compute
implicants from models. It works around a problem in Z3 with
models in the non-extensional array theory. It does this by
naming all of the store terms in a formula. That is, (store ...)
is replaced by "name" with an added constraint name = (store
...). This allows us to determine from the model whether an array
equality is true or false (it is false if the two sides are
mapped to different function symbols, even if they have the same
contents).
*/
struct implicant_solver {
RPFP *owner;
solver &aux_solver;
std::vector<expr> assumps, namings;
std::vector<int> assump_stack, naming_stack;
hash_map<ast,expr> renaming, renaming_memo;
void add(const expr &e){
expr t = e;
if(!aux_solver.extensional_array_theory()){
unsigned i = namings.size();
t = owner->ExtractStores(renaming_memo,t,namings,renaming);
for(; i < namings.size(); i++)
aux_solver.add(namings[i]);
}
assumps.push_back(t);
aux_solver.add(t);
}
void push() {
assump_stack.push_back(assumps.size());
naming_stack.push_back(namings.size());
aux_solver.push();
}
// When we pop the solver, we have to re-add any namings that were lost
void pop(int n) {
aux_solver.pop(n);
int new_assumps = assump_stack[assump_stack.size()-n];
int new_namings = naming_stack[naming_stack.size()-n];
for(unsigned i = new_namings; i < namings.size(); i++)
aux_solver.add(namings[i]);
assumps.resize(new_assumps);
namings.resize(new_namings);
assump_stack.resize(assump_stack.size()-1);
naming_stack.resize(naming_stack.size()-1);
}
check_result check() {
return aux_solver.check();
}
model get_model() {
return aux_solver.get_model();
}
expr get_implicant() {
owner->dualModel = aux_solver.get_model();
expr dual = owner->ctx.make(And,assumps);
bool ext = aux_solver.extensional_array_theory();
expr eu = owner->UnderapproxFullFormula(dual,ext);
// if we renamed store terms, we have to undo
if(!ext)
eu = owner->SubstRec(renaming,eu);
return eu;
}
implicant_solver(RPFP *_owner, solver &_aux_solver)
: owner(_owner), aux_solver(_aux_solver)
{}
};
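A standalone sketch, not from this commit, of the naming trick described above, using plain strings in place of Z3 terms (the "@arr!0" name is hypothetical): each store term is hidden behind a fresh constant, and an equation is recorded so it can be asserted and later undone:

// Standalone sketch of the store-naming transformation (not from the commit).
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
    std::string formula = "(= A (store B i v))";
    std::vector<std::string> namings;                 // constraints added to the solver
    std::map<std::string, std::string> renaming;      // fresh name -> original term

    std::string store_term = "(store B i v)";
    std::string fresh = "@arr!0";                     // hypothetical fresh constant
    namings.push_back("(= " + fresh + " " + store_term + ")");
    renaming[fresh] = store_term;

    // What the solver actually sees: the store term is hidden behind its name,
    // so an array equality can be judged by comparing the names in the model.
    std::string named_formula = "(= A " + fresh + ")";
    std::cout << named_formula << "\n" << namings[0] << "\n";
}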
// set up edge constraint in aux solver
void RPFP::AddEdgeToSolver(implicant_solver &aux_solver, Edge *edge){
if(!edge->dual.null())
aux_solver.add(edge->dual);
for(unsigned i = 0; i < edge->constraints.size(); i++){
expr tl = edge->constraints[i];
aux_solver.add(tl);
}
}
void RPFP::AddEdgeToSolver(Edge *edge){
if(!edge->dual.null())
aux_solver.add(edge->dual);
@ -2827,57 +2952,132 @@ namespace Duality {
}
}
static int by_case_counter = 0;
void RPFP::InterpolateByCases(Node *root, Node *node){
timer_start("InterpolateByCases");
bool axioms_added = false;
aux_solver.push();
AddEdgeToSolver(node->Outgoing);
hash_set<ast> axioms_needed;
const std::vector<expr> &theory = ls->get_axioms();
for(unsigned i = 0; i < theory.size(); i++)
axioms_needed.insert(theory[i]);
implicant_solver is(this,aux_solver);
is.push();
AddEdgeToSolver(is,node->Outgoing);
node->Annotation.SetEmpty();
hash_set<ast> *core = new hash_set<ast>;
core->insert(node->Outgoing->dual);
while(1){
aux_solver.push();
by_case_counter++;
is.push();
expr annot = !GetAnnotation(node);
aux_solver.add(annot);
if(aux_solver.check() == unsat){
aux_solver.pop(1);
is.add(annot);
if(is.check() == unsat){
is.pop(1);
break;
}
dualModel = aux_solver.get_model();
aux_solver.pop(1);
is.pop(1);
Push();
FixCurrentStateFull(node->Outgoing,annot);
ConstrainEdgeLocalized(node->Outgoing,!GetAnnotation(node));
ConstrainEdgeLocalized(node->Outgoing,is.get_implicant());
ConstrainEdgeLocalized(node->Outgoing,!GetAnnotation(node)); //TODO: need this?
check_result foo = Check(root);
if(foo != unsat)
if(foo != unsat){
slvr().print("should_be_unsat.smt2");
throw "should be unsat";
AddToProofCore(*core);
}
std::vector<expr> assumps, axioms_to_add;
slvr().get_proof().get_assumptions(assumps);
for(unsigned i = 0; i < assumps.size(); i++){
(*core).insert(assumps[i]);
if(axioms_needed.find(assumps[i]) != axioms_needed.end()){
axioms_to_add.push_back(assumps[i]);
axioms_needed.erase(assumps[i]);
}
}
// AddToProofCore(*core);
Transformer old_annot = node->Annotation;
SolveSingleNode(root,node);
{
expr itp = GetAnnotation(node);
dualModel = aux_solver.get_model();
dualModel = is.get_model(); // TODO: what does this mean?
std::vector<expr> case_lits;
itp = StrengthenFormulaByCaseSplitting(itp, case_lits);
SetAnnotation(node,itp);
node->Annotation.Formula = node->Annotation.Formula.simplify();
}
for(unsigned i = 0; i < axioms_to_add.size(); i++)
is.add(axioms_to_add[i]);
#define TEST_BAD
#ifdef TEST_BAD
{
static int bad_count = 0, num_bads = 1;
if(bad_count >= num_bads){
bad_count = 0;
num_bads = num_bads * 2;
Pop(1);
is.pop(1);
delete core;
timer_stop("InterpolateByCases");
throw Bad();
}
bad_count++;
}
#endif
if(node->Annotation.IsEmpty()){
if(!axioms_added){
// add the axioms on the off chance they are useful
const std::vector<expr> &theory = ls->get_axioms();
for(unsigned i = 0; i < theory.size(); i++)
aux_solver.add(theory[i]);
is.add(theory[i]);
axioms_added = true;
}
else {
#ifdef KILL_ON_BAD_INTERPOLANT
std::cout << "bad in InterpolateByCase -- core:\n";
#if 0
std::vector<expr> assumps;
slvr().get_proof().get_assumptions(assumps);
for(unsigned i = 0; i < assumps.size(); i++)
assumps[i].show();
#endif
std::cout << "checking for inconsistency\n";
std::cout << "model:\n";
is.get_model().show();
expr impl = is.get_implicant();
std::vector<expr> conjuncts;
CollectConjuncts(impl,conjuncts,true);
std::cout << "impl:\n";
for(unsigned i = 0; i < conjuncts.size(); i++)
conjuncts[i].show();
std::cout << "annot:\n";
annot.show();
is.add(annot);
for(unsigned i = 0; i < conjuncts.size(); i++)
is.add(conjuncts[i]);
if(is.check() == unsat){
std::cout << "inconsistent!\n";
std::vector<expr> is_assumps;
is.aux_solver.get_proof().get_assumptions(is_assumps);
std::cout << "core:\n";
for(unsigned i = 0; i < is_assumps.size(); i++)
is_assumps[i].show();
}
else {
std::cout << "consistent!\n";
is.aux_solver.print("should_be_inconsistent.smt2");
}
std::cout << "by_case_counter = " << by_case_counter << "\n";
throw "ack!";
#endif
Pop(1);
is.pop(1);
delete core;
timer_stop("InterpolateByCases");
throw Bad();
}
}
Pop(1);
@ -2886,7 +3086,8 @@ namespace Duality {
if(proof_core)
delete proof_core; // shouldn't happen
proof_core = core;
aux_solver.pop(1);
is.pop(1);
timer_stop("InterpolateByCases");
}
void RPFP::Generalize(Node *root, Node *node){
@ -3187,7 +3388,7 @@ namespace Duality {
func_decl f = t.decl();
std::vector<Term> args;
int nargs = t.num_args();
if(nargs == 0)
if(nargs == 0 && f.get_decl_kind() == Uninterpreted)
ls->declare_constant(f); // keep track of background constants
for(int i = 0; i < nargs; i++)
args.push_back(SubstBoundRec(memo, subst, level, t.arg(i)));


@ -19,7 +19,7 @@ Revision History:
--*/
#ifdef WIN32
#ifdef _WINDOWS
#pragma warning(disable:4996)
#pragma warning(disable:4800)
#pragma warning(disable:4267)
@ -54,6 +54,7 @@ Revision History:
// #define KEEP_EXPANSIONS
// #define USE_CACHING_RPFP
// #define PROPAGATE_BEFORE_CHECK
#define NEW_STRATIFIED_INLINING
#define USE_RPFP_CLONE
#define USE_NEW_GEN_CANDS
@ -82,7 +83,7 @@ namespace Duality {
rpfp = _rpfp;
}
virtual void Extend(RPFP::Node *node){}
virtual void Update(RPFP::Node *node, const RPFP::Transformer &update){}
virtual void Update(RPFP::Node *node, const RPFP::Transformer &update, bool eager){}
virtual void Bound(RPFP::Node *node){}
virtual void Expand(RPFP::Edge *edge){}
virtual void AddCover(RPFP::Node *covered, std::vector<RPFP::Node *> &covering){}
@ -94,6 +95,7 @@ namespace Duality {
virtual void UpdateUnderapprox(RPFP::Node *node, const RPFP::Transformer &update){}
virtual void Reject(RPFP::Edge *edge, const std::vector<RPFP::Node *> &Children){}
virtual void Message(const std::string &msg){}
virtual void Depth(int){}
virtual ~Reporter(){}
};
@ -124,6 +126,7 @@ namespace Duality {
rpfp = _rpfp;
reporter = 0;
heuristic = 0;
unwinding = 0;
FullExpand = false;
NoConj = false;
FeasibleEdges = true;
@ -131,6 +134,7 @@ namespace Duality {
Report = false;
StratifiedInlining = false;
RecursionBound = -1;
BatchExpand = false;
{
scoped_no_proof no_proofs_please(ctx.m());
#ifdef USE_RPFP_CLONE
@ -151,6 +155,7 @@ namespace Duality {
#ifdef USE_NEW_GEN_CANDS
delete gen_cands_rpfp;
#endif
if(unwinding) delete unwinding;
}
#ifdef USE_RPFP_CLONE
@ -249,6 +254,19 @@ namespace Duality {
virtual void Done() {}
};
/** The Proposer class proposes conjectures eagerly. These can come
from any source, including predicate abstraction, templates, or
previous solver runs. The proposed conjectures are checked
with low effort when the unwinding is expanded.
*/
class Proposer {
public:
/** Given a node in the unwinding, propose some conjectures */
virtual std::vector<RPFP::Transformer> &Propose(Node *node) = 0;
virtual ~Proposer(){};
};
class Covering; // see below
@ -278,6 +296,7 @@ namespace Duality {
hash_map<Node *, Node *> underapprox_map; // maps underapprox nodes to the nodes they approximate
int last_decisions;
hash_set<Node *> overapproxes;
std::vector<Proposer *> proposers;
#ifdef BOUNDED
struct Counter {
@ -292,24 +311,22 @@ namespace Duality {
virtual bool Solve(){
reporter = Report ? CreateStdoutReporter(rpfp) : new Reporter(rpfp);
#ifndef LOCALIZE_CONJECTURES
heuristic = !cex.tree ? new Heuristic(rpfp) : new ReplayHeuristic(rpfp,cex);
heuristic = !cex.get_tree() ? new Heuristic(rpfp) : new ReplayHeuristic(rpfp,cex);
#else
heuristic = !cex.tree ? (Heuristic *)(new LocalHeuristic(rpfp))
heuristic = !cex.get_tree() ? (Heuristic *)(new LocalHeuristic(rpfp))
: (Heuristic *)(new ReplayHeuristic(rpfp,cex));
#endif
cex.tree = 0; // heuristic now owns it
cex.clear(); // in case we didn't use it for heuristic
if(unwinding) delete unwinding;
unwinding = new RPFP(rpfp->ls);
unwinding->HornClauses = rpfp->HornClauses;
indset = new Covering(this);
last_decisions = 0;
CreateEdgesByChildMap();
CreateLeaves();
#ifndef TOP_DOWN
if(!StratifiedInlining){
if(FeasibleEdges)NullaryCandidates();
else InstantiateAllEdges();
}
CreateInitialUnwinding();
#else
CreateLeaves();
for(unsigned i = 0; i < leaves.size(); i++)
if(!SatisfyUpperBound(leaves[i]))
return false;
@ -321,11 +338,29 @@ namespace Duality {
// print_profile(std::cout);
delete indset;
delete heuristic;
delete unwinding;
// delete unwinding; // keep the unwinding for future mining of predicates
delete reporter;
for(unsigned i = 0; i < proposers.size(); i++)
delete proposers[i];
return res;
}
void CreateInitialUnwinding(){
if(!StratifiedInlining){
CreateLeaves();
if(FeasibleEdges)NullaryCandidates();
else InstantiateAllEdges();
}
else {
#ifdef NEW_STRATIFIED_INLINING
#else
CreateLeaves();
#endif
}
}
void Cancel(){
// TODO
}
@ -346,15 +381,19 @@ namespace Duality {
}
#endif
virtual void LearnFrom(Counterexample &old_cex){
cex = old_cex;
virtual void LearnFrom(Solver *other_solver){
// get the counterexample as a guide
cex.swap(other_solver->GetCounterexample());
// propose conjectures based on old unwinding
Duality *old_duality = dynamic_cast<Duality *>(other_solver);
if(old_duality)
proposers.push_back(new HistoryProposer(old_duality,this));
}
/** Return the counterexample */
virtual Counterexample GetCounterexample(){
Counterexample res = cex;
cex.tree = 0; // Cex now belongs to caller
return res;
/** Return a reference to the counterexample */
virtual Counterexample &GetCounterexample(){
return cex;
}
// options
@ -365,6 +404,7 @@ namespace Duality {
bool Report; // spew on stdout
bool StratifiedInlining; // Do stratified inlining as preprocessing step
int RecursionBound; // Recursion bound for bounded verification
bool BatchExpand;
bool SetBoolOption(bool &opt, const std::string &value){
if(value == "0") {
@ -403,6 +443,9 @@ namespace Duality {
if(option == "stratified_inlining"){
return SetBoolOption(StratifiedInlining,value);
}
if(option == "batch_expand"){
return SetBoolOption(BatchExpand,value);
}
if(option == "recursion_bound"){
return SetIntOption(RecursionBound,value);
}
@ -514,7 +557,11 @@ namespace Duality {
c.Children.resize(edge->Children.size());
for(unsigned j = 0; j < c.Children.size(); j++)
c.Children[j] = leaf_map[edge->Children[j]];
Extend(c);
Node *new_node;
Extend(c,new_node);
#ifdef EARLY_EXPAND
TryExpandNode(new_node);
#endif
}
for(Unexpanded::iterator it = unexpanded.begin(), en = unexpanded.end(); it != en; ++it)
indset->Add(*it);
@ -766,16 +813,14 @@ namespace Duality {
}
/* For stratified inlining, we need a topological sort of the
nodes. */
hash_map<Node *, int> TopoSort;
int TopoSortCounter;
std::vector<Edge *> SortedEdges;
void DoTopoSortRec(Node *node){
if(TopoSort.find(node) != TopoSort.end())
return;
TopoSort[node] = TopoSortCounter++; // just to break cycles
TopoSort[node] = INT_MAX; // just to break cycles
Edge *edge = node->Outgoing; // note, this is just *one* outgoing edge
if(edge){
std::vector<Node *> &chs = edge->Children;
@ -783,22 +828,81 @@ namespace Duality {
DoTopoSortRec(chs[i]);
}
TopoSort[node] = TopoSortCounter++;
SortedEdges.push_back(edge);
}
void DoTopoSort(){
TopoSort.clear();
SortedEdges.clear();
TopoSortCounter = 0;
for(unsigned i = 0; i < nodes.size(); i++)
DoTopoSortRec(nodes[i]);
}
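A standalone sketch (not part of the commit; node ids are plain ints here) of the post-order DFS above, including the INT_MAX pre-mark that keeps a back edge from recursing:

// Standalone sketch of the cycle-breaking topological sort (not from the commit).
#include <climits>
#include <iostream>
#include <map>
#include <vector>

std::map<int, std::vector<int> > children;  // node -> its children (one edge per node)
std::map<int, int> topo;                    // node -> topological index
std::vector<int> sorted;                    // nodes in child-before-parent order
int counter = 0;

void visit(int n) {
    if (topo.find(n) != topo.end()) return;
    topo[n] = INT_MAX;                      // pre-mark so a back edge stops the recursion
    std::vector<int> &chs = children[n];
    for (unsigned i = 0; i < chs.size(); i++)
        visit(chs[i]);
    topo[n] = counter++;
    sorted.push_back(n);
}

int main() {
    children[0].push_back(1);
    children[1].push_back(2);
    children[2].push_back(0);               // back edge: 2 -> 0 closes a cycle
    visit(0);
    for (unsigned i = 0; i < sorted.size(); i++)
        std::cout << sorted[i] << " ";      // prints "2 1 0"
    std::cout << "\n";
}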
int StratifiedLeafCount;
#ifdef NEW_STRATIFIED_INLINING
/** Stratified inlining builds an initial layered unwinding before
switching to the LAWI strategy. Currently the number of layers
is one. Only nodes that are the targets of back edges are
considered to be leaves. This assumes we have already computed a
topological sort.
*/
bool DoStratifiedInlining(){
DoTopoSort();
int depth = 1; // TODO: make this an option
std::vector<hash_map<Node *,Node *> > unfolding_levels(depth+1);
for(int level = 1; level <= depth; level++)
for(unsigned i = 0; i < SortedEdges.size(); i++){
Edge *edge = SortedEdges[i];
Node *parent = edge->Parent;
std::vector<Node *> &chs = edge->Children;
std::vector<Node *> my_chs(chs.size());
for(unsigned j = 0; j < chs.size(); j++){
Node *child = chs[j];
int ch_level = TopoSort[child] >= TopoSort[parent] ? level-1 : level;
if(unfolding_levels[ch_level].find(child) == unfolding_levels[ch_level].end()){
if(ch_level == 0)
unfolding_levels[0][child] = CreateLeaf(child);
else
throw InternalError("in levelized unwinding");
}
my_chs[j] = unfolding_levels[ch_level][child];
}
Candidate cand; cand.edge = edge; cand.Children = my_chs;
Node *new_node;
bool ok = Extend(cand,new_node);
MarkExpanded(new_node); // we don't expand here -- just mark it done
if(!ok) return false; // got a counterexample
unfolding_levels[level][parent] = new_node;
}
return true;
}
Node *CreateLeaf(Node *node){
RPFP::Node *nchild = CreateNodeInstance(node);
MakeLeaf(nchild, /* do_not_expand = */ true);
nchild->Annotation.SetEmpty();
return nchild;
}
void MarkExpanded(Node *node){
if(unexpanded.find(node) != unexpanded.end()){
unexpanded.erase(node);
insts_of_node[node->map].push_back(node);
}
}
#else
/** In stratified inlining, we build the unwinding from the bottom
down, trying to satisfy the node bounds. We do this as a pre-pass,
limiting the expansion. If we get a counterexample, we are done,
else we continue as usual expanding the unwinding upward.
*/
int StratifiedLeafCount;
bool DoStratifiedInlining(){
timer_start("StratifiedInlining");
@ -821,6 +925,8 @@ namespace Duality {
return true;
}
#endif
/** Here, we do the downward expansion for stratified inlining */
hash_map<Node *, Node *> LeafMap, StratifiedLeafMap;
@ -907,9 +1013,14 @@ namespace Duality {
}
Candidate cand = candidates.front();
candidates.pop_front();
if(CandidateFeasible(cand))
if(!Extend(cand))
if(CandidateFeasible(cand)){
Node *new_node;
if(!Extend(cand,new_node))
return false;
#ifdef EARLY_EXPAND
TryExpandNode(new_node);
#endif
}
}
}
@ -929,9 +1040,9 @@ namespace Duality {
Node *CreateUnderapproxNode(Node *node){
// cex.tree->ComputeUnderapprox(cex.root,0);
// cex.get_tree()->ComputeUnderapprox(cex.get_root(),0);
RPFP::Node *under_node = CreateNodeInstance(node->map /* ,StratifiedLeafCount-- */);
under_node->Annotation.IntersectWith(cex.root->Underapprox);
under_node->Annotation.IntersectWith(cex.get_root()->Underapprox);
AddThing(under_node->Annotation.Formula);
Edge *e = unwinding->CreateLowerBoundEdge(under_node);
under_node->Annotation.SetFull(); // allow this node to cover others
@ -967,9 +1078,8 @@ namespace Duality {
ExpandNodeFromCoverFail(node);
}
#endif
if(_cex) *_cex = cex;
else delete cex.tree; // delete the cex if not required
cex.tree = 0;
if(_cex) (*_cex).swap(cex); // return the cex if asked
cex.clear(); // throw away the useless cex
node->Bound = save; // put back original bound
timer_stop("ProveConjecture");
return false;
@ -1349,16 +1459,20 @@ namespace Duality {
}
}
bool UpdateNodeToNode(Node *node, Node *top){
if(!node->Annotation.SubsetEq(top->Annotation)){
reporter->Update(node,top->Annotation);
indset->Update(node,top->Annotation);
bool Update(Node *node, const RPFP::Transformer &fact, bool eager=false){
if(!node->Annotation.SubsetEq(fact)){
reporter->Update(node,fact,eager);
indset->Update(node,fact);
updated_nodes.insert(node->map);
node->Annotation.IntersectWith(top->Annotation);
node->Annotation.IntersectWith(fact);
return true;
}
return false;
}
bool UpdateNodeToNode(Node *node, Node *top){
return Update(node,top->Annotation);
}
/** Update the unwinding solution, using an interpolant for the
derivation tree. */
@ -1400,8 +1514,7 @@ namespace Duality {
// std::cout << "decisions: " << (end_decs - start_decs) << std::endl;
last_decisions = end_decs - start_decs;
if(res){
cex.tree = dt.tree;
cex.root = dt.top;
cex.set(dt.tree,dt.top); // note tree is now owned by cex
if(UseUnderapprox){
UpdateWithCounterexample(node,dt.tree,dt.top);
}
@ -1413,6 +1526,64 @@ namespace Duality {
delete dtp;
return !res;
}
/* For a given node in the unwinding, get conjectures from the
proposers and check them locally. Update the node with any true
conjectures.
*/
void DoEagerDeduction(Node *node){
for(unsigned i = 0; i < proposers.size(); i++){
const std::vector<RPFP::Transformer> &conjectures = proposers[i]->Propose(node);
for(unsigned j = 0; j < conjectures.size(); j++){
const RPFP::Transformer &conjecture = conjectures[j];
RPFP::Transformer bound(conjecture);
std::vector<expr> conj_vec;
unwinding->CollectConjuncts(bound.Formula,conj_vec);
for(unsigned k = 0; k < conj_vec.size(); k++){
bound.Formula = conj_vec[k];
if(CheckEdgeCaching(node->Outgoing,bound) == unsat)
Update(node,bound, /* eager = */ true);
//else
//std::cout << "conjecture failed\n";
}
}
}
}
check_result CheckEdge(RPFP *checker, Edge *edge){
Node *root = edge->Parent;
checker->Push();
checker->AssertNode(root);
checker->AssertEdge(edge,1,true);
check_result res = checker->Check(root);
checker->Pop(1);
return res;
}
check_result CheckEdgeCaching(Edge *unwinding_edge, const RPFP::Transformer &bound){
// use a dedicated solver for this edge
// TODO: can this mess be hidden somehow?
RPFP_caching *checker = gen_cands_rpfp; // TODO: a good choice?
Edge *edge = unwinding_edge->map; // get the edge in the original RPFP
RPFP_caching::scoped_solver_for_edge ssfe(checker,edge,true /* models */, true /*axioms*/);
Edge *checker_edge = checker->GetEdgeClone(edge);
// copy the annotations and bound to the clone
Node *root = checker_edge->Parent;
root->Bound = bound;
for(unsigned j = 0; j < checker_edge->Children.size(); j++){
Node *oc = unwinding_edge->Children[j];
Node *nc = checker_edge->Children[j];
nc->Annotation = oc->Annotation;
}
return CheckEdge(checker,checker_edge);
}
/* If the counterexample derivation is partial due to
use of underapproximations, complete it. */
@ -1421,10 +1592,7 @@ namespace Duality {
DerivationTree dt(this,unwinding,reporter,heuristic,FullExpand);
bool res = dt.Derive(unwinding,node,UseUnderapprox,true); // build full tree
if(!res) throw "Duality internal error in BuildFullCex";
if(cex.tree)
delete cex.tree;
cex.tree = dt.tree;
cex.root = dt.top;
cex.set(dt.tree,dt.top);
}
void UpdateBackEdges(Node *node){
@ -1447,25 +1615,23 @@ namespace Duality {
}
/** Extend the unwinding, keeping it solved. */
bool Extend(Candidate &cand){
bool Extend(Candidate &cand, Node *&node){
timer_start("Extend");
Node *node = CreateNodeInstance(cand.edge->Parent);
node = CreateNodeInstance(cand.edge->Parent);
CreateEdgeInstance(cand.edge,node,cand.Children);
UpdateBackEdges(node);
reporter->Extend(node);
bool res = SatisfyUpperBound(node);
DoEagerDeduction(node); // first be eager...
bool res = SatisfyUpperBound(node); // then be lazy
if(res) indset->CloseDescendants(node);
else {
#ifdef UNDERAPPROX_NODES
ExpandUnderapproxNodes(cex.tree, cex.root);
ExpandUnderapproxNodes(cex.get_tree(), cex.get_root());
#endif
if(UseUnderapprox) BuildFullCex(node);
timer_stop("Extend");
return res;
}
#ifdef EARLY_EXPAND
TryExpandNode(node);
#endif
timer_stop("Extend");
return res;
}
@ -1563,6 +1729,8 @@ namespace Duality {
class DerivationTree {
public:
virtual ~DerivationTree(){}
DerivationTree(Duality *_duality, RPFP *rpfp, Reporter *_reporter, Heuristic *_heuristic, bool _full_expand)
: slvr(rpfp->slvr()),
ctx(rpfp->ctx)
@ -1912,10 +2080,28 @@ namespace Duality {
stack.push_back(stack_entry());
}
struct DoRestart {};
virtual bool Build(){
while (true) {
try {
return BuildMain();
}
catch (const DoRestart &) {
// clear the stack and try again
updated_nodes.clear();
while(stack.size() > 1)
PopLevel();
reporter->Message("restarted");
}
}
}
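A standalone sketch, not from this commit, of the restart pattern in Build(): build_main and the counters are invented stand-ins; the point is that a DoRestart exception unwinds the expansion stack to the root level and the main loop simply retries:

// Standalone sketch of the retry-on-restart loop (not from the commit).
#include <iostream>
#include <vector>

struct DoRestart {};

bool build_main(int &attempts) {
    if (++attempts < 3)
        throw DoRestart();                  // pretend the first two runs hit a bad interpolant
    return true;
}

int main() {
    std::vector<int> stack(4, 0);           // stand-in for the per-level expansion stack
    int attempts = 0;
    while (true) {
        try {
            std::cout << "result: " << build_main(attempts) << "\n";
            break;
        }
        catch (const DoRestart &) {
            while (stack.size() > 1)
                stack.pop_back();           // pop back to a single (root) level
            std::cout << "restarted\n";
        }
    }
}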
bool BuildMain(){
stack.back().level = tree->slvr().get_scope_level();
bool was_sat = true;
int update_failures = 0;
while (true)
{
@ -1924,6 +2110,7 @@ namespace Duality {
unsigned slvr_level = tree->slvr().get_scope_level();
if(slvr_level != stack.back().level)
throw "stacks out of sync!";
reporter->Depth(stack.size());
// res = tree->Solve(top, 1); // incremental solve, keep interpolants for one pop
check_result foo = tree->Check(top);
@ -1942,11 +2129,17 @@ namespace Duality {
#ifdef NO_GENERALIZE
node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify();
#else
if(expansions.size() == 1 && NodeTooComplicated(node))
SimplifyNode(node);
else
node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify();
Generalize(node);
try {
if(expansions.size() == 1 && NodeTooComplicated(node))
SimplifyNode(node);
else
node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify();
Generalize(node);
}
catch(const RPFP::Bad &){
// bad interpolants can get us here
throw DoRestart();
}
#endif
if(RecordUpdate(node))
update_count++;
@ -1954,36 +2147,20 @@ namespace Duality {
heuristic->Update(node->map); // make it less likely to expand this node in future
}
if(update_count == 0){
if(was_sat)
throw Incompleteness();
if(was_sat){
update_failures++;
if(update_failures > 10)
throw Incompleteness();
}
reporter->Message("backtracked without learning");
}
else update_failures = 0;
}
tree->ComputeProofCore(); // need to compute the proof core before popping solver
bool propagated = false;
while(1) {
std::vector<Node *> &expansions = stack.back().expansions;
bool prev_level_used = LevelUsedInProof(stack.size()-2); // need to compute this before pop
tree->Pop(1);
hash_set<Node *> leaves_to_remove;
for(unsigned i = 0; i < expansions.size(); i++){
Node *node = expansions[i];
// if(node != top)
// tree->ConstrainParent(node->Incoming[0],node);
std::vector<Node *> &cs = node->Outgoing->Children;
for(unsigned i = 0; i < cs.size(); i++){
leaves_to_remove.insert(cs[i]);
UnmapNode(cs[i]);
if(std::find(updated_nodes.begin(),updated_nodes.end(),cs[i]) != updated_nodes.end())
throw "help!";
}
}
RemoveLeaves(leaves_to_remove); // have to do this before actually deleting the children
for(unsigned i = 0; i < expansions.size(); i++){
Node *node = expansions[i];
RemoveExpansion(node);
}
stack.pop_back();
PopLevel();
if(stack.size() == 1)break;
if(prev_level_used){
Node *node = stack.back().expansions[0];
@ -2011,7 +2188,7 @@ namespace Duality {
}
else {
was_sat = true;
tree->Push();
tree->Push();
std::vector<Node *> &expansions = stack.back().expansions;
#ifndef NO_DECISIONS
for(unsigned i = 0; i < expansions.size(); i++){
@ -2022,11 +2199,17 @@ namespace Duality {
if(tree->slvr().check() == unsat)
throw "help!";
#endif
stack.push_back(stack_entry());
stack.back().level = tree->slvr().get_scope_level();
if(ExpandSomeNodes(false,1)){
continue;
int expand_max = 1;
if(0&&duality->BatchExpand){
int thing = stack.size() * 0.1;
expand_max = std::max(1,thing);
if(expand_max > 1)
std::cout << "foo!\n";
}
if(ExpandSomeNodes(false,expand_max))
continue;
tree->Pop(1);
while(stack.size() > 1){
tree->Pop(1);
stack.pop_back();
@ -2036,6 +2219,30 @@ namespace Duality {
}
}
void PopLevel(){
std::vector<Node *> &expansions = stack.back().expansions;
tree->Pop(1);
hash_set<Node *> leaves_to_remove;
for(unsigned i = 0; i < expansions.size(); i++){
Node *node = expansions[i];
// if(node != top)
// tree->ConstrainParent(node->Incoming[0],node);
std::vector<Node *> &cs = node->Outgoing->Children;
for(unsigned i = 0; i < cs.size(); i++){
leaves_to_remove.insert(cs[i]);
UnmapNode(cs[i]);
if(std::find(updated_nodes.begin(),updated_nodes.end(),cs[i]) != updated_nodes.end())
throw "help!";
}
}
RemoveLeaves(leaves_to_remove); // have to do this before actually deleting the children
for(unsigned i = 0; i < expansions.size(); i++){
Node *node = expansions[i];
RemoveExpansion(node);
}
stack.pop_back();
}
bool NodeTooComplicated(Node *node){
int ops = tree->CountOperators(node->Annotation.Formula);
if(ops > 10) return true;
@ -2047,7 +2254,13 @@ namespace Duality {
// have to destroy the old proof to get a new interpolant
timer_start("SimplifyNode");
tree->PopPush();
tree->InterpolateByCases(top,node);
try {
tree->InterpolateByCases(top,node);
}
catch(const RPFP::Bad&){
timer_stop("SimplifyNode");
throw RPFP::Bad();
}
timer_stop("SimplifyNode");
}
@ -2085,6 +2298,8 @@ namespace Duality {
std::list<Node *> updated_nodes;
virtual void ExpandNode(RPFP::Node *p){
stack.push_back(stack_entry());
stack.back().level = tree->slvr().get_scope_level();
stack.back().expansions.push_back(p);
DerivationTree::ExpandNode(p);
std::vector<Node *> &new_nodes = p->Outgoing->Children;
@ -2442,7 +2657,7 @@ namespace Duality {
}
bool ContainsCex(Node *node, Counterexample &cex){
expr val = cex.tree->Eval(cex.root->Outgoing,node->Annotation.Formula);
expr val = cex.get_tree()->Eval(cex.get_root()->Outgoing,node->Annotation.Formula);
return eq(val,parent->ctx.bool_val(true));
}
@ -2461,15 +2676,15 @@ namespace Duality {
Node *other = insts[i];
if(CouldCover(node,other)){
reporter()->Forcing(node,other);
if(cex.tree && !ContainsCex(other,cex))
if(cex.get_tree() && !ContainsCex(other,cex))
continue;
if(cex.tree) {delete cex.tree; cex.tree = 0;}
cex.clear();
if(parent->ProveConjecture(node,other->Annotation,other,&cex))
if(CloseDescendants(node))
return true;
}
}
if(cex.tree) {delete cex.tree; cex.tree = 0;}
cex.clear();
return false;
}
#else
@ -2568,13 +2783,12 @@ namespace Duality {
Counterexample old_cex;
public:
ReplayHeuristic(RPFP *_rpfp, Counterexample &_old_cex)
: Heuristic(_rpfp), old_cex(_old_cex)
: Heuristic(_rpfp)
{
old_cex.swap(_old_cex); // take ownership from caller
}
~ReplayHeuristic(){
if(old_cex.tree)
delete old_cex.tree;
}
// Maps nodes of derivation tree into old cex
@ -2582,9 +2796,7 @@ namespace Duality {
void Done() {
cex_map.clear();
if(old_cex.tree)
delete old_cex.tree;
old_cex.tree = 0; // only replay once!
old_cex.clear();
}
void ShowNodeAndChildren(Node *n){
@ -2606,7 +2818,7 @@ namespace Duality {
}
virtual void ChooseExpand(const std::set<RPFP::Node *> &choices, std::set<RPFP::Node *> &best, bool high_priority, bool best_only){
if(!high_priority || !old_cex.tree){
if(!high_priority || !old_cex.get_tree()){
Heuristic::ChooseExpand(choices,best,false);
return;
}
@ -2615,7 +2827,7 @@ namespace Duality {
for(std::set<Node *>::iterator it = choices.begin(), en = choices.end(); it != en; ++it){
Node *node = (*it);
if(cex_map.empty())
cex_map[node] = old_cex.root; // match the root nodes
cex_map[node] = old_cex.get_root(); // match the root nodes
if(cex_map.find(node) == cex_map.end()){ // try to match an unmatched node
Node *parent = node->Incoming[0]->Parent; // assumes we are a tree!
if(cex_map.find(parent) == cex_map.end())
@ -2641,7 +2853,7 @@ namespace Duality {
Node *old_node = cex_map[node];
if(!old_node)
unmatched.insert(node);
else if(old_cex.tree->Empty(old_node))
else if(old_cex.get_tree()->Empty(old_node))
unmatched.insert(node);
else
matched.insert(node);
@ -2715,7 +2927,120 @@ namespace Duality {
}
};
/** This proposer class generates conjectures based on the
unwinding generated by a previous solver. The assumption is
that the previous solver was working on a different
abstraction of the same system. The trick is to adapt the
annotations in the old unwinding to the new predicates. We
start by generating a map from predicates and parameters in
the old problem to the new.
HACK: mapping is done by cheesy name comparison.
*/
class HistoryProposer : public Proposer
{
Duality *old_solver;
Duality *new_solver;
hash_map<Node *, std::vector<RPFP::Transformer> > conjectures;
public:
/** Construct a history proposer. */
HistoryProposer(Duality *_old_solver, Duality *_new_solver)
: old_solver(_old_solver), new_solver(_new_solver) {
// tricky: names in the axioms may have changed -- map them
hash_set<func_decl> &old_constants = old_solver->unwinding->ls->get_constants();
hash_set<func_decl> &new_constants = new_solver->rpfp->ls->get_constants();
hash_map<std::string,func_decl> cmap;
for(hash_set<func_decl>::iterator it = new_constants.begin(), en = new_constants.end(); it != en; ++it)
cmap[GetKey(*it)] = *it;
hash_map<func_decl,func_decl> bckg_map;
for(hash_set<func_decl>::iterator it = old_constants.begin(), en = old_constants.end(); it != en; ++it){
func_decl f = new_solver->ctx.translate(*it); // move to new context
if(cmap.find(GetKey(f)) != cmap.end())
bckg_map[f] = cmap[GetKey(f)];
// else
// std::cout << "constant not matched\n";
}
RPFP *old_unwinding = old_solver->unwinding;
hash_map<std::string, std::vector<Node *> > pred_match;
// index all the predicates in the old unwinding
for(unsigned i = 0; i < old_unwinding->nodes.size(); i++){
Node *node = old_unwinding->nodes[i];
std::string key = GetKey(node);
pred_match[key].push_back(node);
}
// match with predicates in the new RPFP
RPFP *rpfp = new_solver->rpfp;
for(unsigned i = 0; i < rpfp->nodes.size(); i++){
Node *node = rpfp->nodes[i];
std::string key = GetKey(node);
std::vector<Node *> &matches = pred_match[key];
for(unsigned j = 0; j < matches.size(); j++)
MatchNodes(node,matches[j],bckg_map);
}
}
virtual std::vector<RPFP::Transformer> &Propose(Node *node){
return conjectures[node->map];
}
virtual ~HistoryProposer(){
};
private:
void MatchNodes(Node *new_node, Node *old_node, hash_map<func_decl,func_decl> &bckg_map){
if(old_node->Annotation.IsFull())
return; // don't conjecture true!
hash_map<std::string, expr> var_match;
std::vector<expr> &new_params = new_node->Annotation.IndParams;
// Index the new parameters by their keys
for(unsigned i = 0; i < new_params.size(); i++)
var_match[GetKey(new_params[i])] = new_params[i];
RPFP::Transformer &old = old_node->Annotation;
std::vector<expr> from_params = old.IndParams;
for(unsigned j = 0; j < from_params.size(); j++)
from_params[j] = new_solver->ctx.translate(from_params[j]); // get in new context
std::vector<expr> to_params = from_params;
for(unsigned j = 0; j < to_params.size(); j++){
std::string key = GetKey(to_params[j]);
if(var_match.find(key) == var_match.end()){
// std::cout << "unmatched parameter!\n";
return;
}
to_params[j] = var_match[key];
}
expr fmla = new_solver->ctx.translate(old.Formula); // get in new context
fmla = new_solver->rpfp->SubstParams(old.IndParams,to_params,fmla); // substitute parameters
hash_map<ast,expr> memo;
fmla = new_solver->rpfp->SubstRec(memo,bckg_map,fmla); // substitute background constants
RPFP::Transformer new_annot = new_node->Annotation;
new_annot.Formula = fmla;
conjectures[new_node].push_back(new_annot);
}
// We match names by removing suffixes beginning with a double at sign ("@@")
std::string GetKey(Node *node){
return GetKey(node->Name);
}
std::string GetKey(const expr &var){
return GetKey(var.decl());
}
std::string GetKey(const func_decl &f){
std::string name = f.name().str();
int idx = name.find("@@");
if(idx >= 0)
name.erase(idx);
return name;
}
};
};
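A standalone sketch (not from the commit) of the key computation GetKey performs above, written with the idiomatic npos check; the example names are made up:

// Standalone sketch of suffix stripping for name matching (not from the commit).
#include <cassert>
#include <string>

std::string get_key(std::string name) {
    std::string::size_type idx = name.find("@@");
    if (idx != std::string::npos)
        name.erase(idx);                    // drop the "@@..." suffix
    return name;
}

int main() {
    assert(get_key("P@@12") == "P");
    assert(get_key("P") == "P");
    return 0;
}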
@ -2723,8 +3048,9 @@ namespace Duality {
std::ostream &s;
public:
StreamReporter(RPFP *_rpfp, std::ostream &_s)
: Reporter(_rpfp), s(_s) {event = 0;}
: Reporter(_rpfp), s(_s) {event = 0; depth = -1;}
int event;
int depth;
void ev(){
s << "[" << event++ << "]" ;
}
@ -2735,23 +3061,30 @@ namespace Duality {
s << " " << rps[i]->number;
s << std::endl;
}
virtual void Update(RPFP::Node *node, const RPFP::Transformer &update){
virtual void Update(RPFP::Node *node, const RPFP::Transformer &update, bool eager){
ev(); s << "update " << node->number << " " << node->Name.name() << ": ";
rpfp->Summarize(update.Formula);
std::cout << std::endl;
if(depth > 0) s << " (depth=" << depth << ")";
if(eager) s << " (eager)";
s << std::endl;
}
virtual void Bound(RPFP::Node *node){
ev(); s << "check " << node->number << std::endl;
}
virtual void Expand(RPFP::Edge *edge){
RPFP::Node *node = edge->Parent;
ev(); s << "expand " << node->map->number << " " << node->Name.name() << std::endl;
ev(); s << "expand " << node->map->number << " " << node->Name.name();
if(depth > 0) s << " (depth=" << depth << ")";
s << std::endl;
}
virtual void Depth(int d){
depth = d;
}
virtual void AddCover(RPFP::Node *covered, std::vector<RPFP::Node *> &covering){
ev(); s << "cover " << covered->Name.name() << ": " << covered->number << " by ";
for(unsigned i = 0; i < covering.size(); i++)
std::cout << covering[i]->number << " ";
std::cout << std::endl;
s << covering[i]->number << " ";
s << std::endl;
}
virtual void RemoveCover(RPFP::Node *covered, RPFP::Node *covering){
ev(); s << "uncover " << covered->Name.name() << ": " << covered->number << " by " << covering->number << std::endl;
@ -2762,7 +3095,7 @@ namespace Duality {
virtual void Conjecture(RPFP::Node *node, const RPFP::Transformer &t){
ev(); s << "conjecture " << node->number << " " << node->Name.name() << ": ";
rpfp->Summarize(t.Formula);
std::cout << std::endl;
s << std::endl;
}
virtual void Dominates(RPFP::Node *node, RPFP::Node *other){
ev(); s << "dominates " << node->Name.name() << ": " << node->number << " > " << other->number << std::endl;


@ -18,7 +18,7 @@ Revision History:
--*/
#ifdef WIN32
#ifdef _WINDOWS
#pragma warning(disable:4996)
#pragma warning(disable:4800)
#pragma warning(disable:4267)
@ -37,16 +37,18 @@ Revision History:
namespace Duality {
solver::solver(Duality::context& c, bool extensional, bool models) : object(c), the_model(c) {
solver::solver(Duality::context& c, bool _extensional, bool models) : object(c), the_model(c) {
params_ref p;
p.set_bool("proof", true); // this is currently useless
if(models)
p.set_bool("model", true);
p.set_bool("unsat_core", true);
p.set_bool("mbqi",true);
bool mbqi = c.get_config().get().get_bool("mbqi",true);
p.set_bool("mbqi",mbqi); // just to test
p.set_str("mbqi.id","itp"); // use mbqi for quantifiers in interpolants
p.set_uint("mbqi.max_iterations",1); // use mbqi for quantifiers in interpolants
if(true || extensional)
extensional = mbqi && (true || _extensional);
if(extensional)
p.set_bool("array.extensional",true);
scoped_ptr<solver_factory> sf = mk_smt_solver_factory();
m_solver = (*sf)(m(), p, true, true, true, ::symbol::null);
@ -656,6 +658,18 @@ expr context::make_quant(decl_kind op, const std::vector<sort> &_sorts, const st
pp.display_smt2(std::cout, m_solver->get_assertion(n-1));
}
void solver::print(const char *filename) {
std::ofstream f(filename);
unsigned n = m_solver->get_num_assertions();
if(!n)
return;
ast_smt_pp pp(m());
for (unsigned i = 0; i < n-1; ++i)
pp.add_assumption(m_solver->get_assertion(i));
pp.display_smt2(f, m_solver->get_assertion(n-1));
}
void solver::show_assertion_ids() {
#if 0
unsigned n = m_solver->get_num_assertions();


@ -182,6 +182,7 @@ namespace Duality {
void set(char const * param, char const * value) { m_config.set(param,value); }
void set(char const * param, bool value) { m_config.set(param,value); }
void set(char const * param, int value) { m_config.set(param,value); }
config &get_config() {return m_config;}
symbol str_symbol(char const * s);
symbol int_symbol(int n);
@ -243,6 +244,9 @@ namespace Duality {
sort_kind get_sort_kind(const sort &s);
expr translate(const expr &e);
func_decl translate(const func_decl &);
void print_expr(std::ostream &s, const ast &e);
fixedpoint mk_fixedpoint();
@ -818,6 +822,7 @@ namespace Duality {
model the_model;
bool canceled;
proof_gen_mode m_mode;
bool extensional;
public:
solver(context & c, bool extensional = false, bool models = true);
solver(context & c, ::solver *s):object(c),the_model(c) { m_solver = s; canceled = false;}
@ -921,6 +926,7 @@ namespace Duality {
unsigned get_scope_level(){ scoped_proof_mode spm(m(),m_mode); return m_solver->get_scope_level();}
void show();
void print(const char *filename);
void show_assertion_ids();
proof get_proof(){
@ -928,6 +934,7 @@ namespace Duality {
return proof(ctx(),m_solver->get_proof());
}
bool extensional_array_theory() {return extensional;}
};
#if 0
@ -1370,6 +1377,20 @@ namespace Duality {
return to_expr(a.raw());
}
inline expr context::translate(const expr &e) {
::expr *f = to_expr(e.raw());
if(&e.ctx().m() != &m()) // same ast manager -> no translation
throw "ast manager mismatch";
return cook(f);
}
inline func_decl context::translate(const func_decl &e) {
::func_decl *f = to_func_decl(e.raw());
if(&e.ctx().m() != &m()) // same ast manager -> no translation
throw "ast manager mismatch";
return func_decl(*this,f);
}
typedef double clock_t;
clock_t current_time();
inline void output_time(std::ostream &os, clock_t time){os << time;}
@ -1401,14 +1422,6 @@ namespace hash_space {
};
}
// to make Duality::ast hashable in windows
#ifdef _WINDOWS
template <> inline
size_t stdext::hash_value<Duality::ast >(const Duality::ast& s)
{
return s.raw()->get_id();
}
#endif
// to make Duality::ast usable in ordered collections
namespace std {
@ -1445,14 +1458,6 @@ namespace hash_space {
};
}
// to make Duality::func_decl hashable in windows
#ifdef _WINDOWS
template <> inline
size_t stdext::hash_value<Duality::func_decl >(const Duality::func_decl& s)
{
return s.raw()->get_id();
}
#endif
// to make Duality::func_decl usable in ordered collections
namespace std {