Mirror of https://github.com/Z3Prover/z3
commit 466ac0237f
Merge branch 'unstable' of https://git01.codeplex.com/z3 into bvsls

26 changed files with 1469 additions and 668 deletions
		|  | @ -639,7 +639,7 @@ def is_CXX_gpp(): | |||
|     return is_compiler(CXX, 'g++') | ||||
| 
 | ||||
| def is_clang_in_gpp_form(cc): | ||||
|     version_string = subprocess.check_output([cc, '--version']) | ||||
|     version_string = check_output([cc, '--version']) | ||||
|     return str(version_string).find('clang') != -1 | ||||
| 
 | ||||
| def is_CXX_clangpp(): | ||||
|  |  | |||
|  | @ -39,11 +39,8 @@ Revision History: | |||
| #include"iz3pp.h" | ||||
| #include"iz3checker.h" | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| // WARNING: don't make a hash_map with this if the range type
 | ||||
| // has a destructor: you'll get an address dependency!!!
 | ||||
| namespace stl_ext { | ||||
|  | @ -55,7 +52,6 @@ namespace stl_ext { | |||
|     } | ||||
|   }; | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| typedef interpolation_options_struct *Z3_interpolation_options; | ||||
| 
 | ||||
|  |  | |||
|  | @ -25,12 +25,12 @@ Revision History: | |||
| #include <map> | ||||
| 
 | ||||
| // make hash_map and hash_set available
 | ||||
| #ifndef _WINDOWS | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| namespace Duality { | ||||
| 
 | ||||
|   class implicant_solver; | ||||
| 
 | ||||
|   /* Generic operations on Z3 formulas */ | ||||
| 
 | ||||
|   struct Z3User { | ||||
|  | @ -82,6 +82,8 @@ namespace Duality { | |||
| 
 | ||||
|       Term SubstAtom(hash_map<ast, Term> &memo, const expr &t, const expr &atom, const expr &val); | ||||
| 
 | ||||
|       Term CloneQuantAndSimp(const expr &t, const expr &body); | ||||
| 
 | ||||
|       Term RemoveRedundancy(const Term &t); | ||||
| 
 | ||||
|       Term IneqToEq(const Term &t); | ||||
|  | @ -102,6 +104,9 @@ namespace Duality { | |||
| 
 | ||||
|       FuncDecl RenumberPred(const FuncDecl &f, int n); | ||||
| 
 | ||||
|     Term ExtractStores(hash_map<ast, Term> &memo, const Term &t, std::vector<expr> &cnstrs, hash_map<ast,expr> &renaming); | ||||
| 
 | ||||
| 
 | ||||
| protected: | ||||
| 
 | ||||
|       void SummarizeRec(hash_set<ast> &memo, std::vector<expr> &lits, int &ops, const Term &t); | ||||
|  | @ -197,6 +202,9 @@ protected: | |||
| 	   /** Is this a background constant? */ | ||||
| 	   virtual bool is_constant(const func_decl &f) = 0; | ||||
| 
 | ||||
| 	   /** Get the constants in the background vocabulary */ | ||||
| 	   virtual hash_set<func_decl> &get_constants() = 0; | ||||
| 
 | ||||
|            /** Assert a background axiom. */ | ||||
|            virtual void assert_axiom(const expr &axiom) = 0; | ||||
| 
 | ||||
|  | @ -290,6 +298,11 @@ protected: | |||
| 	  return bckg.find(f) != bckg.end(); | ||||
| 	} | ||||
| 
 | ||||
| 	/** Get the constants in the background vocabulary */ | ||||
| 	virtual hash_set<func_decl> &get_constants(){ | ||||
| 	  return bckg; | ||||
| 	} | ||||
| 
 | ||||
|         ~iZ3LogicSolver(){ | ||||
|           // delete ictx;
 | ||||
|           delete islvr; | ||||
|  | @ -600,6 +613,8 @@ protected: | |||
|       void FixCurrentState(Edge *root); | ||||
|      | ||||
|       void FixCurrentStateFull(Edge *edge, const expr &extra); | ||||
|        | ||||
|       void FixCurrentStateFull(Edge *edge, const std::vector<expr> &assumps, const hash_map<ast,expr> &renaming); | ||||
| 
 | ||||
|       /** Declare a constant in the background theory. */ | ||||
| 
 | ||||
|  | @ -731,6 +746,10 @@ protected: | |||
|       struct bad_format { | ||||
|       }; | ||||
| 
 | ||||
|       // thrown on internal error
 | ||||
|       struct Bad { | ||||
|       }; | ||||
|        | ||||
|       /** Pop a scope (see Push). Note, you cannot pop axioms. */ | ||||
|        | ||||
|       void Pop(int num_scopes); | ||||
|  | @ -942,10 +961,12 @@ protected: | |||
|       Term UnderapproxFormula(const Term &f, hash_set<ast> &dont_cares); | ||||
| 
 | ||||
|       void ImplicantFullRed(hash_map<ast,int> &memo, const Term &f, std::vector<Term> &lits, | ||||
| 			    hash_set<ast> &done, hash_set<ast> &dont_cares); | ||||
| 			    hash_set<ast> &done, hash_set<ast> &dont_cares, bool extensional = true); | ||||
| 
 | ||||
|       Term UnderapproxFullFormula(const Term &f, hash_set<ast> &dont_cares); | ||||
|     public: | ||||
|       Term UnderapproxFullFormula(const Term &f, bool extensional = true); | ||||
| 
 | ||||
|     protected: | ||||
|       Term ToRuleRec(Edge *e,  hash_map<ast,Term> &memo, const Term &t, std::vector<expr> &quants); | ||||
| 
 | ||||
|       hash_map<ast,Term> resolve_ite_memo; | ||||
|  | @ -986,6 +1007,8 @@ protected: | |||
| 
 | ||||
|       void AddEdgeToSolver(Edge *edge); | ||||
| 
 | ||||
|       void AddEdgeToSolver(implicant_solver &aux_solver, Edge *edge); | ||||
| 
 | ||||
|       void AddToProofCore(hash_set<ast> &core); | ||||
| 
 | ||||
|       void GetGroundLitsUnderQuants(hash_set<ast> *memo, const Term &f, std::vector<Term> &res, int under); | ||||
|  | @ -1051,13 +1074,40 @@ protected: | |||
|        | ||||
|     public: | ||||
|        | ||||
|       struct Counterexample { | ||||
|       class Counterexample { | ||||
|       private: | ||||
| 	RPFP *tree; | ||||
| 	RPFP::Node *root; | ||||
|       public: | ||||
| 	Counterexample(){ | ||||
| 	  tree = 0; | ||||
| 	  root = 0; | ||||
| 	} | ||||
| 	Counterexample(RPFP *_tree, RPFP::Node *_root){ | ||||
| 	  tree = _tree; | ||||
| 	  root = _root; | ||||
| 	} | ||||
| 	~Counterexample(){ | ||||
| 	  if(tree) delete tree; | ||||
| 	} | ||||
| 	void swap(Counterexample &other){ | ||||
| 	  std::swap(tree,other.tree); | ||||
| 	  std::swap(root,other.root); | ||||
| 	} | ||||
| 	void set(RPFP *_tree, RPFP::Node *_root){ | ||||
| 	  if(tree) delete tree; | ||||
| 	  tree = _tree; | ||||
| 	  root = _root; | ||||
| 	} | ||||
| 	void clear(){ | ||||
| 	  if(tree) delete tree; | ||||
| 	  tree = 0; | ||||
| 	} | ||||
| 	RPFP *get_tree() const {return tree;} | ||||
| 	RPFP::Node *get_root() const {return root;} | ||||
|       private: | ||||
| 	Counterexample &operator=(const Counterexample &); | ||||
| 	Counterexample(const Counterexample &); | ||||
|       }; | ||||
|        | ||||
|       /** Solve the problem. You can optionally give an old
 | ||||
|  | @ -1067,7 +1117,7 @@ protected: | |||
|        | ||||
|       virtual bool Solve() = 0; | ||||
|        | ||||
|       virtual Counterexample GetCounterexample() = 0; | ||||
|       virtual Counterexample &GetCounterexample() = 0; | ||||
|        | ||||
|       virtual bool SetOption(const std::string &option, const std::string &value) = 0; | ||||
|        | ||||
|  | @ -1075,7 +1125,7 @@ protected: | |||
| 	  is chiefly useful for abstraction refinement, when we want to | ||||
| 	  solve a series of similar problems. */ | ||||
| 
 | ||||
|       virtual void LearnFrom(Counterexample &old_cex) = 0; | ||||
|       virtual void LearnFrom(Solver *old_solver) = 0; | ||||
| 
 | ||||
|       virtual ~Solver(){} | ||||
| 
 | ||||
|  |  | |||
|  | @ -1,169 +0,0 @@ | |||
| /*++
 | ||||
| Copyright (c) 2011 Microsoft Corporation | ||||
| 
 | ||||
| Module Name: | ||||
| 
 | ||||
|     iz3hash.h | ||||
| 
 | ||||
| Abstract: | ||||
| 
 | ||||
|    Wrapper for stl hash tables | ||||
| 
 | ||||
| Author: | ||||
| 
 | ||||
|     Ken McMillan (kenmcmil) | ||||
| 
 | ||||
| Revision History: | ||||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| // pull in the headers for hash_map and hash_set
 | ||||
| // these live in non-standard places
 | ||||
| 
 | ||||
| #ifndef IZ3_HASH_H | ||||
| #define IZ3_HASH_H | ||||
| 
 | ||||
| //#define USE_UNORDERED_MAP
 | ||||
| #ifdef USE_UNORDERED_MAP | ||||
| 
 | ||||
| #define stl_ext std | ||||
| #define hash_space std | ||||
| #include <unordered_map> | ||||
| #include <unordered_set> | ||||
| #define hash_map unordered_map | ||||
| #define hash_set unordered_set | ||||
| 
 | ||||
| #else | ||||
| 
 | ||||
| #if __GNUC__ >= 3 | ||||
| #undef __DEPRECATED | ||||
| #define stl_ext __gnu_cxx | ||||
| #define hash_space stl_ext | ||||
| #include <ext/hash_map> | ||||
| #include <ext/hash_set> | ||||
| #else | ||||
| #ifdef WIN32 | ||||
| #define stl_ext stdext | ||||
| #define hash_space std | ||||
| #include <hash_map> | ||||
| #include <hash_set> | ||||
| #else | ||||
| #define stl_ext std | ||||
| #define hash_space std | ||||
| #include <hash_map> | ||||
| #include <hash_set> | ||||
| #endif | ||||
| #endif | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| #include <string> | ||||
| 
 | ||||
| // stupid STL doesn't include hash function for class string
 | ||||
| 
 | ||||
| #ifndef WIN32  | ||||
| 
 | ||||
| namespace stl_ext { | ||||
|   template <> | ||||
|     class hash<std::string> { | ||||
|     stl_ext::hash<char *> H; | ||||
|   public: | ||||
|     size_t operator()(const std::string &s) const { | ||||
|       return H(s.c_str()); | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| namespace hash_space { | ||||
|   template <> | ||||
|     class hash<std::pair<int,int> > { | ||||
|   public: | ||||
|     size_t operator()(const std::pair<int,int> &p) const { | ||||
|       return p.first + p.second; | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #ifdef WIN32  | ||||
| template <> inline | ||||
| size_t stdext::hash_value<std::pair<int,int> >(const std::pair<int,int>& p) | ||||
| {	// hash _Keyval to size_t value one-to-one
 | ||||
| 	return p.first + p.second; | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| namespace hash_space { | ||||
|   template <class T> | ||||
|     class hash<std::pair<T *, T *> > { | ||||
|   public: | ||||
|     size_t operator()(const std::pair<T *,T *> &p) const { | ||||
|       return (size_t)p.first + (size_t)p.second; | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #if 0 | ||||
| template <class T> inline | ||||
| size_t stdext::hash_value<std::pair<T *, T *> >(const std::pair<T *, T *>& p) | ||||
| {	// hash _Keyval to size_t value one-to-one
 | ||||
|   return (size_t)p.first + (size_t)p.second; | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| 
 | ||||
| namespace std { | ||||
|     template <> | ||||
| 	   class less<std::pair<int,int> > { | ||||
| 	   public: | ||||
| 		   bool operator()(const pair<int,int> &x, const pair<int,int> &y) const { | ||||
| 		      return x.first < y.first || x.first == y.first && x.second < y.second; | ||||
| 		   } | ||||
| 	   }; | ||||
| 	 | ||||
| } | ||||
|    | ||||
| namespace std { | ||||
|     template <class T> | ||||
| 	   class less<std::pair<T *,T *> > { | ||||
| 	   public: | ||||
| 		   bool operator()(const pair<T *,T *> &x, const pair<T *,T *> &y) const { | ||||
| 		     return (size_t)x.first < (size_t)y.first || (size_t)x.first == (size_t)y.first && (size_t)x.second < (size_t)y.second; | ||||
| 		   } | ||||
| 	   }; | ||||
| 	 | ||||
| } | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| 
 | ||||
| namespace stl_ext { | ||||
|   template <class T> | ||||
|     class hash<T *> { | ||||
|   public: | ||||
|     size_t operator()(const T *p) const { | ||||
|       return (size_t) p; | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| template <class K, class T> | ||||
| class hash_map : public stl_ext::hash_map<K,T,stl_ext::hash_compare<K,std::less<K> > > {}; | ||||
| 
 | ||||
| template <class K> | ||||
| class hash_set : public stl_ext::hash_set<K,stl_ext::hash_compare<K,std::less<K> > > {}; | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| #endif | ||||
|  | @ -25,7 +25,7 @@ Revision History: | |||
| #include <string.h> | ||||
| #include <stdlib.h> | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  |  | |||
|  | @ -21,7 +21,7 @@ Revision History: | |||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -36,10 +36,6 @@ Revision History: | |||
| #include "duality.h" | ||||
| #include "duality_profiling.h" | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| // #define Z3OPS
 | ||||
| #endif | ||||
| 
 | ||||
| // TODO: do we need these?
 | ||||
| #ifdef Z3OPS | ||||
| 
 | ||||
|  | @ -150,8 +146,10 @@ namespace Duality { | |||
|       } | ||||
|       return 0; | ||||
|     } | ||||
|     if(t.is_quantifier()) | ||||
|       return CountOperatorsRec(memo,t.body())+2; // count 2 for a quantifier
 | ||||
|     if(t.is_quantifier()){ | ||||
|       int nbv = t.get_quantifier_num_bound(); | ||||
|       return CountOperatorsRec(memo,t.body()) + 2 * nbv; // count 2 for each quantifier
 | ||||
|     } | ||||
|     return 0; | ||||
|   } | ||||
| 
 | ||||
|  | @ -410,6 +408,33 @@ namespace Duality { | |||
|     return res; | ||||
|   } | ||||
| 
 | ||||
|   Z3User::Term Z3User::ExtractStores(hash_map<ast, Term> &memo, const Term &t, std::vector<expr> &cnstrs, hash_map<ast,expr> &renaming) | ||||
|   { | ||||
|     std::pair<ast,Term> foo(t,expr(ctx)); | ||||
|     std::pair<hash_map<ast,Term>::iterator, bool> bar = memo.insert(foo); | ||||
|     Term &res = bar.first->second; | ||||
|     if(!bar.second) return res; | ||||
|     if (t.is_app()) { | ||||
|       func_decl f = t.decl(); | ||||
|       std::vector<Term> args; | ||||
|       int nargs = t.num_args(); | ||||
|       for(int i = 0; i < nargs; i++) | ||||
| 	args.push_back(ExtractStores(memo, t.arg(i),cnstrs,renaming)); | ||||
|       res = f(args.size(),&args[0]); | ||||
|       if(f.get_decl_kind() == Store){ | ||||
| 	func_decl fresh = ctx.fresh_func_decl("@arr", res.get_sort()); | ||||
| 	expr y = fresh(); | ||||
| 	expr equ = ctx.make(Equal,y,res); | ||||
| 	cnstrs.push_back(equ); | ||||
| 	renaming[y] = res; | ||||
| 	res = y; | ||||
|       } | ||||
|     } | ||||
|     else res = t; | ||||
|     return res; | ||||
|   } | ||||
| 
 | ||||
| 
 | ||||
|   bool Z3User::IsLiteral(const expr &lit, expr &atom, expr &val){ | ||||
|     if(!(lit.is_quantifier() && IsClosedFormula(lit))){ | ||||
|       if(!lit.is_app()) | ||||
|  | @ -523,6 +548,25 @@ namespace Duality { | |||
|       return foo; | ||||
|   } | ||||
| 
 | ||||
|   Z3User::Term Z3User::CloneQuantAndSimp(const expr &t, const expr &body){ | ||||
|     if(t.is_quantifier_forall() && body.is_app() && body.decl().get_decl_kind() == And){ | ||||
|       int nargs = body.num_args(); | ||||
|       std::vector<expr> args(nargs); | ||||
|       for(int i = 0; i < nargs; i++) | ||||
| 	args[i] = CloneQuantAndSimp(t, body.arg(i)); | ||||
|       return ctx.make(And,args); | ||||
|     } | ||||
|     if(!t.is_quantifier_forall() && body.is_app() && body.decl().get_decl_kind() == Or){ | ||||
|       int nargs = body.num_args(); | ||||
|       std::vector<expr> args(nargs); | ||||
|       for(int i = 0; i < nargs; i++) | ||||
| 	args[i] = CloneQuantAndSimp(t, body.arg(i)); | ||||
|       return ctx.make(Or,args); | ||||
|     } | ||||
|     return clone_quantifier(t,body); | ||||
|   } | ||||
| 
 | ||||
| 
 | ||||
|   Z3User::Term Z3User::SubstAtom(hash_map<ast, Term> &memo, const expr &t, const expr &atom, const expr &val){ | ||||
|     std::pair<ast,Term> foo(t,expr(ctx)); | ||||
|     std::pair<hash_map<ast,Term>::iterator, bool> bar = memo.insert(foo); | ||||
|  | @ -1832,7 +1876,7 @@ namespace Duality { | |||
|   } | ||||
| 
 | ||||
|   void RPFP::ImplicantFullRed(hash_map<ast,int> &memo, const Term &f, std::vector<Term> &lits, | ||||
| 			      hash_set<ast> &done, hash_set<ast> &dont_cares){ | ||||
| 			      hash_set<ast> &done, hash_set<ast> &dont_cares, bool extensional){ | ||||
|     if(done.find(f) != done.end()) | ||||
|       return; /* already processed */ | ||||
|     if(f.is_app()){ | ||||
|  | @ -1840,7 +1884,7 @@ namespace Duality { | |||
|       decl_kind k = f.decl().get_decl_kind(); | ||||
|       if(k == Implies || k == Iff || k == And || k == Or || k == Not){ | ||||
| 	for(int i = 0; i < nargs; i++) | ||||
| 	  ImplicantFullRed(memo,f.arg(i),lits,done,dont_cares); | ||||
| 	  ImplicantFullRed(memo,f.arg(i),lits,done,dont_cares, extensional); | ||||
| 	goto done; | ||||
|       } | ||||
|     } | ||||
|  | @ -1848,6 +1892,15 @@ namespace Duality { | |||
|       if(dont_cares.find(f) == dont_cares.end()){ | ||||
| 	int b = SubtermTruth(memo,f); | ||||
| 	if(b != 0 && b != 1) goto done; | ||||
| 	if(f.is_app() && f.decl().get_decl_kind() == Equal && f.arg(0).is_array()){ | ||||
| 	  if(b == 1 && !extensional){ | ||||
| 	    expr x = dualModel.eval(f.arg(0)); expr y = dualModel.eval(f.arg(1)); | ||||
| 	    if(!eq(x,y)) | ||||
| 	      b = 0; | ||||
| 	  } | ||||
| 	  if(b == 0) | ||||
| 	    goto done; | ||||
| 	} | ||||
| 	expr bv = (b==1) ? f : !f; | ||||
| 	lits.push_back(bv); | ||||
|       } | ||||
|  | @ -1969,12 +2022,16 @@ namespace Duality { | |||
|     return conjoin(lits); | ||||
|   } | ||||
| 
 | ||||
|   RPFP::Term RPFP::UnderapproxFullFormula(const Term &f, hash_set<ast> &dont_cares){ | ||||
|   RPFP::Term RPFP::UnderapproxFullFormula(const Term &f, bool extensional){ | ||||
|     hash_set<ast> dont_cares; | ||||
|     resolve_ite_memo.clear(); | ||||
|     timer_start("UnderapproxFormula"); | ||||
|     /* first compute truth values of subterms */ | ||||
|     hash_map<ast,int> memo; | ||||
|     hash_set<ast> done; | ||||
|     std::vector<Term> lits; | ||||
|     ImplicantFullRed(memo,f,lits,done,dont_cares); | ||||
|     ImplicantFullRed(memo,f,lits,done,dont_cares, extensional); | ||||
|     timer_stop("UnderapproxFormula"); | ||||
|     /* return conjunction of literals */ | ||||
|     return conjoin(lits); | ||||
|   } | ||||
|  | @ -2519,22 +2576,6 @@ namespace Duality { | |||
|     ConstrainEdgeLocalized(edge,eu); | ||||
|   } | ||||
| 
 | ||||
|   void RPFP::FixCurrentStateFull(Edge *edge, const expr &extra){ | ||||
|     hash_set<ast> dont_cares; | ||||
|     resolve_ite_memo.clear(); | ||||
|     timer_start("UnderapproxFormula"); | ||||
|     Term dual = edge->dual.null() ? ctx.bool_val(true) : edge->dual; | ||||
|     for(unsigned i = 0; i < edge->constraints.size(); i++) | ||||
|       dual = dual && edge->constraints[i]; | ||||
|     // dual = dual && extra;
 | ||||
|     Term eu = UnderapproxFullFormula(dual,dont_cares); | ||||
|     timer_stop("UnderapproxFormula"); | ||||
|     ConstrainEdgeLocalized(edge,eu); | ||||
|   } | ||||
| 
 | ||||
|    | ||||
|    | ||||
| 
 | ||||
|   void RPFP::GetGroundLitsUnderQuants(hash_set<ast> *memo, const Term &f, std::vector<Term> &res, int under){ | ||||
|     if(memo[under].find(f) != memo[under].end()) | ||||
|       return; | ||||
|  | @ -2817,7 +2858,91 @@ namespace Duality { | |||
|     return ctx.make(And,lits); | ||||
|   } | ||||
| 
 | ||||
|     // set up edge constraint in aux solver
 | ||||
| 
 | ||||
|   /* This is a wrapper for a solver that is intended to compute
 | ||||
|      implicants from models. It works around a problem in Z3 with | ||||
|      models in the non-extensional array theory. It does this by | ||||
|      naming all of the store terms in a formula. That is, (store ...) | ||||
|      is replaced by "name" with an added constraint name = (store | ||||
|      ...). This allows us to determine from the model whether an array | ||||
|      equality is true or false (it is false if the two sides are | ||||
|      mapped to different function symbols, even if they have the same | ||||
|      contents). | ||||
|    */ | ||||
| 
 | ||||
|   struct implicant_solver { | ||||
|     RPFP *owner; | ||||
|     solver &aux_solver; | ||||
|     std::vector<expr> assumps, namings; | ||||
|     std::vector<int> assump_stack, naming_stack; | ||||
|     hash_map<ast,expr> renaming, renaming_memo; | ||||
|      | ||||
|     void add(const expr &e){ | ||||
|       expr t = e; | ||||
|       if(!aux_solver.extensional_array_theory()){ | ||||
| 	unsigned i = namings.size(); | ||||
| 	t = owner->ExtractStores(renaming_memo,t,namings,renaming); | ||||
| 	for(; i < namings.size(); i++) | ||||
| 	  aux_solver.add(namings[i]); | ||||
|       } | ||||
|       assumps.push_back(t); | ||||
|       aux_solver.add(t); | ||||
|     } | ||||
| 
 | ||||
|     void push() { | ||||
|       assump_stack.push_back(assumps.size()); | ||||
|       naming_stack.push_back(namings.size()); | ||||
|       aux_solver.push(); | ||||
|     } | ||||
| 
 | ||||
|     // When we pop the solver, we have to re-add any namings that were lost
 | ||||
| 
 | ||||
|     void pop(int n) { | ||||
|       aux_solver.pop(n); | ||||
|       int new_assumps = assump_stack[assump_stack.size()-n]; | ||||
|       int new_namings = naming_stack[naming_stack.size()-n]; | ||||
|       for(unsigned i = new_namings; i < namings.size(); i++) | ||||
| 	aux_solver.add(namings[i]); | ||||
|       assumps.resize(new_assumps); | ||||
|       namings.resize(new_namings); | ||||
|       assump_stack.resize(assump_stack.size()-1); | ||||
|       naming_stack.resize(naming_stack.size()-1); | ||||
|     } | ||||
| 
 | ||||
|     check_result check() { | ||||
|       return aux_solver.check(); | ||||
|     } | ||||
| 
 | ||||
|     model get_model() { | ||||
|       return aux_solver.get_model(); | ||||
|     } | ||||
|      | ||||
|     expr get_implicant() { | ||||
|       owner->dualModel = aux_solver.get_model(); | ||||
|       expr dual = owner->ctx.make(And,assumps); | ||||
|       bool ext = aux_solver.extensional_array_theory(); | ||||
|       expr eu = owner->UnderapproxFullFormula(dual,ext); | ||||
|       // if we renamed store terms, we have to undo
 | ||||
|       if(!ext) | ||||
| 	eu = owner->SubstRec(renaming,eu); | ||||
|       return eu; | ||||
|     } | ||||
|      | ||||
|     implicant_solver(RPFP *_owner, solver &_aux_solver)  | ||||
|       : owner(_owner), aux_solver(_aux_solver) | ||||
|       {} | ||||
|   }; | ||||
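
The wrapper above operates on Duality's internal RPFP and solver types. As a minimal standalone sketch of the store-naming idea it describes (this uses the plain Z3 C++ API; the constant name "named_store" and the toy constraints are illustrative assumptions, not part of this commit):

    #include "z3++.h"
    #include <iostream>

    int main() {
      z3::context c;
      z3::sort intS = c.int_sort();
      z3::sort arrS = c.array_sort(intS, intS);
      z3::expr b = c.constant("b", arrS);
      z3::expr i = c.int_const("i");
      z3::expr v = c.int_const("v");

      // The store term we want to "name": (store b i v)
      z3::expr st = z3::store(b, i, v);

      // Fresh constant standing in for the store term, plus its defining equality
      z3::expr named_store = c.constant("named_store", arrS);

      z3::solver s(c);
      s.add(named_store == st);          // name = (store ...)
      s.add(v != z3::select(b, i));      // force the store to differ from b

      if (s.check() == z3::sat) {
        z3::model m = s.get_model();
        // With the name in place, the model assigns interpretations to both
        // named_store and b, so an array equality such as named_store == b
        // can be evaluated directly in the model.
        std::cout << m.eval(named_store == b) << "\n";
      }
      return 0;
    }

Once every store term is replaced by a name with a defining equality, an array equality can be decided from the model: it is treated as false whenever the two sides map to different symbols, which is what ImplicantFullRed relies on in the non-extensional case.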
| 
 | ||||
|   // set up edge constraint in aux solver
 | ||||
|   void RPFP::AddEdgeToSolver(implicant_solver &aux_solver, Edge *edge){ | ||||
|     if(!edge->dual.null()) | ||||
|       aux_solver.add(edge->dual); | ||||
|     for(unsigned i = 0; i < edge->constraints.size(); i++){ | ||||
|       expr tl = edge->constraints[i]; | ||||
|       aux_solver.add(tl); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   void RPFP::AddEdgeToSolver(Edge *edge){ | ||||
|     if(!edge->dual.null()) | ||||
|       aux_solver.add(edge->dual); | ||||
|  | @ -2827,57 +2952,132 @@ namespace Duality { | |||
|     } | ||||
|   } | ||||
| 
 | ||||
|   static int by_case_counter = 0; | ||||
| 
 | ||||
|   void RPFP::InterpolateByCases(Node *root, Node *node){ | ||||
|     timer_start("InterpolateByCases"); | ||||
|     bool axioms_added = false; | ||||
|     aux_solver.push(); | ||||
|     AddEdgeToSolver(node->Outgoing); | ||||
|     hash_set<ast> axioms_needed; | ||||
|     const std::vector<expr> &theory = ls->get_axioms(); | ||||
|     for(unsigned i = 0; i < theory.size(); i++) | ||||
|       axioms_needed.insert(theory[i]); | ||||
|     implicant_solver is(this,aux_solver); | ||||
|     is.push(); | ||||
|     AddEdgeToSolver(is,node->Outgoing); | ||||
|     node->Annotation.SetEmpty(); | ||||
|     hash_set<ast> *core = new hash_set<ast>; | ||||
|     core->insert(node->Outgoing->dual); | ||||
|     while(1){ | ||||
|       aux_solver.push(); | ||||
|       by_case_counter++; | ||||
|       is.push(); | ||||
|       expr annot = !GetAnnotation(node); | ||||
|       aux_solver.add(annot); | ||||
|       if(aux_solver.check() == unsat){ | ||||
| 	aux_solver.pop(1); | ||||
|       is.add(annot); | ||||
|       if(is.check() == unsat){ | ||||
| 	is.pop(1); | ||||
| 	break; | ||||
|       } | ||||
|       dualModel = aux_solver.get_model(); | ||||
|       aux_solver.pop(1); | ||||
|       is.pop(1); | ||||
|       Push(); | ||||
|       FixCurrentStateFull(node->Outgoing,annot); | ||||
|        ConstrainEdgeLocalized(node->Outgoing,!GetAnnotation(node)); | ||||
|       ConstrainEdgeLocalized(node->Outgoing,is.get_implicant()); | ||||
|       ConstrainEdgeLocalized(node->Outgoing,!GetAnnotation(node)); //TODO: need this?
 | ||||
|       check_result foo = Check(root); | ||||
|       if(foo != unsat) | ||||
|       if(foo != unsat){ | ||||
| 	slvr().print("should_be_unsat.smt2"); | ||||
| 	throw "should be unsat"; | ||||
|       AddToProofCore(*core); | ||||
|       } | ||||
|       std::vector<expr> assumps, axioms_to_add; | ||||
|       slvr().get_proof().get_assumptions(assumps); | ||||
|       for(unsigned i = 0; i < assumps.size(); i++){ | ||||
| 	(*core).insert(assumps[i]); | ||||
| 	if(axioms_needed.find(assumps[i]) != axioms_needed.end()){ | ||||
| 	  axioms_to_add.push_back(assumps[i]); | ||||
| 	  axioms_needed.erase(assumps[i]); | ||||
| 	} | ||||
|       } | ||||
|       // AddToProofCore(*core);
 | ||||
|       Transformer old_annot = node->Annotation; | ||||
|       SolveSingleNode(root,node); | ||||
| 
 | ||||
|       { | ||||
| 	expr itp = GetAnnotation(node); | ||||
| 	dualModel = aux_solver.get_model(); | ||||
| 	dualModel = is.get_model(); // TODO: what does this mean?
 | ||||
| 	std::vector<expr> case_lits; | ||||
| 	itp = StrengthenFormulaByCaseSplitting(itp, case_lits); | ||||
| 	SetAnnotation(node,itp); | ||||
| 	node->Annotation.Formula = node->Annotation.Formula.simplify(); | ||||
|       } | ||||
| 
 | ||||
|       for(unsigned i = 0; i < axioms_to_add.size(); i++) | ||||
| 	is.add(axioms_to_add[i]); | ||||
|        | ||||
| #define TEST_BAD | ||||
| #ifdef TEST_BAD | ||||
|       { | ||||
| 	static int bad_count = 0, num_bads = 1; | ||||
| 	if(bad_count >= num_bads){ | ||||
| 	  bad_count = 0; | ||||
| 	  num_bads = num_bads * 2; | ||||
| 	  Pop(1); | ||||
| 	  is.pop(1); | ||||
| 	  delete core; | ||||
| 	  timer_stop("InterpolateByCases"); | ||||
| 	  throw Bad(); | ||||
| 	} | ||||
| 	bad_count++; | ||||
|       } | ||||
| #endif | ||||
| 
 | ||||
|       if(node->Annotation.IsEmpty()){ | ||||
| 	if(!axioms_added){ | ||||
| 	  // add the axioms on the off chance they are useful
 | ||||
| 	  const std::vector<expr> &theory = ls->get_axioms(); | ||||
| 	  for(unsigned i = 0; i < theory.size(); i++) | ||||
| 	    aux_solver.add(theory[i]); | ||||
| 	    is.add(theory[i]); | ||||
| 	  axioms_added = true; | ||||
| 	} | ||||
| 	else { | ||||
| #ifdef KILL_ON_BAD_INTERPOLANT	   | ||||
| 	  std::cout << "bad in InterpolateByCase -- core:\n"; | ||||
| #if 0 | ||||
| 	  std::vector<expr> assumps; | ||||
| 	  slvr().get_proof().get_assumptions(assumps); | ||||
| 	  for(unsigned i = 0; i < assumps.size(); i++) | ||||
| 	    assumps[i].show(); | ||||
| #endif | ||||
| 	  std::cout << "checking for inconsistency\n"; | ||||
| 	  std::cout << "model:\n"; | ||||
| 	  is.get_model().show();	   | ||||
| 	  expr impl = is.get_implicant(); | ||||
| 	  std::vector<expr> conjuncts; | ||||
| 	  CollectConjuncts(impl,conjuncts,true); | ||||
| 	  std::cout << "impl:\n"; | ||||
| 	  for(unsigned i = 0; i < conjuncts.size(); i++) | ||||
| 	    conjuncts[i].show(); | ||||
| 	  std::cout << "annot:\n"; | ||||
| 	  annot.show(); | ||||
| 	  is.add(annot); | ||||
| 	  for(unsigned i = 0; i < conjuncts.size(); i++) | ||||
| 	    is.add(conjuncts[i]); | ||||
| 	  if(is.check() == unsat){ | ||||
| 	    std::cout << "inconsistent!\n"; | ||||
| 	    std::vector<expr> is_assumps; | ||||
| 	    is.aux_solver.get_proof().get_assumptions(is_assumps); | ||||
| 	    std::cout << "core:\n"; | ||||
| 	    for(unsigned i = 0; i < is_assumps.size(); i++) | ||||
| 	      is_assumps[i].show(); | ||||
| 	  } | ||||
| 	  else { | ||||
| 	    std::cout << "consistent!\n"; | ||||
| 	    is.aux_solver.print("should_be_inconsistent.smt2"); | ||||
| 	  } | ||||
| 	  std::cout << "by_case_counter = " << by_case_counter << "\n"; | ||||
| 	  throw "ack!"; | ||||
| #endif | ||||
| 	  Pop(1); | ||||
| 	  is.pop(1); | ||||
| 	  delete core; | ||||
| 	  timer_stop("InterpolateByCases"); | ||||
| 	  throw Bad(); | ||||
| 	} | ||||
|       } | ||||
|       Pop(1); | ||||
|  | @ -2886,7 +3086,8 @@ namespace Duality { | |||
|     if(proof_core) | ||||
|       delete proof_core; // shouldn't happen
 | ||||
|     proof_core = core; | ||||
|     aux_solver.pop(1); | ||||
|     is.pop(1); | ||||
|     timer_stop("InterpolateByCases"); | ||||
|   } | ||||
| 
 | ||||
|   void RPFP::Generalize(Node *root, Node *node){ | ||||
|  | @ -3187,7 +3388,7 @@ namespace Duality { | |||
| 	func_decl f = t.decl(); | ||||
| 	std::vector<Term> args; | ||||
| 	int nargs = t.num_args(); | ||||
| 	if(nargs == 0) | ||||
| 	if(nargs == 0 && f.get_decl_kind() == Uninterpreted) | ||||
| 	  ls->declare_constant(f);  // keep track of background constants
 | ||||
| 	for(int i = 0; i < nargs; i++) | ||||
| 	  args.push_back(SubstBoundRec(memo, subst, level, t.arg(i))); | ||||
|  |  | |||
|  | @ -19,7 +19,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -54,6 +54,7 @@ Revision History: | |||
| // #define KEEP_EXPANSIONS
 | ||||
| // #define USE_CACHING_RPFP
 | ||||
| // #define PROPAGATE_BEFORE_CHECK
 | ||||
| #define NEW_STRATIFIED_INLINING	 | ||||
| 
 | ||||
| #define USE_RPFP_CLONE | ||||
| #define USE_NEW_GEN_CANDS | ||||
|  | @ -82,7 +83,7 @@ namespace Duality { | |||
|       rpfp = _rpfp; | ||||
|     } | ||||
|     virtual void Extend(RPFP::Node *node){} | ||||
|     virtual void Update(RPFP::Node *node, const RPFP::Transformer &update){} | ||||
|     virtual void Update(RPFP::Node *node, const RPFP::Transformer &update, bool eager){} | ||||
|     virtual void Bound(RPFP::Node *node){} | ||||
|     virtual void Expand(RPFP::Edge *edge){} | ||||
|     virtual void AddCover(RPFP::Node *covered, std::vector<RPFP::Node *> &covering){} | ||||
|  | @ -94,6 +95,7 @@ namespace Duality { | |||
|     virtual void UpdateUnderapprox(RPFP::Node *node, const RPFP::Transformer &update){} | ||||
|     virtual void Reject(RPFP::Edge *edge, const std::vector<RPFP::Node *> &Children){} | ||||
|     virtual void Message(const std::string &msg){} | ||||
|     virtual void Depth(int){} | ||||
|     virtual ~Reporter(){} | ||||
|   }; | ||||
| 
 | ||||
|  | @ -124,6 +126,7 @@ namespace Duality { | |||
|       rpfp = _rpfp; | ||||
|       reporter = 0; | ||||
|       heuristic = 0; | ||||
|       unwinding = 0; | ||||
|       FullExpand = false; | ||||
|       NoConj = false; | ||||
|       FeasibleEdges = true; | ||||
|  | @ -131,6 +134,7 @@ namespace Duality { | |||
|       Report = false; | ||||
|       StratifiedInlining = false; | ||||
|       RecursionBound = -1; | ||||
|       BatchExpand = false; | ||||
|       { | ||||
| 	scoped_no_proof no_proofs_please(ctx.m()); | ||||
| #ifdef USE_RPFP_CLONE | ||||
|  | @ -151,6 +155,7 @@ namespace Duality { | |||
| #ifdef USE_NEW_GEN_CANDS | ||||
|       delete gen_cands_rpfp; | ||||
| #endif | ||||
|       if(unwinding) delete unwinding; | ||||
|     } | ||||
| 
 | ||||
| #ifdef USE_RPFP_CLONE | ||||
|  | @ -249,6 +254,19 @@ namespace Duality { | |||
|       virtual void Done() {} | ||||
|     }; | ||||
|      | ||||
|     /** The Proposer class proposes conjectures eagerly. These can come
 | ||||
|        from any source, including predicate abstraction, templates, or | ||||
|        previous solver runs. The proposed conjectures are checked | ||||
|        with low effort when the unwinding is expanded. | ||||
|     */ | ||||
| 
 | ||||
|     class Proposer { | ||||
|     public: | ||||
|       /** Given a node in the unwinding, propose some conjectures */ | ||||
|       virtual std::vector<RPFP::Transformer> &Propose(Node *node) = 0; | ||||
|       virtual ~Proposer(){}; | ||||
|     }; | ||||
| 
 | ||||
| 
 | ||||
|     class Covering; // see below
 | ||||
| 
 | ||||
|  | @ -278,6 +296,7 @@ namespace Duality { | |||
|     hash_map<Node *, Node *> underapprox_map; // maps underapprox nodes to the nodes they approximate
 | ||||
|     int last_decisions; | ||||
|     hash_set<Node *> overapproxes; | ||||
|     std::vector<Proposer *> proposers; | ||||
| 
 | ||||
| #ifdef BOUNDED | ||||
|     struct Counter { | ||||
|  | @ -292,24 +311,22 @@ namespace Duality { | |||
|     virtual bool Solve(){ | ||||
|       reporter = Report ? CreateStdoutReporter(rpfp) : new Reporter(rpfp); | ||||
| #ifndef LOCALIZE_CONJECTURES | ||||
|       heuristic = !cex.tree ? new Heuristic(rpfp) : new ReplayHeuristic(rpfp,cex); | ||||
|       heuristic = !cex.get_tree() ? new Heuristic(rpfp) : new ReplayHeuristic(rpfp,cex); | ||||
| #else | ||||
|       heuristic = !cex.tree ? (Heuristic *)(new LocalHeuristic(rpfp)) | ||||
|       heuristic = !cex.get_tree() ? (Heuristic *)(new LocalHeuristic(rpfp)) | ||||
| 	: (Heuristic *)(new ReplayHeuristic(rpfp,cex)); | ||||
| #endif | ||||
|       cex.tree = 0; // heuristic now owns it
 | ||||
|       cex.clear(); // in case we didn't use it for heuristic
 | ||||
|       if(unwinding) delete unwinding; | ||||
|       unwinding = new RPFP(rpfp->ls); | ||||
|       unwinding->HornClauses = rpfp->HornClauses; | ||||
|       indset = new Covering(this); | ||||
|       last_decisions = 0; | ||||
|       CreateEdgesByChildMap(); | ||||
|       CreateLeaves(); | ||||
| #ifndef TOP_DOWN | ||||
|       if(!StratifiedInlining){ | ||||
|         if(FeasibleEdges)NullaryCandidates(); | ||||
|         else InstantiateAllEdges(); | ||||
|       } | ||||
|       CreateInitialUnwinding(); | ||||
| #else | ||||
|       CreateLeaves(); | ||||
|       for(unsigned i = 0; i < leaves.size(); i++) | ||||
| 	if(!SatisfyUpperBound(leaves[i])) | ||||
| 	  return false; | ||||
|  | @ -321,11 +338,29 @@ namespace Duality { | |||
|       //  print_profile(std::cout);
 | ||||
|       delete indset; | ||||
|       delete heuristic; | ||||
|       delete unwinding; | ||||
|       // delete unwinding; // keep the unwinding for future mining of predicates
 | ||||
|       delete reporter; | ||||
|       for(unsigned i = 0; i < proposers.size(); i++) | ||||
| 	delete proposers[i]; | ||||
|       return res; | ||||
|     } | ||||
| 
 | ||||
|     void CreateInitialUnwinding(){ | ||||
|       if(!StratifiedInlining){ | ||||
| 	CreateLeaves(); | ||||
|         if(FeasibleEdges)NullaryCandidates(); | ||||
|         else InstantiateAllEdges(); | ||||
|       } | ||||
|       else { | ||||
| #ifdef NEW_STRATIFIED_INLINING	 | ||||
| 
 | ||||
| #else | ||||
| 	CreateLeaves(); | ||||
| #endif | ||||
|       } | ||||
|        | ||||
|     } | ||||
| 
 | ||||
|     void Cancel(){ | ||||
|       // TODO
 | ||||
|     } | ||||
|  | @ -346,15 +381,19 @@ namespace Duality { | |||
|     } | ||||
| #endif | ||||
| 
 | ||||
|     virtual void LearnFrom(Counterexample &old_cex){ | ||||
|       cex = old_cex; | ||||
|     virtual void LearnFrom(Solver *other_solver){ | ||||
|       // get the counterexample as a guide
 | ||||
|       cex.swap(other_solver->GetCounterexample()); | ||||
| 
 | ||||
|       // propose conjectures based on old unwinding
 | ||||
|       Duality *old_duality = dynamic_cast<Duality *>(other_solver); | ||||
|       if(old_duality) | ||||
| 	proposers.push_back(new HistoryProposer(old_duality,this)); | ||||
|     } | ||||
| 
 | ||||
|     /** Return the counterexample */ | ||||
|     virtual Counterexample GetCounterexample(){ | ||||
|       Counterexample res = cex; | ||||
|       cex.tree = 0; // Cex now belongs to caller
 | ||||
|       return res; | ||||
|     /** Return a reference to the counterexample */ | ||||
|     virtual Counterexample &GetCounterexample(){ | ||||
|       return cex; | ||||
|     } | ||||
| 
 | ||||
|     // options
 | ||||
|  | @ -365,6 +404,7 @@ namespace Duality { | |||
|     bool Report;         // spew on stdout
 | ||||
|     bool StratifiedInlining; // Do stratified inlining as preprocessing step
 | ||||
|     int RecursionBound;  // Recursion bound for bounded verification
 | ||||
|     bool BatchExpand; | ||||
|      | ||||
|     bool SetBoolOption(bool &opt, const std::string &value){ | ||||
|       if(value == "0") { | ||||
|  | @ -403,6 +443,9 @@ namespace Duality { | |||
|       if(option == "stratified_inlining"){ | ||||
|         return SetBoolOption(StratifiedInlining,value); | ||||
|       } | ||||
|       if(option == "batch_expand"){ | ||||
|         return SetBoolOption(BatchExpand,value); | ||||
|       } | ||||
|       if(option == "recursion_bound"){ | ||||
|         return SetIntOption(RecursionBound,value); | ||||
|       } | ||||
|  | @ -514,7 +557,11 @@ namespace Duality { | |||
| 	c.Children.resize(edge->Children.size()); | ||||
| 	for(unsigned j = 0; j < c.Children.size(); j++) | ||||
| 	  c.Children[j] = leaf_map[edge->Children[j]]; | ||||
| 	Extend(c); | ||||
| 	Node *new_node; | ||||
| 	Extend(c,new_node); | ||||
| #ifdef EARLY_EXPAND | ||||
| 	TryExpandNode(new_node); | ||||
| #endif | ||||
|       } | ||||
|       for(Unexpanded::iterator it = unexpanded.begin(), en = unexpanded.end(); it != en; ++it) | ||||
| 	indset->Add(*it); | ||||
|  | @ -766,16 +813,14 @@ namespace Duality { | |||
|     } | ||||
| 
 | ||||
| 
 | ||||
|     /* For stratified inlining, we need a topological sort of the
 | ||||
| 	nodes. */ | ||||
| 
 | ||||
|     hash_map<Node *, int> TopoSort; | ||||
|     int TopoSortCounter; | ||||
|     std::vector<Edge *> SortedEdges; | ||||
|      | ||||
|     void DoTopoSortRec(Node *node){ | ||||
|       if(TopoSort.find(node) != TopoSort.end()) | ||||
| 	return; | ||||
|       TopoSort[node] = TopoSortCounter++;  // just to break cycles
 | ||||
|       TopoSort[node] = INT_MAX;  // just to break cycles
 | ||||
|       Edge *edge = node->Outgoing; // note, this is just *one* outgoing edge
 | ||||
|       if(edge){ | ||||
| 	std::vector<Node *> &chs = edge->Children; | ||||
|  | @ -783,22 +828,81 @@ namespace Duality { | |||
| 	  DoTopoSortRec(chs[i]); | ||||
|       } | ||||
|       TopoSort[node] = TopoSortCounter++; | ||||
|       SortedEdges.push_back(edge); | ||||
|     } | ||||
| 
 | ||||
|     void DoTopoSort(){ | ||||
|       TopoSort.clear(); | ||||
|       SortedEdges.clear(); | ||||
|       TopoSortCounter = 0; | ||||
|       for(unsigned i = 0; i < nodes.size(); i++) | ||||
| 	DoTopoSortRec(nodes[i]); | ||||
|     } | ||||
| 
 | ||||
|      | ||||
|     int StratifiedLeafCount; | ||||
| 
 | ||||
| #ifdef NEW_STRATIFIED_INLINING	 | ||||
| 
 | ||||
|     /** Stratified inlining builds an initial layered unwinding before
 | ||||
| 	switching to the LAWI strategy. Currently the number of layers | ||||
| 	is one. Only nodes that are the targets of back edges are | ||||
| 	consider to be leaves. This assumes we have already computed a | ||||
| 	considered to be leaves. This assumes we have already computed a | ||||
|     */ | ||||
| 
 | ||||
|     bool DoStratifiedInlining(){ | ||||
|       DoTopoSort(); | ||||
|       int depth = 1; // TODO: make this an option
 | ||||
|       std::vector<hash_map<Node *,Node *> > unfolding_levels(depth+1); | ||||
|       for(int level = 1; level <= depth; level++) | ||||
| 	for(unsigned i = 0; i < SortedEdges.size(); i++){ | ||||
| 	  Edge *edge = SortedEdges[i]; | ||||
| 	  Node *parent = edge->Parent; | ||||
| 	  std::vector<Node *> &chs = edge->Children; | ||||
| 	  std::vector<Node *> my_chs(chs.size()); | ||||
| 	  for(unsigned j = 0; j < chs.size(); j++){ | ||||
| 	    Node *child = chs[j]; | ||||
| 	    int ch_level = TopoSort[child] >= TopoSort[parent] ? level-1 : level; | ||||
| 	    if(unfolding_levels[ch_level].find(child) == unfolding_levels[ch_level].end()){ | ||||
| 	      if(ch_level == 0)  | ||||
| 		unfolding_levels[0][child] = CreateLeaf(child); | ||||
| 	      else | ||||
| 		throw InternalError("in levelized unwinding"); | ||||
| 	    } | ||||
| 	    my_chs[j] = unfolding_levels[ch_level][child]; | ||||
| 	  } | ||||
| 	  Candidate cand; cand.edge = edge; cand.Children = my_chs; | ||||
| 	  Node *new_node; | ||||
| 	  bool ok = Extend(cand,new_node); | ||||
| 	  MarkExpanded(new_node); // we don't expand here -- just mark it done
 | ||||
| 	  if(!ok) return false; // got a counterexample
 | ||||
| 	  unfolding_levels[level][parent] = new_node; | ||||
| 	} | ||||
|       return true; | ||||
|     } | ||||
| 
 | ||||
|     Node *CreateLeaf(Node *node){ | ||||
|       RPFP::Node *nchild = CreateNodeInstance(node); | ||||
|       MakeLeaf(nchild, /* do_not_expand = */ true); | ||||
|       nchild->Annotation.SetEmpty(); | ||||
|       return nchild; | ||||
|     }       | ||||
| 
 | ||||
|     void MarkExpanded(Node *node){ | ||||
|       if(unexpanded.find(node) != unexpanded.end()){ | ||||
| 	unexpanded.erase(node); | ||||
| 	insts_of_node[node->map].push_back(node); | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
| #else | ||||
| 
 | ||||
|     /** In stratified inlining, we build the unwinding from the bottom
 | ||||
| 	down, trying to satisfy the node bounds. We do this as a pre-pass, | ||||
| 	limiting the expansion. If we get a counterexample, we are done, | ||||
| 	else we continue as usual expanding the unwinding upward. | ||||
|     */ | ||||
|      | ||||
|     int StratifiedLeafCount; | ||||
| 
 | ||||
|     bool DoStratifiedInlining(){ | ||||
|       timer_start("StratifiedInlining"); | ||||
|  | @ -821,6 +925,8 @@ namespace Duality { | |||
|       return true; | ||||
|     } | ||||
|      | ||||
| #endif | ||||
| 
 | ||||
|     /** Here, we do the downward expansion for stratified inlining */ | ||||
| 
 | ||||
|     hash_map<Node *, Node *> LeafMap, StratifiedLeafMap; | ||||
|  | @ -907,9 +1013,14 @@ namespace Duality { | |||
| 	} | ||||
| 	Candidate cand = candidates.front(); | ||||
| 	candidates.pop_front(); | ||||
| 	if(CandidateFeasible(cand)) | ||||
| 	  if(!Extend(cand)) | ||||
| 	if(CandidateFeasible(cand)){ | ||||
| 	  Node *new_node; | ||||
| 	  if(!Extend(cand,new_node)) | ||||
| 	    return false; | ||||
| #ifdef EARLY_EXPAND | ||||
| 	  TryExpandNode(new_node); | ||||
| #endif | ||||
| 	} | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|  | @ -929,9 +1040,9 @@ namespace Duality { | |||
| 	 | ||||
| 
 | ||||
|     Node *CreateUnderapproxNode(Node *node){ | ||||
|       // cex.tree->ComputeUnderapprox(cex.root,0);
 | ||||
|       // cex.get_tree()->ComputeUnderapprox(cex.get_root(),0);
 | ||||
|       RPFP::Node *under_node = CreateNodeInstance(node->map /* ,StratifiedLeafCount-- */); | ||||
|       under_node->Annotation.IntersectWith(cex.root->Underapprox); | ||||
|       under_node->Annotation.IntersectWith(cex.get_root()->Underapprox); | ||||
|       AddThing(under_node->Annotation.Formula); | ||||
|       Edge *e = unwinding->CreateLowerBoundEdge(under_node); | ||||
|       under_node->Annotation.SetFull(); // allow this node to cover others
 | ||||
|  | @ -967,9 +1078,8 @@ namespace Duality { | |||
| 	ExpandNodeFromCoverFail(node); | ||||
|       } | ||||
| #endif | ||||
|       if(_cex) *_cex = cex; | ||||
|       else delete cex.tree;    // delete the cex if not required
 | ||||
|       cex.tree = 0; | ||||
|       if(_cex) (*_cex).swap(cex); // return the cex if asked
 | ||||
|       cex.clear();                // throw away the useless cex
 | ||||
|       node->Bound = save; // put back original bound
 | ||||
|       timer_stop("ProveConjecture"); | ||||
|       return false; | ||||
|  | @ -1349,16 +1459,20 @@ namespace Duality { | |||
|       } | ||||
|     } | ||||
| 
 | ||||
|     bool UpdateNodeToNode(Node *node, Node *top){ | ||||
|       if(!node->Annotation.SubsetEq(top->Annotation)){ | ||||
| 	reporter->Update(node,top->Annotation); | ||||
| 	indset->Update(node,top->Annotation); | ||||
|     bool Update(Node *node, const RPFP::Transformer &fact, bool eager=false){ | ||||
|       if(!node->Annotation.SubsetEq(fact)){ | ||||
| 	reporter->Update(node,fact,eager); | ||||
| 	indset->Update(node,fact); | ||||
| 	updated_nodes.insert(node->map); | ||||
| 	node->Annotation.IntersectWith(top->Annotation); | ||||
| 	node->Annotation.IntersectWith(fact); | ||||
| 	return true; | ||||
|       } | ||||
|       return false; | ||||
|     } | ||||
|      | ||||
|     bool UpdateNodeToNode(Node *node, Node *top){ | ||||
|       return Update(node,top->Annotation); | ||||
|     } | ||||
| 
 | ||||
|     /** Update the unwinding solution, using an interpolant for the
 | ||||
| 	derivation tree. */ | ||||
|  | @ -1400,8 +1514,7 @@ namespace Duality { | |||
|       // std::cout << "decisions: " << (end_decs - start_decs)  << std::endl;
 | ||||
|       last_decisions = end_decs - start_decs; | ||||
|       if(res){ | ||||
| 	cex.tree = dt.tree; | ||||
| 	cex.root = dt.top; | ||||
| 	cex.set(dt.tree,dt.top); // note tree is now owned by cex
 | ||||
| 	if(UseUnderapprox){ | ||||
| 	  UpdateWithCounterexample(node,dt.tree,dt.top); | ||||
| 	} | ||||
|  | @ -1413,6 +1526,64 @@ namespace Duality { | |||
|       delete dtp; | ||||
|       return !res; | ||||
|     } | ||||
|      | ||||
|     /* For a given node in the unwinding, get conjectures from the
 | ||||
|        proposers and check them locally. Update the node with any true | ||||
|        conjectures. | ||||
|      */ | ||||
| 
 | ||||
|     void DoEagerDeduction(Node *node){ | ||||
|       for(unsigned i = 0; i < proposers.size(); i++){ | ||||
| 	const std::vector<RPFP::Transformer> &conjectures = proposers[i]->Propose(node); | ||||
| 	for(unsigned j = 0; j < conjectures.size(); j++){  | ||||
| 	  const RPFP::Transformer &conjecture = conjectures[j]; | ||||
| 	  RPFP::Transformer bound(conjecture); | ||||
| 	  std::vector<expr> conj_vec; | ||||
| 	  unwinding->CollectConjuncts(bound.Formula,conj_vec); | ||||
| 	  for(unsigned k = 0; k < conj_vec.size(); k++){ | ||||
| 	    bound.Formula = conj_vec[k]; | ||||
| 	    if(CheckEdgeCaching(node->Outgoing,bound) == unsat) | ||||
| 	      Update(node,bound, /* eager = */ true); | ||||
| 	    //else
 | ||||
| 	    //std::cout << "conjecture failed\n";
 | ||||
| 	  } | ||||
| 	} | ||||
|       } | ||||
|     } | ||||
| 
 | ||||
|        | ||||
|     check_result CheckEdge(RPFP *checker, Edge *edge){ | ||||
|       Node *root = edge->Parent; | ||||
|       checker->Push(); | ||||
|       checker->AssertNode(root); | ||||
|       checker->AssertEdge(edge,1,true); | ||||
|       check_result res = checker->Check(root); | ||||
|       checker->Pop(1); | ||||
|       return res; | ||||
|     } | ||||
| 
 | ||||
|     check_result CheckEdgeCaching(Edge *unwinding_edge, const RPFP::Transformer &bound){ | ||||
| 
 | ||||
|       // use a dedicated solver for this edge
 | ||||
|       // TODO: can this mess be hidden somehow?
 | ||||
| 
 | ||||
|       RPFP_caching *checker = gen_cands_rpfp; // TODO: a good choice?
 | ||||
|       Edge *edge = unwinding_edge->map; // get the edge in the original RPFP
 | ||||
|       RPFP_caching::scoped_solver_for_edge ssfe(checker,edge,true /* models */, true /*axioms*/); | ||||
|       Edge *checker_edge = checker->GetEdgeClone(edge); | ||||
|        | ||||
|       // copy the annotations and bound to the clone
 | ||||
|       Node *root = checker_edge->Parent; | ||||
|       root->Bound = bound; | ||||
|       for(unsigned j = 0; j < checker_edge->Children.size(); j++){ | ||||
| 	Node *oc = unwinding_edge->Children[j]; | ||||
| 	Node *nc = checker_edge->Children[j]; | ||||
| 	nc->Annotation = oc->Annotation; | ||||
|       } | ||||
|        | ||||
|       return CheckEdge(checker,checker_edge); | ||||
|     } | ||||
| 
 | ||||
| 
 | ||||
|     /* If the counterexample derivation is partial due to
 | ||||
|        use of underapproximations, complete it. */ | ||||
|  | @ -1421,10 +1592,7 @@ namespace Duality { | |||
|       DerivationTree dt(this,unwinding,reporter,heuristic,FullExpand);  | ||||
|       bool res = dt.Derive(unwinding,node,UseUnderapprox,true); // build full tree
 | ||||
|       if(!res) throw "Duality internal error in BuildFullCex"; | ||||
|       if(cex.tree) | ||||
| 	delete cex.tree; | ||||
|       cex.tree = dt.tree; | ||||
|       cex.root = dt.top; | ||||
|       cex.set(dt.tree,dt.top); | ||||
|     } | ||||
|      | ||||
|     void UpdateBackEdges(Node *node){ | ||||
|  | @ -1447,25 +1615,23 @@ namespace Duality { | |||
|     } | ||||
| 
 | ||||
|     /** Extend the unwinding, keeping it solved. */ | ||||
|     bool Extend(Candidate &cand){ | ||||
|     bool Extend(Candidate &cand, Node *&node){ | ||||
|       timer_start("Extend"); | ||||
|       Node *node = CreateNodeInstance(cand.edge->Parent); | ||||
|       node = CreateNodeInstance(cand.edge->Parent); | ||||
|       CreateEdgeInstance(cand.edge,node,cand.Children); | ||||
|       UpdateBackEdges(node); | ||||
|       reporter->Extend(node); | ||||
|       bool res = SatisfyUpperBound(node); | ||||
|       DoEagerDeduction(node); // first be eager...
 | ||||
|       bool res = SatisfyUpperBound(node); // then be lazy
 | ||||
|       if(res) indset->CloseDescendants(node); | ||||
|       else { | ||||
| #ifdef UNDERAPPROX_NODES | ||||
| 	ExpandUnderapproxNodes(cex.tree, cex.root); | ||||
| 	ExpandUnderapproxNodes(cex.get_tree(), cex.get_root()); | ||||
| #endif | ||||
| 	if(UseUnderapprox) BuildFullCex(node); | ||||
| 	timer_stop("Extend"); | ||||
| 	return res; | ||||
|       } | ||||
| #ifdef EARLY_EXPAND | ||||
|       TryExpandNode(node); | ||||
| #endif | ||||
|       timer_stop("Extend"); | ||||
|       return res; | ||||
|     } | ||||
|  | @ -1563,6 +1729,8 @@ namespace Duality { | |||
|     class DerivationTree { | ||||
|     public: | ||||
| 
 | ||||
|       virtual ~DerivationTree(){} | ||||
| 
 | ||||
|       DerivationTree(Duality *_duality, RPFP *rpfp, Reporter *_reporter, Heuristic *_heuristic, bool _full_expand)  | ||||
| 	: slvr(rpfp->slvr()), | ||||
| 	  ctx(rpfp->ctx) | ||||
|  | @ -1912,10 +2080,28 @@ namespace Duality { | |||
| 	stack.push_back(stack_entry()); | ||||
|       } | ||||
| 
 | ||||
|       struct DoRestart {}; | ||||
| 
 | ||||
|       virtual bool Build(){ | ||||
| 	while (true) { | ||||
| 	  try { | ||||
| 	    return BuildMain(); | ||||
| 	  } | ||||
| 	  catch (const DoRestart &) { | ||||
| 	    // clear the stack and try again
 | ||||
| 	    updated_nodes.clear(); | ||||
| 	    while(stack.size() > 1) | ||||
| 	      PopLevel(); | ||||
| 	    reporter->Message("restarted"); | ||||
| 	  } | ||||
| 	} | ||||
|       } | ||||
| 
 | ||||
|       bool BuildMain(){ | ||||
| 
 | ||||
| 	stack.back().level = tree->slvr().get_scope_level(); | ||||
| 	bool was_sat = true; | ||||
| 	int update_failures = 0; | ||||
| 
 | ||||
| 	while (true) | ||||
| 	{ | ||||
|  | @ -1924,6 +2110,7 @@ namespace Duality { | |||
| 	  unsigned slvr_level = tree->slvr().get_scope_level(); | ||||
| 	  if(slvr_level != stack.back().level) | ||||
| 	    throw "stacks out of sync!"; | ||||
| 	  reporter->Depth(stack.size()); | ||||
| 
 | ||||
| 	  //	  res = tree->Solve(top, 1);            // incremental solve, keep interpolants for one pop
 | ||||
| 	  check_result foo = tree->Check(top); | ||||
|  | @ -1942,11 +2129,17 @@ namespace Duality { | |||
| #ifdef NO_GENERALIZE | ||||
| 		node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify(); | ||||
| #else | ||||
| 		if(expansions.size() == 1 && NodeTooComplicated(node)) | ||||
| 		  SimplifyNode(node); | ||||
| 		else | ||||
| 		  node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify(); | ||||
| 		Generalize(node); | ||||
| 		try { | ||||
| 		  if(expansions.size() == 1 && NodeTooComplicated(node)) | ||||
| 		    SimplifyNode(node); | ||||
| 		  else | ||||
| 		    node->Annotation.Formula = tree->RemoveRedundancy(node->Annotation.Formula).simplify(); | ||||
| 		  Generalize(node); | ||||
| 		} | ||||
| 		catch(const RPFP::Bad &){ | ||||
| 		  // bad interpolants can get us here
 | ||||
| 		  throw DoRestart(); | ||||
| 		} | ||||
| #endif | ||||
| 		if(RecordUpdate(node)) | ||||
| 		  update_count++; | ||||
|  | @ -1954,36 +2147,20 @@ namespace Duality { | |||
| 		  heuristic->Update(node->map); // make it less likely to expand this node in future
 | ||||
| 	      } | ||||
| 	      if(update_count == 0){ | ||||
| 		if(was_sat) | ||||
| 		  throw Incompleteness(); | ||||
| 		if(was_sat){ | ||||
| 		  update_failures++; | ||||
| 		  if(update_failures > 10) | ||||
| 		    throw Incompleteness(); | ||||
| 		} | ||||
| 		reporter->Message("backtracked without learning"); | ||||
| 	      } | ||||
| 	      else update_failures = 0; | ||||
| 	    } | ||||
| 	    tree->ComputeProofCore(); // need to compute the proof core before popping solver
 | ||||
| 	    bool propagated = false; | ||||
| 	    while(1) { | ||||
| 	      std::vector<Node *> &expansions = stack.back().expansions; | ||||
| 	      bool prev_level_used = LevelUsedInProof(stack.size()-2); // need to compute this before pop
 | ||||
| 	      tree->Pop(1);	 | ||||
| 	      hash_set<Node *> leaves_to_remove; | ||||
| 	      for(unsigned i = 0; i < expansions.size(); i++){ | ||||
| 		Node *node = expansions[i]; | ||||
| 		//	      if(node != top)
 | ||||
| 		//		tree->ConstrainParent(node->Incoming[0],node);
 | ||||
| 		std::vector<Node *> &cs = node->Outgoing->Children; | ||||
| 		for(unsigned i = 0; i < cs.size(); i++){ | ||||
| 		  leaves_to_remove.insert(cs[i]); | ||||
| 		  UnmapNode(cs[i]); | ||||
| 		  if(std::find(updated_nodes.begin(),updated_nodes.end(),cs[i]) != updated_nodes.end()) | ||||
| 		    throw "help!"; | ||||
| 		} | ||||
| 	      } | ||||
| 	      RemoveLeaves(leaves_to_remove); // have to do this before actually deleting the children
 | ||||
| 	      for(unsigned i = 0; i < expansions.size(); i++){ | ||||
| 		Node *node = expansions[i]; | ||||
| 		RemoveExpansion(node); | ||||
| 	      } | ||||
| 	      stack.pop_back(); | ||||
| 	      PopLevel(); | ||||
| 	      if(stack.size() == 1)break; | ||||
| 	      if(prev_level_used){ | ||||
| 		Node *node = stack.back().expansions[0]; | ||||
|  | @ -2011,7 +2188,7 @@ namespace Duality { | |||
| 	  } | ||||
| 	  else { | ||||
| 	    was_sat = true; | ||||
| 	    tree->Push(); | ||||
| 	    tree->Push(); | ||||
| 	    std::vector<Node *> &expansions = stack.back().expansions; | ||||
| #ifndef NO_DECISIONS | ||||
| 	    for(unsigned i = 0; i < expansions.size(); i++){ | ||||
|  | @ -2022,11 +2199,17 @@ namespace Duality { | |||
| 	    if(tree->slvr().check() == unsat) | ||||
| 	      throw "help!"; | ||||
| #endif | ||||
| 	    stack.push_back(stack_entry()); | ||||
| 	    stack.back().level = tree->slvr().get_scope_level(); | ||||
| 	    if(ExpandSomeNodes(false,1)){ | ||||
| 	      continue; | ||||
| 	    int expand_max = 1; | ||||
| 	    if(0&&duality->BatchExpand){ | ||||
| 	      int thing = stack.size() * 0.1; | ||||
| 	      expand_max = std::max(1,thing); | ||||
| 	      if(expand_max > 1) | ||||
| 		std::cout << "foo!\n"; | ||||
| 	    } | ||||
| 
 | ||||
| 	    if(ExpandSomeNodes(false,expand_max)) | ||||
| 	      continue; | ||||
| 	    tree->Pop(1); | ||||
| 	    while(stack.size() > 1){ | ||||
| 	      tree->Pop(1);	 | ||||
| 	      stack.pop_back(); | ||||
|  | @ -2036,6 +2219,30 @@ namespace Duality { | |||
| 	} | ||||
|       } | ||||
|        | ||||
|       void PopLevel(){ | ||||
| 	std::vector<Node *> &expansions = stack.back().expansions; | ||||
| 	tree->Pop(1);	 | ||||
| 	hash_set<Node *> leaves_to_remove; | ||||
| 	for(unsigned i = 0; i < expansions.size(); i++){ | ||||
| 	  Node *node = expansions[i]; | ||||
| 	  //	      if(node != top)
 | ||||
| 	  //		tree->ConstrainParent(node->Incoming[0],node);
 | ||||
| 	  std::vector<Node *> &cs = node->Outgoing->Children; | ||||
| 	  for(unsigned i = 0; i < cs.size(); i++){ | ||||
| 	    leaves_to_remove.insert(cs[i]); | ||||
| 	    UnmapNode(cs[i]); | ||||
| 	    if(std::find(updated_nodes.begin(),updated_nodes.end(),cs[i]) != updated_nodes.end()) | ||||
| 	      throw "help!"; | ||||
| 	  } | ||||
| 	} | ||||
| 	RemoveLeaves(leaves_to_remove); // have to do this before actually deleting the children
 | ||||
| 	for(unsigned i = 0; i < expansions.size(); i++){ | ||||
| 	  Node *node = expansions[i]; | ||||
| 	  RemoveExpansion(node); | ||||
| 	} | ||||
| 	stack.pop_back(); | ||||
|       } | ||||
| 	 | ||||
|       bool NodeTooComplicated(Node *node){ | ||||
| 	int ops = tree->CountOperators(node->Annotation.Formula); | ||||
| 	if(ops > 10) return true; | ||||
|  | @ -2047,7 +2254,13 @@ namespace Duality { | |||
| 	// have to destroy the old proof to get a new interpolant
 | ||||
| 	timer_start("SimplifyNode"); | ||||
| 	tree->PopPush(); | ||||
| 	tree->InterpolateByCases(top,node); | ||||
| 	try { | ||||
| 	  tree->InterpolateByCases(top,node); | ||||
| 	} | ||||
| 	catch(const RPFP::Bad&){ | ||||
| 	  timer_stop("SimplifyNode"); | ||||
| 	  throw RPFP::Bad(); | ||||
| 	} | ||||
| 	timer_stop("SimplifyNode"); | ||||
|       } | ||||
| 
 | ||||
|  | @ -2085,6 +2298,8 @@ namespace Duality { | |||
|       std::list<Node *> updated_nodes; | ||||
| 
 | ||||
|       virtual void ExpandNode(RPFP::Node *p){ | ||||
| 	stack.push_back(stack_entry()); | ||||
| 	stack.back().level = tree->slvr().get_scope_level(); | ||||
| 	stack.back().expansions.push_back(p); | ||||
| 	DerivationTree::ExpandNode(p); | ||||
| 	std::vector<Node *> &new_nodes = p->Outgoing->Children; | ||||
|  | @ -2442,7 +2657,7 @@ namespace Duality { | |||
|       }       | ||||
| 
 | ||||
|       bool ContainsCex(Node *node, Counterexample &cex){ | ||||
| 	expr val = cex.tree->Eval(cex.root->Outgoing,node->Annotation.Formula); | ||||
| 	expr val = cex.get_tree()->Eval(cex.get_root()->Outgoing,node->Annotation.Formula); | ||||
| 	return eq(val,parent->ctx.bool_val(true)); | ||||
|       } | ||||
| 
 | ||||
|  | @ -2461,15 +2676,15 @@ namespace Duality { | |||
| 	  Node *other = insts[i]; | ||||
| 	  if(CouldCover(node,other)){ | ||||
| 	    reporter()->Forcing(node,other); | ||||
| 	    if(cex.tree && !ContainsCex(other,cex)) | ||||
| 	    if(cex.get_tree() && !ContainsCex(other,cex)) | ||||
| 	      continue; | ||||
| 	    if(cex.tree) {delete cex.tree; cex.tree = 0;} | ||||
| 	    cex.clear(); | ||||
| 	    if(parent->ProveConjecture(node,other->Annotation,other,&cex)) | ||||
| 	      if(CloseDescendants(node)) | ||||
| 		return true; | ||||
| 	  } | ||||
| 	} | ||||
| 	if(cex.tree) {delete cex.tree; cex.tree = 0;} | ||||
| 	cex.clear(); | ||||
| 	return false; | ||||
|       } | ||||
| #else | ||||
|  | @ -2568,13 +2783,12 @@ namespace Duality { | |||
|       Counterexample old_cex; | ||||
|     public: | ||||
|       ReplayHeuristic(RPFP *_rpfp, Counterexample &_old_cex) | ||||
| 	: Heuristic(_rpfp), old_cex(_old_cex) | ||||
| 	: Heuristic(_rpfp) | ||||
|       { | ||||
| 	old_cex.swap(_old_cex); // take ownership from caller
 | ||||
|       } | ||||
| 
 | ||||
|       ~ReplayHeuristic(){ | ||||
| 	if(old_cex.tree) | ||||
| 	  delete old_cex.tree; | ||||
|       } | ||||
| 
 | ||||
|       // Maps nodes of derivation tree into old cex
 | ||||
|  | @ -2582,9 +2796,7 @@ namespace Duality { | |||
|        | ||||
|       void Done() { | ||||
| 	cex_map.clear(); | ||||
| 	if(old_cex.tree) | ||||
| 	  delete old_cex.tree; | ||||
| 	old_cex.tree = 0; // only replay once!
 | ||||
| 	old_cex.clear(); | ||||
|       } | ||||
| 
 | ||||
|       void ShowNodeAndChildren(Node *n){ | ||||
|  | @ -2606,7 +2818,7 @@ namespace Duality { | |||
|       } | ||||
| 
 | ||||
|       virtual void ChooseExpand(const std::set<RPFP::Node *> &choices, std::set<RPFP::Node *> &best, bool high_priority, bool best_only){ | ||||
| 	if(!high_priority || !old_cex.tree){ | ||||
| 	if(!high_priority || !old_cex.get_tree()){ | ||||
| 	  Heuristic::ChooseExpand(choices,best,false); | ||||
| 	  return; | ||||
| 	} | ||||
|  | @ -2615,7 +2827,7 @@ namespace Duality { | |||
| 	for(std::set<Node *>::iterator it = choices.begin(), en = choices.end(); it != en; ++it){ | ||||
| 	  Node *node = (*it); | ||||
| 	  if(cex_map.empty()) | ||||
| 	    cex_map[node] = old_cex.root;  // match the root nodes
 | ||||
| 	    cex_map[node] = old_cex.get_root();  // match the root nodes
 | ||||
| 	  if(cex_map.find(node) == cex_map.end()){ // try to match an unmatched node
 | ||||
| 	    Node *parent = node->Incoming[0]->Parent; // assumes we are a tree!
 | ||||
| 	    if(cex_map.find(parent) == cex_map.end()) | ||||
|  | @ -2641,7 +2853,7 @@ namespace Duality { | |||
| 	  Node *old_node = cex_map[node]; | ||||
| 	  if(!old_node) | ||||
| 	    unmatched.insert(node); | ||||
| 	  else if(old_cex.tree->Empty(old_node)) | ||||
| 	  else if(old_cex.get_tree()->Empty(old_node)) | ||||
| 	    unmatched.insert(node); | ||||
| 	  else | ||||
| 	    matched.insert(node); | ||||
|  | @ -2715,7 +2927,120 @@ namespace Duality { | |||
|       } | ||||
|     }; | ||||
| 
 | ||||
|     /** This proposer class generates conjectures based on the
 | ||||
| 	unwinding generated by a previous solver. The assumption is | ||||
| 	that the previous solver was working on a different | ||||
| 	abstraction of the same system. The trick is to adapt the | ||||
| 	annotations in the old unwinding to the new predicates.  We | ||||
| 	start by generating a map from predicates and parameters in | ||||
| 	the old problem to the new.  | ||||
| 
 | ||||
| 	HACK: mapping is done by cheesy name comparison. | ||||
|     */ | ||||
|      | ||||
|     class HistoryProposer : public Proposer | ||||
|     { | ||||
|       Duality *old_solver; | ||||
|       Duality *new_solver; | ||||
|       hash_map<Node *, std::vector<RPFP::Transformer> > conjectures; | ||||
| 
 | ||||
|     public: | ||||
|       /** Construct a history proposer. */ | ||||
|       HistoryProposer(Duality *_old_solver, Duality *_new_solver) | ||||
| 	: old_solver(_old_solver), new_solver(_new_solver) { | ||||
| 
 | ||||
| 	// tricky: names in the axioms may have changed -- map them
 | ||||
| 	hash_set<func_decl> &old_constants = old_solver->unwinding->ls->get_constants(); | ||||
| 	hash_set<func_decl> &new_constants = new_solver->rpfp->ls->get_constants(); | ||||
| 	hash_map<std::string,func_decl> cmap; | ||||
|         for(hash_set<func_decl>::iterator it = new_constants.begin(), en = new_constants.end(); it != en; ++it) | ||||
| 	  cmap[GetKey(*it)] = *it; | ||||
| 	hash_map<func_decl,func_decl> bckg_map; | ||||
|         for(hash_set<func_decl>::iterator it = old_constants.begin(), en = old_constants.end(); it != en; ++it){ | ||||
| 	  func_decl f = new_solver->ctx.translate(*it); // move to new context
 | ||||
| 	  if(cmap.find(GetKey(f)) != cmap.end()) | ||||
| 	    bckg_map[f] = cmap[GetKey(f)]; | ||||
| 	  // else
 | ||||
| 	  //  std::cout << "constant not matched\n";
 | ||||
| 	} | ||||
| 	 | ||||
| 	RPFP *old_unwinding = old_solver->unwinding; | ||||
| 	hash_map<std::string, std::vector<Node *> > pred_match; | ||||
| 
 | ||||
| 	// index all the predicates in the old unwinding
 | ||||
| 	for(unsigned i = 0; i < old_unwinding->nodes.size(); i++){ | ||||
| 	  Node *node = old_unwinding->nodes[i]; | ||||
| 	  std::string key = GetKey(node); | ||||
| 	  pred_match[key].push_back(node); | ||||
| 	} | ||||
| 
 | ||||
| 	// match with predicates in the new RPFP
 | ||||
| 	RPFP *rpfp = new_solver->rpfp; | ||||
| 	for(unsigned i = 0; i < rpfp->nodes.size(); i++){ | ||||
| 	  Node *node = rpfp->nodes[i]; | ||||
| 	  std::string key = GetKey(node); | ||||
| 	  std::vector<Node *> &matches = pred_match[key]; | ||||
| 	  for(unsigned j = 0; j < matches.size(); j++) | ||||
| 	    MatchNodes(node,matches[j],bckg_map); | ||||
| 	} | ||||
|       } | ||||
|        | ||||
|       virtual std::vector<RPFP::Transformer> &Propose(Node *node){ | ||||
| 	return conjectures[node->map]; | ||||
|       } | ||||
| 
 | ||||
|       virtual ~HistoryProposer(){ | ||||
|       }; | ||||
| 
 | ||||
|     private: | ||||
|       void MatchNodes(Node *new_node, Node *old_node, hash_map<func_decl,func_decl> &bckg_map){ | ||||
| 	if(old_node->Annotation.IsFull()) | ||||
| 	  return; // don't conjecture true!
 | ||||
| 	hash_map<std::string, expr> var_match; | ||||
| 	std::vector<expr> &new_params = new_node->Annotation.IndParams; | ||||
| 	// Index the new parameters by their keys
 | ||||
| 	for(unsigned i = 0; i < new_params.size(); i++) | ||||
| 	  var_match[GetKey(new_params[i])] = new_params[i]; | ||||
| 	RPFP::Transformer &old = old_node->Annotation; | ||||
| 	std::vector<expr> from_params = old.IndParams; | ||||
| 	for(unsigned j = 0; j < from_params.size(); j++) | ||||
| 	  from_params[j] = new_solver->ctx.translate(from_params[j]); // get in new context
 | ||||
| 	std::vector<expr> to_params = from_params; | ||||
| 	for(unsigned j = 0; j < to_params.size(); j++){ | ||||
| 	  std::string key = GetKey(to_params[j]); | ||||
| 	  if(var_match.find(key) == var_match.end()){ | ||||
| 	    // std::cout << "unmatched parameter!\n";
 | ||||
| 	    return; | ||||
| 	  } | ||||
| 	  to_params[j] = var_match[key]; | ||||
| 	} | ||||
| 	expr fmla = new_solver->ctx.translate(old.Formula); // get in new context
 | ||||
| 	fmla = new_solver->rpfp->SubstParams(old.IndParams,to_params,fmla); // substitute parameters
 | ||||
| 	hash_map<ast,expr> memo; | ||||
| 	fmla = new_solver->rpfp->SubstRec(memo,bckg_map,fmla); // substitute background constants
 | ||||
| 	RPFP::Transformer new_annot = new_node->Annotation; | ||||
| 	new_annot.Formula = fmla; | ||||
| 	conjectures[new_node].push_back(new_annot); | ||||
|       } | ||||
|        | ||||
|       // We match names by removing suffixes beginning with double at sign
 | ||||
| 
 | ||||
|       std::string GetKey(Node *node){ | ||||
| 	return GetKey(node->Name); | ||||
|       } | ||||
| 
 | ||||
|       std::string GetKey(const expr &var){ | ||||
| 	return GetKey(var.decl()); | ||||
|       } | ||||
| 
 | ||||
|       std::string GetKey(const func_decl &f){ | ||||
| 	std::string name = f.name().str(); | ||||
| 	int idx = name.find("@@"); | ||||
| 	if(idx >= 0) | ||||
| 	  name.erase(idx); | ||||
| 	return name; | ||||
|       } | ||||
|     }; | ||||
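An illustrative, standalone sketch (not part of this commit; names such as "Inv@@2" are made up) of the name-key matching the HistoryProposer relies on: the "@@"-suffix attached to predicate instances is stripped, so instances produced by the old and new solvers compare equal by name.

#include <cassert>
#include <string>

// Hypothetical mirror of HistoryProposer::GetKey: drop everything from the
// first "@@" onward, leaving the bare predicate name as the matching key.
static std::string get_key(const std::string &name) {
    std::string key = name;
    std::string::size_type at = key.find("@@");
    if (at != std::string::npos)
        key.erase(at);
    return key;
}

int main() {
    assert(get_key("Inv@@2") == "Inv");              // suffix removed
    assert(get_key("Inv@@2") == get_key("Inv@@7"));  // different instances, same key
    assert(get_key("Init") == "Init");               // no suffix: name unchanged
    return 0;
}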
|   }; | ||||
| 
 | ||||
| 
 | ||||
|  | @ -2723,8 +3048,9 @@ namespace Duality { | |||
|     std::ostream &s; | ||||
|   public: | ||||
|     StreamReporter(RPFP *_rpfp, std::ostream &_s) | ||||
|       : Reporter(_rpfp), s(_s) {event = 0;} | ||||
|       : Reporter(_rpfp), s(_s) {event = 0; depth = -1;} | ||||
|     int event; | ||||
|     int depth; | ||||
|     void ev(){ | ||||
|       s << "[" << event++ << "]" ; | ||||
|     } | ||||
|  | @ -2735,23 +3061,30 @@ namespace Duality { | |||
| 	s << " " << rps[i]->number; | ||||
|       s << std::endl; | ||||
|     } | ||||
|     virtual void Update(RPFP::Node *node, const RPFP::Transformer &update){ | ||||
|     virtual void Update(RPFP::Node *node, const RPFP::Transformer &update, bool eager){ | ||||
|       ev(); s << "update " << node->number << " " << node->Name.name()  << ": "; | ||||
|       rpfp->Summarize(update.Formula); | ||||
|       std::cout << std::endl; | ||||
|       if(depth > 0) s << " (depth=" << depth << ")"; | ||||
|       if(eager) s << " (eager)"; | ||||
|       s << std::endl; | ||||
|     } | ||||
|     virtual void Bound(RPFP::Node *node){ | ||||
|       ev(); s << "check " << node->number << std::endl; | ||||
|     } | ||||
|     virtual void Expand(RPFP::Edge *edge){ | ||||
|       RPFP::Node *node = edge->Parent; | ||||
|       ev(); s << "expand " << node->map->number << " " << node->Name.name() << std::endl; | ||||
|       ev(); s << "expand " << node->map->number << " " << node->Name.name(); | ||||
|       if(depth > 0) s << " (depth=" << depth << ")"; | ||||
|       s << std::endl; | ||||
|     } | ||||
|     virtual void Depth(int d){ | ||||
|       depth = d; | ||||
|     } | ||||
|     virtual void AddCover(RPFP::Node *covered, std::vector<RPFP::Node *> &covering){ | ||||
|       ev(); s << "cover " << covered->Name.name() << ": " << covered->number << " by "; | ||||
|       for(unsigned i = 0; i < covering.size(); i++) | ||||
| 	std::cout << covering[i]->number << " "; | ||||
|       std::cout << std::endl; | ||||
| 	s << covering[i]->number << " "; | ||||
|       s << std::endl; | ||||
|     } | ||||
|     virtual void RemoveCover(RPFP::Node *covered, RPFP::Node *covering){ | ||||
|       ev(); s << "uncover " << covered->Name.name() << ": " << covered->number << " by " << covering->number << std::endl; | ||||
|  | @ -2762,7 +3095,7 @@ namespace Duality { | |||
|     virtual void Conjecture(RPFP::Node *node, const RPFP::Transformer &t){ | ||||
|       ev(); s << "conjecture " << node->number << " " << node->Name.name() << ": "; | ||||
|       rpfp->Summarize(t.Formula); | ||||
|       std::cout << std::endl; | ||||
|       s << std::endl; | ||||
|     } | ||||
|     virtual void Dominates(RPFP::Node *node, RPFP::Node *other){ | ||||
|       ev(); s << "dominates " << node->Name.name() << ": " << node->number << " > " << other->number << std::endl; | ||||
|  |  | |||
|  | @ -18,7 +18,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -37,16 +37,18 @@ Revision History: | |||
| 
 | ||||
| namespace Duality { | ||||
| 
 | ||||
|   solver::solver(Duality::context& c, bool extensional, bool models) : object(c), the_model(c) { | ||||
|   solver::solver(Duality::context& c, bool _extensional, bool models) : object(c), the_model(c) { | ||||
|     params_ref p; | ||||
|     p.set_bool("proof", true); // this is currently useless
 | ||||
|     if(models) | ||||
|       p.set_bool("model", true);  | ||||
|     p.set_bool("unsat_core", true);  | ||||
|     p.set_bool("mbqi",true); | ||||
|     bool mbqi = c.get_config().get().get_bool("mbqi",true); | ||||
|     p.set_bool("mbqi",mbqi); // just to test
 | ||||
|     p.set_str("mbqi.id","itp"); // use mbqi for quantifiers in interpolants
 | ||||
|     p.set_uint("mbqi.max_iterations",1); // limit mbqi to a single iteration
 | ||||
|     if(true || extensional) | ||||
|     extensional = mbqi && (true || _extensional); | ||||
|     if(extensional) | ||||
|       p.set_bool("array.extensional",true); | ||||
|     scoped_ptr<solver_factory> sf = mk_smt_solver_factory(); | ||||
|     m_solver = (*sf)(m(), p, true, true, true, ::symbol::null); | ||||
|  | @ -656,6 +658,18 @@ expr context::make_quant(decl_kind op, const std::vector<sort> &_sorts, const st | |||
|     pp.display_smt2(std::cout, m_solver->get_assertion(n-1)); | ||||
|   } | ||||
| 
 | ||||
|   void solver::print(const char *filename) { | ||||
|     std::ofstream f(filename); | ||||
|     unsigned n = m_solver->get_num_assertions(); | ||||
|     if(!n) | ||||
|       return; | ||||
|     ast_smt_pp pp(m()); | ||||
|     for (unsigned i = 0; i < n-1; ++i) | ||||
|       pp.add_assumption(m_solver->get_assertion(i)); | ||||
|     pp.display_smt2(f, m_solver->get_assertion(n-1)); | ||||
|   } | ||||
| 
 | ||||
| 
 | ||||
|   void solver::show_assertion_ids() { | ||||
| #if 0 | ||||
|     unsigned n = m_solver->get_num_assertions(); | ||||
|  |  | |||
|  | @ -182,6 +182,7 @@ namespace Duality { | |||
|       void set(char const * param, char const * value) { m_config.set(param,value); } | ||||
|       void set(char const * param, bool value) { m_config.set(param,value); } | ||||
|       void set(char const * param, int value) { m_config.set(param,value); } | ||||
|       config &get_config() {return m_config;} | ||||
| 
 | ||||
|       symbol str_symbol(char const * s); | ||||
|       symbol int_symbol(int n); | ||||
|  | @ -243,6 +244,9 @@ namespace Duality { | |||
| 
 | ||||
|       sort_kind get_sort_kind(const sort &s); | ||||
| 
 | ||||
|       expr translate(const expr &e); | ||||
|       func_decl translate(const func_decl &); | ||||
| 
 | ||||
|       void print_expr(std::ostream &s, const ast &e); | ||||
| 
 | ||||
|       fixedpoint mk_fixedpoint(); | ||||
|  | @ -818,6 +822,7 @@ namespace Duality { | |||
|         model the_model; | ||||
| 	bool canceled; | ||||
| 	proof_gen_mode m_mode; | ||||
| 	bool extensional; | ||||
|     public: | ||||
|         solver(context & c, bool extensional = false, bool models = true); | ||||
|         solver(context & c, ::solver *s):object(c),the_model(c) { m_solver = s; canceled = false;} | ||||
|  | @ -921,6 +926,7 @@ namespace Duality { | |||
| 	unsigned get_scope_level(){ scoped_proof_mode spm(m(),m_mode); return m_solver->get_scope_level();} | ||||
| 
 | ||||
| 	void show(); | ||||
| 	void print(const char *filename); | ||||
| 	void show_assertion_ids(); | ||||
| 
 | ||||
| 	proof get_proof(){ | ||||
|  | @ -928,6 +934,7 @@ namespace Duality { | |||
| 	  return proof(ctx(),m_solver->get_proof()); | ||||
| 	} | ||||
| 
 | ||||
| 	bool extensional_array_theory() {return extensional;} | ||||
|     }; | ||||
| 
 | ||||
| #if 0 | ||||
|  | @ -1370,6 +1377,20 @@ namespace Duality { | |||
|       return to_expr(a.raw()); | ||||
|     } | ||||
| 
 | ||||
|     inline expr context::translate(const expr &e) { | ||||
|       ::expr *f = to_expr(e.raw()); | ||||
|       if(&e.ctx().m() != &m()) // translation is only supported within the same ast manager
 | ||||
| 	throw "ast manager mismatch"; | ||||
|       return cook(f); | ||||
|     } | ||||
| 
 | ||||
|     inline func_decl context::translate(const func_decl &e) { | ||||
|       ::func_decl *f = to_func_decl(e.raw()); | ||||
|       if(&e.ctx().m() != &m()) // translation is only supported within the same ast manager
 | ||||
| 	throw "ast manager mismatch"; | ||||
|       return func_decl(*this,f); | ||||
|     } | ||||
| 
 | ||||
|     typedef double clock_t; | ||||
|     clock_t current_time(); | ||||
|     inline void output_time(std::ostream &os, clock_t time){os << time;} | ||||
|  | @ -1401,14 +1422,6 @@ namespace hash_space { | |||
|   }; | ||||
| } | ||||
| 
 | ||||
| // to make Duality::ast hashable in windows
 | ||||
| #ifdef _WINDOWS  | ||||
| template <> inline | ||||
| size_t stdext::hash_value<Duality::ast >(const Duality::ast& s) | ||||
| {	 | ||||
|   return s.raw()->get_id(); | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| // to make Duality::ast usable in ordered collections
 | ||||
| namespace std { | ||||
|  | @ -1445,14 +1458,6 @@ namespace hash_space { | |||
|   }; | ||||
| } | ||||
| 
 | ||||
| // to make Duality::func_decl hashable in windows
 | ||||
| #ifdef _WINDOWS  | ||||
| template <> inline | ||||
| size_t stdext::hash_value<Duality::func_decl >(const Duality::func_decl& s) | ||||
| {	 | ||||
|   return s.raw()->get_id(); | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| // to make Duality::func_decl usable in ordered collections
 | ||||
| namespace std { | ||||
|  |  | |||
|  | @ -23,7 +23,7 @@ Revision History: | |||
| #include <vector> | ||||
| #include <string> | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #define FOCI2_EXPORT __declspec(dllexport) | ||||
| #else | ||||
| #define FOCI2_EXPORT __attribute__ ((visibility ("default"))) | ||||
|  |  | |||
|  | @ -18,7 +18,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -34,9 +34,7 @@ Revision History: | |||
| #include "../smt/smt_solver.h" | ||||
| 
 | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| iz3base::range &iz3base::ast_range(ast t){ | ||||
|  |  | |||
|  | @ -17,7 +17,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -36,9 +36,7 @@ Revision History: | |||
| #include <iterator> | ||||
| 
 | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| struct iz3checker : iz3base { | ||||
|    | ||||
|  |  | |||
|  | @ -25,9 +25,7 @@ Revision History: | |||
| #include "foci2.h" | ||||
| #include "iz3foci.h" | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| class iz3foci_impl : public iz3secondary { | ||||
| 
 | ||||
|  |  | |||
545 changed lines in src/interp/iz3hash.h (Executable file → Normal file)
|  | @ -7,7 +7,16 @@ Module Name: | |||
| 
 | ||||
| Abstract: | ||||
| 
 | ||||
|    Wrapper for stl hash tables | ||||
|    Simple implementation of bucket-list hash tables conforming to SGI | ||||
|    hash_map and hash_set interfaces. Just enough members are | ||||
|    implemented to support iz3 and duality. | ||||
| 
 | ||||
|    iz3 and duality need this package because they assume that insert | ||||
|    preserves iterators and references to elements, which is not true | ||||
|    of the hashtable packages in util. | ||||
| 
 | ||||
|    This package lives in namespace hash_space. Specializations of | ||||
|    class "hash" should be made in this namespace. | ||||
| 
 | ||||
| Author: | ||||
| 
 | ||||
|  | @ -17,66 +26,36 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| // pull in the headers for hash_map and hash_set
 | ||||
| // these live in non-standard places
 | ||||
| 
 | ||||
| #ifndef IZ3_HASH_H | ||||
| #define IZ3_HASH_H | ||||
| 
 | ||||
| //#define USE_UNORDERED_MAP
 | ||||
| #ifdef USE_UNORDERED_MAP | ||||
| 
 | ||||
| #define stl_ext std | ||||
| #define hash_space std | ||||
| #include <unordered_map> | ||||
| #include <unordered_set> | ||||
| #define hash_map unordered_map | ||||
| #define hash_set unordered_set | ||||
| 
 | ||||
| #else | ||||
| 
 | ||||
| #if __GNUC__ >= 3 | ||||
| #undef __DEPRECATED | ||||
| #define stl_ext __gnu_cxx | ||||
| #define hash_space stl_ext | ||||
| #include <ext/hash_map> | ||||
| #include <ext/hash_set> | ||||
| #else | ||||
| #ifdef _WINDOWS | ||||
| #define stl_ext stdext | ||||
| #define hash_space std | ||||
| #include <hash_map> | ||||
| #include <hash_set> | ||||
| #else | ||||
| #define stl_ext std | ||||
| #define hash_space std | ||||
| #include <hash_map> | ||||
| #include <hash_set> | ||||
| #endif | ||||
| #endif | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| #include <string> | ||||
| #include <vector> | ||||
| #include <iterator> | ||||
| #include "hash.h" | ||||
| 
 | ||||
| // stupid STL doesn't include hash function for class string
 | ||||
| 
 | ||||
| #ifndef _WINDOWS  | ||||
| 
 | ||||
| namespace stl_ext { | ||||
|   template <> | ||||
|     class hash<std::string> { | ||||
|     stl_ext::hash<const char *> H; | ||||
|   public: | ||||
|     size_t operator()(const std::string &s) const { | ||||
|       return H(s.c_str()); | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #endif | ||||
| #define stl_ext hash_space | ||||
| 
 | ||||
| namespace hash_space { | ||||
| 
 | ||||
|   template <typename T> class hash {}; | ||||
| 
 | ||||
|   template <> | ||||
|     class hash<int> { | ||||
|   public: | ||||
|     size_t operator()(const int &s) const { | ||||
|       return s; | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|   template <> | ||||
|     class hash<std::string> { | ||||
|   public: | ||||
|     size_t operator()(const std::string &s) const { | ||||
|       return string_hash(s.c_str(), s.size(), 0); | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|   template <> | ||||
|     class hash<std::pair<int,int> > { | ||||
|   public: | ||||
|  | @ -84,17 +63,7 @@ namespace hash_space { | |||
|       return p.first + p.second; | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #ifdef _WINDOWS  | ||||
| template <> inline | ||||
| size_t stdext::hash_value<std::pair<int,int> >(const std::pair<int,int>& p) | ||||
| {	// hash _Keyval to size_t value one-to-one
 | ||||
| 	return p.first + p.second; | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| namespace hash_space { | ||||
|   template <class T> | ||||
|     class hash<std::pair<T *, T *> > { | ||||
|   public: | ||||
|  | @ -102,70 +71,402 @@ namespace hash_space { | |||
|       return (size_t)p.first + (size_t)p.second; | ||||
|     } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #if 0 | ||||
| template <class T> inline | ||||
| size_t stdext::hash_value<std::pair<T *, T *> >(const std::pair<T *, T *>& p) | ||||
| {	// hash _Keyval to size_t value one-to-one
 | ||||
|   return (size_t)p.first + (size_t)p.second; | ||||
| } | ||||
| #endif | ||||
|   enum { num_primes = 29 }; | ||||
| 
 | ||||
| #ifdef _WINDOWS | ||||
|   static const unsigned long primes[num_primes] = | ||||
|     { | ||||
|       7ul, | ||||
|       53ul, | ||||
|       97ul, | ||||
|       193ul, | ||||
|       389ul, | ||||
|       769ul, | ||||
|       1543ul, | ||||
|       3079ul, | ||||
|       6151ul, | ||||
|       12289ul, | ||||
|       24593ul, | ||||
|       49157ul, | ||||
|       98317ul, | ||||
|       196613ul, | ||||
|       393241ul, | ||||
|       786433ul, | ||||
|       1572869ul, | ||||
|       3145739ul, | ||||
|       6291469ul, | ||||
|       12582917ul, | ||||
|       25165843ul, | ||||
|       50331653ul, | ||||
|       100663319ul, | ||||
|       201326611ul, | ||||
|       402653189ul, | ||||
|       805306457ul, | ||||
|       1610612741ul, | ||||
|       3221225473ul, | ||||
|       4294967291ul | ||||
|     }; | ||||
| 
 | ||||
| namespace std { | ||||
|     template <> | ||||
| 	   class less<std::pair<int,int> > { | ||||
| 	   public: | ||||
| 		   bool operator()(const pair<int,int> &x, const pair<int,int> &y) const { | ||||
| 		      return x.first < y.first || x.first == y.first && x.second < y.second; | ||||
| 		   } | ||||
| 	   }; | ||||
| 	 | ||||
| } | ||||
|    | ||||
| namespace std { | ||||
|     template <class T> | ||||
| 	   class less<std::pair<T *,T *> > { | ||||
| 	   public: | ||||
| 		   bool operator()(const pair<T *,T *> &x, const pair<T *,T *> &y) const { | ||||
| 		     return (size_t)x.first < (size_t)y.first || (size_t)x.first == (size_t)y.first && (size_t)x.second < (size_t)y.second; | ||||
| 		   } | ||||
| 	   }; | ||||
| 	 | ||||
| } | ||||
|   inline unsigned long next_prime(unsigned long n) { | ||||
|     const unsigned long* to = primes + (int)num_primes; | ||||
|     for(const unsigned long* p = primes; p < to; p++) | ||||
|       if(*p >= n) return *p; | ||||
|     return primes[num_primes-1]; | ||||
|   } | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| #ifndef _WINDOWS | ||||
| 
 | ||||
| #if 0 | ||||
| namespace stl_ext { | ||||
|   template <class T> | ||||
|     class hash<T *> { | ||||
|   template<class Value, class Key, class HashFun, class GetKey, class KeyEqFun> | ||||
|     class hashtable | ||||
|   { | ||||
|   public: | ||||
|     size_t operator()(const T *p) const { | ||||
|       return (size_t) p; | ||||
| 
 | ||||
|     typedef Value &reference; | ||||
|     typedef const Value &const_reference; | ||||
|      | ||||
|     struct Entry | ||||
|     { | ||||
|       Entry* next; | ||||
|       Value val; | ||||
|        | ||||
|       Entry(const Value &_val) : val(_val) {next = 0;} | ||||
|     }; | ||||
|      | ||||
| 
 | ||||
|     struct iterator | ||||
|     {       | ||||
|       Entry* ent; | ||||
|       hashtable* tab; | ||||
| 
 | ||||
|       typedef std::forward_iterator_tag iterator_category; | ||||
|       typedef Value value_type; | ||||
|       typedef std::ptrdiff_t difference_type; | ||||
|       typedef size_t size_type; | ||||
|       typedef Value& reference; | ||||
|       typedef Value* pointer; | ||||
| 
 | ||||
|       iterator(Entry* _ent, hashtable* _tab) : ent(_ent), tab(_tab) { } | ||||
| 
 | ||||
|       iterator() { } | ||||
| 
 | ||||
|       Value &operator*() const { return ent->val; } | ||||
| 
 | ||||
|       Value *operator->() const { return &(operator*()); } | ||||
| 
 | ||||
|       iterator &operator++() { | ||||
| 	Entry *old = ent; | ||||
| 	ent = ent->next; | ||||
| 	if (!ent) { | ||||
| 	  size_t bucket = tab->get_bucket(old->val); | ||||
| 	  while (!ent && ++bucket < tab->buckets.size()) | ||||
| 	    ent = tab->buckets[bucket]; | ||||
| 	} | ||||
| 	return *this; | ||||
|       } | ||||
| 
 | ||||
|       iterator operator++(int) { | ||||
| 	iterator tmp = *this; | ||||
| 	operator++(); | ||||
| 	return tmp; | ||||
|       } | ||||
| 
 | ||||
| 
 | ||||
|       bool operator==(const iterator& it) const {  | ||||
| 	return ent == it.ent; | ||||
|       } | ||||
| 
 | ||||
|       bool operator!=(const iterator& it) const { | ||||
| 	return ent != it.ent; | ||||
|       } | ||||
|     }; | ||||
| 
 | ||||
|     struct const_iterator | ||||
|     {       | ||||
|       const Entry* ent; | ||||
|       const hashtable* tab; | ||||
| 
 | ||||
|       typedef std::forward_iterator_tag iterator_category; | ||||
|       typedef Value value_type; | ||||
|       typedef std::ptrdiff_t difference_type; | ||||
|       typedef size_t size_type; | ||||
|       typedef const Value& reference; | ||||
|       typedef const Value* pointer; | ||||
| 
 | ||||
|       const_iterator(const Entry* _ent, const hashtable* _tab) : ent(_ent), tab(_tab) { } | ||||
| 
 | ||||
|       const_iterator() { } | ||||
| 
 | ||||
|       const Value &operator*() const { return ent->val; } | ||||
| 
 | ||||
|       const Value *operator->() const { return &(operator*()); } | ||||
| 
 | ||||
|       const_iterator &operator++() { | ||||
| 	const Entry *old = ent; | ||||
| 	ent = ent->next; | ||||
| 	if (!ent) { | ||||
| 	  size_t bucket = tab->get_bucket(old->val); | ||||
| 	  while (!ent && ++bucket < tab->buckets.size()) | ||||
| 	    ent = tab->buckets[bucket]; | ||||
| 	} | ||||
| 	return *this; | ||||
|       } | ||||
| 
 | ||||
|       const_iterator operator++(int) { | ||||
| 	const_iterator tmp = *this; | ||||
| 	operator++(); | ||||
| 	return tmp; | ||||
|       } | ||||
| 
 | ||||
| 
 | ||||
|       bool operator==(const const_iterator& it) const {  | ||||
| 	return ent == it.ent; | ||||
|       } | ||||
| 
 | ||||
|       bool operator!=(const const_iterator& it) const { | ||||
| 	return ent != it.ent; | ||||
|       } | ||||
|     }; | ||||
| 
 | ||||
|     private: | ||||
| 
 | ||||
|     typedef std::vector<Entry*> Table; | ||||
| 
 | ||||
|     Table buckets; | ||||
|     size_t entries; | ||||
|     HashFun hash_fun ; | ||||
|     GetKey get_key; | ||||
|     KeyEqFun key_eq_fun; | ||||
|      | ||||
|     public: | ||||
| 
 | ||||
|     hashtable(size_t init_size) : buckets(init_size,(Entry *)0) { | ||||
|       entries = 0; | ||||
|     } | ||||
|      | ||||
|     hashtable(const hashtable& other) { | ||||
|       dup(other); | ||||
|     } | ||||
| 
 | ||||
|     hashtable& operator= (const hashtable& other) { | ||||
|       if (&other != this) | ||||
| 	dup(other); | ||||
|       return *this; | ||||
|     } | ||||
| 
 | ||||
|     ~hashtable() { | ||||
|       clear(); | ||||
|     } | ||||
| 
 | ||||
|     size_t size() const {  | ||||
|       return entries; | ||||
|     } | ||||
| 
 | ||||
|     bool empty() const {  | ||||
|       return size() == 0; | ||||
|     } | ||||
| 
 | ||||
|     void swap(hashtable& other) { | ||||
|       buckets.swap(other.buckets); | ||||
|       std::swap(entries, other.entries); | ||||
|     } | ||||
|      | ||||
|     iterator begin() { | ||||
|       for (size_t i = 0; i < buckets.size(); ++i) | ||||
| 	if (buckets[i]) | ||||
| 	  return iterator(buckets[i], this); | ||||
|       return end(); | ||||
|     } | ||||
|      | ||||
|     iterator end() {  | ||||
|       return iterator(0, this); | ||||
|     } | ||||
| 
 | ||||
|     const_iterator begin() const { | ||||
|       for (size_t i = 0; i < buckets.size(); ++i) | ||||
| 	if (buckets[i]) | ||||
| 	  return const_iterator(buckets[i], this); | ||||
|       return end(); | ||||
|     } | ||||
|      | ||||
|     const_iterator end() const {  | ||||
|       return const_iterator(0, this); | ||||
|     } | ||||
|      | ||||
|     size_t get_bucket(const Value& val, size_t n) const { | ||||
|       return hash_fun(get_key(val)) % n; | ||||
|     } | ||||
|      | ||||
|     size_t get_key_bucket(const Key& key) const { | ||||
|       return hash_fun(key) % buckets.size(); | ||||
|     } | ||||
| 
 | ||||
|     size_t get_bucket(const Value& val) const { | ||||
|       return get_bucket(val,buckets.size()); | ||||
|     } | ||||
| 
 | ||||
|     Entry *lookup(const Value& val, bool ins = false) | ||||
|     { | ||||
|       resize(entries + 1); | ||||
| 
 | ||||
|       size_t n = get_bucket(val); | ||||
|       Entry* from = buckets[n]; | ||||
|        | ||||
|       for (Entry* ent = from; ent; ent = ent->next) | ||||
| 	if (key_eq_fun(get_key(ent->val), get_key(val))) | ||||
| 	  return ent; | ||||
|        | ||||
|       if(!ins) return 0; | ||||
| 
 | ||||
|       Entry* tmp = new Entry(val); | ||||
|       tmp->next = from; | ||||
|       buckets[n] = tmp; | ||||
|       ++entries; | ||||
|       return tmp; | ||||
|     } | ||||
| 
 | ||||
|     Entry *lookup_key(const Key& key) const | ||||
|     { | ||||
|       size_t n = get_key_bucket(key); | ||||
|       Entry* from = buckets[n]; | ||||
|        | ||||
|       for (Entry* ent = from; ent; ent = ent->next) | ||||
| 	if (key_eq_fun(get_key(ent->val), key)) | ||||
| 	  return ent; | ||||
|        | ||||
|       return 0; | ||||
|     } | ||||
| 
 | ||||
|     const_iterator find(const Key& key) const { | ||||
|       return const_iterator(lookup_key(key),this); | ||||
|     } | ||||
| 
 | ||||
|     iterator find(const Key& key) { | ||||
|       return iterator(lookup_key(key),this); | ||||
|     } | ||||
| 
 | ||||
|     std::pair<iterator,bool> insert(const Value& val){ | ||||
|       size_t old_entries = entries; | ||||
|       Entry *ent = lookup(val,true); | ||||
|       return std::pair<iterator,bool>(iterator(ent,this),entries > old_entries); | ||||
|     } | ||||
|      | ||||
|     iterator insert(const iterator &it, const Value& val){ | ||||
|       Entry *ent = lookup(val,true); | ||||
|       return iterator(ent,this); | ||||
|     } | ||||
| 
 | ||||
|     size_t erase(const Key& key) | ||||
|     { | ||||
|       Entry** p = &(buckets[get_key_bucket(key)]); | ||||
|       size_t count = 0; | ||||
|       while(*p){ | ||||
| 	Entry *q = *p; | ||||
| 	if (key_eq_fun(get_key(q->val), key)) { | ||||
| 	  ++count; | ||||
| 	  *p = q->next; | ||||
| 	  delete q; | ||||
| 	} | ||||
| 	else | ||||
| 	  p = &(q->next); | ||||
|       } | ||||
|       entries -= count; | ||||
|       return count; | ||||
|     } | ||||
| 
 | ||||
|     void resize(size_t new_size) { | ||||
|       const size_t old_n = buckets.size(); | ||||
|       if (new_size <= old_n) return; | ||||
|       const size_t n = next_prime(new_size); | ||||
|       if (n <= old_n) return; | ||||
|       Table tmp(n, (Entry*)(0)); | ||||
|       for (size_t i = 0; i < old_n; ++i) { | ||||
| 	Entry* ent = buckets[i]; | ||||
| 	while (ent) { | ||||
| 	  size_t new_bucket = get_bucket(ent->val, n); | ||||
| 	  buckets[i] = ent->next; | ||||
| 	  ent->next = tmp[new_bucket]; | ||||
| 	  tmp[new_bucket] = ent; | ||||
| 	  ent = buckets[i]; | ||||
| 	} | ||||
|       } | ||||
|       buckets.swap(tmp); | ||||
|     } | ||||
|      | ||||
|     void clear() | ||||
|     { | ||||
|       for (size_t i = 0; i < buckets.size(); ++i) { | ||||
| 	for (Entry* ent = buckets[i]; ent != 0;) { | ||||
| 	  Entry* next = ent->next; | ||||
| 	  delete ent; | ||||
| 	  ent = next; | ||||
| 	} | ||||
| 	buckets[i] = 0; | ||||
|       } | ||||
|       entries = 0; | ||||
|     } | ||||
| 
 | ||||
|     void dup(const hashtable& other) | ||||
|     { | ||||
|       buckets.resize(other.buckets.size()); | ||||
|       for (size_t i = 0; i < other.buckets.size(); ++i) { | ||||
| 	Entry** to = &buckets[i]; | ||||
| 	for (Entry* from = other.buckets[i]; from; from = from->next) | ||||
| 	  to = &((*to = new Entry(from->val))->next); | ||||
|       } | ||||
|       entries = other.entries; | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|   template <typename T>  | ||||
|     class equal { | ||||
|   public: | ||||
|     bool operator()(const T& x, const T &y) const { | ||||
|       return x == y; | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|   template <typename T> | ||||
|     class identity { | ||||
|   public: | ||||
|     const T &operator()(const T &x) const { | ||||
|       return x; | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|   template <typename T, typename U> | ||||
|     class proj1 { | ||||
|   public: | ||||
|     const T &operator()(const std::pair<T,U> &x) const { | ||||
|       return x.first; | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|   template <typename Element, class HashFun = hash<Element>,  | ||||
|             class EqFun = equal<Element> > | ||||
|     class hash_set | ||||
|     : public hashtable<Element,Element,HashFun,identity<Element>,EqFun> { | ||||
| 
 | ||||
|   public: | ||||
| 
 | ||||
|   typedef Element value_type; | ||||
| 
 | ||||
|   hash_set() | ||||
|   : hashtable<Element,Element,HashFun,identity<Element>,EqFun>(7) {} | ||||
|   }; | ||||
| 
 | ||||
|   template <typename Key, typename Value, class HashFun = hash<Key>,  | ||||
|             class EqFun = equal<Key> > | ||||
|     class hash_map | ||||
|     : public hashtable<std::pair<Key,Value>,Key,HashFun,proj1<Key,Value>,EqFun> { | ||||
| 
 | ||||
|   public: | ||||
| 
 | ||||
|       hash_map() | ||||
|       : hashtable<std::pair<Key,Value>,Key,HashFun,proj1<Key,Value>,EqFun>(7) {} | ||||
| 
 | ||||
|       Value &operator[](const Key& key) { | ||||
| 	std::pair<Key,Value> kvp(key,Value()); | ||||
| 	return lookup(kvp,true)->val.second; | ||||
|       } | ||||
|   }; | ||||
| 
 | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| #ifdef _WINDOWS | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| template <class K, class T> | ||||
| class hash_map : public stl_ext::hash_map<K,T,stl_ext::hash_compare<K,std::less<K> > > {}; | ||||
| 
 | ||||
| template <class K> | ||||
| class hash_set : public stl_ext::hash_set<K,stl_ext::hash_compare<K,std::less<K> > > {}; | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| #endif | ||||
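A usage sketch of the reference-stability property the header's abstract calls out (illustrative only; it assumes the file above is reachable as "iz3hash.h"): entries are individually heap-allocated bucket nodes and rehashing only relinks them, so a reference obtained before further insertions stays valid.

#include <cassert>
#include "iz3hash.h"  // assumed include path for the header defined above

void reference_stability_example() {
    hash_space::hash_map<int, int> table;
    int &first = table[0];           // reference into the newly created entry
    first = 42;
    for (int i = 1; i < 1000; i++)   // enough insertions to trigger several rehashes
        table[i] = i;
    assert(first == 42);                   // still valid: resize relinks entries, never copies them
    assert(table.find(0) != table.end());  // the entry is still reachable by key
    assert(table.size() == 1000);
}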
|  |  | |||
|  | @ -19,7 +19,7 @@ Revision History: | |||
| 
 | ||||
| /* Copyright 2011 Microsoft Research. */ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -43,9 +43,7 @@ Revision History: | |||
| 
 | ||||
| 
 | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | @ -64,7 +62,7 @@ struct frame_reducer : public iz3mgr { | |||
|     : iz3mgr(other) {} | ||||
| 
 | ||||
|   void get_proof_assumptions_rec(z3pf proof, hash_set<ast> &memo, std::vector<bool> &used_frames){ | ||||
|     if(memo.count(proof))return; | ||||
|     if(memo.find(proof) != memo.end())return; | ||||
|     memo.insert(proof); | ||||
|     pfrule dk = pr(proof); | ||||
|     if(dk == PR_ASSERTED){ | ||||
|  |  | |||
|  | @ -18,7 +18,7 @@ Revision History: | |||
| --*/ | ||||
| 
 | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -38,9 +38,7 @@ Revision History: | |||
| #include "params.h" | ||||
| 
 | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| std::ostream &operator <<(std::ostream &s, const iz3mgr::ast &a){ | ||||
|  |  | |||
|  | @ -126,14 +126,6 @@ namespace hash_space { | |||
|   }; | ||||
| } | ||||
| 
 | ||||
| // to make ast_r hashable in windows
 | ||||
| #ifdef _WINDOWS  | ||||
| template <> inline | ||||
| size_t stdext::hash_value<ast_r >(const ast_r& s) | ||||
| {	 | ||||
|   return s.raw()->get_id(); | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| // to make ast_r usable in ordered collections
 | ||||
| namespace std { | ||||
|  |  | |||
|  | @ -36,11 +36,8 @@ Revision History: | |||
| #include"expr_abstract.h" | ||||
| 
 | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| // We promise not to use this for hash_map with range destructor
 | ||||
| namespace stl_ext { | ||||
|   template <> | ||||
|  | @ -51,7 +48,6 @@ namespace stl_ext { | |||
|     } | ||||
|   }; | ||||
| } | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| // TBD: algebraic data-types declarations will not be printed.
 | ||||
|  |  | |||
|  | @ -17,7 +17,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  |  | |||
|  | @ -18,7 +18,7 @@ Revision History: | |||
| --*/ | ||||
| 
 | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  |  | |||
|  | @ -17,7 +17,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -26,9 +26,7 @@ Revision History: | |||
| 
 | ||||
| #include "iz3proof_itp.h" | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| // #define INVARIANT_CHECKING
 | ||||
| 
 | ||||
|  | @ -369,11 +367,17 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|       } | ||||
|       default: | ||||
| 	{ | ||||
| 	  symb s = sym(itp2); | ||||
| 	  if(s == sforall || s == sexists) | ||||
| 	    res = make(s,arg(itp2,0),resolve_arith_rec2(memo, pivot1, conj1, arg(itp2,1))); | ||||
| 	  else | ||||
| 	  opr o = op(itp2); | ||||
| 	  if(o == Uninterpreted){ | ||||
| 	    symb s = sym(itp2); | ||||
| 	    if(s == sforall || s == sexists) | ||||
| 	      res = make(s,arg(itp2,0),resolve_arith_rec2(memo, pivot1, conj1, arg(itp2,1))); | ||||
| 	    else | ||||
| 	      res = itp2; | ||||
| 	  } | ||||
| 	  else { | ||||
| 	    res = itp2; | ||||
| 	  } | ||||
| 	} | ||||
|       } | ||||
|     } | ||||
|  | @ -405,11 +409,17 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|       } | ||||
|       default: | ||||
| 	{ | ||||
| 	  symb s = sym(itp1); | ||||
| 	  if(s == sforall || s == sexists) | ||||
| 	    res = make(s,arg(itp1,0),resolve_arith_rec1(memo, neg_pivot_lit, arg(itp1,1), itp2)); | ||||
| 	  else | ||||
| 	  opr o = op(itp1); | ||||
| 	  if(o == Uninterpreted){ | ||||
| 	    symb s = sym(itp1); | ||||
| 	    if(s == sforall || s == sexists) | ||||
| 	      res = make(s,arg(itp1,0),resolve_arith_rec1(memo, neg_pivot_lit, arg(itp1,1), itp2)); | ||||
| 	    else | ||||
| 	      res = itp1; | ||||
| 	  } | ||||
| 	  else { | ||||
| 	    res = itp1; | ||||
| 	  } | ||||
| 	} | ||||
|       } | ||||
|     } | ||||
|  | @ -464,18 +474,20 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|     std::pair<hash_map<ast,ast>::iterator,bool> bar = subst_memo.insert(foo); | ||||
|     ast &res = bar.first->second; | ||||
|     if(bar.second){ | ||||
|       symb g = sym(e); | ||||
|       if(g == rotate_sum){ | ||||
| 	if(var == get_placeholder(arg(e,0))){ | ||||
| 	  res = e; | ||||
|       if(op(e) == Uninterpreted){ | ||||
| 	symb g = sym(e); | ||||
| 	if(g == rotate_sum){ | ||||
| 	  if(var == get_placeholder(arg(e,0))){ | ||||
| 	    res = e; | ||||
| 	  } | ||||
| 	  else  | ||||
| 	    res = make(rotate_sum,arg(e,0),subst_term_and_simp_rec(var,t,arg(e,1))); | ||||
| 	  return res; | ||||
| 	} | ||||
| 	if(g == concat){ | ||||
| 	  res = e; | ||||
| 	  return res; | ||||
| 	} | ||||
| 	else  | ||||
| 	  res = make(rotate_sum,arg(e,0),subst_term_and_simp_rec(var,t,arg(e,1))); | ||||
| 	return res; | ||||
|       } | ||||
|       if(g == concat){ | ||||
| 	res = e; | ||||
| 	return res; | ||||
|       } | ||||
|       int nargs = num_args(e); | ||||
|       std::vector<ast> args(nargs); | ||||
|  | @ -538,8 +550,9 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
| 	  else if(g == symm) res = simplify_symm(args); | ||||
| 	  else if(g == modpon) res = simplify_modpon(args); | ||||
| 	  else if(g == sum) res = simplify_sum(args); | ||||
| #if 0 | ||||
| 	  else if(g == exmid) res = simplify_exmid(args); | ||||
| 	  else if(g == cong) res = simplify_cong(args); | ||||
| #if 0 | ||||
| 	  else if(g == modpon) res = simplify_modpon(args); | ||||
| 	  else if(g == leq2eq) res = simplify_leq2eq(args); | ||||
| 	  else if(g == eq2leq) res = simplify_eq2leq(args); | ||||
|  | @ -680,7 +693,7 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|     ast dummy1, dummy2; | ||||
|     sum_cond_ineq(in1,coeff2,in2,dummy1,dummy2); | ||||
|     n1 = merge_normal_chains(n1,n2, Aproves, Bproves); | ||||
|     ineq = make_normal(in1,n1); | ||||
|     ineq = is_true(n1) ? in1 : make_normal(in1,n1); | ||||
|   } | ||||
| 
 | ||||
|   bool is_ineq(const ast &ineq){ | ||||
|  | @ -729,29 +742,31 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|       ast x = arg(equality,0); | ||||
|       ast y = arg(equality,1); | ||||
|       ast Aproves1 = mk_true(), Bproves1 = mk_true(); | ||||
|       ast xleqy = round_ineq(ineq_from_chain(arg(pf,1),Aproves1,Bproves1)); | ||||
|       ast yleqx = round_ineq(ineq_from_chain(arg(pf,2),Aproves1,Bproves1)); | ||||
|       ast pf1 = destruct_cond_ineq(arg(pf,1), Aproves1, Bproves1); | ||||
|       ast pf2 = destruct_cond_ineq(arg(pf,2), Aproves1, Bproves1); | ||||
|       ast xleqy = round_ineq(ineq_from_chain(pf1,Aproves1,Bproves1)); | ||||
|       ast yleqx = round_ineq(ineq_from_chain(pf2,Aproves1,Bproves1)); | ||||
|       ast ineq1 = make(Leq,make_int("0"),make_int("0")); | ||||
|       sum_cond_ineq(ineq1,make_int("-1"),xleqy,Aproves1,Bproves1); | ||||
|       sum_cond_ineq(ineq1,make_int("-1"),yleqx,Aproves1,Bproves1); | ||||
|       Bproves1 = my_and(Bproves1,z3_simplify(ineq1)); | ||||
|       ast Acond  = my_implies(Aproves1,my_and(Bproves1,z3_simplify(ineq1))); | ||||
|       ast Aproves2 = mk_true(), Bproves2 = mk_true(); | ||||
|       ast ineq2 = make(Leq,make_int("0"),make_int("0")); | ||||
|       sum_cond_ineq(ineq2,make_int("1"),xleqy,Aproves2,Bproves2); | ||||
|       sum_cond_ineq(ineq2,make_int("1"),yleqx,Aproves2,Bproves2); | ||||
|       Bproves2 = z3_simplify(ineq2); | ||||
|       if(!is_true(Aproves1) || !is_true(Aproves2)) | ||||
| 	throw "help!"; | ||||
|       ast Bcond = my_implies(Bproves1,my_and(Aproves1,z3_simplify(ineq2))); | ||||
|       //      if(!is_true(Aproves1) || !is_true(Bproves1))
 | ||||
|       //	std::cout << "foo!\n";;
 | ||||
|       if(get_term_type(x) == LitA){ | ||||
| 	ast iter = z3_simplify(make(Plus,x,get_ineq_rhs(xleqy))); | ||||
| 	ast rewrite1 = make_rewrite(LitA,top_pos,Bproves1,make(Equal,x,iter)); | ||||
| 	ast rewrite2 = make_rewrite(LitB,top_pos,Bproves2,make(Equal,iter,y)); | ||||
| 	ast rewrite1 = make_rewrite(LitA,top_pos,Acond,make(Equal,x,iter)); | ||||
| 	ast rewrite2 = make_rewrite(LitB,top_pos,Bcond,make(Equal,iter,y)); | ||||
| 	return chain_cons(chain_cons(mk_true(),rewrite1),rewrite2); | ||||
|       } | ||||
|       if(get_term_type(y) == LitA){ | ||||
| 	ast iter = z3_simplify(make(Plus,y,get_ineq_rhs(yleqx))); | ||||
| 	ast rewrite2 = make_rewrite(LitA,top_pos,Bproves1,make(Equal,iter,y)); | ||||
| 	ast rewrite1 = make_rewrite(LitB,top_pos,Bproves2,make(Equal,x,iter)); | ||||
| 	ast rewrite2 = make_rewrite(LitA,top_pos,Acond,make(Equal,iter,y)); | ||||
| 	ast rewrite1 = make_rewrite(LitB,top_pos,Bcond,make(Equal,x,iter)); | ||||
| 	return chain_cons(chain_cons(mk_true(),rewrite1),rewrite2); | ||||
|       } | ||||
|       throw cannot_simplify(); | ||||
|  | @ -760,6 +775,8 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|   } | ||||
| 
 | ||||
|   ast round_ineq(const ast &ineq){ | ||||
|     if(sym(ineq) == normal) | ||||
|       return make_normal(round_ineq(arg(ineq,0)),arg(ineq,1)); | ||||
|     if(!is_ineq(ineq)) | ||||
|       throw cannot_simplify(); | ||||
|     ast res = simplify_ineq(ineq); | ||||
|  | @ -790,6 +807,7 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|      return simplify_sum(args); | ||||
|   } | ||||
| 
 | ||||
| 
 | ||||
|   ast simplify_rotate_eq2leq(const ast &pl, const ast &neg_equality, const ast &pf){ | ||||
|     if(pl == arg(pf,1)){ | ||||
|       ast cond = mk_true(); | ||||
|  | @ -920,6 +938,8 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|     return chain; | ||||
|   } | ||||
| 
 | ||||
|   struct subterm_normals_failed {}; | ||||
| 
 | ||||
|   void get_subterm_normals(const ast &ineq1, const ast &ineq2, const ast &chain, ast &normals, | ||||
| 			   const ast &pos, hash_set<ast> &memo, ast &Aproves, ast &Bproves){ | ||||
|     opr o1 = op(ineq1); | ||||
|  | @ -933,14 +953,77 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
| 	get_subterm_normals(arg(ineq1,i), arg(ineq2,i), chain, normals, new_pos, memo, Aproves, Bproves); | ||||
|       } | ||||
|     } | ||||
|     else if(get_term_type(ineq2) == LitMixed && memo.find(ineq2) == memo.end()){ | ||||
|       memo.insert(ineq2); | ||||
|       ast sub_chain = extract_rewrites(chain,pos); | ||||
|       if(is_true(sub_chain)) | ||||
| 	throw "bad inequality rewriting"; | ||||
|       ast new_normal = make_normal_step(ineq2,ineq1,reverse_chain(sub_chain)); | ||||
|       normals = merge_normal_chains(normals,cons_normal(new_normal,mk_true()), Aproves, Bproves); | ||||
|     else if(get_term_type(ineq2) == LitMixed){ | ||||
|       if(memo.find(ineq2) == memo.end()){ | ||||
| 	memo.insert(ineq2); | ||||
| 	ast sub_chain = extract_rewrites(chain,pos); | ||||
| 	if(is_true(sub_chain)) | ||||
| 	  throw "bad inequality rewriting"; | ||||
| 	ast new_normal = make_normal_step(ineq2,ineq1,reverse_chain(sub_chain)); | ||||
| 	normals = merge_normal_chains(normals,cons_normal(new_normal,mk_true()), Aproves, Bproves); | ||||
|       } | ||||
|     }  | ||||
|     else if(!(ineq1 == ineq2)) | ||||
|       throw subterm_normals_failed(); | ||||
|   } | ||||
| 
 | ||||
|   ast rewrites_to_normals(const ast &ineq1, const ast &chain, ast &normals, ast &Aproves, ast &Bproves, ast &Aineqs){ | ||||
|     if(is_true(chain)) | ||||
|       return ineq1; | ||||
|     ast last = chain_last(chain); | ||||
|     ast rest = chain_rest(chain); | ||||
|     ast new_ineq1 = rewrites_to_normals(ineq1, rest, normals, Aproves, Bproves, Aineqs); | ||||
|     ast p1 = rewrite_pos(last); | ||||
|     ast term1; | ||||
|     ast coeff = arith_rewrite_coeff(new_ineq1,p1,term1); | ||||
|     ast res = subst_in_pos(new_ineq1,rewrite_pos(last),rewrite_rhs(last)); | ||||
|     ast rpos; | ||||
|     pos_diff(p1,rewrite_pos(last),rpos); | ||||
|     ast term2 = subst_in_pos(term1,rpos,rewrite_rhs(last)); | ||||
|     if(get_term_type(term1) != LitMixed && get_term_type(term2) != LitMixed){ | ||||
|       if(is_rewrite_side(LitA,last)) | ||||
| 	linear_comb(Aineqs,coeff,make(Leq,make_int(rational(0)),make(Sub,term2,term1))); | ||||
|     } | ||||
|     else { | ||||
|       ast pf = extract_rewrites(make(concat,mk_true(),rest),p1); | ||||
|       ast new_normal = fix_normal(term1,term2,pf); | ||||
|       normals = merge_normal_chains(normals,cons_normal(new_normal,mk_true()), Aproves, Bproves); | ||||
|     } | ||||
|     return res; | ||||
|   } | ||||
| 
 | ||||
|   ast arith_rewrite_coeff(const ast &ineq, ast &p1, ast &term){ | ||||
|     ast coeff = make_int(rational(1)); | ||||
|     if(p1 == top_pos){ | ||||
|       term = ineq; | ||||
|       return coeff; | ||||
|     } | ||||
|     int argpos = pos_arg(p1); | ||||
|     opr o = op(ineq); | ||||
|     switch(o){ | ||||
|     case Leq:  | ||||
|     case Lt:  | ||||
|       coeff = argpos ? make_int(rational(1)) : make_int(rational(-1)); | ||||
|       break; | ||||
|     case Geq:  | ||||
|     case Gt:  | ||||
|       coeff = argpos ? make_int(rational(-1)) : make_int(rational(1)); | ||||
|       break; | ||||
|     case Not: | ||||
|     case Plus: | ||||
|       break; | ||||
|     case Times: | ||||
|       coeff = arg(ineq,0); | ||||
|       break; | ||||
|     default: | ||||
|       p1 = top_pos; | ||||
|       term = ineq; | ||||
|       return coeff; | ||||
|     } | ||||
|     p1 = arg(p1,1); | ||||
|     ast res = arith_rewrite_coeff(arg(ineq,argpos),p1,term); | ||||
|     p1 = pos_add(argpos,p1); | ||||
|     return coeff == make_int(rational(1)) ? res : make(Times,coeff,res); | ||||
|   } | ||||
| 
 | ||||
|   ast rewrite_chain_to_normal_ineq(const ast &chain, ast &Aproves, ast &Bproves){ | ||||
|  | @ -950,20 +1033,25 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|     ast ineq2 = apply_rewrite_chain(ineq1,tail); | ||||
|     ast nc = mk_true(); | ||||
|     hash_set<ast> memo; | ||||
|     get_subterm_normals(ineq1,ineq2,tail,nc,top_pos,memo, Aproves, Bproves); | ||||
|     ast itp; | ||||
|     ast itp = make(Leq,make_int(rational(0)),make_int(rational(0))); | ||||
|     ast Aproves_save = Aproves, Bproves_save = Bproves; try { | ||||
|       get_subterm_normals(ineq1,ineq2,tail,nc,top_pos,memo, Aproves, Bproves); | ||||
|     } | ||||
|     catch (const subterm_normals_failed &){ Aproves = Aproves_save; Bproves = Bproves_save; nc = mk_true(); | ||||
|       rewrites_to_normals(ineq1, tail, nc, Aproves, Bproves, itp); | ||||
|     } | ||||
|     if(is_rewrite_side(LitA,head)){ | ||||
|       itp = make(Leq,make_int("0"),make_int("0")); | ||||
|       linear_comb(itp,make_int("1"),ineq1); // make sure it is normal form
 | ||||
|       //itp = ineq1;
 | ||||
|       ast mc = z3_simplify(chain_side_proves(LitB,pref)); | ||||
|       Bproves = my_and(Bproves,mc); | ||||
|     } | ||||
|     else { | ||||
|       itp = make(Leq,make_int(rational(0)),make_int(rational(0))); | ||||
|       ast mc = z3_simplify(chain_side_proves(LitA,pref)); | ||||
|       Aproves = my_and(Aproves,mc); | ||||
|     } | ||||
|     if(is_true(nc)) | ||||
|       return itp; | ||||
|     return make_normal(itp,nc); | ||||
|   } | ||||
| 
 | ||||
|  | @ -1010,6 +1098,31 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|   } | ||||
| 
 | ||||
| 
 | ||||
|   ast simplify_exmid(const std::vector<ast> &args){ | ||||
|     if(is_equivrel(args[0])){ | ||||
|       ast Aproves = mk_true(), Bproves = mk_true(); | ||||
|       ast chain = destruct_cond_ineq(args[1],Aproves,Bproves); | ||||
|       ast Q2 = destruct_cond_ineq(args[2],Aproves,Bproves); | ||||
|       ast interp = contra_chain(Q2,chain); | ||||
|       return my_and(Aproves,my_implies(Bproves,interp)); | ||||
|     } | ||||
|     throw "bad exmid"; | ||||
|   } | ||||
| 
 | ||||
|   ast simplify_cong(const std::vector<ast> &args){ | ||||
|     ast Aproves = mk_true(), Bproves = mk_true(); | ||||
|     ast chain = destruct_cond_ineq(args[0],Aproves,Bproves); | ||||
|     rational pos; | ||||
|     if(is_numeral(args[1],pos)){ | ||||
|       int ipos = pos.get_unsigned(); | ||||
|       chain = chain_pos_add(ipos,chain); | ||||
|       ast Q2 = destruct_cond_ineq(args[2],Aproves,Bproves); | ||||
|       ast interp = contra_chain(Q2,chain); | ||||
|       return my_and(Aproves,my_implies(Bproves,interp)); | ||||
|     } | ||||
|     throw "bad cong"; | ||||
|   } | ||||
| 
 | ||||
|   bool is_equivrel(const ast &p){ | ||||
|     opr o = op(p); | ||||
|     return o == Equal || o == Iff; | ||||
|  | @ -1294,6 +1407,8 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|     if(pos == top_pos && op(equality) == Iff && !is_true(arg(equality,0))) | ||||
|       throw "bad rewrite"; | ||||
| #endif | ||||
|     if(!is_equivrel(equality)) | ||||
|       throw "bad rewrite"; | ||||
|     return make(t == LitA ? rewrite_A : rewrite_B, pos, cond, equality); | ||||
|   } | ||||
| 
 | ||||
|  | @ -1689,7 +1804,7 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|       ast diff; | ||||
|       if(comp_op == Leq) diff = make(Sub,rhs,mid); | ||||
|       else diff = make(Sub,mid,rhs); | ||||
|       ast foo = z3_simplify(make(Leq,make_int("0"),diff)); | ||||
|       ast foo = make(Leq,make_int("0"),z3_simplify(diff)); | ||||
|       if(is_true(cond)) | ||||
| 	cond = foo; | ||||
|       else { | ||||
|  | @ -2546,6 +2661,11 @@ class iz3proof_itp_impl : public iz3proof_itp { | |||
|     } | ||||
| 
 | ||||
|     hash_map<ast,ast>::iterator it = localization_map.find(e); | ||||
| 
 | ||||
|     if(it != localization_map.end() && is_bool_type(get_type(e))  | ||||
|        && !pv->ranges_intersect(pv->ast_scope(it->second),rng)) | ||||
|       it = localization_map.end();  // prevent quantifiers over booleans
 | ||||
| 
 | ||||
|     if(it != localization_map.end()){ | ||||
|       pf = localization_pf_map[e]; | ||||
|       e = it->second; | ||||
|  |  | |||
|  | @ -17,7 +17,7 @@ Revision History: | |||
| 
 | ||||
| --*/ | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -39,9 +39,7 @@ Revision History: | |||
| #include <set> | ||||
| 
 | ||||
| //using std::vector;
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| 
 | ||||
|  | @ -1592,6 +1590,27 @@ public: | |||
| 	return res; | ||||
|       } | ||||
| 
 | ||||
|       /* This idiom takes ~P and Q=P, yielding ~Q. It uses a "rewrite"
 | ||||
| 	 (Q=false) = ~Q. We eliminate the rewrite by using symmetry, | ||||
| 	 congruence and modus ponens. */ | ||||
| 
 | ||||
|       if(dk == PR_MODUS_PONENS && pr(prem(proof,1)) == PR_REWRITE && pr(prem(proof,0)) == PR_TRANSITIVITY && pr(prem(prem(proof,0),1)) == PR_IFF_FALSE){ | ||||
| 	if(op(con) == Not && arg(con,0) == arg(conc(prem(proof,0)),0)){ | ||||
| 	  Iproof::node ante1 = translate_main(prem(prem(proof,0),0),false); | ||||
| 	  Iproof::node ante2 = translate_main(prem(prem(prem(proof,0),1),0),false); | ||||
| 	  ast ante1_con = conc(prem(prem(proof,0),0)); | ||||
| 	  ast eq0 = arg(ante1_con,0); | ||||
| 	  ast eq1 = arg(ante1_con,1); | ||||
| 	  ast symm_con = make(Iff,eq1,eq0); | ||||
| 	  Iproof::node ante1s = iproof->make_symmetry(symm_con,ante1_con,ante1); | ||||
| 	  ast cong_con = make(Iff,make(Not,eq1),make(Not,eq0)); | ||||
| 	  Iproof::node ante1sc = iproof->make_congruence(symm_con,cong_con,ante1s); | ||||
| 	  res = iproof->make_mp(cong_con,ante2,ante1sc); | ||||
| 	  return res; | ||||
| 	} | ||||
|       } | ||||
|        | ||||
| 
 | ||||
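Read against the comment above, the special case can be seen as a small derivation (an illustrative restatement, with Q playing the role of eq0 and P of eq1):

$$ Q = P \;\xrightarrow{\text{symmetry}}\; P = Q \;\xrightarrow{\text{congruence of }\lnot}\; (\lnot P \leftrightarrow \lnot Q) \;\xrightarrow{\text{modus ponens with }\lnot P}\; \lnot Q $$

which is exactly the make_symmetry, make_congruence, make_mp sequence in the branch above.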
|       // translate all the premises
 | ||||
|       std::vector<Iproof::node> args(nprems); | ||||
|       for(unsigned i = 0; i < nprems; i++) | ||||
|  | @ -1751,6 +1770,13 @@ public: | |||
| 	res = args[0]; | ||||
| 	break; | ||||
|       } | ||||
|       case PR_IFF_FALSE: { // turns ~p into p <-> false, noop for us
 | ||||
| 	if(is_local(con)) | ||||
| 	  res = args[0]; | ||||
| 	else | ||||
| 	  throw unsupported(); | ||||
| 	break; | ||||
|       } | ||||
|       case PR_COMMUTATIVITY: { | ||||
| 	ast comm_equiv = make(op(con),arg(con,0),arg(con,0)); | ||||
| 	ast pf = iproof->make_reflexivity(comm_equiv); | ||||
|  | @ -1758,6 +1784,7 @@ public: | |||
| 	break; | ||||
|       } | ||||
|       default: | ||||
| 	pfgoto(proof); | ||||
| 	assert(0 && "translate_main: unsupported proof rule"); | ||||
| 	throw unsupported(); | ||||
|       } | ||||
|  |  | |||
|  | @ -20,7 +20,7 @@ Revision History: | |||
| --*/ | ||||
| 
 | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| #ifdef _WINDOWS | ||||
| #pragma warning(disable:4996) | ||||
| #pragma warning(disable:4800) | ||||
| #pragma warning(disable:4267) | ||||
|  | @ -42,11 +42,7 @@ Revision History: | |||
| #include <set> | ||||
| 
 | ||||
| //using std::vector;
 | ||||
| #ifndef WIN32 | ||||
| using namespace stl_ext; | ||||
| #endif | ||||
| 
 | ||||
| #ifndef WIN32 | ||||
| 
 | ||||
| /* This can introduce an address dependency if the range type of hash_map has
 | ||||
|    a destructor. Since the code in this file is not used and only here for | ||||
|  | @ -62,9 +58,6 @@ namespace stl_ext { | |||
|   }; | ||||
| } | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| 
 | ||||
| static int lemma_count = 0; | ||||
| #if 0 | ||||
| static int nll_lemma_count = 0; | ||||
|  | @ -96,38 +89,12 @@ namespace hash_space { | |||
|   }; | ||||
| } | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| 
 | ||||
| template <> inline | ||||
| size_t stdext::hash_value<Z3_resolvent >(const Z3_resolvent& p) | ||||
| {	 | ||||
|   std::hash<Z3_resolvent> h; | ||||
|   return h(p); | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| namespace std { | ||||
|   template <> | ||||
|   class less<Z3_resolvent > { | ||||
|   public: | ||||
|    bool operator()(const Z3_resolvent &x, const Z3_resolvent &y) const { | ||||
|      size_t ixproof = (size_t) x.proof.raw(); | ||||
|      size_t iyproof = (size_t) y.proof.raw(); | ||||
|      if(ixproof < iyproof) return true; | ||||
|      if(ixproof > iyproof) return false; | ||||
|      return x.pivot < y.pivot; | ||||
|    } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #else | ||||
| 
 | ||||
| bool operator==(const Z3_resolvent &x, const Z3_resolvent &y) { | ||||
|   return x.proof == y.proof && x.pivot == y.pivot; | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| typedef std::vector<Z3_resolvent *> ResolventAppSet; | ||||
| 
 | ||||
|  | @ -151,36 +118,6 @@ namespace hash_space { | |||
|   }; | ||||
| } | ||||
| 
 | ||||
| #ifdef WIN32 | ||||
| 
 | ||||
| template <> inline | ||||
| size_t stdext::hash_value<non_local_lits >(const non_local_lits& p) | ||||
| {	 | ||||
|   std::hash<non_local_lits> h; | ||||
|   return h(p); | ||||
| } | ||||
| 
 | ||||
| namespace std { | ||||
|   template <> | ||||
|   class less<non_local_lits > { | ||||
|   public: | ||||
|    bool operator()(const non_local_lits &x, const non_local_lits &y) const { | ||||
|      ResolventAppSet::const_iterator itx = x.proofs.begin(); | ||||
|      ResolventAppSet::const_iterator ity = y.proofs.begin(); | ||||
|      while(true){ | ||||
|        if(ity == y.proofs.end()) return false; | ||||
|        if(itx == x.proofs.end()) return true; | ||||
|        size_t xi = (size_t) *itx; | ||||
|        size_t yi = (size_t) *ity; | ||||
|        if(xi < yi) return true; | ||||
|        if(xi > yi) return false; | ||||
|        ++itx; ++ity; | ||||
|      } | ||||
|    } | ||||
|   }; | ||||
| } | ||||
| 
 | ||||
| #else | ||||
| 
 | ||||
| bool operator==(const non_local_lits &x, const non_local_lits &y) { | ||||
|      ResolventAppSet::const_iterator itx = x.proofs.begin(); | ||||
|  | @ -194,8 +131,6 @@ bool operator==(const non_local_lits &x, const non_local_lits &y) { | |||
| } | ||||
| 
 | ||||
| 
 | ||||
| #endif | ||||
| 
 | ||||
| /* This translator goes directly from Z3 proofs to interpolatable
 | ||||
|    proofs without an intermediate representation as an iz3proof. */ | ||||
| 
 | ||||
|  |  | |||
|  | @ -74,6 +74,8 @@ def_module_params('fixedpoint', | |||
| 			  ('stratified_inlining', BOOL, False, 'DUALITY: Use stratified inlining'), | ||||
| 			  ('recursion_bound', UINT, UINT_MAX, 'DUALITY: Recursion bound for stratified inlining'), | ||||
| 			  ('profile', BOOL, False, 'DUALITY: profile run time'), | ||||
| 			  ('mbqi', BOOL, True, 'DUALITY: use model-based quantifier instantiation'), | ||||
| 			  ('batch_expand', BOOL, False, 'DUALITY: use batch expansion'), | ||||
|                           ('dump_aig', SYMBOL, '', 'Dump clauses in AIG text format (AAG) to the given file name'), | ||||
|                           )) | ||||
| 
 | ||||
|  |  | |||
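The two options added above ('mbqi' and 'batch_expand') are ordinary fixedpoint module parameters. The following is a minimal sketch, not part of the change itself, of how they could be pushed into a fixedpoint object through the C API; the context `ctx`, the fixedpoint object `fp`, and the helper name `set_duality_options` are assumptions made for illustration:

    #include"z3.h"

    // Illustrative sketch: set the two new DUALITY options on a fixedpoint
    // object. The names "mbqi" and "batch_expand" are the parameters declared
    // above; they only take effect when the duality engine is in use.
    static void set_duality_options(Z3_context ctx, Z3_fixedpoint fp) {
        Z3_params p = Z3_mk_params(ctx);
        Z3_params_inc_ref(ctx, p);
        Z3_params_set_bool(ctx, p, Z3_mk_string_symbol(ctx, "mbqi"), false);        // model-based quantifier instantiation off
        Z3_params_set_bool(ctx, p, Z3_mk_string_symbol(ctx, "batch_expand"), true); // batch expansion on
        Z3_fixedpoint_set_params(ctx, fp, p);
        Z3_params_dec_ref(ctx, p);
    }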
|  | @ -64,20 +64,22 @@ namespace Duality { | |||
|     std::vector<expr> clauses; | ||||
|     std::vector<std::vector<RPFP::label_struct> > clause_labels; | ||||
|     hash_map<RPFP::Edge *,int> map;  // edges to clauses
 | ||||
|     Solver *old_rs; | ||||
|     Solver::Counterexample cex; | ||||
| 
 | ||||
|     duality_data(ast_manager &_m) : ctx(_m,config(params_ref())) { | ||||
|       ls = 0; | ||||
|       rpfp = 0; | ||||
|       status = StatusNull; | ||||
|       old_rs = 0; | ||||
|     } | ||||
|     ~duality_data(){ | ||||
|       if(old_rs) | ||||
| 	dealloc(old_rs); | ||||
|       if(rpfp) | ||||
| 	dealloc(rpfp); | ||||
|       if(ls) | ||||
| 	dealloc(ls); | ||||
|       if(cex.tree) | ||||
| 	delete cex.tree; | ||||
|     } | ||||
|   }; | ||||
| 
 | ||||
|  | @ -132,15 +134,18 @@ lbool dl_interface::query(::expr * query) { | |||
|   m_ctx.ensure_opened(); | ||||
| 
 | ||||
|   // if there is old data, get the cex and dispose (later)
 | ||||
|   Solver::Counterexample old_cex; | ||||
|   duality_data *old_data = _d; | ||||
|   if(old_data) | ||||
|     old_cex = old_data->cex; | ||||
|   Solver *old_rs = 0; | ||||
|   if(old_data){ | ||||
|     old_rs = old_data->old_rs; | ||||
|     old_rs->GetCounterexample().swap(old_data->cex); | ||||
|   } | ||||
| 
 | ||||
|   scoped_proof generate_proofs_please(m_ctx.get_manager()); | ||||
| 
 | ||||
|   // make a new problem and solver
 | ||||
|   _d = alloc(duality_data,m_ctx.get_manager()); | ||||
|   _d->ctx.set("mbqi",m_ctx.get_params().mbqi()); | ||||
|   _d->ls = alloc(RPFP::iZ3LogicSolver,_d->ctx); | ||||
|   _d->rpfp = alloc(RPFP,_d->ls); | ||||
| 
 | ||||
|  | @ -195,8 +200,9 @@ lbool dl_interface::query(::expr * query) { | |||
| 
 | ||||
|   Solver *rs = Solver::Create("duality", _d->rpfp); | ||||
| 
 | ||||
|   rs->LearnFrom(old_cex); // new solver gets hints from old cex
 | ||||
| 
 | ||||
|   if(old_rs) | ||||
|     rs->LearnFrom(old_rs); // new solver gets hints from old solver
 | ||||
|    | ||||
|   // set its options
 | ||||
|   IF_VERBOSE(1, rs->SetOption("report","1");); | ||||
|   rs->SetOption("full_expand",m_ctx.get_params().full_expand() ? "1" : "0"); | ||||
|  | @ -204,6 +210,7 @@ lbool dl_interface::query(::expr * query) { | |||
|   rs->SetOption("feasible_edges",m_ctx.get_params().feasible_edges() ? "1" : "0"); | ||||
|   rs->SetOption("use_underapprox",m_ctx.get_params().use_underapprox() ? "1" : "0"); | ||||
|   rs->SetOption("stratified_inlining",m_ctx.get_params().stratified_inlining() ? "1" : "0"); | ||||
|   rs->SetOption("batch_expand",m_ctx.get_params().batch_expand() ? "1" : "0"); | ||||
|   unsigned rb = m_ctx.get_params().recursion_bound(); | ||||
|   if(rb != UINT_MAX){ | ||||
|     std::ostringstream os; os << rb; | ||||
|  | @ -229,15 +236,14 @@ lbool dl_interface::query(::expr * query) { | |||
| 
 | ||||
|   // save the result and counterexample if there is one
 | ||||
|   _d->status = ans ? StatusModel : StatusRefutation; | ||||
|   _d->cex = rs->GetCounterexample(); | ||||
|   _d->cex.swap(rs->GetCounterexample()); // take ownership of cex
 | ||||
|   _d->old_rs = rs; // save this for later hints
 | ||||
|    | ||||
|   if(old_data){ | ||||
|     old_data->cex.tree = 0; // we own it now
 | ||||
|     dealloc(old_data); | ||||
|     dealloc(old_data); // this deallocates the old solver if there is one
 | ||||
|   } | ||||
| 
 | ||||
| 
 | ||||
|   dealloc(rs); | ||||
|   // dealloc(rs); this is now owned by data
 | ||||
| 
 | ||||
|   // true means the RPFP problem is SAT, so the query is UNSAT
 | ||||
|   return ans ? l_false : l_true; | ||||
|  | @ -265,18 +271,16 @@ void dl_interface::reset_statistics() { | |||
| 
 | ||||
| static hash_set<func_decl> *local_func_decls; | ||||
| 
 | ||||
| static void print_proof(dl_interface *d, std::ostream& out, Solver::Counterexample &cex) { | ||||
|   static void print_proof(dl_interface *d, std::ostream& out, RPFP *tree, RPFP::Node *root) { | ||||
|   context &ctx = d->dd()->ctx; | ||||
|   RPFP::Node &node = *cex.root; | ||||
|   RPFP::Node &node = *root; | ||||
|   RPFP::Edge &edge = *node.Outgoing; | ||||
|    | ||||
|   // first, prove the children (that are actually used)
 | ||||
| 
 | ||||
|   for(unsigned i = 0; i < edge.Children.size(); i++){ | ||||
|     if(!cex.tree->Empty(edge.Children[i])){ | ||||
|       Solver::Counterexample foo = cex; | ||||
|       foo.root = edge.Children[i]; | ||||
|       print_proof(d,out,foo); | ||||
|     if(!tree->Empty(edge.Children[i])){ | ||||
|       print_proof(d,out,tree,edge.Children[i]); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|  | @ -285,7 +289,7 @@ static void print_proof(dl_interface *d, std::ostream& out, Solver::Counterexamp | |||
|   out << "(step s!" << node.number; | ||||
|   out << " (" << node.Name.name(); | ||||
|   for(unsigned i = 0; i < edge.F.IndParams.size(); i++) | ||||
|     out << " " << cex.tree->Eval(&edge,edge.F.IndParams[i]); | ||||
|     out << " " << tree->Eval(&edge,edge.F.IndParams[i]); | ||||
|   out << ")\n"; | ||||
| 
 | ||||
|   // print the rule number
 | ||||
|  | @ -307,8 +311,8 @@ static void print_proof(dl_interface *d, std::ostream& out, Solver::Counterexamp | |||
|       sort the_sort = t.get_quantifier_bound_sort(j); | ||||
|       symbol name = t.get_quantifier_bound_name(j); | ||||
|       expr skolem = ctx.constant(symbol(ctx,name),sort(ctx,the_sort)); | ||||
|       out << "    (= " << skolem << " " << cex.tree->Eval(&edge,skolem) << ")\n"; | ||||
|       expr local_skolem = cex.tree->Localize(&edge,skolem); | ||||
|       out << "    (= " << skolem << " " << tree->Eval(&edge,skolem) << ")\n"; | ||||
|       expr local_skolem = tree->Localize(&edge,skolem); | ||||
|       (*local_func_decls).insert(local_skolem.decl()); | ||||
|     } | ||||
|   } | ||||
|  | @ -316,7 +320,7 @@ static void print_proof(dl_interface *d, std::ostream& out, Solver::Counterexamp | |||
|    | ||||
|   out << "  (labels"; | ||||
|   std::vector<symbol> labels; | ||||
|   cex.tree->GetLabels(&edge,labels); | ||||
|   tree->GetLabels(&edge,labels); | ||||
|   for(unsigned j = 0; j < labels.size(); j++){ | ||||
|     out << " " << labels[j]; | ||||
|   } | ||||
|  | @ -328,7 +332,7 @@ static void print_proof(dl_interface *d, std::ostream& out, Solver::Counterexamp | |||
|    | ||||
|   out << "  (ref "; | ||||
|   for(unsigned i = 0; i < edge.Children.size(); i++){ | ||||
|     if(!cex.tree->Empty(edge.Children[i])) | ||||
|     if(!tree->Empty(edge.Children[i])) | ||||
|       out << " s!" << edge.Children[i]->number; | ||||
|     else | ||||
|       out << " true"; | ||||
|  | @ -353,11 +357,11 @@ void dl_interface::display_certificate_non_const(std::ostream& out) { | |||
|     // negation of the query is the last clause -- prove it
 | ||||
|     hash_set<func_decl> locals; | ||||
|     local_func_decls = &locals; | ||||
|     print_proof(this,out,_d->cex); | ||||
|     print_proof(this,out,_d->cex.get_tree(),_d->cex.get_root()); | ||||
|     out << ")\n"; | ||||
|     out << "(model \n\""; | ||||
|     ::model mod(m_ctx.get_manager()); | ||||
|     model orig_model = _d->cex.tree->dualModel; | ||||
|     model orig_model = _d->cex.get_tree()->dualModel; | ||||
|     for(unsigned i = 0; i < orig_model.num_consts(); i++){ | ||||
|       func_decl cnst = orig_model.get_const_decl(i); | ||||
|       if(locals.find(cnst) == locals.end()){ | ||||
|  | @ -428,10 +432,10 @@ model_ref dl_interface::get_model() { | |||
|   return md; | ||||
| } | ||||
|    | ||||
| static proof_ref extract_proof(dl_interface *d, Solver::Counterexample &cex) { | ||||
|   static proof_ref extract_proof(dl_interface *d, RPFP *tree, RPFP::Node *root) { | ||||
|   context &ctx = d->dd()->ctx; | ||||
|   ast_manager &mgr = ctx.m(); | ||||
|   RPFP::Node &node = *cex.root; | ||||
|   RPFP::Node &node = *root; | ||||
|   RPFP::Edge &edge = *node.Outgoing; | ||||
|   RPFP::Edge *orig_edge = edge.map; | ||||
|    | ||||
|  | @ -453,21 +457,19 @@ static proof_ref extract_proof(dl_interface *d, Solver::Counterexample &cex) { | |||
|       sort the_sort = t.get_quantifier_bound_sort(j); | ||||
|       symbol name = t.get_quantifier_bound_name(j); | ||||
|       expr skolem = ctx.constant(symbol(ctx,name),sort(ctx,the_sort)); | ||||
|       expr val = cex.tree->Eval(&edge,skolem); | ||||
|       expr val = tree->Eval(&edge,skolem); | ||||
|       expr_ref thing(ctx.uncook(val),mgr); | ||||
|       substs[0].push_back(thing); | ||||
|       expr local_skolem = cex.tree->Localize(&edge,skolem); | ||||
|       expr local_skolem = tree->Localize(&edge,skolem); | ||||
|       (*local_func_decls).insert(local_skolem.decl()); | ||||
|     } | ||||
|   } | ||||
| 
 | ||||
|   svector<std::pair<unsigned, unsigned> > pos; | ||||
|   for(unsigned i = 0; i < edge.Children.size(); i++){ | ||||
|     if(!cex.tree->Empty(edge.Children[i])){ | ||||
|     if(!tree->Empty(edge.Children[i])){ | ||||
|       pos.push_back(std::pair<unsigned,unsigned>(i+1,0)); | ||||
|       Solver::Counterexample foo = cex; | ||||
|       foo.root = edge.Children[i]; | ||||
|       proof_ref prem = extract_proof(d,foo); | ||||
|       proof_ref prem = extract_proof(d,tree,edge.Children[i]); | ||||
|       prems.push_back(prem); | ||||
|       substs.push_back(expr_ref_vector(mgr)); | ||||
|     } | ||||
|  | @ -476,7 +478,7 @@ static proof_ref extract_proof(dl_interface *d, Solver::Counterexample &cex) { | |||
|   func_decl f = node.Name; | ||||
|   std::vector<expr> args; | ||||
|   for(unsigned i = 0; i < edge.F.IndParams.size(); i++) | ||||
|     args.push_back(cex.tree->Eval(&edge,edge.F.IndParams[i])); | ||||
|     args.push_back(tree->Eval(&edge,edge.F.IndParams[i])); | ||||
|   expr conc = f(args); | ||||
|    | ||||
| 
 | ||||
|  | @ -493,7 +495,7 @@ proof_ref dl_interface::get_proof() { | |||
|   if(_d->status == StatusRefutation){ | ||||
|     hash_set<func_decl> locals; | ||||
|     local_func_decls = &locals; | ||||
|     return extract_proof(this,_d->cex); | ||||
|     return extract_proof(this,_d->cex.get_tree(),_d->cex.get_root()); | ||||
|   } | ||||
|   else | ||||
|     return proof_ref(m_ctx.get_manager()); | ||||
|  |  | |||
|  | @ -331,22 +331,13 @@ public: | |||
|         return target; | ||||
|     } | ||||
| 
 | ||||
|     friend inline rational gcd(rational const & r1, rational const & r2) { | ||||
|         rational result; | ||||
|         m().gcd(r1.m_val, r2.m_val, result.m_val); | ||||
|         return result; | ||||
|     } | ||||
|     friend inline rational gcd(rational const & r1, rational const & r2); | ||||
| 
 | ||||
|     //
 | ||||
|     // extended Euclid:
 | ||||
|     // r1*a + r2*b = gcd
 | ||||
|     //
 | ||||
|     friend inline rational gcd(rational const & r1, rational const & r2, rational & a, rational & b) { | ||||
|         rational result; | ||||
|         m().gcd(r1.m_val, r2.m_val, a.m_val, b.m_val, result.m_val); | ||||
|         return result; | ||||
|     } | ||||
| 
 | ||||
|     friend inline rational gcd(rational const & r1, rational const & r2, rational & a, rational & b); | ||||
| 
 | ||||
|     friend inline rational lcm(rational const & r1, rational const & r2) { | ||||
|         rational result; | ||||
|  | @ -378,11 +369,7 @@ public: | |||
|         return result; | ||||
|     } | ||||
| 
 | ||||
|     friend inline rational abs(rational const & r) { | ||||
|         rational result(r); | ||||
|         m().abs(result.m_val); | ||||
|         return result; | ||||
|     } | ||||
|     friend inline rational abs(rational const & r); | ||||
| 
 | ||||
|     rational to_rational() const { return *this; } | ||||
| 
 | ||||
|  | @ -446,5 +433,24 @@ inline rational power(rational const & r, unsigned p) { | |||
|     return r.expt(p); | ||||
| } | ||||
| 
 | ||||
| inline rational abs(rational const & r) { | ||||
|   rational result(r); | ||||
|   rational::m().abs(result.m_val); | ||||
|   return result; | ||||
| } | ||||
| 
 | ||||
| inline rational gcd(rational const & r1, rational const & r2) { | ||||
|   rational result; | ||||
|   rational::m().gcd(r1.m_val, r2.m_val, result.m_val); | ||||
|   return result; | ||||
| } | ||||
| 
 | ||||
| inline rational gcd(rational const & r1, rational const & r2, rational & a, rational & b) { | ||||
|   rational result; | ||||
|   rational::m().gcd(r1.m_val, r2.m_val, a.m_val, b.m_val, result.m_val); | ||||
|   return result; | ||||
| } | ||||
| 
 | ||||
| 
 | ||||
| #endif /* _RATIONAL_H_ */ | ||||
| 
 | ||||
|  |  | |||
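The gcd, extended-gcd and abs friends keep their declared behaviour after being moved out of the class body. A minimal usage sketch of the extended-Euclid form r1*a + r2*b = gcd (illustrative only; it assumes Z3's numeral managers are already initialized, as they are inside the solver, and the helper name gcd_example is made up for the example):

    #include"rational.h"
    #include<cassert>

    // Illustrative sketch: exercise the free gcd/abs functions defined above.
    static void gcd_example() {
        rational r1(12), r2(18), a, b;
        rational g = gcd(r1, r2, a, b);     // extended Euclid: r1*a + r2*b == gcd
        assert(g == rational(6));
        assert(r1 * a + r2 * b == g);
        assert(abs(rational(-7)) == rational(7));
    }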