Mirror of https://github.com/Z3Prover/z3, synced 2025-04-05 09:04:07 +00:00

pipeline with release mode (#4206)

* pipeline with release mode

  Signed-off-by: Nikolaj Bjorner <nbjorner@microsoft.com>

* na

  Signed-off-by: Nikolaj Bjorner <nbjorner@microsoft.com>

commit b81ab94db7 (parent be998c308c)
@@ -1,10 +1,21 @@
 variables:
+  cmakeJulia: '-DZ3_BUILD_JULIA_BINDINGS=True'
+  cmakeJava: '-DZ3_BUILD_JAVA_BINDINGS=True'
+  cmakeNet: '-DZ3_BUILD_DOTNET_BINDINGS=True'
+  cmakePy: '-DZ3_BUILD_PYTHON_BINDINGS=True'
   cmakeStdArgs: '-DZ3_BUILD_DOTNET_BINDINGS=True -DZ3_BUILD_JAVA_BINDINGS=True -DZ3_BUILD_PYTHON_BINDINGS=True -G "Ninja" ../'
   asanEnv: 'CXXFLAGS="${CXXFLAGS} -fsanitize=address -fno-omit-frame-pointer" CFLAGS="${CFLAGS} -fsanitize=address -fno-omit-frame-pointer"'
   ubsanEnv: 'CXXFLAGS="${CXXFLAGS} -fsanitize=undefined" CFLAGS="${CFLAGS} -fsanitize=undefined"'
   msanEnv: 'CC=clang LDFLAGS="-L../libcxx/libcxx_msan/lib -lc++abi -Wl,-rpath=../libcxx/libcxx_msan/lib" CXX=clang++ CXXFLAGS="${CXXFLAGS} -stdlib=libc++ -fsanitize-memory-track-origins -fsanitize=memory -fPIE -fno-omit-frame-pointer -g -O2" CFLAGS="${CFLAGS} -stdlib=libc -fsanitize=memory -fsanitize-memory-track-origins -fno-omit-frame-pointer -g -O2"'
 
+
+# TBD:
+#   test python bindings
+#   build documentation
+#   Asan, ubsan, msan
+#   Disabled pending clang dependencies for std::unordered_map
+
 jobs:
 
 - job: "LinuxPythonDebug"
 
@@ -30,10 +41,7 @@ jobs:
   - template: scripts/test-regressions.yml
   - template: scripts/generate-doc.yml
 
 # ./cpp_example
 # ./c_example
 
-- job: "Ubuntu18"
+- job: "Ubuntu18Python"
   displayName: "Ubuntu 18 with ocaml"
   pool:
     vmImage: "Ubuntu-18.04"
 
@@ -54,11 +62,6 @@ jobs:
   - template: scripts/test-z3.yml
   - template: scripts/test-regressions.yml
 
-# TBD:
-#   test python bindings
-#   build documentation
-#   Asan, ubsan, msan
-#   Disabled pending clang dependencies for std::unordered_map
 
 - job: "LinuxMSan"
   displayName: "Ubuntu build - cmake"
 
@@ -94,7 +97,7 @@ jobs:
   # - template: scripts/test-java-cmake.yml
   # - template: scripts/test-regressions.yml
 
-- job: "LinuxCMake"
+- job: "Ubuntu16CMake"
   displayName: "Ubuntu build - cmake"
   pool:
     vmImage: "Ubuntu-16.04"
 
@@ -108,7 +111,7 @@ jobs:
       debugClang:
         setupCmd1: 'julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\", version=\"0.7.0\"))"'
         setupCmd2: 'JlCxxDir=$(julia -e "using libcxxwrap_julia_jll; print(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path))")'
-        buildCmd: 'CC=clang CXX=clang++ cmake -DJlCxx_DIR=$JlCxxDir/cmake/JlCxx -DZ3_BUILD_JULIA_BINDINGS=True $(cmakeStdArgs)'
+        buildCmd: 'CC=clang CXX=clang++ cmake -DJlCxx_DIR=$JlCxxDir/cmake/JlCxx $(cmakeJulia) $(cmakeStdArgs)'
         runTests: 'True'
       debugGcc:
         setupCmd1: ''
 
@@ -155,14 +158,14 @@ jobs:
         setupCmd1: ''
         setupCmd2: ''
        setupCmd3: ''
-        bindings: '-DZ3_BUILD_PYTHON_BINDINGS=True'
+        bindings: '$(cmakePy)'
        runTests: 'False'
      x64:
        arch: 'x64'
        setupCmd1: 'julia -e "using Pkg; Pkg.add(PackageSpec(name=\"libcxxwrap_julia_jll\", version=\"0.7.0\"))"'
        setupCmd2: 'julia -e "using libcxxwrap_julia_jll; print(dirname(libcxxwrap_julia_jll.libcxxwrap_julia_path))" > tmp.env'
        setupCmd3: 'set /P JlCxxDir=<tmp.env'
-        bindings: '-DJlCxx_DIR=%JlCxxDir%\..\lib\cmake\JlCxx -DZ3_BUILD_JULIA_BINDINGS=True -DZ3_BUILD_DOTNET_BINDINGS=True -DZ3_BUILD_JAVA_BINDINGS=True -DZ3_BUILD_PYTHON_BINDINGS=True'
+        bindings: '-DJlCxx_DIR=%JlCxxDir%\..\lib\cmake\JlCxx $(cmakeJava) $(cmakeNet) $(cmakePy) -DCMAKE_BUILD_TYPE=RelWithDebInfo'
        runTests: 'True'
      arm64:
        arch: 'amd64_arm64'
 
@@ -205,7 +208,7 @@ jobs:
     condition: eq(variables['runTests'], 'True')
 
 
-- job: "MacOS"
+- job: "MacOSPython"
   displayName: "MacOS build"
   pool:
     vmImage: "macOS-10.14"
 
@@ -238,7 +241,7 @@ jobs:
       set -e
       mkdir build
       cd build
-      CC=clang CXX=clang++ cmake -DJlCxx_DIR=$JlCxxDir -DZ3_BUILD_JULIA_BINDINGS=True -DZ3_BUILD_JAVA_BINDINGS=True -DZ3_BUILD_PYTHON_BINDINGS=True -DZ3_BUILD_DOTNET_BINDINGS=False -G "Ninja" ../
+      CC=clang CXX=clang++ cmake -DJlCxx_DIR=$JlCxxDir $(cmakeJulia) $(cmakeJava) $(cmakePy) -DZ3_BUILD_DOTNET_BINDINGS=False -G "Ninja" ../
       ninja
       ninja test-z3
       cd ..
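Aside (not part of the diff): the one build-type change in the pipeline hunks above is the -DCMAKE_BUILD_TYPE=RelWithDebInfo added to the Windows x64 bindings line, which is presumably the "release mode" of the commit title. CMake's RelWithDebInfo configuration typically compiles with optimization plus debug info and defines NDEBUG, so assert-style checks are compiled out. The toy program below only illustrates that effect; it is an assumed example, not Z3 code.

// Illustration only (assumed example, not Z3 code): NDEBUG, which a
// RelWithDebInfo build typically defines, compiles assert() checks out.
#include <cassert>
#include <iostream>

int checked_div(int a, int b) {
    assert(b != 0 && "division by zero");  // active only when NDEBUG is not defined
    return b == 0 ? 0 : a / b;             // defensive fallback keeps optimized builds defined
}

int main() {
    std::cout << checked_div(10, 2) << "\n";  // prints 5 in both configurations
    // Typical invocations (assumption, mirroring CMake's default flags):
    //   debug:          g++ -g example.cpp
    //   RelWithDebInfo: g++ -O2 -g -DNDEBUG example.cpp
    return 0;
}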
@@ -35,6 +35,7 @@ Revision History:
 #include "util/small_object_allocator.h"
 #include "util/obj_ref.h"
 #include "util/ref_vector.h"
+#include "util/ref_pair_vector.h"
 #include "util/ref_buffer.h"
 #include "util/obj_mark.h"
 #include "util/obj_hashtable.h"
@@ -2467,6 +2468,9 @@ typedef ref_vector<var, ast_manager> var_ref_vector;
 typedef ref_vector<quantifier, ast_manager> quantifier_ref_vector;
 typedef app_ref_vector proof_ref_vector;
 
+typedef ref_pair_vector<expr, ast_manager> expr_ref_pair_vector;
+
+
 // -----------------------------------
 //
 // ast_buffer
@@ -23,14 +23,12 @@ Notes:
 #include "ast/ast_pp.h"
 #include "ast/arith_decl_plugin.h"
 #include "ast/rewriter/rewriter_types.h"
-#include "util/ref_pair_vector.h"
 #include "util/params.h"
 #include "util/lbool.h"
 #include "util/sign.h"
 #include "math/automata/automaton.h"
 #include "math/automata/symbolic_automata.h"
 
-typedef ref_pair_vector<expr, ast_manager> expr_ref_pair_vector;
 
 inline std::ostream& operator<<(std::ostream& out, expr_ref_pair_vector const& es) {
     for (auto const& p : es) {
@@ -682,10 +682,10 @@ namespace sat {
             bool tt = cur_solution(v);
             coeff_vector const& falsep = m_vars[v].m_watch[!tt];
             for (pbcoeff const& pbc : falsep) {
-                auto slack = constraint_slack(pbc.m_constraint_id);
+                int64_t slack = constraint_slack(pbc.m_constraint_id);
                 if (slack < 0)
                     ++best_bsb;
-                else if (slack < static_cast<int>(pbc.m_coeff))
+                else if (slack < static_cast<int64_t>(pbc.m_coeff))
                     best_bsb += num_unsat;
             }
             ++cit;
@@ -697,7 +697,7 @@ namespace sat {
             coeff_vector const& falsep = m_vars[v].m_watch[!cur_solution(v)];
             auto it = falsep.begin(), end = falsep.end();
             for (; it != end; ++it) {
-                auto slack = constraint_slack(it->m_constraint_id);
+                int64_t slack = constraint_slack(it->m_constraint_id);
                 if (slack < 0) {
                     if (bsb == best_bsb) {
                         break;
@@ -706,7 +706,7 @@ namespace sat {
                         ++bsb;
                     }
                 }
-                else if (slack < static_cast<int>(it->m_coeff)) {
+                else if (slack < static_cast<int64_t>(it->m_coeff)) {
                     bsb += num_unsat;
                     if (bsb > best_bsb) {
                         break;
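Both sat hunks above replace a plain int (or auto) slack with int64_t before comparing it against a pseudo-Boolean coefficient. The following self-contained sketch shows the kind of narrowing issue the wider cast avoids; the names and types are illustrative assumptions, not the solver's own.

// Sketch with assumed types (not Z3's): casting a large coefficient down to
// int typically wraps to a negative value on two's-complement targets and
// flips the comparison, while casting to int64_t preserves the ordering.
#include <cstdint>
#include <iostream>
#include <limits>

int main() {
    std::int64_t slack = 5;  // small positive slack
    std::uint64_t coeff =
        static_cast<std::uint64_t>(std::numeric_limits<int>::max()) + 10;

    bool narrow = slack < static_cast<int>(coeff);           // coefficient wraps negative: false
    bool wide   = slack < static_cast<std::int64_t>(coeff);  // value preserved: true

    std::cout << "narrow: " << narrow << ", wide: " << wide << "\n";  // narrow: 0, wide: 1
    return 0;
}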
@@ -103,21 +103,21 @@ literal_vector collect_induction_literals::operator()() {
 
 
 // --------------------------------------
-// create_induction_lemmas
+// induction_lemmas
 
-bool create_induction_lemmas::viable_induction_sort(sort* s) {
+bool induction_lemmas::viable_induction_sort(sort* s) {
     // potentially also induction on integers, sequences
     return m_dt.is_datatype(s) && m_dt.is_recursive(s);
 }
 
-bool create_induction_lemmas::viable_induction_parent(enode* p, enode* n) {
+bool induction_lemmas::viable_induction_parent(enode* p, enode* n) {
     app* o = p->get_owner();
     return 
         m_rec.is_defined(o) ||
         m_dt.is_constructor(o);
 }
 
-bool create_induction_lemmas::viable_induction_children(enode* n) {
+bool induction_lemmas::viable_induction_children(enode* n) {
     app* e = n->get_owner();
     if (m.is_value(e))
         return false;
@@ -132,7 +132,7 @@ bool create_induction_lemmas::viable_induction_children(enode* n) {
     return false;
 }
 
-bool create_induction_lemmas::viable_induction_term(enode* p, enode* n) {
+bool induction_lemmas::viable_induction_term(enode* p, enode* n) {
     return 
         viable_induction_sort(m.get_sort(n->get_owner())) &&
         viable_induction_parent(p, n) &&
@@ -145,7 +145,7 @@ bool create_induction_lemmas::viable_induction_term(enode* p, enode* n) {
 * and none of the roots are equivalent to a value in the current
 * congruence closure.
 */
-enode_vector create_induction_lemmas::induction_positions(enode* n) {
+enode_vector induction_lemmas::induction_positions(enode* n) {
     enode_vector result;
     enode_vector todo;
     auto add_todo = [&](enode* n) {
@@ -172,7 +172,7 @@ enode_vector create_induction_lemmas::induction_positions(enode* n) {
     return result;
 }
 
-void create_induction_lemmas::abstract1(enode* n, enode* t, expr* x, abstractions& result) {
+void induction_lemmas::abstract1(enode* n, enode* t, expr* x, abstractions& result) {
     expr_safe_replace rep(m);
     rep.insert(t->get_owner(), x);
     expr_ref e(n->get_owner(), m);
@@ -187,7 +187,7 @@ void create_induction_lemmas::abstract1(enode* n, enode* t, expr* x, abstraction
 * TBD: add term sharing throttle.
 * TDD: add depth throttle.
 */
-void create_induction_lemmas::abstract(enode* n, enode* t, expr* x, abstractions& result) {
+void induction_lemmas::abstract(enode* n, enode* t, expr* x, abstractions& result) {
     std::cout << "abs: " << result.size() << ": " << mk_pp(n->get_owner(), m) << "\n";
     if (n->get_root() == t->get_root()) {
         result.push_back(abstraction(m, x, n->get_owner(), t->get_owner()));
@@ -227,7 +227,7 @@ void create_induction_lemmas::abstract(enode* n, enode* t, expr* x, abstractions
 * If all evaluations are counter-examples, include
 * candidate generalization.
 */
-void create_induction_lemmas::filter_abstractions(bool sign, abstractions& abs) {
+void induction_lemmas::filter_abstractions(bool sign, abstractions& abs) {
     vector<expr_ref_vector> values;
     expr_ref_vector fmls(m);
     for (auto & a : abs) fmls.push_back(a.m_term);
@@ -251,6 +251,39 @@ void create_induction_lemmas::filter_abstractions(bool sign, abstractions& abs)
     abs.shrink(j);
 }
 
+/**
+   extract substitutions for x into accessor values of the same sort.
+   collect side-conditions for the accessors to be well defined.
+   apply a depth-bounded unfolding of datatype constructors to collect
+   accessor values beyond a first level and for nested (mutually recursive)
+   datatypes.
+ */
+void induction_lemmas::mk_hypothesis_substs(unsigned depth, expr* x, cond_substs_t& subst) {
+    expr_ref_vector conds(m);
+    mk_hypothesis_substs_rec(depth, m.get_sort(x), x, conds, subst);
+}
+
+void induction_lemmas::mk_hypothesis_substs_rec(unsigned depth, sort* s, expr* y, expr_ref_vector& conds, cond_substs_t& subst) {
+    sort* ys = m.get_sort(y);
+    for (func_decl* c : *m_dt.get_datatype_constructors(ys)) {
+        func_decl* is_c = m_dt.get_constructor_recognizer(c);
+        conds.push_back(m.mk_app(is_c, y));
+        for (func_decl* acc : *m_dt.get_constructor_accessors(c)) {
+            sort* rs = acc->get_range();
+            if (!m_dt.is_datatype(rs) || !m_dt.is_recursive(rs))
+                continue;
+            expr_ref acc_y(m.mk_app(acc, y), m);
+            if (rs == s) {
+                subst.push_back(std::make_pair(conds, acc_y));
+            }
+            if (depth > 1) {
+                mk_hypothesis_substs_rec(depth - 1, s, acc_y, conds, subst);
+            }
+        }
+        conds.pop_back();
+    }
+}
+
 /*
 * Create simple induction lemmas of the form:
 *
@@ -272,51 +305,81 @@ void create_induction_lemmas::filter_abstractions(bool sign, abstractions& abs)
 * the instance of s is true. In the limit one can
 * set beta to all instantiations of smaller values than sk.
 *
 * create_hypotheses creates induction hypotheses.
 */
 
-void create_induction_lemmas::create_hypotheses(
-    unsigned depth, expr* sk0, expr_ref& alpha, expr* sk, literal_vector& lits) {
-    if (depth == 0)
-        return;
-    sort* s = m.get_sort(sk);
-    for (func_decl* c : *m_dt.get_datatype_constructors(s)) {
-        func_decl* is_c = m_dt.get_constructor_recognizer(c);
-        for (func_decl* acc : *m_dt.get_constructor_accessors(c)) {
-            sort* rs = acc->get_range();
-            if (rs != s && !m_dt.is_datatype(rs))
-                continue;
-            if (rs != s && !m_dt.is_recursive(rs))
-                continue;
+void induction_lemmas::mk_hypothesis_lemma(expr_ref_vector const& conds, expr_pair_vector const& subst, literal alpha) {
+    expr* alpha_e = ctx.bool_var2expr(alpha.var());
+    expr_ref beta(alpha_e, m);
+    expr_safe_replace rep(m);
+    for (auto const& p : subst) {
+        rep.insert(p.first, p.second);
+    }
+    rep(beta); // set beta := alpha[sk/acc(acc2(sk))]
+    literal b_lit = mk_literal(beta);
+    if (alpha.sign()) b_lit.neg();
+    literal_vector lits;
+    lits.push_back(~alpha);
+    for (expr* c : conds) lits.push_back(~mk_literal(c));
+    lits.push_back(b_lit);
+    add_th_lemma(lits);
+}
 
-            lits.push_back(~mk_literal(m.mk_app(is_c, sk)));
-
-            // alpha & is_c(sk) & is_c'(acc(sk)) => ~alpha[acc'(acc(sk)]
-            if (rs != s && depth > 1) {
-                app_ref acc_sk(m.mk_app(acc, sk), m);
-                create_hypotheses(depth - 1, sk0, alpha, acc_sk, lits);
-            }
-            else {
-                expr_ref beta(alpha); // set beta := alpha[sk0/acc(sk)]
-                expr_safe_replace rep(m);
-                rep.insert(sk0, m.mk_app(acc, sk));
-                rep(beta);
-                literal b_lit = mk_literal(beta);
-
-                if (lits[0].sign()) b_lit.neg(); // maintain the same sign as alpha
-
-                // alpha & is_c(sk) => ~beta
-                lits.push_back(~b_lit);
-                literal_vector _lits(lits); // mk_clause could make destructive updates
-                add_th_lemma(_lits);
-                lits.pop_back();
-            }
-            lits.pop_back();
-        }
+void induction_lemmas::create_hypotheses(unsigned depth, expr* sk, literal alpha) {
+    expr_ref_vector conds(m);
+    cond_substs_t subst;
+    expr* alpha_e = ctx.bool_var2expr(alpha.var());
+    mk_hypothesis_substs(depth, sk, subst);
+    for (auto& p : subst) {
+        expr_pair_vector vec;
+        vec.push_back(std::make_pair(sk, p.second));
+        mk_hypothesis_lemma(p.first, vec, alpha);
+    }
+}
 
-void create_induction_lemmas::create_lemmas(expr* sk, abstraction& a, literal lit) {
+#if 0
+void induction_lemmas::create_hypotheses(unsigned depth, expr_ref_vector const& sks, literal alpha) {
+    if (sks.empty())
+        return;
+
+    // extract hypothesis substitutions
+    vector<std::pair<expr*, cond_substs_t>> substs;
+    for (expr* sk : sks) {
+        cond_substs_t subst;
+        mk_hypothesis_substs(depth, sk, subst);
+
+        // append the identity substitution:
+        expr_ref_vector conds(m);
+        subst.push_back(std::make_pair(conds, expr_ref(sk, m)));
+
+        substs.push_back(std::make_pair(sk, subst));
+    }
+
+    // create cross-product of instantiations:
+    vector<std::pair<expr_ref_vector, expr_pair_vector>> s1, s2;
+    si.push_back(std::make_pair(expr_ref_vector(m), expr_pair_vector()));
+    for (auto const& x2cond_sub : substs) {
+        for (auto const& cond_sub : x2cond_sub.second) {
+            s2.reset();
+            for (auto const& cond_subs : s1) {
+                expr_pair_vector pairs(cond_subs.second);
+                expr_ref_vector conds(cond_subs.first);
+                pairs.push_back(x2cond_sub.first, cond_sub.second);
+                conds.append(cond_sub.first);
+                s2.push_back(std::make_pair(conds, pairs));
+            }
+            s1.swap(s2);
+        }
+    }
+    s1.pop_back(); // last substitution is the identity.
+
+    // extract lemmas from instantiations
+    for (auto& p : s1) {
+        mk_hypothesis_lemam(p.first, p.second, alpha);
+    }
+}
+#endif
+
+void induction_lemmas::create_lemmas(expr* sk, abstraction& a, literal lit) {
     std::cout << "abstraction: " << a.m_term << "\n";
     sort* s = m.get_sort(sk);
     if (!m_dt.is_datatype(s))
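Reading the added mk_hypothesis_substs, mk_hypothesis_lemma and create_hypotheses together: for the literal alpha attached to induction position sk, every conditional substitution (c_1, ..., c_k, t) collected by mk_hypothesis_substs yields one theory clause from mk_hypothesis_lemma. In the notation of the surrounding comments, and with alpha's sign handled as in the code, that clause is

\[
  \lnot\alpha \;\lor\; \lnot c_1 \;\lor\; \dots \;\lor\; \lnot c_k \;\lor\; \alpha[sk \mapsto t]
  \qquad\text{equivalently}\qquad
  (\alpha \land c_1 \land \dots \land c_k) \;\rightarrow\; \alpha[sk \mapsto t],
\]

where each c_i is a constructor-recognizer condition and t is an accessor term of the same sort. For a cons-list (an assumed example) this reads: alpha & is-cons(sk) => alpha[sk := tail(sk)].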
@@ -325,11 +388,9 @@ void create_induction_lemmas::create_lemmas(expr* sk, abstraction& a, literal li
     literal alpha_lit = mk_literal(alpha);
     if (lit.sign()) alpha_lit.neg();
 
-    literal_vector lits;
-    lits.push_back(~alpha_lit);
-    create_hypotheses(1, sk, alpha, sk, lits);
-    lits.pop_back();
+    create_hypotheses(1, sk, alpha_lit);
 
+    literal_vector lits;
     // phi & eqs => alpha
     lits.push_back(~lit);
     for (auto const& p : a.m_eqs) {
@@ -339,14 +400,14 @@ void create_induction_lemmas::create_lemmas(expr* sk, abstraction& a, literal li
     add_th_lemma(lits);
 }
 
-void create_induction_lemmas::add_th_lemma(literal_vector const& lits) {
+void induction_lemmas::add_th_lemma(literal_vector const& lits) {
     IF_VERBOSE(0, ctx.display_literals_verbose(verbose_stream() << "lemma:\n", lits) << "\n");
     ctx.mk_clause(lits.size(), lits.c_ptr(), nullptr, smt::CLS_TH_AXIOM);
     // CLS_TH_LEMMA, but then should re-instance if GC'ed
     ++m_num_lemmas;
 }
 
-literal create_induction_lemmas::mk_literal(expr* e) {
+literal induction_lemmas::mk_literal(expr* e) {
     expr_ref _e(e, m);
     if (!ctx.e_internalized(e)) {
         ctx.internalize(e, false);
@@ -356,7 +417,7 @@ literal create_induction_lemmas::mk_literal(expr* e) {
     return ctx.get_literal(e);
 }
 
-bool create_induction_lemmas::operator()(literal lit) {
+bool induction_lemmas::operator()(literal lit) {
     unsigned num = m_num_lemmas;
     enode* r = ctx.bool_var2enode(lit.var());
     unsigned position = 0;
@@ -367,7 +428,6 @@ bool create_induction_lemmas::operator()(literal lit) {
         std::cout << "abstract " << mk_pp(t, m) << " " << sk << "\n";
         abstractions abs;
         abstract1(r, n, sk, abs);
-        // if (ab.size() > 1) abs.pop_back(); // last position has no generalizations
         if (abs.size() > 1) filter_abstractions(lit.sign(), abs);
         for (abstraction& a : abs) {
             create_lemmas(sk, a, lit);
@@ -377,7 +437,7 @@ bool create_induction_lemmas::operator()(literal lit) {
     return m_num_lemmas > num;
 }
 
-create_induction_lemmas::create_induction_lemmas(context& ctx, ast_manager& m, value_sweep& vs):
+induction_lemmas::induction_lemmas(context& ctx, ast_manager& m, value_sweep& vs):
     ctx(ctx),
     m(m),
     vs(vs),
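The header changes below mirror the rename and the new hypothesis-building entry points. To make the recursion in mk_hypothesis_substs_rec concrete, here is a hedged, dependency-free sketch of the same traversal on a hand-written datatype description (a cons-list); every name in it is an illustrative assumption rather than Z3's API, and mutually recursive sorts are collapsed into the single-sort case.

// Toy model of the depth-bounded accessor unfolding (assumed names, no Z3
// dependencies): walk the constructors of a recursive datatype, record the
// recognizer conditions under which each recursive accessor term is well
// defined, and recurse on that term while the depth budget lasts.
#include <iostream>
#include <string>
#include <utility>
#include <vector>

struct Accessor    { std::string name; bool recursive; };   // e.g. tail : list -> list
struct Constructor { std::string recognizer; std::vector<Accessor> accessors; };

// (conditions, accessor term): y may be replaced by the term under the conditions
using CondSubst = std::pair<std::vector<std::string>, std::string>;

void substs_rec(unsigned depth, const std::vector<Constructor>& dt,
                const std::string& y, std::vector<std::string>& conds,
                std::vector<CondSubst>& out) {
    for (const Constructor& c : dt) {
        conds.push_back(c.recognizer + "(" + y + ")");       // e.g. is-cons(y)
        for (const Accessor& a : c.accessors) {
            if (!a.recursive)
                continue;                                    // only recursive positions matter
            std::string acc_y = a.name + "(" + y + ")";      // e.g. tail(y)
            out.emplace_back(conds, acc_y);                  // candidate induction hypothesis
            if (depth > 1)
                substs_rec(depth - 1, dt, acc_y, conds, out);
        }
        conds.pop_back();
    }
}

int main() {
    // list = nil | cons(head : elem, tail : list)  -- assumed example datatype
    std::vector<Constructor> list = {
        { "is-nil",  {} },
        { "is-cons", { { "head", false }, { "tail", true } } }
    };
    std::vector<std::string> conds;
    std::vector<CondSubst> out;
    substs_rec(2, list, "x", conds, out);
    for (const CondSubst& cs : out) {
        std::string guard;
        for (const std::string& c : cs.first)
            guard += (guard.empty() ? "" : " & ") + c;
        std::cout << guard << "  ==>  x := " << cs.second << "\n";
    }
    // prints:
    //   is-cons(x)  ==>  x := tail(x)
    //   is-cons(x) & is-cons(tail(x))  ==>  x := tail(tail(x))
    return 0;
}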
@@ -48,7 +48,7 @@ namespace smt {
     /**
     * Synthesize induction lemmas from induction candidates
     */
-    class create_induction_lemmas {
+    class induction_lemmas {
         context& ctx;
         ast_manager& m;
         value_sweep& vs;
@@ -82,6 +82,8 @@ namespace smt {
            }
        };
        typedef vector<abstraction_arg> abstraction_args;
+       typedef std::pair<expr_ref_vector, expr_ref> cond_subst_t;
+       typedef vector<cond_subst_t> cond_substs_t;
 
        bool viable_induction_sort(sort* s);
        bool viable_induction_parent(enode* p, enode* n);
@@ -92,12 +94,15 @@ namespace smt {
        void abstract1(enode* n, enode* t, expr* x, abstractions& result);
        void filter_abstractions(bool sign, abstractions& abs);
        void create_lemmas(expr* sk, abstraction& a, literal lit);
-       void create_hypotheses(unsigned depth, expr* sk0, expr_ref& alpha, expr* sk, literal_vector& lits);
+       void mk_hypothesis_substs(unsigned depth, expr* x, cond_substs_t& subst);
+       void mk_hypothesis_substs_rec(unsigned depth, sort* s, expr* y, expr_ref_vector& conds, cond_substs_t& subst);
+       void mk_hypothesis_lemma(expr_ref_vector const& conds, expr_pair_vector const& subst, literal alpha);
+       void create_hypotheses(unsigned depth, expr* sk, literal alpha);
        literal mk_literal(expr* e);
        void add_th_lemma(literal_vector const& lits);
 
    public:
-       create_induction_lemmas(context& ctx, ast_manager& m, value_sweep& vs);
+       induction_lemmas(context& ctx, ast_manager& m, value_sweep& vs);
 
        bool operator()(literal lit);
    };
@@ -111,7 +116,7 @@ namespace smt {
        ast_manager& m;
        value_sweep vs;
        collect_induction_literals m_collect_literals;
-       create_induction_lemmas m_create_lemmas;
+       induction_lemmas m_create_lemmas;
 
        void init_values();
 