Mirror of https://github.com/Z3Prover/z3
Commit 207c1c509f (parent c95f66e02a)

fixes in horner's heuristic

Signed-off-by: Lev Nachmanson <levnach@hotmail.com>
@@ -241,7 +241,7 @@ void horner::add_linear_to_vector(const nex& e, vector<std::pair<rational, lpvar
         SASSERT(false);
     }
 }
-// e = a * can_t + b, but we do not calculate the offset here
+// e = a * can_t + b
 lp::lar_term horner::expression_to_normalized_term(nex& e, rational& a, rational& b) {
     TRACE("nla_horner_details", tout << e << "\n";);
     lpvar smallest_j;
@@ -253,12 +253,13 @@ lp::lar_term horner::expression_to_normalized_term(nex& e, rational& a, rational
             b += c.value();
         } else {
             add_linear_to_vector(c, v);
-            if (v.size() == 1 || smallest_j < v.back().second) {
+            if (v.size() == 1 || smallest_j > v.back().second) {
                 smallest_j = v.back().second;
                 a_index = v.size() - 1;
             }
         }
     }
     TRACE("nla_horner_details", tout << "a_index = " << a_index << ", v="; print_vector(v, tout) << "\n";);
     a = v[a_index].first;
     lp::lar_term t;
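The one-character change above flips the comparison so that, among the linear entries collected in v, the loop tracks the pair with the smallest variable index rather than the largest; that entry's coefficient later becomes the normalizing factor a. A minimal standalone sketch of the fixed selection logic follows (illustrative only, not the Z3 code: double stands in for rational, and index_of_min_var is a hypothetical helper name):

#include <cassert>
#include <cstddef>
#include <utility>
#include <vector>

using lpvar = unsigned;

// Return the position of the entry whose variable index is the smallest.
// Mirrors the corrected condition: update only when the new index is smaller.
std::size_t index_of_min_var(const std::vector<std::pair<double, lpvar>>& v) {
    assert(!v.empty());
    std::size_t a_index = 0;
    lpvar smallest_j = v[0].second;
    for (std::size_t k = 1; k < v.size(); ++k) {
        if (smallest_j > v[k].second) {   // '>' is the fix; '<' tracked the largest index
            smallest_j = v[k].second;
            a_index = k;
        }
    }
    return a_index;
}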
@@ -277,7 +278,8 @@ lp::lar_term horner::expression_to_normalized_term(nex& e, rational& a, rational
         }
     }
     TRACE("nla_horner_details", tout << a << "* (";
           lp::lar_solver::print_term_as_indices(t, tout) << ") + " << b << std::endl;);
+    SASSERT(t.is_normalized());
     return t;
 }
@@ -346,7 +348,7 @@ interv horner::interval_of_sum(const nex& e) {
     TRACE("nla_horner_details", tout << "e=" << e << "\n";);
     SASSERT(e.is_sum());
     interv i_e = interval_of_sum_no_terms(e);
-    if (e.sum_is_linear()) {
+    if (e.sum_is_a_linear_term()) {
         interv i_from_term ;
         if (interval_from_term(e, i_from_term)
             &&
@@ -2376,13 +2376,17 @@ void lar_solver::register_normalized_term(const lar_term& t, lpvar j) {
     lar_term normalized_t = t.get_normalized_by_min_var();
-    lp_assert(m_normalized_terms_to_columns.find(normalized_t) == m_normalized_terms_to_columns.end());
-    m_normalized_terms_to_columns[normalized_t] = j;
+    TRACE("lar_solver_terms", tout << "t="; print_term_as_indices(t, tout);
+          tout << ", normalized_t="; print_term_as_indices(normalized_t, tout) << "\n";);
+    if (m_normalized_terms_to_columns.find(normalized_t) == m_normalized_terms_to_columns.end()) {
+        m_normalized_terms_to_columns[normalized_t] = j;
+    } else {
+        TRACE("lar_solver_terms", tout << "the term has been seen already\n";);
+    }
 }
 
 void lar_solver::deregister_normalized_term(const lar_term& t) {
+    TRACE("lar_solver_terms", tout << "deregister term ";
+          print_term_as_indices(t, tout) << "\n";);
     lar_term normalized_t = t.get_normalized_by_min_var();
     lp_assert(m_normalized_terms_to_columns.find(normalized_t) != m_normalized_terms_to_columns.end());
     m_normalized_terms_to_columns.erase(normalized_t);
 }
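The register_normalized_term change replaces an assertion that a normalized term was never seen before with a check-then-insert, so registering the same normalized term twice keeps the first column binding instead of failing. A minimal sketch of that pattern, assuming std::map and a placeholder key type in place of Z3's u_map and lar_term:

#include <iostream>
#include <map>
#include <string>

using Term = std::string;   // placeholder for the normalized term key

// Tolerate duplicate registrations: only the first (term, column) pair is kept.
void register_normalized_term(std::map<Term, unsigned>& terms_to_columns,
                              const Term& normalized_t, unsigned j) {
    if (terms_to_columns.find(normalized_t) == terms_to_columns.end()) {
        terms_to_columns[normalized_t] = j;
    } else {
        std::cout << "the term has been seen already\n";  // the real code only emits a trace here
    }
}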
@@ -2395,9 +2399,15 @@ void lar_solver::register_existing_terms() {
 }
 
 unsigned lar_solver::fetch_normalized_term_column(const lar_term& c) const {
+    TRACE("lar_solver_terms", tout << "looking for term ";
+          print_term_as_indices(c, tout) << "\n";);
+    lp_assert(c.is_normalized());
     auto it = m_normalized_terms_to_columns.find(c);
-    if (it != m_normalized_terms_to_columns.end())
+    if (it != m_normalized_terms_to_columns.end()) {
+        TRACE("lar_solver_terms", tout << "got " << it->second << "\n" ;);
         return it->second;
+    }
+    TRACE("lar_solver_terms", tout << "have not found\n";);
     return -1;
 }
 } // namespace lp
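Note that fetch_normalized_term_column returns -1 through an unsigned return type, i.e. the maximum unsigned value, as its "not found" sentinel. A small sketch of the lookup-plus-sentinel pattern, under the same placeholder types as above rather than Z3's:

#include <limits>
#include <map>
#include <string>

using Term = std::string;   // placeholder for the normalized term key

// Return the registered column for a term, or unsigned(-1) when none exists.
unsigned fetch_column(const std::map<Term, unsigned>& terms_to_columns, const Term& t) {
    auto it = terms_to_columns.find(t);
    if (it != terms_to_columns.end())
        return it->second;
    return static_cast<unsigned>(-1);   // same value as std::numeric_limits<unsigned>::max()
}

bool column_found(unsigned col) {
    return col != std::numeric_limits<unsigned>::max();
}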
@@ -23,6 +23,7 @@
 namespace lp {
 class lar_term {
+    // the term evaluates to sum of m_coeffs
     typedef unsigned lpvar;
     u_map<mpq> m_coeffs;
 public:
     lar_term() {}
@@ -149,8 +150,41 @@ public:
     bool operator!=(const self_type &other) const { return !(*this == other); }
     };
 
-    const_iterator begin() const { return m_coeffs.begin();}
-    const_iterator end() const { return m_coeffs.end(); }
+    bool is_normalized() const {
+        lpvar min_var = -1;
+        mpq c;
+        for (const auto & p : *this) {
+            if (p.var() < min_var) {
+                min_var = p.var();
+            }
+        }
+        lar_term r;
+        for (const auto & p : *this) {
+            if (p.var() == min_var) {
+                return p.coeff().is_one();
+            }
+        }
+        lp_unreachable();
+        return false;
+    }
+
+    // a is the coefficient by which we divided the term to normalize it
+    lar_term get_normalized_by_min_var(mpq& a) const {
+        a = m_coeffs.begin()->second;
+        if (a.is_one()) {
+            return *this;
+        }
+        lar_term r;
+        auto it = m_coeffs.begin();
+        r.add_var(it->first);
+        it++;
+        for (; it != m_coeffs.end(); it++) {
+            r.add_coeff_var(it->second / a, it->first);
+        }
+        return r;
+    }
+    const_iterator begin() const { return const_iterator(m_coeffs.begin());}
+    const_iterator end() const { return const_iterator(m_coeffs.end()); }
 
 };
 }
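The two methods added above define what "normalized" means for a term: get_normalized_by_min_var divides every coefficient by the coefficient a of the variable with the smallest index, and is_normalized checks that this leading coefficient is already one. A conceptual sketch, assuming an ordered std::map in place of u_map and double in place of mpq (so the first entry belongs to the minimal variable):

#include <cassert>
#include <map>

using lpvar = unsigned;
using Term = std::map<lpvar, double>;   // variable index -> coefficient, ordered by index

// Divide all coefficients by the coefficient of the smallest variable; a
// receives that leading coefficient, and the leading variable ends up with 1.
Term get_normalized_by_min_var(const Term& t, double& a) {
    assert(!t.empty());
    a = t.begin()->second;
    Term r;
    for (const auto& p : t)
        r[p.first] = p.second / a;
    return r;
}

// A term is normalized when its smallest variable carries coefficient 1.
bool is_normalized(const Term& t) {
    return !t.empty() && t.begin()->second == 1.0;
}

Normalizing before the map lookups above lets proportional terms, for example 2x + 4y and x + 2y, collapse to the same key.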
@@ -102,9 +102,7 @@ std::ostream& print_linear_combination_customized(const vector<std::pair<T, unsigned
             val = -val;
         }
     }
-    if (val == 1)
-        out << " ";
-    else {
+    if (val != 1) {
         out << T_to_string(val);
     }
     out << var_str(it.second);
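The collapsed condition prints the coefficient only when it differs from 1; the removed branch used to emit a lone space for unit coefficients. A tiny sketch of the resulting rule, with T_to_string and var_str replaced by hypothetical stand-ins:

#include <sstream>
#include <string>

// Print one monomial: the coefficient is emitted only when it is not 1,
// so a unit monomial renders as "x3" rather than " x3" or "1x3".
std::string print_monomial(int val, unsigned var) {
    std::ostringstream out;
    if (val != 1)
        out << val;        // T_to_string(val) analogue
    out << "x" << var;     // var_str(it.second) analogue
    return out.str();
}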
@@ -955,7 +955,7 @@ void core::init_search() {
 }
 
 void core::init_to_refine() {
-    TRACE("nla_solver", tout << "emons:" << pp_emons(*this, m_emons););
+    TRACE("nla_solver_details", tout << "emons:" << pp_emons(*this, m_emons););
     m_to_refine.clear();
     m_to_refine.resize(m_lar_solver.number_of_vars());
     unsigned r = random(), sz = m_emons.number_of_monomials();
@@ -376,17 +376,20 @@ public:
         return *this;
     }
 
-    bool sum_is_linear() const {
+    bool sum_is_a_linear_term() const {
         SASSERT(is_sum());
         int degree = 0;
+        unsigned number_of_non_scalars = 0;
         for (auto & e : children()) {
             int d = e.get_degree();
             if (d > 1)
                 return false;
             if (d > degree)
                 degree = d;
+            if (!e.is_scalar())
+                number_of_non_scalars++;
         }
-        return degree == 1;
+        return degree == 1 && number_of_non_scalars > 1;
     }
 
     int get_degree() const {
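The renamed predicate is also strengthened: besides every child having degree at most 1 and the overall degree being exactly 1, the sum must now contain more than one non-scalar child before it is treated as a linear term. A toy illustration of the same logic, with a minimal stand-in for the nex children:

#include <vector>

struct Child {
    int degree;      // 0 for scalars, 1 for variables and linear parts
    bool is_scalar;
};

// A sum qualifies as a linear term only if no child has degree above 1,
// the overall degree is 1, and at least two children are non-scalars.
bool sum_is_a_linear_term(const std::vector<Child>& children) {
    int degree = 0;
    unsigned number_of_non_scalars = 0;
    for (const auto& e : children) {
        if (e.degree > 1)
            return false;
        if (e.degree > degree)
            degree = e.degree;
        if (!e.is_scalar)
            ++number_of_non_scalars;
    }
    return degree == 1 && number_of_non_scalars > 1;
}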