/*++
Copyright (c) 2018 Microsoft Corporation, Simon Cruanes

Module Name:

    theory_recfun.cpp

Abstract:

    Theory responsible for unrolling recursive functions

Author:

    Simon Cruanes December 2017

Revision History:

--*/

#include "util/stats.h"
#include "ast/ast_util.h"
#include "ast/ast_ll_pp.h"
#include "ast/for_each_expr.h"
#include "smt/theory_recfun.h"


#define TRACEFN(x) TRACE("recfun", tout << x << '\n';)

namespace smt {

    // The recfun decl plugin is registered with the ast_manager under the
    // "recfun" family id; we fetch it back here to share its state (m_util).
    theory_recfun::theory_recfun(context& ctx)
        : theory(ctx, ctx.get_manager().mk_family_id("recfun")),
          m_plugin(*reinterpret_cast<recfun::decl::plugin*>(m.get_plugin(get_family_id()))),
          m_util(m_plugin.u()),
          m_disabled_guards(m),
          m_enabled_guards(m),
          m_preds(m) {
    }

    theory_recfun::~theory_recfun() {
        // reset_eh also deallocates the pending guard expansions owned
        // by m_guard2pending.
        reset_eh();
    }

    char const * theory_recfun::get_name() const { return "recfun"; }

    theory* theory_recfun::mk_fresh(context* new_ctx) {
        return alloc(theory_recfun, *new_ctx);
    }

    /**
     * Internalize a Boolean atom of this theory: internalize its arguments,
     * create the enode and Boolean variable if missing, and - when relevancy
     * propagation is off - eagerly schedule a case expansion for defined
     * function applications (otherwise relevant_eh does it lazily).
     */
    bool theory_recfun::internalize_atom(app * atom, bool gate_ctx) {
        if (!u().has_defs()) {
            // no recursive definitions registered: nothing for this theory to do
            return false;
        }
        for (expr * arg : *atom) {
            ctx.internalize(arg, false);
        }
        if (!ctx.e_internalized(atom)) {
            ctx.mk_enode(atom, false, true, false);
        }
        if (!ctx.b_internalized(atom)) {
            bool_var v = ctx.mk_bool_var(atom);
            ctx.set_var_theory(v, get_id());
        }
        if (!ctx.relevancy() && u().is_defined(atom)) {
            push_case_expand(atom);
        }
        return true;
    }

    /**
     * Internalize a non-Boolean term; mirrors internalize_atom but creates
     * the enode with different flags (no Boolean variable is needed).
     */
    bool theory_recfun::internalize_term(app * term) {
        if (!u().has_defs()) {
            return false;
        }
        for (expr* e : *term) {
            ctx.internalize(e, false);
        }
        if (!ctx.e_internalized(term)) {
            ctx.mk_enode(term, false, false, true);
        }
        // the internalization of the arguments may have triggered the internalization
        // of term by the theory solver.
        if (!ctx.relevancy() && u().is_defined(term)) {
            push_case_expand(term);
        }
        return true;
    }

    /**
     * Reset all solver-local state. The expansions stored in m_guard2pending
     * are owned by this theory and must be deallocated explicitly.
     */
    void theory_recfun::reset_eh() {
        m_stats.reset();
        theory::reset_eh();
        m_disabled_guards.reset();
        m_enabled_guards.reset();
        for (auto & kv : m_guard2pending)
            dealloc(kv.m_value);
        m_guard2pending.reset();
    }

    /*
     * when `n` becomes relevant, if it's `f(t1...tn)` with `f` defined,
     * then case-expand `n`. If it's a macro we can also immediately
     * body-expand it.
     */
    void theory_recfun::relevant_eh(app * n) {
        SASSERT(ctx.relevancy());
        // TRACEFN("relevant_eh: (defined) " << u().is_defined(n) << " " << mk_pp(n, m));
        if (u().is_defined(n) && u().has_defs()) {
            push_case_expand(n);
        }
    }

    void theory_recfun::push_scope_eh() {
        theory::push_scope_eh();
        // remember how many depth-tracked predicates existed at this scope
        m_preds_lim.push_back(m_preds.size());
    }

    void theory_recfun::pop_scope_eh(unsigned num_scopes) {
        theory::pop_scope_eh(num_scopes);

        // restore depth book-keeping
        unsigned new_lim = m_preds_lim.size()-num_scopes;
#if 0
        // depth tracking of recursive unfolding is
        // turned off when enabling this code:
        unsigned start = m_preds_lim[new_lim];
        for (unsigned i = start; i < m_preds.size(); ++i) {
            m_pred_depth.remove(m_preds.get(i));
        }
        m_preds.resize(start);
#endif
        m_preds_lim.shrink(new_lim);
    }

    bool theory_recfun::can_propagate() {
        return m_qhead < m_propagation_queue.size();
    }

    /**
     * Drain the propagation queue, dispatching each item to the matching
     * axiom generator. m_qhead is trailed so that backtracking rewinds the
     * queue head (the queue itself is trailed by push()).
     */
    void theory_recfun::propagate() {
        if (m_qhead == m_propagation_queue.size())
            return;
        ctx.push_trail(value_trail<unsigned>(m_qhead));
        for (; m_qhead < m_propagation_queue.size() && !ctx.inconsistent(); ++m_qhead) {
            auto& p = *m_propagation_queue[m_qhead];
            if (p.is_guard())
                activate_guard(p.guard(), *m_guard2pending[p.guard()]);
            else if (p.is_core())
                block_core(p.core());
            else if (p.is_case())
                assert_case_axioms(p.case_ex());
            else
                assert_body_axiom(p.body());
        }
    }

    // enqueue a propagation item; the trail entry pops it again on backtracking
    void theory_recfun::push(propagation_item* p) {
        m_propagation_queue.push_back(p);
        ctx.push_trail(push_back_vector<scoped_ptr_vector<propagation_item>>(m_propagation_queue));
    }


    /**
     * make clause `depth_limit => ~guard`
     * the guard appears at a depth below the current cutoff.
     *
     * The guard's pending expansion is stored in m_guard2pending so it can
     * be activated later if should_research re-enables the guard.
     */
    void theory_recfun::disable_guard(expr* guard, expr_ref_vector const& guards) {
        SASSERT(!is_enabled_guard(guard));
        app_ref dlimit = m_util.mk_num_rounds_pred(m_num_rounds);
        expr_ref_vector core(m);
        core.push_back(dlimit);
        core.push_back(guard);
        if (!m_guard2pending.contains(guard)) {
            m_disabled_guards.push_back(guard);
            m_guard2pending.insert(guard, alloc(expr_ref_vector, guards));
        }
        TRACEFN("add core: " << core);
        push_core(core);
    }

    /**
     * retrieve depth associated with predicate or expression.
     * Unknown expressions default to depth 0 (find leaves d untouched).
     */
    unsigned theory_recfun::get_depth(expr* e) {
        SASSERT(u().is_defined(e) || u().is_case_pred(e));
        unsigned d = 0;
        m_pred_depth.find(e, d);
        return d;
    }

    /**
     * Update depth of subterms of e with respect to d.
 */
    void theory_recfun::set_depth_rec(unsigned d, expr* e) {
        // functor applied to every subterm; only app nodes can be
        // defined-function applications or case predicates, so quantifiers
        // and variables are ignored.
        struct insert_c {
            theory_recfun& th;
            unsigned m_depth;
            insert_c(theory_recfun& th, unsigned d): th(th), m_depth(d) {}
            void operator()(app* e) { th.set_depth(m_depth, e); }
            void operator()(quantifier*) {}
            void operator()(var*) {}
        };
        insert_c proc(*this, d);
        for_each_expr(proc, e);
    }

    // record the unfolding depth of `e` the first time it is seen;
    // an existing entry is never overwritten.
    void theory_recfun::set_depth(unsigned depth, expr* e) {
        if ((u().is_defined(e) || u().is_case_pred(e)) && !m_pred_depth.contains(e)) {
            m_pred_depth.insert(e, depth);
            m_preds.push_back(e);
        }
    }

    /**
     * if `is_true` and `v = C_f_i(t1...tn)`,
     * then body-expand i-th case of `f(t1...tn)`
     */
    void theory_recfun::assign_eh(bool_var v, bool is_true) {
        expr* e = ctx.bool_var2expr(v);
        if (is_true && u().is_case_pred(e)) {
            TRACEFN("assign_case_pred_true " << mk_pp(e, m));
            // body-expand
            push_body_expand(e);
        }
    }

    // replace `vars` by `args` in `e`; the result is simplified and its
    // subterms are depth-tagged one level deeper than the occurrence
    // that triggered the expansion.
    expr_ref theory_recfun::apply_args(
        unsigned depth,
        recfun::vars const & vars,
        expr_ref_vector const & args,
        expr * e) {
        SASSERT(is_standard_order(vars));
        var_subst subst(m, true);
        expr_ref new_body = subst(e, args);
        ctx.get_rewriter()(new_body); // simplify
        set_depth_rec(depth + 1, new_body);
        return new_body;
    }

    /**
     * Internalize `e` and return its literal, marked relevant.
     * A top-level negation is stripped first (is_not rebinds e to the
     * negated operand) and re-applied by negating the literal.
     */
    literal theory_recfun::mk_literal(expr* e) {
        bool is_not = m.is_not(e, e);
        ctx.internalize(e, false);
        literal lit = ctx.get_literal(e);
        ctx.mark_as_relevant(lit);
        if (is_not)
            lit.neg();
        return lit;
    }

    /**
     * Create a literal for `l = r`.
     * If either side has quantifiers, the equality cannot be internalized
     * directly; introduce a fresh Boolean proxy `fn` with fn <=> (l = r)
     * asserted as a new top-level assertion, and return fn's literal.
     * Otherwise normalize trivial true/false cases before building an
     * equality literal.
     */
    literal theory_recfun::mk_eq_lit(expr* l, expr* r) {
        literal lit;
        if (has_quantifiers(l) || has_quantifiers(r)) {
            expr_ref eq1(m.mk_eq(l, r), m);
            expr_ref fn(m.mk_fresh_const("rec-eq", m.mk_bool_sort()), m);
            expr_ref eq(m.mk_eq(fn, eq1), m);
            ctx.assert_expr(eq);
            ctx.internalize_assertions();
            lit = mk_literal(fn);
        }
        else {
            // put a true/false constant (if any) on the left
            if (m.is_true(r) || m.is_false(r))
                std::swap(l, r);
            if (m.is_true(l))
                lit = mk_literal(r);
            else if (m.is_false(l))
                lit = ~mk_literal(r);
            else
                lit = mk_eq(l, r, false);
        }
        ctx.mark_as_relevant(lit);
        return lit;
    }

    // assert the clause `~e1 or ~e2 or ...` ruling out the conjunction `core`
    void theory_recfun::block_core(expr_ref_vector const& core) {
        literal_vector clause;
        for (expr* e : core)
            clause.push_back(~mk_literal(e));
        ctx.mk_th_axiom(get_id(), clause);
    }

    /**
     * For functions f(args) that are given as macros f(vs) = rhs
     *
     * 1. substitute `e.args` for `vs` into the macro rhs
     * 2. add unit clause `f(args) = rhs`
     */
    void theory_recfun::assert_macro_axiom(recfun::case_expansion & e) {
        m_stats.m_macro_expansions++;
        TRACEFN("case expansion " << e);
        SASSERT(e.m_def->is_fun_macro());
        auto & vars = e.m_def->get_vars();
        expr_ref lhs(e.m_lhs, m);
        unsigned depth = get_depth(e.m_lhs);
        expr_ref rhs(apply_args(depth, vars, e.m_args, e.m_def->get_rhs()), m);
        literal lit = mk_eq_lit(lhs, rhs);
        std::function<literal(void)> fn = [&]() { return lit; };
        scoped_trace_stream _tr(*this, fn);
        ctx.mk_th_axiom(get_id(), 1, &lit);
        TRACEFN("macro expansion yields " << pp_lit(ctx, lit));
    }

    /**
     * Add case axioms for every case expansion path.
     *
     * assert `p(args) <=> And(guards)` (with CNF on the fly)
     *
     * also body-expand paths that do not depend on any defined fun
     */
    void theory_recfun::assert_case_axioms(recfun::case_expansion & e) {

        if (e.m_def->is_fun_macro()) {
            // macros have a single unconditional case
            assert_macro_axiom(e);
            return;
        }

        ++m_stats.m_case_expansions;
        TRACEFN("assert_case_axioms " << e
                << " with " << e.m_def->get_cases().size() << " cases");
        SASSERT(e.m_def->is_fun_defined());
        // add case-axioms for all case-paths
        // assert this was not defined before.
        literal_vector preds;
        auto & vars = e.m_def->get_vars();

        for (recfun::case_def const & c : e.m_def->get_cases()) {
            // applied predicate to `args`
            app_ref pred_applied = c.apply_case_predicate(e.m_args);
            SASSERT(u().owns_app(pred_applied));
            literal concl = mk_literal(pred_applied);
            preds.push_back(concl);

            // the case predicate inherits the depth of the expanded occurrence
            unsigned depth = get_depth(e.m_lhs);
            set_depth(depth, pred_applied);
            expr_ref_vector guards(m);
            for (auto & g : c.get_guards()) {
                guards.push_back(apply_args(depth, vars, e.m_args, g));
            }
            if (c.is_immediate()) {
                // path does not depend on a defined function: body-expand now
                recfun::body_expansion be(pred_applied, c, e.m_args);
                assert_body_axiom(be);
            }
            else if (!is_enabled_guard(pred_applied)) {
                // beyond the current depth cutoff: defer activation,
                // but keep the predicate in the closing disjunction below
                disable_guard(pred_applied, guards);
                continue;
            }
            activate_guard(pred_applied, guards);
        }

        // the disjunction of branches is asserted
        // to close the available cases.

        scoped_trace_stream _tr(*this, preds);
        ctx.mk_th_axiom(get_id(), preds);
    }

    /**
     * Assert `pred_applied <=> And(guards)` clausally:
     *   - for each guard g: `pred_applied => g`
     *   - plus the closing clause `pred_applied or ~g1 or ... or ~gn`
     */
    void theory_recfun::activate_guard(expr* pred_applied, expr_ref_vector const& guards) {
        literal concl = mk_literal(pred_applied);
        literal_vector lguards;
        lguards.push_back(concl);
        for (expr* ga : guards) {
            literal guard = mk_literal(ga);
            lguards.push_back(~guard);
            scoped_trace_stream _tr1(*this, ~concl, guard);
            ctx.mk_th_axiom(get_id(), ~concl, guard);
        }
        scoped_trace_stream _tr2(*this, lguards);
        ctx.mk_th_axiom(get_id(), lguards);
    }

    /**
     * For a guarded definition guards => f(vars) = rhs
     * and occurrence f(args)
     *
     * substitute `args` for `vars` in guards, and rhs
     * add axiom guards[args/vars] => f(args) = rhs[args/vars]
     *
     */
    void theory_recfun::assert_body_axiom(recfun::body_expansion & e) {
        ++m_stats.m_body_expansions;
        recfun::def & d = *e.m_cdef->get_def();
        auto & vars = d.get_vars();
        auto & args = e.m_args;
        SASSERT(is_standard_order(vars));
        unsigned depth = get_depth(e.m_pred);
        expr_ref lhs(u().mk_fun_defined(d, args), m);
        expr_ref rhs = apply_args(depth, vars, args, e.m_cdef->get_rhs());
        if (has_quantifiers(rhs)) {
            // quantified rhs cannot be internalized in the equality below;
            // proxy it through a fresh Boolean constant asserted at top level
            expr_ref fn(m.mk_fresh_const("rec-eq", m.mk_bool_sort()), m);
            expr_ref eq(m.mk_eq(fn, rhs), m);
            ctx.assert_expr(eq);
            ctx.internalize_assertions();
            rhs = fn;
        }
        literal_vector clause;
        for (auto & g : e.m_cdef->get_guards()) {
            expr_ref guard = apply_args(depth, vars, args, g);
            clause.push_back(~mk_literal(guard));
            if (clause.back() == true_literal) {
                // guard simplified to false: the axiom is trivially satisfied
                TRACEFN("body " << e << "\n" << clause << "\n" << guard);
                return;
            }
            if (clause.back() == false_literal) {
                // guard simplified to true: drop the redundant literal
                clause.pop_back();
            }
        }
        clause.push_back(mk_eq_lit(lhs, rhs));
        TRACEFN(e << "\n" << pp_lits(ctx, clause));
        std::function<literal_vector(void)> fn = [&]() { return clause; };
        scoped_trace_stream _tr(*this, fn);
        ctx.mk_th_axiom(get_id(), clause);
    }

    // keep propagating queued expansions until the queue is drained;
    // only then can the search be considered complete for this theory.
    final_check_status theory_recfun::final_check_eh() {
        if (can_propagate()) {
            TRACEFN("final\n");
            propagate();
            return FC_CONTINUE;
        }
        return FC_DONE;
    }

    /**
     * Assume the current round-limit predicate and the negation of every
     * disabled guard; if they show up in an unsat core, should_research
     * relaxes the corresponding limit and search resumes.
     */
    void theory_recfun::add_theory_assumptions(expr_ref_vector & assumptions) {
        if (u().has_defs() || !m_disabled_guards.empty()) {
            app_ref dlimit = m_util.mk_num_rounds_pred(m_num_rounds);
            TRACEFN("add_theory_assumption " << dlimit);
            assumptions.push_back(dlimit);
            for (expr* e : m_disabled_guards)
                assumptions.push_back(m.mk_not(e));
        }
    }

    // if `dlimit` or a disabled guard occurs in unsat core, return 'true'
    bool theory_recfun::should_research(expr_ref_vector & unsat_core) {
        bool found = false;
        expr* to_delete = nullptr;
        unsigned n = 0;
        unsigned current_depth = UINT_MAX;
        // reservoir-sample (uniformly, via ctx.get_random_value) one guard of
        // minimal depth among the disabled guards appearing in the core
        for (auto * ne : unsat_core) {
            expr* e = nullptr;
            if (m.is_not(ne, e) && is_disabled_guard(e)) {
                found = true;
                unsigned depth = get_depth(e);
                if (depth < current_depth)
                    n = 0; // strictly shallower guard: restart the sample
                if (depth <= current_depth && (ctx.get_random_value() % (++n)) == 0) {
                    to_delete = e;
                    current_depth = depth;
                }
            }
            else if (u().is_num_rounds(ne))
                found = true;
        }
        if (found) {
            m_num_rounds++;
            // fall back to an arbitrary disabled guard if the core only
            // contained the round-limit predicate
            if (!to_delete && !m_disabled_guards.empty())
                to_delete = m_disabled_guards.back();
            if (to_delete) {
                m_disabled_guards.erase(to_delete);
                m_enabled_guards.push_back(to_delete);
                IF_VERBOSE(2, verbose_stream() << "(smt.recfun :enable-guard " << mk_pp(to_delete, m) << ")\n");
            }
            else {
                IF_VERBOSE(2, verbose_stream() << "(smt.recfun :increment-round " << m_num_rounds << ")\n");
            }
            // re-queue activation of every guard enabled so far
            for (expr* g : m_enabled_guards)
                push_guard(g);
        }
        TRACEFN("should research " << found);
        return found;
    }

    void theory_recfun::display(std::ostream & out) const {
        out << "recfun\n";
        out << "disabled guards:\n" << m_disabled_guards << "\n";
        out << "enabled guards:\n" << m_enabled_guards << "\n";
    }

    void theory_recfun::collect_statistics(::statistics & st) const {
        st.update("recfun macro expansion", m_stats.m_macro_expansions);
        st.update("recfun case expansion", m_stats.m_case_expansions);
        st.update("recfun body expansion", m_stats.m_body_expansions);
    }

}