1 /* Boolector: Satisfiability Modulo Theories (SMT) solver.
2 *
3 * Copyright (C) 2007-2021 by the authors listed in the AUTHORS file.
4 *
5 * This file is part of Boolector.
6 * See COPYING for more information on using this software.
7 */
8
9 #include "btorbeta.h"
10 #include "btorbv.h"
11 #include "btorcore.h"
12 #include "btordbg.h"
13 #include "btorexp.h"
14 #include "btorlog.h"
15 #include "utils/btorhashint.h"
16 #include "utils/btorhashptr.h"
17 #include "utils/btormem.h"
18 #include "utils/btornodeiter.h"
19 #include "utils/btorutil.h"
20
21 #include "btorrewrite.h"
22
23 #include <assert.h>
24
25 // TODO: mul: power of 2 optimizations
26
27 /* TODO: lots of word level simplifications:
28 * a[7:4] == b[7:4] && a[3:0] == b[3:0] <=> a == b
29 * {a,b} == {c,d} with |a|=|c| <=> a == c && b == d
30 * ...
31 */
32 /* TODO a + 2 * a <=> 3 * a <=> and see below */
33 /* TODO strength reduction: a * 2 == a << 1 (really ?) */
34 /* TODO strength reduction: a * 3 == (a << 1) + a (really ?) */
35 /* TODO strength reduction: a / 2 == (a >> 1) (yes!) */
36 /* TODO strength reduction: a / 3 => higher bits zero (check!) */
37 /* TODO MAX-1 < a <=> a == MAX */
38
39 /* TODO (x < ~x) <=> !msb(x) */
40 /* TODO (~x < x) <=> msb(x) */
41
42 /* TODO to support GAUSS bubble up odd terms:
43 * (2 * a + 3 * y) + 4 * x => 3 * y + (2 * a + 4 * x)
44 * or alternatively normalize arithmetic terms/polynomials
45 * or simply always replace by equation.
46 */
47
48 /* TODO simplify (c * x + 2 * y) + x == 5 at GAUSS application
49 * by first (c + 1) * x + 2 * y == 5 and then check whether 'c'
50 * is even.
51 */
52
/* TODO How to handle 2 * x == 4 && 4 * x + 8 * y == 0 ?
54 * Maybe: x[30:0] == 2 && 4 * {x[31],2[30:0]} + 8 * y == 0?
55 * Then: x[30:0] == 2 && 8[31:0] + 8 *y == 0?
56 * Then: x[30:0] = 2 && 8 * y = -8
57 * Finally: x[30:0] = 2 && y[29:0] = -1
58 * etc.
59 */
60
61 /* recursive rewriting bound */
62 #define BTOR_REC_RW_BOUND (1 << 12)
63
64 /* iterative rewriting bounds */
65 #define BTOR_WRITE_CHAIN_NODE_RW_BOUND (1 << 5)
66 #define BTOR_READ_OVER_WRITE_DOWN_PROPAGATION_LIMIT (1 << 11)
67 #define BTOR_APPLY_PROPAGATION_LIMIT (1 << 13)
68
69 /* other rewriting bounds */
70 #define BTOR_FIND_AND_NODE_CONTRADICTION_LIMIT (1 << 4)
71
72 #define BTOR_INC_REC_RW_CALL(btor) \
73 do \
74 { \
75 (btor)->rec_rw_calls++; \
76 if ((btor)->rec_rw_calls > (btor)->stats.max_rec_rw_calls) \
77 (btor)->stats.max_rec_rw_calls = (btor)->rec_rw_calls; \
78 } while (0)
79
80 #define BTOR_DEC_REC_RW_CALL(btor) \
81 do \
82 { \
83 assert ((btor)->rec_rw_calls > 0); \
84 (btor)->rec_rw_calls--; \
85 } while (0)
86
87 // TODO: special_const_binary rewriting may return 0, hence the check if
88 // (result), may be obsolete if special_const_binary will be split
89 #ifndef NDEBUG
90 #define ADD_RW_RULE(rw_rule, ...) \
91 if (applies_##rw_rule (btor, __VA_ARGS__)) \
92 { \
93 assert (!result); \
94 result = apply_##rw_rule (btor, __VA_ARGS__); \
95 if (result) \
96 { \
97 if (btor->stats.rw_rules_applied) \
98 { \
99 BtorPtrHashBucket *b = \
100 btor_hashptr_table_get (btor->stats.rw_rules_applied, #rw_rule); \
101 if (!b) \
102 b = btor_hashptr_table_add (btor->stats.rw_rules_applied, #rw_rule); \
103 b->data.as_int += 1; \
104 } \
105 goto DONE; \
106 } \
107 }
108 #else
109 #define ADD_RW_RULE(rw_rule, ...) \
110 if (applies_##rw_rule (btor, __VA_ARGS__)) \
111 { \
112 assert (!result); \
113 result = apply_##rw_rule (btor, __VA_ARGS__); \
114 if (result) goto DONE; \
115 }
116 #endif
117 //{fprintf (stderr, "apply: %s (%s)\n", #rw_rule, __FUNCTION__);
118
119 /* -------------------------------------------------------------------------- */
120 /* rewrite cache */
121
122 static BtorNode *
check_rw_cache(Btor * btor,BtorNodeKind kind,int32_t id0,int32_t id1,int32_t id2)123 check_rw_cache (
124 Btor *btor, BtorNodeKind kind, int32_t id0, int32_t id1, int32_t id2)
125 {
126 BtorNode *result = 0;
127
128 int32_t cached_result_id =
129 btor_rw_cache_get (btor->rw_cache, kind, id0, id1, id2);
130 if (cached_result_id)
131 {
132 result = btor_node_get_by_id (btor, cached_result_id);
133 if (result)
134 {
135 btor->rw_cache->num_get++;
136 result = btor_node_copy (btor, btor_node_get_simplified (btor, result));
137 }
138 }
139 return result;
140 }
141
142 /* -------------------------------------------------------------------------- */
143 /* util functions */
144
145 static bool
is_const_zero_exp(Btor * btor,BtorNode * exp)146 is_const_zero_exp (Btor *btor, BtorNode *exp)
147 {
148 assert (btor);
149 assert (exp);
150
151 bool result;
152
153 exp = btor_simplify_exp (btor, exp);
154
155 if (!btor_node_is_bv_const (exp)) return false;
156
157 if (btor_node_is_inverted (exp))
158 result = btor_bv_is_ones (btor_node_bv_const_get_bits (exp));
159 else
160 result = btor_bv_is_zero (btor_node_bv_const_get_bits (exp));
161
162 return result;
163 }
164
#if 0
/* Currently disabled / unused: true iff 'exp' simplifies to the all-ones
 * bit-vector constant.  Mirrors is_const_zero_exp with the polarity of
 * the inversion check swapped. */
static bool
is_const_ones_exp (Btor * btor, BtorNode * exp)
{
  assert (btor);
  assert (exp);

  bool result;

  exp = btor_simplify_exp (btor, exp);

  if (!btor_node_is_bv_const (exp))
    return false;

  if (btor_node_is_inverted (exp))
    result = btor_is_zero_const (btor_node_bv_const_get_bits (exp));
  else
    result = btor_is_ones_const (btor_node_bv_const_get_bits (exp));

  return result;
}
#endif
187
188 static bool
is_bv_const_zero_or_ones_exp(Btor * btor,BtorNode * exp)189 is_bv_const_zero_or_ones_exp (Btor *btor, BtorNode *exp)
190 {
191 assert (btor);
192 assert (exp);
193
194 bool result;
195 BtorBitVector *bits;
196
197 exp = btor_simplify_exp (btor, exp);
198
199 if (!btor_node_is_bv_const (exp)) return false;
200
201 bits = btor_node_bv_const_get_bits (exp);
202 result = btor_bv_is_zero (bits) || btor_bv_is_ones (bits);
203
204 return result;
205 }
206
207 static bool
is_odd_bv_const_exp(BtorNode * exp)208 is_odd_bv_const_exp (BtorNode *exp)
209 {
210 BtorBitVector *bits;
211
212 if (!btor_node_is_bv_const (exp)) return false;
213 if (btor_node_is_inverted (exp)) return false;
214
215 bits = btor_node_bv_const_get_bits (exp);
216 return btor_bv_get_bit (bits, 0) == 1;
217 }
218
/* Structural check whether 'exp' encodes a bit-wise XOR (or, when 'exp'
 * is inverted, the second pattern below) built from AND/NOT nodes:
 *   a ^ b  ==  !(!a & !b) & !(a & b)
 * Only the exact two shapes listed below are recognized; any other
 * encoding returns false. */
static bool
is_xor_exp (Btor * btor, BtorNode * exp)
{
  assert (btor);
  assert (exp);

  BtorNode *e0, *e1, *e0_0, *e0_1, *e1_0, *e1_1;

  exp = btor_simplify_exp (btor, exp);
  (void) btor;

  /* top node must be a (possibly inverted) AND */
  if (btor_node_real_addr (exp)->kind != BTOR_BV_AND_NODE) return false;

  /* both children must be inverted ANDs */
  e0 = btor_node_real_addr (exp)->e[0];
  if (!(btor_node_is_inverted (e0)
        && btor_node_real_addr (e0)->kind == BTOR_BV_AND_NODE))
    return false;

  e1 = btor_node_real_addr (exp)->e[1];
  if (!(btor_node_is_inverted (e1)
        && btor_node_real_addr (e1)->kind == BTOR_BV_AND_NODE))
    return false;

  e0_0 = btor_node_real_addr (e0)->e[0];
  e0_1 = btor_node_real_addr (e0)->e[1];
  e1_0 = btor_node_real_addr (e1)->e[0];
  e1_1 = btor_node_real_addr (e1)->e[1];

  /* we assume that the children of commutative operators are sorted by id */
  /* are children of e0 the same children as of e1 (ignoring sign) ? */
  /* if not we terminate with false */
  if (btor_node_real_addr (e0_0) != btor_node_real_addr (e1_0)) return false;
  if (btor_node_real_addr (e0_1) != btor_node_real_addr (e1_1)) return false;

  /* we check for two cases */
  /* first case: !(!a && !b) && !(a && b) */
  if (!btor_node_is_inverted (exp))
  {
    if (btor_node_is_inverted (e0_0) == btor_node_is_inverted (e0_1)
        && btor_node_is_inverted (e1_0) == btor_node_is_inverted (e1_1)
        && btor_node_is_inverted (e0_0) != btor_node_is_inverted (e1_0))
      return true;
  }
  /* second case: !((!a && b) && !(a && !b)) */
  else
  {
    if (btor_node_is_inverted (e0_0) != btor_node_is_inverted (e1_0)
        && btor_node_is_inverted (e0_1) != btor_node_is_inverted (e1_1)
        && btor_node_is_inverted (e0_0) != btor_node_is_inverted (e0_1))
      return true;
  }
  return false;
}
272
273 static bool
is_xnor_exp(Btor * btor,BtorNode * exp)274 is_xnor_exp (Btor *btor, BtorNode *exp)
275 {
276 assert (btor);
277 assert (exp);
278 exp = btor_simplify_exp (btor, exp);
279 return is_xor_exp (btor, btor_node_invert (exp));
280 }
281
282 static bool
slice_simplifiable(BtorNode * exp)283 slice_simplifiable (BtorNode *exp)
284 {
285 exp = btor_node_real_addr (exp);
286 return btor_node_is_bv_var (exp) || btor_node_is_bv_const (exp)
287 || btor_node_is_bv_slice (exp);
288 }
289
290 static bool
is_always_unequal(Btor * btor,BtorNode * e0,BtorNode * e1)291 is_always_unequal (Btor *btor, BtorNode *e0, BtorNode *e1)
292 {
293 BtorNode *real_e0, *real_e1;
294 BtorNode *e0_const = 0, *e0_node = 0, *e1_const = 0, *e1_node = 0;
295
296 e0 = btor_simplify_exp (btor, e0);
297 e1 = btor_simplify_exp (btor, e1);
298 assert (btor);
299 assert (e0);
300 assert (e1);
301 /* we need this so that a + 0 is rewritten to a,
302 * and constants are normalized (all inverted constants are odd) */
303 assert (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 0);
304
305 real_e0 = btor_node_real_addr (e0);
306 real_e1 = btor_node_real_addr (e1);
307
308 if (!real_e0 || !real_e1) return 0;
309
310 if (btor_node_is_fun (real_e0))
311 {
312 assert (btor_node_is_fun (real_e1));
313 return false;
314 }
315
316 assert (!btor_node_is_fun (real_e1));
317
318 if (e0 == btor_node_invert (e1)) return true;
319
320 if (btor_node_is_bv_const (real_e0) && btor_node_is_bv_const (real_e1)
321 && e0 != e1)
322 return true;
323
324 if (btor_node_is_bv_add (real_e0))
325 {
326 if (btor_node_is_bv_const (real_e0->e[0]))
327 {
328 e0_const = real_e0->e[0];
329 e0_node = real_e0->e[1];
330 }
331 else if (btor_node_is_bv_const (real_e0->e[1]))
332 {
333 e0_const = real_e0->e[1];
334 e0_node = real_e0->e[0];
335 }
336
337 if (e0_const && !is_const_zero_exp (btor, e0_const)
338 && btor_node_cond_invert (e0, e0_node) == e1)
339 return true;
340 }
341
342 if (btor_node_is_bv_add (real_e1))
343 {
344 if (btor_node_is_bv_const (real_e1->e[0]))
345 {
346 e1_const = real_e1->e[0];
347 e1_node = real_e1->e[1];
348 }
349 else if (btor_node_is_bv_const (real_e1->e[1]))
350 {
351 e1_const = real_e1->e[1];
352 e1_node = real_e1->e[0];
353 }
354
355 if (e1_const && !is_const_zero_exp (btor, e1_const)
356 && btor_node_cond_invert (e1, e1_node) == e1)
357 return true;
358 }
359
360 if (e0_const && e1_const
361 && btor_node_is_inverted (e0) == btor_node_is_inverted (e1))
362 {
363 return e0_node == e1_node && e0_const != e1_const;
364 }
365
366 return false;
367 }
368
369 static int32_t
cmp_node_id(const void * p,const void * q)370 cmp_node_id (const void *p, const void *q)
371 {
372 BtorNode *a = *(BtorNode **) p;
373 BtorNode *b = *(BtorNode **) q;
374 return btor_node_get_id (a) - btor_node_get_id (b);
375 }
376
/* Check whether AND term 'exp' contains ~e0 or ~e1 as a (transitive)
 * child, in which case conjoining 'exp' with e0 and e1 is trivially
 * contradictory.  Recursion is bounded by
 * BTOR_FIND_AND_NODE_CONTRADICTION_LIMIT via the shared 'calls' counter. */
static bool
find_and_contradiction_exp (
    Btor *btor, BtorNode *exp, BtorNode *e0, BtorNode *e1, uint32_t *calls)
{
  assert (btor);
  assert (exp);
  assert (e0);
  assert (e1);
  assert (calls);
  (void) btor;

  /* give up once the exploration budget is exhausted */
  if (*calls >= BTOR_FIND_AND_NODE_CONTRADICTION_LIMIT) return false;

  if (!btor_node_is_inverted (exp) && exp->kind == BTOR_BV_AND_NODE)
  {
    /* direct contradiction with one of this AND's children */
    if (exp->e[0] == btor_node_invert (e0) || exp->e[0] == btor_node_invert (e1)
        || exp->e[1] == btor_node_invert (e0)
        || exp->e[1] == btor_node_invert (e1))
      return true;
    *calls += 1;
    /* otherwise descend into both children */
    return find_and_contradiction_exp (btor, exp->e[0], e0, e1, calls)
           || find_and_contradiction_exp (btor, exp->e[1], e0, e1, calls);
  }
  return false;
}
402
403 static bool
is_concat_simplifiable(BtorNode * exp)404 is_concat_simplifiable (BtorNode *exp)
405 {
406 return btor_node_is_bv_var (exp) || btor_node_is_bv_const (exp);
407 }
408
/* Check whether lambda 'exp' has the structure of an array write:
 *   lambda param . ite (param == index, value, apply (array, param))
 * with 'value' not parameterized.  On success the optional output
 * parameters (may be NULL) receive the matched array, index and value. */
static bool
is_write_exp (BtorNode *exp,
              BtorNode **array,
              BtorNode **index,
              BtorNode **value)
{
  assert (exp);
  assert (btor_node_is_regular (exp));

  BtorNode *param, *body, *eq, *app;

  /* only unary lambdas can encode a write */
  if (!btor_node_is_lambda (exp)
      || btor_node_fun_get_arity (exp->btor, exp) > 1)
    return false;

  param = exp->e[0];
  body = btor_node_binder_get_body (exp);

  /* body must be a non-inverted if-then-else */
  if (btor_node_is_inverted (body) || !btor_node_is_bv_cond (body))
    return false;

  /* check condition: a parameterized equality involving 'param' */
  eq = body->e[0];
  if (btor_node_is_inverted (eq) || !btor_node_is_bv_eq (eq)
      || !eq->parameterized || (eq->e[0] != param && eq->e[1] != param))
    return false;

  /* check value: must not depend on the bound parameter */
  if (btor_node_real_addr (body->e[1])->parameterized) return false;

  /* check apply on unmodified array: else-branch reads array at 'param' */
  app = body->e[2];
  if (btor_node_is_inverted (app) || !btor_node_is_apply (app)
      || btor_node_args_get_arity (app->btor, app->e[1]) > 1
      || app->e[1]->e[0] != param)
    return false;

  if (array) *array = app->e[0];
  /* the index is whichever equality operand is not the parameter */
  if (index) *index = eq->e[1] == param ? eq->e[0] : eq->e[1];
  if (value) *value = body->e[1];
  return true;
}
451
452 static bool
is_true_cond(BtorNode * cond)453 is_true_cond (BtorNode *cond)
454 {
455 assert (cond);
456 assert (btor_node_bv_get_width (btor_node_real_addr (cond)->btor, cond) == 1);
457
458 if (btor_node_is_inverted (cond)
459 && !btor_bv_get_bit (btor_node_bv_const_get_bits (cond), 0))
460 return true;
461 else if (!btor_node_is_inverted (cond)
462 && btor_bv_get_bit (btor_node_bv_const_get_bits (cond), 0))
463 return true;
464
465 return false;
466 }
467
#if 0
/* Currently disabled / unused: check whether constant 'exp' is a single
 * contiguous run of 1-bits (a bit mask), honoring the inversion tag.
 * On success '*upper'/'*lower' receive the run's bit positions. */
static bool
is_bit_mask (BtorNode * exp, uint32_t * upper, uint32_t * lower)
{
  uint32_t i, len, inv, bit;
  int32_t first, last;
  BtorBitVector *bits;
  BtorNode *real_exp;

  real_exp = btor_node_real_addr (exp);

  *upper = 0; *lower = 0;
  first = -1; last = -1;

  if (!btor_node_is_bv_const (real_exp))
    return false;

  bits = btor_node_bv_const_get_bits (real_exp);
  inv = btor_node_is_inverted (exp);
  len = btor_node_bv_get_width (real_exp->btor, real_exp);
  for (i = 0; i < len; i++)
  {
    bit = btor_bv_get_bit (bits, i);
    if (inv) bit ^= 1;

    /* track the first and last index of the (single) run of ones */
    if (bit && first == -1)
      first = i;
    else if (!bit && first > -1 && last == -1)
      last = i - 1;

    /* a second run of ones disqualifies the mask */
    if (bit && last > -1)
      return false;
  }
  if (last == -1)
    last = len - 1;

  *upper = last;
  *lower = first;
  return true;
}
#endif
509
510 static bool
is_urem_exp(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode ** res_e0,BtorNode ** res_e1)511 is_urem_exp (Btor *btor,
512 BtorNode *e0,
513 BtorNode *e1,
514 BtorNode **res_e0,
515 BtorNode **res_e1)
516 {
517 BtorNode *mul, *udiv, *x, *y;
518
519 if (btor_node_bv_is_neg (btor, e0, &mul))
520 x = e1;
521 else if (btor_node_bv_is_neg (btor, e1, &mul))
522 x = e0;
523 else
524 return false;
525
526 if (btor_node_is_inverted (mul) || !btor_node_is_bv_mul (mul)) return false;
527
528 if (!btor_node_is_inverted (mul->e[0]) && btor_node_is_bv_udiv (mul->e[0]))
529 {
530 udiv = mul->e[0];
531 y = mul->e[0];
532 }
533 else if (!btor_node_is_inverted (mul->e[1])
534 && btor_node_is_bv_udiv (mul->e[1]))
535 {
536 udiv = mul->e[1];
537 y = mul->e[0];
538 }
539 else
540 return false;
541
542 if (udiv->e[0] == x && udiv->e[1] == y)
543 {
544 if (res_e0) *res_e0 = x;
545 if (res_e1) *res_e1 = y;
546 return true;
547 }
548 return false;
549 }
550
551 /* -------------------------------------------------------------------------- */
552
553 static BtorNode *rewrite_slice_exp (Btor *, BtorNode *, uint32_t, uint32_t);
554 static BtorNode *rewrite_eq_exp (Btor *, BtorNode *, BtorNode *);
555 static BtorNode *rewrite_ult_exp (Btor *, BtorNode *, BtorNode *);
556 static BtorNode *rewrite_and_exp (Btor *, BtorNode *, BtorNode *);
557 static BtorNode *rewrite_add_exp (Btor *, BtorNode *, BtorNode *);
558 static BtorNode *rewrite_mul_exp (Btor *, BtorNode *, BtorNode *);
559 static BtorNode *rewrite_udiv_exp (Btor *, BtorNode *, BtorNode *);
560 static BtorNode *rewrite_urem_exp (Btor *, BtorNode *, BtorNode *);
561 static BtorNode *rewrite_concat_exp (Btor *, BtorNode *, BtorNode *);
562 static BtorNode *rewrite_sll_exp (Btor *, BtorNode *, BtorNode *);
563 static BtorNode *rewrite_srl_exp (Btor *, BtorNode *, BtorNode *);
564 static BtorNode *rewrite_apply_exp (Btor *, BtorNode *, BtorNode *);
565 static BtorNode *rewrite_lambda_exp (Btor *, BtorNode *, BtorNode *);
566 static BtorNode *rewrite_forall_exp (Btor *, BtorNode *, BtorNode *);
567 static BtorNode *rewrite_exists_exp (Btor *, BtorNode *, BtorNode *);
568 static BtorNode *rewrite_cond_exp (Btor *, BtorNode *, BtorNode *, BtorNode *);
569
570 /* -------------------------------------------------------------------------- */
571 /* const term rewriting */
572 /* -------------------------------------------------------------------------- */
573
574 /*
575 * match: binary op with two constants
576 * result: constant
577 */
578 static inline bool
applies_const_binary_exp(Btor * btor,BtorNodeKind kind,BtorNode * e0,BtorNode * e1)579 applies_const_binary_exp (Btor *btor,
580 BtorNodeKind kind,
581 BtorNode *e0,
582 BtorNode *e1)
583 {
584 (void) btor;
585 (void) kind;
586 return btor_node_is_bv_const (e0) && btor_node_is_bv_const (e1);
587 }
588
/* Constant folding for binary operations: both operands are bit-vector
 * constants, so evaluate the operation on their bit-vector values and
 * return a fresh constant node.  Inverted operands are materialized via
 * btor_bv_not and the temporaries freed after the computation. */
static inline BtorNode *
apply_const_binary_exp (Btor *btor,
                        BtorNodeKind kind,
                        BtorNode *e0,
                        BtorNode *e1)
{
  assert (applies_const_binary_exp (btor, kind, e0, e1));

  bool invert_b0, invert_b1;
  BtorBitVector *b0, *b1, *bresult;
  BtorMemMgr *mm;
  BtorNode *result, *real_e0, *real_e1;

  mm = btor->mm;
  real_e0 = btor_node_real_addr (e0);
  real_e1 = btor_node_real_addr (e1);

  invert_b0 = btor_node_is_inverted (e0);
  invert_b1 = btor_node_is_inverted (e1);
  b0 = btor_node_bv_const_get_bits (real_e0);
  b1 = btor_node_bv_const_get_bits (real_e1);
  /* the stored bits belong to the non-inverted node */
  if (invert_b0) b0 = btor_bv_not (mm, b0);
  if (invert_b1) b1 = btor_bv_not (mm, b1);

  switch (kind)
  {
    case BTOR_BV_AND_NODE: bresult = btor_bv_and (mm, b0, b1); break;
    case BTOR_BV_EQ_NODE: bresult = btor_bv_eq (mm, b0, b1); break;
    case BTOR_BV_ADD_NODE: bresult = btor_bv_add (mm, b0, b1); break;
    case BTOR_BV_MUL_NODE: bresult = btor_bv_mul (mm, b0, b1); break;
    case BTOR_BV_ULT_NODE: bresult = btor_bv_ult (mm, b0, b1); break;
    case BTOR_BV_UDIV_NODE: bresult = btor_bv_udiv (mm, b0, b1); break;
    case BTOR_BV_UREM_NODE: bresult = btor_bv_urem (mm, b0, b1); break;
    case BTOR_BV_SLL_NODE: bresult = btor_bv_sll (mm, b0, b1); break;
    case BTOR_BV_SRL_NODE: bresult = btor_bv_srl (mm, b0, b1); break;
    default:
      assert (kind == BTOR_BV_CONCAT_NODE);
      bresult = btor_bv_concat (mm, b0, b1);
      break;
  }
  /* free temporaries created for inverted operands */
  if (invert_b0) btor_bv_free (mm, b0);
  if (invert_b1) btor_bv_free (mm, b1);
  result = btor_exp_bv_const (btor, bresult);
  btor_bv_free (mm, bresult);
  return result;
}
635
636 /*
637 * match: binary op with one constant
638 * result: constant
639 */
640 static inline bool
applies_special_const_lhs_binary_exp(Btor * btor,BtorNodeKind kind,BtorNode * e0,BtorNode * e1)641 applies_special_const_lhs_binary_exp (Btor *btor,
642 BtorNodeKind kind,
643 BtorNode *e0,
644 BtorNode *e1)
645 {
646 (void) btor;
647 (void) kind;
648 return btor_node_is_bv_const (e0) && !btor_node_is_bv_const (e1);
649 }
650
/* Rewrite a binary operation whose LEFT operand 'e0' is a bit-vector
 * constant.  Dispatches on the "special constant" class of e0 (zero,
 * one, ones, ...) and on the operation kind; returns a rewritten node or
 * 0 if no rule fires.  Recursive rewrites are guarded by
 * BTOR_REC_RW_BOUND via the rec_rw_calls counter. */
static inline BtorNode *
apply_special_const_lhs_binary_exp (Btor *btor,
                                    BtorNodeKind kind,
                                    BtorNode *e0,
                                    BtorNode *e1)
{
  assert (applies_special_const_lhs_binary_exp (btor, kind, e0, e1));

  char tmpstr[2] = {'\0', '\0'}, *bvstr;
  uint32_t pos, len, width_e0;
  bool invert_b0;
  BtorBitVector *b0, *bv;
  BtorMemMgr *mm;
  BtorSpecialConstBitVector sc;
  BtorNode *result = 0, *real_e0, *real_e1, *left, *right, *tmp1, *tmp2, *tmp3;
  BtorNode *tmp4, *eq;
  BtorNodePtrStack stack;
  BtorSortId sort;

  mm = btor->mm;
  real_e0 = btor_node_real_addr (e0);
  real_e1 = btor_node_real_addr (e1);
  invert_b0 = btor_node_is_inverted (e0);
  b0 = btor_node_bv_const_get_bits (real_e0);
  width_e0 = btor_node_bv_get_width (btor, real_e0);

  /* classify the constant; temporarily materialize inverted bits */
  if (invert_b0) b0 = btor_bv_not (mm, b0);
  sc = btor_bv_is_special_const (b0);
  if (invert_b0) btor_bv_free (mm, b0);

  switch (sc)
  {
    case BTOR_SPECIAL_CONST_BV_ZERO:
      switch (kind)
      {
        case BTOR_BV_EQ_NODE:
          if (width_e0 == 1)
            result = btor_exp_bv_not (btor, e1);
          else if (is_xor_exp (btor, e1)) /* 0 == (a ^ b) --> a = b */
          {
            if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
            {
              BTOR_INC_REC_RW_CALL (btor);
              /* dig out 'a' and 'b' from the AND/NOT encoding of XOR */
              result = rewrite_eq_exp (
                  btor,
                  btor_node_real_addr (
                      btor_node_real_addr (btor_node_real_addr (e1)->e[0])
                          ->e[0]),
                  btor_node_real_addr (
                      btor_node_real_addr (btor_node_real_addr (e1)->e[0])
                          ->e[1]));
              BTOR_DEC_REC_RW_CALL (btor);
            }
          }
          else if (btor_node_is_inverted (e1)
                   && real_e1->kind == BTOR_BV_AND_NODE)
          { /* 0 == a | b --> a == 0 && b == 0 */
            if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
            {
              BTOR_INC_REC_RW_CALL (btor);
              left =
                  rewrite_eq_exp (btor, btor_node_invert (real_e1->e[0]), e0);
              right =
                  rewrite_eq_exp (btor, btor_node_invert (real_e1->e[1]), e0);
              result = rewrite_and_exp (btor, left, right);
              BTOR_DEC_REC_RW_CALL (btor);
              btor_node_release (btor, left);
              btor_node_release (btor, right);
            }
          }
          break;
        case BTOR_BV_ULT_NODE: /* 0 < a --> a != 0 */
          result = btor_node_invert (rewrite_eq_exp (btor, e0, e1));
          break;
        case BTOR_BV_ADD_NODE: result = btor_node_copy (btor, e1); break;
        case BTOR_BV_MUL_NODE:
        case BTOR_BV_SLL_NODE:
        case BTOR_BV_SRL_NODE:
        case BTOR_BV_UREM_NODE:
        case BTOR_BV_AND_NODE:
          /* 0 absorbs these operations */
          result = btor_exp_bv_zero (btor, btor_node_get_sort_id (real_e0));
          break;
        case BTOR_BV_UDIV_NODE:
          /* 0 / a --> (a == 0) ? ones : 0 (division-by-zero semantics) */
          tmp2 = btor_exp_bv_zero (btor, btor_node_get_sort_id (real_e0));
          tmp4 = btor_exp_bv_ones (btor, btor_node_get_sort_id (real_e0));
          eq = rewrite_eq_exp (btor, e1, tmp2);
          result = rewrite_cond_exp (btor, eq, tmp4, tmp2);
          btor_node_release (btor, tmp2);
          btor_node_release (btor, eq);
          btor_node_release (btor, tmp4);
          break;
        default: break;
      }
      break;
    case BTOR_SPECIAL_CONST_BV_ONE_ONES:
      /* width-1 constant 1 (which is both 'one' and 'ones') */
      assert (width_e0 == 1);
      if (kind == BTOR_BV_AND_NODE || kind == BTOR_BV_EQ_NODE
          || kind == BTOR_BV_MUL_NODE)
        result = btor_node_copy (btor, e1);
      else if (kind == BTOR_BV_ULT_NODE)
        result = btor_exp_false (btor);
      break;
    case BTOR_SPECIAL_CONST_BV_ONE:
      /* 1 * a --> a */
      if (kind == BTOR_BV_MUL_NODE) result = btor_node_copy (btor, e1);
      break;
    case BTOR_SPECIAL_CONST_BV_ONES:
      if (kind == BTOR_BV_EQ_NODE)
      {
        if (is_xnor_exp (btor, e1)) /* 1+ == (a XNOR b) --> a = b */
        {
          if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
          {
            BTOR_INC_REC_RW_CALL (btor);
            result = rewrite_eq_exp (
                btor,
                btor_node_real_addr (
                    btor_node_real_addr (btor_node_real_addr (e1)->e[0])->e[0]),
                btor_node_real_addr (
                    btor_node_real_addr (btor_node_real_addr (e1)->e[0])
                        ->e[1]));
            BTOR_DEC_REC_RW_CALL (btor);
          }
        }
        else if (!btor_node_is_inverted (e1) && e1->kind == BTOR_BV_AND_NODE)
        { /* 1+ == a & b --> a == 1+ && b == 1+ */
          if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
          {
            BTOR_INC_REC_RW_CALL (btor);
            left = rewrite_eq_exp (btor, e1->e[0], e0);
            right = rewrite_eq_exp (btor, e1->e[1], e0);
            result = rewrite_and_exp (btor, left, right);
            BTOR_DEC_REC_RW_CALL (btor);
            btor_node_release (btor, left);
            btor_node_release (btor, right);
          }
        }
      }
      else if (kind == BTOR_BV_AND_NODE)
        result = btor_node_copy (btor, e1);
      else if (kind == BTOR_BV_ULT_NODE) /* UNSIGNED_MAX < x */
        result = btor_exp_false (btor);
      else if (kind == BTOR_BV_MUL_NODE)
        result = btor_exp_bv_neg (btor, e1);
      break;
    default:
      assert (sc == BTOR_SPECIAL_CONST_BV_NONE);
      /* generic constant vs. AND/OR: split the constant into maximal runs
       * of equal bits and constrain the corresponding slices of e1 */
      if (kind == BTOR_BV_EQ_NODE && real_e1->kind == BTOR_BV_AND_NODE
          && btor->rec_rw_calls < BTOR_REC_RW_BOUND)
      {
        BTOR_INC_REC_RW_CALL (btor);
        BTOR_INIT_STACK (btor->mm, stack);
        if (btor_node_is_inverted (e0))
          bv = btor_bv_not (btor->mm, btor_node_bv_const_get_bits (real_e0));
        else
          bv = btor_bv_copy (btor->mm, btor_node_bv_const_get_bits (real_e0));

        pos = 0;
        /* const == a | b */
        if (btor_node_is_inverted (e1))
        {
          while (pos < width_e0)
          {
            /* 'len' is the length of the run of equal bits at 'pos'
             * (bit string is printed MSB first) */
            bvstr = btor_bv_to_char (btor->mm, bv);
            tmpstr[0] = bvstr[pos];
            len = (uint32_t) strspn (bvstr + pos, tmpstr);
            btor_mem_freestr (btor->mm, bvstr);
            tmp1 = rewrite_slice_exp (btor,
                                      btor_node_invert (real_e1->e[0]),
                                      width_e0 - 1 - pos,
                                      width_e0 - pos - len);
            tmp2 = rewrite_slice_exp (btor,
                                      btor_node_invert (real_e1->e[1]),
                                      width_e0 - 1 - pos,
                                      width_e0 - pos - len);
            sort = btor_sort_bv (btor, len);
            if (tmpstr[0] == '0')
            {
              /* 0-run: both slices must be zero */
              tmp3 = btor_exp_bv_zero (btor, sort);
              BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp1, tmp3));
              BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp2, tmp3));
              btor_node_release (btor, tmp3);
            }
            else
            {
              assert (tmpstr[0] == '1');
              /* 1-run: the OR of the slices must be all ones */
              tmp3 = btor_exp_bv_or (btor, tmp1, tmp2);
              tmp4 = btor_exp_bv_ones (btor, sort);
              BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp3, tmp4));
              btor_node_release (btor, tmp3);
              btor_node_release (btor, tmp4);
            }
            btor_sort_release (btor, sort);
            btor_node_release (btor, tmp1);
            btor_node_release (btor, tmp2);
            pos += len;
          }
        }
        else
        {
          assert (!btor_node_is_inverted (e1));
          /* const == a & b */
          while (pos < width_e0)
          {
            bvstr = btor_bv_to_char (btor->mm, bv);
            tmpstr[0] = bvstr[pos];
            len = (uint32_t) strspn (bvstr + pos, tmpstr);
            btor_mem_freestr (btor->mm, bvstr);
            tmp1 = rewrite_slice_exp (
                btor, e1->e[0], width_e0 - 1 - pos, width_e0 - pos - len);
            tmp2 = rewrite_slice_exp (
                btor, e1->e[1], width_e0 - 1 - pos, width_e0 - pos - len);
            sort = btor_sort_bv (btor, len);
            if (tmpstr[0] == '1')
            {
              /* 1-run: both slices must be all ones */
              tmp3 = btor_exp_bv_ones (btor, sort);
              BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp1, tmp3));
              BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp2, tmp3));
              btor_node_release (btor, tmp3);
            }
            else
            {
              assert (tmpstr[0] == '0');
              /* 0-run: the AND of the slices must be zero */
              tmp3 = rewrite_and_exp (btor, tmp1, tmp2);
              tmp4 = btor_exp_bv_zero (btor, sort);
              BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp3, tmp4));
              btor_node_release (btor, tmp3);
              btor_node_release (btor, tmp4);
            }
            btor_sort_release (btor, sort);
            btor_node_release (btor, tmp1);
            btor_node_release (btor, tmp2);
            pos += len;
          }
        }

        /* conjoin all collected per-run constraints */
        result = btor_exp_true (btor);
        assert (!BTOR_EMPTY_STACK (stack));
        do
        {
          tmp1 = BTOR_POP_STACK (stack);
          tmp2 = rewrite_and_exp (btor, result, tmp1);
          btor_node_release (btor, result);
          result = tmp2;
          btor_node_release (btor, tmp1);
        } while (!BTOR_EMPTY_STACK (stack));
        btor_bv_free (btor->mm, bv);
        BTOR_RELEASE_STACK (stack);
        BTOR_DEC_REC_RW_CALL (btor);
      }
      break;
  }

  return result;
}
905
906 /*
907 * match: binary op with one constant
908 * result: constant
909 */
910 static inline bool
applies_special_const_rhs_binary_exp(Btor * btor,BtorNodeKind kind,BtorNode * e0,BtorNode * e1)911 applies_special_const_rhs_binary_exp (Btor *btor,
912 BtorNodeKind kind,
913 BtorNode *e0,
914 BtorNode *e1)
915 {
916 (void) btor;
917 (void) kind;
918 return !btor_node_is_bv_const (e0) && btor_node_is_bv_const (e1);
919 }
920
921 static inline BtorNode *
apply_special_const_rhs_binary_exp(Btor * btor,BtorNodeKind kind,BtorNode * e0,BtorNode * e1)922 apply_special_const_rhs_binary_exp (Btor *btor,
923 BtorNodeKind kind,
924 BtorNode *e0,
925 BtorNode *e1)
926 {
927 assert (applies_special_const_rhs_binary_exp (btor, kind, e0, e1));
928
929 char tmpstr[2] = {'\0', '\0'}, *bvstr;
930 uint32_t pos, len, width_e0, width_e1;
931 bool invert_b1;
932 BtorBitVector *b1, *bv;
933 BtorMemMgr *mm;
934 BtorSpecialConstBitVector sc;
935 BtorNode *result = 0, *real_e0, *real_e1, *left, *right, *tmp1, *tmp2, *tmp3;
936 BtorNode *tmp4;
937 BtorNodePtrStack stack;
938 BtorSortId sort;
939
940 mm = btor->mm;
941 real_e0 = btor_node_real_addr (e0);
942 real_e1 = btor_node_real_addr (e1);
943 invert_b1 = btor_node_is_inverted (e1);
944 b1 = btor_node_bv_const_get_bits (real_e1);
945 width_e0 = btor_node_bv_get_width (btor, real_e0);
946 width_e1 = btor_node_bv_get_width (btor, real_e1);
947
948 if (invert_b1) b1 = btor_bv_not (mm, b1);
949 sc = btor_bv_is_special_const (b1);
950 if (invert_b1) btor_bv_free (mm, b1);
951
952 switch (sc)
953 {
954 case BTOR_SPECIAL_CONST_BV_ZERO:
955 switch (kind)
956 {
957 case BTOR_BV_EQ_NODE:
958 if (width_e0 == 1)
959 result = btor_exp_bv_not (btor, e0);
960 else if (is_xor_exp (btor, e0)) /* (a ^ b) == 0 --> a = b */
961 {
962 if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
963 {
964 BTOR_INC_REC_RW_CALL (btor);
965 result = rewrite_eq_exp (
966 btor,
967 btor_node_real_addr (
968 btor_node_real_addr (btor_node_real_addr (e0)->e[0])
969 ->e[0]),
970 btor_node_real_addr (
971 btor_node_real_addr (btor_node_real_addr (e0)->e[0])
972 ->e[1]));
973 BTOR_DEC_REC_RW_CALL (btor);
974 }
975 }
976 else if (btor_node_is_inverted (e0)
977 && real_e0->kind == BTOR_BV_AND_NODE)
978 { /* a | b == 0 --> a == 0 && b == 0 */
979 if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
980 {
981 BTOR_INC_REC_RW_CALL (btor);
982 left =
983 rewrite_eq_exp (btor, btor_node_invert (real_e0->e[0]), e1);
984 right =
985 rewrite_eq_exp (btor, btor_node_invert (real_e0->e[1]), e1);
986 result = rewrite_and_exp (btor, left, right);
987 BTOR_DEC_REC_RW_CALL (btor);
988 btor_node_release (btor, left);
989 btor_node_release (btor, right);
990 }
991 }
992 break;
993 case BTOR_BV_SLL_NODE:
994 case BTOR_BV_SRL_NODE:
995 case BTOR_BV_UREM_NODE:
996 case BTOR_BV_ADD_NODE: result = btor_node_copy (btor, e0); break;
997 case BTOR_BV_MUL_NODE:
998 case BTOR_BV_AND_NODE:
999 result = btor_exp_bv_zero (btor, btor_node_get_sort_id (real_e0));
1000 break;
1001 case BTOR_BV_ULT_NODE: /* x < 0 */
1002 result = btor_exp_false (btor);
1003 break;
1004 case BTOR_BV_UDIV_NODE:
1005 result = btor_exp_bv_ones (btor, btor_node_get_sort_id (real_e0));
1006 break;
1007 default: break;
1008 }
1009 break;
1010 case BTOR_SPECIAL_CONST_BV_ONE_ONES:
1011 assert (width_e1 == 1);
1012 if (kind == BTOR_BV_AND_NODE || kind == BTOR_BV_EQ_NODE
1013 || kind == BTOR_BV_MUL_NODE || kind == BTOR_BV_UDIV_NODE)
1014 result = btor_node_copy (btor, e0);
1015 break;
1016 case BTOR_SPECIAL_CONST_BV_ONE:
1017 if (kind == BTOR_BV_MUL_NODE || kind == BTOR_BV_UDIV_NODE)
1018 result = btor_node_copy (btor, e0);
1019 else if (kind == BTOR_BV_UREM_NODE)
1020 result = btor_exp_bv_zero (btor, btor_node_get_sort_id (real_e0));
1021 else if (kind == BTOR_BV_ULT_NODE)
1022 {
1023 BTOR_INC_REC_RW_CALL (btor);
1024 tmp1 = btor_exp_bv_zero (btor, btor_node_get_sort_id (real_e0));
1025 result = rewrite_eq_exp (btor, e0, tmp1);
1026 btor_node_release (btor, tmp1);
1027 BTOR_DEC_REC_RW_CALL (btor);
1028 }
1029 break;
1030 case BTOR_SPECIAL_CONST_BV_ONES:
1031 if (kind == BTOR_BV_EQ_NODE)
1032 {
1033 if (is_xnor_exp (btor, e0)) /* (a XNOR b) == 1 --> a = b */
1034 {
1035 if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
1036 {
1037 BTOR_INC_REC_RW_CALL (btor);
1038 result = rewrite_eq_exp (
1039 btor,
1040 btor_node_real_addr (
1041 btor_node_real_addr (btor_node_real_addr (e0)->e[0])->e[0]),
1042 btor_node_real_addr (
1043 btor_node_real_addr (btor_node_real_addr (e0)->e[0])
1044 ->e[1]));
1045 BTOR_DEC_REC_RW_CALL (btor);
1046 }
1047 }
1048 else if (!btor_node_is_inverted (e0) && e0->kind == BTOR_BV_AND_NODE)
1049 {
1050 /* a & b == 1+ --> a == 1+ && b == 1+ */
1051 if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
1052 {
1053 BTOR_INC_REC_RW_CALL (btor);
1054 left = rewrite_eq_exp (btor, e0->e[0], e1);
1055 right = rewrite_eq_exp (btor, e0->e[1], e1);
1056 result = rewrite_and_exp (btor, left, right);
1057 BTOR_DEC_REC_RW_CALL (btor);
1058 btor_node_release (btor, left);
1059 btor_node_release (btor, right);
1060 }
1061 }
1062 }
1063 else if (kind == BTOR_BV_AND_NODE)
1064 result = btor_node_copy (btor, e0);
1065 else if (kind == BTOR_BV_ULT_NODE)
1066 {
1067 BTOR_INC_REC_RW_CALL (btor);
1068 result = btor_node_invert (rewrite_eq_exp (btor, e0, e1));
1069 BTOR_DEC_REC_RW_CALL (btor);
1070 }
1071 else if (kind == BTOR_BV_MUL_NODE)
1072 result = btor_exp_bv_neg (btor, e0);
1073 break;
1074 default:
1075 assert (sc == BTOR_SPECIAL_CONST_BV_NONE);
1076 if (kind == BTOR_BV_EQ_NODE && real_e0->kind == BTOR_BV_AND_NODE
1077 && btor->rec_rw_calls < BTOR_REC_RW_BOUND)
1078 {
1079 BTOR_INC_REC_RW_CALL (btor);
1080 BTOR_INIT_STACK (btor->mm, stack);
1081 if (btor_node_is_inverted (e1))
1082 bv = btor_bv_not (btor->mm, btor_node_bv_const_get_bits (real_e1));
1083 else
1084 bv = btor_bv_copy (btor->mm, btor_node_bv_const_get_bits (real_e1));
1085
1086 pos = 0;
1087 /* a | b == const */
1088 if (btor_node_is_inverted (e0))
1089 {
1090 while (pos < width_e1)
1091 {
1092 bvstr = btor_bv_to_char (btor->mm, bv);
1093 tmpstr[0] = bvstr[pos];
1094 len = (uint32_t) strspn (bvstr + pos, tmpstr);
1095 btor_mem_freestr (btor->mm, bvstr);
1096 tmp1 = rewrite_slice_exp (btor,
1097 btor_node_invert (real_e0->e[0]),
1098 width_e1 - 1 - pos,
1099 width_e1 - pos - len);
1100 tmp2 = rewrite_slice_exp (btor,
1101 btor_node_invert (real_e0->e[1]),
1102 width_e1 - 1 - pos,
1103 width_e1 - pos - len);
1104 sort = btor_sort_bv (btor, len);
1105 if (tmpstr[0] == '0')
1106 {
1107 tmp3 = btor_exp_bv_zero (btor, sort);
1108 BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp1, tmp3));
1109 BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp2, tmp3));
1110 btor_node_release (btor, tmp3);
1111 }
1112 else
1113 {
1114 assert (tmpstr[0] == '1');
1115 tmp3 = btor_exp_bv_or (btor, tmp1, tmp2);
1116 tmp4 = btor_exp_bv_ones (btor, sort);
1117 BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp3, tmp4));
1118 btor_node_release (btor, tmp3);
1119 btor_node_release (btor, tmp4);
1120 }
1121 btor_sort_release (btor, sort);
1122 btor_node_release (btor, tmp1);
1123 btor_node_release (btor, tmp2);
1124 pos += len;
1125 }
1126 }
1127 else
1128 {
1129 assert (!btor_node_is_inverted (e0));
1130 /* a & b == const */
1131 while (pos < width_e1)
1132 {
1133 bvstr = btor_bv_to_char (btor->mm, bv);
1134 tmpstr[0] = bvstr[pos];
1135 len = (uint32_t) strspn (bvstr + pos, tmpstr);
1136 btor_mem_freestr (btor->mm, bvstr);
1137 tmp1 = rewrite_slice_exp (
1138 btor, e0->e[0], width_e1 - 1 - pos, width_e1 - pos - len);
1139 tmp2 = rewrite_slice_exp (
1140 btor, e0->e[1], width_e1 - 1 - pos, width_e1 - pos - len);
1141 sort = btor_sort_bv (btor, len);
1142 if (tmpstr[0] == '1')
1143 {
1144 tmp3 = btor_exp_bv_ones (btor, sort);
1145 BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp1, tmp3));
1146 BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp2, tmp3));
1147 btor_node_release (btor, tmp3);
1148 }
1149 else
1150 {
1151 assert (tmpstr[0] == '0');
1152 tmp3 = rewrite_and_exp (btor, tmp1, tmp2);
1153 tmp4 = btor_exp_bv_zero (btor, sort);
1154 BTOR_PUSH_STACK (stack, rewrite_eq_exp (btor, tmp3, tmp4));
1155 btor_node_release (btor, tmp3);
1156 btor_node_release (btor, tmp4);
1157 }
1158 btor_sort_release (btor, sort);
1159 btor_node_release (btor, tmp1);
1160 btor_node_release (btor, tmp2);
1161 pos += len;
1162 }
1163 }
1164
1165 result = btor_exp_true (btor);
1166 assert (!BTOR_EMPTY_STACK (stack));
1167 do
1168 {
1169 tmp1 = BTOR_POP_STACK (stack);
1170 tmp2 = rewrite_and_exp (btor, result, tmp1);
1171 btor_node_release (btor, result);
1172 result = tmp2;
1173 btor_node_release (btor, tmp1);
1174 } while (!BTOR_EMPTY_STACK (stack));
1175 btor_bv_free (btor->mm, bv);
1176 BTOR_RELEASE_STACK (stack);
1177 BTOR_DEC_REC_RW_CALL (btor);
1178 }
1179 break;
1180 }
1181
1182 return result;
1183 }
1184
1185 /* -------------------------------------------------------------------------- */
1186 /* linear term rewriting */
1187 /* -------------------------------------------------------------------------- */
1188
1189 /* Can we rewrite 'term' as 'factor*lhs + rhs' where 'lhs' is a variable,
1190 * and 'factor' is odd? We check whether this is possible but do not use
1191 * more than 'bound' recursive calls. */
static bool
rewrite_linear_term_bounded (Btor *btor,
                             BtorNode *term,
                             BtorBitVector **factor_ptr,
                             BtorNode **lhs_ptr,
                             BtorNode **rhs_ptr,
                             uint32_t *bound_ptr)
{
  /* Try to decompose 'term' as 'factor * lhs + rhs' where 'lhs' is a
   * bit-vector variable and 'factor' is an odd constant.
   *
   * On success (return true) the caller owns the results:
   *   - *lhs_ptr and *rhs_ptr are fresh node references (release with
   *     btor_node_release),
   *   - *factor_ptr is a freshly allocated bit vector (free with
   *     btor_bv_free).
   * On failure (return false) no outputs are written by this frame
   * (recursive sub-calls clean up after themselves).
   *
   * '*bound_ptr' is a recursion budget shared across the whole traversal;
   * it is decremented once per call.  Note that it is unsigned, so the
   * '<= 0' test below is effectively an equality test against 0. */
  BtorNode *tmp, *other;
  BtorBitVector *factor;

  if (*bound_ptr <= 0) return false;

  *bound_ptr -= 1;

  if (btor_node_is_inverted (term))
  {
    /* term = ~subterm
     *      = -1 - subterm
     *      = -1 - (factor * lhs + rhs)
     *      = (-factor) * lhs + (-1 -rhs)
     *      = (-factor) * lhs + ~rhs
     */
    if (!rewrite_linear_term_bounded (btor,
                                      btor_node_invert (term),
                                      &factor,
                                      lhs_ptr,
                                      rhs_ptr,
                                      bound_ptr))
      return false;

    /* ~rhs needs no new reference, inverting only tags the pointer */
    *rhs_ptr    = btor_node_invert (*rhs_ptr);
    *factor_ptr = btor_bv_neg (btor->mm, factor);
    btor_bv_free (btor->mm, factor);
  }
  else if (term->kind == BTOR_BV_ADD_NODE)
  {
    if (rewrite_linear_term_bounded (
            btor, term->e[0], factor_ptr, lhs_ptr, &tmp, bound_ptr))
    {
      /* term = e0 + e1
       *      = (factor * lhs + rhs) + e1
       *      = factor * lhs + (e1 + rhs)
       */
      other = term->e[1];
    }
    else if (rewrite_linear_term_bounded (
                 btor, term->e[1], factor_ptr, lhs_ptr, &tmp, bound_ptr))
    {
      /* term = e0 + e1
       *      = e0 + (factor * lhs + rhs)
       *      = factor * lhs + (e0 + rhs)
       */
      other = term->e[0];
    }
    else
      return false;

    *rhs_ptr = rewrite_add_exp (btor, other, tmp);
    btor_node_release (btor, tmp);
  }
  else if (term->kind == BTOR_BV_MUL_NODE)
  {
    /* only descend through a multiplication by an odd constant: odd
     * constants are invertible modulo 2^n, which is what makes the
     * resulting linear equation solvable for 'lhs' */
    if (is_odd_bv_const_exp (term->e[0]))
    {
      if (!rewrite_linear_term_bounded (
              btor, term->e[1], &factor, lhs_ptr, &tmp, bound_ptr))
        return false;

      /* term = e0 * e1
       *      = e0 * (factor * lhs + rhs)
       *      = (e0 * factor) * lhs + e0 * rhs
       *      = (other * factor) * lhs + other * rhs
       */
      other = term->e[0];
    }
    else if (is_odd_bv_const_exp (term->e[1]))
    {
      if (!rewrite_linear_term_bounded (
              btor, term->e[0], &factor, lhs_ptr, &tmp, bound_ptr))
        return false;

      /* term = e0 * e1
       *      = (factor * lhs + rhs) * e1
       *      = (e1 * factor) * lhs + e1 * rhs
       *      = (other * factor) * lhs + other * rhs
       */
      other = term->e[1];
    }
    else
      return false;

    assert (!btor_node_is_inverted (other));
    *factor_ptr =
        btor_bv_mul (btor->mm, btor_node_bv_const_get_bits (other), factor);
    btor_bv_free (btor->mm, factor);
    *rhs_ptr = rewrite_mul_exp (btor, other, tmp);
    btor_node_release (btor, tmp);
  }
  else if (term->kind == BTOR_VAR_NODE)
  {
    /* base case: term = 1 * term + 0 */
    *lhs_ptr    = btor_node_copy (btor, term);
    *rhs_ptr    = btor_exp_bv_zero (btor, btor_node_get_sort_id (term));
    *factor_ptr = btor_bv_one (btor->mm, btor_node_bv_get_width (btor, term));
  }
  else
    return false;

  return true;
}
1302
1303 bool
btor_rewrite_linear_term(Btor * btor,BtorNode * term,BtorBitVector ** fp,BtorNode ** lp,BtorNode ** rp)1304 btor_rewrite_linear_term (Btor *btor,
1305 BtorNode *term,
1306 BtorBitVector **fp,
1307 BtorNode **lp,
1308 BtorNode **rp)
1309 {
1310 uint32_t bound = 100;
1311 bool res;
1312 res = rewrite_linear_term_bounded (btor, term, fp, lp, rp, &bound);
1313 if (res) btor->stats.linear_equations++;
1314 return res;
1315 }
1316
1317 /* -------------------------------------------------------------------------- */
1318 /* rewriting rules */
1319 /* -------------------------------------------------------------------------- */
1320
1321 /*
1322 * for each rule we define two functions:
1323 * static inline bool
1324 * applies_<rw_rule> (Btor * btor, ...)
1325 * {
1326 * }
1327 *
1328 * static inline BtorNode *
1329 * apply_<rw_rule> (Btor * btor, ...)
1330 * {
1331 * assert (applies_<rw_rule> (...));
1332 * }
1333 *
1334 * where the first one determines if <rw_rule> is applicable, and the second
1335 * one applies the rule.
1336 *
1337 * for adding rw rules to a rewrite function use the ADD_RW_RULE macro.
1338 */
1339
1340
1341 /* SLICE rules */
1342 /* -------------------------------------------------------------------------- */
1343
1344 /*
1345 * match: exp[len(exp) - 1:0]
1346 * result: exp
1347 */
1348 static inline bool
applies_full_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1349 applies_full_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1350 {
1351 (void) btor;
1352 return btor_node_bv_get_width (btor, exp) == upper - lower + 1;
1353 }
1354
1355 static inline BtorNode *
apply_full_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1356 apply_full_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1357 {
1358 assert (applies_full_slice (btor, exp, upper, lower));
1359 (void) btor;
1360 (void) upper;
1361 (void) lower;
1362 return btor_node_copy (btor, exp);
1363 }
1364
1365 /*
1366 * match: exp[upper:lower], where exp is a constant
1367 * result: constant
1368 */
1369 static inline bool
applies_const_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1370 applies_const_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1371 {
1372 (void) btor;
1373 (void) upper;
1374 (void) lower;
1375 return btor_node_is_bv_const (exp);
1376 }
1377
1378 static inline BtorNode *
apply_const_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1379 apply_const_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1380 {
1381 assert (applies_const_slice (btor, exp, upper, lower));
1382
1383 BtorBitVector *bits;
1384 BtorNode *result;
1385
1386 bits =
1387 btor_bv_slice (btor->mm, btor_node_bv_const_get_bits (exp), upper, lower);
1388 result = btor_exp_bv_const (btor, bits);
1389 result = btor_node_cond_invert (exp, result);
1390 btor_bv_free (btor->mm, bits);
1391 return result;
1392 }
1393
1394 /*
1395 * match: (exp[u:l])[upper:lower]
1396 * result: exp[l+upper:l+lower]
1397 */
1398 static inline bool
applies_slice_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1399 applies_slice_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1400 {
1401 (void) upper;
1402 (void) lower;
1403 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_slice (exp);
1404 }
1405
1406 static inline BtorNode *
apply_slice_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1407 apply_slice_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1408 {
1409 assert (applies_slice_slice (btor, exp, upper, lower));
1410
1411 BtorNode *result, *real_exp;
1412
1413 real_exp = btor_node_real_addr (exp);
1414 BTOR_INC_REC_RW_CALL (btor);
1415 result = rewrite_slice_exp (btor,
1416 btor_node_cond_invert (exp, real_exp->e[0]),
1417 btor_node_bv_slice_get_lower (real_exp) + upper,
1418 btor_node_bv_slice_get_lower (real_exp) + lower);
1419 BTOR_DEC_REC_RW_CALL (btor);
1420 return result;
1421 }
1422
1423 /*
1424 * match: (a::b)[len(b)-1:0]
1425 * result: b
1426 */
1427 static inline bool
applies_concat_lower_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1428 applies_concat_lower_slice (Btor *btor,
1429 BtorNode *exp,
1430 uint32_t upper,
1431 uint32_t lower)
1432 {
1433 (void) btor;
1434 return btor_node_is_bv_concat (exp) && lower == 0
1435 && btor_node_bv_get_width (btor, btor_node_real_addr (exp)->e[1])
1436 == upper - lower + 1;
1437 }
1438
1439 static inline BtorNode *
apply_concat_lower_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1440 apply_concat_lower_slice (Btor *btor,
1441 BtorNode *exp,
1442 uint32_t upper,
1443 uint32_t lower)
1444 {
1445 assert (applies_concat_lower_slice (btor, exp, upper, lower));
1446 (void) upper;
1447 (void) lower;
1448
1449 BtorNode *result;
1450
1451 result = btor_node_cond_invert (exp, btor_node_real_addr (exp)->e[1]);
1452 return btor_node_copy (btor, result);
1453 }
1454
1455 /*
1456 * match: (a::b)[len(a)+len(b)-1:len(b)]
1457 * result: a
1458 */
1459 static inline bool
applies_concat_upper_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1460 applies_concat_upper_slice (Btor *btor,
1461 BtorNode *exp,
1462 uint32_t upper,
1463 uint32_t lower)
1464 {
1465 return btor_node_is_bv_concat (exp)
1466 && btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) < 3
1467 && upper == btor_node_bv_get_width (btor, exp) - 1
1468 && btor_node_bv_get_width (btor, btor_node_real_addr (exp)->e[0])
1469 == upper - lower + 1;
1470 }
1471
1472 static inline BtorNode *
apply_concat_upper_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1473 apply_concat_upper_slice (Btor *btor,
1474 BtorNode *exp,
1475 uint32_t upper,
1476 uint32_t lower)
1477 {
1478 assert (applies_concat_upper_slice (btor, exp, upper, lower));
1479 (void) upper;
1480 (void) lower;
1481
1482 BtorNode *result;
1483
1484 result = btor_node_cond_invert (exp, btor_node_real_addr (exp)->e[0]);
1485 return btor_node_copy (btor, result);
1486 }
1487
1488 /*
1489 * match: (a::b)[upper:lower], where lower >= len(b)
1490 * result: a[upper-len(b):lower-len(b)]
1491 *
1492 * concats are normalized at rewrite level 3,
1493 * we recursively check if slice and child of concat matches
1494 */
1495 static inline bool
applies_concat_rec_upper_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1496 applies_concat_rec_upper_slice (Btor *btor,
1497 BtorNode *exp,
1498 uint32_t upper,
1499 uint32_t lower)
1500 {
1501 (void) upper;
1502 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) >= 3
1503 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1504 && btor_node_is_bv_concat (exp)
1505 && lower >= btor_node_bv_get_width (btor,
1506 btor_node_real_addr (exp)->e[1]);
1507 }
1508
1509 static inline BtorNode *
apply_concat_rec_upper_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1510 apply_concat_rec_upper_slice (Btor *btor,
1511 BtorNode *exp,
1512 uint32_t upper,
1513 uint32_t lower)
1514 {
1515 assert (applies_concat_rec_upper_slice (btor, exp, upper, lower));
1516
1517 uint32_t len;
1518 BtorNode *result, *real_exp;
1519
1520 real_exp = btor_node_real_addr (exp);
1521 len = btor_node_bv_get_width (btor, real_exp->e[1]);
1522 BTOR_INC_REC_RW_CALL (btor);
1523 result = rewrite_slice_exp (btor,
1524 btor_node_cond_invert (exp, real_exp->e[0]),
1525 upper - len,
1526 lower - len);
1527 BTOR_DEC_REC_RW_CALL (btor);
1528 return result;
1529 }
1530
1531 /*
1532 * match: (a::b)[upper:lower], where upper < len(b)
1533 * result: b[upper:lower]
1534 *
1535 * concats are normalized at rewrite level 3,
1536 * we recursively check if slice and child of concat matches
1537 */
1538 static inline bool
applies_concat_rec_lower_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1539 applies_concat_rec_lower_slice (Btor *btor,
1540 BtorNode *exp,
1541 uint32_t upper,
1542 uint32_t lower)
1543 {
1544 (void) lower;
1545 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) >= 3
1546 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1547 && btor_node_is_bv_concat (exp)
1548 && upper < btor_node_bv_get_width (btor,
1549 btor_node_real_addr (exp)->e[1]);
1550 }
1551
1552 static inline BtorNode *
apply_concat_rec_lower_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1553 apply_concat_rec_lower_slice (Btor *btor,
1554 BtorNode *exp,
1555 uint32_t upper,
1556 uint32_t lower)
1557 {
1558 assert (applies_concat_rec_lower_slice (btor, exp, upper, lower));
1559
1560 BtorNode *result;
1561
1562 BTOR_INC_REC_RW_CALL (btor);
1563 result = rewrite_slice_exp (
1564 btor,
1565 btor_node_cond_invert (exp, btor_node_real_addr (exp)->e[1]),
1566 upper,
1567 lower);
1568 BTOR_DEC_REC_RW_CALL (btor);
1569 return result;
1570 }
1571
1572 /*
1573 * match: (a::b)[upper:lower], where lower = 0 and upper >= len(b)
1574 * result: a[upper-len(b):0]::b
1575 *
1576 * concats are normalized at rewrite level 3,
1577 * we recursively check if slice and child of concat matches
1578 */
1579 static inline bool
applies_concat_rec_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1580 applies_concat_rec_slice (Btor *btor,
1581 BtorNode *exp,
1582 uint32_t upper,
1583 uint32_t lower)
1584 {
1585 return btor_node_is_bv_concat (exp)
1586 && btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) >= 3
1587 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && lower == 0
1588 && upper >= btor_node_bv_get_width (btor,
1589 btor_node_real_addr (exp)->e[1]);
1590 }
1591
1592 static inline BtorNode *
apply_concat_rec_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1593 apply_concat_rec_slice (Btor *btor,
1594 BtorNode *exp,
1595 uint32_t upper,
1596 uint32_t lower)
1597 {
1598 assert (applies_concat_rec_slice (btor, exp, upper, lower));
1599 (void) lower;
1600
1601 uint32_t len;
1602 BtorNode *result, *real_exp, *tmp;
1603
1604 real_exp = btor_node_real_addr (exp);
1605 len = btor_node_bv_get_width (btor, real_exp->e[1]);
1606 BTOR_INC_REC_RW_CALL (btor);
1607 tmp = rewrite_slice_exp (
1608 btor, btor_node_cond_invert (exp, real_exp->e[0]), upper - len, 0);
1609 result = rewrite_concat_exp (
1610 btor, tmp, btor_node_cond_invert (exp, real_exp->e[1]));
1611 BTOR_DEC_REC_RW_CALL (btor);
1612 btor_node_release (btor, tmp);
1613 return result;
1614 }
1615
1616 /*
1617 * match: (a & b)[upper:lower]
1618 * result: a[upper:lower] & b[upper:lower]
1619 */
1620 static inline bool
applies_and_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1621 applies_and_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1622 {
1623 (void) upper;
1624 (void) lower;
1625 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) >= 3
1626 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (exp)
1627 && (slice_simplifiable (btor_node_real_addr (exp)->e[0])
1628 || slice_simplifiable (btor_node_real_addr (exp)->e[1]));
1629 }
1630
1631 static inline BtorNode *
apply_and_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1632 apply_and_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1633 {
1634 assert (applies_and_slice (btor, exp, upper, lower));
1635
1636 BtorNode *result, *left, *right, *real_exp;
1637
1638 real_exp = btor_node_real_addr (exp);
1639 BTOR_INC_REC_RW_CALL (btor);
1640 left = rewrite_slice_exp (btor, real_exp->e[0], upper, lower);
1641 right = rewrite_slice_exp (btor, real_exp->e[1], upper, lower);
1642 result = btor_exp_bv_and (btor, left, right);
1643 btor_node_release (btor, right);
1644 btor_node_release (btor, left);
1645 result = btor_node_cond_invert (exp, result);
1646 BTOR_DEC_REC_RW_CALL (btor);
1647 return result;
1648 }
1649
1650 /*
1651 * match: (c ? a : b)[upper:lower]
1652 * result: c ? a[upper:lower] : b[upper:lower]
1653 */
1654 static inline bool
applies_bcond_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1655 applies_bcond_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1656 {
1657 (void) upper;
1658 (void) lower;
1659 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) >= 3
1660 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_cond (exp)
1661 && (slice_simplifiable (btor_node_real_addr (exp)->e[1])
1662 || slice_simplifiable (btor_node_real_addr (exp)->e[2]));
1663 }
1664
1665 static inline BtorNode *
apply_bcond_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1666 apply_bcond_slice (Btor *btor, BtorNode *exp, uint32_t upper, uint32_t lower)
1667 {
1668 assert (applies_bcond_slice (btor, exp, upper, lower));
1669
1670 BtorNode *t, *e, *result, *real_exp;
1671
1672 real_exp = btor_node_real_addr (exp);
1673 BTOR_INC_REC_RW_CALL (btor);
1674 t = rewrite_slice_exp (btor, real_exp->e[1], upper, lower);
1675 e = rewrite_slice_exp (btor, real_exp->e[2], upper, lower);
1676 result = rewrite_cond_exp (btor, real_exp->e[0], t, e);
1677 btor_node_release (btor, e);
1678 btor_node_release (btor, t);
1679 result = btor_node_cond_invert (exp, result);
1680 BTOR_DEC_REC_RW_CALL (btor);
1681 return result;
1682 }
1683
1684 static inline bool
applies_zero_lower_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1685 applies_zero_lower_slice (Btor *btor,
1686 BtorNode *exp,
1687 uint32_t upper,
1688 uint32_t lower)
1689 {
1690 (void) upper;
1691 return btor_opt_get (btor, BTOR_OPT_RW_ZERO_LOWER_SLICE)
1692 && btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1693 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && lower == 0
1694 && upper < btor_node_bv_get_width (btor, exp) / 2
1695 && (btor_node_is_bv_mul (exp) || btor_node_is_bv_add (exp));
1696 // || btor_node_is_bv_and (exp));
1697 }
1698
1699 static inline BtorNode *
apply_zero_lower_slice(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)1700 apply_zero_lower_slice (Btor *btor,
1701 BtorNode *exp,
1702 uint32_t upper,
1703 uint32_t lower)
1704 {
1705 BtorNode *result, *real_exp, *tmp1, *tmp2;
1706
1707 real_exp = btor_node_real_addr (exp);
1708 BTOR_INC_REC_RW_CALL (btor);
1709 tmp1 = rewrite_slice_exp (btor, real_exp->e[0], upper, lower);
1710 tmp2 = rewrite_slice_exp (btor, real_exp->e[1], upper, lower);
1711 result = btor_rewrite_binary_exp (btor, real_exp->kind, tmp1, tmp2);
1712 result = btor_node_cond_invert (exp, result);
1713 BTOR_DEC_REC_RW_CALL (btor);
1714 btor_node_release (btor, tmp1);
1715 btor_node_release (btor, tmp2);
1716 return result;
1717 }
1718
1719 /* EQ rules */
1720 /* -------------------------------------------------------------------------- */
1721
1722 /*
1723 * match: a = a
1724 * result: true
1725 */
1726 static inline bool
applies_true_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1727 applies_true_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1728 {
1729 (void) btor;
1730 return e0 == e1;
1731 }
1732
1733 static inline BtorNode *
apply_true_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1734 apply_true_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1735 {
1736 assert (applies_true_eq (btor, e0, e1));
1737 (void) e0;
1738 (void) e1;
1739 return btor_exp_true (btor);
1740 }
1741
1742 /*
1743 * match: a = b, where a != b
1744 * result: false
1745 */
1746 static inline bool
applies_false_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1747 applies_false_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1748 {
1749 return is_always_unequal (btor, e0, e1);
1750 }
1751
1752 static inline BtorNode *
apply_false_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1753 apply_false_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1754 {
1755 assert (applies_false_eq (btor, e0, e1));
1756 (void) e0;
1757 (void) e1;
1758 return btor_exp_false (btor);
1759 }
1760
1761 /*
1762 * match: a + b = a
1763 * result: b = 0
1764 *
1765 * This rule does not lead to less substitutions. 'a' cannot
1766 * be substituted as the occurrence check would fail
1767 */
1768 static inline bool
applies_add_left_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1769 applies_add_left_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1770 {
1771 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1772 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1773 && !btor_node_is_inverted (e0) && e0->kind == BTOR_BV_ADD_NODE
1774 && e0->e[0] == e1;
1775 }
1776
1777 static inline BtorNode *
apply_add_left_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1778 apply_add_left_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1779 {
1780 assert (applies_add_left_eq (btor, e0, e1));
1781 (void) e1;
1782
1783 BtorNode *tmp, *result;
1784
1785 tmp = btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
1786 BTOR_INC_REC_RW_CALL (btor);
1787 result = rewrite_eq_exp (btor, tmp, e0->e[1]);
1788 BTOR_DEC_REC_RW_CALL (btor);
1789 btor_node_release (btor, tmp);
1790 return result;
1791 }
1792
1793 /*
1794 * match: b + a = a
1795 * result: b = 0
1796 *
1797 * This rule does not lead to less substitutions. 'a' cannot
1798 * be substituted as the occurrence check would fail
1799 */
1800 static inline bool
applies_add_right_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1801 applies_add_right_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1802 {
1803 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1804 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1805 && !btor_node_is_inverted (e0) && e0->kind == BTOR_BV_ADD_NODE
1806 && e0->e[1] == e1;
1807 }
1808
1809 static inline BtorNode *
apply_add_right_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1810 apply_add_right_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1811 {
1812 assert (applies_add_right_eq (btor, e0, e1));
1813 (void) e1;
1814
1815 BtorNode *tmp, *result;
1816
1817 tmp = btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
1818 BTOR_INC_REC_RW_CALL (btor);
1819 result = rewrite_eq_exp (btor, tmp, e0->e[0]);
1820 BTOR_DEC_REC_RW_CALL (btor);
1821 btor_node_release (btor, tmp);
1822 return result;
1823 }
1824
1825 /*
1826 * match: a + b = a + c
1827 * result: b = c
1828 */
1829 static inline bool
applies_add_add_1_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1830 applies_add_add_1_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1831 {
1832 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1833 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1834 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
1835 && e0->kind == BTOR_BV_ADD_NODE && e1->kind == BTOR_BV_ADD_NODE
1836 && e0->e[0] == e1->e[0];
1837 }
1838
1839 static inline BtorNode *
apply_add_add_1_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1840 apply_add_add_1_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1841 {
1842 assert (applies_add_add_1_eq (btor, e0, e1));
1843
1844 BtorNode *result;
1845 BTOR_INC_REC_RW_CALL (btor);
1846 result = rewrite_eq_exp (btor, e0->e[1], e1->e[1]);
1847 BTOR_DEC_REC_RW_CALL (btor);
1848 return result;
1849 }
1850
1851 /*
1852 * match: a + b = c + a
1853 * result: b = c
1854 */
1855 static inline bool
applies_add_add_2_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1856 applies_add_add_2_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1857 {
1858 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1859 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1860 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
1861 && e0->kind == BTOR_BV_ADD_NODE && e1->kind == BTOR_BV_ADD_NODE
1862 && e0->e[0] == e1->e[1];
1863 }
1864
1865 static inline BtorNode *
apply_add_add_2_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1866 apply_add_add_2_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1867 {
1868 assert (applies_add_add_2_eq (btor, e0, e1));
1869
1870 BtorNode *result;
1871 BTOR_INC_REC_RW_CALL (btor);
1872 result = rewrite_eq_exp (btor, e0->e[1], e1->e[0]);
1873 BTOR_DEC_REC_RW_CALL (btor);
1874 return result;
1875 }
1876
1877 /*
1878 * match: b + a = a + c
1879 * result: b = c
1880 */
1881 static inline bool
applies_add_add_3_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1882 applies_add_add_3_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1883 {
1884 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1885 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1886 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
1887 && e0->kind == BTOR_BV_ADD_NODE && e1->kind == BTOR_BV_ADD_NODE
1888 && e0->e[1] == e1->e[0];
1889 }
1890
1891 static inline BtorNode *
apply_add_add_3_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1892 apply_add_add_3_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1893 {
1894 assert (applies_add_add_3_eq (btor, e0, e1));
1895
1896 BtorNode *result;
1897 BTOR_INC_REC_RW_CALL (btor);
1898 result = rewrite_eq_exp (btor, e0->e[0], e1->e[1]);
1899 BTOR_DEC_REC_RW_CALL (btor);
1900 return result;
1901 }
1902
1903 /*
1904 * match: b + a = c + a
1905 * result: b = c
1906 */
1907 static inline bool
applies_add_add_4_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1908 applies_add_add_4_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1909 {
1910 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1911 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1912 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
1913 && e0->kind == BTOR_BV_ADD_NODE && e1->kind == BTOR_BV_ADD_NODE
1914 && e0->e[1] == e1->e[1];
1915 }
1916
1917 static inline BtorNode *
apply_add_add_4_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1918 apply_add_add_4_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1919 {
1920 assert (applies_add_add_4_eq (btor, e0, e1));
1921
1922 BtorNode *result;
1923 BTOR_INC_REC_RW_CALL (btor);
1924 result = rewrite_eq_exp (btor, e0->e[0], e1->e[0]);
1925 BTOR_DEC_REC_RW_CALL (btor);
1926 return result;
1927 }
1928
1929 /*
1930 * match: t = a - b (t = a + ~b + 1)
1931 * result: t + b = a
1932 */
1933 static inline bool
applies_sub_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1934 applies_sub_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1935 {
1936 (void) e0;
1937 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1938 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_regular (e1)
1939 && btor_node_is_bv_add (e1)
1940 && ((btor_node_is_regular (e1->e[0])
1941 && btor_node_bv_is_neg (btor, e1->e[0], 0))
1942 || (btor_node_is_regular (e1->e[1])
1943 && btor_node_bv_is_neg (btor, e1->e[1], 0)));
1944 }
1945
1946 static inline BtorNode *
apply_sub_eq(Btor * btor,BtorNode * e0,BtorNode * e1)1947 apply_sub_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
1948 {
1949 assert (applies_sub_eq (btor, e0, e1));
1950
1951 BtorNode *result;
1952 BtorNode *neg = 0, *other;
1953
1954 if (btor_node_bv_is_neg (btor, e1->e[0], &neg))
1955 other = e1->e[1];
1956 else
1957 {
1958 btor_node_bv_is_neg (btor, e1->e[1], &neg);
1959 other = e1->e[0];
1960 }
1961 assert (neg);
1962
1963 BTOR_INC_REC_RW_CALL (btor);
1964 BtorNode *lhs = rewrite_add_exp (btor, e0, neg);
1965 result = rewrite_eq_exp (btor, lhs, other);
1966 BTOR_DEC_REC_RW_CALL (btor);
1967 btor_node_release (btor, lhs);
1968 return result;
1969 }
1970
1971 #if 0
1972 /*
1973 * match: a & b = ~a & ~b
1974 * result: a = ~b
1975 *
1976 * Commutative operators are normalized ignoring signs, so we do not have to
1977 * check cases like a & b == ~b & a as they are represented as a & b == a & ~b
1978 */
1979 static inline bool
1980 applies_and_and_1_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
1981 {
1982 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
1983 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
1984 && !btor_node_is_inverted (e0)
1985 && !btor_node_is_inverted (e1)
1986 && e0->kind == BTOR_BV_AND_NODE
1987 && e1->kind == BTOR_BV_AND_NODE
1988 && e0->e[0] == btor_node_invert (e1->e[0])
1989 && e0->e[1] == btor_node_invert (e1->e[1])
1990 && btor_node_is_inverted (e0->e[0]) ==
1991 btor_node_is_inverted (e0->e[1]);
1992 }
1993
1994 static inline BtorNode *
1995 apply_and_and_1_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
1996 {
1997 assert (applies_and_and_1_eq (btor, e0, e1));
1998 assert (btor_node_is_inverted (e1->e[0]) == btor_node_is_inverted (e1->e[1]));
1999 (void) e1;
2000
2001 BtorNode *result;
2002
2003 BTOR_INC_REC_RW_CALL (btor);
2004 result = rewrite_eq_exp (btor, e0->e[0], btor_node_invert (e0->e[1]));
2005 BTOR_DEC_REC_RW_CALL (btor);
2006 return result;
2007 }
2008
2009 /*
2010 * match: ~a & b = a & ~b
2011 * result: a = b
2012 *
2013 * Commutative operators are normalized ignoring signs, so we do not have to
2014 * check cases like a & b == ~b & a as they are represented as a & b == a & ~b
2015 */
/* (disabled) Matches '~a & b == a & ~b': operand-wise complements with the
 * two operands of e0 carrying different signs. */
static inline bool
applies_and_and_2_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
         && btor->rec_rw_calls < BTOR_REC_RW_BOUND
         && !btor_node_is_inverted (e0)
         && !btor_node_is_inverted (e1)
         && e0->kind == BTOR_BV_AND_NODE
         && e1->kind == BTOR_BV_AND_NODE
         && e0->e[0] == btor_node_invert (e1->e[0])
         && e0->e[1] == btor_node_invert (e1->e[1])
         /* operands of e0 must differ in sign */
         && btor_node_is_inverted (e0->e[0]) !=
            btor_node_is_inverted (e0->e[1]);
}
2030
/* (disabled) Rewrites '~a & b == a & ~b' into 'a == b' (signs stripped
 * via btor_node_real_addr). */
static inline BtorNode *
apply_and_and_2_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  assert (applies_and_and_2_eq (btor, e0, e1));
  assert (btor_node_is_inverted (e1->e[0]) != btor_node_is_inverted (e1->e[1]));
  (void) e1;

  BtorNode *result;

  BTOR_INC_REC_RW_CALL (btor);
  result = rewrite_eq_exp (btor, btor_node_real_addr (e0->e[0]),
                           btor_node_real_addr (e0->e[1]));
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
2046
2047 /*
2048 * match: a & b = a & ~b
2049 * result: a = 0
2050 *
2051 * Commutative operators are normalized ignoring signs, so we do not have to
2052 * check cases like a & b == ~b & a as they are represented as a & b == a & ~b
2053 */
/* (disabled) Matches 'a & b == a & ~b' (shared first operand, complemented
 * second operand). */
static inline bool
applies_and_and_3_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
         && btor->rec_rw_calls < BTOR_REC_RW_BOUND
         && !btor_node_is_inverted (e0)
         && !btor_node_is_inverted (e1)
         && e0->kind == BTOR_BV_AND_NODE
         && e1->kind == BTOR_BV_AND_NODE
         && e0->e[0] == e1->e[0]
         && e0->e[1] == btor_node_invert (e1->e[1]);
}
2066
/* (disabled) Rewrites 'a & b == a & ~b' into 'a == 0': any set bit of 'a'
 * would force b == ~b on that bit, a contradiction. */
static inline BtorNode *
apply_and_and_3_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  assert (applies_and_and_3_eq (btor, e0, e1));
  (void) e1;

  BtorNode *tmp, *result;

  tmp = btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
  BTOR_INC_REC_RW_CALL (btor);
  result = rewrite_eq_exp (btor, e0->e[0], tmp);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, tmp);
  return result;
}
2082
2083 /*
2084 * match: a & b = ~a & b
2085 * result: b = 0
2086 *
2087 * Commutative operators are normalized ignoring signs, so we do not have to
2088 * check cases like a & b == ~b & a as they are represented as a & b == a & ~b
2089 */
/* (disabled) Matches 'a & b == ~a & b' (complemented first operand, shared
 * second operand). */
static inline bool
applies_and_and_4_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
         && btor->rec_rw_calls < BTOR_REC_RW_BOUND
         && !btor_node_is_inverted (e0)
         && !btor_node_is_inverted (e1)
         && e0->kind == BTOR_BV_AND_NODE
         && e1->kind == BTOR_BV_AND_NODE
         && e0->e[0] == btor_node_invert (e1->e[0])
         && e0->e[1] == e1->e[1];
}
2102
/* (disabled) Rewrites 'a & b == ~a & b' into 'b == 0' (dual of the
 * and_and_3 rule). */
static inline BtorNode *
apply_and_and_4_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  assert (applies_and_and_4_eq (btor, e0, e1));
  (void) e1;

  BtorNode *tmp, *result;

  tmp = btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
  BTOR_INC_REC_RW_CALL (btor);
  result = rewrite_eq_exp (btor, e0->e[1], tmp);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, tmp);
  return result;
}
2118 #endif
2119
2120 /*
2121 * match: b ? a : t = d, where a != d
2122 * result: !b AND d = t
2123 */
2124 static inline bool
applies_bcond_uneq_if_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2125 applies_bcond_uneq_if_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2126 {
2127 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2128 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
2129 && !btor_node_is_inverted (e0) && btor_node_is_bv_cond (e0)
2130 && is_always_unequal (btor, e0->e[1], e1);
2131 }
2132
2133 static inline BtorNode *
apply_bcond_uneq_if_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2134 apply_bcond_uneq_if_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2135 {
2136 assert (applies_bcond_uneq_if_eq (btor, e0, e1));
2137
2138 BtorNode *tmp, *result;
2139
2140 BTOR_INC_REC_RW_CALL (btor);
2141 tmp = rewrite_eq_exp (btor, e0->e[2], e1);
2142 result = rewrite_and_exp (btor, btor_node_invert (e0->e[0]), tmp);
2143 BTOR_DEC_REC_RW_CALL (btor);
2144 btor_node_release (btor, tmp);
2145 return result;
2146 }
2147
2148 /*
2149 * match: b ? a : t = d, where t != d
2150 * result: b AND a = t
2151 */
2152 static inline bool
applies_bcond_uneq_else_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2153 applies_bcond_uneq_else_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2154 {
2155 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2156 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
2157 && !btor_node_is_inverted (e0) && btor_node_is_bv_cond (e0)
2158 && is_always_unequal (btor, e0->e[2], e1);
2159 }
2160
2161 static inline BtorNode *
apply_bcond_uneq_else_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2162 apply_bcond_uneq_else_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2163 {
2164 assert (applies_bcond_uneq_else_eq (btor, e0, e1));
2165
2166 BtorNode *tmp, *result;
2167
2168 BTOR_INC_REC_RW_CALL (btor);
2169 tmp = rewrite_eq_exp (btor, e0->e[1], e1);
2170 result = rewrite_and_exp (btor, e0->e[0], tmp);
2171 BTOR_DEC_REC_RW_CALL (btor);
2172 btor_node_release (btor, tmp);
2173 return result;
2174 }
2175
2176 /*
2177 * match: a = b ? a : c
2178 * result: b OR a = c
2179 *
2180 * match: a = ~(b ? a : c)
2181 * result: !b AND a = ~c
2182 */
2183 static inline bool
applies_bcond_if_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2184 applies_bcond_if_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2185 {
2186 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2187 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_cond (e1)
2188 && btor_node_real_addr (e1)->e[1] == e0;
2189 }
2190
2191 static inline BtorNode *
apply_bcond_if_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2192 apply_bcond_if_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2193 {
2194 assert (applies_bcond_if_eq (btor, e0, e1));
2195
2196 BtorNode *tmp, *result, *real_e1;
2197
2198 real_e1 = btor_node_real_addr (e1);
2199
2200 BTOR_INC_REC_RW_CALL (btor);
2201 if (btor_node_is_inverted (e1))
2202 {
2203 tmp = rewrite_eq_exp (btor, btor_node_invert (real_e1->e[2]), e0);
2204 result = rewrite_and_exp (btor, btor_node_invert (real_e1->e[0]), tmp);
2205 }
2206 else
2207 {
2208 tmp = rewrite_eq_exp (btor, real_e1->e[2], e0);
2209 result = btor_exp_bv_or (btor, real_e1->e[0], tmp);
2210 }
2211 btor_node_release (btor, tmp);
2212 BTOR_DEC_REC_RW_CALL (btor);
2213 return result;
2214 }
2215
2216 /*
2217 * match: a = b ? c : a
2218 * result: !b OR a = c
2219 *
2220 * match: a = ~(b ? c : a)
2221 * result: b AND a = ~c
2222 */
2223 static inline bool
applies_bcond_else_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2224 applies_bcond_else_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2225 {
2226 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2227 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_cond (e1)
2228 && btor_node_real_addr (e1)->e[2] == e0;
2229 }
2230
2231 static inline BtorNode *
apply_bcond_else_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2232 apply_bcond_else_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2233 {
2234 assert (applies_bcond_else_eq (btor, e0, e1));
2235
2236 BtorNode *tmp, *result, *real_e1;
2237
2238 real_e1 = btor_node_real_addr (e1);
2239
2240 BTOR_INC_REC_RW_CALL (btor);
2241 if (btor_node_is_inverted (e1))
2242 {
2243 tmp = rewrite_eq_exp (btor, btor_node_invert (real_e1->e[1]), e0);
2244 result = rewrite_and_exp (btor, real_e1->e[0], tmp);
2245 }
2246 else
2247 {
2248 tmp = rewrite_eq_exp (btor, real_e1->e[1], e0);
2249 result = btor_exp_bv_or (btor, btor_node_invert (real_e1->e[0]), tmp);
2250 }
2251 btor_node_release (btor, tmp);
2252 BTOR_DEC_REC_RW_CALL (btor);
2253 return result;
2254 }
2255
2256 /*
 * match: (x ? a : b) = (x ? c : d), where either a = c or b = d
2258 * result: x ? a = c : b = d
2259 */
2260 static inline bool
applies_bcond_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2261 applies_bcond_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2262 {
2263 BtorNode *real_e0, *real_e1;
2264 real_e0 = btor_node_real_addr (e0);
2265 real_e1 = btor_node_real_addr (e1);
2266 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
2267 && btor_node_is_bv_cond (real_e0) && btor_node_is_bv_cond (real_e1)
2268 && btor_node_is_inverted (e0)
2269 == btor_node_is_inverted (e1) // TODO: needed?
2270 && real_e0->e[0] == real_e1->e[0]
2271 && (real_e0->e[1] == real_e1->e[1] || real_e0->e[2] == real_e1->e[2]);
2272 }
2273
2274 static inline BtorNode *
apply_bcond_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2275 apply_bcond_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2276 {
2277 assert (applies_bcond_eq (btor, e0, e1));
2278
2279 BtorNode *result, *left, *right, *real_e0, *real_e1;
2280
2281 real_e0 = btor_node_real_addr (e0);
2282 real_e1 = btor_node_real_addr (e1);
2283 BTOR_INC_REC_RW_CALL (btor);
2284 left = rewrite_eq_exp (btor,
2285 btor_node_cond_invert (e0, real_e0->e[1]),
2286 btor_node_cond_invert (e1, real_e1->e[1]));
2287 right = rewrite_eq_exp (btor,
2288 btor_node_cond_invert (e0, real_e0->e[2]),
2289 btor_node_cond_invert (e1, real_e1->e[2]));
2290 result = rewrite_cond_exp (btor, real_e0->e[0], left, right);
2291 BTOR_DEC_REC_RW_CALL (btor);
2292 btor_node_release (btor, left);
2293 btor_node_release (btor, right);
2294 return result;
2295 }
2296
2297 /*
2298 * match: a * b + a * c
2299 * result: a * (b + c)
2300 */
2301 static inline bool
applies_add_mul_distrib(Btor * btor,BtorNode * e0,BtorNode * e1)2302 applies_add_mul_distrib (Btor *btor, BtorNode *e0, BtorNode *e1)
2303 {
2304 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2305 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
2306 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
2307 && btor_node_is_bv_mul (e0) && btor_node_is_bv_mul (e1)
2308 && (e0->e[0] == e1->e[0] || e0->e[0] == e1->e[1]
2309 || e0->e[1] == e1->e[0] || e0->e[1] == e1->e[1]);
2310 }
2311
2312 static inline BtorNode *
apply_add_mul_distrib(Btor * btor,BtorNode * e0,BtorNode * e1)2313 apply_add_mul_distrib (Btor *btor, BtorNode *e0, BtorNode *e1)
2314 {
2315 assert (applies_add_mul_distrib (btor, e0, e1));
2316
2317 BtorNode *add, *mul, *result;
2318
2319 BTOR_INC_REC_RW_CALL (btor);
2320 if (e0->e[0] == e1->e[0])
2321 {
2322 add = rewrite_add_exp (btor, e0->e[1], e1->e[1]);
2323 mul = e0->e[0];
2324 }
2325 else if (e0->e[0] == e1->e[1])
2326 {
2327 add = rewrite_add_exp (btor, e0->e[1], e1->e[0]);
2328 mul = e0->e[0];
2329 }
2330 else if (e0->e[1] == e1->e[0])
2331 {
2332 add = rewrite_add_exp (btor, e0->e[0], e1->e[1]);
2333 mul = e0->e[1];
2334 }
2335 else
2336 {
2337 assert (e0->e[1] == e1->e[1]);
2338 add = rewrite_add_exp (btor, e0->e[0], e1->e[0]);
2339 mul = e0->e[1];
2340 }
2341
2342 result = rewrite_mul_exp (btor, mul, add);
2343 BTOR_DEC_REC_RW_CALL (btor);
2344 btor_node_release (btor, add);
2345 return result;
2346 }
2347
2348 /*
2349 * match: a * (b + c) = a * b + a * c
2350 * result: true
2351 */
2352 static inline bool
applies_distrib_add_mul_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2353 applies_distrib_add_mul_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2354 {
2355 bool result;
2356 BtorNode *tmp = 0;
2357
2358 result = btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2359 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
2360 && btor_node_is_bv_mul (e0) && btor_node_is_bv_add (e1)
2361 && applies_add_mul_distrib (btor, e1->e[0], e1->e[1])
2362 && (tmp = apply_add_mul_distrib (btor, e1->e[0], e1->e[1]))
2363 && tmp == e0;
2364 if (tmp) btor_node_release (btor, tmp);
2365 return result;
2366 }
2367
2368 static inline BtorNode *
apply_distrib_add_mul_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2369 apply_distrib_add_mul_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2370 {
2371 assert (applies_distrib_add_mul_eq (btor, e0, e1));
2372 (void) e0;
2373 (void) e1;
2374 return btor_exp_true (btor);
2375 }
2376
2377 /*
2378 * match: a :: b = c
 * result: (a::b)[u:l] = c[u:l] AND (a::b)[l-1:0] = c[l-1:0]
 * with: u: len(c)-1
 * l: len(c)-len(a)
2382 *
2383 * push eq down over concats
2384 */
2385 static inline bool
applies_concat_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2386 applies_concat_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2387 {
2388 (void) e1;
2389 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2390 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
2391 && btor_node_real_addr (e0)->kind == BTOR_BV_CONCAT_NODE;
2392 }
2393
2394 static inline BtorNode *
apply_concat_eq(Btor * btor,BtorNode * e0,BtorNode * e1)2395 apply_concat_eq (Btor *btor, BtorNode *e0, BtorNode *e1)
2396 {
2397 assert (applies_concat_eq (btor, e0, e1));
2398
2399 uint32_t upper, lower;
2400 BtorNode *real_e0, *tmp1, *tmp2, *tmp3, *tmp4, *result, *eq1, *eq2;
2401
2402 real_e0 = btor_node_real_addr (e0);
2403
2404 BTOR_INC_REC_RW_CALL (btor);
2405 upper = btor_node_bv_get_width (btor, real_e0) - 1;
2406 lower = upper - btor_node_bv_get_width (btor, real_e0->e[0]) + 1;
2407
2408 tmp1 = rewrite_slice_exp (btor, e0, upper, lower);
2409 tmp2 = rewrite_slice_exp (btor, e1, upper, lower);
2410 lower--;
2411 tmp3 = rewrite_slice_exp (btor, e0, lower, 0);
2412 tmp4 = rewrite_slice_exp (btor, e1, lower, 0);
2413
2414 /* creating two slices on e1 does not really improve the situation here,
2415 * hence only create a result if a slice on e1 yields a result different
2416 * from a slice (through further rewriting) */
2417 if (!(btor_node_is_bv_slice (tmp2) && btor_node_is_bv_slice (tmp4)))
2418 {
2419 eq1 = rewrite_eq_exp (btor, tmp1, tmp2);
2420 eq2 = rewrite_eq_exp (btor, tmp3, tmp4);
2421 result = rewrite_and_exp (btor, eq1, eq2);
2422 btor_node_release (btor, eq1);
2423 btor_node_release (btor, eq2);
2424 }
2425 else
2426 result = 0;
2427
2428 btor_node_release (btor, tmp1);
2429 btor_node_release (btor, tmp2);
2430 btor_node_release (btor, tmp3);
2431 btor_node_release (btor, tmp4);
2432 BTOR_DEC_REC_RW_CALL (btor);
2433 return result;
2434 }
2435
2436 #if 0
/* (disabled) Matches '0 == a & const' where one AND operand is a bit-vector
 * constant. */
static inline bool
applies_zero_eq_and_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  BtorNode *real_e1;
  real_e1 = btor_node_real_addr (e1);
  return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
         && btor->rec_rw_calls < BTOR_REC_RW_BOUND
         && is_const_zero_exp (btor, e0)
         && btor_node_is_bv_and (real_e1)
         && (btor_node_is_bv_const (real_e1->e[0])
             || btor_node_is_bv_const (real_e1->e[1]));
}
2449
/* (disabled) If the constant operand is a contiguous bit mask, rewrite
 * '0 == a & mask' into '0 == a[upper:lower]'; returns 0 (rule not applied)
 * if neither operand is a bit mask. */
static inline BtorNode *
apply_zero_eq_and_eq (Btor * btor, BtorNode * e0, BtorNode * e1)
{
  (void) e0;
  uint32_t len, upper, lower;
  BtorNode *result, *real_e1, *masked, *zero, *slice;
  BtorSortId sort;

  real_e1 = btor_node_real_addr (e1);

  /* find which operand is the mask; the other one is being masked */
  if (is_bit_mask (real_e1->e[0], &upper, &lower))
    masked = real_e1->e[1];
  else if (is_bit_mask (real_e1->e[1], &upper, &lower))
    masked = real_e1->e[0];
  else
    return 0;

  len = upper - lower + 1;

  BTOR_INC_REC_RW_CALL (btor);
  sort = btor_sort_bv (btor, len);
  zero = btor_exp_bv_zero (btor, sort);
  btor_sort_release (btor, sort);
  slice = rewrite_slice_exp (btor, masked, upper, lower);
  result = rewrite_eq_exp (btor, zero, slice);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, zero);
  btor_node_release (btor, slice);
  return result;
}
2480 #endif
2481
2482
2483 /* ULT rules */
2484 /* -------------------------------------------------------------------------- */
2485
2486 /*
2487 * match: a < a
2488 * result: false
2489 */
2490 static inline bool
applies_false_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2491 applies_false_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2492 {
2493 (void) btor;
2494 return e0 == e1;
2495 }
2496
2497 static inline BtorNode *
apply_false_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2498 apply_false_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2499 {
2500 assert (applies_false_ult (btor, e0, e1));
2501 (void) e0;
2502 (void) e1;
2503 return btor_exp_false (btor);
2504 }
2505
2506 /*
2507 * match: a < b, where len(a) = 1
2508 * result: !a AND b
2509 */
2510 static inline bool
applies_bool_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2511 applies_bool_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2512 {
2513 (void) e1;
2514 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
2515 && btor_node_bv_get_width (btor, e0) == 1;
2516 }
2517
2518 static inline BtorNode *
apply_bool_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2519 apply_bool_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2520 {
2521 assert (applies_bool_ult (btor, e0, e1));
2522
2523 BtorNode *result;
2524
2525 BTOR_INC_REC_RW_CALL (btor);
2526 result = rewrite_and_exp (btor, btor_node_invert (e0), e1);
2527 BTOR_DEC_REC_RW_CALL (btor);
2528 return result;
2529 }
2530
2531 /*
2532 * match: (a::b) < (a::c)
2533 * result: b < c
2534 */
2535 static inline bool
applies_concat_upper_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2536 applies_concat_upper_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2537 {
2538 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2539 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
2540 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
2541 && btor_node_is_bv_concat (e0) && e0->kind == e1->kind
2542 && e0->e[0] == e1->e[0];
2543 }
2544
2545 static inline BtorNode *
apply_concat_upper_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2546 apply_concat_upper_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2547 {
2548 assert (applies_concat_upper_ult (btor, e0, e1));
2549
2550 BtorNode *result;
2551
2552 BTOR_INC_REC_RW_CALL (btor);
2553 result = rewrite_ult_exp (btor, e0->e[1], e1->e[1]);
2554 BTOR_DEC_REC_RW_CALL (btor);
2555 return result;
2556 }
2557
2558 /*
2559 * match: (b::a) < (c::a)
2560 * result: b < c
2561 */
2562 static inline bool
applies_concat_lower_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2563 applies_concat_lower_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2564 {
2565 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
2566 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
2567 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
2568 && btor_node_is_bv_concat (e0) && e0->kind == e1->kind
2569 && e0->e[1] == e1->e[1];
2570 }
2571
2572 static inline BtorNode *
apply_concat_lower_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2573 apply_concat_lower_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2574 {
2575 assert (applies_concat_lower_ult (btor, e0, e1));
2576
2577 BtorNode *result;
2578
2579 BTOR_INC_REC_RW_CALL (btor);
2580 result = rewrite_ult_exp (btor, e0->e[0], e1->e[0]);
2581 BTOR_DEC_REC_RW_CALL (btor);
2582 return result;
2583 }
2584
2585 /*
 * match: (x ? a : b) < (x ? c : d), where either a = c or b = d
2587 * result: x ? a < c : b < d
2588 */
2589 static inline bool
applies_bcond_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2590 applies_bcond_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2591 {
2592 BtorNode *real_e0, *real_e1;
2593 real_e0 = btor_node_real_addr (e0);
2594 real_e1 = btor_node_real_addr (e1);
2595 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
2596 && btor_node_is_bv_cond (real_e0) && btor_node_is_bv_cond (real_e1)
2597 && btor_node_is_inverted (e0)
2598 == btor_node_is_inverted (e1) // TODO: needed?
2599 && real_e0->e[0] == real_e1->e[0]
2600 && (real_e0->e[1] == real_e1->e[1] || real_e0->e[2] == real_e1->e[2]);
2601 }
2602
2603 static inline BtorNode *
apply_bcond_ult(Btor * btor,BtorNode * e0,BtorNode * e1)2604 apply_bcond_ult (Btor *btor, BtorNode *e0, BtorNode *e1)
2605 {
2606 assert (applies_bcond_ult (btor, e0, e1));
2607
2608 BtorNode *result, *left, *right, *real_e0, *real_e1;
2609
2610 real_e0 = btor_node_real_addr (e0);
2611 real_e1 = btor_node_real_addr (e1);
2612 BTOR_INC_REC_RW_CALL (btor);
2613 left = rewrite_ult_exp (btor,
2614 btor_node_cond_invert (e0, real_e0->e[1]),
2615 btor_node_cond_invert (e1, real_e1->e[1]));
2616 right = rewrite_ult_exp (btor,
2617 btor_node_cond_invert (e0, real_e0->e[2]),
2618 btor_node_cond_invert (e1, real_e1->e[2]));
2619 result = rewrite_cond_exp (btor, real_e0->e[0], left, right);
2620 BTOR_DEC_REC_RW_CALL (btor);
2621 btor_node_release (btor, left);
2622 btor_node_release (btor, right);
2623 return result;
2624 }
2625
2626
2627 /* AND rules */
2628 /* -------------------------------------------------------------------------- */
2629
2630 /*
2631 * match: a & a
2632 * result: a
2633 */
2634 static inline bool
applies_idem1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2635 applies_idem1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2636 {
2637 (void) btor;
2638 return e0 == e1;
2639 }
2640
2641 static inline BtorNode *
apply_idem1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2642 apply_idem1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2643 {
2644 assert (applies_idem1_and (btor, e0, e1));
2645 (void) e1;
2646 return btor_node_copy (btor, e0);
2647 }
2648
2649 /*
2650 * match: a & ~a
2651 * result: 0
2652 */
2653 static inline bool
applies_contr1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2654 applies_contr1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2655 {
2656 (void) btor;
2657 return btor_node_invert (e0) == e1;
2658 }
2659
2660 static inline BtorNode *
apply_contr1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2661 apply_contr1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2662 {
2663 assert (applies_contr1_and (btor, e0, e1));
2664 (void) e1;
2665 return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
2666 }
2667
2668 /*
2669 * match: a & b & c & d, where a = ~c OR a = ~d OR b = ~c OR b = ~d
2670 * result: false
2671 *
2672 * second rule of contradiction
2673 */
2674 static inline bool
applies_contr2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2675 applies_contr2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2676 {
2677 (void) btor;
2678 return btor_node_is_bv_and (e0) && btor_node_is_bv_and (e1)
2679 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
2680 && (e0->e[0] == btor_node_invert (e1->e[0])
2681 || e0->e[0] == btor_node_invert (e1->e[1])
2682 || e0->e[1] == btor_node_invert (e1->e[0])
2683 || e0->e[1] == btor_node_invert (e1->e[1]));
2684 }
2685
2686 static inline BtorNode *
apply_contr2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2687 apply_contr2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2688 {
2689 assert (applies_contr2_and (btor, e0, e1));
2690 (void) e1;
2691 return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
2692 }
2693
2694 /*
2695 * match: a & b & c & d, where a = c or b = c
2696 * result: a & b & d
2697 *
2698 * symmetric rule of idempotency
2699 */
2700 static inline bool
applies_idem2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2701 applies_idem2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2702 {
2703 BtorNode *real_e0, *real_e1;
2704 real_e0 = btor_node_real_addr (e0);
2705 real_e1 = btor_node_real_addr (e1);
2706 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (e0)
2707 && btor_node_is_bv_and (e1) && !btor_node_is_inverted (e0)
2708 && !btor_node_is_inverted (e1)
2709 && (real_e0->e[0] == real_e1->e[0] || real_e0->e[1] == real_e1->e[0]);
2710 }
2711
2712 static inline BtorNode *
apply_idem2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2713 apply_idem2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2714 {
2715 assert (applies_idem2_and (btor, e0, e1));
2716
2717 BtorNode *result;
2718
2719 BTOR_INC_REC_RW_CALL (btor);
2720 result = rewrite_and_exp (btor, e0, btor_node_real_addr (e1)->e[1]);
2721 BTOR_DEC_REC_RW_CALL (btor);
2722 return result;
2723 }
2724
2725 /*
2726 * match: a & b & c & d, where a = d OR b = d
2727 * result: a & b & c
2728 *
2729 * use commutativity
2730 */
2731 static inline bool
applies_comm_and(Btor * btor,BtorNode * e0,BtorNode * e1)2732 applies_comm_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2733 {
2734 BtorNode *real_e0, *real_e1;
2735 real_e0 = btor_node_real_addr (e0);
2736 real_e1 = btor_node_real_addr (e1);
2737 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (e0)
2738 && btor_node_is_bv_and (e1) && !btor_node_is_inverted (e0)
2739 && !btor_node_is_inverted (e1)
2740 && (real_e0->e[0] == real_e1->e[1] || real_e0->e[1] == real_e1->e[1]);
2741 }
2742
2743 static inline BtorNode *
apply_comm_and(Btor * btor,BtorNode * e0,BtorNode * e1)2744 apply_comm_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2745 {
2746 assert (applies_comm_and (btor, e0, e1));
2747
2748 BtorNode *result;
2749
2750 BTOR_INC_REC_RW_CALL (btor);
2751 result = rewrite_and_exp (btor, e0, btor_node_real_addr (e1)->e[0]);
2752 BTOR_DEC_REC_RW_CALL (btor);
2753 return result;
2754 }
2755
2756 /*
2757 * match: a & b & ~(c & d), where a = c OR a = d OR b = c OR b = d
2758 * result: a & b
2759 */
2760 static inline bool
applies_subsum1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2761 applies_subsum1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2762 {
2763 (void) btor;
2764 BtorNode *real_e0, *real_e1;
2765 real_e0 = btor_node_real_addr (e0);
2766 real_e1 = btor_node_real_addr (e1);
2767 return btor_node_is_bv_and (e0) && btor_node_is_bv_and (e1)
2768 && !btor_node_is_inverted (e0) && btor_node_is_inverted (e1)
2769 && (real_e0->e[0] == btor_node_invert (real_e1->e[0])
2770 || real_e0->e[0] == btor_node_invert (real_e1->e[1])
2771 || real_e0->e[1] == btor_node_invert (real_e1->e[0])
2772 || real_e0->e[1] == btor_node_invert (real_e1->e[1]));
2773 }
2774
2775 static inline BtorNode *
apply_subsum1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2776 apply_subsum1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2777 {
2778 assert (applies_subsum1_and (btor, e0, e1));
2779 (void) e1;
2780 return btor_node_copy (btor, e0);
2781 }
2782
2783 /*
2784 * match: a & b & ~(c & d), where a = c OR b = c
2785 * result: a & b & ~d
2786 *
2787 * symmetric rule of substitution
2788 */
2789 static inline bool
applies_subst1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2790 applies_subst1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2791 {
2792 BtorNode *real_e0, *real_e1;
2793 real_e0 = btor_node_real_addr (e0);
2794 real_e1 = btor_node_real_addr (e1);
2795 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
2796 && btor_node_is_bv_and (real_e1) && !btor_node_is_inverted (e0)
2797 && btor_node_is_inverted (e1)
2798 && (real_e1->e[0] == real_e0->e[1] || real_e1->e[0] == real_e0->e[0]);
2799 }
2800
2801 static inline BtorNode *
apply_subst1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2802 apply_subst1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2803 {
2804 assert (applies_subst1_and (btor, e0, e1));
2805
2806 BtorNode *result;
2807
2808 BTOR_INC_REC_RW_CALL (btor);
2809 result = rewrite_and_exp (
2810 btor, e0, btor_node_invert (btor_node_real_addr (e1)->e[1]));
2811 BTOR_DEC_REC_RW_CALL (btor);
2812 return result;
2813 }
2814
2815 /*
2816 * match: a & b & ~(c & d), where a = d OR b = d
2817 * result: a & b & ~c
2818 *
2819 * symmetric rule of substitution
2820 */
2821 static inline bool
applies_subst2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2822 applies_subst2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2823 {
2824 BtorNode *real_e0, *real_e1;
2825 real_e0 = btor_node_real_addr (e0);
2826 real_e1 = btor_node_real_addr (e1);
2827 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
2828 && btor_node_is_bv_and (real_e1) && !btor_node_is_inverted (e0)
2829 && btor_node_is_inverted (e1)
2830 && (real_e1->e[1] == real_e0->e[1] || real_e1->e[1] == real_e0->e[0]);
2831 }
2832
2833 static inline BtorNode *
apply_subst2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2834 apply_subst2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2835 {
2836 assert (applies_subst2_and (btor, e0, e1));
2837
2838 BtorNode *result;
2839
2840 BTOR_INC_REC_RW_CALL (btor);
2841 result = rewrite_and_exp (
2842 btor, e0, btor_node_invert (btor_node_real_addr (e1)->e[0]));
2843 BTOR_DEC_REC_RW_CALL (btor);
2844 return result;
2845 }
2846
2847 /*
2848 * match: a XNOR b, where len(a) = 1
2849 * result: a = b
2850 */
2851 static inline bool
applies_bool_xnor_and(Btor * btor,BtorNode * e0,BtorNode * e1)2852 applies_bool_xnor_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2853 {
2854 BtorNode *real_e0, *real_e1;
2855 real_e0 = btor_node_real_addr (e0);
2856 real_e1 = btor_node_real_addr (e1);
2857 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
2858 && btor_node_is_bv_and (real_e1) && btor_node_is_inverted (e0)
2859 && btor_node_is_inverted (e1)
2860 && btor_node_bv_get_width (btor, real_e0) == 1
2861 && btor_node_is_inverted (real_e0->e[0])
2862 != btor_node_is_inverted (real_e0->e[1])
2863 && btor_node_is_inverted (real_e1->e[0])
2864 != btor_node_is_inverted (real_e1->e[1])
2865 && ((real_e0->e[0] == btor_node_invert (real_e1->e[0])
2866 && real_e0->e[1] == btor_node_invert (real_e1->e[1]))
2867 || (real_e0->e[0] == btor_node_invert (real_e1->e[1])
2868 && real_e0->e[1] == btor_node_invert (real_e1->e[0])));
2869 }
2870
2871 static inline BtorNode *
apply_bool_xnor_and(Btor * btor,BtorNode * e0,BtorNode * e1)2872 apply_bool_xnor_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2873 {
2874 assert (applies_bool_xnor_and (btor, e0, e1));
2875 (void) e1;
2876
2877 BtorNode *result;
2878
2879 BTOR_INC_REC_RW_CALL (btor);
2880 result =
2881 rewrite_eq_exp (btor,
2882 btor_node_real_addr (btor_node_real_addr (e0)->e[0]),
2883 btor_node_real_addr (btor_node_real_addr (e0)->e[1]));
2884 BTOR_DEC_REC_RW_CALL (btor);
2885 return result;
2886 }
2887
2888 /*
2889 * match: ~(a & b) & ~(c & d), where (a = c AND b = ~d) OR (a = d AND b = ~c)
2890 * result: ~a
2891 *
2892 * rule of resolution
2893 */
2894 static inline bool
applies_resol1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2895 applies_resol1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2896 {
2897 BtorNode *real_e0, *real_e1;
2898 real_e0 = btor_node_real_addr (e0);
2899 real_e1 = btor_node_real_addr (e1);
2900 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
2901 && btor_node_is_bv_and (real_e1) && btor_node_is_inverted (e0)
2902 && btor_node_is_inverted (e1)
2903 && ((real_e0->e[0] == real_e1->e[0]
2904 && real_e0->e[1] == btor_node_invert (real_e1->e[1]))
2905 || (real_e0->e[0] == real_e1->e[1]
2906 && real_e0->e[1] == btor_node_invert (real_e1->e[0])));
2907 }
2908
2909 static inline BtorNode *
apply_resol1_and(Btor * btor,BtorNode * e0,BtorNode * e1)2910 apply_resol1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2911 {
2912 assert (applies_resol1_and (btor, e0, e1));
2913 (void) e1;
2914 return btor_node_invert (
2915 btor_node_copy (btor, btor_node_real_addr (e0)->e[0]));
2916 }
2917
2918 /*
2919 * match: ~(a & b) & ~(c & d), where (~a = c AND b = d) OR (a = d AND ~b = c)
 * result: ~d
2921 *
2922 * rule of resolution
2923 */
2924 static inline bool
applies_resol2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2925 applies_resol2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2926 {
2927 BtorNode *real_e0, *real_e1;
2928 real_e0 = btor_node_real_addr (e0);
2929 real_e1 = btor_node_real_addr (e1);
2930 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
2931 && btor_node_is_bv_and (real_e1) && btor_node_is_inverted (e0)
2932 && btor_node_is_inverted (e1)
2933 && ((real_e1->e[1] == real_e0->e[1]
2934 && real_e1->e[0] == btor_node_invert (real_e0->e[0]))
2935 || (real_e1->e[1] == real_e0->e[0]
2936 && real_e1->e[0] == btor_node_invert (real_e0->e[1])));
2937 }
2938
2939 static inline BtorNode *
apply_resol2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2940 apply_resol2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2941 {
2942 assert (applies_resol2_and (btor, e0, e1));
2943 (void) e0;
2944 return btor_node_invert (
2945 btor_node_copy (btor, btor_node_real_addr (e1)->e[1]));
2946 }
2947
2948 /*
2949 * match: ~(a & b) & c, where a == ~c OR b == ~c
2950 * result: c
2951 *
2952 * first rule of subsumption
2953 */
2954 static inline bool
applies_subsum2_and(Btor * btor,BtorNode * e0,BtorNode * e1)2955 applies_subsum2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2956 {
2957 (void) btor;
2958 BtorNode *real_e0;
2959 real_e0 = btor_node_real_addr (e0);
2960 return btor_node_is_bv_and (real_e0) && btor_node_is_inverted (e0)
2961 && (real_e0->e[0] == btor_node_invert (e1)
2962 || real_e0->e[1] == btor_node_invert (e1));
2963 }
2964
static inline BtorNode *
apply_subsum2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_subsum2_and (btor, e0, e1));
  (void) e0;
  /* the whole conjunction is equivalent to c (== e1) */
  return btor_node_copy (btor, e1);
}
2972
2973 /*
2974 * match: ~(a & b) & c, where b = c
2975 * result: ~a & c
2976 *
2977 * asymmetric rule of substitution
2978 */
2979 static inline bool
applies_subst3_and(Btor * btor,BtorNode * e0,BtorNode * e1)2980 applies_subst3_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2981 {
2982 BtorNode *real_e0;
2983 real_e0 = btor_node_real_addr (e0);
2984 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
2985 && btor_node_is_inverted (e0) && real_e0->e[1] == e1;
2986 }
2987
2988 static inline BtorNode *
apply_subst3_and(Btor * btor,BtorNode * e0,BtorNode * e1)2989 apply_subst3_and (Btor *btor, BtorNode *e0, BtorNode *e1)
2990 {
2991 assert (applies_subst3_and (btor, e0, e1));
2992
2993 BtorNode *result;
2994
2995 BTOR_INC_REC_RW_CALL (btor);
2996 result = rewrite_and_exp (
2997 btor, btor_node_invert (btor_node_real_addr (e0)->e[0]), e1);
2998 BTOR_DEC_REC_RW_CALL (btor);
2999 return result;
3000 }
3001
3002 /*
3003 * match: ~(a & b) & c, where a = c
3004 * result: ~b & c
3005 *
3006 * asymmetric rule of substitution
3007 */
3008 static inline bool
applies_subst4_and(Btor * btor,BtorNode * e0,BtorNode * e1)3009 applies_subst4_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3010 {
3011 BtorNode *real_e0;
3012 real_e0 = btor_node_real_addr (e0);
3013 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (real_e0)
3014 && btor_node_is_inverted (e0) && real_e0->e[0] == e1;
3015 }
3016
3017 static inline BtorNode *
apply_subst4_and(Btor * btor,BtorNode * e0,BtorNode * e1)3018 apply_subst4_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3019 {
3020 assert (applies_subst4_and (btor, e0, e1));
3021
3022 BtorNode *result;
3023
3024 BTOR_INC_REC_RW_CALL (btor);
3025 result = rewrite_and_exp (
3026 btor, btor_node_invert (btor_node_real_addr (e0)->e[1]), e1);
3027 BTOR_DEC_REC_RW_CALL (btor);
3028 return result;
3029 }
3030
3031 /*
3032 * match: a & b & c, where a = ~c OR b = ~c
3033 * result: 0
3034 *
3035 * first rule of contradiction
3036 */
3037 static inline bool
applies_contr3_and(Btor * btor,BtorNode * e0,BtorNode * e1)3038 applies_contr3_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3039 {
3040 (void) btor;
3041 return btor_node_is_bv_and (e0) && !btor_node_is_inverted (e0)
3042 && (e0->e[0] == btor_node_invert (e1)
3043 || e0->e[1] == btor_node_invert (e1));
3044 }
3045
static inline BtorNode *
apply_contr3_and (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_contr3_and (btor, e0, e1));
  (void) e1;
  /* contradiction: the whole conjunction is the zero constant */
  return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
}
3053
3054 /*
3055 * match: a & b & c, where a = c OR b = c
3056 * result: a
3057 *
3058 * asymmetric rule of idempotency
3059 */
3060 static inline bool
applies_idem3_and(Btor * btor,BtorNode * e0,BtorNode * e1)3061 applies_idem3_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3062 {
3063 (void) btor;
3064 return btor_node_is_bv_and (e0) && !btor_node_is_inverted (e0)
3065 && (e0->e[0] == e1 || e0->e[1] == e1);
3066 }
3067
static inline BtorNode *
apply_idem3_and (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_idem3_and (btor, e0, e1));
  (void) e1;
  /* e1 is redundant; the conjunction equals e0 */
  return btor_node_copy (btor, e0);
}
3075
3076 /*
3077 * match: a & b & c, where a and c are constants
3078 * result: d & b, where d is a new constant obtained from a & c
3079 */
3080 static inline bool
applies_const1_and(Btor * btor,BtorNode * e0,BtorNode * e1)3081 applies_const1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3082 {
3083 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (e0)
3084 && !btor_node_is_inverted (e0) && btor_node_is_bv_const (e1)
3085 && btor_node_is_bv_const (e0->e[0]);
3086 }
3087
3088 static inline BtorNode *
apply_const1_and(Btor * btor,BtorNode * e0,BtorNode * e1)3089 apply_const1_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3090 {
3091 assert (applies_const1_and (btor, e0, e1));
3092 assert (!btor_node_is_bv_const (e0->e[1]));
3093
3094 BtorNode *tmp, *result;
3095
3096 BTOR_INC_REC_RW_CALL (btor);
3097 tmp = rewrite_and_exp (btor, e1, e0->e[0]);
3098 result = rewrite_and_exp (btor, tmp, e0->e[1]);
3099 BTOR_DEC_REC_RW_CALL (btor);
3100 btor_node_release (btor, tmp);
3101 return result;
3102 }
3103
3104 /*
3105 * match: a & b & c, where b and c are constants
3106 * result: d & a, where d is a new constant obtained from b & c
3107 */
3108 static inline bool
applies_const2_and(Btor * btor,BtorNode * e0,BtorNode * e1)3109 applies_const2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3110 {
3111 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_and (e0)
3112 && !btor_node_is_inverted (e0) && btor_node_is_bv_const (e1)
3113 && btor_node_is_bv_const (e0->e[1]);
3114 }
3115
3116 static inline BtorNode *
apply_const2_and(Btor * btor,BtorNode * e0,BtorNode * e1)3117 apply_const2_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3118 {
3119 assert (applies_const2_and (btor, e0, e1));
3120 assert (!btor_node_is_bv_const (e0->e[0]));
3121
3122 BtorNode *tmp, *result;
3123
3124 BTOR_INC_REC_RW_CALL (btor);
3125 tmp = rewrite_and_exp (btor, e1, e0->e[1]);
3126 result = rewrite_and_exp (btor, tmp, e0->e[0]);
3127 BTOR_DEC_REC_RW_CALL (btor);
3128 btor_node_release (btor, tmp);
3129 return result;
3130 }
3131
3132 /*
3133 * match: (a < b) & (b < a)
3134 * result: false
3135 */
3136 static inline bool
applies_ult_false_and(Btor * btor,BtorNode * e0,BtorNode * e1)3137 applies_ult_false_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3138 {
3139 (void) btor;
3140 return btor_node_is_bv_ult (e0) && btor_node_is_bv_ult (e1)
3141 && !btor_node_is_inverted (e0) && !btor_node_is_inverted (e1)
3142 && e0->e[0] == e1->e[1] && e0->e[1] == e1->e[0];
3143 }
3144
static inline BtorNode *
apply_ult_false_and (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_ult_false_and (btor, e0, e1));
  (void) e0;
  (void) e1;
  /* unsatisfiable conjunction */
  return btor_exp_false (btor);
}
3153
3154 /*
3155 * match: ~(a < b) & ~(b < a)
3156 * result: a = b
3157 */
3158 static inline bool
applies_ult_and(Btor * btor,BtorNode * e0,BtorNode * e1)3159 applies_ult_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3160 {
3161 BtorNode *real_e0, *real_e1;
3162 real_e0 = btor_node_real_addr (e0);
3163 real_e1 = btor_node_real_addr (e1);
3164 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_ult (real_e0)
3165 && btor_node_is_bv_ult (real_e1) && btor_node_is_inverted (e0)
3166 && btor_node_is_inverted (e1) && real_e0->e[0] == real_e1->e[1]
3167 && real_e0->e[1] == real_e1->e[0];
3168 }
3169
3170 static inline BtorNode *
apply_ult_and(Btor * btor,BtorNode * e0,BtorNode * e1)3171 apply_ult_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3172 {
3173 assert (applies_ult_and (btor, e0, e1));
3174 (void) e1;
3175
3176 BtorNode *result;
3177
3178 BTOR_INC_REC_RW_CALL (btor);
3179 result = rewrite_eq_exp (
3180 btor, btor_node_real_addr (e0)->e[0], btor_node_real_addr (e0)->e[1]);
3181 BTOR_DEC_REC_RW_CALL (btor);
3182 return result;
3183 }
3184
3185 /*
3186 * recursively find contradicting ands
3187 */
3188 static inline bool
applies_contr_rec_and(Btor * btor,BtorNode * e0,BtorNode * e1)3189 applies_contr_rec_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3190 {
3191 uint32_t calls = 0;
3192 return find_and_contradiction_exp (btor, e0, e0, e1, &calls)
3193 || find_and_contradiction_exp (btor, e1, e0, e1, &calls);
3194 }
3195
static inline BtorNode *
apply_contr_rec_and (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_contr_rec_and (btor, e0, e1));
  (void) e1;
  /* a contradiction was found, the conjunction is constant zero */
  return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
}
3203
3204 /*
3205 * match: (0::a) & (b::0)
3206 * result: 0
3207 *
3208 * match: (0::a) & (b::1)
3209 * result: 0::a
3210 *
3211 * match: (1::a) & (b::1)
3212 * result: b::a
3213 */
3214 static inline bool
applies_concat_and(Btor * btor,BtorNode * e0,BtorNode * e1)3215 applies_concat_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3216 {
3217 bool result;
3218 BtorNode *real_e0, *real_e1, *e00, *e01, *e10, *e11;
3219 real_e0 = btor_node_real_addr (e0);
3220 real_e1 = btor_node_real_addr (e1);
3221
3222 result = btor->rec_rw_calls < BTOR_REC_RW_BOUND
3223 && btor_node_is_bv_concat (real_e0)
3224 && btor_node_is_bv_concat (real_e1)
3225 && btor_node_get_sort_id (real_e0->e[0])
3226 == btor_node_get_sort_id (real_e1->e[0]);
3227
3228 if (!result) return result;
3229
3230 e00 = btor_node_cond_invert (e0, real_e0->e[0]);
3231 e01 = btor_node_cond_invert (e0, real_e0->e[1]);
3232 e10 = btor_node_cond_invert (e1, real_e1->e[0]);
3233 e11 = btor_node_cond_invert (e1, real_e1->e[1]);
3234 return ((is_bv_const_zero_or_ones_exp (btor, e00)
3235 && is_bv_const_zero_or_ones_exp (btor, e11))
3236 || (is_bv_const_zero_or_ones_exp (btor, e01)
3237 && is_bv_const_zero_or_ones_exp (btor, e10)));
3238 }
3239
3240 static inline BtorNode *
apply_concat_and(Btor * btor,BtorNode * e0,BtorNode * e1)3241 apply_concat_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3242 {
3243 assert (applies_concat_and (btor, e0, e1));
3244
3245 BtorNode *real_e0, *real_e1, *e00, *e01, *e10, *e11, *left, *right, *result;
3246
3247 real_e0 = btor_node_real_addr (e0);
3248 real_e1 = btor_node_real_addr (e1);
3249 e00 = btor_node_cond_invert (e0, real_e0->e[0]);
3250 e01 = btor_node_cond_invert (e0, real_e0->e[1]);
3251 e10 = btor_node_cond_invert (e1, real_e1->e[0]);
3252 e11 = btor_node_cond_invert (e1, real_e1->e[1]);
3253
3254 BTOR_INC_REC_RW_CALL (btor);
3255 left = btor_exp_bv_and (btor, e00, e10);
3256 right = btor_exp_bv_and (btor, e01, e11);
3257 result = rewrite_concat_exp (btor, left, right);
3258 btor_node_release (btor, right);
3259 btor_node_release (btor, left);
3260 BTOR_DEC_REC_RW_CALL (btor);
3261
3262 return result;
3263 }
3264
/*
 * match:  (x ? a : b) & c (condition node possibly inverted), where the
 *         real node's 'then' or 'else' branch is the constant zero
 * result: x ? (a & c) : (b & c), see apply_push_ite_and
 */
static inline bool
applies_push_ite_and (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  (void) e1;
  return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
         && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (e0)
         && (btor_node_is_bv_const_zero (btor, btor_node_real_addr (e0)->e[1])
             || btor_node_is_bv_const_zero (btor,
                                            btor_node_real_addr (e0)->e[2]));
}
3275
3276 static inline BtorNode *
apply_push_ite_and(Btor * btor,BtorNode * e0,BtorNode * e1)3277 apply_push_ite_and (Btor *btor, BtorNode *e0, BtorNode *e1)
3278 {
3279 assert (applies_push_ite_and (btor, e0, e1));
3280
3281 BtorNode *result, *and_left, *and_right, *real_e0;
3282
3283 real_e0 = btor_node_real_addr (e0);
3284 BTOR_INC_REC_RW_CALL (btor);
3285 and_left =
3286 rewrite_and_exp (btor, btor_node_cond_invert (e0, real_e0->e[1]), e1);
3287 and_right =
3288 rewrite_and_exp (btor, btor_node_cond_invert (e0, real_e0->e[2]), e1);
3289
3290 result = rewrite_cond_exp (btor, real_e0->e[0], and_left, and_right);
3291 BTOR_DEC_REC_RW_CALL (btor);
3292 btor_node_release (btor, and_left);
3293 btor_node_release (btor, and_right);
3294 return result;
3295 }
3296
3297 #if 0
3298 /*
3299 * match:
3300 * result:
3301 */
3302 static inline bool
3303 applies_and (Btor * btor, BtorNode * e0, BtorNode * e1)
3304 {
3305 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3306 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
3307 && !btor_node_is_inverted (e0)
3308 && btor_node_is_bv_cond (e0);
3309 }
3310
3311 static inline BtorNode *
3312 apply_and (Btor * btor, BtorNode * e0, BtorNode * e1)
3313 {
3314 assert (applies_and (btor, e0, e1));
3315
3316 }
3317
3318 // TODO (ma): uses shallow substitute, which does not really work
3319 if (!btor_node_is_inverted (e0) &&
3320 e0->kind == BTOR_BV_EQ_NODE &&
3321 btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
3322 btor->rec_rw_calls < BTOR_REC_RW_BOUND)
3323 {
3324 BtorNode * e1_simp = condrewrite (btor, e1, e0);
3325 if (e1_simp != e1)
3326 {
3327 BTOR_INC_REC_RW_CALL (btor);
3328 result = rewrite_and_exp (btor, e0, e1_simp);
3329 BTOR_DEC_REC_RW_CALL (btor);
3330 }
3331 else
3332 result = 0;
3333 btor_node_release (btor, e1_simp);
3334 if (result)
3335 {
3336 assert (!normalized);
3337 return result;
3338 }
3339 }
3340
3341 if (!btor_node_is_inverted (e1) &&
3342 e1->kind == BTOR_BV_EQ_NODE &&
3343 btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
3344 btor->rec_rw_calls < BTOR_REC_RW_BOUND)
3345 {
3346 BtorNode * e0_simp = condrewrite (btor, e0, e1);
3347 if (e0_simp != e0)
3348 {
3349 BTOR_INC_REC_RW_CALL (btor);
3350 result = rewrite_and_exp (btor, e0_simp, e1);
3351 BTOR_DEC_REC_RW_CALL (btor);
3352 }
3353 else
3354 result = 0;
3355 btor_node_release (btor, e0_simp);
3356 if (result)
3357 {
3358 assert (!normalized);
3359 return result;
3360 }
3361 }
3362 #endif
3363
3364
3365 /* ADD rules */
3366 /* -------------------------------------------------------------------------- */
3367
3368 /*
3369 * match: a + b, where len(a) = 1
3370 * result: a XOR b
3371 */
3372 static inline bool
applies_bool_add(Btor * btor,BtorNode * e0,BtorNode * e1)3373 applies_bool_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3374 {
3375 (void) e1;
3376 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
3377 && btor_node_bv_get_width (btor, e0) == 1;
3378 }
3379
static inline BtorNode *
apply_bool_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_bool_add (btor, e0, e1));

  BtorNode *result;

  /* replace the 1-bit adder by xor */
  BTOR_INC_REC_RW_CALL (btor);
  result = btor_exp_bv_xor (btor, e0, e1);
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
3392
3393 /*
3394 * match: a - b OR -a + b, where a = b
3395 * result: 0
3396 */
3397 static inline bool
applies_neg_add(Btor * btor,BtorNode * e0,BtorNode * e1)3398 applies_neg_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3399 {
3400 (void) btor;
3401 return !btor_node_is_inverted (e1) && btor_node_is_bv_add (e1)
3402 && ((e0 == btor_node_invert (e1->e[0])
3403 && btor_node_is_bv_const_one (btor, e1->e[1]))
3404 || (e0 == btor_node_invert (e1->e[1])
3405 && btor_node_is_bv_const_one (btor, e1->e[0])));
3406 }
3407
static inline BtorNode *
apply_neg_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_neg_add (btor, e0, e1));
  (void) e1;
  /* a + (-a) == 0 */
  return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
}
3415
3416 /*
3417 * match: 0 + b
3418 * result: b
3419 */
3420 static inline bool
applies_zero_add(Btor * btor,BtorNode * e0,BtorNode * e1)3421 applies_zero_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3422 {
3423 (void) e1;
3424 return is_const_zero_exp (btor, e0);
3425 }
3426
static inline BtorNode *
apply_zero_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_zero_add (btor, e0, e1));
  (void) e0;
  /* adding zero is the identity */
  return btor_node_copy (btor, e1);
}
3434
3435 /*
3436 * match: c0 + (c1 + b), where c0 and c1 are constants
3437 * result: c + b, where c is a new constant from c0 + c1
3438 *
3439 * recursion is no problem here, as one call leads to
3440 * folding of constants, and the other call can not
3441 * trigger the same kind of recursion anymore.
3442 */
3443 static inline bool
applies_const_lhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3444 applies_const_lhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3445 {
3446 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e0)
3447 && !btor_node_is_inverted (e1) && btor_node_is_bv_add (e1)
3448 && btor_node_is_bv_const (e1->e[0]);
3449 }
3450
3451 static inline BtorNode *
apply_const_lhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3452 apply_const_lhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3453 {
3454 assert (applies_const_lhs_add (btor, e0, e1));
3455 assert (!btor_node_is_bv_const (e1->e[1]));
3456
3457 BtorNode *result, *tmp;
3458
3459 BTOR_INC_REC_RW_CALL (btor);
3460 tmp = rewrite_add_exp (btor, e0, e1->e[0]);
3461 result = rewrite_add_exp (btor, tmp, e1->e[1]);
3462 BTOR_DEC_REC_RW_CALL (btor);
3463 btor_node_release (btor, tmp);
3464 return result;
3465 }
3466
3467 /*
3468 * match: c0 + (b + c1), where c0 and c1 are constants
3469 * result: c + b, where c is a new constant from c0 + c1
3470 *
3471 * recursion is no problem here, as one call leads to
3472 * folding of constants, and the other call can not
3473 * trigger the same kind of recursion anymore.
3474 */
3475 static inline bool
applies_const_rhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3476 applies_const_rhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3477 {
3478 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e0)
3479 && !btor_node_is_inverted (e1) && btor_node_is_bv_add (e1)
3480 && btor_node_is_bv_const (e1->e[1]);
3481 }
3482
3483 static inline BtorNode *
apply_const_rhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3484 apply_const_rhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3485 {
3486 assert (applies_const_rhs_add (btor, e0, e1));
3487 assert (!btor_node_is_bv_const (e1->e[0]));
3488
3489 BtorNode *result, *tmp;
3490
3491 BTOR_INC_REC_RW_CALL (btor);
3492 tmp = rewrite_add_exp (btor, e0, e1->e[1]);
3493 result = rewrite_add_exp (btor, tmp, e1->e[0]);
3494 BTOR_DEC_REC_RW_CALL (btor);
3495 btor_node_release (btor, tmp);
3496 return result;
3497 }
3498
3499 #if 0
3500 // TODO: problematic as long we do not do 'addneg normalization'
3501 //
3502 // e0 + e1 == ~(e00 + e01) + e1
3503 // == (-(e00 + e01) -1) + e1
3504 // == - e00 - e01 - 1 + e1
3505 // == (~e00 + 1) + (~e01 + 1) - 1 + e1
3506 // == ((~e00 + ~e01) + 1) + e1
3507 //
3508 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
3509 btor_node_is_inverted (e0) &&
3510 btor->rec_rw_calls < BTOR_REC_RW_BOUND &&
3511 (temp = btor_node_real_addr (e0))->kind == BTOR_BV_ADD_NODE)
3512 {
3513 BtorNode * e00 = temp->e[0];
3514 BtorNode * e01 = temp->e[1];
3515 BtorNode * one, * sum;
3516 BTOR_INC_REC_RW_CALL (btor);
3517 one = btor_exp_bv_one (btor, btor_node_get_sort_id (temp));
3518 temp = btor_exp_bv_add (btor,
3519 btor_node_invert (e00), btor_node_invert (e01));
3520 sum = btor_exp_bv_add (btor, temp, one);
3521 result = btor_exp_bv_add (btor, sum, e1);
3522 BTOR_DEC_REC_RW_CALL (btor);
3523 btor_node_release (btor, sum);
3524 btor_node_release (btor, temp);
3525 btor_node_release (btor, one);
3526 return result;
3527 }
3528
3529 // TODO: problematic as long we do not do 'addneg normalization'
3530 //
3531 // e0 + e1 == e0 + ~(e10 + e11)
3532 // == e0 + (-(e10 + e11) -1)
3533 // == e0 - e10 - e11 - 1
3534 // == e0 + (~e10 + 1) + (~e11 + 1) - 1
3535 // == e0 + ((~e10 + ~e11) + 1)
3536 //
3537 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
3538 btor_node_is_inverted (e1) &&
3539 btor->rec_rw_calls < BTOR_REC_RW_BOUND &&
3540 (temp = btor_node_real_addr (e1))->kind == BTOR_BV_ADD_NODE)
3541 {
3542 BtorNode * e10 = temp->e[0];
3543 BtorNode * e11 = temp->e[1];
3544 BtorNode * one, * sum;
3545 BTOR_INC_REC_RW_CALL (btor);
3546 one = btor_exp_bv_one (btor, btor_node_get_sort_id (temp));
3547 temp = btor_exp_bv_add (btor,
3548 btor_node_invert (e10), btor_node_invert (e11));
3549 sum = btor_exp_bv_add (btor, temp, one);
3550 result = btor_exp_bv_add (btor, e0, sum);
3551 BTOR_DEC_REC_RW_CALL (btor);
3552 btor_node_release (btor, sum);
3553 btor_node_release (btor, temp);
3554 btor_node_release (btor, one);
3555 return result;
3556 }
3557 #endif
3558
3559 /*
3560 * match: ~(c * a) + b
3561 * result: ((-c) * a - 1) + b
3562 */
3563 static inline bool
applies_const_neg_lhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3564 applies_const_neg_lhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3565 {
3566 (void) e1;
3567 BtorNode *real_e0;
3568 real_e0 = btor_node_real_addr (e0);
3569 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3570 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_inverted (e0)
3571 && btor_node_is_bv_mul (real_e0)
3572 && btor_node_is_bv_const (real_e0->e[0]);
3573 }
3574
3575 static inline BtorNode *
apply_const_neg_lhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3576 apply_const_neg_lhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3577 {
3578 assert (applies_const_neg_lhs_add (btor, e0, e1));
3579
3580 BtorNode *result, *real_e0, *e00, *e01, *n00, *n01, *one, *sum, *tmp;
3581
3582 real_e0 = btor_node_real_addr (e0);
3583 e00 = real_e0->e[0];
3584 e01 = real_e0->e[1];
3585
3586 BTOR_INC_REC_RW_CALL (btor);
3587 n00 = btor_exp_bv_neg (btor, e00);
3588 n01 = btor_node_copy (btor, e01);
3589 one = btor_exp_bv_one (btor, btor_node_get_sort_id (real_e0));
3590 tmp = rewrite_mul_exp (btor, n00, n01);
3591 sum = btor_exp_bv_sub (btor, tmp, one);
3592 result = rewrite_add_exp (btor, sum, e1);
3593 btor_node_release (btor, sum);
3594 btor_node_release (btor, tmp);
3595 btor_node_release (btor, one);
3596 btor_node_release (btor, n00);
3597 btor_node_release (btor, n01);
3598 BTOR_DEC_REC_RW_CALL (btor);
3599 return result;
3600 }
3601
3602 /*
3603 * match: ~(a * c) + b
3604 * result: (a * (-c) - 1) + b
3605 */
3606 static inline bool
applies_const_neg_rhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3607 applies_const_neg_rhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3608 {
3609 (void) e1;
3610 BtorNode *real_e0;
3611 real_e0 = btor_node_real_addr (e0);
3612 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3613 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_inverted (e0)
3614 && btor_node_is_bv_mul (real_e0)
3615 && btor_node_is_bv_const (real_e0->e[1]);
3616 }
3617
3618 static inline BtorNode *
apply_const_neg_rhs_add(Btor * btor,BtorNode * e0,BtorNode * e1)3619 apply_const_neg_rhs_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3620 {
3621 assert (applies_const_neg_rhs_add (btor, e0, e1));
3622
3623 BtorNode *result, *real_e0, *e00, *e01, *n00, *n01, *one, *sum, *tmp;
3624
3625 real_e0 = btor_node_real_addr (e0);
3626 e00 = real_e0->e[0];
3627 e01 = real_e0->e[1];
3628
3629 BTOR_INC_REC_RW_CALL (btor);
3630 n00 = btor_node_copy (btor, e00);
3631 n01 = btor_exp_bv_neg (btor, e01);
3632 one = btor_exp_bv_one (btor, btor_node_get_sort_id (real_e0));
3633 tmp = rewrite_mul_exp (btor, n00, n01);
3634 sum = btor_exp_bv_sub (btor, tmp, one);
3635 result = rewrite_add_exp (btor, sum, e1);
3636 btor_node_release (btor, sum);
3637 btor_node_release (btor, tmp);
3638 btor_node_release (btor, one);
3639 btor_node_release (btor, n00);
3640 btor_node_release (btor, n01);
3641 BTOR_DEC_REC_RW_CALL (btor);
3642 return result;
3643 }
3644
3645 /*
3646 * match: (a + (b << a))
3647 * result: (a | (b << a))
3648 */
3649 static inline bool
applies_sll_add(Btor * btor,BtorNode * e0,BtorNode * e1)3650 applies_sll_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3651 {
3652 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3653 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
3654 && !btor_node_is_inverted (e1) && btor_node_is_bv_sll (e1)
3655 && btor_node_is_bv_slice (e1->e[1])
3656 && !btor_node_is_inverted (e1->e[1]) && e0 == e1->e[1]->e[0];
3657 }
3658
static inline BtorNode *
apply_sll_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_sll_add (btor, e0, e1));

  BtorNode *result;
  /* build the OR via De Morgan: a | b == ~(~a & ~b) */
  BTOR_INC_REC_RW_CALL (btor);
  result = btor_node_invert (
      rewrite_and_exp (btor, btor_node_invert (e0), btor_node_invert (e1)));
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
3671
/*
 * match:  (x ? a : b) + c, where a = 0 or b = 0
 * result: x ? (a + c) : (b + c)
 *
 * one of the two new sums collapses to c immediately (see the assertion
 * in apply_push_ite_add), so no additional adder is introduced
 */
static inline bool
applies_push_ite_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  (void) e1;
  return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
         && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (e0)
         && !btor_node_is_inverted (e0)
         && (btor_node_is_bv_const_zero (btor, e0->e[1])
             || btor_node_is_bv_const_zero (btor, e0->e[2]));
}
3682
3683 static inline BtorNode *
apply_push_ite_add(Btor * btor,BtorNode * e0,BtorNode * e1)3684 apply_push_ite_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3685 {
3686 assert (applies_push_ite_add (btor, e0, e1));
3687
3688 BtorNode *result, *add_left, *add_right;
3689
3690 BTOR_INC_REC_RW_CALL (btor);
3691 add_left = rewrite_add_exp (btor, e0->e[1], e1);
3692 add_right = rewrite_add_exp (btor, e0->e[2], e1);
3693
3694 assert (add_left == e1 || add_right == e1);
3695
3696 result = rewrite_cond_exp (btor, e0->e[0], add_left, add_right);
3697 BTOR_DEC_REC_RW_CALL (btor);
3698 btor_node_release (btor, add_left);
3699 btor_node_release (btor, add_right);
3700 return result;
3701 }
3702
3703 /*
3704 * match: a + a
3705 * result: 2 * a
3706 */
3707 static inline bool
applies_mult_add(Btor * btor,BtorNode * e0,BtorNode * e1)3708 applies_mult_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3709 {
3710 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && e0 == e1
3711 && btor_node_bv_get_width (btor, e0) >= 2;
3712 }
3713
3714 static inline BtorNode *
apply_mult_add(Btor * btor,BtorNode * e0,BtorNode * e1)3715 apply_mult_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3716 {
3717 assert (applies_mult_add (btor, e0, e1));
3718 (void) e1;
3719
3720 BtorNode *result, *tmp;
3721
3722 BTOR_INC_REC_RW_CALL (btor);
3723 tmp = btor_exp_bv_int (btor, 2, btor_node_get_sort_id (e0));
3724 result = rewrite_mul_exp (btor, e0, tmp);
3725 btor_node_release (btor, tmp);
3726 BTOR_DEC_REC_RW_CALL (btor);
3727 return result;
3728 }
3729
3730 /*
3731 * match: a + ~a
3732 * result: -1
3733 */
3734 static inline bool
applies_not_add(Btor * btor,BtorNode * e0,BtorNode * e1)3735 applies_not_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3736 {
3737 (void) btor;
3738 return btor_node_real_addr (e0) == btor_node_real_addr (e1) && e0 != e1;
3739 }
3740
static inline BtorNode *
apply_not_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_not_add (btor, e0, e1));
  (void) e1;
  /* a + ~a == all ones == -1 (two's complement) */
  return btor_exp_bv_ones (btor, btor_node_get_sort_id (e0));
}
3748
// TODO (ma): conditional rewriting: check if a and c or b and d are constants
/*
 * match:  (x ? a : b) + (x ? c : d), where either a = c or b = d
 * result: x ? a + c : b + d
 */
static inline bool
applies_bcond_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *real_e0, *real_e1;
  real_e0 = btor_node_real_addr (e0);
  real_e1 = btor_node_real_addr (e1);
  return btor->rec_rw_calls < BTOR_REC_RW_BOUND
         && btor_node_is_bv_cond (real_e0) && btor_node_is_bv_cond (real_e1)
         && btor_node_is_inverted (e0)
                == btor_node_is_inverted (e1) // TODO: needed?
         && real_e0->e[0] == real_e1->e[0]
         && (real_e0->e[1] == real_e1->e[1] || real_e0->e[2] == real_e1->e[2]);
}
3767
3768 static inline BtorNode *
apply_bcond_add(Btor * btor,BtorNode * e0,BtorNode * e1)3769 apply_bcond_add (Btor *btor, BtorNode *e0, BtorNode *e1)
3770 {
3771 assert (applies_bcond_add (btor, e0, e1));
3772
3773 BtorNode *result, *left, *right, *real_e0, *real_e1;
3774
3775 real_e0 = btor_node_real_addr (e0);
3776 real_e1 = btor_node_real_addr (e1);
3777 BTOR_INC_REC_RW_CALL (btor);
3778 left = rewrite_add_exp (btor,
3779 btor_node_cond_invert (e0, real_e0->e[1]),
3780 btor_node_cond_invert (e1, real_e1->e[1]));
3781 right = rewrite_add_exp (btor,
3782 btor_node_cond_invert (e0, real_e0->e[2]),
3783 btor_node_cond_invert (e1, real_e1->e[2]));
3784 result = rewrite_cond_exp (btor, real_e0->e[0], left, right);
3785 BTOR_DEC_REC_RW_CALL (btor);
3786 btor_node_release (btor, left);
3787 btor_node_release (btor, right);
3788 return result;
3789 }
3790
/*
 * match:  an addition that is_urem_exp recognizes as an unsigned
 *         remainder pattern over some operands x, y
 * result: x % y (see apply_urem_add)
 */
static inline bool
applies_urem_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  /* 0, 0: match only, do not extract the operands yet */
  return is_urem_exp (btor, e0, e1, 0, 0);
}
3796
static inline BtorNode *
apply_urem_add (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_urem_add (btor, e0, e1));

  BtorNode *x, *y;
  /* second call extracts the operands detected by the match */
  is_urem_exp (btor, e0, e1, &x, &y);
  return rewrite_urem_exp (btor, x, y);
}
3806
3807
3808 /* MUL rules */
3809 /* -------------------------------------------------------------------------- */
3810
3811 /*
3812 * match: a * b, wher len(a) = 1
3813 * result: a & b
3814 */
3815 static inline bool
applies_bool_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3816 applies_bool_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3817 {
3818 (void) e1;
3819 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
3820 && btor_node_bv_get_width (btor, e0) == 1;
3821 }
3822
static inline BtorNode *
apply_bool_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_bool_mul (btor, e0, e1));

  BtorNode *result;
  /* replace the 1-bit multiplier by and */
  BTOR_INC_REC_RW_CALL (btor);
  result = rewrite_and_exp (btor, e0, e1);
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
3834
3835 /*
3836 * match: c0 * (c1 * b), where c0 and c1 are constants
3837 * result: c * b, where c is a new constant from c0 * c1
3838 */
3839 static inline bool
applies_const_lhs_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3840 applies_const_lhs_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3841 {
3842 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e0)
3843 && !btor_node_is_inverted (e1) && btor_node_is_bv_mul (e1)
3844 && btor_node_is_bv_const (e1->e[0]);
3845 }
3846
3847 static inline BtorNode *
apply_const_lhs_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3848 apply_const_lhs_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3849 {
3850 assert (applies_const_lhs_mul (btor, e0, e1));
3851
3852 BtorNode *result, *tmp;
3853
3854 BTOR_INC_REC_RW_CALL (btor);
3855 tmp = rewrite_mul_exp (btor, e0, e1->e[0]);
3856 result = rewrite_mul_exp (btor, tmp, e1->e[1]);
3857 BTOR_DEC_REC_RW_CALL (btor);
3858 btor_node_release (btor, tmp);
3859 return result;
3860 }
3861
3862 /*
3863 * match: c0 * (b * c1), where c0 and c1 are constants
3864 * result: c * b, where c is a new constant from c0 * c1
3865 */
3866 static inline bool
applies_const_rhs_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3867 applies_const_rhs_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3868 {
3869 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e0)
3870 && !btor_node_is_inverted (e1) && btor_node_is_bv_mul (e1)
3871 && btor_node_is_bv_const (e1->e[1]);
3872 }
3873
3874 static inline BtorNode *
apply_const_rhs_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3875 apply_const_rhs_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3876 {
3877 assert (applies_const_rhs_mul (btor, e0, e1));
3878
3879 BtorNode *result, *tmp;
3880
3881 BTOR_INC_REC_RW_CALL (btor);
3882 tmp = rewrite_mul_exp (btor, e0, e1->e[1]);
3883 result = rewrite_mul_exp (btor, tmp, e1->e[0]);
3884 BTOR_DEC_REC_RW_CALL (btor);
3885 btor_node_release (btor, tmp);
3886 return result;
3887 }
3888
3889 /*
3890 * match: c0 * (a + c1)
3891 * result: c0 * a + c, where c is a new constant from c0 * c1
3892 */
3893 static inline bool
applies_const_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3894 applies_const_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3895 {
3896 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3897 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e0)
3898 && !btor_node_is_inverted (e1) && btor_node_is_bv_add (e1)
3899 && (btor_node_is_bv_const (e1->e[0])
3900 || btor_node_is_bv_const (e1->e[1]));
3901 }
3902
static inline BtorNode *
apply_const_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_const_mul (btor, e0, e1));

  BtorNode *result, *left, *right;

  BTOR_INC_REC_RW_CALL (btor);
  /* distribute the constant over the addition; the constant summand of
   * 'e1' folds into a new constant during rewriting */
  left = rewrite_mul_exp (btor, e0, e1->e[0]);
  right = rewrite_mul_exp (btor, e0, e1->e[1]);
  result = rewrite_add_exp (btor, left, right);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, left);
  btor_node_release (btor, right);
  return result;
}
3919
3920 /*
3921 *
3922 *
3923 */
3924 static inline bool
applies_push_ite_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3925 applies_push_ite_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3926 {
3927 (void) e1;
3928 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3929 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (e0)
3930 && !btor_node_is_inverted (e0)
3931 && (btor_node_is_bv_const_zero (btor, e0->e[1])
3932 || btor_node_is_bv_const_zero (btor, e0->e[2]));
3933 }
3934
static inline BtorNode *
apply_push_ite_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_push_ite_mul (btor, e0, e1));

  BtorNode *result, *mul_left, *mul_right;

  BTOR_INC_REC_RW_CALL (btor);
  /* push the multiplication into both branches of the conditional */
  mul_left = rewrite_mul_exp (btor, e0->e[1], e1);
  mul_right = rewrite_mul_exp (btor, e0->e[2], e1);

  /* one branch was constant zero, hence its product must rewrite to zero */
  assert (btor_node_is_bv_const_zero (btor, mul_left)
          || btor_node_is_bv_const_zero (btor, mul_right));

  result = rewrite_cond_exp (btor, e0->e[0], mul_left, mul_right);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, mul_left);
  btor_node_release (btor, mul_right);
  return result;
}
3955
3956 static inline bool
applies_sll_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3957 applies_sll_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3958 {
3959 (void) e1;
3960 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3961 && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_sll (e0)
3962 && !btor_node_is_inverted (e0);
3963 }
3964
static inline BtorNode *
apply_sll_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_sll_mul (btor, e0, e1));

  BtorNode *result, *mul;

  BTOR_INC_REC_RW_CALL (btor);
  /* (a << b) * e1 == (a * e1) << b  (mod 2^n) */
  mul = rewrite_mul_exp (btor, e0->e[0], e1);
  result = rewrite_sll_exp (btor, mul, e0->e[1]);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, mul);
  return result;
}
3979
3980 static inline bool
applies_neg_mul(Btor * btor,BtorNode * e0,BtorNode * e1)3981 applies_neg_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
3982 {
3983 (void) e1;
3984 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
3985 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
3986 && btor_node_bv_is_neg (btor, e0, 0)
3987 && btor_node_bv_is_neg (btor, e1, 0);
3988 }
3989
static inline BtorNode *
apply_neg_mul (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_neg_mul (btor, e0, e1));

  BtorNode *result, *a, *b;

  /* extract the negated operands; the two negations cancel out */
  btor_node_bv_is_neg (btor, e0, &a);
  btor_node_bv_is_neg (btor, e1, &b);

  BTOR_INC_REC_RW_CALL (btor);
  result = rewrite_mul_exp (btor, a, b);
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
4005
4006 #if 0
4007 // TODO: why should we disable this?
4008 //
4009 if (btor->rec_rw_calls < BTOR_REC_RW_BOUND)
4010 {
4011 if (is_const_ones_exp (btor, e0))
4012 result = e1;
4013 else
4014 if (is_const_ones_exp (btor, e1))
4015 result = e0;
4016 else
4017 result = 0;
4018
4019 if (result)
4020 {
4021 BtorNode * tmp, * one = btor_exp_bv_one (btor, btor_node_get_sort_id (result));
4022 BTOR_INC_REC_RW_CALL (btor);
4023 tmp = btor_exp_bv_add (btor, btor_node_invert (result), one);
4024 BTOR_DEC_REC_RW_CALL (btor);
4025 btor_node_release (btor, one);
4026 result = tmp;
4027 goto HAVE_RESULT_BUT_MIGHT_NEED_TO_RELEASE_SOMETHING;
4028 }
4029 }
4030 #endif
4031
4032 #if 0
4033 // TODO (ma): conditional rewriting: check if a and c or b and d are constants
4034 /* match: (x ? a : b) * (x : c : d), where either a = c or b = d
4035 * result: x ? a * c : b * d
4036 */
4037 static inline bool
4038 applies_bcond_mul (Btor * btor, BtorNode * e0, BtorNode * e1)
4039 {
4040 BtorNode *real_e0, *real_e1;
4041 real_e0 = btor_node_real_addr (e0);
4042 real_e1 = btor_node_real_addr (e1);
4043 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
4044 && btor_node_is_bv_cond (real_e0)
4045 && btor_node_is_bv_cond (real_e1)
4046 && btor_node_is_inverted (e0) == btor_node_is_inverted (e1) // TODO: needed?
4047 && real_e0->e[0] == real_e1->e[0]
4048 && (real_e0->e[1] == real_e1->e[1]
4049 || real_e0->e[2] == real_e1->e[2]);
4050 }
4051
4052 static inline BtorNode *
4053 apply_bcond_mul (Btor * btor, BtorNode * e0, BtorNode * e1)
4054 {
4055 assert (applies_bcond_mul (btor, e0, e1));
4056
4057 BtorNode *result, *left, *right, *real_e0, *real_e1;
4058
4059 real_e0 = btor_node_real_addr (e0);
4060 real_e1 = btor_node_real_addr (e1);
4061 BTOR_INC_REC_RW_CALL (btor);
4062 left = rewrite_mul_exp (btor,
4063 btor_node_cond_invert (e0, real_e0->e[1]),
4064 btor_node_cond_invert (e1, real_e1->e[1]));
4065 right = rewrite_mul_exp (btor,
4066 btor_node_cond_invert (e0, real_e0->e[2]),
4067 btor_node_cond_invert (e1, real_e1->e[2]));
4068 result = rewrite_cond_exp (btor, real_e0->e[0], left, right);
4069 BTOR_DEC_REC_RW_CALL (btor);
4070 btor_node_release (btor, left);
4071 btor_node_release (btor, right);
4072 return result;
4073 }
4074 #endif
4075
4076
4077 /* UDIV rules */
4078 /* -------------------------------------------------------------------------- */
4079
4080 /*
4081 * match: a / b, where len(a) = 1
4082 * result: ~(~a & b)
4083 */
4084 static inline bool
applies_bool_udiv(Btor * btor,BtorNode * e0,BtorNode * e1)4085 applies_bool_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
4086 {
4087 (void) e1;
4088 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
4089 && btor_node_bv_get_width (btor, e0) == 1;
4090 }
4091
static inline BtorNode *
apply_bool_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_bool_udiv (btor, e0, e1));

  BtorNode *result;

  BTOR_INC_REC_RW_CALL (btor);
  /* 1-bit udiv: b = 1 -> a, b = 0 -> 1 (division by zero yields ones),
   * which is exactly ~(~a & b) */
  result = btor_node_invert (rewrite_and_exp (btor, btor_node_invert (e0), e1));
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
4104
4105 /*
4106 * match: a / 2^n
4107 * result: 0 :: a[len(a)-1:n]
4108 */
4109 static inline bool
applies_power2_udiv(Btor * btor,BtorNode * e0,BtorNode * e1)4110 applies_power2_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
4111 {
4112 (void) e0;
4113 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && !btor_node_is_inverted (e1)
4114 && btor_node_is_bv_const (e1)
4115 && btor_bv_power_of_two (btor_node_bv_const_get_bits (e1)) > 0;
4116 }
4117
static inline BtorNode *
apply_power2_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_power2_udiv (btor, e0, e1));

  uint32_t l, n;
  BtorNode *slice, *pad, *result;
  BtorSortId sort;

  /* n = log2 of the divisor, l = width of the dividend */
  n = btor_bv_power_of_two (btor_node_bv_const_get_bits (e1));
  l = btor_node_bv_get_width (btor, e0);
  assert (l > n);

  BTOR_INC_REC_RW_CALL (btor);
  /* division by 2^n is a logical right shift by n: drop the low n bits
   * and zero-extend back to the original width */
  slice = rewrite_slice_exp (btor, e0, l - 1, n);
  sort = btor_sort_bv (btor, n);
  pad = btor_exp_bv_zero (btor, sort);
  btor_sort_release (btor, sort);
  result = rewrite_concat_exp (btor, pad, slice);
  BTOR_DEC_REC_RW_CALL (btor);
  assert (btor_node_bv_get_width (btor, result) == l);
  btor_node_release (btor, pad);
  btor_node_release (btor, slice);
  return result;
}
4143
/*
 * match: a / a
 * result: 1, if a != 0, and ~0 (the all-ones vector) otherwise
 */
4148 static inline bool
applies_one_udiv(Btor * btor,BtorNode * e0,BtorNode * e1)4149 applies_one_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
4150 {
4151 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && e0 == e1;
4152 }
4153
static inline BtorNode *
apply_one_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_one_udiv (btor, e0, e1));
  (void) e1;

  BtorNode *result, *tmp1, *tmp2, *tmp3, *eq, *real_e0;

  real_e0 = btor_node_real_addr (e0);
  BTOR_INC_REC_RW_CALL (btor);
  /* a / a == (a == 0) ? ones : 1 (SMT-LIB: bvudiv by zero yields ones) */
  tmp1 = btor_exp_bv_zero (btor, btor_node_get_sort_id (real_e0));
  tmp2 = btor_exp_bv_one (btor, btor_node_get_sort_id (real_e0));
  tmp3 = btor_exp_bv_ones (btor, btor_node_get_sort_id (real_e0));
  eq = rewrite_eq_exp (btor, e0, tmp1);
  result = rewrite_cond_exp (btor, eq, tmp3, tmp2);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, eq);
  btor_node_release (btor, tmp1);
  btor_node_release (btor, tmp2);
  btor_node_release (btor, tmp3);
  return result;
}
4176
4177 // TODO (ma): conditional rewriting: check if a and c or b and d are constants
4178 /*
4179 * match: (x ? a : b) / (x : c : d), where either a = c or b = d
4180 * result: x ? a / c : b / d
4181 */
4182 static inline bool
applies_bcond_udiv(Btor * btor,BtorNode * e0,BtorNode * e1)4183 applies_bcond_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
4184 {
4185 BtorNode *real_e0, *real_e1;
4186 real_e0 = btor_node_real_addr (e0);
4187 real_e1 = btor_node_real_addr (e1);
4188 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
4189 && btor_node_is_bv_cond (real_e0) && btor_node_is_bv_cond (real_e1)
4190 && btor_node_is_inverted (e0)
4191 == btor_node_is_inverted (e1) // TODO: needed?
4192 && real_e0->e[0] == real_e1->e[0]
4193 && (real_e0->e[1] == real_e1->e[1] || real_e0->e[2] == real_e1->e[2]);
4194 }
4195
static inline BtorNode *
apply_bcond_udiv (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_bcond_udiv (btor, e0, e1));

  BtorNode *result, *left, *right, *real_e0, *real_e1;

  real_e0 = btor_node_real_addr (e0);
  real_e1 = btor_node_real_addr (e1);
  BTOR_INC_REC_RW_CALL (btor);
  /* divide branch-wise; cond_invert propagates any inversion of the
   * conditionals onto the respective branches */
  left = rewrite_udiv_exp (btor,
                           btor_node_cond_invert (e0, real_e0->e[1]),
                           btor_node_cond_invert (e1, real_e1->e[1]));
  right = rewrite_udiv_exp (btor,
                            btor_node_cond_invert (e0, real_e0->e[2]),
                            btor_node_cond_invert (e1, real_e1->e[2]));
  result = rewrite_cond_exp (btor, real_e0->e[0], left, right);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, left);
  btor_node_release (btor, right);
  return result;
}
4218
4219 /* UREM rules */
4220 /* -------------------------------------------------------------------------- */
4221
4222 /*
4223 * match: a % b, where len(a) = 1
4224 * result: a & ~b
4225 */
4226 static inline bool
applies_bool_urem(Btor * btor,BtorNode * e0,BtorNode * e1)4227 applies_bool_urem (Btor *btor, BtorNode *e0, BtorNode *e1)
4228 {
4229 (void) e1;
4230 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
4231 && btor_node_bv_get_width (btor, e0) == 1;
4232 }
4233
static inline BtorNode *
apply_bool_urem (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_bool_urem (btor, e0, e1));

  BtorNode *result;

  BTOR_INC_REC_RW_CALL (btor);
  /* 1-bit urem: b = 1 -> 0, b = 0 -> a (remainder by zero is the
   * dividend), which is exactly a & ~b */
  result = rewrite_and_exp (btor, e0, btor_node_invert (e1));
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
4246
4247 /*
4248 * match: a % a
4249 * result: 0
4250 */
4251 static inline bool
applies_zero_urem(Btor * btor,BtorNode * e0,BtorNode * e1)4252 applies_zero_urem (Btor *btor, BtorNode *e0, BtorNode *e1)
4253 {
4254 (void) btor;
4255 return e0 == e1;
4256 }
4257
4258 static inline BtorNode *
apply_zero_urem(Btor * btor,BtorNode * e0,BtorNode * e1)4259 apply_zero_urem (Btor *btor, BtorNode *e0, BtorNode *e1)
4260 {
4261 assert (applies_zero_urem (btor, e0, e1));
4262 (void) e1;
4263 return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
4264 }
4265
4266 /* CONCAT rules */
4267 /* -------------------------------------------------------------------------- */
4268
4269 /*
4270 * match: (a::c0)::c1
4271 * result: a::c, where c is a new constant obtained from c0::c1
4272 */
4273 static inline bool
applies_const_concat(Btor * btor,BtorNode * e0,BtorNode * e1)4274 applies_const_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
4275 {
4276 BtorNode *real_e0;
4277 real_e0 = btor_node_real_addr (e0);
4278 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e1)
4279 && btor_node_is_bv_concat (real_e0)
4280 && btor_node_is_bv_const (real_e0->e[1]);
4281 }
4282
static inline BtorNode *
apply_const_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_const_concat (btor, e0, e1));

  BtorNode *result, *tmp, *real_e0;

  real_e0 = btor_node_real_addr (e0);

  BTOR_INC_REC_RW_CALL (btor);
  /* re-associate so the two constants become adjacent and fold:
   * (a::c0)::c1 -> a::(c0::c1); cond_invert pushes an inversion of e0
   * onto its operands */
  tmp =
      rewrite_concat_exp (btor, btor_node_cond_invert (e0, real_e0->e[1]), e1);
  result =
      rewrite_concat_exp (btor, btor_node_cond_invert (e0, real_e0->e[0]), tmp);
  btor_node_release (btor, tmp);
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
4301
4302 /*
4303 * match: a[u1:l1]::a[u2:l2], where l1 = u2 + 1
4304 * result: a[u1:l2]
4305 */
4306 static inline bool
applies_slice_concat(Btor * btor,BtorNode * e0,BtorNode * e1)4307 applies_slice_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
4308 {
4309 BtorNode *real_e0, *real_e1;
4310 real_e0 = btor_node_real_addr (e0);
4311 real_e1 = btor_node_real_addr (e1);
4312 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 0
4313 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
4314 && btor_node_is_inverted (e0) == btor_node_is_inverted (e1)
4315 && btor_node_is_bv_slice (real_e0) && btor_node_is_bv_slice (real_e1)
4316 && real_e0->e[0] == real_e1->e[0]
4317 && btor_node_bv_slice_get_lower (real_e0)
4318 == btor_node_bv_slice_get_upper (real_e1) + 1;
4319 }
4320
static inline BtorNode *
apply_slice_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_slice_concat (btor, e0, e1));

  BtorNode *result, *real_e0;

  real_e0 = btor_node_real_addr (e0);
  BTOR_INC_REC_RW_CALL (btor);
  /* merge the two adjacent slices into one */
  result = rewrite_slice_exp (btor,
                              real_e0->e[0],
                              btor_node_bv_slice_get_upper (real_e0),
                              btor_node_bv_slice_get_lower (e1));
  BTOR_DEC_REC_RW_CALL (btor);
  /* restore the (shared) polarity of the original operands */
  result = btor_node_cond_invert (e0, result);
  return result;
}
4338
4339 // NOTE: disabled for now, conflicts with rewriting rule of cond
4340 #if 0
4341 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
4342 !btor_node_is_inverted (e0) &&
4343 e0->kind == BTOR_BCOND_NODE &&
4344 (btor_is_concat_simplifiable (e0->e[1]) ||
4345 btor_is_concat_simplifiable (e0->e[2])))
4346 {
4347 BTOR_INC_REC_RW_CALL (btor);
4348 t = btor_exp_bv_concat (btor, e0->e[1], e1);
4349 e = btor_exp_bv_concat (btor, e0->e[2], e1);
4350 result = btor_exp_cond (btor, e0->e[0], t, e);
4351 btor_node_release (btor, e);
4352 btor_node_release (btor, t);
4353 BTOR_DEC_REC_RW_CALL (btor);
4354 return result;
4355 }
4356 #endif
4357
4358 // NOTE: disabled for now, conflicts with rewriting rule of cond
4359 #if 0
4360 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
4361 btor_node_is_inverted (e0) &&
4362 (real_e0 = btor_node_real_addr (e0))->kind == BTOR_BCOND_NODE &&
4363 (btor_is_concat_simplifiable (real_e0->e[1]) ||
4364 btor_is_concat_simplifiable (real_e0->e[2])))
4365 {
4366 BTOR_INC_REC_RW_CALL (btor);
4367 t = btor_exp_bv_concat (btor, btor_node_invert (real_e0->e[1]), e1);
4368 e = btor_exp_bv_concat (btor, btor_node_invert (real_e0->e[2]), e1);
4369 result = btor_exp_cond (btor, real_e0->e[0], t, e);
4370 btor_node_release (btor, e);
4371 btor_node_release (btor, t);
4372 BTOR_DEC_REC_RW_CALL (btor);
4373 return result;
4374 }
4375 #endif
4376
4377 // NOTE: disabled for now, conflicts with rewriting rule of cond
4378 #if 0
4379 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
4380 !btor_node_is_inverted (e1) &&
4381 e1->kind == BTOR_BCOND_NODE &&
4382 (btor_is_concat_simplifiable (e1->e[1]) ||
4383 btor_is_concat_simplifiable (e1->e[2])))
4384 {
4385 BTOR_INC_REC_RW_CALL (btor);
4386 t = btor_exp_bv_concat (btor, e0, e1->e[1]);
4387 e = btor_exp_bv_concat (btor, e0, e1->e[2]);
4388 result = btor_exp_cond (btor, e1->e[0], t, e);
4389 btor_node_release (btor, e);
4390 btor_node_release (btor, t);
4391 BTOR_DEC_REC_RW_CALL (btor);
4392 return result;
4393 }
4394 #endif
4395
4396 // NOTE: disabled for now, conflicts with rewriting rule of cond
4397 #if 0
4398 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2 &&
4399 btor_node_is_inverted (e1) &&
4400 (real_e1 = btor_node_real_addr (e1))->kind == BTOR_BCOND_NODE &&
4401 (btor_is_concat_simplifiable (real_e1->e[1]) ||
4402 btor_is_concat_simplifiable (real_e1->e[2])))
4403 {
4404 BTOR_INC_REC_RW_CALL (btor);
4405 t = btor_exp_bv_concat (btor, e0, btor_node_invert (real_e1->e[1]));
4406 e = btor_exp_bv_concat (btor, e0, btor_node_invert (real_e1->e[2]));
4407 result = btor_exp_cond (btor, real_e1->e[0], t, e);
4408 btor_node_release (btor, e);
4409 btor_node_release (btor, t);
4410 BTOR_DEC_REC_RW_CALL (btor);
4411 return result;
4412 }
4413 #endif
4414
4415 /*
4416 * match: (a & b)::c
4417 * result: (a::c) & (b::c)
4418 */
4419 static inline bool
applies_and_lhs_concat(Btor * btor,BtorNode * e0,BtorNode * e1)4420 applies_and_lhs_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
4421 {
4422 (void) e1;
4423 BtorNode *real_e0;
4424 real_e0 = btor_node_real_addr (e0);
4425 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
4426 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
4427 && btor_node_is_bv_and (real_e0)
4428 && (is_concat_simplifiable (real_e0->e[0])
4429 || is_concat_simplifiable (real_e0->e[1]));
4430 }
4431
static inline BtorNode *
apply_and_lhs_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_and_lhs_concat (btor, e0, e1));

  BtorNode *result, *left, *right, *real_e0, *real_e1;

  real_e0 = btor_node_real_addr (e0);
  BTOR_INC_REC_RW_CALL (btor);
  /* distribute the concat over the AND; an inversion of e0 turns the AND
   * into an OR (De Morgan), which is handled by inverting e1 on the way in
   * and the result on the way out */
  left =
      rewrite_concat_exp (btor, real_e0->e[0], btor_node_cond_invert (e0, e1));
  right =
      rewrite_concat_exp (btor, real_e0->e[1], btor_node_cond_invert (e0, e1));
  result = btor_exp_bv_and (btor, left, right);
  result = btor_node_cond_invert (e0, result);
  btor_node_release (btor, right);
  btor_node_release (btor, left);
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
4452
4453 /*
4454 * match: a::(b & c)
4455 * result: (a::b) & (a::c)
4456 */
4457 static inline bool
applies_and_rhs_concat(Btor * btor,BtorNode * e0,BtorNode * e1)4458 applies_and_rhs_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
4459 {
4460 (void) e0;
4461 BtorNode *real_e1;
4462 real_e1 = btor_node_real_addr (e1);
4463 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
4464 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
4465 && btor_node_is_bv_and (real_e1)
4466 && (is_concat_simplifiable (real_e1->e[0])
4467 || is_concat_simplifiable (real_e1->e[1]));
4468 }
4469
static inline BtorNode *
apply_and_rhs_concat (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_and_rhs_concat (btor, e0, e1));

  BtorNode *result, *left, *right, *real_e1;

  real_e1 = btor_node_real_addr (e1);
  BTOR_INC_REC_RW_CALL (btor);
  /* mirror image of apply_and_lhs_concat: distribute the concat over the
   * AND on the right-hand side, propagating an inversion of e1 via e0 and
   * the final result */
  left =
      rewrite_concat_exp (btor, btor_node_cond_invert (e1, e0), real_e1->e[0]);
  right =
      rewrite_concat_exp (btor, btor_node_cond_invert (e1, e0), real_e1->e[1]);
  result = btor_exp_bv_and (btor, left, right);
  result = btor_node_cond_invert (e1, result);
  btor_node_release (btor, right);
  btor_node_release (btor, left);
  BTOR_DEC_REC_RW_CALL (btor);
  return result;
}
4490
4491 /* SLL rules */
4492 /* -------------------------------------------------------------------------- */
4493
4494 /*
4495 * match: a << c, where c is a constant
4496 * result: a[len(a)-val(c)-1:0]::0
4497 */
4498 static inline bool
applies_const_sll(Btor * btor,BtorNode * e0,BtorNode * e1)4499 applies_const_sll (Btor *btor, BtorNode *e0, BtorNode *e1)
4500 {
4501 (void) e0;
4502 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e1)
4503 && btor_node_bv_get_width (btor, e1) <= 32;
4504 }
4505
static inline BtorNode *
apply_const_sll (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_const_sll (btor, e0, e1));

  uint32_t shiftlen, width;
  BtorBitVector *bits;
  BtorNode *result, *real_e0, *real_e1, *pad, *slice;
  BtorSortId sort;

  real_e0 = btor_node_real_addr (e0);
  real_e1 = btor_node_real_addr (e1);

  /* shift by zero is the identity */
  if (is_const_zero_exp (btor, e1)) return btor_node_copy (btor, e0);

  bits = btor_node_bv_const_get_bits (real_e1);
  width = btor_node_bv_get_width (btor, real_e0);
  assert (btor_bv_get_width (bits) == width);
  /* an inverted constant node stores the bits of the non-inverted node;
   * the temporary inverted copy is freed again below */
  if (btor_node_is_inverted (e1)) bits = btor_bv_not (btor->mm, bits);
  shiftlen = (uint32_t) btor_bv_to_uint64 (bits);
  assert (shiftlen > 0); /* zero shift was handled above */
  if (btor_node_is_inverted (e1)) btor_bv_free (btor->mm, bits);
  if (shiftlen >= width)
  {
    /* every bit is shifted out -> zero */
    sort = btor_sort_bv (btor, width);
    result = btor_exp_bv_zero (btor, sort);
    btor_sort_release (btor, sort);
  }
  else
  {
    BTOR_INC_REC_RW_CALL (btor);
    /* keep the low (width - shiftlen) bits and pad with zeros below */
    sort = btor_sort_bv (btor, shiftlen);
    pad = btor_exp_bv_zero (btor, sort);
    btor_sort_release (btor, sort);
    slice = rewrite_slice_exp (
        btor, e0, btor_node_bv_get_width (btor, real_e0) - shiftlen - 1, 0);
    result = rewrite_concat_exp (btor, slice, pad);
    BTOR_DEC_REC_RW_CALL (btor);
    btor_node_release (btor, pad);
    btor_node_release (btor, slice);
  }
  assert (btor_node_get_sort_id (result) == btor_node_get_sort_id (real_e0));
  return result;
}
4550
4551 /* SRL rules */
4552 /* -------------------------------------------------------------------------- */
4553
4554 /*
4555 * match: a >> c, where c is a constant
4556 * result: 0::a[len(a)-1:val(c)]
4557 */
4558 static inline bool
applies_const_srl(Btor * btor,BtorNode * e0,BtorNode * e1)4559 applies_const_srl (Btor *btor, BtorNode *e0, BtorNode *e1)
4560 {
4561 (void) e0;
4562 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_const (e1)
4563 && btor_node_bv_get_width (btor, e1) <= 32;
4564 }
4565
static inline BtorNode *
apply_const_srl (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_const_srl (btor, e0, e1));

  uint32_t width, shiftlen;
  BtorBitVector *bits;
  BtorNode *result, *real_e0, *real_e1, *pad, *slice;
  BtorSortId sort;

  real_e0 = btor_node_real_addr (e0);
  real_e1 = btor_node_real_addr (e1);

  /* shift by zero is the identity */
  if (is_const_zero_exp (btor, e1)) return btor_node_copy (btor, e0);

  bits = btor_node_bv_const_get_bits (real_e1);
  width = btor_node_bv_get_width (btor, real_e0);
  assert (btor_bv_get_width (bits) == width);
  /* an inverted constant node stores the bits of the non-inverted node;
   * the temporary inverted copy is freed again below */
  if (btor_node_is_inverted (e1)) bits = btor_bv_not (btor->mm, bits);
  shiftlen = (uint32_t) btor_bv_to_uint64 (bits);
  assert (shiftlen > 0); /* zero shift was handled above */
  if (btor_node_is_inverted (e1)) btor_bv_free (btor->mm, bits);
  if (shiftlen >= width)
  {
    /* every bit is shifted out -> zero */
    sort = btor_sort_bv (btor, width);
    result = btor_exp_bv_zero (btor, sort);
    btor_sort_release (btor, sort);
  }
  else
  {
    BTOR_INC_REC_RW_CALL (btor);
    /* keep the high (width - shiftlen) bits and pad with zeros above */
    sort = btor_sort_bv (btor, shiftlen);
    pad = btor_exp_bv_zero (btor, sort);
    btor_sort_release (btor, sort);
    slice = rewrite_slice_exp (
        btor, e0, btor_node_bv_get_width (btor, real_e0) - 1, shiftlen);
    result = rewrite_concat_exp (btor, pad, slice);
    BTOR_DEC_REC_RW_CALL (btor);
    btor_node_release (btor, pad);
    btor_node_release (btor, slice);
  }
  assert (btor_node_get_sort_id (result) == btor_node_get_sort_id (real_e0));
  return result;
}
4610
4611 /*
4612 * e0 >> e0 == 0
4613 * match: e0_[bw] == e1_[bw]
4614 * result: 0_[bw]
4615 */
4616 static inline bool
applies_same_srl(Btor * btor,BtorNode * e0,BtorNode * e1)4617 applies_same_srl (Btor *btor, BtorNode *e0, BtorNode *e1)
4618 {
4619 (void) btor;
4620 return e0 == e1;
4621 }
4622
4623 static inline BtorNode *
apply_same_srl(Btor * btor,BtorNode * e0,BtorNode * e1)4624 apply_same_srl (Btor *btor, BtorNode *e0, BtorNode *e1)
4625 {
4626 (void) e1;
4627 assert (applies_same_srl (btor, e0, e1));
4628 return btor_exp_bv_zero (btor, btor_node_get_sort_id (e0));
4629 }
4630
4631 /* APPLY rules */
4632 /* -------------------------------------------------------------------------- */
4633
4634 /*
4635 * match: (\lambda x . t)(a), where term t does not contain param x
4636 * result: t
4637 */
4638 static inline bool
applies_const_lambda_apply(Btor * btor,BtorNode * e0,BtorNode * e1)4639 applies_const_lambda_apply (Btor *btor, BtorNode *e0, BtorNode *e1)
4640 {
4641 (void) btor;
4642 (void) e1;
4643 return btor_node_is_lambda (e0)
4644 && !btor_node_real_addr (btor_node_binder_get_body (e0))
4645 ->parameterized;
4646 }
4647
static inline BtorNode *
apply_const_lambda_apply (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_const_lambda_apply (btor, e0, e1));
  (void) e1;
  /* the body does not reference the parameter, so beta reduction is a
   * plain copy of the body */
  return btor_node_copy (btor,
                         btor_node_binder_get_body (btor_node_real_addr (e0)));
}
4656
4657 /*
4658 * match: (\lambda x . x)(a)
4659 * result: a
4660 */
4661 static inline bool
applies_param_lambda_apply(Btor * btor,BtorNode * e0,BtorNode * e1)4662 applies_param_lambda_apply (Btor *btor, BtorNode *e0, BtorNode *e1)
4663 {
4664 (void) btor;
4665 (void) e1;
4666 return btor_node_is_lambda (e0) && !e0->parameterized
4667 && btor_node_is_param (btor_node_binder_get_body (e0));
4668 }
4669
static inline BtorNode *
apply_param_lambda_apply (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_param_lambda_apply (btor, e0, e1));

  BtorNode *result, *body;

  body = btor_node_binder_get_body (e0);
  /* temporarily bind the argument, then read it back from the param */
  btor_beta_assign_args (btor, e0, e1);
  result = btor_node_copy (
      btor, btor_node_param_get_assigned_exp (btor_node_real_addr (body)));
  btor_beta_unassign_params (btor, e0);
  /* carry over a possible inversion of the body */
  result = btor_node_cond_invert (body, result);
  return result;
}
4685
4686 /*
4687 * match: (\lambda x . f(x))(a)
4688 * result: f(a)
4689 */
4690 static inline bool
applies_apply_apply(Btor * btor,BtorNode * e0,BtorNode * e1)4691 applies_apply_apply (Btor *btor, BtorNode *e0, BtorNode *e1)
4692 {
4693 (void) e1;
4694 BtorNode *real_body;
4695 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_lambda (e0)
4696 && btor_node_is_apply ((real_body = btor_node_real_addr (
4697 btor_node_binder_get_body (e0))))
4698 && !real_body->e[0]->parameterized;
4699 }
4700
static inline BtorNode *
apply_apply_apply (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_apply_apply (btor, e0, e1));

  BtorNode *result, *real_body, *body;

  body = btor_node_binder_get_body (e0);
  real_body = btor_node_real_addr (body);
  BTOR_INC_REC_RW_CALL (btor);
  /* beta-reduce the inner argument tuple under the outer binding;
   * NOTE: e0/e1 are rebound below to the inner function and its args */
  btor_beta_assign_args (btor, e0, e1);
  e1 = btor_beta_reduce_bounded (btor, real_body->e[1], 1);
  btor_beta_unassign_params (btor, e0);
  e0 = btor_simplify_exp (btor, real_body->e[0]);
  assert (btor_node_is_fun (e0));
  assert (btor_node_is_args (e1));
  result = rewrite_apply_exp (btor, e0, e1);
  BTOR_DEC_REC_RW_CALL (btor);
  btor_node_release (btor, e1);
  /* carry over a possible inversion of the lambda's body */
  result = btor_node_cond_invert (body, result);
  return result;
}
4723
4724 /*
4725 * propagate apply over parameterized bv conditionals
4726 */
4727 static inline bool
applies_prop_apply_lambda(Btor * btor,BtorNode * e0,BtorNode * e1)4728 applies_prop_apply_lambda (Btor *btor, BtorNode *e0, BtorNode *e1)
4729 {
4730 (void) btor;
4731 (void) e1;
4732 return btor_node_is_lambda (e0)
4733 && btor_node_is_bv_cond (btor_node_binder_get_body (e0));
4734 ;
4735 }
4736
4737 static inline BtorNode *
apply_prop_apply_lambda(Btor * btor,BtorNode * e0,BtorNode * e1)4738 apply_prop_apply_lambda (Btor *btor, BtorNode *e0, BtorNode *e1)
4739 {
4740 assert (applies_prop_apply_lambda (btor, e0, e1));
4741
4742 BtorNode *result, *cur_fun, *next_fun, *cur_args, *e_cond, *array, *args;
4743 BtorNode *beta_cond, *cur_cond, *real_cur_cond;
4744 BtorNode *cur_branch, *real_cur_branch;
4745 BtorNode *index, *write_index, *value;
4746 bool done, inv_result_tmp;
4747 uint32_t propagations, apply_propagations, inv_result;
4748
4749 done = 0;
4750 result = 0;
4751 propagations = 0;
4752 apply_propagations = 0;
4753 inv_result = 0;
4754 inv_result_tmp = false;
4755
4756 cur_fun = e0;
4757 cur_args = btor_node_copy (btor, e1);
4758
4759 /* try to propagate apply over bv conditionals were conditions evaluate to
4760 * true if beta reduced with 'cur_args'. */
4761 cur_cond =
4762 btor_node_is_lambda (cur_fun) ? btor_node_binder_get_body (cur_fun) : 0;
4763 while (!done && btor_node_is_lambda (cur_fun) && !cur_fun->parameterized
4764 && !cur_args->parameterized
4765 && (real_cur_cond = btor_node_real_addr (cur_cond))
4766 && btor_node_is_bv_cond (real_cur_cond)
4767 /* if the condition is not parameterized the check was already done
4768 * while creating 'cur_cond' */
4769 && btor_node_real_addr (real_cur_cond->e[0])->parameterized
4770 && propagations++ < BTOR_APPLY_PROPAGATION_LIMIT)
4771 {
4772 assert (cur_cond);
4773 assert (btor_node_is_regular (cur_fun));
4774 assert (btor_node_is_regular (cur_args));
4775 assert (!result);
4776
4777 next_fun = 0;
4778 /* optimization for lambdas representing array writes */
4779 if (is_write_exp (cur_fun, &array, &write_index, &value))
4780 {
4781 index = cur_args->e[0];
4782 /* found value at 'index' */
4783 if (write_index == index)
4784 {
4785 result = btor_node_copy (btor, value);
4786 done = 1;
4787 }
4788 /* propagate down to 'array' */
4789 else if (is_always_unequal (btor, write_index, index))
4790 {
4791 next_fun = array;
4792 apply_propagations++;
4793 }
4794 else
4795 goto REWRITE_APPLY_GENERAL_CASE;
4796 }
4797 /* more general case: any lambda with bv cond as body */
4798 else
4799 {
4800 REWRITE_APPLY_GENERAL_CASE:
4801 e_cond = real_cur_cond->e[0];
4802
4803 if (!btor_node_real_addr (e_cond)->parameterized) break;
4804
4805 /* 'inv_result_tmp' indicates if the result obtained from the
4806 * current propagation path needs to be inverted. in case we really
4807 * find a result, 'inv_result' will be inverted w.r.t.
4808 * 'inv_result_tmp'. */
4809 if (btor_node_is_inverted (cur_cond)) inv_result_tmp = !inv_result_tmp;
4810
4811 btor_beta_assign_args (btor, cur_fun, cur_args);
4812 beta_cond = btor_beta_reduce_bounded (btor, e_cond, 1);
4813 /* condition of bv cond is either true or false */
4814 if (btor_node_is_bv_const (beta_cond))
4815 {
4816 if (is_true_cond (beta_cond))
4817 cur_branch = real_cur_cond->e[1];
4818 else
4819 cur_branch = real_cur_cond->e[2];
4820
4821 real_cur_branch = btor_node_real_addr (cur_branch);
4822 /* branch not parameterized, we found a result */
4823 if (!real_cur_branch->parameterized)
4824 {
4825 result = btor_node_copy (btor, real_cur_branch);
4826 done = 1;
4827 goto HAVE_RESULT_CHECK_INVERTED;
4828 }
4829 else if (btor_node_is_param (real_cur_branch))
4830 {
4831 if ((value = btor_node_param_get_assigned_exp (real_cur_branch)))
4832 result = btor_node_copy (btor, value);
4833 else
4834 result = btor_node_copy (btor, real_cur_branch);
4835 done = 1;
4836 goto HAVE_RESULT_CHECK_INVERTED;
4837 }
4838 /* propagate down to next function */
4839 else if (btor_node_is_apply (real_cur_branch))
4840 {
4841 args = btor_beta_reduce_bounded (btor, real_cur_branch->e[1], 1);
4842 assert (btor_node_is_regular (args));
4843 assert (btor_node_is_args (args));
4844 /* nested lambda */
4845 if (btor_node_is_lambda (real_cur_branch->e[0])
4846 && real_cur_branch->e[0]->parameterized)
4847 {
4848 btor_beta_assign_args (btor, real_cur_branch->e[0], args);
4849 result = btor_beta_reduce_bounded (btor, real_cur_branch->e[0], 1);
4850 btor_beta_unassign_params (btor, real_cur_branch->e[0]);
4851 assert (!btor_node_is_fun (result));
4852
4853 /* propagate down to 'next_fun' */
4854 if (btor_node_is_apply (result))
4855 {
4856 next_fun = btor_node_real_addr (result)->e[0];
4857 btor_node_release (btor, args);
4858 args = btor_node_copy (btor, btor_node_real_addr (result)->e[1]);
4859 /* result is not needed here as it may be further
4860 * rewritten */
4861 btor_node_release (btor, result);
4862 result = 0;
4863 }
4864 else
4865 done = 1;
4866 }
4867 /* beta reduce parameterized condition and select branch */
4868 else if (btor_node_is_fun_cond (real_cur_branch->e[0])
4869 && real_cur_branch->e[0]->parameterized)
4870 {
4871 assert (real_cur_branch->e[0]->e[0]->parameterized);
4872 assert (!real_cur_branch->e[0]->e[1]->parameterized);
4873 assert (!real_cur_branch->e[0]->e[2]->parameterized);
4874 result =
4875 btor_beta_reduce_bounded (btor, real_cur_branch->e[0]->e[0], 1);
4876
4877 if (btor_node_is_bv_const (result))
4878 {
4879 if (result == btor->true_exp)
4880 next_fun = real_cur_branch->e[0]->e[1];
4881 else
4882 next_fun = real_cur_branch->e[0]->e[2];
4883 }
4884 btor_node_release (btor, result);
4885 result = 0;
4886 /* no branch can be selected, we are done */
4887 if (!next_fun)
4888 {
4889 btor_node_release (btor, args);
4890 goto REWRITE_APPLY_NO_RESULT_DONE;
4891 }
4892 }
4893 /* propagate down to 'next_fun' */
4894 else
4895 {
4896 next_fun = real_cur_branch->e[0];
4897 assert (btor_node_is_fun (next_fun));
4898 }
4899
4900 /* set arguments for new functin application */
4901 btor_node_release (btor, cur_args);
4902 cur_args = args;
4903
4904 HAVE_RESULT_CHECK_INVERTED:
4905 assert (result || next_fun);
4906 assert (!result || !next_fun);
4907 assert (!done || result);
4908 /* at this point we already have a result, which is either
4909 * a value obtained by beta reducing 'cur_fun' or a
4910 * function application on 'next_fun' with 'cur_args'.
4911 * in the latter case, we try to further rewrite the function
4912 * application. */
4913
4914 /* if 'cur_branch' is inverted we need to invert the result */
4915 if (btor_node_is_inverted (cur_branch))
4916 inv_result_tmp = !inv_result_tmp;
4917
4918 /* we got a result, we can savely set 'inv_result' */
4919 if (inv_result_tmp)
4920 {
4921 inv_result = !inv_result;
4922 inv_result_tmp = false;
4923 }
4924 apply_propagations++;
4925 }
4926 /* check if we can further propagate down along a conditional */
4927 else if (btor_node_is_bv_cond (real_cur_branch))
4928 {
4929 cur_cond = cur_branch;
4930 }
4931 /* cur_branch is some other parameterized term that we don't
4932 * expand */
4933 // TODO (ma): try to expand more parameterized terms?
4934 else
4935 goto REWRITE_APPLY_NO_RESULT_DONE;
4936 }
4937 else
4938 {
4939 REWRITE_APPLY_NO_RESULT_DONE:
4940 assert (!result);
4941 done = 1;
4942 }
4943 btor_beta_unassign_params (btor, cur_fun);
4944 btor_node_release (btor, beta_cond);
4945 }
4946
4947 if (next_fun)
4948 {
4949 cur_fun = next_fun;
4950 cur_cond = btor_node_is_lambda (cur_fun)
4951 ? btor_node_binder_get_body (cur_fun)
4952 : 0;
4953 }
4954 assert (!result || done);
4955 }
4956
4957 /* check if apply was propagated down to 'cur_fun' */
4958 if (!result && cur_fun != e0)
4959 result = btor_node_create_apply (btor, cur_fun, cur_args);
4960
4961 btor_node_release (btor, cur_args);
4962
4963 if (result && inv_result) result = btor_node_invert (result);
4964
4965 btor->stats.prop_apply_lambda += apply_propagations;
4966 return result;
4967 }
4968
/*
 * match:  apply on an update chain
 * result: the updated value if the apply arguments provably equal an
 *         update's index tuple; otherwise the apply is propagated past
 *         all updates whose indices are provably different
 */
4972 static inline bool
applies_prop_apply_update(Btor * btor,BtorNode * e0,BtorNode * e1)4973 applies_prop_apply_update (Btor *btor, BtorNode *e0, BtorNode *e1)
4974 {
4975 (void) btor;
4976 (void) e1;
4977 return btor_node_is_update (e0);
4978 }
4979
static inline BtorNode *
apply_prop_apply_update (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  assert (applies_prop_apply_update (btor, e0, e1));

  uint32_t propagations = 0, num_eq;
  bool prop_down;
  BtorNode *cur, *args, *value, *a1, *a2, *result = 0, *eq;
  BtorArgsIterator it1, it2;

  /* Walk down the update chain starting at 'e0' and try to resolve the
   * apply on argument tuple 'e1'. */
  cur = e0;
  while (btor_node_is_update (cur))
  {
    args = cur->e[1];  /* index tuple written by this update */
    value = cur->e[2]; /* value stored at 'args' */

    /* syntactically identical index tuple: apply reads the stored value */
    if (e1 == args)
    {
      propagations++;
      result = btor_node_copy (btor, value);
      break;
    }

    prop_down = false;
    num_eq = 0;
    assert (e1->sort_id == args->sort_id);
    /* compare the two index tuples position by position */
    btor_iter_args_init (&it1, e1);
    btor_iter_args_init (&it2, args);
    while (btor_iter_args_has_next (&it1))
    {
      assert (btor_iter_args_has_next (&it2));
      a1 = btor_iter_args_next (&it1);
      a2 = btor_iter_args_next (&it2);

      /* indices provably different: this update cannot affect the apply,
       * propagate down to the updated function */
      if (is_always_unequal (btor, a1, a2))
      {
        prop_down = true;
        break;
      }

      BTOR_INC_REC_RW_CALL (btor);
      eq = rewrite_eq_exp (btor, a1, a2);
      BTOR_DEC_REC_RW_CALL (btor);
      if (eq == btor_node_invert (btor->true_exp))
      {
        /* rewriting decided the indices differ */
        btor_node_release (btor, eq);
        prop_down = true;
        break;
      }
      else if (eq == btor->true_exp)
      {
        num_eq++;
      }
      btor_node_release (btor, eq);
    }

    /* every position provably equal: the apply reads 'value' */
    if (num_eq == btor_node_args_get_arity (btor, args))
    {
      propagations++;
      result = btor_node_copy (btor, value);
      break;
    }

    if (prop_down)
    {
      propagations++;
      cur = cur->e[0];
    }
    else
      /* (dis)equality of some index pair is unknown, stop propagating */
      break;
  }

  /* propagated until 'cur', create apply on 'cur' */
  if (!result)
  {
    if (btor_node_is_const_array (cur))
    {
      /* a constant array yields its default value for any index */
      result = btor_node_copy (btor, cur->e[1]);
    }
    else
    {
      BTOR_INC_REC_RW_CALL (btor);
      result = btor_node_create_apply (btor, cur, e1);
      BTOR_DEC_REC_RW_CALL (btor);
    }
  }
  btor->stats.prop_apply_update += propagations;
  return result;
}
5069
5070 /* LAMBDA rules */
5071
5072 #if 0
5073 // TODO (ma): this rule cannot be applied yet as it may produce lambdas with
5074 // different sorts
5075 /*
5076 * match: (\lambda j . (\lambda k . t)(j))
5077 * result: \lambda k . t
5078 */
5079 static inline bool
5080 applies_lambda_lambda (Btor * btor, BtorNode * e0, BtorNode * e1)
5081 {
5082 return !btor_node_is_inverted (e1)
5083 && btor_node_is_apply (e1)
5084 && !e1->e[0]->parameterized
5085 && e1->e[1]->arity == 1
5086 && e1->e[1]->e[0] == e0;
5087 }
5088
5089 static inline BtorNode *
5090 apply_lambda_lambda (Btor * btor, BtorNode * e0, BtorNode * e1)
5091 {
5092 return btor_node_copy (btor, e1->e[0]);
5093 }
5094 #endif
5095
5096
5097 /* QUANTIFIER rules */
5098 /* -------------------------------------------------------------------------- */
5099
/*
 * match:  Qx . t, where the body t does not contain the bound variable x
 * result: t
 */
5103 static inline bool
applies_const_quantifier(Btor * btor,BtorNode * param,BtorNode * body)5104 applies_const_quantifier (Btor *btor, BtorNode *param, BtorNode *body)
5105 {
5106 (void) btor;
5107 (void) param;
5108 return !btor_node_real_addr (body)->parameterized;
5109 }
5110
5111 static inline BtorNode *
apply_const_quantifier(Btor * btor,BtorNode * param,BtorNode * body)5112 apply_const_quantifier (Btor *btor, BtorNode *param, BtorNode *body)
5113 {
5114 assert (applies_const_quantifier (btor, param, body));
5115 (void) param;
5116 return btor_node_copy (btor, body);
5117 }
5118
5119 /* FORALL rules */
5120 /* -------------------------------------------------------------------------- */
5121
5122 #if 0
5123
5124 /*
5125 * match: (\forall x . t) where x does not occur in t
5126 * result: t
5127 */
5128 static inline bool
5129 applies_param_free_forall (Btor * btor, BtorNode * param, BtorNode * body)
5130 {
5131 (void) btor;
5132 (void) body;
5133 return param->parents == 0;
5134 }
5135
5136 static inline BtorNode *
5137 apply_param_free_forall (Btor * btor, BtorNode * param, BtorNode * body)
5138 {
5139 assert (applies_param_free_forall (btor, param, body));
5140 (void) param;
5141 return btor_node_copy (btor, body);
5142 }
5143
5144 #endif
5145
5146 /*
5147 * match: \forall x . x = t if x \not \in vars(t)
5148 * match: \forall x . x != t if x \not \in vars(t)
5149 * result: false
5150 */
5151 static inline bool
applies_eq_forall(Btor * btor,BtorNode * param,BtorNode * body)5152 applies_eq_forall (Btor *btor, BtorNode *param, BtorNode *body)
5153 {
5154 (void) btor;
5155 (void) body;
5156 BtorNode *real_body;
5157 real_body = btor_node_real_addr (body);
5158 return btor_node_is_bv_eq (body)
5159 && param->parents == 1 // only parent is body
5160 && ((real_body->e[0] == param
5161 && !btor_node_real_addr (real_body->e[1])->quantifier_below)
5162 || (real_body->e[1] == param
5163 && !btor_node_real_addr (real_body->e[0])->quantifier_below));
5164 }
5165
5166 static inline BtorNode *
apply_eq_forall(Btor * btor,BtorNode * param,BtorNode * body)5167 apply_eq_forall (Btor *btor, BtorNode *param, BtorNode *body)
5168 {
5169 assert (applies_eq_forall (btor, param, body));
5170 (void) param;
5171 (void) body;
5172 return btor_exp_false (btor);
5173 }
5174
5175 #if 0
5176
5177 /* EXISTS rules */
5178 /* -------------------------------------------------------------------------- */
5179
5180 /*
5181 * match: (\exists x . t) where x does not occur in t
5182 * result: t
5183 */
5184 static inline bool
5185 applies_param_free_exists (Btor * btor, BtorNode * param, BtorNode * body)
5186 {
5187 (void) btor;
5188 (void) body;
5189 return param->parents == 0;
5190 }
5191
5192 static inline BtorNode *
5193 apply_param_free_exists (Btor * btor, BtorNode * param, BtorNode * body)
5194 {
5195 assert (applies_param_free_exists (btor, param, body));
5196 (void) param;
5197 return btor_node_copy (btor, body);
5198 }
5199
5200 #endif
5201
5202 /*
5203 * match: \exists x . x = t if x \not \in vars(t)
5204 * match: \exists x . x != t if x \not \in vars(t)
5205 * result: true
5206 */
5207 static inline bool
applies_eq_exists(Btor * btor,BtorNode * param,BtorNode * body)5208 applies_eq_exists (Btor *btor, BtorNode *param, BtorNode *body)
5209 {
5210 (void) btor;
5211 (void) body;
5212 BtorNode *real_body;
5213 real_body = btor_node_real_addr (body);
5214 return btor_node_is_bv_eq (body)
5215 && param->parents == 1 // only parent is body
5216 && ((real_body->e[0] == param
5217 && !btor_node_real_addr (real_body->e[1])->quantifier_below)
5218 || (real_body->e[1] == param
5219 && !btor_node_real_addr (real_body->e[0])->quantifier_below));
5220 }
5221
5222 static inline BtorNode *
apply_eq_exists(Btor * btor,BtorNode * param,BtorNode * body)5223 apply_eq_exists (Btor *btor, BtorNode *param, BtorNode *body)
5224 {
5225 assert (applies_eq_exists (btor, param, body));
5226 (void) param;
5227 (void) body;
5228 return btor_exp_true (btor);
5229 }
5230
5231 /* COND rules */
5232 /* -------------------------------------------------------------------------- */
5233
5234 /*
5235 * match: c ? a : a
5236 * result: a
5237 */
5238 static inline bool
applies_equal_branches_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5239 applies_equal_branches_cond (Btor *btor,
5240 BtorNode *e0,
5241 BtorNode *e1,
5242 BtorNode *e2)
5243 {
5244 (void) btor;
5245 (void) e0;
5246 return e1 == e2;
5247 }
5248
5249 static inline BtorNode *
apply_equal_branches_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5250 apply_equal_branches_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5251 {
5252 assert (applies_equal_branches_cond (btor, e0, e1, e2));
5253 (void) e0;
5254 (void) e2;
5255 return btor_node_copy (btor, e1);
5256 }
5257
5258 /*
5259 * match: c ? a : b, where c is a constant
5260 * result: a if c is true, and b otherwise
5261 */
5262 static inline bool
applies_const_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5263 applies_const_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5264 {
5265 (void) btor;
5266 (void) e1;
5267 (void) e2;
5268 return btor_node_is_bv_const (e0);
5269 }
5270
5271 static inline BtorNode *
apply_const_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5272 apply_const_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5273 {
5274 assert (applies_const_cond (btor, e0, e1, e2));
5275 if (btor_bv_get_bit (btor_node_bv_const_get_bits (e0), 0))
5276 return btor_node_copy (btor, e1);
5277 return btor_node_copy (btor, e2);
5278 }
5279
5280 /*
5281 * match: c0 ? (c0 ? a : b) : c
5282 * result: c0 ? a : c
5283 */
5284 static inline bool
applies_cond_if_dom_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5285 applies_cond_if_dom_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5286 {
5287 (void) e2;
5288 BtorNode *real_e1;
5289 real_e1 = btor_node_real_addr (e1);
5290 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (real_e1)
5291 && real_e1->e[0] == e0;
5292 }
5293
5294 static inline BtorNode *
apply_cond_if_dom_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5295 apply_cond_if_dom_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5296 {
5297 assert (applies_cond_if_dom_cond (btor, e0, e1, e2));
5298
5299 BtorNode *result;
5300
5301 BTOR_INC_REC_RW_CALL (btor);
5302 result = rewrite_cond_exp (
5303 btor, e0, btor_node_cond_invert (e1, btor_node_real_addr (e1)->e[1]), e2);
5304 BTOR_DEC_REC_RW_CALL (btor);
5305 return result;
5306 }
5307
5308 /*
5309 * match: c0 ? (c1 ? a : b) : a
5310 * result: c0 AND ~c1 ? b : a
5311 */
5312 static inline bool
applies_cond_if_merge_if_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5313 applies_cond_if_merge_if_cond (Btor *btor,
5314 BtorNode *e0,
5315 BtorNode *e1,
5316 BtorNode *e2)
5317 {
5318 (void) e0;
5319 BtorNode *real_e1;
5320 real_e1 = btor_node_real_addr (e1);
5321 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (real_e1)
5322 && btor_node_cond_invert (e1, real_e1->e[1]) == e2;
5323 }
5324
5325 static inline BtorNode *
apply_cond_if_merge_if_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5326 apply_cond_if_merge_if_cond (Btor *btor,
5327 BtorNode *e0,
5328 BtorNode *e1,
5329 BtorNode *e2)
5330 {
5331 assert (applies_cond_if_merge_if_cond (btor, e0, e1, e2));
5332
5333 BtorNode *result, *tmp, *e10, *e12, *real_e1;
5334
5335 real_e1 = btor_node_real_addr (e1);
5336 e10 = real_e1->e[0];
5337 e12 = btor_node_cond_invert (e1, real_e1->e[2]);
5338 BTOR_INC_REC_RW_CALL (btor);
5339 tmp = rewrite_and_exp (btor, e0, btor_node_invert (e10));
5340 result = rewrite_cond_exp (btor, tmp, e12, e2);
5341 BTOR_DEC_REC_RW_CALL (btor);
5342 btor_node_release (btor, tmp);
5343 return result;
5344 }
5345
5346 /*
5347 * match: c0 ? (c1 ? b : a) : a
5348 * result: c0 AND c1 ? b : a
5349 */
5350 static inline bool
applies_cond_if_merge_else_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5351 applies_cond_if_merge_else_cond (Btor *btor,
5352 BtorNode *e0,
5353 BtorNode *e1,
5354 BtorNode *e2)
5355 {
5356 (void) e0;
5357 BtorNode *real_e1;
5358 real_e1 = btor_node_real_addr (e1);
5359 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (real_e1)
5360 && btor_node_cond_invert (e1, real_e1->e[2]) == e2;
5361 }
5362
5363 static inline BtorNode *
apply_cond_if_merge_else_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5364 apply_cond_if_merge_else_cond (Btor *btor,
5365 BtorNode *e0,
5366 BtorNode *e1,
5367 BtorNode *e2)
5368 {
5369 assert (applies_cond_if_merge_else_cond (btor, e0, e1, e2));
5370
5371 BtorNode *result, *tmp, *e10, *e11, *real_e1;
5372
5373 real_e1 = btor_node_real_addr (e1);
5374 e10 = real_e1->e[0];
5375 e11 = btor_node_cond_invert (e1, real_e1->e[1]);
5376 BTOR_INC_REC_RW_CALL (btor);
5377 tmp = rewrite_and_exp (btor, e0, e10);
5378 result = rewrite_cond_exp (btor, tmp, e11, e2);
5379 BTOR_DEC_REC_RW_CALL (btor);
5380 btor_node_release (btor, tmp);
5381 return result;
5382 }
5383
5384 /*
5385 * match: c0 ? a : (c0 ? b : c)
5386 * result: c0 ? a : c
5387 */
5388 static inline bool
applies_cond_else_dom_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5389 applies_cond_else_dom_cond (Btor *btor,
5390 BtorNode *e0,
5391 BtorNode *e1,
5392 BtorNode *e2)
5393 {
5394 (void) e1;
5395 BtorNode *real_e2;
5396 real_e2 = btor_node_real_addr (e2);
5397 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (real_e2)
5398 && real_e2->e[0] == e0;
5399 }
5400
5401 static inline BtorNode *
apply_cond_else_dom_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5402 apply_cond_else_dom_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5403 {
5404 assert (applies_cond_else_dom_cond (btor, e0, e1, e2));
5405
5406 BtorNode *result;
5407
5408 BTOR_INC_REC_RW_CALL (btor);
5409 result = rewrite_cond_exp (
5410 btor, e0, e1, btor_node_cond_invert (e2, btor_node_real_addr (e2)->e[2]));
5411 BTOR_DEC_REC_RW_CALL (btor);
5412 return result;
5413 }
5414
5415 /*
5416 * match: c0 ? a : (c1 ? a : b)
5417 * result: ~c0 AND ~c1 ? b : a
5418 */
5419 static inline bool
applies_cond_else_merge_if_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5420 applies_cond_else_merge_if_cond (Btor *btor,
5421 BtorNode *e0,
5422 BtorNode *e1,
5423 BtorNode *e2)
5424 {
5425 (void) e0;
5426 BtorNode *real_e2;
5427 real_e2 = btor_node_real_addr (e2);
5428 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (real_e2)
5429 && btor_node_cond_invert (e2, real_e2->e[1]) == e1;
5430 }
5431
5432 static inline BtorNode *
apply_cond_else_merge_if_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5433 apply_cond_else_merge_if_cond (Btor *btor,
5434 BtorNode *e0,
5435 BtorNode *e1,
5436 BtorNode *e2)
5437 {
5438 assert (applies_cond_else_merge_if_cond (btor, e0, e1, e2));
5439
5440 BtorNode *result, *tmp, *e20, *e22, *real_e2;
5441
5442 real_e2 = btor_node_real_addr (e2);
5443 e20 = real_e2->e[0];
5444 e22 = btor_node_cond_invert (e2, real_e2->e[2]);
5445 BTOR_INC_REC_RW_CALL (btor);
5446 tmp = rewrite_and_exp (btor, btor_node_invert (e0), btor_node_invert (e20));
5447 result = rewrite_cond_exp (btor, tmp, e22, e1);
5448 BTOR_DEC_REC_RW_CALL (btor);
5449 btor_node_release (btor, tmp);
5450 return result;
5451 }
5452
5453 /*
5454 * match: c0 ? a : (c1 ? b : a)
5455 * result: ~c0 AND c1 ? b : a
5456 */
5457 static inline bool
applies_cond_else_merge_else_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5458 applies_cond_else_merge_else_cond (Btor *btor,
5459 BtorNode *e0,
5460 BtorNode *e1,
5461 BtorNode *e2)
5462 {
5463 (void) e0;
5464 BtorNode *real_e2;
5465 real_e2 = btor_node_real_addr (e2);
5466 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_cond (real_e2)
5467 && btor_node_cond_invert (e2, real_e2->e[2]) == e1;
5468 }
5469
5470 static inline BtorNode *
apply_cond_else_merge_else_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5471 apply_cond_else_merge_else_cond (Btor *btor,
5472 BtorNode *e0,
5473 BtorNode *e1,
5474 BtorNode *e2)
5475 {
5476 assert (applies_cond_else_merge_else_cond (btor, e0, e1, e2));
5477
5478 BtorNode *result, *tmp, *e20, *e21, *real_e2;
5479
5480 real_e2 = btor_node_real_addr (e2);
5481 e20 = real_e2->e[0];
5482 e21 = btor_node_cond_invert (e2, real_e2->e[1]);
5483 BTOR_INC_REC_RW_CALL (btor);
5484 tmp = rewrite_and_exp (btor, btor_node_invert (e0), e20);
5485 result = rewrite_cond_exp (btor, tmp, e21, e1);
5486 BTOR_DEC_REC_RW_CALL (btor);
5487 btor_node_release (btor, tmp);
5488 return result;
5489 }
5490
5491 /*
5492 * match: c ? a : b, where len(a) = 1
5493 * result: (~c OR a) AND (c OR b)
5494 */
5495 static inline bool
applies_bool_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5496 applies_bool_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5497 {
5498 (void) e0;
5499 (void) e2;
5500 return btor->rec_rw_calls < BTOR_REC_RW_BOUND
5501 && btor_node_bv_get_width (btor, e1) == 1;
5502 }
5503
5504 static inline BtorNode *
apply_bool_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5505 apply_bool_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5506 {
5507 assert (applies_bool_cond (btor, e0, e1, e2));
5508
5509 BtorNode *tmp1, *tmp2, *result;
5510
5511 BTOR_INC_REC_RW_CALL (btor);
5512 tmp1 = btor_exp_bv_or (btor, btor_node_invert (e0), e1);
5513 tmp2 = btor_exp_bv_or (btor, e0, e2);
5514 result = rewrite_and_exp (btor, tmp1, tmp2);
5515 BTOR_DEC_REC_RW_CALL (btor);
5516 btor_node_release (btor, tmp1);
5517 btor_node_release (btor, tmp2);
5518 return result;
5519 }
5520
5521 /*
5522 * match: c ? (a + 1) : a
5523 * match: c ? (1 + a) : a
5524 * result: a + 0::c
5525 */
5526 static inline bool
applies_add_if_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5527 applies_add_if_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5528 {
5529 (void) e0;
5530 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && !btor_node_is_inverted (e1)
5531 && btor_node_is_bv_add (e1)
5532 && ((e1->e[0] == e2 && btor_node_is_bv_const_one (btor, e1->e[1]))
5533 || (e1->e[1] == e2 && btor_node_is_bv_const_one (btor, e1->e[0])));
5534 }
5535
5536 static inline BtorNode *
apply_add_if_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5537 apply_add_if_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5538 {
5539 assert (applies_add_if_cond (btor, e0, e1, e2));
5540
5541 BtorNode *result, *tmp;
5542
5543 BTOR_INC_REC_RW_CALL (btor);
5544 tmp = btor_exp_bv_uext (btor, e0, btor_node_bv_get_width (btor, e1) - 1);
5545 result = rewrite_add_exp (btor, e2, tmp);
5546 BTOR_DEC_REC_RW_CALL (btor);
5547 btor_node_release (btor, tmp);
5548 return result;
5549 }
5550
5551 /*
5552 * match: c ? a : (a + 1)
5553 * match: c ? a : (1 + a)
5554 * result: a + 0::c
5555 */
5556 static inline bool
applies_add_else_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5557 applies_add_else_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5558 {
5559 (void) e0;
5560 return btor->rec_rw_calls < BTOR_REC_RW_BOUND && !btor_node_is_inverted (e2)
5561 && btor_node_is_bv_add (e2)
5562 && ((e2->e[0] == e1 && btor_node_is_bv_const_one (btor, e2->e[1]))
5563 || (e2->e[1] == e1 && btor_node_is_bv_const_one (btor, e2->e[0])));
5564 }
5565
5566 static inline BtorNode *
apply_add_else_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5567 apply_add_else_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5568 {
5569 assert (applies_add_else_cond (btor, e0, e1, e2));
5570 (void) e2;
5571
5572 BtorNode *result, *tmp;
5573
5574 BTOR_INC_REC_RW_CALL (btor);
5575 tmp = btor_exp_bv_uext (
5576 btor, btor_node_invert (e0), btor_node_bv_get_width (btor, e1) - 1);
5577 result = rewrite_add_exp (btor, e1, tmp);
5578 BTOR_DEC_REC_RW_CALL (btor);
5579 btor_node_release (btor, tmp);
5580 return result;
5581 }
5582
5583 /*
5584 * match: c ? (a::b) : (a::d)
5585 * result: a :: (c ? b : d)
5586 *
5587 * match: c ? (a::b) : (d::b)
5588 * result: (c ? a : d) :: b
5589 */
5590 static inline bool
applies_concat_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5591 applies_concat_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5592 {
5593 (void) e0;
5594 bool result;
5595 BtorNode *real_e1, *real_e2, *e10, *e11, *e20, *e21;
5596
5597 real_e1 = btor_node_real_addr (e1);
5598 real_e2 = btor_node_real_addr (e2);
5599 result = btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
5600 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
5601 && btor_node_is_bv_concat (real_e1)
5602 && btor_node_is_bv_concat (real_e2);
5603
5604 if (!result) return result;
5605
5606 e10 = btor_node_cond_invert (e1, real_e1->e[0]);
5607 e11 = btor_node_cond_invert (e1, real_e1->e[1]);
5608 e20 = btor_node_cond_invert (e2, real_e2->e[0]);
5609 e21 = btor_node_cond_invert (e2, real_e2->e[1]);
5610 return (e10 == e20 || e11 == e21);
5611 }
5612
5613 static inline BtorNode *
apply_concat_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5614 apply_concat_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5615 {
5616 assert (applies_concat_cond (btor, e0, e1, e2));
5617
5618 BtorNode *result, *tmp1, *tmp2, *real_e1, *real_e2, *e10, *e11, *e20, *e21;
5619 real_e1 = btor_node_real_addr (e1);
5620 real_e2 = btor_node_real_addr (e2);
5621 e10 = btor_node_cond_invert (e1, real_e1->e[0]);
5622 e11 = btor_node_cond_invert (e1, real_e1->e[1]);
5623 e20 = btor_node_cond_invert (e2, real_e2->e[0]);
5624 e21 = btor_node_cond_invert (e2, real_e2->e[1]);
5625
5626 BTOR_INC_REC_RW_CALL (btor);
5627 tmp1 = rewrite_cond_exp (btor, e0, e10, e20);
5628 tmp2 = rewrite_cond_exp (btor, e0, e11, e21);
5629 result = rewrite_concat_exp (btor, tmp1, tmp2);
5630 BTOR_DEC_REC_RW_CALL (btor);
5631 btor_node_release (btor, tmp1);
5632 btor_node_release (btor, tmp2);
5633 return result;
5634 }
5635
5636 /*
5637 * match: c ? a OP b : a OP d, where OP is either +, &, *, /, %
5638 * result: a OP (c ? b : d)
5639 */
5640 static inline bool
applies_op_lhs_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5641 applies_op_lhs_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5642 {
5643 (void) e0;
5644 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
5645 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
5646 && !btor_node_is_inverted (e1) && !btor_node_is_inverted (e2)
5647 && e1->kind == e2->kind
5648 && (btor_node_is_bv_add (e1) || btor_node_is_bv_and (e1)
5649 || btor_node_is_bv_mul (e1) || btor_node_is_bv_udiv (e1)
5650 || btor_node_is_bv_urem (e1))
5651 && e1->e[0] == e2->e[0];
5652 }
5653
5654 static inline BtorNode *
apply_op_lhs_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5655 apply_op_lhs_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5656 {
5657 assert (applies_op_lhs_cond (btor, e0, e1, e2));
5658
5659 BtorNode *result, *tmp;
5660
5661 BTOR_INC_REC_RW_CALL (btor);
5662 tmp = rewrite_cond_exp (btor, e0, e1->e[1], e2->e[1]);
5663 result = btor_rewrite_binary_exp (btor, e1->kind, e1->e[0], tmp);
5664 BTOR_DEC_REC_RW_CALL (btor);
5665 btor_node_release (btor, tmp);
5666 return result;
5667 }
5668
5669 /*
5670 * match: c ? a OP b : d OP b, where OP is either +, &, *, /, %
5671 * result: (c ? a : d) OP b
5672 */
5673 static inline bool
applies_op_rhs_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5674 applies_op_rhs_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5675 {
5676 (void) e0;
5677 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
5678 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
5679 && !btor_node_is_inverted (e1) && !btor_node_is_inverted (e2)
5680 && e1->kind == e2->kind
5681 && (btor_node_is_bv_add (e1) || btor_node_is_bv_and (e1)
5682 || btor_node_is_bv_mul (e1) || btor_node_is_bv_udiv (e1)
5683 || btor_node_is_bv_urem (e1))
5684 && e1->e[1] == e2->e[1];
5685 }
5686
5687 static inline BtorNode *
apply_op_rhs_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5688 apply_op_rhs_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5689 {
5690 assert (applies_op_rhs_cond (btor, e0, e1, e2));
5691
5692 BtorNode *result, *tmp;
5693
5694 BTOR_INC_REC_RW_CALL (btor);
5695 tmp = rewrite_cond_exp (btor, e0, e1->e[0], e2->e[0]);
5696 result = btor_rewrite_binary_exp (btor, e1->kind, tmp, e1->e[1]);
5697 BTOR_DEC_REC_RW_CALL (btor);
5698 btor_node_release (btor, tmp);
5699 return result;
5700 }
5701
5702 /*
5703 * match: c ? a OP b : d OP a, where OP is either +, &, *
5704 * result: a OP (c ? b : d)
5705 */
5706 static inline bool
applies_comm_op_1_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5707 applies_comm_op_1_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5708 {
5709 (void) e0;
5710 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
5711 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
5712 && !btor_node_is_inverted (e1) && !btor_node_is_inverted (e2)
5713 && e1->kind == e2->kind
5714 && (btor_node_is_bv_add (e1) || btor_node_is_bv_and (e1)
5715 || btor_node_is_bv_mul (e1))
5716 && e1->e[0] == e2->e[1];
5717 }
5718
5719 static inline BtorNode *
apply_comm_op_1_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5720 apply_comm_op_1_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5721 {
5722 assert (applies_comm_op_1_cond (btor, e0, e1, e2));
5723
5724 BtorNode *result, *tmp;
5725
5726 BTOR_INC_REC_RW_CALL (btor);
5727 tmp = rewrite_cond_exp (btor, e0, e1->e[1], e2->e[0]);
5728 result = btor_rewrite_binary_exp (btor, e1->kind, e1->e[0], tmp);
5729 BTOR_DEC_REC_RW_CALL (btor);
5730 btor_node_release (btor, tmp);
5731 return result;
5732 }
5733
5734 /*
5735 * match: c ? a OP b : b OP d, where OP is either +, &, *
5736 * result: b OP (c ? a : d)
5737 */
5738 static inline bool
applies_comm_op_2_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5739 applies_comm_op_2_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5740 {
5741 (void) e0;
5742 return btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
5743 && btor->rec_rw_calls < BTOR_REC_RW_BOUND
5744 && !btor_node_is_inverted (e1) && !btor_node_is_inverted (e2)
5745 && e1->kind == e2->kind
5746 && (btor_node_is_bv_add (e1) || btor_node_is_bv_and (e1)
5747 || btor_node_is_bv_mul (e1))
5748 && e1->e[1] == e2->e[0];
5749 }
5750
5751 static inline BtorNode *
apply_comm_op_2_cond(Btor * btor,BtorNode * e0,BtorNode * e1,BtorNode * e2)5752 apply_comm_op_2_cond (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
5753 {
5754 assert (applies_comm_op_2_cond (btor, e0, e1, e2));
5755
5756 BtorNode *result, *tmp;
5757
5758 BTOR_INC_REC_RW_CALL (btor);
5759 tmp = rewrite_cond_exp (btor, e0, e1->e[0], e2->e[1]);
5760 result = btor_rewrite_binary_exp (btor, e1->kind, e1->e[1], tmp);
5761 BTOR_DEC_REC_RW_CALL (btor);
5762 btor_node_release (btor, tmp);
5763 return result;
5764 }
5765
5766 #if 0
5767 /*
5768 * match:
5769 * result:
5770 */
5771 static inline bool
5772 applies_cond (Btor * btor, BtorNode * e0, BtorNode * e1, BtorNode * e2)
5773 {
5774 }
5775
5776 static inline BtorNode *
5777 apply_cond (Btor * btor, BtorNode * e0, BtorNode * e1, BtorNode * e2)
5778 {
5779 assert (applies_cond (btor, e0, e1, e2));
5780
5781 }
5782 #endif
5783
5784 /* -------------------------------------------------------------------------- */
5785 /* normalizers */
5786 /* -------------------------------------------------------------------------- */
5787
/* Build a single 'kind' node over all nodes stored in hash table 'nodes'.
 * Each table entry maps a node to a multiplicity (data.as_int); a node with
 * multiplicity k is used as an operand k times.  Operands are sorted by node
 * id so the constructed term is deterministic. */
static BtorNode *
mk_norm_node_from_hash_table (Btor *btor,
                              BtorNodeKind kind,
                              BtorPtrHashTable *nodes)
{
  assert (nodes->count > 0);

  size_t i;
  BtorNode *cur, *tmp, *result;
  BtorNodePtrStack stack;
  BtorPtrHashTableIterator it;
  BtorHashTableData *d;

  /* collect each node as often as its multiplicity dictates */
  BTOR_INIT_STACK (btor->mm, stack);
  btor_iter_hashptr_init (&it, nodes);
  while (btor_iter_hashptr_has_next (&it))
  {
    cur = it.cur;
    d = btor_iter_hashptr_next_data (&it);
    for (i = 0; i < (size_t) d->as_int; i++) BTOR_PUSH_STACK (stack, cur);
  }

  /* canonical operand order: sort by node id */
  qsort (
      stack.start, BTOR_COUNT_STACK (stack), sizeof (BtorNode *), cmp_node_id);

  assert (!BTOR_EMPTY_STACK (stack));
  result = btor_node_copy (btor, BTOR_PEEK_STACK (stack, 0));
  /* left-fold the remaining operands into 'result' */
  for (i = 1; i < BTOR_COUNT_STACK (stack); i++)
  {
    cur = BTOR_PEEK_STACK (stack, i);
    tmp = btor_rewrite_binary_exp (btor, kind, result, cur);
    btor_node_release (btor, result);
    result = tmp;
  }
  BTOR_RELEASE_STACK (stack);
  return result;
}
5825
/* Normalize two nodes e0 and e1 of the same commutative/associative kind
 * (add, and, or mul): collect the multiset of leaf operands of each side,
 * factor the operands they have in common into a single shared subterm, and
 * rebuild both sides on top of it.  On success *e0_norm/*e1_norm hold fresh
 * references to the rebuilt nodes; if normalization is not possible (option
 * disabled, fewer than two common operands, or traversal bound exceeded)
 * they hold fresh copies of the unmodified inputs. */
static void
normalize_bin_comm_ass_exp (Btor *btor,
                            BtorNode *e0,
                            BtorNode *e1,
                            BtorNode **e0_norm,
                            BtorNode **e1_norm)
{
  assert (btor);
  assert (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2);
  assert (e0);
  assert (e1);
  assert (!btor_node_real_addr (e0)->simplified);
  assert (!btor_node_real_addr (e1)->simplified);
  assert (e0_norm);
  assert (e1_norm);
  assert (!btor_node_is_inverted (e0));
  assert (!btor_node_is_inverted (e1));
  assert (btor_node_is_bv_add (e0) || btor_node_is_bv_and (e0)
          || btor_node_is_bv_mul (e0));
  assert (e0->kind == e1->kind);

  BtorNodeKind kind;
  BtorNode *cur, *common;
  BtorNodePtrStack stack;
  BtorMemMgr *mm;
  /* leaf operand multisets: 'left' for e0, 'right' for e1-only operands,
   * 'comm' for operands occurring on both sides (multiplicity in as_int) */
  BtorPtrHashTable *left, *right, *comm;
  BtorPtrHashBucket *b;
  BtorIntHashTable *cache;
  BtorHashTableData *d;
  bool normalize_all = true, need_restart = false;

  mm = btor->mm;
  kind = e0->kind;

RESTART_NORMALIZE:
  left = btor_hashptr_table_new (mm,
                                 (BtorHashPtr) btor_node_hash_by_id,
                                 (BtorCmpPtr) btor_node_compare_by_id);
  right = btor_hashptr_table_new (mm,
                                  (BtorHashPtr) btor_node_hash_by_id,
                                  (BtorCmpPtr) btor_node_compare_by_id);
  comm = btor_hashptr_table_new (mm,
                                 (BtorHashPtr) btor_node_hash_by_id,
                                 (BtorCmpPtr) btor_node_compare_by_id);
  cache = btor_hashint_map_new (mm);

  if (!btor_opt_get (btor, BTOR_OPT_NORMALIZE))
    goto RETURN_NO_RESULT;

  /* We first try to normalize all nodes, i.e., we do a tree traversal on e0
   * and e1. If we encounter a node more than 32 times, we restart and do a
   * DAG traversal. The 'need_restart' flag indicates whether we actually need
   * to do a DAG traversal after the first pass, which is the case if a node
   * was visited more than once. */

  /* pass 1: collect the leaf operands of e0 into 'left' */
  BTOR_INIT_STACK (mm, stack);
  BTOR_PUSH_STACK (stack, e0);
  do
  {
    cur = BTOR_POP_STACK (stack);
    if (!btor_node_is_inverted (cur) && cur->kind == kind)
    {
      d = btor_hashint_map_get (cache, cur->id);
      if (d)
      {
        if (normalize_all)
        {
          need_restart = true;
        }
        else
        {
          /* DAG traversal: visit each node at most once */
          BTOR_RELEASE_STACK (stack);
          goto RETURN_NO_RESULT;
        }
      }
      else
      {
        d = btor_hashint_map_add (cache, cur->id);
      }
      d->as_int += 1;
      if (d->as_int > 32)
      {
        /* tree blow-up, fall back to DAG traversal */
        BTOR_RELEASE_STACK (stack);
        goto RESTART_NORMALIZE_ALL;
      }
      BTOR_PUSH_STACK (stack, cur->e[1]);
      BTOR_PUSH_STACK (stack, cur->e[0]);
    }
    else
    {
      /* leaf operand of e0 */
      if (!(b = btor_hashptr_table_get (left, cur)))
        b = btor_hashptr_table_add (left, cur);
      b->data.as_int++;
    }
  } while (!BTOR_EMPTY_STACK (stack));
  btor_hashint_map_delete (cache);
  cache = btor_hashint_map_new (mm);

  /* pass 2: collect leaf operands of e1, moving operands also found in
   * 'left' into 'comm' and the rest into 'right' */
  BTOR_PUSH_STACK (stack, e1);
  do
  {
    cur = BTOR_POP_STACK (stack);
    if (!btor_node_is_inverted (cur) && cur->kind == kind)
    {
      d = btor_hashint_map_get (cache, cur->id);
      if (d)
      {
        if (normalize_all)
        {
          need_restart = true;
        }
        else
        {
          BTOR_RELEASE_STACK (stack);
          goto RETURN_NO_RESULT;
        }
      }
      else
      {
        d = btor_hashint_map_add (cache, cur->id);
      }
      d->as_int += 1;
      if (d->as_int > 32)
      {
        BTOR_RELEASE_STACK (stack);
        goto RESTART_NORMALIZE_ALL;
      }
      BTOR_PUSH_STACK (stack, cur->e[1]);
      BTOR_PUSH_STACK (stack, cur->e[0]);
    }
    else
    {
      b = btor_hashptr_table_get (left, cur);
      if (b)
      {
        /* we found one common operand */

        /* remove operand from left */
        if (b->data.as_int > 1)
          b->data.as_int--;
        else
        {
          assert (b->data.as_int == 1);
          btor_hashptr_table_remove (left, cur, 0, 0);
        }

        /* insert into common table */
        if (!(b = btor_hashptr_table_get (comm, cur)))
          b = btor_hashptr_table_add (comm, cur);
        b->data.as_int++;
      }
      else
      {
        /* operand is not common */
        if (!(b = btor_hashptr_table_get (right, cur)))
          b = btor_hashptr_table_add (right, cur);
        b->data.as_int++;
      }
    }
  } while (!BTOR_EMPTY_STACK (stack));
  BTOR_RELEASE_STACK (stack);

  /* no operand or only one operand in common? leave everything as it is */
  if (comm->count < 2u)
  {
  RETURN_NO_RESULT:
    /* clean up */
    btor_hashptr_table_delete (left);
    btor_hashptr_table_delete (right);
    btor_hashptr_table_delete (comm);
    btor_hashint_map_delete (cache);
    *e0_norm = btor_node_copy (btor, e0);
    *e1_norm = btor_node_copy (btor, e1);
    return;
  }

  if (normalize_all && need_restart && (left->count > 0 || right->count > 0))
  {
  RESTART_NORMALIZE_ALL:
    /* redo both passes as DAG traversals (each node counted once) */
    normalize_all = false;
    btor_hashptr_table_delete (left);
    btor_hashptr_table_delete (right);
    btor_hashptr_table_delete (comm);
    btor_hashint_map_delete (cache);
    goto RESTART_NORMALIZE;
  }

  if (kind == BTOR_BV_AND_NODE)
    btor->stats.ands_normalized++;
  else if (kind == BTOR_BV_ADD_NODE)
    btor->stats.adds_normalized++;
  else
  {
    assert (kind == BTOR_BV_MUL_NODE);
    btor->stats.muls_normalized++;
  }

  assert (comm->count >= 2u);

  /* normalize common nodes */
  common = mk_norm_node_from_hash_table (btor, kind, comm);

  /* rebuild each side as (side-only operands) op common */
  if (!(b = btor_hashptr_table_get (left, common)))
    b = btor_hashptr_table_add (left, common);
  b->data.as_int += 1;
  *e0_norm = mk_norm_node_from_hash_table (btor, kind, left);

  if (!(b = btor_hashptr_table_get (right, common)))
    b = btor_hashptr_table_add (right, common);
  b->data.as_int += 1;
  *e1_norm = mk_norm_node_from_hash_table (btor, kind, right);

  /* clean up */
  btor_node_release (btor, common);
  btor_hashptr_table_delete (left);
  btor_hashptr_table_delete (right);
  btor_hashptr_table_delete (comm);
  btor_hashint_map_delete (cache);
}
6044
6045 static BtorNode *
find_top_op(Btor * btor,BtorNode * e)6046 find_top_op (Btor *btor, BtorNode *e)
6047 {
6048 BtorNode *res;
6049 e = btor_node_real_addr (e);
6050 if (btor_node_is_bv_add (e) || btor_node_is_bv_mul (e)
6051 || btor_node_is_bv_and (e))
6052 return e;
6053 if (btor->rec_rw_calls >= BTOR_REC_RW_BOUND) return 0;
6054
6055 res = 0;
6056 BTOR_INC_REC_RW_CALL (btor);
6057 if (btor_node_is_bv_slice (e) || btor_node_is_bv_sll (e)
6058 || btor_node_is_bv_srl (e))
6059 {
6060 res = find_top_op (btor, e->e[0]);
6061 }
6062 BTOR_DEC_REC_RW_CALL (btor);
6063
6064 // TODO handle more operators ... (here first)
6065
6066 return res;
6067 }
6068
/* Rebuild expression 'e' with the subterm 'c' (previously located via
 * find_top_op, hence regular/non-inverted) replaced by 'r'.  Handles the
 * same wrappers find_top_op descends through (inversion, slice, sll, srl).
 * Returns a fresh reference; asserts if an unhandled operator is hit. */
static BtorNode *
rebuild_top_op (Btor *btor, BtorNode *e, BtorNode *c, BtorNode *r)
{
  assert (!btor_node_is_inverted (c));

  BtorNode *res, *tmp;

  if (btor_node_is_inverted (e))
  {
    /* rebuild the real node, then restore the inversion */
    tmp = rebuild_top_op (btor, btor_node_real_addr (e), c, r);
    res = btor_node_invert (tmp);
  }
  else if (e == c)
    res = btor_node_copy (btor, r);
  else
  {
    // TODO handle more operators ... (then here)
    //
    res = 0;
    /* recurse into the single data operand, then re-apply the wrapper */
    tmp = rebuild_top_op (btor, e->e[0], c, r);
    if (btor_node_is_bv_slice (e))
    {
      res = rewrite_slice_exp (btor,
                               tmp,
                               btor_node_bv_slice_get_upper (e),
                               btor_node_bv_slice_get_lower (e));
    }
    else if (btor_node_is_bv_sll (e))
    {
      res = rewrite_sll_exp (btor, tmp, e->e[1]);
    }
    else if (btor_node_is_bv_srl (e))
    {
      res = rewrite_srl_exp (btor, tmp, e->e[1]);
    }
    btor_node_release (btor, tmp);
    assert (res);
  }
  return res;
}
6109
6110 static void
normalize_adds_muls_ands(Btor * btor,BtorNode ** left,BtorNode ** right)6111 normalize_adds_muls_ands (Btor *btor, BtorNode **left, BtorNode **right)
6112 {
6113 BtorNode *e0, *e1, *real_e0, *real_e1, *e0_norm, *e1_norm;
6114
6115 e0 = *left;
6116 e1 = *right;
6117 real_e0 = btor_node_real_addr (e0);
6118 real_e1 = btor_node_real_addr (e1);
6119
6120 if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
6121 && real_e0->kind == real_e1->kind
6122 && ((btor_node_is_bv_add (real_e0)
6123 && btor_opt_get (btor, BTOR_OPT_NORMALIZE_ADD))
6124 || btor_node_is_bv_mul (real_e0) || btor_node_is_bv_and (real_e0)))
6125 {
6126 normalize_bin_comm_ass_exp (btor, real_e0, real_e1, &e0_norm, &e1_norm);
6127 e0_norm = btor_node_cond_invert (e0, e0_norm);
6128 e1_norm = btor_node_cond_invert (e1, e1_norm);
6129 btor_node_release (btor, e0);
6130 btor_node_release (btor, e1);
6131 *left = e0_norm;
6132 *right = e1_norm;
6133 }
6134 }
6135
/* Normalize the operands of an equality prior to applying rewrite rules:
 * - cancel double inversion (~e0 == ~e1  <=>  e0 == e1),
 * - prefer non-inverted variables on the left-hand side,
 * - normalize common add/mul/and subterms (also below slice/shift wrappers),
 * - move constants across adds:  c0 == c1 + b  ->  c0 - c1 == b.
 * Consumes the references in *left/*right and stores (possibly new) ones. */
static inline void
normalize_eq (Btor *btor, BtorNode **left, BtorNode **right)
{
  BtorNode *e0, *e1, *tmp1, *tmp2, *op0, *op1, *c0, *c1, *n0, *n1, *add;

  e0 = *left;
  e1 = *right;

  /* ~e0 == ~e1 is the same as e0 == e1 */
  if (btor_node_is_inverted (e0) && btor_node_is_inverted (e1))
  {
    e0 = btor_node_invert (e0);
    e1 = btor_node_invert (e1);
  }

  if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 1)
  {
    /* an inverted variable on the left: flip both sides instead */
    if (btor_node_is_inverted (e0) && btor_node_is_bv_var (e0))
    {
      e0 = btor_node_invert (e0);
      e1 = btor_node_invert (e1);
    }
  }

  // TODO(ma): this is probably redundant
  /* normalize adds and muls on demand */
  normalize_adds_muls_ands (btor, &e0, &e1);

  /* if both sides wrap a normalizable operator of the same kind and sort
   * (e.g. below slices/shifts), normalize those operators and rebuild */
  if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
      && (op0 = find_top_op (btor, e0)) && (op1 = find_top_op (btor, e1))
      && btor_node_real_addr(op0)->kind == btor_node_real_addr(op1)->kind
      && btor_node_get_sort_id (op0) == btor_node_get_sort_id (op1))
  {
    if (!btor_node_is_bv_and(op0) || btor_opt_get (btor, BTOR_OPT_NORMALIZE_ADD))
    {
      normalize_bin_comm_ass_exp (btor, op0, op1, &n0, &n1);
      tmp1 = rebuild_top_op (btor, e0, op0, n0);
      tmp2 = rebuild_top_op (btor, e1, op1, n1);
      btor_node_release (btor, n0);
      btor_node_release (btor, n1);
      btor_node_release (btor, e0);
      btor_node_release (btor, e1);
      e0 = tmp1;
      e1 = tmp2;
    }
  }

  // TODO (ma): check if this is also applicable to mul nodes and maybe others?
  /*
   * match:  c0 = c1 + b
   * result: c0 - c1 = b
   *
   * also handles negated adds:
   *
   * c0 = ~(c1 + b) -> ~c0 = c1 + b
   */
  if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
      && ((btor_node_is_bv_add (e0) && btor_node_is_bv_const (e1))
          || (btor_node_is_bv_add (e1) && btor_node_is_bv_const (e0))))
  {
    /* identify which side is the constant and which the add */
    if (btor_node_is_bv_const (e0) && btor_node_is_bv_add (e1))
    {
      c0 = e0;
      add = e1;
    }
    else
    {
      assert (btor_node_is_bv_add (e0));
      assert (btor_node_is_bv_const (e1));
      c0 = e1;
      add = e0;
    }

    /* c0 = ~(c1 + b) -> ~c0 = c1 + b */
    c0 = btor_node_cond_invert (add, c0);
    add = btor_node_cond_invert (add, add);

    /* find the constant operand of the add (either side) */
    c1 = tmp1 = 0;
    assert (btor_node_is_regular (add));
    if (btor_node_is_bv_const (add->e[0]))
    {
      c1 = add->e[0];
      tmp1 = add->e[1];
    }
    else if (btor_node_is_bv_const (add->e[1]))
    {
      c1 = add->e[1];
      tmp1 = add->e[0];
    }

    if (tmp1)
    {
      assert (c0);
      assert (c1);
      /* b == c0 - c1 */
      n0 = btor_node_copy (btor, tmp1);
      n1 = btor_exp_bv_sub (btor, c0, c1);
      btor_node_release (btor, e0);
      btor_node_release (btor, e1);
      e0 = n0;
      e1 = n1;
    }
  }

  /* ~e0 == ~e1 is the same as e0 == e1 */
  if (btor_node_is_inverted (e0) && btor_node_is_inverted (e1))
  {
    e0 = btor_node_invert (e0);
    e1 = btor_node_invert (e1);
  }

  *left = e0;
  *right = e1;
}
6249
6250 static void
normalize_ult(Btor * btor,BtorNode ** left,BtorNode ** right)6251 normalize_ult (Btor *btor, BtorNode **left, BtorNode **right)
6252 {
6253 BtorNode *e0, *e1, *tmp;
6254
6255 e0 = *left;
6256 e1 = *right;
6257
6258 /* ~a < ~b is the same as b < a */
6259 if (btor_node_is_inverted (e0) && btor_node_is_inverted (e1))
6260 {
6261 tmp = btor_node_real_addr (e1);
6262 e1 = btor_node_real_addr (e0);
6263 e0 = tmp;
6264 }
6265
6266 /* normalize adds and muls on demand */
6267 normalize_adds_muls_ands (btor, &e0, &e1);
6268
6269 *left = e0;
6270 *right = e1;
6271 }
6272
/* Normalize the operands of an AND before rewriting.
 * NOTE(review): the guard is asymmetric (mul on e0, add on e1), unlike the
 * symmetric e0-only guards in normalize_add/normalize_mul — presumably a
 * cheap pre-filter, and harmless since normalize_adds_muls_ands itself
 * requires both kinds to match; confirm the asymmetry is intended. */
static void
normalize_and (Btor *btor, BtorNode **left, BtorNode **right)
{
  BtorNode *e0, *e1;

  e0 = *left;
  e1 = *right;

  /* normalize adds and muls on demand */
  if (btor_node_is_bv_mul (e0) || btor_node_is_bv_add (e1))
    normalize_adds_muls_ands (btor, &e0, &e1);

  *left = e0;
  *right = e1;
}
6288
6289 static void
normalize_add(Btor * btor,BtorNode ** left,BtorNode ** right)6290 normalize_add (Btor *btor, BtorNode **left, BtorNode **right)
6291 {
6292 BtorNode *e0, *e1;
6293
6294 e0 = *left;
6295 e1 = *right;
6296
6297 /* normalize muls and ands on demand */
6298 if (btor_node_is_bv_mul (e0) || btor_node_is_bv_and (e0))
6299 normalize_adds_muls_ands (btor, &e0, &e1);
6300
6301 *left = e0;
6302 *right = e1;
6303 }
6304
6305 static void
normalize_mul(Btor * btor,BtorNode ** left,BtorNode ** right)6306 normalize_mul (Btor *btor, BtorNode **left, BtorNode **right)
6307 {
6308 BtorNode *e0, *e1;
6309
6310 e0 = *left;
6311 e1 = *right;
6312
6313 /* normalize adds and ands on demand */
6314 if (btor_node_is_bv_add (e0) || btor_node_is_bv_and (e0))
6315 normalize_adds_muls_ands (btor, &e0, &e1);
6316
6317 *left = e0;
6318 *right = e1;
6319 }
6320
6321 static void
normalize_udiv(Btor * btor,BtorNode ** left,BtorNode ** right)6322 normalize_udiv (Btor *btor, BtorNode **left, BtorNode **right)
6323 {
6324 BtorNode *e0, *e1;
6325
6326 e0 = *left;
6327 e1 = *right;
6328
6329 /* normalize adds and muls on demand */
6330 normalize_adds_muls_ands (btor, &e0, &e1);
6331
6332 *left = e0;
6333 *right = e1;
6334 }
6335
6336 static void
normalize_urem(Btor * btor,BtorNode ** left,BtorNode ** right)6337 normalize_urem (Btor *btor, BtorNode **left, BtorNode **right)
6338 {
6339 BtorNode *e0, *e1;
6340
6341 e0 = *left;
6342 e1 = *right;
6343
6344 /* normalize adds and muls on demand */
6345 normalize_adds_muls_ands (btor, &e0, &e1);
6346
6347 *left = e0;
6348 *right = e1;
6349 }
6350
/* Normalize a concat to left-associative form: flatten the concat tree in
 * *right into its leaves (in order) and re-associate everything but the
 * last leaf onto *left, leaving the last leaf as the new *right.  Only
 * active at rewrite level > 2 and within the recursion bound.  Consumes
 * the old references on that path and stores fresh ones. */
static void
normalize_concat (Btor *btor, BtorNode **left, BtorNode **right)
{
  uint32_t i;
  BtorMemMgr *mm;
  BtorNode *e0, *e1, *cur, *real_cur, *tmp, *concat;
  BtorNodePtrStack stack, po_stack;

  mm = btor->mm;
  e0 = *left;
  e1 = *right;

  /* normalize concats --> left-associative */
  if (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 2
      && btor->rec_rw_calls < BTOR_REC_RW_BOUND && btor_node_is_bv_concat (e1))
  {
    /* po_stack collects e0 followed by the leaves of e1 in msb-to-lsb
     * (pre-)order */
    BTOR_INIT_STACK (mm, po_stack);
    BTOR_PUSH_STACK (po_stack, e0);

    BTOR_INIT_STACK (mm, stack);
    BTOR_PUSH_STACK (stack, e1);
    do
    {
      cur = BTOR_POP_STACK (stack);
      real_cur = btor_node_real_addr (cur);
      if (btor_node_is_bv_concat (real_cur))
      {
        /* push children, propagating an inversion on 'cur' onto them */
        BTOR_PUSH_STACK (stack, btor_node_cond_invert (cur, real_cur->e[1]));
        BTOR_PUSH_STACK (stack, btor_node_cond_invert (cur, real_cur->e[0]));
      }
      else
        BTOR_PUSH_STACK (po_stack, cur);
    } while (!BTOR_EMPTY_STACK (stack));

    BTOR_INC_REC_RW_CALL (btor);
    /* e0 plus at least two leaves from the concat e1 */
    assert (BTOR_COUNT_STACK (po_stack) >= 3);
    cur = BTOR_PEEK_STACK (po_stack, 0);
    tmp = BTOR_PEEK_STACK (po_stack, 1);
    concat = rewrite_concat_exp (btor, cur, tmp);

    /* fold all remaining leaves except the last into the left operand */
    for (i = 2; i < BTOR_COUNT_STACK (po_stack) - 1; i++)
    {
      cur = BTOR_PEEK_STACK (po_stack, i);
      assert (!btor_node_is_bv_concat (cur));
      tmp = rewrite_concat_exp (btor, concat, cur);
      btor_node_release (btor, concat);
      concat = tmp;
    }
    BTOR_DEC_REC_RW_CALL (btor);

    btor_node_release (btor, e0);
    btor_node_release (btor, e1);
    e0 = concat;
    /* the last leaf becomes the new right operand */
    e1 = btor_node_copy (btor, BTOR_TOP_STACK (po_stack));

    BTOR_RELEASE_STACK (stack);
    BTOR_RELEASE_STACK (po_stack);
  }

  *left = e0;
  *right = e1;
}
6413
6414 static inline void
normalize_cond(Btor * btor,BtorNode ** cond,BtorNode ** left,BtorNode ** right)6415 normalize_cond (Btor *btor, BtorNode **cond, BtorNode **left, BtorNode **right)
6416 {
6417 BtorNode *e0, *e1, *e2;
6418
6419 e0 = *cond;
6420 e1 = *left;
6421 e2 = *right;
6422
6423 /* normalization: ~e0 ? e1 : e2 is the same as e0 ? e2: e1 */
6424 if (btor_node_is_inverted (e0))
6425 {
6426 e0 = btor_node_invert (e0);
6427 BTOR_SWAP (BtorNode *, e1, e2);
6428 }
6429
6430 /* normalize adds and muls on demand */
6431 normalize_adds_muls_ands (btor, &e1, &e2);
6432
6433 *cond = e0;
6434 *left = e1;
6435 *right = e2;
6436 }
6437
6438 /* -------------------------------------------------------------------------- */
6439 /* term rewriting functions */
6440 /* -------------------------------------------------------------------------- */
6441
/* Rewrite a slice e[upper:lower]: consult the rewrite cache, then try the
 * slice rewrite rules in order (each ADD_RW_RULE jumps to DONE with a
 * result on success), falling back to a plain slice node.  Successful
 * rule applications are recorded in the rewrite cache.  Returns a fresh
 * reference. */
static BtorNode *
rewrite_slice_exp (Btor *btor, BtorNode *e, uint32_t upper, uint32_t lower)
{
  BtorNode *result = 0;

  e = btor_simplify_exp (btor, e);
  assert (btor_dbg_precond_slice_exp (btor, e, upper, lower));

  /* reuse a previously computed result for this (node, upper, lower) */
  result = check_rw_cache (
      btor, BTOR_BV_SLICE_NODE, btor_node_get_id (e), upper, lower);

  if (!result)
  {
    ADD_RW_RULE (full_slice, e, upper, lower);
    ADD_RW_RULE (const_slice, e, upper, lower);
    ADD_RW_RULE (slice_slice, e, upper, lower);
    ADD_RW_RULE (concat_lower_slice, e, upper, lower);
    ADD_RW_RULE (concat_upper_slice, e, upper, lower);
    ADD_RW_RULE (concat_rec_upper_slice, e, upper, lower);
    ADD_RW_RULE (concat_rec_lower_slice, e, upper, lower);
    ADD_RW_RULE (concat_rec_slice, e, upper, lower);
    ADD_RW_RULE (and_slice, e, upper, lower);
    ADD_RW_RULE (bcond_slice, e, upper, lower);
    ADD_RW_RULE (zero_lower_slice, e, upper, lower);

    /* falling through here means no rule applied (rules jump to DONE) */
    assert (!result);
    if (!result)
    {
      result = btor_node_create_bv_slice (btor, e, upper, lower);
    }
    else
    {
      /* Note: The else branch is only active if we were able to use a rewrite
       * rule. */
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_SLICE_NODE,
                         btor_node_get_id (e),
                         upper,
                         lower,
                         btor_node_get_id (result));
    }
  }
  assert (result);
  return result;
}
6488
/* Rewrite an equality e0 == e1 (bit-vector or function equality):
 * normalize the operands, consult the rewrite cache, then try the eq
 * rewrite rules; if none applies, swap the operands once and retry the
 * commutative rules before falling back to a plain eq node.  Each
 * ADD_RW_RULE jumps to DONE with a result on success.  Returns a fresh
 * reference. */
static BtorNode *
rewrite_eq_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  bool swap_ops = false;
  BtorNode *result = 0;
  BtorNodeKind kind;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_eq_exp (btor, e0, e1));
  kind = btor_node_is_fun (e0) ? BTOR_FUN_EQ_NODE : BTOR_BV_EQ_NODE;

  /* normalize_eq consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_eq (btor, &e0, &e1);

SWAP_OPERANDS:
  result = check_rw_cache (
      btor, kind, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* rules guarded by !swap_ops are order-insensitive and need only one
     * pass; the rest are retried after the operand swap */
    if (!swap_ops)
    {
      ADD_RW_RULE (const_binary_exp, kind, e0, e1);
      /* We do not rewrite eq in the boolean case, as we cannot extract the
       * resulting XNOR on top level again and would therefore lose
       * substitutions.
       *
       * Additionally, we do not rewrite eq in the boolean case, as we rewrite
       * a != b to a = ~b and substitute.
       */
      ADD_RW_RULE (true_eq, e0, e1);
      ADD_RW_RULE (false_eq, e0, e1);
      ADD_RW_RULE (bcond_eq, e0, e1);
      ADD_RW_RULE (special_const_lhs_binary_exp, kind, e0, e1);
      ADD_RW_RULE (special_const_rhs_binary_exp, kind, e0, e1);
    }
    ADD_RW_RULE (add_left_eq, e0, e1);
    ADD_RW_RULE (add_right_eq, e0, e1);
    ADD_RW_RULE (add_add_1_eq, e0, e1);
    ADD_RW_RULE (add_add_2_eq, e0, e1);
    ADD_RW_RULE (add_add_3_eq, e0, e1);
    ADD_RW_RULE (add_add_4_eq, e0, e1);
    ADD_RW_RULE (sub_eq, e0, e1);
    ADD_RW_RULE (bcond_uneq_if_eq, e0, e1);
    ADD_RW_RULE (bcond_uneq_else_eq, e0, e1);
    ADD_RW_RULE (bcond_if_eq, e0, e1);
    ADD_RW_RULE (bcond_else_eq, e0, e1);
    ADD_RW_RULE (distrib_add_mul_eq, e0, e1);
    ADD_RW_RULE (concat_eq, e0, e1);
#if 0
    ADD_RW_RULE (zero_eq_and_eq, e0, e1);
#endif

    assert (!result);
    /* no result so far, swap operands */
    if (!swap_ops)
    {
      BTOR_SWAP (BtorNode *, e0, e1);
      swap_ops = true;
      goto SWAP_OPERANDS;
    }

    if (!result)
    {
      /* operands were swapped above, so (e1, e0) restores the original
       * post-normalization order */
      result = btor_node_create_eq (btor, e1, e0);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         kind,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6574
/* Rewrite an unsigned-less-than e0 < e1: normalize the operands, consult
 * the rewrite cache, then try the ult rewrite rules (each ADD_RW_RULE
 * jumps to DONE with a result on success), falling back to a plain ult
 * node.  Ult is not commutative, so there is no operand-swap pass.
 * Returns a fresh reference. */
static BtorNode *
rewrite_ult_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_regular_binary_bv_exp (btor, e0, e1));

  /* normalize_ult consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_ult (btor, &e0, &e1);

  result = check_rw_cache (
      btor, BTOR_BV_ULT_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    ADD_RW_RULE (const_binary_exp, BTOR_BV_ULT_NODE, e0, e1);
    ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_ULT_NODE, e0, e1);
    ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_ULT_NODE, e0, e1);
    ADD_RW_RULE (false_ult, e0, e1);
    ADD_RW_RULE (bool_ult, e0, e1);
    ADD_RW_RULE (concat_upper_ult, e0, e1);
    ADD_RW_RULE (concat_lower_ult, e0, e1);
    ADD_RW_RULE (bcond_ult, e0, e1);

    /* falling through here means no rule applied (rules jump to DONE) */
    assert (!result);
    if (!result)
    {
      result = btor_node_create_bv_ult (btor, e0, e1);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_ULT_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6624
/* Rewrite a bit-vector AND e0 & e1: normalize the operands, consult the
 * rewrite cache, try the and rewrite rules, swap the operands once and
 * retry the order-sensitive rules, then fall back to a plain and node.
 * Each ADD_RW_RULE jumps to DONE with a result on success.  Returns a
 * fresh reference. */
static BtorNode *
rewrite_and_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  bool swap_ops = false;
  BtorNode *result = 0;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_regular_binary_bv_exp (btor, e0, e1));

  /* normalize_and consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_and (btor, &e0, &e1);

SWAP_OPERANDS:
  result = check_rw_cache (
      btor, BTOR_BV_AND_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* rules guarded by !swap_ops need only one pass; the rest are retried
     * after the operand swap */
    if (!swap_ops)
    {
      ADD_RW_RULE (const_binary_exp, BTOR_BV_AND_NODE, e0, e1);
      ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_AND_NODE, e0, e1);
      ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_AND_NODE, e0, e1);
      ADD_RW_RULE (idem1_and, e0, e1);
      ADD_RW_RULE (contr1_and, e0, e1);
      ADD_RW_RULE (contr2_and, e0, e1);
      ADD_RW_RULE (idem2_and, e0, e1);
      ADD_RW_RULE (comm_and, e0, e1);
      ADD_RW_RULE (bool_xnor_and, e0, e1);
      ADD_RW_RULE (resol1_and, e0, e1);
      ADD_RW_RULE (resol2_and, e0, e1);
      ADD_RW_RULE (ult_false_and, e0, e1);
      ADD_RW_RULE (ult_and, e0, e1);
      ADD_RW_RULE (contr_rec_and, e0, e1);
    }
    ADD_RW_RULE (subsum1_and, e0, e1);
    ADD_RW_RULE (subst1_and, e0, e1);
    ADD_RW_RULE (subst2_and, e0, e1);
    ADD_RW_RULE (subsum2_and, e0, e1);
    ADD_RW_RULE (subst3_and, e0, e1);
    ADD_RW_RULE (subst4_and, e0, e1);
    ADD_RW_RULE (contr3_and, e0, e1);
    ADD_RW_RULE (idem3_and, e0, e1);
    ADD_RW_RULE (const1_and, e0, e1);
    ADD_RW_RULE (const2_and, e0, e1);
    ADD_RW_RULE (concat_and, e0, e1);
    //  ADD_RW_RULE (push_ite_and, e0, e1);

    assert (!result);

    /* no result so far, swap operands */
    if (!swap_ops)
    {
      BTOR_SWAP (BtorNode *, e0, e1);
      swap_ops = true;
      goto SWAP_OPERANDS;
    }

    if (!result)
    {
      /* operands were swapped above, so (e1, e0) restores the original
       * post-normalization order */
      result = btor_node_create_bv_and (btor, e1, e0);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_AND_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6706
/* Rewrite a bit-vector ADD e0 + e1: normalize the operands, consult the
 * rewrite cache, try the add rewrite rules, swap the operands once and
 * retry the order-sensitive rules, then fall back to a plain add node.
 * Each ADD_RW_RULE jumps to DONE with a result on success.  Returns a
 * fresh reference. */
static BtorNode *
rewrite_add_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  bool swap_ops = false;
  BtorNode *result = 0;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_regular_binary_bv_exp (btor, e0, e1));

  /* normalize_add consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_add (btor, &e0, &e1);

SWAP_OPERANDS:
  result = check_rw_cache (
      btor, BTOR_BV_ADD_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* rules guarded by !swap_ops need only one pass; the rest are retried
     * after the operand swap */
    if (!swap_ops)
    {
      ADD_RW_RULE (const_binary_exp, BTOR_BV_ADD_NODE, e0, e1);
      ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_ADD_NODE, e0, e1);
      ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_ADD_NODE, e0, e1);
      ADD_RW_RULE (bool_add, e0, e1);
      ADD_RW_RULE (mult_add, e0, e1);
      ADD_RW_RULE (not_add, e0, e1);
      ADD_RW_RULE (bcond_add, e0, e1);
      ADD_RW_RULE (urem_add, e0, e1);
    }
    ADD_RW_RULE (neg_add, e0, e1);
    ADD_RW_RULE (zero_add, e0, e1);
    ADD_RW_RULE (const_lhs_add, e0, e1);
    ADD_RW_RULE (const_rhs_add, e0, e1);
    ADD_RW_RULE (const_neg_lhs_add, e0, e1);
    ADD_RW_RULE (const_neg_rhs_add, e0, e1);
    ADD_RW_RULE (push_ite_add, e0, e1);
    // TODO: enable when same bw shift is merged
    //ADD_RW_RULE (sll_add, e0, e1);

    assert (!result);

    /* no result so far, swap operands */
    if (!swap_ops)
    {
      BTOR_SWAP (BtorNode *, e0, e1);
      swap_ops = true;
      goto SWAP_OPERANDS;
    }

    if (!result)
    {
      /* operands were swapped above, so (e1, e0) restores the original
       * post-normalization order */
      result = btor_node_create_bv_add (btor, e1, e0);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_ADD_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6779
/* Rewrite a bit-vector MUL e0 * e1: normalize the operands, consult the
 * rewrite cache, try the mul rewrite rules, swap the operands once and
 * retry the order-sensitive rules, then fall back to a plain mul node.
 * Each ADD_RW_RULE jumps to DONE with a result on success.  Returns a
 * fresh reference. */
static BtorNode *
rewrite_mul_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  bool swap_ops = false;
  BtorNode *result = 0;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_regular_binary_bv_exp (btor, e0, e1));

  /* normalize_mul consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_mul (btor, &e0, &e1);

SWAP_OPERANDS:
  result = check_rw_cache (
      btor, BTOR_BV_MUL_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* rules guarded by !swap_ops need only one pass; the rest are retried
     * after the operand swap */
    if (!swap_ops)
    {
      ADD_RW_RULE (const_binary_exp, BTOR_BV_MUL_NODE, e0, e1);
      ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_MUL_NODE, e0, e1);
      ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_MUL_NODE, e0, e1);
      ADD_RW_RULE (bool_mul, e0, e1);
#if 0
      // TODO (ma): this increases mul nodes in the general case, needs restriction
      ADD_RW_RULE (bcond_mul, e0, e1);
#endif
    }
    ADD_RW_RULE (const_lhs_mul, e0, e1);
    ADD_RW_RULE (const_rhs_mul, e0, e1);
    ADD_RW_RULE (const_mul, e0, e1);
    ADD_RW_RULE (push_ite_mul, e0, e1);
    ADD_RW_RULE (sll_mul, e0, e1);
    ADD_RW_RULE (neg_mul, e0, e1);

    assert (!result);

    /* no result so far, swap operands */
    if (!swap_ops)
    {
      BTOR_SWAP (BtorNode *, e0, e1);
      swap_ops = true;
      goto SWAP_OPERANDS;
    }

    if (!result)
    {
      /* operands were swapped above, so (e1, e0) restores the original
       * post-normalization order */
      result = btor_node_create_bv_mul (btor, e1, e0);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_MUL_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6849
/* Rewrite an unsigned division e0 / e1: normalize the operands, consult
 * the rewrite cache, then try the udiv rewrite rules (each ADD_RW_RULE
 * jumps to DONE with a result on success), falling back to a plain udiv
 * node.  Not commutative, so there is no operand-swap pass.  Returns a
 * fresh reference. */
static BtorNode *
rewrite_udiv_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_regular_binary_bv_exp (btor, e0, e1));

  /* normalize_udiv consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_udiv (btor, &e0, &e1);

  result = check_rw_cache (
      btor, BTOR_BV_UDIV_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    // TODO what about non powers of 2, like divisor 3, which means that
    // some upper bits are 0 ...

    ADD_RW_RULE (const_binary_exp, BTOR_BV_UDIV_NODE, e0, e1);
    ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_UDIV_NODE, e0, e1);
    ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_UDIV_NODE, e0, e1);
    ADD_RW_RULE (bool_udiv, e0, e1);
    ADD_RW_RULE (power2_udiv, e0, e1);
    ADD_RW_RULE (one_udiv, e0, e1);
    ADD_RW_RULE (bcond_udiv, e0, e1);

    /* falling through here means no rule applied (rules jump to DONE) */
    assert (!result);
    if (!result)
    {
      result = btor_node_create_bv_udiv (btor, e0, e1);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_UDIV_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6901
/* Rewrite an unsigned remainder e0 % e1: normalize the operands, consult
 * the rewrite cache, then try the urem rewrite rules (each ADD_RW_RULE
 * jumps to DONE with a result on success), falling back to a plain urem
 * node.  Not commutative, so there is no operand-swap pass.  Returns a
 * fresh reference. */
static BtorNode *
rewrite_urem_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_regular_binary_bv_exp (btor, e0, e1));

  /* normalize_urem consumes its references, so work on copies */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_urem (btor, &e0, &e1);

  result = check_rw_cache (
      btor, BTOR_BV_UREM_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    // TODO do optimize for powers of two even AIGs do it as well !!!

    // TODO what about non powers of 2, like modulo 3, which means that
    // all but the last two bits are zero

    ADD_RW_RULE (const_binary_exp, BTOR_BV_UREM_NODE, e0, e1);
    ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_UREM_NODE, e0, e1);
    ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_UREM_NODE, e0, e1);
    ADD_RW_RULE (bool_urem, e0, e1);
    ADD_RW_RULE (zero_urem, e0, e1);

    /* falling through here means no rule applied (rules jump to DONE) */
    assert (!result);
    if (!result)
    {
      result = btor_node_create_bv_urem (btor, e0, e1);
    }
    else
    {
    DONE:
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_UREM_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
6953
/* Rewrite a bit-vector concatenation of 'e0' (upper part) and 'e1'
 * (lower part).
 *
 * Simplifies and normalizes both operands, consults the rewrite cache,
 * then tries the concat-specific rewrite rules; if none fires, a fresh
 * BTOR_BV_CONCAT_NODE is created.  Returns a node reference owned by
 * the caller.
 */
static BtorNode *
rewrite_concat_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_concat_exp (btor, e0, e1));

  /* Take local references: normalization may replace the operands, and
   * both are released unconditionally at the end. */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  normalize_concat (btor, &e0, &e1);

  /* Reuse a previously computed result for this (kind, e0, e1) triple. */
  result = check_rw_cache (
      btor, BTOR_BV_CONCAT_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (const_binary_exp, BTOR_BV_CONCAT_NODE, e0, e1);
    ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_CONCAT_NODE, e0, e1);
    ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_CONCAT_NODE, e0, e1);
    ADD_RW_RULE (const_concat, e0, e1);
    ADD_RW_RULE (slice_concat, e0, e1);
    ADD_RW_RULE (and_lhs_concat, e0, e1);
    ADD_RW_RULE (and_rhs_concat, e0, e1);

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the concat node directly. */
      result = btor_node_create_bv_concat (btor, e0, e1);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_CONCAT_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  assert (result);
  return result;
}
7002
/* Rewrite a logical shift left of 'e0' by 'e1' bits.
 *
 * Unlike the operand-normalizing rewriters (udiv/urem/concat/cond),
 * this one takes no local operand copies -- no normalization step is
 * performed and no releases are needed.  Returns a node reference owned
 * by the caller.
 */
static BtorNode *
rewrite_sll_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_shift_exp (btor, e0, e1));

  /* Reuse a previously computed result for this (kind, e0, e1) triple. */
  result = check_rw_cache (
      btor, BTOR_BV_SLL_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (const_binary_exp, BTOR_BV_SLL_NODE, e0, e1);
    ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_SLL_NODE, e0, e1);
    ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_SLL_NODE, e0, e1);
    ADD_RW_RULE (const_sll, e0, e1);

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the shift node directly. */
      result = btor_node_create_bv_sll (btor, e0, e1);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_SLL_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  assert (result);
  return result;
}
7042
/* Rewrite a logical shift right of 'e0' by 'e1' bits.
 *
 * Mirrors rewrite_sll_exp: no operand copies, no normalization, no
 * releases.  Returns a node reference owned by the caller.
 */
static BtorNode *
rewrite_srl_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_shift_exp (btor, e0, e1));

  /* Reuse a previously computed result for this (kind, e0, e1) triple. */
  result = check_rw_cache (
      btor, BTOR_BV_SRL_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (const_binary_exp, BTOR_BV_SRL_NODE, e0, e1);
    ADD_RW_RULE (special_const_lhs_binary_exp, BTOR_BV_SRL_NODE, e0, e1);
    ADD_RW_RULE (special_const_rhs_binary_exp, BTOR_BV_SRL_NODE, e0, e1);
    ADD_RW_RULE (const_srl, e0, e1);
    /* zero_srl rule currently disabled. */
    //ADD_RW_RULE (zero_srl, e0, e1);

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the shift node directly. */
      result = btor_node_create_bv_srl (btor, e0, e1);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_BV_SRL_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  assert (result);
  return result;
}
7083
/* Rewrite a function application: 'e0' applied to argument tuple 'e1'.
 *
 * Tries lambda/apply propagation rules (e.g. beta reduction variants);
 * falls back to creating a fresh BTOR_APPLY_NODE.  No operand copies or
 * normalization are performed.  Returns a node reference owned by the
 * caller.
 */
static BtorNode *
rewrite_apply_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  assert (btor_dbg_precond_apply_exp (btor, e0, e1));

  /* Reuse a previously computed result for this (kind, e0, e1) triple. */
  result = check_rw_cache (
      btor, BTOR_APPLY_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (const_lambda_apply, e0, e1);
    ADD_RW_RULE (param_lambda_apply, e0, e1);
    ADD_RW_RULE (apply_apply, e0, e1);
    ADD_RW_RULE (prop_apply_lambda, e0, e1);
    ADD_RW_RULE (prop_apply_update, e0, e1);

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the apply node directly. */
      result = btor_node_create_apply (btor, e0, e1);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_APPLY_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  assert (result);
  return result;
}
7124
7125 static BtorNode *
rewrite_lambda_exp(Btor * btor,BtorNode * e0,BtorNode * e1)7126 rewrite_lambda_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
7127 {
7128 BtorNode *result = 0;
7129
7130 e0 = btor_simplify_exp (btor, e0);
7131 e1 = btor_simplify_exp (btor, e1);
7132
7133 // Note: Rewrite caching not needed since no rules applied
7134
7135 // FIXME: this rule may yield lambdas with differents sorts (in case of
7136 // curried
7137 // lambdas)
7138 // ADD_RW_RULE (lambda_lambda, e0, e1);
7139
7140 assert (!result);
7141 result = btor_node_create_lambda (btor, e0, e1);
7142 // DONE:
7143 assert (result);
7144 return result;
7145 }
7146
/* Rewrite a universal quantifier binding variable 'e0' over body 'e1'.
 *
 * Tries quantifier simplification rules; falls back to creating a fresh
 * BTOR_FORALL_NODE.  Returns a node reference owned by the caller.
 */
static BtorNode *
rewrite_forall_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);

  /* Reuse a previously computed result for this (kind, e0, e1) triple. */
  result = check_rw_cache (
      btor, BTOR_FORALL_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (const_quantifier, e0, e1);
    ADD_RW_RULE (eq_forall, e0, e1);
    /* param_free_forall rule currently disabled. */
    // ADD_RW_RULE (param_free_forall, e0, e1);

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the quantifier node directly. */
      result = btor_node_create_forall (btor, e0, e1);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_FORALL_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  assert (result);
  return result;
}
7184
/* Rewrite an existential quantifier binding variable 'e0' over body
 * 'e1'.  Mirrors rewrite_forall_exp with the exists-specific rules.
 * Returns a node reference owned by the caller.
 */
static BtorNode *
rewrite_exists_exp (Btor *btor, BtorNode *e0, BtorNode *e1)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);

  /* Reuse a previously computed result for this (kind, e0, e1) triple. */
  result = check_rw_cache (
      btor, BTOR_EXISTS_NODE, btor_node_get_id (e0), btor_node_get_id (e1), 0);

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (const_quantifier, e0, e1);
    ADD_RW_RULE (eq_exists, e0, e1);
    /* param_free_exists rule currently disabled. */
    // ADD_RW_RULE (param_free_exists, e0, e1);

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the quantifier node directly. */
      result = btor_node_create_exists (btor, e0, e1);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_EXISTS_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         0,
                         btor_node_get_id (result));
    }
  }

  assert (result);
  return result;
}
7222
/* Rewrite an if-then-else: condition 'e0', then-branch 'e1', else-branch
 * 'e2'.  Works for both bit-vector and function (array) conditionals;
 * the bv-only rules are guarded by !btor_node_is_fun (e1).
 * Returns a node reference owned by the caller.
 */
static BtorNode *
rewrite_cond_exp (Btor *btor, BtorNode *e0, BtorNode *e1, BtorNode *e2)
{
  BtorNode *result = 0;

  /* Chase proxies so we work on the current simplified operands. */
  e0 = btor_simplify_exp (btor, e0);
  e1 = btor_simplify_exp (btor, e1);
  e2 = btor_simplify_exp (btor, e2);
  assert (btor_dbg_precond_cond_exp (btor, e0, e1, e2));

  /* Take local references: normalization may replace the operands, and
   * all three are released unconditionally at the end. */
  e0 = btor_node_copy (btor, e0);
  e1 = btor_node_copy (btor, e1);
  e2 = btor_node_copy (btor, e2);
  normalize_cond (btor, &e0, &e1, &e2);
  /* Invariant established by normalize_cond: the condition is a regular
   * (non-inverted) node. */
  assert (btor_node_is_regular (e0));

  /* Reuse a previously computed result for this (kind, e0, e1, e2) tuple. */
  result = check_rw_cache (btor,
                           BTOR_COND_NODE,
                           btor_node_get_id (e0),
                           btor_node_get_id (e1),
                           btor_node_get_id (e2));

  if (!result)
  {
    /* Each ADD_RW_RULE tries one rule; on success it sets 'result' and
     * jumps to DONE below (see the ADD_RW_RULE macro definition). */
    ADD_RW_RULE (equal_branches_cond, e0, e1, e2);
    ADD_RW_RULE (const_cond, e0, e1, e2);
    ADD_RW_RULE (cond_if_dom_cond, e0, e1, e2);
    ADD_RW_RULE (cond_if_merge_if_cond, e0, e1, e2);
    ADD_RW_RULE (cond_if_merge_else_cond, e0, e1, e2);
    ADD_RW_RULE (cond_else_dom_cond, e0, e1, e2);
    ADD_RW_RULE (cond_else_merge_if_cond, e0, e1, e2);
    ADD_RW_RULE (cond_else_merge_else_cond, e0, e1, e2);
    // TODO (ma): check if more rules can be applied for ite on bv and funs
    if (!btor_node_is_fun (e1))
    {
      /* Bit-vector-only rules; not applicable to function conditionals. */
      ADD_RW_RULE (bool_cond, e0, e1, e2);
      ADD_RW_RULE (add_if_cond, e0, e1, e2);
      ADD_RW_RULE (add_else_cond, e0, e1, e2);
      ADD_RW_RULE (concat_cond, e0, e1, e2);
      ADD_RW_RULE (op_lhs_cond, e0, e1, e2);
      ADD_RW_RULE (op_rhs_cond, e0, e1, e2);
      ADD_RW_RULE (comm_op_1_cond, e0, e1, e2);
      ADD_RW_RULE (comm_op_2_cond, e0, e1, e2);
    }

    assert (!result);
    if (!result)
    {
      /* No rule applied: build the cond node directly. */
      result = btor_node_create_cond (btor, e0, e1, e2);
    }
    else
    {
    DONE:
      /* Reached from ADD_RW_RULE on success; cache the rewrite. */
      btor_rw_cache_add (btor->rw_cache,
                         BTOR_COND_NODE,
                         btor_node_get_id (e0),
                         btor_node_get_id (e1),
                         btor_node_get_id (e2),
                         btor_node_get_id (result));
    }
  }
  btor_node_release (btor, e0);
  btor_node_release (btor, e1);
  btor_node_release (btor, e2);
  assert (result);
  return result;
}
7290
7291 /* -------------------------------------------------------------------------- */
7292 /* api function */
7293
7294 BtorNode *
btor_rewrite_slice_exp(Btor * btor,BtorNode * exp,uint32_t upper,uint32_t lower)7295 btor_rewrite_slice_exp (Btor *btor,
7296 BtorNode *exp,
7297 uint32_t upper,
7298 uint32_t lower)
7299 {
7300 assert (btor);
7301 assert (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 0);
7302
7303 BtorNode *res;
7304 double start = btor_util_time_stamp ();
7305 res = rewrite_slice_exp (btor, exp, upper, lower);
7306 btor->time.rewrite += btor_util_time_stamp () - start;
7307 return res;
7308 }
7309
7310 BtorNode *
btor_rewrite_binary_exp(Btor * btor,BtorNodeKind kind,BtorNode * e0,BtorNode * e1)7311 btor_rewrite_binary_exp (Btor *btor,
7312 BtorNodeKind kind,
7313 BtorNode *e0,
7314 BtorNode *e1)
7315 {
7316 assert (btor);
7317 assert (kind);
7318 assert (e0);
7319 assert (e1);
7320 assert (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 0);
7321
7322 BtorNode *result;
7323 double start = btor_util_time_stamp ();
7324
7325 switch (kind)
7326 {
7327 case BTOR_FUN_EQ_NODE:
7328 case BTOR_BV_EQ_NODE: result = rewrite_eq_exp (btor, e0, e1); break;
7329
7330 case BTOR_BV_ULT_NODE: result = rewrite_ult_exp (btor, e0, e1); break;
7331
7332 case BTOR_BV_AND_NODE: result = rewrite_and_exp (btor, e0, e1); break;
7333
7334 case BTOR_BV_ADD_NODE: result = rewrite_add_exp (btor, e0, e1); break;
7335
7336 case BTOR_BV_MUL_NODE: result = rewrite_mul_exp (btor, e0, e1); break;
7337
7338 case BTOR_BV_UDIV_NODE: result = rewrite_udiv_exp (btor, e0, e1); break;
7339
7340 case BTOR_BV_UREM_NODE: result = rewrite_urem_exp (btor, e0, e1); break;
7341
7342 case BTOR_BV_CONCAT_NODE: result = rewrite_concat_exp (btor, e0, e1); break;
7343
7344 case BTOR_BV_SLL_NODE: result = rewrite_sll_exp (btor, e0, e1); break;
7345
7346 case BTOR_BV_SRL_NODE: result = rewrite_srl_exp (btor, e0, e1); break;
7347
7348 case BTOR_APPLY_NODE: result = rewrite_apply_exp (btor, e0, e1); break;
7349
7350 case BTOR_FORALL_NODE: result = rewrite_forall_exp (btor, e0, e1); break;
7351
7352 case BTOR_EXISTS_NODE: result = rewrite_exists_exp (btor, e0, e1); break;
7353
7354 default:
7355 assert (kind == BTOR_LAMBDA_NODE);
7356 result = rewrite_lambda_exp (btor, e0, e1);
7357 }
7358
7359 btor->time.rewrite += btor_util_time_stamp () - start;
7360 return result;
7361 }
7362
7363 BtorNode *
btor_rewrite_ternary_exp(Btor * btor,BtorNodeKind kind,BtorNode * e0,BtorNode * e1,BtorNode * e2)7364 btor_rewrite_ternary_exp (
7365 Btor *btor, BtorNodeKind kind, BtorNode *e0, BtorNode *e1, BtorNode *e2)
7366 {
7367 assert (btor);
7368 assert (kind);
7369 assert (kind == BTOR_COND_NODE);
7370 assert (e0);
7371 assert (e1);
7372 assert (e2);
7373 assert (btor_opt_get (btor, BTOR_OPT_REWRITE_LEVEL) > 0);
7374 (void) kind;
7375
7376 BtorNode *res;
7377 double start = btor_util_time_stamp ();
7378 res = rewrite_cond_exp (btor, e0, e1, e2);
7379 btor->time.rewrite += btor_util_time_stamp () - start;
7380 return res;
7381 }
7382