/* SSA-PRE for trees.
   Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
   <stevenb@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "obstack.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "tree-ssa-sccvn.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "dbgcnt.h"

/* TODO:

   1. Avail sets can be shared by making an avail_find_leader that
      walks up the dominator tree and looks in those avail sets.
      This might affect code optimality, it's unclear right now.
   2. Strength reduction can be performed by anticipating expressions
      we can repair later on.
   3. We can do back-substitution or smarter value numbering to catch
      commutative expressions split up over multiple statements.
*/

/* For ease of terminology, "expression node" in the below refers to
   every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
   represent the actual statement containing the expressions we care about,
   and we cache the value number by putting it in the expression.  */

/* Basic algorithm

   First we walk the statements to generate the AVAIL sets, the
   EXP_GEN sets, and the tmp_gen sets.  EXP_GEN sets represent the
   generation of values/expressions by a given block.  We use them
   when computing the ANTIC sets.  The AVAIL sets consist of
   SSA_NAME's that represent values, so we know what values are
   available in what blocks.  AVAIL is a forward dataflow problem.  In
   SSA, values are never killed, so we don't need a kill set, or a
   fixpoint iteration, in order to calculate the AVAIL sets.  In
   traditional parlance, AVAIL sets tell us the downsafety of the
   expressions/values.

   Next, we generate the ANTIC sets.  These sets represent the
   anticipatable expressions.  ANTIC is a backwards dataflow
   problem.  An expression is anticipatable in a given block if it could
   be generated in that block.  This means that if we had to perform
   an insertion in that block, of the value of that expression, we
   could.  Calculating the ANTIC sets requires phi translation of
   expressions, because the flow goes backwards through phis.  We must
   iterate to a fixpoint of the ANTIC sets, because we have a kill
   set.  Even in SSA form, values are not live over the entire
   function, only from their definition point onwards.  So we have to
   remove values from the ANTIC set once we go past the definition
   point of the leaders that make them up.
   compute_antic/compute_antic_aux performs this computation.

   Third, we perform insertions to make partially redundant
   expressions fully redundant.

   An expression is partially redundant (excluding partial
   anticipation) if:

   1. It is AVAIL in some, but not all, of the predecessors of a
      given block.
   2. It is ANTIC in all the predecessors.

   In order to make it fully redundant, we insert the expression into
   the predecessors where it is not available, but is ANTIC.

   For the partial anticipation case, we only perform insertion if it
   is partially anticipated in some block, and fully available in all
   of the predecessors.

   insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
   performs these steps.

   Fourth, we eliminate fully redundant expressions.
   This is a simple statement walk that replaces redundant
   calculations with the now available values.  */
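
/* As an illustrative sketch of the above (hypothetical GIMPLE, names
   purely for illustration):

       if (p_1)                      if (p_1)
         x_2 = a_3 + b_4;              x_2 = a_3 + b_4;
       else                  =>      else
         ;                             pretmp_7 = a_3 + b_4;
       <join>                        <join>
       y_5 = a_3 + b_4;              x_6 = PHI <x_2, pretmp_7>
                                     y_5 = x_6;

   The computation of a_3 + b_4 in the join is partially redundant: its
   value is AVAIL coming from the then-arm but not from the else-arm,
   while the expression is ANTIC in both predecessors.  Insertion adds
   the computation to the else-arm and a PHI merges the two, after which
   the join's computation is fully redundant and elimination replaces it
   with the PHI result.  */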

/* Representations of value numbers:

   Value numbers are represented by a representative SSA_NAME.  We
   will create fake SSA_NAME's in situations where we need a
   representative but do not have one (because it is a complex
   expression).  In order to facilitate storing the value numbers in
   bitmaps, and keep the number of wasted SSA_NAME's down, we also
   associate a value_id with each value number, and create full blown
   ssa_name's only where we actually need them (IE in operands of
   existing expressions).

   Theoretically you could replace all the value_id's with
   SSA_NAME_VERSION, but this would allocate a large number of
   SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
   It would also require an additional indirection at each point we
   use the value id.  */

/* Representation of expressions on value numbers:

   Expressions consisting of value numbers are represented the same
   way as our VN internally represents them, with an additional
   "pre_expr" wrapping around them in order to facilitate storing all
   of the expressions in the same sets.  */

/* Representation of sets:

   The dataflow sets do not need to be sorted in any particular order
   for the majority of their lifetime, and are simply represented as two
   bitmaps, one that keeps track of values present in the set, and one
   that keeps track of expressions present in the set.

   When we need them in topological order, we produce it on demand by
   transforming the bitmap into an array and sorting it into topo
   order.  */
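
/* For illustration (hypothetical ids): if a set contains the
   expressions a_1 + b_2 (expression id 14, value id 5) and x_3
   (expression id 9, also value id 5), it is stored as the expression
   bitmap { 9, 14 } plus the value bitmap { 5 }.  Membership of a value
   and membership of a particular expression can thus each be tested
   with a single bit test.  */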

/* Type of expression, used to know which member of the PRE_EXPR union
   is valid.  */

enum pre_expr_kind
{
    NAME,
    NARY,
    REFERENCE,
    CONSTANT
};

typedef union pre_expr_union_d
{
  tree name;
  tree constant;
  vn_nary_op_t nary;
  vn_reference_t reference;
} pre_expr_union;

typedef struct pre_expr_d
{
  enum pre_expr_kind kind;
  unsigned int id;
  pre_expr_union u;
} *pre_expr;

#define PRE_EXPR_NAME(e) (e)->u.name
#define PRE_EXPR_NARY(e) (e)->u.nary
#define PRE_EXPR_REFERENCE(e) (e)->u.reference
#define PRE_EXPR_CONSTANT(e) (e)->u.constant

static int
pre_expr_eq (const void *p1, const void *p2)
{
  const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
  const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;

  if (e1->kind != e2->kind)
    return false;

  switch (e1->kind)
    {
    case CONSTANT:
      return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
                                       PRE_EXPR_CONSTANT (e2));
    case NAME:
      return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
    case NARY:
      return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
    case REFERENCE:
      return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
                              PRE_EXPR_REFERENCE (e2));
    default:
      gcc_unreachable ();
    }
}

static hashval_t
pre_expr_hash (const void *p1)
{
  const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
  switch (e->kind)
    {
    case CONSTANT:
      return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
    case NAME:
      return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
    case NARY:
      return PRE_EXPR_NARY (e)->hashcode;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->hashcode;
    default:
      gcc_unreachable ();
    }
}


/* Next global expression id number.  */
static unsigned int next_expression_id;

/* Mapping from expression to id number we can use in bitmap sets.  */
DEF_VEC_P (pre_expr);
DEF_VEC_ALLOC_P (pre_expr, heap);
static VEC(pre_expr, heap) *expressions;
static htab_t expression_to_id;
static VEC(unsigned, heap) *name_to_id;

/* Allocate an expression id for EXPR.  */

static inline unsigned int
alloc_expression_id (pre_expr expr)
{
  void **slot;
  /* Make sure we won't overflow.  */
  gcc_assert (next_expression_id + 1 > next_expression_id);
  expr->id = next_expression_id++;
  VEC_safe_push (pre_expr, heap, expressions, expr);
  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      /* VEC_safe_grow_cleared allocates no headroom.  Avoid frequent
         re-allocations by using VEC_reserve upfront.  There is no
         VEC_quick_grow_cleared unfortunately.  */
      VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
      VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
      gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
      VEC_replace (unsigned, name_to_id, version, expr->id);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, INSERT);
      gcc_assert (!*slot);
      *slot = expr;
    }
  return next_expression_id - 1;
}

/* Return the expression id for tree EXPR.  */

static inline unsigned int
get_expression_id (const pre_expr expr)
{
  return expr->id;
}

static inline unsigned int
lookup_expression_id (const pre_expr expr)
{
  void **slot;

  if (expr->kind == NAME)
    {
      unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
      if (VEC_length (unsigned, name_to_id) <= version)
        return 0;
      return VEC_index (unsigned, name_to_id, version);
    }
  else
    {
      slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
      if (!slot)
        return 0;
      return ((pre_expr)*slot)->id;
    }
}

/* Return the existing expression id for EXPR, or create one if one
   does not exist yet.  */

static inline unsigned int
get_or_alloc_expression_id (pre_expr expr)
{
  unsigned int id = lookup_expression_id (expr);
  if (id == 0)
    return alloc_expression_id (expr);
  return expr->id = id;
}

/* Return the expression that has expression id ID.  */

static inline pre_expr
expression_for_id (unsigned int id)
{
  return VEC_index (pre_expr, expressions, id);
}

/* Free the expression id field in all of our expressions,
   and then destroy the expressions array.  */

static void
clear_expression_ids (void)
{
  VEC_free (pre_expr, heap, expressions);
}

static alloc_pool pre_expr_pool;

/* Given an SSA_NAME NAME, get or create a pre_expr to represent it.  */

static pre_expr
get_or_alloc_expr_for_name (tree name)
{
  struct pre_expr_d expr;
  pre_expr result;
  unsigned int result_id;

  expr.kind = NAME;
  expr.id = 0;
  PRE_EXPR_NAME (&expr) = name;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  result = (pre_expr) pool_alloc (pre_expr_pool);
  result->kind = NAME;
  PRE_EXPR_NAME (result) = name;
  alloc_expression_id (result);
  return result;
}

static bool in_fre = false;

/* An unordered bitmap set.  One bitmap tracks values, the other,
   expressions.  */
typedef struct bitmap_set
{
  bitmap_head expressions;
  bitmap_head values;
} *bitmap_set_t;

#define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))

#define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
  EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))

/* Mapping from value id to expressions with that value_id.  */
DEF_VEC_P (bitmap_set_t);
DEF_VEC_ALLOC_P (bitmap_set_t, heap);
static VEC(bitmap_set_t, heap) *value_expressions;

/* Sets that we need to keep track of.  */
typedef struct bb_bitmap_sets
{
  /* The EXP_GEN set, which represents expressions/values generated in
     a basic block.  */
  bitmap_set_t exp_gen;

  /* The PHI_GEN set, which represents PHI results generated in a
     basic block.  */
  bitmap_set_t phi_gen;

  /* The TMP_GEN set, which represents results/temporaries generated
     in a basic block.  IE the LHS of an expression.  */
  bitmap_set_t tmp_gen;

  /* The AVAIL_OUT set, which represents which values are available in
     a given basic block.  */
  bitmap_set_t avail_out;

  /* The ANTIC_IN set, which represents which values are anticipatable
     in a given basic block.  */
  bitmap_set_t antic_in;

  /* The PA_IN set, which represents which values are
     partially anticipatable in a given basic block.  */
  bitmap_set_t pa_in;

  /* The NEW_SETS set, which is used during insertion to augment the
     AVAIL_OUT set of blocks with the new insertions performed during
     the current iteration.  */
  bitmap_set_t new_sets;

  /* A cache for value_dies_in_block_x.  */
  bitmap expr_dies;

  /* True if we have visited this block during ANTIC calculation.  */
  unsigned int visited : 1;

  /* True if we have deferred processing this block during ANTIC
     calculation until its successor is processed.  */
  unsigned int deferred : 1;

  /* True when the block contains a call that might not return.  */
  unsigned int contains_may_not_return_call : 1;
} *bb_value_sets_t;

#define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
#define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
#define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
#define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
#define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
#define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
#define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
#define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
#define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
#define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
#define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call


/* Basic block list in postorder.  */
static int *postorder;

/* This structure is used to keep track of statistics on what
   optimization PRE was able to perform.  */
static struct
{
  /* The number of RHS computations eliminated by PRE.  */
  int eliminations;

  /* The number of new expressions/temporaries generated by PRE.  */
  int insertions;

  /* The number of inserts found due to partial anticipation.  */
  int pa_insert;

  /* The number of new PHI nodes added by PRE.  */
  int phis;

  /* The number of values found constant.  */
  int constified;

} pre_stats;

static bool do_partial_partial;
static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
                                      unsigned int, bool);
static bitmap_set_t bitmap_set_new (void);
static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
                                         gimple, tree);
static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
                                         gimple);
static unsigned int get_expr_value_id (pre_expr);

/* We can add and remove elements and entries to and from sets
   and hash tables, so we use alloc pools for them.  */

static alloc_pool bitmap_set_pool;
static bitmap_obstack grand_bitmap_obstack;

/* To avoid adding 300 temporary variables when we only need one, we
   only create one temporary variable, on demand, and build ssa names
   off that.  We do have to change the variable if the types don't
   match the current variable's type.  */
static tree pretemp;
static tree storetemp;
static tree prephitemp;

/* Set of blocks with statements that have had their EH properties changed.  */
static bitmap need_eh_cleanup;

/* Set of blocks with statements that have had their AB properties changed.  */
static bitmap need_ab_cleanup;

/* The phi_translate_table caches phi translations for a given
   expression and predecessor.  */

static htab_t phi_translate_table;

/* A three tuple {e, pred, v} used to cache phi translations in the
   phi_translate_table.  */

typedef struct expr_pred_trans_d
{
  /* The expression.  */
  pre_expr e;

  /* The predecessor block along which we translated the expression.  */
  basic_block pred;

  /* The value that resulted from the translation.  */
  pre_expr v;

  /* The hashcode for the expression, pred pair.  This is cached for
     speed reasons.  */
  hashval_t hashcode;
} *expr_pred_trans_t;
typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;

/* Return the hash value for a phi translation table entry.  */

static hashval_t
expr_pred_trans_hash (const void *p)
{
  const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
  return ve->hashcode;
}

/* Return true if two phi translation table entries are the same.
   P1 and P2 should point to the expr_pred_trans_t's to be compared.  */

static int
expr_pred_trans_eq (const void *p1, const void *p2)
{
  const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
  const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
  basic_block b1 = ve1->pred;
  basic_block b2 = ve2->pred;

  /* If they are not translations for the same basic block, they can't
     be equal.  */
  if (b1 != b2)
    return false;
  return pre_expr_eq (ve1->e, ve2->e);
}

/* Search in the phi translation table for the translation of
   expression E in basic block PRED.
   Return the translated value, if found, NULL otherwise.  */

static inline pre_expr
phi_trans_lookup (pre_expr e, basic_block pred)
{
  void **slot;
  struct expr_pred_trans_d ept;

  ept.e = e;
  ept.pred = pred;
  ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
  slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
                                   NO_INSERT);
  if (!slot)
    return NULL;
  else
    return ((expr_pred_trans_t) *slot)->v;
}


/* Add the tuple mapping from {expression E, basic block PRED} to
   value V, to the phi translation table.  */

static inline void
phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
{
  void **slot;
  expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
  new_pair->e = e;
  new_pair->pred = pred;
  new_pair->v = v;
  new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
                                                 pred->index);

  slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
                                   new_pair->hashcode, INSERT);
  free (*slot);
  *slot = (void *) new_pair;
}

/* Add expression E to the expression set of value id V.  */

void
add_to_value (unsigned int v, pre_expr e)
{
  bitmap_set_t set;

  gcc_assert (get_expr_value_id (e) == v);

  if (v >= VEC_length (bitmap_set_t, value_expressions))
    {
      VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                             v + 1);
    }

  set = VEC_index (bitmap_set_t, value_expressions, v);
  if (!set)
    {
      set = bitmap_set_new ();
      VEC_replace (bitmap_set_t, value_expressions, v, set);
    }

  bitmap_insert_into_set_1 (set, e, v, true);
}

/* Create a new bitmap set and return it.  */

static bitmap_set_t
bitmap_set_new (void)
{
  bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
  bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
  bitmap_initialize (&ret->values, &grand_bitmap_obstack);
  return ret;
}

/* Return the value id for a PRE expression EXPR.  */

static unsigned int
get_expr_value_id (pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      {
        unsigned int id;
        id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
        if (id == 0)
          {
            id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
            add_to_value (id, expr);
          }
        return id;
      }
    case NAME:
      return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
    case NARY:
      return PRE_EXPR_NARY (expr)->value_id;
    case REFERENCE:
      return PRE_EXPR_REFERENCE (expr)->value_id;
    default:
      gcc_unreachable ();
    }
}

/* Remove an expression EXPR from a bitmapped set.  */

static void
bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);
  if (!value_id_constant_p (val))
    {
      bitmap_clear_bit (&set->values, val);
      bitmap_clear_bit (&set->expressions, get_expression_id (expr));
    }
}

static void
bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
                          unsigned int val, bool allow_constants)
{
  if (allow_constants || !value_id_constant_p (val))
    {
      /* We specifically expect this and only this function to be able to
         insert constants into a set.  */
      bitmap_set_bit (&set->values, val);
      bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
    }
}

/* Insert an expression EXPR into a bitmapped set.  */

static void
bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
}

/* Copy a bitmapped set ORIG, into bitmapped set DEST.  */

static void
bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_copy (&dest->expressions, &orig->expressions);
  bitmap_copy (&dest->values, &orig->values);
}


/* Free memory used up by SET.  */
static void
bitmap_set_free (bitmap_set_t set)
{
  bitmap_clear (&set->expressions);
  bitmap_clear (&set->values);
}


/* Generate a topologically-ordered array from bitmap set SET.  */

static VEC(pre_expr, heap) *
sorted_array_from_bitmap_set (bitmap_set_t set)
{
  unsigned int i, j;
  bitmap_iterator bi, bj;
  VEC(pre_expr, heap) *result;

  /* Pre-allocate roughly enough space for the array.  */
  result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));

  FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
    {
      /* The number of expressions having a given value is usually
         relatively small.  Thus, rather than making a vector of all
         the expressions and sorting it by value-id, we walk the values
         and check in the reverse mapping that tells us what expressions
         have a given value, to filter those in our set.  As a result,
         the expressions are inserted in value-id order, which means
         topological order.

         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on the set size.  */
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
      FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
        {
          if (bitmap_bit_p (&set->expressions, j))
            VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
        }
    }

  return result;
}
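
/* Value-id order gives topological order because value numbering hands
   out value ids as values are first computed, so the operands of an
   expression have smaller value ids than the expression itself; e.g.
   (illustrative ids only) a_1 (value 1) and b_2 (value 2) sort before
   a_1 + b_2 (value 3), so leaders for operands are seen before the
   expressions that use them.  */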

/* Perform bitmapped set operation DEST &= ORIG.  */

static void
bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_iterator bi;
  unsigned int i;

  if (dest != orig)
    {
      bitmap_head temp;
      bitmap_initialize (&temp, &grand_bitmap_obstack);

      bitmap_and_into (&dest->values, &orig->values);
      bitmap_copy (&temp, &dest->expressions);
      EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          unsigned int value_id = get_expr_value_id (expr);
          if (!bitmap_bit_p (&dest->values, value_id))
            bitmap_clear_bit (&dest->expressions, i);
        }
      bitmap_clear (&temp);
    }
}

/* Subtract all values and expressions contained in ORIG from DEST.  */

static bitmap_set_t
bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
{
  bitmap_set_t result = bitmap_set_new ();
  bitmap_iterator bi;
  unsigned int i;

  bitmap_and_compl (&result->expressions, &dest->expressions,
                    &orig->expressions);

  FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      unsigned int value_id = get_expr_value_id (expr);
      bitmap_set_bit (&result->values, value_id);
    }

  return result;
}

/* Subtract all the values in bitmap set B from bitmap set A.  */

static void
bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
{
  unsigned int i;
  bitmap_iterator bi;
  bitmap_head temp;

  bitmap_initialize (&temp, &grand_bitmap_obstack);

  bitmap_copy (&temp, &a->expressions);
  EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
    {
      pre_expr expr = expression_for_id (i);
      if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
        bitmap_remove_from_set (a, expr);
    }
  bitmap_clear (&temp);
}


/* Return true if bitmapped set SET contains the value VALUE_ID.  */

static bool
bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
{
  if (value_id_constant_p (value_id))
    return true;

  if (!set || bitmap_empty_p (&set->expressions))
    return false;

  return bitmap_bit_p (&set->values, value_id);
}

static inline bool
bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
{
  return bitmap_bit_p (&set->expressions, get_expression_id (expr));
}

/* Replace an instance of value LOOKFOR with expression EXPR in SET.  */

static void
bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
                          const pre_expr expr)
{
  bitmap_set_t exprset;
  unsigned int i;
  bitmap_iterator bi;

  if (value_id_constant_p (lookfor))
    return;

  if (!bitmap_set_contains_value (set, lookfor))
    return;

  /* The number of expressions having a given value is usually
     significantly less than the total number of expressions in SET.
     Thus, rather than check, for each expression in SET, whether it
     has the value LOOKFOR, we walk the reverse mapping that tells us
     what expressions have a given value, and see if any of those
     expressions are in our set.  For large testcases, this is about
     5-10x faster than walking the bitmap.  If this is somehow a
     significant loss for some cases, we can choose which set to walk
     based on the set size.  */
  exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
  FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
    {
      if (bitmap_clear_bit (&set->expressions, i))
        {
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }
    }
}

/* Return true if two bitmap sets are equal.  */

static bool
bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
{
  return bitmap_equal_p (&a->values, &b->values);
}

/* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
   and add it otherwise.  */

static void
bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  if (bitmap_set_contains_value (set, val))
    bitmap_set_replace_value (set, val, expr);
  else
    bitmap_insert_into_set (set, expr);
}

/* Insert EXPR into SET if EXPR's value is not already present in
   SET.  */

static void
bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
{
  unsigned int val = get_expr_value_id (expr);

  gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));

  /* Constant values are always considered to be part of the set.  */
  if (value_id_constant_p (val))
    return;

  /* If the value membership changed, add the expression.  */
  if (bitmap_set_bit (&set->values, val))
    bitmap_set_bit (&set->expressions, expr->id);
}

/* Print out EXPR to outfile.  */

static void
print_pre_expr (FILE *outfile, const pre_expr expr)
{
  switch (expr->kind)
    {
    case CONSTANT:
      print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
      break;
    case NAME:
      print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
      break;
    case NARY:
      {
        unsigned int i;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
        for (i = 0; i < nary->length; i++)
          {
            print_generic_expr (outfile, nary->op[i], 0);
            if (i != (unsigned) nary->length - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
      }
      break;

    case REFERENCE:
      {
        vn_reference_op_t vro;
        unsigned int i;
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        fprintf (outfile, "{");
        for (i = 0;
             VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
             i++)
          {
            bool closebrace = false;
            if (vro->opcode != SSA_NAME
                && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
              {
                fprintf (outfile, "%s", tree_code_name [vro->opcode]);
                if (vro->op0)
                  {
                    fprintf (outfile, "<");
                    closebrace = true;
                  }
              }
            if (vro->op0)
              {
                print_generic_expr (outfile, vro->op0, 0);
                if (vro->op1)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op1, 0);
                  }
                if (vro->op2)
                  {
                    fprintf (outfile, ",");
                    print_generic_expr (outfile, vro->op2, 0);
                  }
              }
            if (closebrace)
              fprintf (outfile, ">");
            if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
              fprintf (outfile, ",");
          }
        fprintf (outfile, "}");
        if (ref->vuse)
          {
            fprintf (outfile, "@");
            print_generic_expr (outfile, ref->vuse, 0);
          }
      }
      break;
    }
}
void debug_pre_expr (pre_expr);

/* Like print_pre_expr but always prints to stderr.  */
DEBUG_FUNCTION void
debug_pre_expr (pre_expr e)
{
  print_pre_expr (stderr, e);
  fprintf (stderr, "\n");
}

/* Print out SET to OUTFILE.  */

static void
print_bitmap_set (FILE *outfile, bitmap_set_t set,
                  const char *setname, int blockindex)
{
  fprintf (outfile, "%s[%d] := { ", setname, blockindex);
  if (set)
    {
      bool first = true;
      unsigned i;
      bitmap_iterator bi;

      FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
        {
          const pre_expr expr = expression_for_id (i);

          if (!first)
            fprintf (outfile, ", ");
          first = false;
          print_pre_expr (outfile, expr);

          fprintf (outfile, " (%04d)", get_expr_value_id (expr));
        }
    }
  fprintf (outfile, " }\n");
}

void debug_bitmap_set (bitmap_set_t);

DEBUG_FUNCTION void
debug_bitmap_set (bitmap_set_t set)
{
  print_bitmap_set (stderr, set, "debug", 0);
}

/* Print out the expressions that have VAL to OUTFILE.  */

void
print_value_expressions (FILE *outfile, unsigned int val)
{
  bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
  if (set)
    {
      char s[10];
      sprintf (s, "%04d", val);
      print_bitmap_set (outfile, set, s, 0);
    }
}


DEBUG_FUNCTION void
debug_value_expressions (unsigned int val)
{
  print_value_expressions (stderr, val);
}

/* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to
   represent it.  */

static pre_expr
get_or_alloc_expr_for_constant (tree constant)
{
  unsigned int result_id;
  unsigned int value_id;
  struct pre_expr_d expr;
  pre_expr newexpr;

  expr.kind = CONSTANT;
  PRE_EXPR_CONSTANT (&expr) = constant;
  result_id = lookup_expression_id (&expr);
  if (result_id != 0)
    return expression_for_id (result_id);

  newexpr = (pre_expr) pool_alloc (pre_expr_pool);
  newexpr->kind = CONSTANT;
  PRE_EXPR_CONSTANT (newexpr) = constant;
  alloc_expression_id (newexpr);
  value_id = get_or_alloc_constant_value_id (constant);
  add_to_value (value_id, newexpr);
  return newexpr;
}

/* Given a value id V, find the actual tree representing the constant
   value if there is one, and return it.  Return NULL if we can't find
   a constant.  */

static tree
get_constant_for_value_id (unsigned int v)
{
  if (value_id_constant_p (v))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return PRE_EXPR_CONSTANT (expr);
        }
    }
  return NULL;
}

/* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
   Currently only supports constants and SSA_NAMES.  */
static pre_expr
get_or_alloc_expr_for (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    return get_or_alloc_expr_for_name (t);
  else if (is_gimple_min_invariant (t))
    return get_or_alloc_expr_for_constant (t);
  else
    {
      /* More complex expressions can result from SCCVN expression
         simplification that inserts values for them.  As they all
         do not have VOPs they get handled by the nary ops struct.  */
      vn_nary_op_t result;
      unsigned int result_id;
      vn_nary_op_lookup (t, &result);
      if (result != NULL)
        {
          pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
          e->kind = NARY;
          PRE_EXPR_NARY (e) = result;
          result_id = lookup_expression_id (e);
          if (result_id != 0)
            {
              pool_free (pre_expr_pool, e);
              e = expression_for_id (result_id);
              return e;
            }
          alloc_expression_id (e);
          return e;
        }
    }
  return NULL;
}

/* Return the folded version of E, if E, when folded, becomes a gimple
   min_invariant constant.  Otherwise, return E.  */

static pre_expr
fully_constant_expression (pre_expr e)
{
  switch (e->kind)
    {
    case CONSTANT:
      return e;
    case NARY:
      {
        vn_nary_op_t nary = PRE_EXPR_NARY (e);
        switch (TREE_CODE_CLASS (nary->opcode))
          {
          case tcc_binary:
          case tcc_comparison:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree naryop1 = nary->op[1];
              tree result;
              if (!is_gimple_min_invariant (naryop0))
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  tree const0 = get_constant_for_value_id (vrep0);
                  if (const0)
                    naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
                }
              if (!is_gimple_min_invariant (naryop1))
                {
                  pre_expr rep1 = get_or_alloc_expr_for (naryop1);
                  unsigned int vrep1 = get_expr_value_id (rep1);
                  tree const1 = get_constant_for_value_id (vrep1);
                  if (const1)
                    naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
                }
              result = fold_binary (nary->opcode, nary->type,
                                    naryop0, naryop1);
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              /* We might have simplified the expression to a
                 SSA_NAME for example from x_1 * 1.  But we cannot
                 insert a PHI for x_1 unconditionally as x_1 might
                 not be available readily.  */
              return e;
            }
          case tcc_reference:
            if (nary->opcode != REALPART_EXPR
                && nary->opcode != IMAGPART_EXPR
                && nary->opcode != VIEW_CONVERT_EXPR)
              return e;
            /* Fallthrough.  */
          case tcc_unary:
            {
              /* We have to go from trees to pre exprs to value ids to
                 constants.  */
              tree naryop0 = nary->op[0];
              tree const0, result;
              if (is_gimple_min_invariant (naryop0))
                const0 = naryop0;
              else
                {
                  pre_expr rep0 = get_or_alloc_expr_for (naryop0);
                  unsigned int vrep0 = get_expr_value_id (rep0);
                  const0 = get_constant_for_value_id (vrep0);
                }
              result = NULL;
              if (const0)
                {
                  tree type1 = TREE_TYPE (nary->op[0]);
                  const0 = fold_convert (type1, const0);
                  result = fold_unary (nary->opcode, nary->type, const0);
                }
              if (result && is_gimple_min_invariant (result))
                return get_or_alloc_expr_for_constant (result);
              return e;
            }
          default:
            return e;
          }
      }
    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (e);
        tree folded;
        if ((folded = fully_constant_vn_reference_p (ref)))
          return get_or_alloc_expr_for_constant (folded);
        return e;
      }
    default:
      return e;
    }
  return e;
}
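
/* For instance (hypothetical values): if a_1 has been valued to the
   constant 3 and b_2 to 4, the NARY a_1 + b_2 folds above to the
   constant 7 and a CONSTANT pre_expr is returned.  If only one operand
   is constant, fold_binary may still simplify (x_1 * 1 becomes x_1),
   but since that result is not a min_invariant, E is returned
   unchanged.  */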

/* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
   it has the value it would have in BLOCK.  Set *SAME_VALID to true
   in case the new vuse doesn't change the value id of the OPERANDS.  */

static tree
translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
                              alias_set_type set, tree type, tree vuse,
                              basic_block phiblock,
                              basic_block block, bool *same_valid)
{
  gimple phi = SSA_NAME_DEF_STMT (vuse);
  ao_ref ref;
  edge e = NULL;
  bool use_oracle;

  *same_valid = true;

  if (gimple_bb (phi) != phiblock)
    return vuse;

  use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);

  /* Use the alias-oracle to find either the PHI node in this block,
     the first VUSE used in this block that is equivalent to vuse, or
     the first VUSE whose definition in this block kills the value.  */
  if (gimple_code (phi) == GIMPLE_PHI)
    e = find_edge (block, phiblock);
  else if (use_oracle)
    while (!stmt_may_clobber_ref_p_1 (phi, &ref))
      {
        vuse = gimple_vuse (phi);
        phi = SSA_NAME_DEF_STMT (vuse);
        if (gimple_bb (phi) != phiblock)
          return vuse;
        if (gimple_code (phi) == GIMPLE_PHI)
          {
            e = find_edge (block, phiblock);
            break;
          }
      }
  else
    return NULL_TREE;

  if (e)
    {
      if (use_oracle)
        {
          bitmap visited = NULL;
          /* Try to find a vuse that dominates this phi node by skipping
             non-clobbering statements.  */
          vuse = get_continuation_for_phi (phi, &ref, &visited, false);
          if (visited)
            BITMAP_FREE (visited);
        }
      else
        vuse = NULL_TREE;
      if (!vuse)
        {
          /* If we didn't find any, the value ID can't stay the same,
             but return the translated vuse.  */
          *same_valid = false;
          vuse = PHI_ARG_DEF (phi, e->dest_idx);
        }
      /* ??? We would like to return vuse here as this is the canonical
         upmost vdef that this reference is associated with.  But during
         insertion of the references into the hash tables we only ever
         directly insert with their direct gimple_vuse, hence returning
         something else would make us not find the other expression.  */
      return PHI_ARG_DEF (phi, e->dest_idx);
    }

  return NULL_TREE;
}

/* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
   SET2.  This is used to avoid making a set consisting of the union
   of PA_IN and ANTIC_IN during insert.  */

static inline pre_expr
find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
{
  pre_expr result;

  result = bitmap_find_leader (set1, val, NULL);
  if (!result && set2)
    result = bitmap_find_leader (set2, val, NULL);
  return result;
}

/* Get the tree type for our PRE expression e.  */

static tree
get_expr_type (const pre_expr e)
{
  switch (e->kind)
    {
    case NAME:
      return TREE_TYPE (PRE_EXPR_NAME (e));
    case CONSTANT:
      return TREE_TYPE (PRE_EXPR_CONSTANT (e));
    case REFERENCE:
      return PRE_EXPR_REFERENCE (e)->type;
    case NARY:
      return PRE_EXPR_NARY (e)->type;
    }
  gcc_unreachable ();
}

/* Get a representative SSA_NAME for a given expression.
   Since all of our sub-expressions are treated as values, we require
   them to be SSA_NAME's for simplicity.
   Prior versions of GVNPRE used to use "value handles" here, so that
   an expression would be VH.11 + VH.10 instead of d_3 + e_6.  In
   either case, the operands are really values (IE we do not expect
   them to be usable without finding leaders).  */

static tree
get_representative_for (const pre_expr e)
{
  tree exprtype;
  tree name;
  unsigned int value_id = get_expr_value_id (e);

  switch (e->kind)
    {
    case NAME:
      return PRE_EXPR_NAME (e);
    case CONSTANT:
      return PRE_EXPR_CONSTANT (e);
    case NARY:
    case REFERENCE:
      {
        /* Go through all of the expressions representing this value
           and pick out an SSA_NAME.  */
        unsigned int i;
        bitmap_iterator bi;
        bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
                                        value_id);
        FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
          {
            pre_expr rep = expression_for_id (i);
            if (rep->kind == NAME)
              return PRE_EXPR_NAME (rep);
          }
      }
      break;
    }
  /* If we reached here we couldn't find an SSA_NAME.  This can
     happen when we've discovered a value that has never appeared in
     the program as set to an SSA_NAME, most likely as the result of
     phi translation.  */
  if (dump_file)
    {
      fprintf (dump_file,
               "Could not find SSA_NAME representative for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  exprtype = get_expr_type (e);

  /* Build and insert the assignment of the end result to the temporary
     that we will return.  */
  if (!pretemp || exprtype != TREE_TYPE (pretemp))
    {
      pretemp = create_tmp_reg (exprtype, "pretmp");
      add_referenced_var (pretemp);
    }

  name = make_ssa_name (pretemp, gimple_build_nop ());
  VN_INFO_GET (name)->value_id = value_id;
  if (e->kind == CONSTANT)
    VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
  else
    VN_INFO (name)->valnum = name;

  add_to_value (value_id, get_or_alloc_expr_for_name (name));
  if (dump_file)
    {
      fprintf (dump_file, "Created SSA_NAME representative ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, " for expression:");
      print_pre_expr (dump_file, e);
      fprintf (dump_file, "\n");
    }

  return name;
}
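
/* For example (names purely illustrative): if phi translation produces
   the NARY a_2 + b_3 with value id 17, and no SSA_NAME in the program
   carries value 17, the code above creates a fresh name such as
   pretmp.4_9, gives it value id 17, and records it as an expression of
   that value so later lookups can use it as the representative.  */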


static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock);

/* Translate EXPR using phis in PHIBLOCK, so that it has the values of
   the phis in PRED.  Return NULL if we can't find a leader for each part
   of the translated expression.  */

static pre_expr
phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                 basic_block pred, basic_block phiblock)
{
  switch (expr->kind)
    {
    case NARY:
      {
        unsigned int i;
        bool changed = false;
        vn_nary_op_t nary = PRE_EXPR_NARY (expr);
        vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
                                           sizeof_vn_nary_op (nary->length));
        memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));

        for (i = 0; i < newnary->length; i++)
          {
            if (TREE_CODE (newnary->op[i]) != SSA_NAME)
              continue;
            else
              {
                pre_expr leader, result;
                unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                result = phi_translate (leader, set1, set2, pred, phiblock);
                if (result && result != leader)
                  {
                    tree name = get_representative_for (result);
                    if (!name)
                      return NULL;
                    newnary->op[i] = name;
                  }
                else if (!result)
                  return NULL;

                changed |= newnary->op[i] != nary->op[i];
              }
          }
        if (changed)
          {
            pre_expr constant;
            unsigned int new_val_id;

            tree result = vn_nary_op_lookup_pieces (newnary->length,
                                                    newnary->opcode,
                                                    newnary->type,
                                                    &newnary->op[0],
                                                    &nary);
            if (result && is_gimple_min_invariant (result))
              return get_or_alloc_expr_for_constant (result);

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = NARY;
            expr->id = 0;
            if (nary)
              {
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = nary->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                new_val_id = get_next_value_id ();
                VEC_safe_grow_cleared (bitmap_set_t, heap,
                                       value_expressions,
                                       get_max_value_id () + 1);
                nary = vn_nary_op_insert_pieces (newnary->length,
                                                 newnary->opcode,
                                                 newnary->type,
                                                 &newnary->op[0],
                                                 result, new_val_id);
                PRE_EXPR_NARY (expr) = nary;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        return expr;
      }
      break;

    case REFERENCE:
      {
        vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
        VEC (vn_reference_op_s, heap) *operands = ref->operands;
        tree vuse = ref->vuse;
        tree newvuse = vuse;
        VEC (vn_reference_op_s, heap) *newoperands = NULL;
        bool changed = false, same_valid = true;
        unsigned int i, j, n;
        vn_reference_op_t operand;
        vn_reference_t newref;

        for (i = 0, j = 0;
             VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
          {
            pre_expr opresult;
            pre_expr leader;
            tree op[3];
            tree type = operand->type;
            vn_reference_op_s newop = *operand;
            op[0] = operand->op0;
            op[1] = operand->op1;
            op[2] = operand->op2;
            for (n = 0; n < 3; ++n)
              {
                unsigned int op_val_id;
                if (!op[n])
                  continue;
                if (TREE_CODE (op[n]) != SSA_NAME)
                  {
                    /* We can't possibly insert these.  */
                    if (n != 0
                        && !is_gimple_min_invariant (op[n]))
                      break;
                    continue;
                  }
                op_val_id = VN_INFO (op[n])->value_id;
                leader = find_leader_in_sets (op_val_id, set1, set2);
                if (!leader)
                  break;
                /* Make sure we do not recursively translate ourselves
                   like for translating a[n_1] with the leader for
                   n_1 being a[n_1].  */
                if (get_expression_id (leader) != get_expression_id (expr))
                  {
                    opresult = phi_translate (leader, set1, set2,
                                              pred, phiblock);
                    if (!opresult)
                      break;
                    if (opresult != leader)
                      {
                        tree name = get_representative_for (opresult);
                        if (!name)
                          break;
                        changed |= name != op[n];
                        op[n] = name;
                      }
                  }
              }
            if (n != 3)
              {
                if (newoperands)
                  VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
            if (!newoperands)
              newoperands = VEC_copy (vn_reference_op_s, heap, operands);
            /* We may have changed from an SSA_NAME to a constant.  */
            if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
              newop.opcode = TREE_CODE (op[0]);
            newop.type = type;
            newop.op0 = op[0];
            newop.op1 = op[1];
            newop.op2 = op[2];
            /* If it transforms a non-constant ARRAY_REF into a constant
               one, adjust the constant offset.  */
            if (newop.opcode == ARRAY_REF
                && newop.off == -1
                && TREE_CODE (op[0]) == INTEGER_CST
                && TREE_CODE (op[1]) == INTEGER_CST
                && TREE_CODE (op[2]) == INTEGER_CST)
              {
                double_int off = tree_to_double_int (op[0]);
                off = double_int_add (off,
                                      double_int_neg
                                        (tree_to_double_int (op[1])));
                off = double_int_mul (off, tree_to_double_int (op[2]));
                if (double_int_fits_in_shwi_p (off))
                  newop.off = off.low;
              }
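            /* E.g. (illustration only): for a[i_1] where phi translation
               turned the index i_1 into the constant 4, with a low bound
               (op[1]) of 1 and an element size (op[2]) of 4, the code
               above computes newop.off = (4 - 1) * 4 = 12.  */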
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
                              newoperands, j - 1)->opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
          {
            if (newoperands)
              VEC_free (vn_reference_op_s, heap, newoperands);
            return NULL;
          }

        if (vuse)
          {
            newvuse = translate_vuse_through_block (newoperands,
                                                    ref->set, ref->type,
                                                    vuse, phiblock, pred,
                                                    &same_valid);
            if (newvuse == NULL_TREE)
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }
          }

        if (changed || newvuse != vuse)
          {
            unsigned int new_val_id;
            pre_expr constant;
            bool converted = false;

            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
                                                      newoperands,
                                                      &newref, VN_WALK);
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);

            if (result
                && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
              {
                result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
                converted = true;
              }
            else if (!result && newref
                     && !useless_type_conversion_p (ref->type, newref->type))
              {
                VEC_free (vn_reference_op_s, heap, newoperands);
                return NULL;
              }

            if (result && is_gimple_min_invariant (result))
              {
                gcc_assert (!newoperands);
                return get_or_alloc_expr_for_constant (result);
              }

            expr = (pre_expr) pool_alloc (pre_expr_pool);
            expr->kind = REFERENCE;
            expr->id = 0;

            if (converted)
              {
                vn_nary_op_t nary;
                tree nresult;

                gcc_assert (CONVERT_EXPR_P (result)
                            || TREE_CODE (result) == VIEW_CONVERT_EXPR);

                nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
                                                    TREE_TYPE (result),
                                                    &TREE_OPERAND (result, 0),
                                                    &nary);
                if (nresult && is_gimple_min_invariant (nresult))
                  return get_or_alloc_expr_for_constant (nresult);

                expr->kind = NARY;
                if (nary)
                  {
                    PRE_EXPR_NARY (expr) = nary;
                    constant = fully_constant_expression (expr);
                    if (constant != expr)
                      return constant;

                    new_val_id = nary->value_id;
                    get_or_alloc_expression_id (expr);
                  }
                else
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id () + 1);
                    nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
                                                     TREE_TYPE (result),
                                                     &TREE_OPERAND (result, 0),
                                                     NULL_TREE,
                                                     new_val_id);
                    PRE_EXPR_NARY (expr) = nary;
                    constant = fully_constant_expression (expr);
                    if (constant != expr)
                      return constant;
                    get_or_alloc_expression_id (expr);
                  }
              }
            else if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;

                new_val_id = newref->value_id;
                get_or_alloc_expression_id (expr);
              }
            else
              {
                if (changed || !same_valid)
                  {
                    new_val_id = get_next_value_id ();
                    VEC_safe_grow_cleared (bitmap_set_t, heap,
                                           value_expressions,
                                           get_max_value_id () + 1);
                  }
                else
                  new_val_id = ref->value_id;
                newref = vn_reference_insert_pieces (newvuse, ref->set,
                                                     ref->type,
                                                     newoperands,
                                                     result, new_val_id);
                newoperands = NULL;
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
                if (constant != expr)
                  return constant;
                get_or_alloc_expression_id (expr);
              }
            add_to_value (new_val_id, expr);
          }
        VEC_free (vn_reference_op_s, heap, newoperands);
        return expr;
      }
      break;

    case NAME:
      {
        gimple phi = NULL;
        edge e;
        gimple def_stmt;
        tree name = PRE_EXPR_NAME (expr);

        def_stmt = SSA_NAME_DEF_STMT (name);
        if (gimple_code (def_stmt) == GIMPLE_PHI
            && gimple_bb (def_stmt) == phiblock)
          phi = def_stmt;
        else
          return expr;

        e = find_edge (pred, gimple_bb (phi));
        if (e)
          {
            tree def = PHI_ARG_DEF (phi, e->dest_idx);
            pre_expr newexpr;

            if (TREE_CODE (def) == SSA_NAME)
              def = VN_INFO (def)->valnum;

            /* Handle constant.  */
            if (is_gimple_min_invariant (def))
              return get_or_alloc_expr_for_constant (def);

            if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
              return NULL;

            newexpr = get_or_alloc_expr_for_name (def);
            return newexpr;
          }
      }
      return expr;

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around phi_translate_1 providing caching functionality.  */

static pre_expr
phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
               basic_block pred, basic_block phiblock)
{
  pre_expr phitrans;

  if (!expr)
    return NULL;

  /* Constants contain no values that need translation.  */
  if (expr->kind == CONSTANT)
    return expr;

  if (value_id_constant_p (get_expr_value_id (expr)))
    return expr;

  if (expr->kind != NAME)
    {
      phitrans = phi_trans_lookup (expr, pred);
      if (phitrans)
        return phitrans;
    }

  /* Translate.  */
  phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);

  /* Don't add empty translations to the cache.  Neither add
     translations of NAMEs as those are cheap to translate.  */
  if (phitrans
      && expr->kind != NAME)
    phi_trans_add (expr, phitrans, pred);

  return phitrans;
}
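
/* As a sketch of what translation does (names purely illustrative):
   with x_3 = PHI <x_1(pred), x_2(other)> in PHIBLOCK, translating the
   NARY x_3 + 1 into PRED replaces x_3 by the PHI argument on the PRED
   edge, yielding x_1 + 1 (or a constant, if x_1 has been valued to
   one).  This is how anticipated expressions flow backwards through
   PHI nodes during the ANTIC computation.  */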


/* For each expression in SET, translate the values through phi nodes
   in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
   expressions in DEST.  */

static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
                   basic_block phiblock)
{
  VEC (pre_expr, heap) *exprs;
  pre_expr expr;
  int i;

  if (gimple_seq_empty_p (phi_nodes (phiblock)))
    {
      bitmap_set_copy (dest, set);
      return;
    }

  exprs = sorted_array_from_bitmap_set (set);
  FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
    {
      pre_expr translated;
      translated = phi_translate (expr, set, NULL, pred, phiblock);
      if (!translated)
        continue;

      /* We might end up with multiple expressions from SET being
         translated to the same value.  In this case we do not want
         to retain the NARY or REFERENCE expression but prefer a NAME
         which would be the leader.  */
      if (translated->kind == NAME)
        bitmap_value_replace_in_set (dest, translated);
      else
        bitmap_value_insert_into_set (dest, translated);
    }
  VEC_free (pre_expr, heap, exprs);
}

/* Find the leader for a value (i.e., the name representing that
   value) in a given set, and return it.  If STMT is non-NULL it
   makes sure the defining statement for the leader dominates it.
   Return NULL if no leader is found.  */

static pre_expr
bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
{
  if (value_id_constant_p (val))
    {
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
        {
          pre_expr expr = expression_for_id (i);
          if (expr->kind == CONSTANT)
            return expr;
        }
    }
  if (bitmap_set_contains_value (set, val))
    {
      /* Rather than walk the entire bitmap of expressions, and see
         whether any of them has the value we are looking for, we look
         at the reverse mapping, which tells us the set of expressions
         that have a given value (IE value->expressions with that
         value) and see if any of those expressions are in our set.
         The number of expressions per value is usually significantly
         less than the number of expressions in the set.  In fact, for
         large testcases, doing it this way is roughly 5-10x faster
         than walking the bitmap.
         If this is somehow a significant loss for some cases, we can
         choose which set to walk based on which set is smaller.  */
      unsigned int i;
      bitmap_iterator bi;
      bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);

      EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
                                &set->expressions, 0, i, bi)
        {
          pre_expr val = expression_for_id (i);
          /* At the point where stmt is not null, there should always
             be an SSA_NAME first in the list of expressions.  */
          if (stmt)
            {
              gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
              if (gimple_code (def_stmt) != GIMPLE_PHI
                  && gimple_bb (def_stmt) == gimple_bb (stmt)
                  /* PRE insertions are at the end of the basic-block
                     and have UID 0.  */
                  && (gimple_uid (def_stmt) == 0
                      || gimple_uid (def_stmt) >= gimple_uid (stmt)))
                continue;
            }
          return val;
        }
    }
  return NULL;
}
*/ 1977 if (!def_vuse) 1978 continue; 1979 1980 /* Not a may-def. */ 1981 if (!def_vdef) 1982 { 1983 /* A load with the same VUSE, we're done. */ 1984 if (def_vuse == vuse) 1985 break; 1986 1987 continue; 1988 } 1989 1990 /* Init ref only if we really need it. */ 1991 if (ref.base == NULL_TREE 1992 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type, 1993 refx->operands)) 1994 { 1995 res = true; 1996 break; 1997 } 1998 /* If the statement may clobber expr, it dies. */ 1999 if (stmt_may_clobber_ref_p_1 (def, &ref)) 2000 { 2001 res = true; 2002 break; 2003 } 2004 } 2005 2006 /* Remember the result. */ 2007 if (!EXPR_DIES (block)) 2008 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack); 2009 bitmap_set_bit (EXPR_DIES (block), id * 2); 2010 if (res) 2011 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1); 2012 2013 return res; 2014 } 2015 2016 2017 #define union_contains_value(SET1, SET2, VAL) \ 2018 (bitmap_set_contains_value ((SET1), (VAL)) \ 2019 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL)))) 2020 2021 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2. 2022 */ 2023 static bool 2024 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, 2025 vn_reference_op_t vro) 2026 { 2027 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME) 2028 { 2029 struct pre_expr_d temp; 2030 temp.kind = NAME; 2031 temp.id = 0; 2032 PRE_EXPR_NAME (&temp) = vro->op0; 2033 temp.id = lookup_expression_id (&temp); 2034 if (temp.id == 0) 2035 return false; 2036 if (!union_contains_value (set1, set2, 2037 get_expr_value_id (&temp))) 2038 return false; 2039 } 2040 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME) 2041 { 2042 struct pre_expr_d temp; 2043 temp.kind = NAME; 2044 temp.id = 0; 2045 PRE_EXPR_NAME (&temp) = vro->op1; 2046 temp.id = lookup_expression_id (&temp); 2047 if (temp.id == 0) 2048 return false; 2049 if (!union_contains_value (set1, set2, 2050 get_expr_value_id (&temp))) 2051 return false; 2052 } 2053 2054 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME) 2055 { 2056 struct pre_expr_d temp; 2057 temp.kind = NAME; 2058 temp.id = 0; 2059 PRE_EXPR_NAME (&temp) = vro->op2; 2060 temp.id = lookup_expression_id (&temp); 2061 if (temp.id == 0) 2062 return false; 2063 if (!union_contains_value (set1, set2, 2064 get_expr_value_id (&temp))) 2065 return false; 2066 } 2067 2068 return true; 2069 } 2070 2071 /* Determine if the expression EXPR is valid in SET1 U SET2. 2072 ONLY SET2 CAN BE NULL. 2073 This means that we have a leader for each part of the expression 2074 (if it consists of values), or the expression is an SSA_NAME. 2075 For loads/calls, we also see if the vuse is killed in this block. */ 2076 2077 static bool 2078 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr, 2079 basic_block block) 2080 { 2081 switch (expr->kind) 2082 { 2083 case NAME: 2084 return bitmap_set_contains_expr (AVAIL_OUT (block), expr); 2085 case NARY: 2086 { 2087 unsigned int i; 2088 vn_nary_op_t nary = PRE_EXPR_NARY (expr); 2089 for (i = 0; i < nary->length; i++) 2090 { 2091 if (TREE_CODE (nary->op[i]) == SSA_NAME) 2092 { 2093 struct pre_expr_d temp; 2094 temp.kind = NAME; 2095 temp.id = 0; 2096 PRE_EXPR_NAME (&temp) = nary->op[i]; 2097 temp.id = lookup_expression_id (&temp); 2098 if (temp.id == 0) 2099 return false; 2100 if (!union_contains_value (set1, set2, 2101 get_expr_value_id (&temp))) 2102 return false; 2103 } 2104 } 2105 /* If the NARY may trap make sure the block does not contain 2106 a possible exit point. 2107 ??? 
This is overly conservative if we translate AVAIL_OUT 2108 as the available expression might be after the exit point. */ 2109 if (BB_MAY_NOTRETURN (block) 2110 && vn_nary_may_trap (nary)) 2111 return false; 2112 return true; 2113 } 2114 break; 2115 case REFERENCE: 2116 { 2117 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); 2118 vn_reference_op_t vro; 2119 unsigned int i; 2120 2121 FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro) 2122 { 2123 if (!vro_valid_in_sets (set1, set2, vro)) 2124 return false; 2125 } 2126 if (ref->vuse) 2127 { 2128 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse); 2129 if (!gimple_nop_p (def_stmt) 2130 && gimple_bb (def_stmt) != block 2131 && !dominated_by_p (CDI_DOMINATORS, 2132 block, gimple_bb (def_stmt))) 2133 return false; 2134 } 2135 return !value_dies_in_block_x (expr, block); 2136 } 2137 default: 2138 gcc_unreachable (); 2139 } 2140 } 2141 2142 /* Clean the set of expressions that are no longer valid in SET1 or 2143 SET2. This means expressions that are made up of values we have no 2144 leaders for in SET1 or SET2. This version is used for partial 2145 anticipation, which means it is not valid in either ANTIC_IN or 2146 PA_IN. */ 2147 2148 static void 2149 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block) 2150 { 2151 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1); 2152 pre_expr expr; 2153 int i; 2154 2155 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) 2156 { 2157 if (!valid_in_sets (set1, set2, expr, block)) 2158 bitmap_remove_from_set (set1, expr); 2159 } 2160 VEC_free (pre_expr, heap, exprs); 2161 } 2162 2163 /* Clean the set of expressions that are no longer valid in SET. This 2164 means expressions that are made up of values we have no leaders for 2165 in SET. */ 2166 2167 static void 2168 clean (bitmap_set_t set, basic_block block) 2169 { 2170 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set); 2171 pre_expr expr; 2172 int i; 2173 2174 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) 2175 { 2176 if (!valid_in_sets (set, NULL, expr, block)) 2177 bitmap_remove_from_set (set, expr); 2178 } 2179 VEC_free (pre_expr, heap, exprs); 2180 } 2181 2182 static sbitmap has_abnormal_preds; 2183 2184 /* List of blocks that may have changed during ANTIC computation and 2185 thus need to be iterated over. */ 2186 2187 static sbitmap changed_blocks; 2188 2189 /* Decide whether to defer a block for a later iteration, or PHI 2190 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we 2191 should defer the block, and true if we processed it. */ 2192 2193 static bool 2194 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source, 2195 basic_block block, basic_block phiblock) 2196 { 2197 if (!BB_VISITED (phiblock)) 2198 { 2199 SET_BIT (changed_blocks, block->index); 2200 BB_VISITED (block) = 0; 2201 BB_DEFERRED (block) = 1; 2202 return false; 2203 } 2204 else 2205 phi_translate_set (dest, source, block, phiblock); 2206 return true; 2207 } 2208 2209 /* Compute the ANTIC set for BLOCK. 
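Informally, ANTIC_OUT is what is anticipatable on exit from BLOCK:
   the intersection of the successors' ANTIC_IN sets when there are
   several, or the phi-translated ANTIC_IN of a single successor.
   ANTIC_IN then adds what BLOCK itself generates and drops expressions
   whose operands have no leader. As a small example, for successors S1
   and S2 with ANTIC_IN[S1] = { a + b } and ANTIC_IN[S2] = { a + b, c + d },
   ANTIC_OUT[BLOCK] = { a + b }. More precisely: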
2210
2211 If succs(BLOCK) > 1 then
2212 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2213 else if succs(BLOCK) == 1 then
2214 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2215
2216 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
2217 */
2218
2219 static bool
2220 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2221 {
2222 bool changed = false;
2223 bitmap_set_t S, old, ANTIC_OUT;
2224 bitmap_iterator bi;
2225 unsigned int bii;
2226 edge e;
2227 edge_iterator ei;
2228
2229 old = ANTIC_OUT = S = NULL;
2230 BB_VISITED (block) = 1;
2231
2232 /* If any edges from predecessors are abnormal, antic_in is empty,
2233 so do nothing. */
2234 if (block_has_abnormal_pred_edge)
2235 goto maybe_dump_sets;
2236
2237 old = ANTIC_IN (block);
2238 ANTIC_OUT = bitmap_set_new ();
2239
2240 /* If the block has no successors, ANTIC_OUT is empty. */
2241 if (EDGE_COUNT (block->succs) == 0)
2242 ;
2243 /* If we have one successor, we could have some phi nodes to
2244 translate through. */
2245 else if (single_succ_p (block))
2246 {
2247 basic_block succ_bb = single_succ (block);
2248
2249 /* We trade iterations of the dataflow equations for having to
2250 phi translate the maximal set, which is incredibly slow
2251 (since the maximal set often has 300+ members, even when you
2252 have a small number of blocks).
2253 Basically, we defer the computation of ANTIC for this block
2254 until we have processed its successor, which will inevitably
2255 have a *much* smaller set of values to phi translate once
2256 clean has been run on it.
2257 The cost of doing this is that we technically perform more
2258 iterations; however, they are lower-cost iterations.
2259
2260 Timings for PRE on tramp3d-v4:
2261 without maximal set fix: 11 seconds
2262 with maximal set fix/without deferring: 26 seconds
2263 with maximal set fix/with deferring: 11 seconds
2264 */
2265
2266 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2267 block, succ_bb))
2268 {
2269 changed = true;
2270 goto maybe_dump_sets;
2271 }
2272 }
2273 /* If we have multiple successors, we take the intersection of all of
2274 them. Note that in the case of loop exit phi nodes, we may have
2275 phis to translate through. */
2276 else
2277 {
2278 VEC(basic_block, heap) * worklist;
2279 size_t i;
2280 basic_block bprime, first = NULL;
2281
2282 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2283 FOR_EACH_EDGE (e, ei, block->succs)
2284 {
2285 if (!first
2286 && BB_VISITED (e->dest))
2287 first = e->dest;
2288 else if (BB_VISITED (e->dest))
2289 VEC_quick_push (basic_block, worklist, e->dest);
2290 }
2291
2292 /* With multiple successors, we must already have visited at least one. 
*/ 2293 if (!first) 2294 { 2295 SET_BIT (changed_blocks, block->index); 2296 BB_VISITED (block) = 0; 2297 BB_DEFERRED (block) = 1; 2298 changed = true; 2299 VEC_free (basic_block, heap, worklist); 2300 goto maybe_dump_sets; 2301 } 2302 2303 if (!gimple_seq_empty_p (phi_nodes (first))) 2304 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first); 2305 else 2306 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first)); 2307 2308 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime) 2309 { 2310 if (!gimple_seq_empty_p (phi_nodes (bprime))) 2311 { 2312 bitmap_set_t tmp = bitmap_set_new (); 2313 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime); 2314 bitmap_set_and (ANTIC_OUT, tmp); 2315 bitmap_set_free (tmp); 2316 } 2317 else 2318 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime)); 2319 } 2320 VEC_free (basic_block, heap, worklist); 2321 } 2322 2323 /* Generate ANTIC_OUT - TMP_GEN. */ 2324 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block)); 2325 2326 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */ 2327 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block), 2328 TMP_GEN (block)); 2329 2330 /* Then union in the ANTIC_OUT - TMP_GEN values, 2331 to get ANTIC_OUT U EXP_GEN - TMP_GEN */ 2332 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi) 2333 bitmap_value_insert_into_set (ANTIC_IN (block), 2334 expression_for_id (bii)); 2335 2336 clean (ANTIC_IN (block), block); 2337 2338 if (!bitmap_set_equal (old, ANTIC_IN (block))) 2339 { 2340 changed = true; 2341 SET_BIT (changed_blocks, block->index); 2342 FOR_EACH_EDGE (e, ei, block->preds) 2343 SET_BIT (changed_blocks, e->src->index); 2344 } 2345 else 2346 RESET_BIT (changed_blocks, block->index); 2347 2348 maybe_dump_sets: 2349 if (dump_file && (dump_flags & TDF_DETAILS)) 2350 { 2351 if (!BB_DEFERRED (block) || BB_VISITED (block)) 2352 { 2353 if (ANTIC_OUT) 2354 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index); 2355 2356 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN", 2357 block->index); 2358 2359 if (S) 2360 print_bitmap_set (dump_file, S, "S", block->index); 2361 } 2362 else 2363 { 2364 fprintf (dump_file, 2365 "Block %d was deferred for a future iteration.\n", 2366 block->index); 2367 } 2368 } 2369 if (old) 2370 bitmap_set_free (old); 2371 if (S) 2372 bitmap_set_free (S); 2373 if (ANTIC_OUT) 2374 bitmap_set_free (ANTIC_OUT); 2375 return changed; 2376 } 2377 2378 /* Compute PARTIAL_ANTIC for BLOCK. 2379 2380 If succs(BLOCK) > 1 then 2381 PA_OUT[BLOCK] = value wise union of PA_IN[b] + all ANTIC_IN not 2382 in ANTIC_OUT for all succ(BLOCK) 2383 else if succs(BLOCK) == 1 then 2384 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)]) 2385 2386 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] 2387 - ANTIC_IN[BLOCK]) 2388 2389 */ 2390 static bool 2391 compute_partial_antic_aux (basic_block block, 2392 bool block_has_abnormal_pred_edge) 2393 { 2394 bool changed = false; 2395 bitmap_set_t old_PA_IN; 2396 bitmap_set_t PA_OUT; 2397 edge e; 2398 edge_iterator ei; 2399 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH); 2400 2401 old_PA_IN = PA_OUT = NULL; 2402 2403 /* If any edges from predecessors are abnormal, antic_in is empty, 2404 so do nothing. */ 2405 if (block_has_abnormal_pred_edge) 2406 goto maybe_dump_sets; 2407 2408 /* If there are too many partially anticipatable values in the 2409 block, phi_translate_set can take an exponential time: stop 2410 before the translation starts. 
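The cutoff is PARAM_MAX_PARTIAL_ANTIC_LENGTH
   (--param max-partial-antic-length); a value of zero removes the cap.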
*/ 2411 if (max_pa 2412 && single_succ_p (block) 2413 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa) 2414 goto maybe_dump_sets; 2415 2416 old_PA_IN = PA_IN (block); 2417 PA_OUT = bitmap_set_new (); 2418 2419 /* If the block has no successors, ANTIC_OUT is empty. */ 2420 if (EDGE_COUNT (block->succs) == 0) 2421 ; 2422 /* If we have one successor, we could have some phi nodes to 2423 translate through. Note that we can't phi translate across DFS 2424 back edges in partial antic, because it uses a union operation on 2425 the successors. For recurrences like IV's, we will end up 2426 generating a new value in the set on each go around (i + 3 (VH.1) 2427 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc), forever. */ 2428 else if (single_succ_p (block)) 2429 { 2430 basic_block succ = single_succ (block); 2431 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK)) 2432 phi_translate_set (PA_OUT, PA_IN (succ), block, succ); 2433 } 2434 /* If we have multiple successors, we take the union of all of 2435 them. */ 2436 else 2437 { 2438 VEC(basic_block, heap) * worklist; 2439 size_t i; 2440 basic_block bprime; 2441 2442 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs)); 2443 FOR_EACH_EDGE (e, ei, block->succs) 2444 { 2445 if (e->flags & EDGE_DFS_BACK) 2446 continue; 2447 VEC_quick_push (basic_block, worklist, e->dest); 2448 } 2449 if (VEC_length (basic_block, worklist) > 0) 2450 { 2451 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime) 2452 { 2453 unsigned int i; 2454 bitmap_iterator bi; 2455 2456 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) 2457 bitmap_value_insert_into_set (PA_OUT, 2458 expression_for_id (i)); 2459 if (!gimple_seq_empty_p (phi_nodes (bprime))) 2460 { 2461 bitmap_set_t pa_in = bitmap_set_new (); 2462 phi_translate_set (pa_in, PA_IN (bprime), block, bprime); 2463 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi) 2464 bitmap_value_insert_into_set (PA_OUT, 2465 expression_for_id (i)); 2466 bitmap_set_free (pa_in); 2467 } 2468 else 2469 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi) 2470 bitmap_value_insert_into_set (PA_OUT, 2471 expression_for_id (i)); 2472 } 2473 } 2474 VEC_free (basic_block, heap, worklist); 2475 } 2476 2477 /* PA_IN starts with PA_OUT - TMP_GEN. 2478 Then we subtract things from ANTIC_IN. */ 2479 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block)); 2480 2481 /* For partial antic, we want to put back in the phi results, since 2482 we will properly avoid making them partially antic over backedges. 
*/ 2483 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values); 2484 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions); 2485 2486 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */ 2487 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block)); 2488 2489 dependent_clean (PA_IN (block), ANTIC_IN (block), block); 2490 2491 if (!bitmap_set_equal (old_PA_IN, PA_IN (block))) 2492 { 2493 changed = true; 2494 SET_BIT (changed_blocks, block->index); 2495 FOR_EACH_EDGE (e, ei, block->preds) 2496 SET_BIT (changed_blocks, e->src->index); 2497 } 2498 else 2499 RESET_BIT (changed_blocks, block->index); 2500 2501 maybe_dump_sets: 2502 if (dump_file && (dump_flags & TDF_DETAILS)) 2503 { 2504 if (PA_OUT) 2505 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index); 2506 2507 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index); 2508 } 2509 if (old_PA_IN) 2510 bitmap_set_free (old_PA_IN); 2511 if (PA_OUT) 2512 bitmap_set_free (PA_OUT); 2513 return changed; 2514 } 2515 2516 /* Compute ANTIC and partial ANTIC sets. */ 2517 2518 static void 2519 compute_antic (void) 2520 { 2521 bool changed = true; 2522 int num_iterations = 0; 2523 basic_block block; 2524 int i; 2525 2526 /* If any predecessor edges are abnormal, we punt, so antic_in is empty. 2527 We pre-build the map of blocks with incoming abnormal edges here. */ 2528 has_abnormal_preds = sbitmap_alloc (last_basic_block); 2529 sbitmap_zero (has_abnormal_preds); 2530 2531 FOR_EACH_BB (block) 2532 { 2533 edge_iterator ei; 2534 edge e; 2535 2536 FOR_EACH_EDGE (e, ei, block->preds) 2537 { 2538 e->flags &= ~EDGE_DFS_BACK; 2539 if (e->flags & EDGE_ABNORMAL) 2540 { 2541 SET_BIT (has_abnormal_preds, block->index); 2542 break; 2543 } 2544 } 2545 2546 BB_VISITED (block) = 0; 2547 BB_DEFERRED (block) = 0; 2548 2549 /* While we are here, give empty ANTIC_IN sets to each block. */ 2550 ANTIC_IN (block) = bitmap_set_new (); 2551 PA_IN (block) = bitmap_set_new (); 2552 } 2553 2554 /* At the exit block we anticipate nothing. */ 2555 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new (); 2556 BB_VISITED (EXIT_BLOCK_PTR) = 1; 2557 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new (); 2558 2559 changed_blocks = sbitmap_alloc (last_basic_block + 1); 2560 sbitmap_ones (changed_blocks); 2561 while (changed) 2562 { 2563 if (dump_file && (dump_flags & TDF_DETAILS)) 2564 fprintf (dump_file, "Starting iteration %d\n", num_iterations); 2565 /* ??? We need to clear our PHI translation cache here as the 2566 ANTIC sets shrink and we restrict valid translations to 2567 those having operands with leaders in ANTIC. Same below 2568 for PA ANTIC computation. */ 2569 num_iterations++; 2570 changed = false; 2571 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--) 2572 { 2573 if (TEST_BIT (changed_blocks, postorder[i])) 2574 { 2575 basic_block block = BASIC_BLOCK (postorder[i]); 2576 changed |= compute_antic_aux (block, 2577 TEST_BIT (has_abnormal_preds, 2578 block->index)); 2579 } 2580 } 2581 /* Theoretically possible, but *highly* unlikely. 
*/ 2582 gcc_checking_assert (num_iterations < 500); 2583 } 2584 2585 statistics_histogram_event (cfun, "compute_antic iterations", 2586 num_iterations); 2587 2588 if (do_partial_partial) 2589 { 2590 sbitmap_ones (changed_blocks); 2591 mark_dfs_back_edges (); 2592 num_iterations = 0; 2593 changed = true; 2594 while (changed) 2595 { 2596 if (dump_file && (dump_flags & TDF_DETAILS)) 2597 fprintf (dump_file, "Starting iteration %d\n", num_iterations); 2598 num_iterations++; 2599 changed = false; 2600 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--) 2601 { 2602 if (TEST_BIT (changed_blocks, postorder[i])) 2603 { 2604 basic_block block = BASIC_BLOCK (postorder[i]); 2605 changed 2606 |= compute_partial_antic_aux (block, 2607 TEST_BIT (has_abnormal_preds, 2608 block->index)); 2609 } 2610 } 2611 /* Theoretically possible, but *highly* unlikely. */ 2612 gcc_checking_assert (num_iterations < 500); 2613 } 2614 statistics_histogram_event (cfun, "compute_partial_antic iterations", 2615 num_iterations); 2616 } 2617 sbitmap_free (has_abnormal_preds); 2618 sbitmap_free (changed_blocks); 2619 } 2620 2621 /* Return true if OP is a tree which we can perform PRE on. 2622 This may not match the operations we can value number, but in 2623 a perfect world would. */ 2624 2625 static bool 2626 can_PRE_operation (tree op) 2627 { 2628 return UNARY_CLASS_P (op) 2629 || BINARY_CLASS_P (op) 2630 || COMPARISON_CLASS_P (op) 2631 || TREE_CODE (op) == MEM_REF 2632 || TREE_CODE (op) == COMPONENT_REF 2633 || TREE_CODE (op) == VIEW_CONVERT_EXPR 2634 || TREE_CODE (op) == CALL_EXPR 2635 || TREE_CODE (op) == ARRAY_REF; 2636 } 2637 2638 2639 /* Inserted expressions are placed onto this worklist, which is used 2640 for performing quick dead code elimination of insertions we made 2641 that didn't turn out to be necessary. */ 2642 static bitmap inserted_exprs; 2643 2644 /* Pool allocated fake store expressions are placed onto this 2645 worklist, which, after performing dead code elimination, is walked 2646 to see which expressions need to be put into GC'able memory */ 2647 static VEC(gimple, heap) *need_creation; 2648 2649 /* The actual worker for create_component_ref_by_pieces. */ 2650 2651 static tree 2652 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref, 2653 unsigned int *operand, gimple_seq *stmts, 2654 gimple domstmt) 2655 { 2656 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands, 2657 *operand); 2658 tree genop; 2659 ++*operand; 2660 switch (currop->opcode) 2661 { 2662 case CALL_EXPR: 2663 { 2664 tree folded, sc = NULL_TREE; 2665 unsigned int nargs = 0; 2666 tree fn, *args; 2667 if (TREE_CODE (currop->op0) == FUNCTION_DECL) 2668 fn = currop->op0; 2669 else 2670 { 2671 pre_expr op0 = get_or_alloc_expr_for (currop->op0); 2672 fn = find_or_generate_expression (block, op0, stmts, domstmt); 2673 if (!fn) 2674 return NULL_TREE; 2675 } 2676 if (currop->op1) 2677 { 2678 pre_expr scexpr = get_or_alloc_expr_for (currop->op1); 2679 sc = find_or_generate_expression (block, scexpr, stmts, domstmt); 2680 if (!sc) 2681 return NULL_TREE; 2682 } 2683 args = XNEWVEC (tree, VEC_length (vn_reference_op_s, 2684 ref->operands) - 1); 2685 while (*operand < VEC_length (vn_reference_op_s, ref->operands)) 2686 { 2687 args[nargs] = create_component_ref_by_pieces_1 (block, ref, 2688 operand, stmts, 2689 domstmt); 2690 if (!args[nargs]) 2691 { 2692 free (args); 2693 return NULL_TREE; 2694 } 2695 nargs++; 2696 } 2697 folded = build_call_array (currop->type, 2698 (TREE_CODE (fn) == FUNCTION_DECL 2699 ? 
build_fold_addr_expr (fn) : fn), 2700 nargs, args); 2701 free (args); 2702 if (sc) 2703 CALL_EXPR_STATIC_CHAIN (folded) = sc; 2704 return folded; 2705 } 2706 break; 2707 case MEM_REF: 2708 { 2709 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, 2710 stmts, domstmt); 2711 tree offset = currop->op0; 2712 if (!baseop) 2713 return NULL_TREE; 2714 if (TREE_CODE (baseop) == ADDR_EXPR 2715 && handled_component_p (TREE_OPERAND (baseop, 0))) 2716 { 2717 HOST_WIDE_INT off; 2718 tree base; 2719 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0), 2720 &off); 2721 gcc_assert (base); 2722 offset = int_const_binop (PLUS_EXPR, offset, 2723 build_int_cst (TREE_TYPE (offset), 2724 off)); 2725 baseop = build_fold_addr_expr (base); 2726 } 2727 return fold_build2 (MEM_REF, currop->type, baseop, offset); 2728 } 2729 break; 2730 case TARGET_MEM_REF: 2731 { 2732 pre_expr op0expr, op1expr; 2733 tree genop0 = NULL_TREE, genop1 = NULL_TREE; 2734 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands, 2735 ++*operand); 2736 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, 2737 stmts, domstmt); 2738 if (!baseop) 2739 return NULL_TREE; 2740 if (currop->op0) 2741 { 2742 op0expr = get_or_alloc_expr_for (currop->op0); 2743 genop0 = find_or_generate_expression (block, op0expr, 2744 stmts, domstmt); 2745 if (!genop0) 2746 return NULL_TREE; 2747 } 2748 if (nextop->op0) 2749 { 2750 op1expr = get_or_alloc_expr_for (nextop->op0); 2751 genop1 = find_or_generate_expression (block, op1expr, 2752 stmts, domstmt); 2753 if (!genop1) 2754 return NULL_TREE; 2755 } 2756 return build5 (TARGET_MEM_REF, currop->type, 2757 baseop, currop->op2, genop0, currop->op1, genop1); 2758 } 2759 break; 2760 case ADDR_EXPR: 2761 if (currop->op0) 2762 { 2763 gcc_assert (is_gimple_min_invariant (currop->op0)); 2764 return currop->op0; 2765 } 2766 /* Fallthrough. 
*/ 2767 case REALPART_EXPR:
2768 case IMAGPART_EXPR:
2769 case VIEW_CONVERT_EXPR:
2770 {
2771 tree folded;
2772 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2773 operand,
2774 stmts, domstmt);
2775 if (!genop0)
2776 return NULL_TREE;
2777 folded = fold_build1 (currop->opcode, currop->type,
2778 genop0);
2779 return folded;
2780 }
2781 break;
2782 case WITH_SIZE_EXPR:
2783 {
2784 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2785 stmts, domstmt);
2786 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2787 tree genop1;
2788
2789 if (!genop0)
2790 return NULL_TREE;
2791
2792 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2793 if (!genop1)
2794 return NULL_TREE;
2795
2796 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2797 }
2798 break;
2799 case BIT_FIELD_REF:
2800 {
2801 tree folded;
2802 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2803 stmts, domstmt);
2804 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2805 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2806 tree genop1;
2807 tree genop2;
2808
2809 if (!genop0)
2810 return NULL_TREE;
2811 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2812 if (!genop1)
2813 return NULL_TREE;
2814 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2815 if (!genop2)
2816 return NULL_TREE;
2817 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2818 genop2);
2819 return folded;
2820 }
2821
2822 /* For array ref vn_reference_op's, operand 1 of the array ref
2823 is op0 of the reference op, operand 2 is op1, and operand 3
2824 is op2. */
2825 case ARRAY_RANGE_REF:
2826 case ARRAY_REF:
2827 {
2828 tree genop0;
2829 tree genop1 = currop->op0;
2830 pre_expr op1expr;
2831 tree genop2 = currop->op1;
2832 pre_expr op2expr;
2833 tree genop3 = currop->op2;
2834 pre_expr op3expr;
2835 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2836 stmts, domstmt);
2837 if (!genop0)
2838 return NULL_TREE;
2839 op1expr = get_or_alloc_expr_for (genop1);
2840 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2841 if (!genop1)
2842 return NULL_TREE;
2843 if (genop2)
2844 {
2845 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2846 /* Drop zero minimum index if redundant. */
2847 if (integer_zerop (genop2)
2848 && (!domain_type
2849 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2850 genop2 = NULL_TREE;
2851 else
2852 {
2853 op2expr = get_or_alloc_expr_for (genop2);
2854 genop2 = find_or_generate_expression (block, op2expr, stmts,
2855 domstmt);
2856 if (!genop2)
2857 return NULL_TREE;
2858 }
2859 }
2860 if (genop3)
2861 {
2862 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2863 /* We can't always put a size in units of the element alignment
2864 here as the element alignment may not be visible. See
2865 PR43783. Simply drop the element size for constant
2866 sizes. 
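E.g., a size operand equal to TYPE_SIZE_UNIT of the element type is
   redundant and dropped below; any other (e.g. variable) size is kept,
   rescaled from bytes to multiples of the element alignment via
   EXACT_DIV_EXPR.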
*/ 2867 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2868 genop3 = NULL_TREE;
2869 else
2870 {
2871 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2872 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2873 op3expr = get_or_alloc_expr_for (genop3);
2874 genop3 = find_or_generate_expression (block, op3expr, stmts,
2875 domstmt);
2876 if (!genop3)
2877 return NULL_TREE;
2878 }
2879 }
2880 return build4 (currop->opcode, currop->type, genop0, genop1,
2881 genop2, genop3);
2882 }
2883 case COMPONENT_REF:
2884 {
2885 tree op0;
2886 tree op1;
2887 tree genop2 = currop->op1;
2888 pre_expr op2expr;
2889 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2890 stmts, domstmt);
2891 if (!op0)
2892 return NULL_TREE;
2893 /* op1 should be a FIELD_DECL, which are represented by
2894 themselves. */
2895 op1 = currop->op0;
2896 if (genop2)
2897 {
2898 op2expr = get_or_alloc_expr_for (genop2);
2899 genop2 = find_or_generate_expression (block, op2expr, stmts,
2900 domstmt);
2901 if (!genop2)
2902 return NULL_TREE;
2903 }
2904
2905 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2906 genop2);
2907 }
2908 break;
2909 case SSA_NAME:
2910 {
2911 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2912 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2913 return genop;
2914 }
2915 case STRING_CST:
2916 case INTEGER_CST:
2917 case COMPLEX_CST:
2918 case VECTOR_CST:
2919 case REAL_CST:
2920 case CONSTRUCTOR:
2921 case VAR_DECL:
2922 case PARM_DECL:
2923 case CONST_DECL:
2924 case RESULT_DECL:
2925 case FUNCTION_DECL:
2926 return currop->op0;
2927
2928 default:
2929 gcc_unreachable ();
2930 }
2931 }
2932
2933 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
2934 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2935 trying to rename aggregates into SSA form directly, which is a no-no.
2936
2937 Thus, this routine doesn't create temporaries; it just builds a
2938 single access expression for the array, calling
2939 find_or_generate_expression to build the innermost pieces.
2940
2941 This function is a subroutine of create_expression_by_pieces, and
2942 should not be called on its own unless you really know what you
2943 are doing. */
2944
2945 static tree
2946 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2947 gimple_seq *stmts, gimple domstmt)
2948 {
2949 unsigned int op = 0;
2950 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2951 }
2952
2953 /* Find a leader for an expression, or generate one using
2954 create_expression_by_pieces if it's ANTIC but
2955 complex.
2956 BLOCK is the basic_block we are looking for leaders in.
2957 EXPR is the expression to find a leader or generate for.
2958 STMTS is the statement list to put the inserted expressions on.
2959 Returns the SSA_NAME of the LHS of the generated expression or the
2960 leader.
2961 DOMSTMT if non-NULL is a statement that should be dominated by
2962 all uses in the generated expression. If DOMSTMT is non-NULL this
2963 routine can fail and return NULL_TREE. Otherwise it will assert
2964 on failure. 
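For example (with hypothetical SSA names): if the value of EXPR has
   the leader t_5 in AVAIL_OUT (BLOCK), t_5 is returned directly; if
   the value is only represented by a NARY such as a_1 + b_2,
   create_expression_by_pieces appends something like
     pretmp_7 = a_1 + b_2;
   to STMTS and pretmp_7 is returned.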
*/ 2965
2966 static tree
2967 find_or_generate_expression (basic_block block, pre_expr expr,
2968 gimple_seq *stmts, gimple domstmt)
2969 {
2970 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2971 get_expr_value_id (expr), domstmt);
2972 tree genop = NULL;
2973 if (leader)
2974 {
2975 if (leader->kind == NAME)
2976 genop = PRE_EXPR_NAME (leader);
2977 else if (leader->kind == CONSTANT)
2978 genop = PRE_EXPR_CONSTANT (leader);
2979 }
2980
2981 /* If it's still NULL, it must be a complex expression, so generate
2982 it recursively. Not so if inserting expressions for values generated
2983 by SCCVN. */
2984 if (genop == NULL
2985 && !domstmt)
2986 {
2987 bitmap_set_t exprset;
2988 unsigned int lookfor = get_expr_value_id (expr);
2989 bool handled = false;
2990 bitmap_iterator bi;
2991 unsigned int i;
2992
2993 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2994 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2995 {
2996 pre_expr temp = expression_for_id (i);
2997 if (temp->kind != NAME)
2998 {
2999 handled = true;
3000 genop = create_expression_by_pieces (block, temp, stmts,
3001 domstmt,
3002 get_expr_type (expr));
3003 break;
3004 }
3005 }
3006 if (!handled && domstmt)
3007 return NULL_TREE;
3008
3009 gcc_assert (handled);
3010 }
3011 return genop;
3012 }
3013
3014 #define NECESSARY GF_PLF_1
3015
3016 /* Create an expression in pieces, so that we can handle very complex
3017 expressions that may be ANTIC, but not necessarily GIMPLE.
3018 BLOCK is the basic block the expression will be inserted into,
3019 EXPR is the expression to insert (in value form), and
3020 STMTS is a statement list to append the necessary insertions into.
3021
3022 This function will die if we hit some value that shouldn't be
3023 ANTIC but is (IE there is no leader for it, or its components).
3024 This function may also generate expressions that are themselves
3025 partially or fully redundant. Those that are will be either made
3026 fully redundant during the next iteration of insert (for partially
3027 redundant ones), or eliminated by eliminate (for fully redundant
3028 ones).
3029
3030 If DOMSTMT is non-NULL then we make sure that all uses in the
3031 expressions dominate that statement. In this case the function
3032 can return NULL_TREE to signal failure. */
3033
3034 static tree
3035 create_expression_by_pieces (basic_block block, pre_expr expr,
3036 gimple_seq *stmts, gimple domstmt, tree type)
3037 {
3038 tree temp, name;
3039 tree folded;
3040 gimple_seq forced_stmts = NULL;
3041 unsigned int value_id;
3042 gimple_stmt_iterator gsi;
3043 tree exprtype = type ? type : get_expr_type (expr);
3044 pre_expr nameexpr;
3045 gimple newstmt;
3046
3047 switch (expr->kind)
3048 {
3049 /* We may hit the NAME/CONSTANT case if we have to convert types
3050 that value numbering saw through. */
3051 case NAME:
3052 folded = PRE_EXPR_NAME (expr);
3053 break;
3054 case CONSTANT:
3055 folded = PRE_EXPR_CONSTANT (expr);
3056 break;
3057 case REFERENCE:
3058 {
3059 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3060 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3061 }
3062 break;
3063 case NARY:
3064 {
3065 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3066 tree genop[4];
3067 unsigned i;
3068 for (i = 0; i < nary->length; ++i)
3069 {
3070 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
3071 genop[i] = find_or_generate_expression (block, op,
3072 stmts, domstmt);
3073 if (!genop[i])
3074 return NULL_TREE;
3075 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. 
It 3076 may have conversions stripped. */
3077 if (nary->opcode == POINTER_PLUS_EXPR)
3078 {
3079 if (i == 0)
3080 genop[i] = fold_convert (nary->type, genop[i]);
3081 else if (i == 1)
3082 genop[i] = convert_to_ptrofftype (genop[i]);
3083 }
3084 else
3085 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3086 }
3087 if (nary->opcode == CONSTRUCTOR)
3088 {
3089 VEC(constructor_elt,gc) *elts = NULL;
3090 for (i = 0; i < nary->length; ++i)
3091 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3092 folded = build_constructor (nary->type, elts);
3093 }
3094 else
3095 {
3096 switch (nary->length)
3097 {
3098 case 1:
3099 folded = fold_build1 (nary->opcode, nary->type,
3100 genop[0]);
3101 break;
3102 case 2:
3103 folded = fold_build2 (nary->opcode, nary->type,
3104 genop[0], genop[1]);
3105 break;
3106 case 3:
3107 folded = fold_build3 (nary->opcode, nary->type,
3108 genop[0], genop[1], genop[2]);
3109 break;
3110 default:
3111 gcc_unreachable ();
3112 }
3113 }
3114 }
3115 break;
3116 default:
3117 return NULL_TREE;
3118 }
3119
3120 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3121 folded = fold_convert (exprtype, folded);
3122
3123 /* Force the generated expression to be a sequence of GIMPLE
3124 statements.
3125 We have to call unshare_expr because force_gimple_operand may
3126 modify the tree we pass to it. */
3127 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3128 false, NULL);
3129
3130 /* If forcing produced any intermediate expressions, add them
3131 to the value sets and chain them in the instruction stream. */
3132 if (forced_stmts)
3133 {
3134 gsi = gsi_start (forced_stmts);
3135 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3136 {
3137 gimple stmt = gsi_stmt (gsi);
3138 tree forcedname = gimple_get_lhs (stmt);
3139 pre_expr nameexpr;
3140
3141 if (TREE_CODE (forcedname) == SSA_NAME)
3142 {
3143 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3144 VN_INFO_GET (forcedname)->valnum = forcedname;
3145 VN_INFO (forcedname)->value_id = get_next_value_id ();
3146 nameexpr = get_or_alloc_expr_for_name (forcedname);
3147 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3148 if (!in_fre)
3149 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3150 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3151 }
3152 mark_symbols_for_renaming (stmt);
3153 }
3154 gimple_seq_add_seq (stmts, forced_stmts);
3155 }
3156
3157 /* Build and insert the assignment of the end result to the temporary
3158 that we will return. */
3159 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3160 pretemp = create_tmp_reg (exprtype, "pretmp");
3161
3162 temp = pretemp;
3163 add_referenced_var (temp);
3164
3165 newstmt = gimple_build_assign (temp, folded);
3166 name = make_ssa_name (temp, newstmt);
3167 gimple_assign_set_lhs (newstmt, name);
3168 gimple_set_plf (newstmt, NECESSARY, false);
3169
3170 gimple_seq_add_stmt (stmts, newstmt);
3171 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3172
3173 /* All the symbols in NEWEXPR should be put into SSA form. */
3174 mark_symbols_for_renaming (newstmt);
3175
3176 /* Fold the last statement. */
3177 gsi = gsi_last (*stmts);
3178 if (fold_stmt_inplace (&gsi))
3179 update_stmt (gsi_stmt (gsi));
3180
3181 /* Add a value number to the temporary.
3182 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3183 we are creating the expression by pieces, and this particular piece of
3184 the expression may have been represented. 
There is no harm in replacing 3185 here. */ 3186 VN_INFO_GET (name)->valnum = name; 3187 value_id = get_expr_value_id (expr); 3188 VN_INFO (name)->value_id = value_id; 3189 nameexpr = get_or_alloc_expr_for_name (name); 3190 add_to_value (value_id, nameexpr); 3191 if (NEW_SETS (block)) 3192 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr); 3193 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr); 3194 3195 pre_stats.insertions++; 3196 if (dump_file && (dump_flags & TDF_DETAILS)) 3197 { 3198 fprintf (dump_file, "Inserted "); 3199 print_gimple_stmt (dump_file, newstmt, 0, 0); 3200 fprintf (dump_file, " in predecessor %d\n", block->index); 3201 } 3202 3203 return name; 3204 } 3205 3206 3207 /* Returns true if we want to inhibit the insertions of PHI nodes 3208 for the given EXPR for basic block BB (a member of a loop). 3209 We want to do this, when we fear that the induction variable we 3210 create might inhibit vectorization. */ 3211 3212 static bool 3213 inhibit_phi_insertion (basic_block bb, pre_expr expr) 3214 { 3215 vn_reference_t vr = PRE_EXPR_REFERENCE (expr); 3216 VEC (vn_reference_op_s, heap) *ops = vr->operands; 3217 vn_reference_op_t op; 3218 unsigned i; 3219 3220 /* If we aren't going to vectorize we don't inhibit anything. */ 3221 if (!flag_tree_vectorize) 3222 return false; 3223 3224 /* Otherwise we inhibit the insertion when the address of the 3225 memory reference is a simple induction variable. In other 3226 cases the vectorizer won't do anything anyway (either it's 3227 loop invariant or a complicated expression). */ 3228 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op) 3229 { 3230 switch (op->opcode) 3231 { 3232 case ARRAY_REF: 3233 case ARRAY_RANGE_REF: 3234 if (TREE_CODE (op->op0) != SSA_NAME) 3235 break; 3236 /* Fallthru. */ 3237 case SSA_NAME: 3238 { 3239 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0)); 3240 affine_iv iv; 3241 /* Default defs are loop invariant. */ 3242 if (!defbb) 3243 break; 3244 /* Defined outside this loop, also loop invariant. */ 3245 if (!flow_bb_inside_loop_p (bb->loop_father, defbb)) 3246 break; 3247 /* If it's a simple induction variable inhibit insertion, 3248 the vectorizer might be interested in this one. */ 3249 if (simple_iv (bb->loop_father, bb->loop_father, 3250 op->op0, &iv, true)) 3251 return true; 3252 /* No simple IV, vectorizer can't do anything, hence no 3253 reason to inhibit the transformation for this operand. */ 3254 break; 3255 } 3256 default: 3257 break; 3258 } 3259 } 3260 return false; 3261 } 3262 3263 /* Insert the to-be-made-available values of expression EXPRNUM for each 3264 predecessor, stored in AVAIL, into the predecessors of BLOCK, and 3265 merge the result with a phi node, given the same value number as 3266 NODE. Return true if we have inserted new stuff. */ 3267 3268 static bool 3269 insert_into_preds_of_block (basic_block block, unsigned int exprnum, 3270 pre_expr *avail) 3271 { 3272 pre_expr expr = expression_for_id (exprnum); 3273 pre_expr newphi; 3274 unsigned int val = get_expr_value_id (expr); 3275 edge pred; 3276 bool insertions = false; 3277 bool nophi = false; 3278 basic_block bprime; 3279 pre_expr eprime; 3280 edge_iterator ei; 3281 tree type = get_expr_type (expr); 3282 tree temp; 3283 gimple phi; 3284 3285 if (dump_file && (dump_flags & TDF_DETAILS)) 3286 { 3287 fprintf (dump_file, "Found partial redundancy for expression "); 3288 print_pre_expr (dump_file, expr); 3289 fprintf (dump_file, " (%04d)\n", val); 3290 } 3291 3292 /* Make sure we aren't creating an induction variable. 
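E.g., if BLOCK is a loop header whose predecessors are the preheader
   and the latch, inserting for the value x + 4 would create something
   like
     xpre_1 = PHI <x_5 (preheader), xpre_2 (latch)>
   with a fresh computation on the latch feeding xpre_2, in effect a
   brand-new induction variable (see inhibit_phi_insertion above for
   the related vectorization concern with memory references).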
*/ 3293 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2) 3294 { 3295 bool firstinsideloop = false; 3296 bool secondinsideloop = false; 3297 firstinsideloop = flow_bb_inside_loop_p (block->loop_father, 3298 EDGE_PRED (block, 0)->src); 3299 secondinsideloop = flow_bb_inside_loop_p (block->loop_father, 3300 EDGE_PRED (block, 1)->src); 3301 /* Induction variables only have one edge inside the loop. */ 3302 if ((firstinsideloop ^ secondinsideloop) 3303 && (expr->kind != REFERENCE 3304 || inhibit_phi_insertion (block, expr))) 3305 { 3306 if (dump_file && (dump_flags & TDF_DETAILS)) 3307 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n"); 3308 nophi = true; 3309 } 3310 } 3311 3312 /* Make the necessary insertions. */ 3313 FOR_EACH_EDGE (pred, ei, block->preds) 3314 { 3315 gimple_seq stmts = NULL; 3316 tree builtexpr; 3317 bprime = pred->src; 3318 eprime = avail[bprime->index]; 3319 3320 if (eprime->kind != NAME && eprime->kind != CONSTANT) 3321 { 3322 builtexpr = create_expression_by_pieces (bprime, 3323 eprime, 3324 &stmts, NULL, 3325 type); 3326 gcc_assert (!(pred->flags & EDGE_ABNORMAL)); 3327 gsi_insert_seq_on_edge (pred, stmts); 3328 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr); 3329 insertions = true; 3330 } 3331 else if (eprime->kind == CONSTANT) 3332 { 3333 /* Constants may not have the right type, fold_convert 3334 should give us back a constant with the right type. 3335 */ 3336 tree constant = PRE_EXPR_CONSTANT (eprime); 3337 if (!useless_type_conversion_p (type, TREE_TYPE (constant))) 3338 { 3339 tree builtexpr = fold_convert (type, constant); 3340 if (!is_gimple_min_invariant (builtexpr)) 3341 { 3342 tree forcedexpr = force_gimple_operand (builtexpr, 3343 &stmts, true, 3344 NULL); 3345 if (!is_gimple_min_invariant (forcedexpr)) 3346 { 3347 if (forcedexpr != builtexpr) 3348 { 3349 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime); 3350 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime); 3351 } 3352 if (stmts) 3353 { 3354 gimple_stmt_iterator gsi; 3355 gsi = gsi_start (stmts); 3356 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 3357 { 3358 gimple stmt = gsi_stmt (gsi); 3359 tree lhs = gimple_get_lhs (stmt); 3360 if (TREE_CODE (lhs) == SSA_NAME) 3361 bitmap_set_bit (inserted_exprs, 3362 SSA_NAME_VERSION (lhs)); 3363 gimple_set_plf (stmt, NECESSARY, false); 3364 } 3365 gsi_insert_seq_on_edge (pred, stmts); 3366 } 3367 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr); 3368 } 3369 } 3370 else 3371 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr); 3372 } 3373 } 3374 else if (eprime->kind == NAME) 3375 { 3376 /* We may have to do a conversion because our value 3377 numbering can look through types in certain cases, but 3378 our IL requires all operands of a phi node have the same 3379 type. 
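E.g. (hypothetical names), if the PHI being built has type long but
   the leader available on this edge is the int name n_3, we emit
     tmp_4 = (long) n_3;
   on the edge and use tmp_4 as the PHI argument instead.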
*/ 3380 tree name = PRE_EXPR_NAME (eprime); 3381 if (!useless_type_conversion_p (type, TREE_TYPE (name))) 3382 { 3383 tree builtexpr; 3384 tree forcedexpr; 3385 builtexpr = fold_convert (type, name); 3386 forcedexpr = force_gimple_operand (builtexpr, 3387 &stmts, true, 3388 NULL); 3389 3390 if (forcedexpr != name) 3391 { 3392 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum; 3393 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id; 3394 } 3395 3396 if (stmts) 3397 { 3398 gimple_stmt_iterator gsi; 3399 gsi = gsi_start (stmts); 3400 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 3401 { 3402 gimple stmt = gsi_stmt (gsi); 3403 tree lhs = gimple_get_lhs (stmt); 3404 if (TREE_CODE (lhs) == SSA_NAME) 3405 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs)); 3406 gimple_set_plf (stmt, NECESSARY, false); 3407 } 3408 gsi_insert_seq_on_edge (pred, stmts); 3409 } 3410 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr); 3411 } 3412 } 3413 } 3414 /* If we didn't want a phi node, and we made insertions, we still have 3415 inserted new stuff, and thus return true. If we didn't want a phi node, 3416 and didn't make insertions, we haven't added anything new, so return 3417 false. */ 3418 if (nophi && insertions) 3419 return true; 3420 else if (nophi && !insertions) 3421 return false; 3422 3423 /* Now build a phi for the new variable. */ 3424 if (!prephitemp || TREE_TYPE (prephitemp) != type) 3425 prephitemp = create_tmp_var (type, "prephitmp"); 3426 3427 temp = prephitemp; 3428 add_referenced_var (temp); 3429 3430 if (TREE_CODE (type) == COMPLEX_TYPE 3431 || TREE_CODE (type) == VECTOR_TYPE) 3432 DECL_GIMPLE_REG_P (temp) = 1; 3433 phi = create_phi_node (temp, block); 3434 3435 gimple_set_plf (phi, NECESSARY, false); 3436 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi); 3437 VN_INFO (gimple_phi_result (phi))->value_id = val; 3438 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi))); 3439 FOR_EACH_EDGE (pred, ei, block->preds) 3440 { 3441 pre_expr ae = avail[pred->src->index]; 3442 gcc_assert (get_expr_type (ae) == type 3443 || useless_type_conversion_p (type, get_expr_type (ae))); 3444 if (ae->kind == CONSTANT) 3445 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION); 3446 else 3447 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred, 3448 UNKNOWN_LOCATION); 3449 } 3450 3451 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi)); 3452 add_to_value (val, newphi); 3453 3454 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing 3455 this insertion, since we test for the existence of this value in PHI_GEN 3456 before proceeding with the partial redundancy checks in insert_aux. 3457 3458 The value may exist in AVAIL_OUT, in particular, it could be represented 3459 by the expression we are trying to eliminate, in which case we want the 3460 replacement to occur. If it's not existing in AVAIL_OUT, we want it 3461 inserted there. 3462 3463 Similarly, to the PHI_GEN case, the value should not exist in NEW_SETS of 3464 this block, because if it did, it would have existed in our dominator's 3465 AVAIL_OUT, and would have been skipped due to the full redundancy check. 
3466 */ 3467 3468 bitmap_insert_into_set (PHI_GEN (block), newphi); 3469 bitmap_value_replace_in_set (AVAIL_OUT (block), 3470 newphi); 3471 bitmap_insert_into_set (NEW_SETS (block), 3472 newphi); 3473 3474 if (dump_file && (dump_flags & TDF_DETAILS)) 3475 { 3476 fprintf (dump_file, "Created phi "); 3477 print_gimple_stmt (dump_file, phi, 0, 0); 3478 fprintf (dump_file, " in block %d\n", block->index); 3479 } 3480 pre_stats.phis++; 3481 return true; 3482 } 3483 3484 3485 3486 /* Perform insertion of partially redundant values. 3487 For BLOCK, do the following: 3488 1. Propagate the NEW_SETS of the dominator into the current block. 3489 If the block has multiple predecessors, 3490 2a. Iterate over the ANTIC expressions for the block to see if 3491 any of them are partially redundant. 3492 2b. If so, insert them into the necessary predecessors to make 3493 the expression fully redundant. 3494 2c. Insert a new PHI merging the values of the predecessors. 3495 2d. Insert the new PHI, and the new expressions, into the 3496 NEW_SETS set. 3497 3. Recursively call ourselves on the dominator children of BLOCK. 3498 3499 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by 3500 do_regular_insertion and do_partial_insertion. 3501 3502 */ 3503 3504 static bool 3505 do_regular_insertion (basic_block block, basic_block dom) 3506 { 3507 bool new_stuff = false; 3508 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block)); 3509 pre_expr expr; 3510 int i; 3511 3512 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) 3513 { 3514 if (expr->kind != NAME) 3515 { 3516 pre_expr *avail; 3517 unsigned int val; 3518 bool by_some = false; 3519 bool cant_insert = false; 3520 bool all_same = true; 3521 pre_expr first_s = NULL; 3522 edge pred; 3523 basic_block bprime; 3524 pre_expr eprime = NULL; 3525 edge_iterator ei; 3526 pre_expr edoubleprime = NULL; 3527 bool do_insertion = false; 3528 3529 val = get_expr_value_id (expr); 3530 if (bitmap_set_contains_value (PHI_GEN (block), val)) 3531 continue; 3532 if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) 3533 { 3534 if (dump_file && (dump_flags & TDF_DETAILS)) 3535 fprintf (dump_file, "Found fully redundant value\n"); 3536 continue; 3537 } 3538 3539 avail = XCNEWVEC (pre_expr, last_basic_block); 3540 FOR_EACH_EDGE (pred, ei, block->preds) 3541 { 3542 unsigned int vprime; 3543 3544 /* We should never run insertion for the exit block 3545 and so not come across fake pred edges. */ 3546 gcc_assert (!(pred->flags & EDGE_FAKE)); 3547 bprime = pred->src; 3548 eprime = phi_translate (expr, ANTIC_IN (block), NULL, 3549 bprime, block); 3550 3551 /* eprime will generally only be NULL if the 3552 value of the expression, translated 3553 through the PHI for this predecessor, is 3554 undefined. If that is the case, we can't 3555 make the expression fully redundant, 3556 because its value is undefined along a 3557 predecessor path. We can thus break out 3558 early because it doesn't matter what the 3559 rest of the results are. 
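(For instance, translating a_1 + b_2 through a PHI whose argument
   on this edge is an uninitialized default definition yields NULL;
   no insertion could give the expression a meaningful value on that
   path.)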
*/ 3560 if (eprime == NULL) 3561 { 3562 cant_insert = true; 3563 break; 3564 } 3565 3566 eprime = fully_constant_expression (eprime); 3567 vprime = get_expr_value_id (eprime); 3568 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), 3569 vprime, NULL); 3570 if (edoubleprime == NULL) 3571 { 3572 avail[bprime->index] = eprime; 3573 all_same = false; 3574 } 3575 else 3576 { 3577 avail[bprime->index] = edoubleprime; 3578 by_some = true; 3579 /* We want to perform insertions to remove a redundancy on 3580 a path in the CFG we want to optimize for speed. */ 3581 if (optimize_edge_for_speed_p (pred)) 3582 do_insertion = true; 3583 if (first_s == NULL) 3584 first_s = edoubleprime; 3585 else if (!pre_expr_eq (first_s, edoubleprime)) 3586 all_same = false; 3587 } 3588 } 3589 /* If we can insert it, it's not the same value 3590 already existing along every predecessor, and 3591 it's defined by some predecessor, it is 3592 partially redundant. */ 3593 if (!cant_insert && !all_same && by_some) 3594 { 3595 if (!do_insertion) 3596 { 3597 if (dump_file && (dump_flags & TDF_DETAILS)) 3598 { 3599 fprintf (dump_file, "Skipping partial redundancy for " 3600 "expression "); 3601 print_pre_expr (dump_file, expr); 3602 fprintf (dump_file, " (%04d), no redundancy on to be " 3603 "optimized for speed edge\n", val); 3604 } 3605 } 3606 else if (dbg_cnt (treepre_insert) 3607 && insert_into_preds_of_block (block, 3608 get_expression_id (expr), 3609 avail)) 3610 new_stuff = true; 3611 } 3612 /* If all edges produce the same value and that value is 3613 an invariant, then the PHI has the same value on all 3614 edges. Note this. */ 3615 else if (!cant_insert && all_same && eprime 3616 && (edoubleprime->kind == CONSTANT 3617 || edoubleprime->kind == NAME) 3618 && !value_id_constant_p (val)) 3619 { 3620 unsigned int j; 3621 bitmap_iterator bi; 3622 bitmap_set_t exprset = VEC_index (bitmap_set_t, 3623 value_expressions, val); 3624 3625 unsigned int new_val = get_expr_value_id (edoubleprime); 3626 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bi) 3627 { 3628 pre_expr expr = expression_for_id (j); 3629 3630 if (expr->kind == NAME) 3631 { 3632 vn_ssa_aux_t info = VN_INFO (PRE_EXPR_NAME (expr)); 3633 /* Just reset the value id and valnum so it is 3634 the same as the constant we have discovered. */ 3635 if (edoubleprime->kind == CONSTANT) 3636 { 3637 info->valnum = PRE_EXPR_CONSTANT (edoubleprime); 3638 pre_stats.constified++; 3639 } 3640 else 3641 info->valnum = VN_INFO (PRE_EXPR_NAME (edoubleprime))->valnum; 3642 info->value_id = new_val; 3643 } 3644 } 3645 } 3646 free (avail); 3647 } 3648 } 3649 3650 VEC_free (pre_expr, heap, exprs); 3651 return new_stuff; 3652 } 3653 3654 3655 /* Perform insertion for partially anticipatable expressions. There 3656 is only one case we will perform insertion for these. This case is 3657 if the expression is partially anticipatable, and fully available. 3658 In this case, we know that putting it earlier will enable us to 3659 remove the later computation. 
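The classic (hypothetical) source fragment:

     if (c) x = a + b; else y = a + b;
     ...
     if (d) z = a + b;

   At the join after the first if, a + b is evaluated again only when
   d holds, so it is merely partially anticipated there; but its value
   is fully available from both predecessors, so a PHI of x and y,
   with no new computation at all, makes the final a + b fully
   redundant.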
*/ 3660 3661 3662 static bool 3663 do_partial_partial_insertion (basic_block block, basic_block dom) 3664 { 3665 bool new_stuff = false; 3666 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block)); 3667 pre_expr expr; 3668 int i; 3669 3670 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr) 3671 { 3672 if (expr->kind != NAME) 3673 { 3674 pre_expr *avail; 3675 unsigned int val; 3676 bool by_all = true; 3677 bool cant_insert = false; 3678 edge pred; 3679 basic_block bprime; 3680 pre_expr eprime = NULL; 3681 edge_iterator ei; 3682 3683 val = get_expr_value_id (expr); 3684 if (bitmap_set_contains_value (PHI_GEN (block), val)) 3685 continue; 3686 if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) 3687 continue; 3688 3689 avail = XCNEWVEC (pre_expr, last_basic_block); 3690 FOR_EACH_EDGE (pred, ei, block->preds) 3691 { 3692 unsigned int vprime; 3693 pre_expr edoubleprime; 3694 3695 /* We should never run insertion for the exit block 3696 and so not come across fake pred edges. */ 3697 gcc_assert (!(pred->flags & EDGE_FAKE)); 3698 bprime = pred->src; 3699 eprime = phi_translate (expr, ANTIC_IN (block), 3700 PA_IN (block), 3701 bprime, block); 3702 3703 /* eprime will generally only be NULL if the 3704 value of the expression, translated 3705 through the PHI for this predecessor, is 3706 undefined. If that is the case, we can't 3707 make the expression fully redundant, 3708 because its value is undefined along a 3709 predecessor path. We can thus break out 3710 early because it doesn't matter what the 3711 rest of the results are. */ 3712 if (eprime == NULL) 3713 { 3714 cant_insert = true; 3715 break; 3716 } 3717 3718 eprime = fully_constant_expression (eprime); 3719 vprime = get_expr_value_id (eprime); 3720 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), 3721 vprime, NULL); 3722 if (edoubleprime == NULL) 3723 { 3724 by_all = false; 3725 break; 3726 } 3727 else 3728 avail[bprime->index] = edoubleprime; 3729 3730 } 3731 3732 /* If we can insert it, it's not the same value 3733 already existing along every predecessor, and 3734 it's defined by some predecessor, it is 3735 partially redundant. */ 3736 if (!cant_insert && by_all && dbg_cnt (treepre_insert)) 3737 { 3738 pre_stats.pa_insert++; 3739 if (insert_into_preds_of_block (block, get_expression_id (expr), 3740 avail)) 3741 new_stuff = true; 3742 } 3743 free (avail); 3744 } 3745 } 3746 3747 VEC_free (pre_expr, heap, exprs); 3748 return new_stuff; 3749 } 3750 3751 static bool 3752 insert_aux (basic_block block) 3753 { 3754 basic_block son; 3755 bool new_stuff = false; 3756 3757 if (block) 3758 { 3759 basic_block dom; 3760 dom = get_immediate_dominator (CDI_DOMINATORS, block); 3761 if (dom) 3762 { 3763 unsigned i; 3764 bitmap_iterator bi; 3765 bitmap_set_t newset = NEW_SETS (dom); 3766 if (newset) 3767 { 3768 /* Note that we need to value_replace both NEW_SETS, and 3769 AVAIL_OUT. For both the case of NEW_SETS, the value may be 3770 represented by some non-simple expression here that we want 3771 to replace it with. 
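(E.g., the value of a + b may currently be represented in this
   block's AVAIL_OUT by the NARY expression a_1 + b_2; once the
   dominator has inserted pretmp_6 for that value, pretmp_6 must
   become the representative here as well.)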
*/ 3772 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi) 3773 { 3774 pre_expr expr = expression_for_id (i); 3775 bitmap_value_replace_in_set (NEW_SETS (block), expr); 3776 bitmap_value_replace_in_set (AVAIL_OUT (block), expr); 3777 } 3778 } 3779 if (!single_pred_p (block)) 3780 { 3781 new_stuff |= do_regular_insertion (block, dom); 3782 if (do_partial_partial) 3783 new_stuff |= do_partial_partial_insertion (block, dom); 3784 } 3785 } 3786 } 3787 for (son = first_dom_son (CDI_DOMINATORS, block); 3788 son; 3789 son = next_dom_son (CDI_DOMINATORS, son)) 3790 { 3791 new_stuff |= insert_aux (son); 3792 } 3793 3794 return new_stuff; 3795 } 3796 3797 /* Perform insertion of partially redundant values. */ 3798 3799 static void 3800 insert (void) 3801 { 3802 bool new_stuff = true; 3803 basic_block bb; 3804 int num_iterations = 0; 3805 3806 FOR_ALL_BB (bb) 3807 NEW_SETS (bb) = bitmap_set_new (); 3808 3809 while (new_stuff) 3810 { 3811 num_iterations++; 3812 new_stuff = insert_aux (ENTRY_BLOCK_PTR); 3813 } 3814 statistics_histogram_event (cfun, "insert iterations", num_iterations); 3815 } 3816 3817 3818 /* Add OP to EXP_GEN (block), and possibly to the maximal set. */ 3819 3820 static void 3821 add_to_exp_gen (basic_block block, tree op) 3822 { 3823 if (!in_fre) 3824 { 3825 pre_expr result; 3826 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op)) 3827 return; 3828 result = get_or_alloc_expr_for_name (op); 3829 bitmap_value_insert_into_set (EXP_GEN (block), result); 3830 } 3831 } 3832 3833 /* Create value ids for PHI in BLOCK. */ 3834 3835 static void 3836 make_values_for_phi (gimple phi, basic_block block) 3837 { 3838 tree result = gimple_phi_result (phi); 3839 3840 /* We have no need for virtual phis, as they don't represent 3841 actual computations. */ 3842 if (is_gimple_reg (result)) 3843 { 3844 pre_expr e = get_or_alloc_expr_for_name (result); 3845 add_to_value (get_expr_value_id (e), e); 3846 bitmap_insert_into_set (PHI_GEN (block), e); 3847 bitmap_value_insert_into_set (AVAIL_OUT (block), e); 3848 if (!in_fre) 3849 { 3850 unsigned i; 3851 for (i = 0; i < gimple_phi_num_args (phi); ++i) 3852 { 3853 tree arg = gimple_phi_arg_def (phi, i); 3854 if (TREE_CODE (arg) == SSA_NAME) 3855 { 3856 e = get_or_alloc_expr_for_name (arg); 3857 add_to_value (get_expr_value_id (e), e); 3858 } 3859 } 3860 } 3861 } 3862 } 3863 3864 /* Compute the AVAIL set for all basic blocks. 3865 3866 This function performs value numbering of the statements in each basic 3867 block. The AVAIL sets are built from information we glean while doing 3868 this value numbering, since the AVAIL sets contain only one entry per 3869 value. 3870 3871 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)]. 3872 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */ 3873 3874 static void 3875 compute_avail (void) 3876 { 3877 3878 basic_block block, son; 3879 basic_block *worklist; 3880 size_t sp = 0; 3881 unsigned i; 3882 3883 /* We pretend that default definitions are defined in the entry block. 3884 This includes function arguments and the static chain decl. 
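E.g., for a parameter a the default definition a_1(D) has no
   defining statement; entering it into the sets of ENTRY_BLOCK_PTR
   makes it available in every block, since ENTRY dominates them all.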
  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      pre_expr e;
      if (!name
          || !SSA_NAME_IS_DEFAULT_DEF (name)
          || has_zero_uses (name)
          || !is_gimple_reg (name))
        continue;

      e = get_or_alloc_expr_for_name (name);
      add_to_value (get_expr_value_id (e), e);
      if (!in_fre)
        bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
      bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
    }

  /* Allocate the worklist.  */
  worklist = XNEWVEC (basic_block, n_basic_blocks);

  /* Seed the algorithm by putting the dominator children of the entry
     block on the worklist.  */
  for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    worklist[sp++] = son;

  /* Loop until the worklist is empty.  */
  while (sp)
    {
      gimple_stmt_iterator gsi;
      gimple stmt;
      basic_block dom;
      unsigned int stmt_uid = 1;

      /* Pick a block from the worklist.  */
      block = worklist[--sp];

      /* Initially, the set of available values in BLOCK is that of
         its immediate dominator.  */
      dom = get_immediate_dominator (CDI_DOMINATORS, block);
      if (dom)
        bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));

      /* Generate values for PHI nodes.  */
      for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
        make_values_for_phi (gsi_stmt (gsi), block);

      BB_MAY_NOTRETURN (block) = 0;

      /* Now compute value numbers and populate value sets with all
         the expressions computed in BLOCK.  */
      for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          ssa_op_iter iter;
          tree op;

          stmt = gsi_stmt (gsi);
          gimple_set_uid (stmt, stmt_uid++);

          /* Cache whether the basic block has any non-visible side effect
             or control flow.
             If this isn't a call or it is the last stmt in the
             basic block, then the CFG represents things correctly.  */
          if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
            {
              /* Non-looping const functions always return normally.
                 Otherwise the call might not return, or might have
                 side effects that forbid hoisting possibly trapping
                 expressions before it.  */
              int flags = gimple_call_flags (stmt);
              if (!(flags & ECF_CONST)
                  || (flags & ECF_LOOPING_CONST_OR_PURE))
                BB_MAY_NOTRETURN (block) = 1;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
            {
              pre_expr e = get_or_alloc_expr_for_name (op);

              add_to_value (get_expr_value_id (e), e);
              if (!in_fre)
                bitmap_insert_into_set (TMP_GEN (block), e);
              bitmap_value_insert_into_set (AVAIL_OUT (block), e);
            }

          if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
            continue;

          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                add_to_exp_gen (block, op);
              continue;

            case GIMPLE_CALL:
              {
                vn_reference_t ref;
                unsigned int i;
                vn_reference_op_t vro;
                pre_expr result = NULL;
                VEC(vn_reference_op_s, heap) *ops = NULL;

                /* We can value number only calls to real functions.
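                   Internal function calls have no fndecl and no
                   corresponding reference tree, so there is nothing we
                   could sensibly look up for them; skip them.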
                */
                if (gimple_call_internal_p (stmt))
                  continue;

                copy_reference_ops_from_call (stmt, &ops);
                vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
                                            gimple_expr_type (stmt),
                                            ops, &ref, VN_NOWALK);
                VEC_free (vn_reference_op_s, heap, ops);
                if (!ref)
                  continue;

                for (i = 0; VEC_iterate (vn_reference_op_s,
                                         ref->operands, i,
                                         vro); i++)
                  {
                    if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
                      add_to_exp_gen (block, vro->op0);
                    if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
                      add_to_exp_gen (block, vro->op1);
                    if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
                      add_to_exp_gen (block, vro->op2);
                  }
                result = (pre_expr) pool_alloc (pre_expr_pool);
                result->kind = REFERENCE;
                result->id = 0;
                PRE_EXPR_REFERENCE (result) = ref;

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                if (!in_fre)
                  bitmap_value_insert_into_set (EXP_GEN (block), result);
                continue;
              }

            case GIMPLE_ASSIGN:
              {
                pre_expr result = NULL;
                switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                  {
                  case tcc_unary:
                  case tcc_binary:
                  case tcc_comparison:
                    {
                      vn_nary_op_t nary;
                      unsigned int i;

                      vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
                                                gimple_assign_rhs_code (stmt),
                                                gimple_expr_type (stmt),
                                                gimple_assign_rhs1_ptr (stmt),
                                                &nary);

                      if (!nary)
                        continue;

                      for (i = 0; i < nary->length; i++)
                        if (TREE_CODE (nary->op[i]) == SSA_NAME)
                          add_to_exp_gen (block, nary->op[i]);

                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = NARY;
                      result->id = 0;
                      PRE_EXPR_NARY (result) = nary;
                      break;
                    }

                  case tcc_declaration:
                  case tcc_reference:
                    {
                      vn_reference_t ref;
                      unsigned int i;
                      vn_reference_op_t vro;

                      vn_reference_lookup (gimple_assign_rhs1 (stmt),
                                           gimple_vuse (stmt),
                                           VN_WALK, &ref);
                      if (!ref)
                        continue;

                      for (i = 0; VEC_iterate (vn_reference_op_s,
                                               ref->operands, i,
                                               vro); i++)
                        {
                          if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
                            add_to_exp_gen (block, vro->op0);
                          if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
                            add_to_exp_gen (block, vro->op1);
                          if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
                            add_to_exp_gen (block, vro->op2);
                        }
                      result = (pre_expr) pool_alloc (pre_expr_pool);
                      result->kind = REFERENCE;
                      result->id = 0;
                      PRE_EXPR_REFERENCE (result) = ref;
                      break;
                    }

                  default:
                    /* For any other statement that we don't
                       recognize, simply add all referenced
                       SSA_NAMEs to EXP_GEN.  */
                    FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                      add_to_exp_gen (block, op);
                    continue;
                  }

                get_or_alloc_expression_id (result);
                add_to_value (get_expr_value_id (result), result);
                if (!in_fre)
                  bitmap_value_insert_into_set (EXP_GEN (block), result);

                continue;
              }
            default:
              break;
            }
        }

      /* Put the dominator children of BLOCK on the worklist of blocks
         to compute available sets for.  */
      for (son = first_dom_son (CDI_DOMINATORS, block);
           son;
           son = next_dom_son (CDI_DOMINATORS, son))
        worklist[sp++] = son;
    }

  free (worklist);
}

/* Insert the expression for SSA_VN that SCCVN thought would be simpler
   than the available expressions for it.
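   (For example, and purely as an illustration, SCCVN may have
   simplified a chain of conversions down to a single conversion whose
   result does not yet exist in the IL.)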
   The insertion point is
   right before the first use in STMT.  Returns the SSA_NAME that should
   be used for replacement.  */

static tree
do_SCCVN_insertion (gimple stmt, tree ssa_vn)
{
  basic_block bb = gimple_bb (stmt);
  gimple_stmt_iterator gsi;
  gimple_seq stmts = NULL;
  tree expr;
  pre_expr e;

  /* First create a value expression from the expression we want
     to insert and associate it with the value handle for SSA_VN.  */
  e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
  if (e == NULL)
    return NULL_TREE;

  /* Then use create_expression_by_pieces to generate a valid
     expression to insert at this point of the IL stream.  */
  expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
  if (expr == NULL_TREE)
    return NULL_TREE;
  gsi = gsi_for_stmt (stmt);
  gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

  return expr;
}

/* Eliminate fully redundant computations.  */

static unsigned int
eliminate (void)
{
  VEC (gimple, heap) *to_remove = NULL;
  VEC (gimple, heap) *to_update = NULL;
  basic_block b;
  unsigned int todo = 0;
  gimple_stmt_iterator gsi;
  gimple stmt;
  unsigned i;

  FOR_EACH_BB (b)
    {
      for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          tree lhs = NULL_TREE;
          tree rhs = NULL_TREE;

          stmt = gsi_stmt (gsi);

          if (gimple_has_lhs (stmt))
            lhs = gimple_get_lhs (stmt);

          if (gimple_assign_single_p (stmt))
            rhs = gimple_assign_rhs1 (stmt);

          /* Look up the RHS of the expression and see if we have an
             available computation for it.  If so, replace the RHS with
             the available computation.

             See PR43491.
             We do not replace a global register variable when it is the
             RHS of a single assignment.  We do replace a local register
             variable, since GCC does not guarantee that a local variable
             will be allocated in a register.  */
          if (gimple_has_lhs (stmt)
              && TREE_CODE (lhs) == SSA_NAME
              && !gimple_assign_ssa_name_copy_p (stmt)
              && (!gimple_assign_single_p (stmt)
                  || (!is_gimple_min_invariant (rhs)
                      && (gimple_assign_rhs_code (stmt) != VAR_DECL
                          || !is_global_var (rhs)
                          || !DECL_HARD_REGISTER (rhs))))
              && !gimple_has_volatile_ops (stmt)
              && !has_zero_uses (lhs))
            {
              tree sprime = NULL;
              pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
              pre_expr sprimeexpr;
              gimple orig_stmt = stmt;

              sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
                                               get_expr_value_id (lhsexpr),
                                               NULL);

              if (sprimeexpr)
                {
                  if (sprimeexpr->kind == CONSTANT)
                    sprime = PRE_EXPR_CONSTANT (sprimeexpr);
                  else if (sprimeexpr->kind == NAME)
                    sprime = PRE_EXPR_NAME (sprimeexpr);
                  else
                    gcc_unreachable ();
                }

              /* If there is no existing leader but SCCVN knows this
                 value is constant, use that constant.
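                 For instance (a made-up example), if VN_INFO (lhs)->valnum
                 is the integer constant 42, uses of LHS can simply become
                 42, possibly wrapped in a conversion to LHS's type.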
              */
              if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
                {
                  sprime = VN_INFO (lhs)->valnum;
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (sprime)))
                    sprime = fold_convert (TREE_TYPE (lhs), sprime);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }
                  pre_stats.eliminations++;
                  propagate_tree_value_into_stmt (&gsi, sprime);
                  stmt = gsi_stmt (gsi);
                  update_stmt (stmt);

                  /* If we removed EH side-effects from the statement, clean
                     its EH information.  */
                  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                    {
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed EH side-effects.\n");
                    }
                  continue;
                }

              /* If there is no existing usable leader but SCCVN thinks
                 it has an expression it wants to use as replacement,
                 insert that.  */
              if (!sprime || sprime == lhs)
                {
                  tree val = VN_INFO (lhs)->valnum;
                  if (val != VN_TOP
                      && TREE_CODE (val) == SSA_NAME
                      && VN_INFO (val)->needs_insertion
                      && can_PRE_operation (vn_get_expr_for (val)))
                    sprime = do_SCCVN_insertion (stmt, val);
                }
              if (sprime
                  && sprime != lhs
                  && (rhs == NULL_TREE
                      || TREE_CODE (rhs) != SSA_NAME
                      || may_propagate_copy (rhs, sprime)))
                {
                  bool can_make_abnormal_goto
                    = is_gimple_call (stmt)
                      && stmt_can_make_abnormal_goto (stmt);

                  gcc_assert (sprime != rhs);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replaced ");
                      print_gimple_expr (dump_file, stmt, 0, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime, 0);
                      fprintf (dump_file, " in ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  if (TREE_CODE (sprime) == SSA_NAME)
                    gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
                                    NECESSARY, true);
                  /* We need to make sure the new and old types actually match,
                     which may require adding a simple cast, which fold_convert
                     will do for us.  */
                  if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
                      && !useless_type_conversion_p (gimple_expr_type (stmt),
                                                     TREE_TYPE (sprime)))
                    sprime = fold_convert (gimple_expr_type (stmt), sprime);

                  pre_stats.eliminations++;
                  propagate_tree_value_into_stmt (&gsi, sprime);
                  stmt = gsi_stmt (gsi);
                  update_stmt (stmt);

                  /* If we removed EH side-effects from the statement, clean
                     its EH information.  */
                  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
                    {
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed EH side-effects.\n");
                    }

                  /* Likewise for AB side-effects.  */
                  if (can_make_abnormal_goto
                      && !stmt_can_make_abnormal_goto (stmt))
                    {
                      bitmap_set_bit (need_ab_cleanup,
                                      gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed AB side-effects.\n");
                    }
                }
            }
          /* If the statement is a scalar store, see if the expression
             has the same value number as its rhs.  If so, the store is
             dead.
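             A hypothetical GIMPLE sequence (names invented purely for
             illustration):

               *p_1 = x_2;
               ...                 <- nothing clobbers *p_1 in between
               *p_1 = x_2;         <- looking up *p_1 yields x_2's value,
                                      so this second store is dead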
          */
          else if (gimple_assign_single_p (stmt)
                   && !gimple_has_volatile_ops (stmt)
                   && !is_gimple_reg (gimple_assign_lhs (stmt))
                   && (TREE_CODE (rhs) == SSA_NAME
                       || is_gimple_min_invariant (rhs)))
            {
              tree val;
              val = vn_reference_lookup (gimple_assign_lhs (stmt),
                                         gimple_vuse (stmt), VN_WALK, NULL);
              if (TREE_CODE (rhs) == SSA_NAME)
                rhs = VN_INFO (rhs)->valnum;
              if (val
                  && operand_equal_p (val, rhs, 0))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Deleted redundant store ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  /* Queue stmt for removal.  */
                  VEC_safe_push (gimple, heap, to_remove, stmt);
                }
            }
          /* Visit COND_EXPRs and fold the comparison with the
             available value-numbers.  */
          else if (gimple_code (stmt) == GIMPLE_COND)
            {
              tree op0 = gimple_cond_lhs (stmt);
              tree op1 = gimple_cond_rhs (stmt);
              tree result;

              if (TREE_CODE (op0) == SSA_NAME)
                op0 = VN_INFO (op0)->valnum;
              if (TREE_CODE (op1) == SSA_NAME)
                op1 = VN_INFO (op1)->valnum;
              result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
                                    op0, op1);
              if (result && TREE_CODE (result) == INTEGER_CST)
                {
                  if (integer_zerop (result))
                    gimple_cond_make_false (stmt);
                  else
                    gimple_cond_make_true (stmt);
                  update_stmt (stmt);
                  todo = TODO_cleanup_cfg;
                }
            }
          /* Visit indirect calls and turn them into direct calls if
             possible.  */
          if (is_gimple_call (stmt))
            {
              tree orig_fn = gimple_call_fn (stmt);
              tree fn;
              if (!orig_fn)
                continue;
              if (TREE_CODE (orig_fn) == SSA_NAME)
                fn = VN_INFO (orig_fn)->valnum;
              else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
                       && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
                fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
              else
                continue;
              if (gimple_call_addr_fndecl (fn) != NULL_TREE
                  && useless_type_conversion_p (TREE_TYPE (orig_fn),
                                                TREE_TYPE (fn)))
                {
                  bool can_make_abnormal_goto
                    = stmt_can_make_abnormal_goto (stmt);
                  bool was_noreturn = gimple_call_noreturn_p (stmt);

                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Replacing call target with ");
                      print_generic_expr (dump_file, fn, 0);
                      fprintf (dump_file, " in ");
                      print_gimple_stmt (dump_file, stmt, 0, 0);
                    }

                  gimple_call_set_fn (stmt, fn);
                  VEC_safe_push (gimple, heap, to_update, stmt);

                  /* When changing a call into a noreturn call, cfg cleanup
                     is needed to fix up the noreturn call.  */
                  if (!was_noreturn && gimple_call_noreturn_p (stmt))
                    todo |= TODO_cleanup_cfg;

                  /* If we removed EH side-effects from the statement, clean
                     its EH information.  */
                  if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
                    {
                      bitmap_set_bit (need_eh_cleanup,
                                      gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed EH side-effects.\n");
                    }

                  /* Likewise for AB side-effects.  */
                  if (can_make_abnormal_goto
                      && !stmt_can_make_abnormal_goto (stmt))
                    {
                      bitmap_set_bit (need_ab_cleanup,
                                      gimple_bb (stmt)->index);
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "  Removed AB side-effects.\n");
                    }

                  /* Changing an indirect call to a direct call may
                     have exposed different semantics.  This may
                     require an SSA update.
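                     For instance, the now-known callee may be const or
                     pure, in which case the call may no longer need the
                     virtual operands it carried while it was indirect.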
                  */
                  todo |= TODO_update_ssa_only_virtuals;
                }
            }
        }

      for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
        {
          gimple stmt, phi = gsi_stmt (gsi);
          tree sprime = NULL_TREE, res = PHI_RESULT (phi);
          pre_expr sprimeexpr, resexpr;
          gimple_stmt_iterator gsi2;

          /* We want to perform redundant PHI elimination.  Do so by
             replacing the PHI with a single copy if possible.
             Do not touch single-argument or virtual PHIs.  */
          if (gimple_phi_num_args (phi) == 1
              || !is_gimple_reg (res))
            {
              gsi_next (&gsi);
              continue;
            }

          resexpr = get_or_alloc_expr_for_name (res);
          sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
                                           get_expr_value_id (resexpr), NULL);
          if (sprimeexpr)
            {
              if (sprimeexpr->kind == CONSTANT)
                sprime = PRE_EXPR_CONSTANT (sprimeexpr);
              else if (sprimeexpr->kind == NAME)
                sprime = PRE_EXPR_NAME (sprimeexpr);
              else
                gcc_unreachable ();
            }
          if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
            {
              sprime = VN_INFO (res)->valnum;
              if (!useless_type_conversion_p (TREE_TYPE (res),
                                              TREE_TYPE (sprime)))
                sprime = fold_convert (TREE_TYPE (res), sprime);
            }
          if (!sprime
              || sprime == res)
            {
              gsi_next (&gsi);
              continue;
            }

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced redundant PHI node defining ");
              print_generic_expr (dump_file, res, 0);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime, 0);
              fprintf (dump_file, "\n");
            }

          remove_phi_node (&gsi, false);

          if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
              && TREE_CODE (sprime) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);

          if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (res), sprime);
          stmt = gimple_build_assign (res, sprime);
          SSA_NAME_DEF_STMT (res) = stmt;
          gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));

          gsi2 = gsi_after_labels (b);
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
          /* Queue the copy for eventual removal.  */
          VEC_safe_push (gimple, heap, to_remove, stmt);
          /* If we inserted this PHI node ourselves, it's not an
             elimination.  */
          if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
            pre_stats.phis--;
          else
            pre_stats.eliminations++;
        }
    }

  /* We cannot remove stmts during the BB walk, and especially cannot
     release SSA names there, as this confuses the VN machinery.  The
     stmts ending up in to_remove are either stores or simple copies.  */
  FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
    {
      tree lhs = gimple_assign_lhs (stmt);
      tree rhs = gimple_assign_rhs1 (stmt);
      use_operand_p use_p;
      gimple use_stmt;

      /* If there is a single use only, propagate the equivalency
         instead of keeping the copy.  */
      if (TREE_CODE (lhs) == SSA_NAME
          && TREE_CODE (rhs) == SSA_NAME
          && single_imm_use (lhs, &use_p, &use_stmt)
          && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
        {
          SET_USE (use_p, rhs);
          update_stmt (use_stmt);
          if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
              && TREE_CODE (rhs) == SSA_NAME)
            gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
        }

      /* If this is a store or a now unused copy, remove it.
      */
      if (TREE_CODE (lhs) != SSA_NAME
          || has_zero_uses (lhs))
        {
          basic_block bb = gimple_bb (stmt);
          gsi = gsi_for_stmt (stmt);
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
          /* ??? gsi_remove doesn't tell us whether the stmt was
             in EH tables and thus whether we need to purge EH edges.
             Simply schedule the block for a cleanup.  */
          bitmap_set_bit (need_eh_cleanup, bb->index);
          if (TREE_CODE (lhs) == SSA_NAME)
            bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
          release_defs (stmt);
        }
    }
  VEC_free (gimple, heap, to_remove);

  /* We cannot update call statements with virtual operands during
     the SSA walk.  Updating them there might remove those operands,
     which in turn would invalidate our VN lattice.  */
  FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
    update_stmt (stmt);
  VEC_free (gimple, heap, to_update);

  return todo;
}

/* Borrow a bit of tree-ssa-dce.c for the moment.
   XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
   this may be a bit faster, and we may want critical edges kept split.  */

/* If OP's defining statement has not already been determined to be necessary,
   mark that statement necessary.  Return the stmt if it is newly
   necessary.  */

static inline gimple
mark_operand_necessary (tree op)
{
  gimple stmt;

  gcc_assert (op);

  if (TREE_CODE (op) != SSA_NAME)
    return NULL;

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, NECESSARY)
      || gimple_nop_p (stmt))
    return NULL;

  gimple_set_plf (stmt, NECESSARY, true);
  return stmt;
}

/* Because we don't follow exactly the standard PRE algorithm, and decide not
   to insert PHI nodes sometimes, and because value numbering of casts isn't
   perfect, we sometimes end up inserting dead code.  This simple DCE-like
   pass removes any insertions we made that weren't actually used.  */

static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.
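             E.g. for a made-up statement x_3 = y_1 + z_2 this marks the
             statements defining y_1 and z_2; for statements touching
             memory the virtual operands are walked in the same way.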
          */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */

          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}

/* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
   true, then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
   the number of visited blocks.  */

static int
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up the CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the incoming edges of the exit block onto the stack; we walk
     the CFG backwards, from the exit towards the entry.  */
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge source has been visited yet.  */
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
        {
          /* Mark that we have visited the source.  */
          SET_BIT (visited, src->index);

          if (EDGE_COUNT (src->preds) > 0)
            /* Since the SRC node has been visited for the first
               time, check its predecessors.  */
            stack[sp++] = ei_start (src->preds);
          else
            post_order[post_order_num++] = src->index;
        }
      else
        {
          if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
            post_order[post_order_num++] = dest->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}


/* Initialize data structures used by PRE.
   */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  next_expression_id = 1;
  expressions = NULL;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
			 get_max_value_id () + 1);
  name_to_id = NULL;

  in_fre = do_fre;

  inserted_exprs = BITMAP_ALLOC (NULL);
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
				     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
				  pre_expr_hash,
				  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
				       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
				     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}


/* Deallocate data structures used by PRE.  */

static void
fini_pre (bool do_fre)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    cleanup_tree_cfg ();

  if (!do_fre)
    loop_optimizer_finalize ();
}

/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (do_fre ?
		   VN_WALKREWRITE : VN_WALK))
    {
      if (!do_fre)
	loop_optimizer_finalize ();

      return 0;
    }

  init_pre (do_fre);
  scev_initialize ();

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
	{
	  print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
	  print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
	  print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
	  print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
	}
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
  statistics_counter_event (cfun, "Constified", pre_stats.constified);

  clear_expression_ids ();
  if (!do_fre)
    {
      remove_dead_inserted_code ();
      todo |= TODO_verify_flow;
    }

  scev_finalize ();
  fini_pre (do_fre);

  if (!do_fre)
    /* TODO: tail_merge_optimize may merge all predecessors of a block, in
       which case we can merge the block with the remaining predecessor
       of the block.  It should either:
       - call merge_blocks after each tail merge iteration
       - call merge_blocks after all tail merge iterations
       - mark TODO_cleanup_cfg when necessary
       - share the cfg cleanup with fini_pre.  */
    todo |= tail_merge_optimize (todo);
  free_scc_vn ();

  return todo;
}

/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  return execute_pre (false);
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",				/* name */
  gate_pre,				/* gate */
  do_pre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_PRE,				/* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,				/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  TODO_rebuild_alias,			/* todo_flags_start */
  TODO_update_ssa_only_virtuals | TODO_ggc_collect
    | TODO_verify_ssa			/* todo_flags_finish */
 }
};


/* Gate and execute functions for FRE.
   */

static unsigned int
execute_fre (void)
{
  return execute_pre (true);
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",				/* name */
  gate_fre,				/* gate */
  execute_fre,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_FRE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa	/* todo_flags_finish */
 }
};