1 /* SCC value numbering for trees 2 Copyright (C) 2006, 2007, 2008, 2009, 2010 3 Free Software Foundation, Inc. 4 Contributed by Daniel Berlin <dan@dberlin.org> 5 6 This file is part of GCC. 7 8 GCC is free software; you can redistribute it and/or modify 9 it under the terms of the GNU General Public License as published by 10 the Free Software Foundation; either version 3, or (at your option) 11 any later version. 12 13 GCC is distributed in the hope that it will be useful, 14 but WITHOUT ANY WARRANTY; without even the implied warranty of 15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 GNU General Public License for more details. 17 18 You should have received a copy of the GNU General Public License 19 along with GCC; see the file COPYING3. If not see 20 <http://www.gnu.org/licenses/>. */ 21 22 #include "config.h" 23 #include "system.h" 24 #include "coretypes.h" 25 #include "tm.h" 26 #include "tree.h" 27 #include "basic-block.h" 28 #include "tree-pretty-print.h" 29 #include "gimple-pretty-print.h" 30 #include "tree-inline.h" 31 #include "tree-flow.h" 32 #include "gimple.h" 33 #include "tree-dump.h" 34 #include "timevar.h" 35 #include "fibheap.h" 36 #include "hashtab.h" 37 #include "tree-iterator.h" 38 #include "alloc-pool.h" 39 #include "tree-pass.h" 40 #include "flags.h" 41 #include "bitmap.h" 42 #include "langhooks.h" 43 #include "cfgloop.h" 44 #include "params.h" 45 #include "tree-ssa-propagate.h" 46 #include "tree-ssa-sccvn.h" 47 #include "gimple-fold.h" 48 49 /* This algorithm is based on the SCC algorithm presented by Keith 50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering" 51 (http://citeseer.ist.psu.edu/41805.html). In 52 straight line code, it is equivalent to a regular hash based value 53 numbering that is performed in reverse postorder. 54 55 For code with cycles, there are two alternatives, both of which 56 require keeping the hashtables separate from the actual list of 57 value numbers for SSA names. 58 59 1. 
Iterate value numbering in an RPO walk of the blocks, removing 60 all the entries from the hashtable after each iteration (but 61 keeping the SSA name->value number mapping between iterations). 62 Iterate until it does not change. 63 64 2. Perform value numbering as part of an SCC walk on the SSA graph, 65 iterating only the cycles in the SSA graph until they do not change 66 (using a separate, optimistic hashtable for value numbering the SCC 67 operands). 68 69 The second is not just faster in practice (because most SSA graph 70 cycles do not involve all the variables in the graph), it also has 71 some nice properties. 72 73 One of these nice properties is that when we pop an SCC off the 74 stack, we are guaranteed to have processed all the operands coming from 75 *outside of that SCC*, so we do not need to do anything special to 76 ensure they have value numbers. 77 78 Another nice property is that the SCC walk is done as part of a DFS 79 of the SSA graph, which makes it easy to perform combining and 80 simplifying operations at the same time. 81 82 The code below is deliberately written in a way that makes it easy 83 to separate the SCC walk from the other work it does. 84 85 In order to propagate constants through the code, we track which 86 expressions contain constants, and use those while folding. In 87 theory, we could also track expressions whose value numbers are 88 replaced, in case we end up folding based on expression 89 identities. 90 91 In order to value number memory, we assign value numbers to vuses. 92 This enables us to note that, for example, stores to the same 93 address of the same value from the same starting memory states are 94 equivalent. 95 TODO: 96 97 1. We can iterate only the changing portions of the SCC's, but 98 I have not seen an SCC big enough for this to be a win. 99 2. If you differentiate between phi nodes for loops and phi nodes 100 for if-then-else, you can properly consider phi nodes in different 101 blocks for equivalence. 
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

/* Map from constant tree to its value id, and the bitmap of value ids
   known to represent constants.  */
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

/* Shorthand for the current value number of SSA name X.  */
#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.
   The entry must already exist (see VN_INFO_GET).  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  /* Grow the table on demand; versions are not allocated densely.  */
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Only rebuild wrapping references around SSA names; other
	 reference forms would need full operand valueization.  */
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}


/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.
*/

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  /* Distinct hash codes can never compare equal.  */
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  /* Remember this value id represents a constant.  */
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1, mixing into RESULT.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.
*/ 435 436 hashval_t 437 vn_reference_compute_hash (const vn_reference_t vr1) 438 { 439 hashval_t result = 0; 440 int i; 441 vn_reference_op_t vro; 442 HOST_WIDE_INT off = -1; 443 bool deref = false; 444 445 FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro) 446 { 447 if (vro->opcode == MEM_REF) 448 deref = true; 449 else if (vro->opcode != ADDR_EXPR) 450 deref = false; 451 if (vro->off != -1) 452 { 453 if (off == -1) 454 off = 0; 455 off += vro->off; 456 } 457 else 458 { 459 if (off != -1 460 && off != 0) 461 result = iterative_hash_hashval_t (off, result); 462 off = -1; 463 if (deref 464 && vro->opcode == ADDR_EXPR) 465 { 466 if (vro->op0) 467 { 468 tree op = TREE_OPERAND (vro->op0, 0); 469 result = iterative_hash_hashval_t (TREE_CODE (op), result); 470 result = iterative_hash_expr (op, result); 471 } 472 } 473 else 474 result = vn_reference_op_compute_hash (vro, result); 475 } 476 } 477 if (vr1->vuse) 478 result += SSA_NAME_VERSION (vr1->vuse); 479 480 return result; 481 } 482 483 /* Return true if reference operations P1 and P2 are equivalent. This 484 means they have the same set of operands and vuses. */ 485 486 int 487 vn_reference_eq (const void *p1, const void *p2) 488 { 489 unsigned i, j; 490 491 const_vn_reference_t const vr1 = (const_vn_reference_t) p1; 492 const_vn_reference_t const vr2 = (const_vn_reference_t) p2; 493 if (vr1->hashcode != vr2->hashcode) 494 return false; 495 496 /* Early out if this is not a hash collision. */ 497 if (vr1->hashcode != vr2->hashcode) 498 return false; 499 500 /* The VOP needs to be the same. */ 501 if (vr1->vuse != vr2->vuse) 502 return false; 503 504 /* If the operands are the same we are done. 
*/ 505 if (vr1->operands == vr2->operands) 506 return true; 507 508 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type))) 509 return false; 510 511 if (INTEGRAL_TYPE_P (vr1->type) 512 && INTEGRAL_TYPE_P (vr2->type)) 513 { 514 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type)) 515 return false; 516 } 517 else if (INTEGRAL_TYPE_P (vr1->type) 518 && (TYPE_PRECISION (vr1->type) 519 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type)))) 520 return false; 521 else if (INTEGRAL_TYPE_P (vr2->type) 522 && (TYPE_PRECISION (vr2->type) 523 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type)))) 524 return false; 525 526 i = 0; 527 j = 0; 528 do 529 { 530 HOST_WIDE_INT off1 = 0, off2 = 0; 531 vn_reference_op_t vro1, vro2; 532 vn_reference_op_s tem1, tem2; 533 bool deref1 = false, deref2 = false; 534 for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++) 535 { 536 if (vro1->opcode == MEM_REF) 537 deref1 = true; 538 if (vro1->off == -1) 539 break; 540 off1 += vro1->off; 541 } 542 for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++) 543 { 544 if (vro2->opcode == MEM_REF) 545 deref2 = true; 546 if (vro2->off == -1) 547 break; 548 off2 += vro2->off; 549 } 550 if (off1 != off2) 551 return false; 552 if (deref1 && vro1->opcode == ADDR_EXPR) 553 { 554 memset (&tem1, 0, sizeof (tem1)); 555 tem1.op0 = TREE_OPERAND (vro1->op0, 0); 556 tem1.type = TREE_TYPE (tem1.op0); 557 tem1.opcode = TREE_CODE (tem1.op0); 558 vro1 = &tem1; 559 deref1 = false; 560 } 561 if (deref2 && vro2->opcode == ADDR_EXPR) 562 { 563 memset (&tem2, 0, sizeof (tem2)); 564 tem2.op0 = TREE_OPERAND (vro2->op0, 0); 565 tem2.type = TREE_TYPE (tem2.op0); 566 tem2.opcode = TREE_CODE (tem2.op0); 567 vro2 = &tem2; 568 deref2 = false; 569 } 570 if (deref1 != deref2) 571 return false; 572 if (!vn_reference_op_eq (vro1, vro2)) 573 return false; 574 ++j; 575 ++i; 576 } 577 while (VEC_length (vn_reference_op_s, vr1->operands) != i 578 || VEC_length (vn_reference_op_s, vr2->operands) != j); 
579 580 return true; 581 } 582 583 /* Copy the operations present in load/store REF into RESULT, a vector of 584 vn_reference_op_s's. */ 585 586 void 587 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result) 588 { 589 if (TREE_CODE (ref) == TARGET_MEM_REF) 590 { 591 vn_reference_op_s temp; 592 593 memset (&temp, 0, sizeof (temp)); 594 temp.type = TREE_TYPE (ref); 595 temp.opcode = TREE_CODE (ref); 596 temp.op0 = TMR_INDEX (ref); 597 temp.op1 = TMR_STEP (ref); 598 temp.op2 = TMR_OFFSET (ref); 599 temp.off = -1; 600 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 601 602 memset (&temp, 0, sizeof (temp)); 603 temp.type = NULL_TREE; 604 temp.opcode = ERROR_MARK; 605 temp.op0 = TMR_INDEX2 (ref); 606 temp.off = -1; 607 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 608 609 memset (&temp, 0, sizeof (temp)); 610 temp.type = NULL_TREE; 611 temp.opcode = TREE_CODE (TMR_BASE (ref)); 612 temp.op0 = TMR_BASE (ref); 613 temp.off = -1; 614 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 615 return; 616 } 617 618 /* For non-calls, store the information that makes up the address. */ 619 620 while (ref) 621 { 622 vn_reference_op_s temp; 623 624 memset (&temp, 0, sizeof (temp)); 625 temp.type = TREE_TYPE (ref); 626 temp.opcode = TREE_CODE (ref); 627 temp.off = -1; 628 629 switch (temp.opcode) 630 { 631 case WITH_SIZE_EXPR: 632 temp.op0 = TREE_OPERAND (ref, 1); 633 temp.off = 0; 634 break; 635 case MEM_REF: 636 /* The base address gets its own vn_reference_op_s structure. */ 637 temp.op0 = TREE_OPERAND (ref, 1); 638 if (host_integerp (TREE_OPERAND (ref, 1), 0)) 639 temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1)); 640 break; 641 case BIT_FIELD_REF: 642 /* Record bits and position. 
*/ 643 temp.op0 = TREE_OPERAND (ref, 1); 644 temp.op1 = TREE_OPERAND (ref, 2); 645 break; 646 case COMPONENT_REF: 647 /* The field decl is enough to unambiguously specify the field, 648 a matching type is not necessary and a mismatching type 649 is always a spurious difference. */ 650 temp.type = NULL_TREE; 651 temp.op0 = TREE_OPERAND (ref, 1); 652 temp.op1 = TREE_OPERAND (ref, 2); 653 { 654 tree this_offset = component_ref_field_offset (ref); 655 if (this_offset 656 && TREE_CODE (this_offset) == INTEGER_CST) 657 { 658 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1)); 659 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0) 660 { 661 double_int off 662 = double_int_add (tree_to_double_int (this_offset), 663 double_int_rshift 664 (tree_to_double_int (bit_offset), 665 BITS_PER_UNIT == 8 666 ? 3 : exact_log2 (BITS_PER_UNIT), 667 HOST_BITS_PER_DOUBLE_INT, true)); 668 if (double_int_fits_in_shwi_p (off)) 669 temp.off = off.low; 670 } 671 } 672 } 673 break; 674 case ARRAY_RANGE_REF: 675 case ARRAY_REF: 676 /* Record index as operand. */ 677 temp.op0 = TREE_OPERAND (ref, 1); 678 /* Always record lower bounds and element size. */ 679 temp.op1 = array_ref_low_bound (ref); 680 temp.op2 = array_ref_element_size (ref); 681 if (TREE_CODE (temp.op0) == INTEGER_CST 682 && TREE_CODE (temp.op1) == INTEGER_CST 683 && TREE_CODE (temp.op2) == INTEGER_CST) 684 { 685 double_int off = tree_to_double_int (temp.op0); 686 off = double_int_add (off, 687 double_int_neg 688 (tree_to_double_int (temp.op1))); 689 off = double_int_mul (off, tree_to_double_int (temp.op2)); 690 if (double_int_fits_in_shwi_p (off)) 691 temp.off = off.low; 692 } 693 break; 694 case VAR_DECL: 695 if (DECL_HARD_REGISTER (ref)) 696 { 697 temp.op0 = ref; 698 break; 699 } 700 /* Fallthru. */ 701 case PARM_DECL: 702 case CONST_DECL: 703 case RESULT_DECL: 704 /* Canonicalize decls to MEM[&decl] which is what we end up with 705 when valueizing MEM[ptr] with ptr = &decl. 
*/ 706 temp.opcode = MEM_REF; 707 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0); 708 temp.off = 0; 709 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 710 temp.opcode = ADDR_EXPR; 711 temp.op0 = build_fold_addr_expr (ref); 712 temp.type = TREE_TYPE (temp.op0); 713 temp.off = -1; 714 break; 715 case STRING_CST: 716 case INTEGER_CST: 717 case COMPLEX_CST: 718 case VECTOR_CST: 719 case REAL_CST: 720 case FIXED_CST: 721 case CONSTRUCTOR: 722 case SSA_NAME: 723 temp.op0 = ref; 724 break; 725 case ADDR_EXPR: 726 if (is_gimple_min_invariant (ref)) 727 { 728 temp.op0 = ref; 729 break; 730 } 731 /* Fallthrough. */ 732 /* These are only interesting for their operands, their 733 existence, and their type. They will never be the last 734 ref in the chain of references (IE they require an 735 operand), so we don't have to put anything 736 for op* as it will be handled by the iteration */ 737 case REALPART_EXPR: 738 case VIEW_CONVERT_EXPR: 739 temp.off = 0; 740 break; 741 case IMAGPART_EXPR: 742 /* This is only interesting for its constant offset. */ 743 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref))); 744 break; 745 default: 746 gcc_unreachable (); 747 } 748 VEC_safe_push (vn_reference_op_s, heap, *result, &temp); 749 750 if (REFERENCE_CLASS_P (ref) 751 || TREE_CODE (ref) == WITH_SIZE_EXPR 752 || (TREE_CODE (ref) == ADDR_EXPR 753 && !is_gimple_min_invariant (ref))) 754 ref = TREE_OPERAND (ref, 0); 755 else 756 ref = NULL_TREE; 757 } 758 } 759 760 /* Build a alias-oracle reference abstraction in *REF from the vn_reference 761 operands in *OPS, the reference alias set SET and the reference type TYPE. 762 Return true if something useful was produced. 
*/

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
	arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.
*/ 1190 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME) 1191 vro->opcode = TREE_CODE (vro->op0); 1192 } 1193 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME) 1194 { 1195 tree tem = SSA_VAL (vro->op1); 1196 if (tem != vro->op1) 1197 { 1198 *valueized_anything = true; 1199 vro->op1 = tem; 1200 } 1201 } 1202 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME) 1203 { 1204 tree tem = SSA_VAL (vro->op2); 1205 if (tem != vro->op2) 1206 { 1207 *valueized_anything = true; 1208 vro->op2 = tem; 1209 } 1210 } 1211 /* If it transforms from an SSA_NAME to an address, fold with 1212 a preceding indirect reference. */ 1213 if (i > 0 1214 && vro->op0 1215 && TREE_CODE (vro->op0) == ADDR_EXPR 1216 && VEC_index (vn_reference_op_s, 1217 orig, i - 1)->opcode == MEM_REF) 1218 vn_reference_fold_indirect (&orig, &i); 1219 else if (i > 0 1220 && vro->opcode == SSA_NAME 1221 && VEC_index (vn_reference_op_s, 1222 orig, i - 1)->opcode == MEM_REF) 1223 vn_reference_maybe_forwprop_address (&orig, &i); 1224 /* If it transforms a non-constant ARRAY_REF into a constant 1225 one, adjust the constant offset. */ 1226 else if (vro->opcode == ARRAY_REF 1227 && vro->off == -1 1228 && TREE_CODE (vro->op0) == INTEGER_CST 1229 && TREE_CODE (vro->op1) == INTEGER_CST 1230 && TREE_CODE (vro->op2) == INTEGER_CST) 1231 { 1232 double_int off = tree_to_double_int (vro->op0); 1233 off = double_int_add (off, 1234 double_int_neg 1235 (tree_to_double_int (vro->op1))); 1236 off = double_int_mul (off, tree_to_double_int (vro->op2)); 1237 if (double_int_fits_in_shwi_p (off)) 1238 vro->off = off.low; 1239 } 1240 } 1241 1242 return orig; 1243 } 1244 1245 static VEC (vn_reference_op_s, heap) * 1246 valueize_refs (VEC (vn_reference_op_s, heap) *orig) 1247 { 1248 bool tem; 1249 return valueize_refs_1 (orig, &tem); 1250 } 1251 1252 static VEC(vn_reference_op_s, heap) *shared_lookup_references; 1253 1254 /* Create a vector of vn_reference_op_s structures from REF, a 1255 REFERENCE_CLASS_P tree. 
   The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  /* Reuse the shared scratch vector; callers must consume the result
     before the next call invalidates it.  */
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  /* Same shared-vector discipline as the REF variant above.  */
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.
*/

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  /* During the optimistic phase fall back to the valid table, which
     holds facts proven in earlier iterations.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* When non-NULL, records the last VUSE visited by the walker.  */
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash: the hash combines the vuse's SSA version
     additively, so remove the old contribution before swapping in the
     valueized VUSE and adding the new one.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.
*/

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          VEC (vn_reference_op_s,
                                               heap) *operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  /* Not found: insert, using VALUE's value-id (allocating a constant
     value-id when VALUE is not an SSA name).  Copy OPERANDS since the
     inserted entry takes ownership of its vector.  */
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     VEC_copy (vn_reference_op_s, heap,
                                               operands), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  Returns NULL to keep walking, (void *)-1 to stop, or a
   vn_reference_t result.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  /* Static scratch vector, reused across calls to avoid reallocation.  */
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      /* Valueization must not have reallocated the static vector.  */
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          /* If the valueized LHS provably does not alias *REF the
             definition is irrelevant: keep walking.  */
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      /* The division re-check guards against overflow of the *8 above.  */
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* The memset covers the whole access: the value is zero.  */
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && is_gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              /* Re-interpret the relevant slice of the encoded store
                 in the type of the read.  */
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  /* Real part at offset 0, imaginary part at elsz.  */
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      /* Only use the element if it is at its natural
                         position (no explicit reordered index).  */
                      if (compare_tree_int (elt->index, i) == 0)
                        val = elt->value;
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          /* Growing may reallocate; drop the stale shared-vector alias
             so nobody frees or truncates the old storage.  */
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;


      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          /* As above, invalidate a stale shared-vector alias.  */
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      /* NOTE(review): the MEM_REF offset operand is built from
         AT - RHS_OFFSET while OP.OFF below uses
         AT - LHS_OFFSET + RHS_OFFSET; for a MEM_REF these two are
         expected to agree — verify which one is intended.  */
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Lookup a reference operation by it's parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  /* Work on a copy of OPERANDS in the shared scratch vector so the
     caller's vector is never mutated by valueization.  */
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      /* lookup_3 may have replaced the operand vector; free the
         replacement, the shared vector is managed elsewhere.  */
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      /* See vn_reference_lookup_pieces for why a replaced operand
         vector must be freed here.  */
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by it's pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)

{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  /* Takes ownership of OPERANDS.  */
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  /* NOTE(review): with gcc_assert enabled the following free is dead
     code; it can only run in builds where the assert is compiled out.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VBO1.
   Note this canonicalizes VNO1 as a side effect: operands are
   valueized and commutative operands may be swapped.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  /* Canonicalize operand order of commutative binary operations so
     equal expressions hash and compare equal.  */
  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  /* Cheap rejections first: hash, arity, opcode/type.  */
  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Initialize VNO from the pieces provided.
   */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
      /* These wrap a single operand inside a unary tree node; only
	 that inner operand is recorded.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      /* All operands of the statement except the lhs.  */
      return gimple_num_ops (stmt) - 1;
    }
}

/* Initialize VNO from STMT.
   */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
      /* Strip the wrapping tree node and record its sole operand,
	 mirroring vn_nary_length_from_stmt.  */
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
				   NO_INSERT);
  /* When iterating optimistically fall back to the valid table for
     entries recorded by already-settled SCCs.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  /* Stack-allocate a temporary key; only a successful insert would
     need pool storage.  */
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.
*/ 2235 2236 static vn_nary_op_t 2237 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack) 2238 { 2239 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length)); 2240 } 2241 2242 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's 2243 obstack. */ 2244 2245 static vn_nary_op_t 2246 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id) 2247 { 2248 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, 2249 ¤t_info->nary_obstack); 2250 2251 vno1->value_id = value_id; 2252 vno1->length = length; 2253 vno1->result = result; 2254 2255 return vno1; 2256 } 2257 2258 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute 2259 VNO->HASHCODE first. */ 2260 2261 static vn_nary_op_t 2262 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash) 2263 { 2264 void **slot; 2265 2266 if (compute_hash) 2267 vno->hashcode = vn_nary_op_compute_hash (vno); 2268 2269 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT); 2270 gcc_assert (!*slot); 2271 2272 *slot = vno; 2273 return vno; 2274 } 2275 2276 /* Insert a n-ary operation into the current hash table using it's 2277 pieces. Return the vn_nary_op_t structure we created and put in 2278 the hashtable. */ 2279 2280 vn_nary_op_t 2281 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code, 2282 tree type, tree *ops, 2283 tree result, unsigned int value_id) 2284 { 2285 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id); 2286 init_vn_nary_op_from_pieces (vno1, length, code, type, ops); 2287 return vn_nary_op_insert_into (vno1, current_info->nary, true); 2288 } 2289 2290 /* Insert OP into the current hash table with a value number of 2291 RESULT. Return the vn_nary_op_t structure we created and put in 2292 the hashtable. 
   */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  /* Seed with the defining block's index: PHIs in different blocks
     are never equal (see vn_phi_eq).  */
  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
    {
      /* VN_TOP compares equal to anything, so it must not
	 contribute to the hash (vn_phi_eq skips it too).  */
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.
   */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  /* PHIs are only comparable within the same basic block.  */
  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  /* VN_TOP is equivalent to everything.  */
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}

/* Scratch vector reused by every vn_phi_lookup call to avoid
   repeated heap allocation.  */
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  /* During optimistic iteration also consult the valid table.  */
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.
   */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (tree, scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);

  if (from != to)
    {
      /* FROM valued to itself means VARYING; never move it back up
	 the lattice from there.  */
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from, 0);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to, 0);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      /* Do not value-number to an SSA name live over abnormal edges;
	 keep FROM as its own value number instead.  */
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.
   */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      /* Value numbering a definition to itself marks it VARYING.  */
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      /* First occurrence: LHS becomes its own value number and the
	 expression is recorded for later lookups.  */
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;
  tree vuse = gimple_vuse (stmt);

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      /* Record the call in the references table; note the shared
	 operand vector from the lookup above cannot be reused, so a
	 fresh one is created here.  */
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  default_vn_walk_kind, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse);
    }

  return changed;
}


/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if ((vdef = gimple_vdef (stmt)))
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */
      tree def, use;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value,"
		 "value numbering store vdefs to matching vuses.\n");

      def = gimple_vdef (stmt);
      use = gimple_vuse (stmt);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, SSA_VAL (use));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.
   */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  /* First non-TOP argument seen; remember it.  */
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      /* Nothing equivalent: the PHI result is its own value number.  */
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.
   */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
      TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
      /* Fallthru.  */
    case tcc_unary:
      TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
      break;
    default:;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (code) == tcc_comparison
	  || code == COMPLEX_EXPR)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else
	op0 = vn_valueize (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants
	  || code == COMPLEX_EXPR)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else
	op1 = vn_valueize (op1);
    }

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && host_integerp (op1, 1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
				    TREE_OPERAND (op0, 0),
				    TREE_INT_CST_LOW (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure result is not a complex expression consisting
     of operators of operators (IE (a + b) + (a + c))
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}

/* Simplify the unary expression RHS, and return the result if
   simplified.
   */

static tree
simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (code == REALPART_EXPR
      || code == IMAGPART_EXPR
      || code == VIEW_CONVERT_EXPR
      || code == BIT_FIELD_REF)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  if (VN_INFO (op0)->has_constants)
    op0 = valueize_expr (vn_get_expr_for (op0));
  else if (CONVERT_EXPR_CODE_P (code)
	   || code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR
	   || code == BIT_FIELD_REF)
    {
      /* We want to do tree-combining on conversion-like expressions.
	 Make sure we feed only SSA_NAMEs or constants to fold though.  */
      tree tem = valueize_expr (vn_get_expr_for (op0));
      if (UNARY_CLASS_P (tem)
	  || BINARY_CLASS_P (tem)
	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	  || TREE_CODE (tem) == SSA_NAME
	  || TREE_CODE (tem) == CONSTRUCTOR
	  || is_gimple_min_invariant (tem))
	op0 = tem;
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  if (code == BIT_FIELD_REF)
    {
      /* BIT_FIELD_REF needs its position and size operands preserved;
	 fold as a ternary with the substituted base.  */
      tree rhs = gimple_assign_rhs1 (stmt);
      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
    }
  else
    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.
   */

static tree
try_to_simplify (gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Fallthrough for some unary codes that can operate on registers.  */
      if (!(code == REALPART_EXPR
	    || code == IMAGPART_EXPR
	    || code == VIEW_CONVERT_EXPR
	    || code == BIT_FIELD_REF))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  VN_INFO (use)->use_processed = true;

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.
*/ 3188 if (SSA_NAME_IS_DEFAULT_DEF (use)) 3189 changed = set_ssa_val_to (use, use); 3190 else 3191 { 3192 if (gimple_code (stmt) == GIMPLE_PHI) 3193 changed = visit_phi (stmt); 3194 else if (!gimple_has_lhs (stmt) 3195 || gimple_has_volatile_ops (stmt)) 3196 changed = defs_to_varying (stmt); 3197 else if (is_gimple_assign (stmt)) 3198 { 3199 enum tree_code code = gimple_assign_rhs_code (stmt); 3200 tree lhs = gimple_assign_lhs (stmt); 3201 tree rhs1 = gimple_assign_rhs1 (stmt); 3202 tree simplified; 3203 3204 /* Shortcut for copies. Simplifying copies is pointless, 3205 since we copy the expression and value they represent. */ 3206 if (code == SSA_NAME 3207 && TREE_CODE (lhs) == SSA_NAME) 3208 { 3209 changed = visit_copy (lhs, rhs1); 3210 goto done; 3211 } 3212 simplified = try_to_simplify (stmt); 3213 if (simplified) 3214 { 3215 if (dump_file && (dump_flags & TDF_DETAILS)) 3216 { 3217 fprintf (dump_file, "RHS "); 3218 print_gimple_expr (dump_file, stmt, 0, 0); 3219 fprintf (dump_file, " simplified to "); 3220 print_generic_expr (dump_file, simplified, 0); 3221 if (TREE_CODE (lhs) == SSA_NAME) 3222 fprintf (dump_file, " has constants %d\n", 3223 expr_has_constants (simplified)); 3224 else 3225 fprintf (dump_file, "\n"); 3226 } 3227 } 3228 /* Setting value numbers to constants will occasionally 3229 screw up phi congruence because constants are not 3230 uniquely associated with a single ssa name that can be 3231 looked up. 
*/ 3232 if (simplified 3233 && is_gimple_min_invariant (simplified) 3234 && TREE_CODE (lhs) == SSA_NAME) 3235 { 3236 VN_INFO (lhs)->expr = simplified; 3237 VN_INFO (lhs)->has_constants = true; 3238 changed = set_ssa_val_to (lhs, simplified); 3239 goto done; 3240 } 3241 else if (simplified 3242 && TREE_CODE (simplified) == SSA_NAME 3243 && TREE_CODE (lhs) == SSA_NAME) 3244 { 3245 changed = visit_copy (lhs, simplified); 3246 goto done; 3247 } 3248 else if (simplified) 3249 { 3250 if (TREE_CODE (lhs) == SSA_NAME) 3251 { 3252 VN_INFO (lhs)->has_constants = expr_has_constants (simplified); 3253 /* We have to unshare the expression or else 3254 valuizing may change the IL stream. */ 3255 VN_INFO (lhs)->expr = unshare_expr (simplified); 3256 } 3257 } 3258 else if (stmt_has_constants (stmt) 3259 && TREE_CODE (lhs) == SSA_NAME) 3260 VN_INFO (lhs)->has_constants = true; 3261 else if (TREE_CODE (lhs) == SSA_NAME) 3262 { 3263 /* We reset expr and constantness here because we may 3264 have been value numbering optimistically, and 3265 iterating. They may become non-constant in this case, 3266 even if they were optimistically constant. */ 3267 3268 VN_INFO (lhs)->has_constants = false; 3269 VN_INFO (lhs)->expr = NULL_TREE; 3270 } 3271 3272 if ((TREE_CODE (lhs) == SSA_NAME 3273 /* We can substitute SSA_NAMEs that are live over 3274 abnormal edges with their constant value. */ 3275 && !(gimple_assign_copy_p (stmt) 3276 && is_gimple_min_invariant (rhs1)) 3277 && !(simplified 3278 && is_gimple_min_invariant (simplified)) 3279 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) 3280 /* Stores or copies from SSA_NAMEs that are live over 3281 abnormal edges are a problem. 
*/ 3282 || (code == SSA_NAME 3283 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))) 3284 changed = defs_to_varying (stmt); 3285 else if (REFERENCE_CLASS_P (lhs) 3286 || DECL_P (lhs)) 3287 changed = visit_reference_op_store (lhs, rhs1, stmt); 3288 else if (TREE_CODE (lhs) == SSA_NAME) 3289 { 3290 if ((gimple_assign_copy_p (stmt) 3291 && is_gimple_min_invariant (rhs1)) 3292 || (simplified 3293 && is_gimple_min_invariant (simplified))) 3294 { 3295 VN_INFO (lhs)->has_constants = true; 3296 if (simplified) 3297 changed = set_ssa_val_to (lhs, simplified); 3298 else 3299 changed = set_ssa_val_to (lhs, rhs1); 3300 } 3301 else 3302 { 3303 switch (get_gimple_rhs_class (code)) 3304 { 3305 case GIMPLE_UNARY_RHS: 3306 case GIMPLE_BINARY_RHS: 3307 case GIMPLE_TERNARY_RHS: 3308 changed = visit_nary_op (lhs, stmt); 3309 break; 3310 case GIMPLE_SINGLE_RHS: 3311 switch (TREE_CODE_CLASS (code)) 3312 { 3313 case tcc_reference: 3314 /* VOP-less references can go through unary case. */ 3315 if ((code == REALPART_EXPR 3316 || code == IMAGPART_EXPR 3317 || code == VIEW_CONVERT_EXPR 3318 || code == BIT_FIELD_REF) 3319 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME) 3320 { 3321 changed = visit_nary_op (lhs, stmt); 3322 break; 3323 } 3324 /* Fallthrough. */ 3325 case tcc_declaration: 3326 changed = visit_reference_op_load (lhs, rhs1, stmt); 3327 break; 3328 default: 3329 if (code == ADDR_EXPR) 3330 { 3331 changed = visit_nary_op (lhs, stmt); 3332 break; 3333 } 3334 else if (code == CONSTRUCTOR) 3335 { 3336 changed = visit_nary_op (lhs, stmt); 3337 break; 3338 } 3339 changed = defs_to_varying (stmt); 3340 } 3341 break; 3342 default: 3343 changed = defs_to_varying (stmt); 3344 break; 3345 } 3346 } 3347 } 3348 else 3349 changed = defs_to_varying (stmt); 3350 } 3351 else if (is_gimple_call (stmt)) 3352 { 3353 tree lhs = gimple_call_lhs (stmt); 3354 3355 /* ??? We could try to simplify calls. 
*/ 3356 3357 if (stmt_has_constants (stmt) 3358 && TREE_CODE (lhs) == SSA_NAME) 3359 VN_INFO (lhs)->has_constants = true; 3360 else if (TREE_CODE (lhs) == SSA_NAME) 3361 { 3362 /* We reset expr and constantness here because we may 3363 have been value numbering optimistically, and 3364 iterating. They may become non-constant in this case, 3365 even if they were optimistically constant. */ 3366 VN_INFO (lhs)->has_constants = false; 3367 VN_INFO (lhs)->expr = NULL_TREE; 3368 } 3369 3370 if (TREE_CODE (lhs) == SSA_NAME 3371 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs)) 3372 changed = defs_to_varying (stmt); 3373 /* ??? We should handle stores from calls. */ 3374 else if (TREE_CODE (lhs) == SSA_NAME) 3375 { 3376 if (!gimple_call_internal_p (stmt) 3377 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)) 3378 changed = visit_reference_op_call (lhs, stmt); 3379 else 3380 changed = defs_to_varying (stmt); 3381 } 3382 else 3383 changed = defs_to_varying (stmt); 3384 } 3385 } 3386 done: 3387 return changed; 3388 } 3389 3390 /* Compare two operands by reverse postorder index */ 3391 3392 static int 3393 compare_ops (const void *pa, const void *pb) 3394 { 3395 const tree opa = *((const tree *)pa); 3396 const tree opb = *((const tree *)pb); 3397 gimple opstmta = SSA_NAME_DEF_STMT (opa); 3398 gimple opstmtb = SSA_NAME_DEF_STMT (opb); 3399 basic_block bba; 3400 basic_block bbb; 3401 3402 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb)) 3403 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); 3404 else if (gimple_nop_p (opstmta)) 3405 return -1; 3406 else if (gimple_nop_p (opstmtb)) 3407 return 1; 3408 3409 bba = gimple_bb (opstmta); 3410 bbb = gimple_bb (opstmtb); 3411 3412 if (!bba && !bbb) 3413 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); 3414 else if (!bba) 3415 return -1; 3416 else if (!bbb) 3417 return 1; 3418 3419 if (bba == bbb) 3420 { 3421 if (gimple_code (opstmta) == GIMPLE_PHI 3422 && gimple_code (opstmtb) == GIMPLE_PHI) 3423 return SSA_NAME_VERSION 
(opa) - SSA_NAME_VERSION (opb); 3424 else if (gimple_code (opstmta) == GIMPLE_PHI) 3425 return -1; 3426 else if (gimple_code (opstmtb) == GIMPLE_PHI) 3427 return 1; 3428 else if (gimple_uid (opstmta) != gimple_uid (opstmtb)) 3429 return gimple_uid (opstmta) - gimple_uid (opstmtb); 3430 else 3431 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb); 3432 } 3433 return rpo_numbers[bba->index] - rpo_numbers[bbb->index]; 3434 } 3435 3436 /* Sort an array containing members of a strongly connected component 3437 SCC so that the members are ordered by RPO number. 3438 This means that when the sort is complete, iterating through the 3439 array will give you the members in RPO order. */ 3440 3441 static void 3442 sort_scc (VEC (tree, heap) *scc) 3443 { 3444 VEC_qsort (tree, scc, compare_ops); 3445 } 3446 3447 /* Insert the no longer used nary ONARY to the hash INFO. */ 3448 3449 static void 3450 copy_nary (vn_nary_op_t onary, vn_tables_t info) 3451 { 3452 size_t size = sizeof_vn_nary_op (onary->length); 3453 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length, 3454 &info->nary_obstack); 3455 memcpy (nary, onary, size); 3456 vn_nary_op_insert_into (nary, info->nary, false); 3457 } 3458 3459 /* Insert the no longer used phi OPHI to the hash INFO. */ 3460 3461 static void 3462 copy_phi (vn_phi_t ophi, vn_tables_t info) 3463 { 3464 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool); 3465 void **slot; 3466 memcpy (phi, ophi, sizeof (*phi)); 3467 ophi->phiargs = NULL; 3468 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT); 3469 gcc_assert (!*slot); 3470 *slot = phi; 3471 } 3472 3473 /* Insert the no longer used reference OREF to the hash INFO. 
*/

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  void **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  /* Ownership of the operands vector transfers to the copy.  */
  oref->operands = NULL;
  slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
				   INSERT);
  /* An existing entry is replaced; free it so we do not leak.  */
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph: iterate
   value numbering with the optimistic table until no value changes,
   then migrate the resulting entries into the valid table.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  htab_iterator hi;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      htab_empty (optimistic_info->nary);
      htab_empty (optimistic_info->phis);
      htab_empty (optimistic_info->references);
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (tree, scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (tree, scc, i, var)
	changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
    copy_nary (nary, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
    copy_phi (phi, valid_info);
  FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}

DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);

/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      VEC_free (tree, heap, scc);
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}

/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.

   This is Tarjan's SCC algorithm with the recursion replaced by an
   explicit stack of (name, iterator) pairs (namevec/itervec) so deep
   SSA graphs cannot overflow the C call stack.  */

static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		VEC_free (tree, heap, namevec);
		VEC_free (ssa_op_iter, heap, itervec);
		return false;
	      }

	  /* Check if we are done.  */
	  if (VEC_empty (tree, namevec))
	    {
	      VEC_free (tree, heap, namevec);
	      VEC_free (ssa_op_iter, heap, itervec);
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = VEC_pop (tree, namevec);
	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
		  sizeof (ssa_op_iter));
	  VEC_pop (ssa_op_iter, itervec);
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
	      VEC_safe_push(tree, heap, namevec, name);
	      name = use;
	      goto start_over;

continue_walking:
	      /* Back from the "recursive" call: fold the child's
		 low-link into ours.  */
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      /* Back-edge to a name still on the SCC stack.  */
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}

/* Allocate a value number table.
*/

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
				   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  Releases the hash tables (which run
   their element-free callbacks), the nary obstack and both pools.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

/* Initialize the global state for SCCVN: per-SSA-name aux info,
   RPO numbers for basic blocks, the constant value-id table, VN_TOP,
   and the valid/optimistic value numbering tables.  */

static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
				      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  /* VN_TOP is a sentinel tree meaning "no value known yet".  */
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  /* Statement uids are used by compare_ops for intra-block ordering.  */
  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

/* Tear down all state allocated by init_scc_vn / run_scc_vn.  */

void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  /* Release SSA names that were created for insertion but never used.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID if we computed something useful in RESULT.  Leaves *ID
   untouched for results that are neither SSA names nor constants.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	*id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
	*id = get_or_alloc_constant_value_id (result);
    }
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
			 vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
			 vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
			 vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  /* Parameter default defs value-number to themselves.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
	{
	  tree def = gimple_default_def (cfun, param);
	  VN_INFO (def)->valnum = def;
	}
    }

  /* Walk all used SSA names, discovering and processing SCCs.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids through valnum chains until they stop
     changing.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
	{
	  tree name = ssa_name (i);
	  vn_ssa_aux_t info;
	  if (!name)
	    continue;
	  info = VN_INFO (name);
	  if (TREE_CODE (info->valnum) == SSA_NAME
	      && info->valnum != name
	      && info->value_id != VN_INFO (info->valnum)->value_id)
	    {
	      changed = true;
	      info->value_id = VN_INFO (info->valnum)->value_id;
	    }
	}
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}


/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  /* The operation itself is safe; check whether any operand traps.  */
  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}