/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "builtins.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores of the same value
   to the same address from the same starting memory state are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly whole
   structure copies.
*/
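
/* Illustration only (not part of the pass; a minimal sketch): a source
   fragment whose SSA graph contains the kind of cycle alternative 2 is
   designed for.  The phis and increments of I and J form a single SCC;
   starting the backedge values at VN_TOP and iterating the SCC against
   the optimistic table proves that I and J always carry the same
   value, so J's phi receives I's value number and the second induction
   variable becomes redundant:

     int
     example (int n)
     {
       int i = 0, j = 0, sum = 0;
       while (i < n)
	 {
	   sum += j;	// j value-numbers the same as i here
	   i = i + 1;
	   j = j + 1;	// value-numbered the same as i + 1
	 }
       return sum;
     }
*/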


static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
bitmap const_parms;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : pointer_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
  static inline void remove (vn_phi_s *);
};

/* Return the computed hashcode for phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure PHI.  */

inline void
vn_phi_hasher::remove (vn_phi_s *phi)
{
  phi->phiargs.release ();
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : pointer_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
  static inline void remove (vn_reference_s *);
};

/* Return the hashcode for a given reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return vn_reference_eq (v, c);
}

inline void
vn_reference_hasher::remove (vn_reference_s *v)
{
  free_reference (v);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
  struct obstack nary_obstack;
  object_allocator<vn_phi_s> *phis_pool;
  object_allocator<vn_reference_s> *references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* Return the SSA value of the VUSE X, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      tree tem = SSA_VAL (x);
      /* stmt walking can walk over a backedge and reach code we didn't
	 value-number yet.  */
      if (tem == VN_TOP)
	return x;
      x = tem;
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return whether there is value numbering information for a given SSA
   name NAME.  */

bool
has_VN_INFO (tree name)
{
  if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
    return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
  return false;
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through the unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
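
/* Usage sketch (illustration only; assumes the constant table has been
   initialized by the pass).  The first call for a given constant
   allocates a fresh value id and records it in CONSTANT_VALUE_IDS,
   later calls return the same id, and value_id_constant_p tells
   constant value ids apart from those of SSA names:

     unsigned int id1 = get_or_alloc_constant_value_id (integer_one_node);
     unsigned int id2 = get_or_alloc_constant_value_id (integer_one_node);
     gcc_assert (id1 == id2 && value_id_constant_p (id1));
*/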

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
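
/* Illustration only (hypothetical fragment): thanks to the ADDR_EXPR
   unwrapping above, the two loads below compare equal here after
   valueization, even though one base is recorded as an ADDR_EXPR
   inside a MEM_REF and the other as the decl itself:

     struct S { int x; } a;

     int
     example (struct S *p)	// assume SCCVN proved p == &a
     {
       int t1 = a.x;		// canonicalized to MEM[&a].x
       int t2 = p->x;		// valueized to MEM[&a].x
       return t1 - t2;		// t2 gets t1's value number
     }
*/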

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field;
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero-offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl], which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
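
/* Illustration only (hypothetical layout): for the access a.b[i] with
   "int b[4]" placed at byte offset 8 in "a", the loop above pushes,
   from the outermost reference inwards:

     { ARRAY_REF,     op0 = i, op1 = 0, op2 = 1, off = -1 }  // non-constant index
     { COMPONENT_REF, op0 = FIELD_DECL b,        off =  8 }
     { MEM_REF,       op0 = 0,                   off =  0 }  // decl canonicalization
     { ADDR_EXPR,     op0 = &a,                  off = -1 }

   i.e. the decl "a" is represented as MEM[&a] so that it unifies with
   accesses through a pointer that valueizes to &a, and the ARRAY_REF
   element size op2 is recorded in units of the element alignment.  */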

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}

/* Copy the operations present in call statement CALL into RESULT, a
   vector of vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If two calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  if (gimple_call_with_bounds_p (call))
    temp.with_bounds = 1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
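
/* Illustration only (hypothetical offsets): given the op pair for
   MEM[&s.a.b, 4] where ".a.b" sits at byte offset 12 within "s",
   get_addr_base_and_unit_offset peels the component path, the MEM_REF
   offset absorbs it (4 + 12 = 16) and op->op0 becomes plain &s, i.e.
   the pair is rewritten to MEM[&s, 16] so that it unifies with other
   accesses based at "s".  */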

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant, propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return false;

      off += wi::to_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && is_gimple_reg_type (ref->type)
	   && (!INTEGRAL_TYPE_P (ref->type)
	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
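
/* Illustration only (hypothetical fragment): the constant-initializer
   path above folds a read like the following, where the operand walk
   accumulates the constant offset 8 down to the MEM_REF base, the
   decl's initializer is obtained via ctor_for_folding, and
   fold_ctor_reference reduces the load to the constant 30:

     static const int tab[4] = { 10, 20, 30, 40 };

     int
     example (void)
     {
       return tab[2];
     }

   The builtin path handles the other case: a translated call to a
   builtin with constant arguments is rebuilt with build_call_expr
   and folded to an invariant if possible.  */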

/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE, either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					        va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);

/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
{
  if (!rcode.is_tree_code ())
    return NULL_TREE;
  tree *ops = ops_;
  unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
  if (rcode == CONSTRUCTOR
      /* ???  We're arriving here with SCCVN's view, a decomposed
	 CONSTRUCTOR, while GIMPLE and the match-and-simplify machinery
	 use a CONSTRUCTOR as a GENERIC tree.  */
      && TREE_CODE (ops_[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (ops_[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  return vn_nary_op_lookup_pieces (length, (tree_code) rcode,
				   type, ops, &vnresult);
}
1678 
1679 /* Return a value-number for RCODE OPS... either by looking up an existing
1680    value-number for the simplified result or by inserting the operation if
1681    INSERT is true.  */
1682 
1683 static tree
vn_nary_build_or_lookup_1(code_helper rcode,tree type,tree * ops,bool insert)1684 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1685 			   bool insert)
1686 {
1687   tree result = NULL_TREE;
1688   /* We will be creating a value number for
1689        RCODE (OPS...).
1690      So first simplify and lookup this expression to see if it
1691      is already available.  */
1692   mprts_hook = vn_lookup_simplify_result;
1693   bool res = false;
1694   switch (TREE_CODE_LENGTH ((tree_code) rcode))
1695     {
1696     case 1:
1697       res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1698       break;
1699     case 2:
1700       res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1701       break;
1702     case 3:
1703       res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1704       break;
1705     }
1706   mprts_hook = NULL;
1707   gimple *new_stmt = NULL;
1708   if (res
1709       && gimple_simplified_result_is_gimple_val (rcode, ops))
1710     /* The expression is already available.  */
1711     result = ops[0];
1712   else
1713     {
1714       tree val = vn_lookup_simplify_result (rcode, type, ops);
1715       if (!val && insert)
1716 	{
1717 	  gimple_seq stmts = NULL;
1718 	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1719 	  if (result)
1720 	    {
1721 	      gcc_assert (gimple_seq_singleton_p (stmts));
1722 	      new_stmt = gimple_seq_first_stmt (stmts);
1723 	    }
1724 	}
1725       else
1726 	/* The expression is already available.  */
1727 	result = val;
1728     }
1729   if (new_stmt)
1730     {
1731       /* The expression is not yet available, value-number lhs to
1732 	 the new SSA_NAME we created.  */
1733       /* Initialize value-number information properly.  */
1734       VN_INFO_GET (result)->valnum = result;
1735       VN_INFO (result)->value_id = get_next_value_id ();
1736       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1737 					  new_stmt);
1738       VN_INFO (result)->needs_insertion = true;
1739       /* ???  PRE phi-translation inserts NARYs without a corresponding
1740          SSA name result.  Re-use those but set their result according
1741 	 to the stmt we just built.  */
1742       vn_nary_op_t nary = NULL;
1743       vn_nary_op_lookup_stmt (new_stmt, &nary);
1744       if (nary)
1745 	{
1746 	  gcc_assert (nary->result == NULL_TREE);
1747 	  nary->result = gimple_assign_lhs (new_stmt);
1748 	}
1749       /* As all "inserted" statements are singleton SCCs, insert
1750 	 into the valid table.  This is strictly needed to
1751 	 avoid re-generating new value SSA_NAMEs for the same
1752 	 expression during SCC iteration over and over (the
1753 	 optimistic table gets cleared after each iteration).
1754 	 We do not need to insert into the optimistic table, as
1755 	 lookups there will fall back to the valid table.  */
1756       else if (current_info == optimistic_info)
1757 	{
1758 	  current_info = valid_info;
1759 	  vn_nary_op_insert_stmt (new_stmt, result);
1760 	  current_info = optimistic_info;
1761 	}
1762       else
1763 	vn_nary_op_insert_stmt (new_stmt, result);
1764       if (dump_file && (dump_flags & TDF_DETAILS))
1765 	{
1766 	  fprintf (dump_file, "Inserting name ");
1767 	  print_generic_expr (dump_file, result);
1768 	  fprintf (dump_file, " for expression ");
1769 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1770 	  fprintf (dump_file, "\n");
1771 	}
1772     }
1773   return result;
1774 }
1775 
1776 /* Return a value-number for RCODE OPS... either by looking up an existing
1777    value-number for the simplified result or by inserting the operation.  */
1778 
1779 static tree
1780 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1781 {
1782   return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1783 }
1784 
1785 /* Try to simplify the n-ary operation NARY and return the value
1786    of its simplified form if one can be found, NULL_TREE otherwise.  */
1787 
1788 tree
1789 vn_nary_simplify (vn_nary_op_t nary)
1790 {
1791   if (nary->length > 3)
1792     return NULL_TREE;
1793   tree ops[3];
1794   memcpy (ops, nary->op, sizeof (tree) * nary->length);
1795   return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1796 }
1797 
1798 
1799 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1800    from the statement defining VUSE and if not successful tries to
1801    translate *REF and *VR through an aggregate copy at the definition
1802    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform any
1803    translation.  If only disambiguation was performed then
1804    *DISAMBIGUATE_ONLY is set to true.  */
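/* The return value is NULL to continue the VUSE walk with the
   (possibly translated) *REF / *VR pair, a vn_reference_t result to
   stop the walk with a found value, or (void *)-1 to abort it.  */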
1805 
1806 static void *
1807 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1808 		       bool *disambiguate_only)
1809 {
1810   vn_reference_t vr = (vn_reference_t)vr_;
1811   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1812   tree base = ao_ref_base (ref);
1813   HOST_WIDE_INT offseti, maxsizei;
1814   static vec<vn_reference_op_s> lhs_ops;
1815   ao_ref lhs_ref;
1816   bool lhs_ref_ok = false;
1817   poly_int64 copy_size;
1818 
1819   /* If the reference is based on a parameter that was determined as
1820      pointing to readonly memory it doesn't change.  */
1821   if (TREE_CODE (base) == MEM_REF
1822       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1823       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1824       && bitmap_bit_p (const_parms,
1825 		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1826     {
1827       *disambiguate_only = true;
1828       return NULL;
1829     }
1830 
1831   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1832   if (is_gimple_assign (def_stmt))
1833     {
1834       tree lhs = gimple_assign_lhs (def_stmt);
1835       bool valueized_anything = false;
1836       /* Avoid re-allocation overhead.  */
1837       lhs_ops.truncate (0);
1838       copy_reference_ops_from_ref (lhs, &lhs_ops);
1839       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1840       if (valueized_anything)
1841 	{
1842 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1843 						      get_alias_set (lhs),
1844 						      TREE_TYPE (lhs), lhs_ops);
1845 	  if (lhs_ref_ok
1846 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1847 	    {
1848 	      *disambiguate_only = true;
1849 	      return NULL;
1850 	    }
1851 	}
1852       else
1853 	{
1854 	  ao_ref_init (&lhs_ref, lhs);
1855 	  lhs_ref_ok = true;
1856 	}
1857 
1858       /* If we reach a clobbering statement try to skip it and see if
1859          we find a VN result with exactly the same value as the
1860 	 possible clobber.  In this case we can ignore the clobber
1861 	 and return the found value.  */
1862       if (vn_walk_kind == VN_WALKREWRITE
1863 	  && is_gimple_reg_type (TREE_TYPE (lhs))
1864 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
1865 	  && ref->ref)
1866 	{
1867 	  tree *saved_last_vuse_ptr = last_vuse_ptr;
1868 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
1869 	  last_vuse_ptr = NULL;
1870 	  tree saved_vuse = vr->vuse;
1871 	  hashval_t saved_hashcode = vr->hashcode;
1872 	  void *res = vn_reference_lookup_2 (ref,
1873 					     gimple_vuse (def_stmt), 0, vr);
1874 	  /* Need to restore vr->vuse and vr->hashcode.  */
1875 	  vr->vuse = saved_vuse;
1876 	  vr->hashcode = saved_hashcode;
1877 	  last_vuse_ptr = saved_last_vuse_ptr;
1878 	  if (res && res != (void *)-1)
1879 	    {
1880 	      vn_reference_t vnresult = (vn_reference_t) res;
1881 	      if (vnresult->result
1882 		  && operand_equal_p (vnresult->result,
1883 				      gimple_assign_rhs1 (def_stmt), 0)
1884 		  /* We have to honor our promise about union type punning
1885 		     and also support arbitrary overlaps with
1886 		     -fno-strict-aliasing.  So simply resort to alignment to
1887 		     rule out overlaps.  Do this check last because it is
1888 		     quite expensive compared to the hash-lookup above.  */
1889 		  && multiple_p (get_object_alignment (ref->ref), ref->size)
1890 		  && multiple_p (get_object_alignment (lhs), ref->size))
1891 		return res;
1892 	    }
1893 	}
1894     }
1895   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1896 	   && gimple_call_num_args (def_stmt) <= 4)
1897     {
1898       /* For builtin calls valueize its arguments and call the
1899          alias oracle again.  Valueization may improve points-to
1900 	 info of pointers and constify size and position arguments.
1901 	 Originally this was motivated by PR61034 which has
1902 	 conditional calls to free falsely clobbering ref because
1903 	 of imprecise points-to info of the argument.  */
1904       tree oldargs[4];
1905       bool valueized_anything = false;
1906       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1907 	{
1908 	  oldargs[i] = gimple_call_arg (def_stmt, i);
1909 	  tree val = vn_valueize (oldargs[i]);
1910 	  if (val != oldargs[i])
1911 	    {
1912 	      gimple_call_set_arg (def_stmt, i, val);
1913 	      valueized_anything = true;
1914 	    }
1915 	}
1916       if (valueized_anything)
1917 	{
1918 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1919 					       ref);
1920 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1921 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1922 	  if (!res)
1923 	    {
1924 	      *disambiguate_only = true;
1925 	      return NULL;
1926 	    }
1927 	}
1928     }
1929 
1930   if (*disambiguate_only)
1931     return (void *)-1;
1932 
1933   /* If we cannot constrain the size of the reference we cannot
1934      test if anything kills it.  */
1935   if (!ref->max_size_known_p ())
1936     return (void *)-1;
1937 
1938   poly_int64 offset = ref->offset;
1939   poly_int64 maxsize = ref->max_size;
1940 
1941   /* We can't deduce anything useful from clobbers.  */
1942   if (gimple_clobber_p (def_stmt))
1943     return (void *)-1;
1944 
1945   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1946      from that definition.
1947      1) Memset.  */
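  /* E.g. given

       memset (&a, 0, sizeof (a));
       ... = a.f;

     the load of a.f can be value-numbered to zero as long as it only
     reads bytes covered by the memset.  */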
1948   if (is_gimple_reg_type (vr->type)
1949       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1950       && integer_zerop (gimple_call_arg (def_stmt, 1))
1951       && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1952       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1953     {
1954       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1955       tree base2;
1956       poly_int64 offset2, size2, maxsize2;
1957       bool reverse;
1958       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1959 				       &reverse);
1960       tree len = gimple_call_arg (def_stmt, 2);
1961       if (known_size_p (maxsize2)
1962 	  && operand_equal_p (base, base2, 0)
1963 	  && known_subrange_p (offset, maxsize, offset2,
1964 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1965 	{
1966 	  tree val = build_zero_cst (vr->type);
1967 	  return vn_reference_lookup_or_insert_for_pieces
1968 	           (vuse, vr->set, vr->type, vr->operands, val);
1969 	}
1970     }
1971 
1972   /* 2) Assignment from an empty CONSTRUCTOR.  */
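  /* E.g. an aggregate clearing

       a = {};
       ... = a.f;

     likewise yields zero for reads fully contained in the store.  */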
1973   else if (is_gimple_reg_type (vr->type)
1974 	   && gimple_assign_single_p (def_stmt)
1975 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1976 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1977     {
1978       tree base2;
1979       poly_int64 offset2, size2, maxsize2;
1980       bool reverse;
1981       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1982 				       &offset2, &size2, &maxsize2, &reverse);
1983       if (known_size_p (maxsize2)
1984 	  && known_eq (maxsize2, size2)
1985 	  && operand_equal_p (base, base2, 0)
1986 	  && known_subrange_p (offset, maxsize, offset2, size2))
1987 	{
1988 	  tree val = build_zero_cst (vr->type);
1989 	  return vn_reference_lookup_or_insert_for_pieces
1990 	           (vuse, vr->set, vr->type, vr->operands, val);
1991 	}
1992     }
1993 
1994   /* 3) Assignment from a constant.  We can use folds native encode/interpret
1995      routines to extract the assigned bits.  */
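  /* E.g. storing a constant and reading back a sub-word piece of it
     is handled by byte-encoding the constant with native_encode_expr
     and re-interpreting the relevant bytes with
     native_interpret_expr.  */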
1996   else if (known_eq (ref->size, maxsize)
1997 	   && is_gimple_reg_type (vr->type)
1998 	   && !contains_storage_order_barrier_p (vr->operands)
1999 	   && gimple_assign_single_p (def_stmt)
2000 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2001 	   /* native_encode_expr and native_interpret_expr operate on arrays
2002 	      of bytes and so fundamentally need a compile-time size and offset.  */
2003 	   && maxsize.is_constant (&maxsizei)
2004 	   && maxsizei % BITS_PER_UNIT == 0
2005 	   && offset.is_constant (&offseti)
2006 	   && offseti % BITS_PER_UNIT == 0
2007 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2008 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2009 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2010     {
2011       tree base2;
2012       HOST_WIDE_INT offset2, size2;
2013       bool reverse;
2014       base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2015 					   &offset2, &size2, &reverse);
2016       if (base2
2017 	  && !reverse
2018 	  && size2 % BITS_PER_UNIT == 0
2019 	  && offset2 % BITS_PER_UNIT == 0
2020 	  && operand_equal_p (base, base2, 0)
2021 	  && known_subrange_p (offseti, maxsizei, offset2, size2))
2022 	{
2023 	  /* We support up to 512-bit values (for V8DFmode).  */
2024 	  unsigned char buffer[64];
2025 	  int len;
2026 
2027 	  tree rhs = gimple_assign_rhs1 (def_stmt);
2028 	  if (TREE_CODE (rhs) == SSA_NAME)
2029 	    rhs = SSA_VAL (rhs);
2030 	  unsigned pad = 0;
2031 	  if (BYTES_BIG_ENDIAN
2032 	      && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2033 	    {
2034 	      /* On big-endian the padding is at the 'front' so
2035 		 just skip the initial bytes.  */
2036 	      fixed_size_mode mode
2037 		  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2038 	      pad = GET_MODE_SIZE (mode) - size2 / BITS_PER_UNIT;
2039 	    }
2040 	  len = native_encode_expr (rhs,
2041 				    buffer, sizeof (buffer),
2042 				    ((offseti - offset2) / BITS_PER_UNIT
2043 				     + pad));
2044 	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2045 	    {
2046 	      tree type = vr->type;
2047 	      /* Make sure to interpret in a type that has a range
2048 	         covering the whole access size.  */
2049 	      if (INTEGRAL_TYPE_P (vr->type)
2050 		  && maxsizei != TYPE_PRECISION (vr->type))
2051 		type = build_nonstandard_integer_type (maxsizei,
2052 						       TYPE_UNSIGNED (type));
2053 	      tree val = native_interpret_expr (type, buffer,
2054 						maxsizei / BITS_PER_UNIT);
2055 	      /* If we chop off bits because the type's precision doesn't
2056 		 match the memory access size this is ok when optimizing
2057 		 reads but not when called from the DSE code during
2058 		 elimination.  */
2059 	      if (val
2060 		  && type != vr->type)
2061 		{
2062 		  if (! int_fits_type_p (val, vr->type))
2063 		    val = NULL_TREE;
2064 		  else
2065 		    val = fold_convert (vr->type, val);
2066 		}
2067 
2068 	      if (val)
2069 		return vn_reference_lookup_or_insert_for_pieces
2070 			 (vuse, vr->set, vr->type, vr->operands, val);
2071 	    }
2072 	}
2073     }
2074 
2075   /* 4) Assignment from an SSA name whose definition we may be able
2076      to access pieces from.  */
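  /* E.g. for

       a = x_1;
       ... = a.f;

     the read can be expressed as BIT_FIELD_REF <x_1, size, offset>
     when the store covers the read.  */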
2077   else if (known_eq (ref->size, maxsize)
2078 	   && is_gimple_reg_type (vr->type)
2079 	   && !contains_storage_order_barrier_p (vr->operands)
2080 	   && gimple_assign_single_p (def_stmt)
2081 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2082     {
2083       tree base2;
2084       poly_int64 offset2, size2, maxsize2;
2085       bool reverse;
2086       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2087 				       &offset2, &size2, &maxsize2,
2088 				       &reverse);
2089       tree def_rhs = gimple_assign_rhs1 (def_stmt);
2090       if (!reverse
2091 	  && known_size_p (maxsize2)
2092 	  && known_eq (maxsize2, size2)
2093 	  && operand_equal_p (base, base2, 0)
2094 	  && known_subrange_p (offset, maxsize, offset2, size2)
2095 	  /* ???  We can't handle bitfield precision extracts without
2096 	     either using an alternate type for the BIT_FIELD_REF and
2097 	     then doing a conversion or possibly adjusting the offset
2098 	     according to endianness.  */
2099 	  && (! INTEGRAL_TYPE_P (vr->type)
2100 	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2101 	  && multiple_p (ref->size, BITS_PER_UNIT)
2102 	  && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2103 	      || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2104 	{
2105 	  code_helper rcode = BIT_FIELD_REF;
2106 	  tree ops[3];
2107 	  ops[0] = SSA_VAL (def_rhs);
2108 	  ops[1] = bitsize_int (ref->size);
2109 	  ops[2] = bitsize_int (offset - offset2);
2110 	  tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2111 	  if (val
2112 	      && (TREE_CODE (val) != SSA_NAME
2113 		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2114 	    {
2115 	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2116 		  (vuse, vr->set, vr->type, vr->operands, val);
2117 	      return res;
2118 	    }
2119 	}
2120     }
2121 
2122   /* 5) For aggregate copies translate the reference through them if
2123      the copy kills ref.  */
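  /* E.g. for the aggregate copy

       b = a;
       ... = b.f;

     the lookup is continued with the reference rewritten to a.f.  */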
2124   else if (vn_walk_kind == VN_WALKREWRITE
2125 	   && gimple_assign_single_p (def_stmt)
2126 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
2127 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2128 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2129     {
2130       tree base2;
2131       int i, j, k;
2132       auto_vec<vn_reference_op_s> rhs;
2133       vn_reference_op_t vro;
2134       ao_ref r;
2135 
2136       if (!lhs_ref_ok)
2137 	return (void *)-1;
2138 
2139       /* See if the assignment kills REF.  */
2140       base2 = ao_ref_base (&lhs_ref);
2141       if (!lhs_ref.max_size_known_p ()
2142 	  || (base != base2
2143 	      && (TREE_CODE (base) != MEM_REF
2144 		  || TREE_CODE (base2) != MEM_REF
2145 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2146 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2147 					  TREE_OPERAND (base2, 1))))
2148 	  || !stmt_kills_ref_p (def_stmt, ref))
2149 	return (void *)-1;
2150 
2151       /* Find the common base of ref and the lhs.  lhs_ops already
2152          contains valueized operands for the lhs.  */
2153       i = vr->operands.length () - 1;
2154       j = lhs_ops.length () - 1;
2155       while (j >= 0 && i >= 0
2156 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2157 	{
2158 	  i--;
2159 	  j--;
2160 	}
2161 
2162       /* ???  The innermost op should always be a MEM_REF and we already
2163          checked that the assignment to the lhs kills vr.  Thus for
2164 	 aggregate copies using char[] types the vn_reference_op_eq
2165 	 may fail when comparing types for compatibility.  But we really
2166 	 don't care here - further lookups with the rewritten operands
2167 	 will simply fail if we messed up types too badly.  */
2168       poly_int64 extra_off = 0;
2169       if (j == 0 && i >= 0
2170 	  && lhs_ops[0].opcode == MEM_REF
2171 	  && maybe_ne (lhs_ops[0].off, -1))
2172 	{
2173 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2174 	    i--, j--;
2175 	  else if (vr->operands[i].opcode == MEM_REF
2176 		   && maybe_ne (vr->operands[i].off, -1))
2177 	    {
2178 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
2179 	      i--, j--;
2180 	    }
2181 	}
2182 
2183       /* i now points to the first additional op.
2184 	 ???  LHS may not be completely contained in VR, one or more
2185 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
2186 	 try handling outermost VIEW_CONVERT_EXPRs.  */
2187       if (j != -1)
2188 	return (void *)-1;
2189 
2190       /* Punt if the additional ops contain a storage order barrier.  */
2191       for (k = i; k >= 0; k--)
2192 	{
2193 	  vro = &vr->operands[k];
2194 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2195 	    return (void *)-1;
2196 	}
2197 
2198       /* Now re-write REF to be based on the rhs of the assignment.  */
2199       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2200 
2201       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
2202       if (maybe_ne (extra_off, 0))
2203 	{
2204 	  if (rhs.length () < 2
2205 	      || rhs[0].opcode != MEM_REF
2206 	      || known_eq (rhs[0].off, -1))
2207 	    return (void *)-1;
2208 	  rhs[0].off += extra_off;
2209 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2210 					build_int_cst (TREE_TYPE (rhs[0].op0),
2211 						       extra_off));
2212 	}
2213 
2214       /* We need to prepend vr->operands[0..i] to rhs.  */
2215       vec<vn_reference_op_s> old = vr->operands;
2216       if (i + 1 + rhs.length () > vr->operands.length ())
2217 	vr->operands.safe_grow (i + 1 + rhs.length ());
2218       else
2219 	vr->operands.truncate (i + 1 + rhs.length ());
2220       FOR_EACH_VEC_ELT (rhs, j, vro)
2221 	vr->operands[i + 1 + j] = *vro;
2222       vr->operands = valueize_refs (vr->operands);
2223       if (old == shared_lookup_references)
2224 	shared_lookup_references = vr->operands;
2225       vr->hashcode = vn_reference_compute_hash (vr);
2226 
2227       /* Try folding the new reference to a constant.  */
2228       tree val = fully_constant_vn_reference_p (vr);
2229       if (val)
2230 	return vn_reference_lookup_or_insert_for_pieces
2231 		 (vuse, vr->set, vr->type, vr->operands, val);
2232 
2233       /* Adjust *ref from the new operands.  */
2234       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2235 	return (void *)-1;
2236       /* This can happen with bitfields.  */
2237       if (maybe_ne (ref->size, r.size))
2238 	return (void *)-1;
2239       *ref = r;
2240 
2241       /* Do not update last seen VUSE after translating.  */
2242       last_vuse_ptr = NULL;
2243 
2244       /* Keep looking for the adjusted *REF / VR pair.  */
2245       return NULL;
2246     }
2247 
2248   /* 6) For memcpy copies translate the reference through them if
2249      the copy kills ref.  */
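  /* E.g. for

       memcpy (&b, &a, sizeof (b));
       ... = b.f;

     the reference is rewritten in terms of a and the walk continues
     there.  */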
2250   else if (vn_walk_kind == VN_WALKREWRITE
2251 	   && is_gimple_reg_type (vr->type)
2252 	   /* ???  Handle BCOPY as well.  */
2253 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2254 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2255 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2256 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2257 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2258 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2259 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2260 	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2261     {
2262       tree lhs, rhs;
2263       ao_ref r;
2264       poly_int64 rhs_offset, lhs_offset;
2265       vn_reference_op_s op;
2266       poly_uint64 mem_offset;
2267       poly_int64 at, byte_maxsize;
2268 
2269       /* Only handle non-variable, addressable refs.  */
2270       if (maybe_ne (ref->size, maxsize)
2271 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
2272 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2273 	return (void *)-1;
2274 
2275       /* Extract a pointer base and an offset for the destination.  */
2276       lhs = gimple_call_arg (def_stmt, 0);
2277       lhs_offset = 0;
2278       if (TREE_CODE (lhs) == SSA_NAME)
2279 	{
2280 	  lhs = SSA_VAL (lhs);
2281 	  if (TREE_CODE (lhs) == SSA_NAME)
2282 	    {
2283 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2284 	      if (gimple_assign_single_p (def_stmt)
2285 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2286 		lhs = gimple_assign_rhs1 (def_stmt);
2287 	    }
2288 	}
2289       if (TREE_CODE (lhs) == ADDR_EXPR)
2290 	{
2291 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2292 						    &lhs_offset);
2293 	  if (!tem)
2294 	    return (void *)-1;
2295 	  if (TREE_CODE (tem) == MEM_REF
2296 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2297 	    {
2298 	      lhs = TREE_OPERAND (tem, 0);
2299 	      if (TREE_CODE (lhs) == SSA_NAME)
2300 		lhs = SSA_VAL (lhs);
2301 	      lhs_offset += mem_offset;
2302 	    }
2303 	  else if (DECL_P (tem))
2304 	    lhs = build_fold_addr_expr (tem);
2305 	  else
2306 	    return (void *)-1;
2307 	}
2308       if (TREE_CODE (lhs) != SSA_NAME
2309 	  && TREE_CODE (lhs) != ADDR_EXPR)
2310 	return (void *)-1;
2311 
2312       /* Extract a pointer base and an offset for the source.  */
2313       rhs = gimple_call_arg (def_stmt, 1);
2314       rhs_offset = 0;
2315       if (TREE_CODE (rhs) == SSA_NAME)
2316 	rhs = SSA_VAL (rhs);
2317       if (TREE_CODE (rhs) == ADDR_EXPR)
2318 	{
2319 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2320 						    &rhs_offset);
2321 	  if (!tem)
2322 	    return (void *)-1;
2323 	  if (TREE_CODE (tem) == MEM_REF
2324 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2325 	    {
2326 	      rhs = TREE_OPERAND (tem, 0);
2327 	      rhs_offset += mem_offset;
2328 	    }
2329 	  else if (DECL_P (tem)
2330 		   || TREE_CODE (tem) == STRING_CST)
2331 	    rhs = build_fold_addr_expr (tem);
2332 	  else
2333 	    return (void *)-1;
2334 	}
2335       if (TREE_CODE (rhs) != SSA_NAME
2336 	  && TREE_CODE (rhs) != ADDR_EXPR)
2337 	return (void *)-1;
2338 
2339       /* The bases of the destination and the reference have to agree.  */
2340       if (TREE_CODE (base) == MEM_REF)
2341 	{
2342 	  if (TREE_OPERAND (base, 0) != lhs
2343 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2344 	    return (void *) -1;
2345 	  at += mem_offset;
2346 	}
2347       else if (!DECL_P (base)
2348 	       || TREE_CODE (lhs) != ADDR_EXPR
2349 	       || TREE_OPERAND (lhs, 0) != base)
2350 	return (void *)-1;
2351 
2352       /* If the access is completely outside of the memcpy destination
2353 	 area there is no aliasing.  */
2354       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2355 	return NULL;
2356       /* And the access has to be contained within the memcpy destination.  */
2357       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2358 	return (void *)-1;
2359 
2360       /* Make room for 2 operands in the new reference.  */
2361       if (vr->operands.length () < 2)
2362 	{
2363 	  vec<vn_reference_op_s> old = vr->operands;
2364 	  vr->operands.safe_grow_cleared (2);
2365 	  if (old == shared_lookup_references)
2366 	    shared_lookup_references = vr->operands;
2367 	}
2368       else
2369 	vr->operands.truncate (2);
2370 
2371       /* The looked-through reference is a simple MEM_REF.  */
2372       memset (&op, 0, sizeof (op));
2373       op.type = vr->type;
2374       op.opcode = MEM_REF;
2375       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2376       op.off = at - lhs_offset + rhs_offset;
2377       vr->operands[0] = op;
2378       op.type = TREE_TYPE (rhs);
2379       op.opcode = TREE_CODE (rhs);
2380       op.op0 = rhs;
2381       op.off = -1;
2382       vr->operands[1] = op;
2383       vr->hashcode = vn_reference_compute_hash (vr);
2384 
2385       /* Try folding the new reference to a constant.  */
2386       tree val = fully_constant_vn_reference_p (vr);
2387       if (val)
2388 	return vn_reference_lookup_or_insert_for_pieces
2389 		 (vuse, vr->set, vr->type, vr->operands, val);
2390 
2391       /* Adjust *ref from the new operands.  */
2392       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2393 	return (void *)-1;
2394       /* This can happen with bitfields.  */
2395       if (maybe_ne (ref->size, r.size))
2396 	return (void *)-1;
2397       *ref = r;
2398 
2399       /* Do not update last seen VUSE after translating.  */
2400       last_vuse_ptr = NULL;
2401 
2402       /* Keep looking for the adjusted *REF / VR pair.  */
2403       return NULL;
2404     }
2405 
2406   /* Bail out and stop walking.  */
2407   return (void *)-1;
2408 }
2409 
2410 /* Return a reference op vector from OP that can be used for
2411    vn_reference_lookup_pieces.  The caller is responsible for releasing
2412    the vector.  */
2413 
2414 vec<vn_reference_op_s>
2415 vn_reference_operands_for_lookup (tree op)
2416 {
2417   bool valueized;
2418   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2419 }
2420 
2421 /* Lookup a reference operation by its parts, in the current hash table.
2422    Returns the resulting value number if it exists in the hash table,
2423    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2424    vn_reference_t stored in the hashtable if something is found.  */
2425 
2426 tree
2427 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2428 			    vec<vn_reference_op_s> operands,
2429 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2430 {
2431   struct vn_reference_s vr1;
2432   vn_reference_t tmp;
2433   tree cst;
2434 
2435   if (!vnresult)
2436     vnresult = &tmp;
2437   *vnresult = NULL;
2438 
2439   vr1.vuse = vuse_ssa_val (vuse);
2440   shared_lookup_references.truncate (0);
2441   shared_lookup_references.safe_grow (operands.length ());
2442   memcpy (shared_lookup_references.address (),
2443 	  operands.address (),
2444 	  sizeof (vn_reference_op_s)
2445 	  * operands.length ());
2446   vr1.operands = operands = shared_lookup_references
2447     = valueize_refs (shared_lookup_references);
2448   vr1.type = type;
2449   vr1.set = set;
2450   vr1.hashcode = vn_reference_compute_hash (&vr1);
2451   if ((cst = fully_constant_vn_reference_p (&vr1)))
2452     return cst;
2453 
2454   vn_reference_lookup_1 (&vr1, vnresult);
2455   if (!*vnresult
2456       && kind != VN_NOWALK
2457       && vr1.vuse)
2458     {
2459       ao_ref r;
2460       vn_walk_kind = kind;
2461       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2462 	*vnresult =
2463 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2464 						  vn_reference_lookup_2,
2465 						  vn_reference_lookup_3,
2466 						  vuse_ssa_val, &vr1);
2467       gcc_checking_assert (vr1.operands == shared_lookup_references);
2468     }
2469 
2470   if (*vnresult)
2471      return (*vnresult)->result;
2472 
2473   return NULL_TREE;
2474 }
2475 
2476 /* Lookup OP in the current hash table, and return the resulting value
2477    number if it exists in the hash table.  Return NULL_TREE if it does
2478    not exist in the hash table or if the result field of the structure
2479    was NULL.  VNRESULT will be filled in with the vn_reference_t
2480    stored in the hashtable if one exists.  When TBAA_P is false assume
2481    we are looking up a store and treat it as having alias-set zero.  */
2482 
2483 tree
2484 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2485 		     vn_reference_t *vnresult, bool tbaa_p)
2486 {
2487   vec<vn_reference_op_s> operands;
2488   struct vn_reference_s vr1;
2489   tree cst;
2490   bool valueized_anything;
2491 
2492   if (vnresult)
2493     *vnresult = NULL;
2494 
2495   vr1.vuse = vuse_ssa_val (vuse);
2496   vr1.operands = operands
2497     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2498   vr1.type = TREE_TYPE (op);
2499   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2500   vr1.hashcode = vn_reference_compute_hash (&vr1);
2501   if ((cst = fully_constant_vn_reference_p (&vr1)))
2502     return cst;
2503 
2504   if (kind != VN_NOWALK
2505       && vr1.vuse)
2506     {
2507       vn_reference_t wvnresult;
2508       ao_ref r;
2509       /* Make sure to use a valueized reference if we valueized anything.
2510          Otherwise preserve the full reference for advanced TBAA.  */
2511       if (!valueized_anything
2512 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2513 					     vr1.operands))
2514 	ao_ref_init (&r, op);
2515       if (! tbaa_p)
2516 	r.ref_alias_set = r.base_alias_set = 0;
2517       vn_walk_kind = kind;
2518       wvnresult =
2519 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2520 						vn_reference_lookup_2,
2521 						vn_reference_lookup_3,
2522 						vuse_ssa_val, &vr1);
2523       gcc_checking_assert (vr1.operands == shared_lookup_references);
2524       if (wvnresult)
2525 	{
2526 	  if (vnresult)
2527 	    *vnresult = wvnresult;
2528 	  return wvnresult->result;
2529 	}
2530 
2531       return NULL_TREE;
2532     }
2533 
2534   return vn_reference_lookup_1 (&vr1, vnresult);
2535 }
2536 
2537 /* Lookup CALL in the current hash table and return the entry in
2538    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2539 
2540 void
2541 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2542 			  vn_reference_t vr)
2543 {
2544   if (vnresult)
2545     *vnresult = NULL;
2546 
2547   tree vuse = gimple_vuse (call);
2548 
2549   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2550   vr->operands = valueize_shared_reference_ops_from_call (call);
2551   vr->type = gimple_expr_type (call);
2552   vr->set = 0;
2553   vr->hashcode = vn_reference_compute_hash (vr);
2554   vn_reference_lookup_1 (vr, vnresult);
2555 }
2556 
2557 /* Insert OP into the current hash table with a value number of
2558    RESULT, and return the resulting reference structure we created.  */
2559 
2560 static vn_reference_t
2561 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2562 {
2563   vn_reference_s **slot;
2564   vn_reference_t vr1;
2565   bool tem;
2566 
2567   vr1 = current_info->references_pool->allocate ();
2568   if (TREE_CODE (result) == SSA_NAME)
2569     vr1->value_id = VN_INFO (result)->value_id;
2570   else
2571     vr1->value_id = get_or_alloc_constant_value_id (result);
2572   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2573   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2574   vr1->type = TREE_TYPE (op);
2575   vr1->set = get_alias_set (op);
2576   vr1->hashcode = vn_reference_compute_hash (vr1);
2577   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2578   vr1->result_vdef = vdef;
2579 
2580   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2581 							INSERT);
2582 
2583   /* Because we look up stores using vuses, and value number failures
2584      using the vdefs (see visit_reference_op_store for how and why),
2585      it's possible that on failure we may try to insert an already
2586      inserted store.  This is not wrong, there is no ssa name for a
2587      store that we could use as a differentiator anyway.  Thus, unlike
2588      the other lookup functions, you cannot gcc_assert (!*slot)
2589      here.  */
2590 
2591   /* But free the old slot in case of a collision.  */
2592   if (*slot)
2593     free_reference (*slot);
2594 
2595   *slot = vr1;
2596   return vr1;
2597 }
2598 
2599 /* Insert a reference by its pieces into the current hash table with
2600    a value number of RESULT.  Return the resulting reference
2601    structure we created.  */
2602 
2603 vn_reference_t
2604 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2605 			    vec<vn_reference_op_s> operands,
2606 			    tree result, unsigned int value_id)
2607 
2608 {
2609   vn_reference_s **slot;
2610   vn_reference_t vr1;
2611 
2612   vr1 = current_info->references_pool->allocate ();
2613   vr1->value_id = value_id;
2614   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2615   vr1->operands = valueize_refs (operands);
2616   vr1->type = type;
2617   vr1->set = set;
2618   vr1->hashcode = vn_reference_compute_hash (vr1);
2619   if (result && TREE_CODE (result) == SSA_NAME)
2620     result = SSA_VAL (result);
2621   vr1->result = result;
2622 
2623   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2624 							INSERT);
2625 
2626   /* At this point we should have all the things inserted that we have
2627      seen before, and we should never try inserting something that
2628      already exists.  */
2629   gcc_assert (!*slot);
2630   if (*slot)
2631     free_reference (*slot);
2632 
2633   *slot = vr1;
2634   return vr1;
2635 }
2636 
2637 /* Compute and return the hash value for nary operation VNO1.  */
2638 
2639 static hashval_t
2640 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2641 {
2642   inchash::hash hstate;
2643   unsigned i;
2644 
2645   for (i = 0; i < vno1->length; ++i)
2646     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2647       vno1->op[i] = SSA_VAL (vno1->op[i]);
2648 
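  /* Canonicalize the operand order of commutative operations and
     comparisons so that, e.g., a_1 + b_2 and b_2 + a_1 receive the
     same hash and compare equal.  */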
2649   if (((vno1->length == 2
2650 	&& commutative_tree_code (vno1->opcode))
2651        || (vno1->length == 3
2652 	   && commutative_ternary_tree_code (vno1->opcode)))
2653       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2654     std::swap (vno1->op[0], vno1->op[1]);
2655   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2656 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2657     {
2658       std::swap (vno1->op[0], vno1->op[1]);
2659       vno1->opcode = swap_tree_comparison (vno1->opcode);
2660     }
2661 
2662   hstate.add_int (vno1->opcode);
2663   for (i = 0; i < vno1->length; ++i)
2664     inchash::add_expr (vno1->op[i], hstate);
2665 
2666   return hstate.end ();
2667 }
2668 
2669 /* Compare nary operations VNO1 and VNO2 and return true if they are
2670    equivalent.  */
2671 
2672 bool
2673 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2674 {
2675   unsigned i;
2676 
2677   if (vno1->hashcode != vno2->hashcode)
2678     return false;
2679 
2680   if (vno1->length != vno2->length)
2681     return false;
2682 
2683   if (vno1->opcode != vno2->opcode
2684       || !types_compatible_p (vno1->type, vno2->type))
2685     return false;
2686 
2687   for (i = 0; i < vno1->length; ++i)
2688     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2689       return false;
2690 
2691   /* BIT_INSERT_EXPR has an implicit operand as the type precision
2692      of op1.  Need to check to make sure they are the same.  */
2693   if (vno1->opcode == BIT_INSERT_EXPR
2694       && TREE_CODE (vno1->op[1]) == INTEGER_CST
2695       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2696 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2697     return false;
2698 
2699   return true;
2700 }
2701 
2702 /* Initialize VNO from the pieces provided.  */
2703 
2704 static void
2705 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2706 			     enum tree_code code, tree type, tree *ops)
2707 {
2708   vno->opcode = code;
2709   vno->length = length;
2710   vno->type = type;
2711   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2712 }
2713 
2714 /* Initialize VNO from OP.  */
2715 
2716 static void
2717 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2718 {
2719   unsigned i;
2720 
2721   vno->opcode = TREE_CODE (op);
2722   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2723   vno->type = TREE_TYPE (op);
2724   for (i = 0; i < vno->length; ++i)
2725     vno->op[i] = TREE_OPERAND (op, i);
2726 }
2727 
2728 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2729 
2730 static unsigned int
2731 vn_nary_length_from_stmt (gimple *stmt)
2732 {
2733   switch (gimple_assign_rhs_code (stmt))
2734     {
2735     case REALPART_EXPR:
2736     case IMAGPART_EXPR:
2737     case VIEW_CONVERT_EXPR:
2738       return 1;
2739 
2740     case BIT_FIELD_REF:
2741       return 3;
2742 
2743     case CONSTRUCTOR:
2744       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2745 
2746     default:
2747       return gimple_num_ops (stmt) - 1;
2748     }
2749 }
2750 
2751 /* Initialize VNO from STMT.  */
2752 
2753 static void
2754 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2755 {
2756   unsigned i;
2757 
2758   vno->opcode = gimple_assign_rhs_code (stmt);
2759   vno->type = gimple_expr_type (stmt);
2760   switch (vno->opcode)
2761     {
2762     case REALPART_EXPR:
2763     case IMAGPART_EXPR:
2764     case VIEW_CONVERT_EXPR:
2765       vno->length = 1;
2766       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2767       break;
2768 
2769     case BIT_FIELD_REF:
2770       vno->length = 3;
2771       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2772       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2773       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2774       break;
2775 
2776     case CONSTRUCTOR:
2777       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2778       for (i = 0; i < vno->length; ++i)
2779 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2780       break;
2781 
2782     default:
2783       gcc_checking_assert (!gimple_assign_single_p (stmt));
2784       vno->length = gimple_num_ops (stmt) - 1;
2785       for (i = 0; i < vno->length; ++i)
2786 	vno->op[i] = gimple_op (stmt, i + 1);
2787     }
2788 }
2789 
2790 /* Compute the hashcode for VNO and look for it in the hash table;
2791    return the resulting value number if it exists in the hash table.
2792    Return NULL_TREE if it does not exist in the hash table or if the
2793    result field of the operation is NULL.  VNRESULT will contain the
2794    vn_nary_op_t from the hashtable if it exists.  */
2795 
2796 static tree
2797 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2798 {
2799   vn_nary_op_s **slot;
2800 
2801   if (vnresult)
2802     *vnresult = NULL;
2803 
2804   vno->hashcode = vn_nary_op_compute_hash (vno);
2805   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2806 						  NO_INSERT);
2807   if (!slot && current_info == optimistic_info)
2808     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2809 						  NO_INSERT);
2810   if (!slot)
2811     return NULL_TREE;
2812   if (vnresult)
2813     *vnresult = *slot;
2814   return (*slot)->result;
2815 }
2816 
2817 /* Lookup an n-ary operation by its pieces and return the resulting value
2818    number if it exists in the hash table.  Return NULL_TREE if it does
2819    not exist in the hash table or if the result field of the operation
2820    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2821    if it exists.  */
2822 
2823 tree
2824 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2825 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2826 {
2827   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2828 				  sizeof_vn_nary_op (length));
2829   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2830   return vn_nary_op_lookup_1 (vno1, vnresult);
2831 }
2832 
2833 /* Lookup OP in the current hash table, and return the resulting value
2834    number if it exists in the hash table.  Return NULL_TREE if it does
2835    not exist in the hash table or if the result field of the operation
2836    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2837    if it exists.  */
2838 
2839 tree
2840 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2841 {
2842   vn_nary_op_t vno1
2843     = XALLOCAVAR (struct vn_nary_op_s,
2844 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2845   init_vn_nary_op_from_op (vno1, op);
2846   return vn_nary_op_lookup_1 (vno1, vnresult);
2847 }
2848 
2849 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2850    value number if it exists in the hash table.  Return NULL_TREE if
2851    it does not exist in the hash table.  VNRESULT will contain the
2852    vn_nary_op_t from the hashtable if it exists.  */
2853 
2854 tree
2855 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2856 {
2857   vn_nary_op_t vno1
2858     = XALLOCAVAR (struct vn_nary_op_s,
2859 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2860   init_vn_nary_op_from_stmt (vno1, stmt);
2861   return vn_nary_op_lookup_1 (vno1, vnresult);
2862 }
2863 
2864 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2865 
2866 static vn_nary_op_t
2867 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2868 {
2869   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2870 }
2871 
2872 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2873    obstack.  */
2874 
2875 static vn_nary_op_t
2876 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2877 {
2878   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2879 					       &current_info->nary_obstack);
2880 
2881   vno1->value_id = value_id;
2882   vno1->length = length;
2883   vno1->result = result;
2884 
2885   return vno1;
2886 }
2887 
2888 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2889    VNO->HASHCODE first.  */
2890 
2891 static vn_nary_op_t
2892 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2893 			bool compute_hash)
2894 {
2895   vn_nary_op_s **slot;
2896 
2897   if (compute_hash)
2898     vno->hashcode = vn_nary_op_compute_hash (vno);
2899 
2900   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2901   /* While we do not want to insert things twice it's awkward to
2902      avoid it in the case where visit_nary_op pattern-matches stuff
2903      and ends up simplifying the replacement to itself.  We then
2904      get two inserts, one from visit_nary_op and one from
2905      vn_nary_build_or_lookup.
2906      So allow inserts with the same value number.  */
2907   if (*slot && (*slot)->result == vno->result)
2908     return *slot;
2909 
2910   gcc_assert (!*slot);
2911 
2912   *slot = vno;
2913   return vno;
2914 }
2915 
2916 /* Insert an n-ary operation into the current hash table using its
2917    pieces.  Return the vn_nary_op_t structure we created and put in
2918    the hashtable.  */
2919 
2920 vn_nary_op_t
2921 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2922 			  tree type, tree *ops,
2923 			  tree result, unsigned int value_id)
2924 {
2925   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2926   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2927   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2928 }
2929 
2930 /* Insert OP into the current hash table with a value number of
2931    RESULT.  Return the vn_nary_op_t structure we created and put in
2932    the hashtable.  */
2933 
2934 vn_nary_op_t
2935 vn_nary_op_insert (tree op, tree result)
2936 {
2937   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2938   vn_nary_op_t vno1;
2939 
2940   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2941   init_vn_nary_op_from_op (vno1, op);
2942   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2943 }
2944 
2945 /* Insert the rhs of STMT into the current hash table with a value number of
2946    RESULT.  */
2947 
2948 static vn_nary_op_t
2949 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2950 {
2951   vn_nary_op_t vno1
2952     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2953 			result, VN_INFO (result)->value_id);
2954   init_vn_nary_op_from_stmt (vno1, stmt);
2955   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2956 }
2957 
2958 /* Compute a hashcode for PHI operation VP1 and return it.  */
2959 
2960 static inline hashval_t
2961 vn_phi_compute_hash (vn_phi_t vp1)
2962 {
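  /* PHIs with more than two arguments are never unified across
     different blocks (see vn_phi_eq), so their block index can
     contribute to the hash; one- and two-argument PHIs may match
     across blocks and thus only hash the argument count.  */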
2963   inchash::hash hstate (vp1->phiargs.length () > 2
2964 			? vp1->block->index : vp1->phiargs.length ());
2965   tree phi1op;
2966   tree type;
2967   edge e;
2968   edge_iterator ei;
2969 
2970   /* If all PHI arguments are constants we need to distinguish
2971      the PHI node via its type.  */
2972   type = vp1->type;
2973   hstate.merge_hash (vn_hash_type (type));
2974 
2975   FOR_EACH_EDGE (e, ei, vp1->block->preds)
2976     {
2977       /* Don't hash backedge values; they need to be handled as VN_TOP
2978          for optimistic value-numbering.  */
2979       if (e->flags & EDGE_DFS_BACK)
2980 	continue;
2981 
2982       phi1op = vp1->phiargs[e->dest_idx];
2983       if (phi1op == VN_TOP)
2984 	continue;
2985       inchash::add_expr (phi1op, hstate);
2986     }
2987 
2988   return hstate.end ();
2989 }
2990 
2991 
2992 /* Return true if COND1 and COND2 represent the same condition, set
2993    *INVERTED_P if one needs to be inverted to make it the same as
2994    the other.  */
2995 
2996 static bool
2997 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2998 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2999 {
3000   enum tree_code code1 = gimple_cond_code (cond1);
3001   enum tree_code code2 = gimple_cond_code (cond2);
3002 
3003   *inverted_p = false;
3004   if (code1 == code2)
3005     ;
3006   else if (code1 == swap_tree_comparison (code2))
3007     std::swap (lhs2, rhs2);
3008   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3009     *inverted_p = true;
3010   else if (code1 == invert_tree_comparison
3011 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3012     {
3013       std::swap (lhs2, rhs2);
3014       *inverted_p = true;
3015     }
3016   else
3017     return false;
3018 
3019   return ((expressions_equal_p (lhs1, lhs2)
3020 	   && expressions_equal_p (rhs1, rhs2))
3021 	  || (commutative_tree_code (code1)
3022 	      && expressions_equal_p (lhs1, rhs2)
3023 	      && expressions_equal_p (rhs1, lhs2)));
3024 }
3025 
3026 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
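/* For PHIs in different blocks this implements a limited form of PHI
   CSE, e.g. the two-argument PHIs in

     if (x_1 > 0) ... # p_2 = PHI <a_3, b_4>
     ...
     if (x_1 > 0) ... # q_5 = PHI <a_3, b_4>

   are unified when their controlling conditions and the true/false
   edges into the PHI blocks correspond.  */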
3027 
3028 static int
3029 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3030 {
3031   if (vp1->hashcode != vp2->hashcode)
3032     return false;
3033 
3034   if (vp1->block != vp2->block)
3035     {
3036       if (vp1->phiargs.length () != vp2->phiargs.length ())
3037 	return false;
3038 
3039       switch (vp1->phiargs.length ())
3040 	{
3041 	case 1:
3042 	  /* Single-arg PHIs are just copies.  */
3043 	  break;
3044 
3045 	case 2:
3046 	  {
3047 	    /* Rule out backedges into the PHI.  */
3048 	    if (vp1->block->loop_father->header == vp1->block
3049 		|| vp2->block->loop_father->header == vp2->block)
3050 	      return false;
3051 
3052 	    /* If the PHI nodes do not have compatible types
3053 	       they are not the same.  */
3054 	    if (!types_compatible_p (vp1->type, vp2->type))
3055 	      return false;
3056 
3057 	    basic_block idom1
3058 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3059 	    basic_block idom2
3060 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3061 	    /* If the immediate dominators end in switch stmts multiple
3062 	       values may end up in the same PHI arg via intermediate
3063 	       CFG merges.  */
3064 	    if (EDGE_COUNT (idom1->succs) != 2
3065 		|| EDGE_COUNT (idom2->succs) != 2)
3066 	      return false;
3067 
3068 	    /* Verify the controlling stmt is the same.  */
3069 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3070 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3071 	    if (! last1 || ! last2)
3072 	      return false;
3073 	    bool inverted_p;
3074 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3075 				      last2, vp2->cclhs, vp2->ccrhs,
3076 				      &inverted_p))
3077 	      return false;
3078 
3079 	    /* Get at true/false controlled edges into the PHI.  */
3080 	    edge te1, te2, fe1, fe2;
3081 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
3082 						       &te1, &fe1)
3083 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
3084 							  &te2, &fe2))
3085 	      return false;
3086 
3087 	    /* Swap edges if the second condition is the inverted of the
3088 	       first.  */
3089 	    if (inverted_p)
3090 	      std::swap (te2, fe2);
3091 
3092 	    /* ???  Handle VN_TOP specially.  */
3093 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3094 				       vp2->phiargs[te2->dest_idx])
3095 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3096 					  vp2->phiargs[fe2->dest_idx]))
3097 	      return false;
3098 
3099 	    return true;
3100 	  }
3101 
3102 	default:
3103 	  return false;
3104 	}
3105     }
3106 
3107   /* If the PHI nodes do not have compatible types
3108      they are not the same.  */
3109   if (!types_compatible_p (vp1->type, vp2->type))
3110     return false;
3111 
3112   /* Any phi in the same block will have its arguments in the
3113      same edge order, because of how we store phi nodes.  */
3114   int i;
3115   tree phi1op;
3116   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3117     {
3118       tree phi2op = vp2->phiargs[i];
3119       if (phi1op == VN_TOP || phi2op == VN_TOP)
3120 	continue;
3121       if (!expressions_equal_p (phi1op, phi2op))
3122 	return false;
3123     }
3124 
3125   return true;
3126 }
3127 
3128 static vec<tree> shared_lookup_phiargs;
3129 
3130 /* Lookup PHI in the current hash table, and return the resulting
3131    value number if it exists in the hash table.  Return NULL_TREE if
3132    it does not exist in the hash table.  */
3133 
3134 static tree
3135 vn_phi_lookup (gimple *phi)
3136 {
3137   vn_phi_s **slot;
3138   struct vn_phi_s vp1;
3139   edge e;
3140   edge_iterator ei;
3141 
3142   shared_lookup_phiargs.truncate (0);
3143   shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3144 
3145   /* Canonicalize the SSA_NAME's to their value number.  */
3146   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3147     {
3148       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3149       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3150       shared_lookup_phiargs[e->dest_idx] = def;
3151     }
3152   vp1.type = TREE_TYPE (gimple_phi_result (phi));
3153   vp1.phiargs = shared_lookup_phiargs;
3154   vp1.block = gimple_bb (phi);
3155   /* Extract values of the controlling condition.  */
3156   vp1.cclhs = NULL_TREE;
3157   vp1.ccrhs = NULL_TREE;
3158   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3159   if (EDGE_COUNT (idom1->succs) == 2)
3160     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3161       {
3162 	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3163 	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3164       }
3165   vp1.hashcode = vn_phi_compute_hash (&vp1);
3166   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3167 						  NO_INSERT);
3168   if (!slot && current_info == optimistic_info)
3169     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3170 						  NO_INSERT);
3171   if (!slot)
3172     return NULL_TREE;
3173   return (*slot)->result;
3174 }
3175 
3176 /* Insert PHI into the current hash table with a value number of
3177    RESULT.  */
3178 
3179 static vn_phi_t
3180 vn_phi_insert (gimple *phi, tree result)
3181 {
3182   vn_phi_s **slot;
3183   vn_phi_t vp1 = current_info->phis_pool->allocate ();
3184   vec<tree> args = vNULL;
3185   edge e;
3186   edge_iterator ei;
3187 
3188   args.safe_grow (gimple_phi_num_args (phi));
3189 
3190   /* Canonicalize the SSA_NAME's to their value number.  */
3191   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3192     {
3193       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3194       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3195       args[e->dest_idx] = def;
3196     }
3197   vp1->value_id = VN_INFO (result)->value_id;
3198   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3199   vp1->phiargs = args;
3200   vp1->block = gimple_bb (phi);
3201   /* Extract values of the controlling condition.  */
3202   vp1->cclhs = NULL_TREE;
3203   vp1->ccrhs = NULL_TREE;
3204   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3205   if (EDGE_COUNT (idom1->succs) == 2)
3206     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3207       {
3208 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3209 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3210       }
3211   vp1->result = result;
3212   vp1->hashcode = vn_phi_compute_hash (vp1);
3213 
3214   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3215 
3216   /* Because we iterate over phi operations more than once, it's
3217      possible the slot might already exist here, hence no assert.  */
3218   *slot = vp1;
3219   return vp1;
3220 }
3221 
3222 
3223 /* Print set of components in strongly connected component SCC to OUT.  */
3224 
3225 static void
3226 print_scc (FILE *out, vec<tree> scc)
3227 {
3228   tree var;
3229   unsigned int i;
3230 
3231   fprintf (out, "SCC consists of %u:", scc.length ());
3232   FOR_EACH_VEC_ELT (scc, i, var)
3233     {
3234       fprintf (out, " ");
3235       print_generic_expr (out, var);
3236     }
3237   fprintf (out, "\n");
3238 }
3239 
3240 /* Return true if BB1 is dominated by BB2 taking into account edges
3241    that are not executable.  */
3242 
3243 static bool
3244 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3245 {
3246   edge_iterator ei;
3247   edge e;
3248 
3249   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3250     return true;
3251 
3252   /* Before iterating we'd like to know if there exists an
3253      (executable) path from bb2 to bb1 at all; if not we can
3254      directly return false.  For now simply iterate once.  */
3255 
3256   /* Iterate to the single executable bb1 predecessor.  */
3257   if (EDGE_COUNT (bb1->preds) > 1)
3258     {
3259       edge prede = NULL;
3260       FOR_EACH_EDGE (e, ei, bb1->preds)
3261 	if (e->flags & EDGE_EXECUTABLE)
3262 	  {
3263 	    if (prede)
3264 	      {
3265 		prede = NULL;
3266 		break;
3267 	      }
3268 	    prede = e;
3269 	  }
3270       if (prede)
3271 	{
3272 	  bb1 = prede->src;
3273 
3274 	  /* Re-do the dominance check with changed bb1.  */
3275 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3276 	    return true;
3277 	}
3278     }
3279 
3280   /* Iterate to the single executable bb2 successor.  */
3281   edge succe = NULL;
3282   FOR_EACH_EDGE (e, ei, bb2->succs)
3283     if (e->flags & EDGE_EXECUTABLE)
3284       {
3285 	if (succe)
3286 	  {
3287 	    succe = NULL;
3288 	    break;
3289 	  }
3290 	succe = e;
3291       }
3292   if (succe)
3293     {
3294       /* Verify the reached block is only reached through succe.
3295 	 If there is only one edge we can spare us the dominator
3296 	 check and iterate directly.  */
3297       if (EDGE_COUNT (succe->dest->preds) > 1)
3298 	{
3299 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3300 	    if (e != succe
3301 		&& (e->flags & EDGE_EXECUTABLE))
3302 	      {
3303 		succe = NULL;
3304 		break;
3305 	      }
3306 	}
3307       if (succe)
3308 	{
3309 	  bb2 = succe->dest;
3310 
3311 	  /* Re-do the dominance check with changed bb2.  */
3312 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3313 	    return true;
3314 	}
3315     }
3316 
3317   /* We could now iterate updating bb1 / bb2.  */
3318   return false;
3319 }
3320 
3321 /* Set the value number of FROM to TO, return true if it has changed
3322    as a result.  */
3323 
3324 static inline bool
3325 set_ssa_val_to (tree from, tree to)
3326 {
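  /* A note on the checks below: a value number starts out as VN_TOP, and
     once it holds a non-constant value it is not allowed to switch to a
     constant one; such a transition is forced to VARYING (represented as
     FROM valuing itself).  This monotonicity is what makes the optimistic
     iteration in process_scc terminate.  */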
3327   tree currval = SSA_VAL (from);
3328   poly_int64 toff, coff;
3329 
3330   /* The only thing we allow as value numbers are ssa_names
3331      and invariants.  So assert that here.  We don't allow VN_TOP
3332      as visiting a stmt should produce a value-number other than
3333      that.
3334      ???  Still VN_TOP can happen for unreachable code, so force
3335      it to varying in that case.  Not all code is prepared to
3336      get VN_TOP on valueization.  */
3337   if (to == VN_TOP)
3338     {
3339       if (dump_file && (dump_flags & TDF_DETAILS))
3340 	fprintf (dump_file, "Forcing value number to varying on "
3341 		 "receiving VN_TOP\n");
3342       to = from;
3343     }
3344 
3345   gcc_assert (to != NULL_TREE
3346 	      && ((TREE_CODE (to) == SSA_NAME
3347 		   && (to == from || SSA_VAL (to) == to))
3348 		  || is_gimple_min_invariant (to)));
3349 
3350   if (from != to)
3351     {
3352       if (currval == from)
3353 	{
3354 	  if (dump_file && (dump_flags & TDF_DETAILS))
3355 	    {
3356 	      fprintf (dump_file, "Not changing value number of ");
3357 	      print_generic_expr (dump_file, from);
3358 	      fprintf (dump_file, " from VARYING to ");
3359 	      print_generic_expr (dump_file, to);
3360 	      fprintf (dump_file, "\n");
3361 	    }
3362 	  return false;
3363 	}
3364       else if (currval != VN_TOP
3365 	       && ! is_gimple_min_invariant (currval)
3366 	       && is_gimple_min_invariant (to))
3367 	{
3368 	  if (dump_file && (dump_flags & TDF_DETAILS))
3369 	    {
3370 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3371 		       "value number of ");
3372 	      print_generic_expr (dump_file, from);
3373 	      fprintf (dump_file, " from ");
3374 	      print_generic_expr (dump_file, currval);
3375 	      fprintf (dump_file, " (non-constant) to ");
3376 	      print_generic_expr (dump_file, to);
3377 	      fprintf (dump_file, " (constant)\n");
3378 	    }
3379 	  to = from;
3380 	}
3381       else if (TREE_CODE (to) == SSA_NAME
3382 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3383 	to = from;
3384     }
3385 
3386   if (dump_file && (dump_flags & TDF_DETAILS))
3387     {
3388       fprintf (dump_file, "Setting value number of ");
3389       print_generic_expr (dump_file, from);
3390       fprintf (dump_file, " to ");
3391       print_generic_expr (dump_file, to);
3392     }
3393 
3394   if (currval != to
3395       && !operand_equal_p (currval, to, 0)
3396       /* Different undefined SSA names are not actually different.  See
3397          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
3398       && !(TREE_CODE (currval) == SSA_NAME
3399 	   && TREE_CODE (to) == SSA_NAME
3400 	   && ssa_undefined_value_p (currval, false)
3401 	   && ssa_undefined_value_p (to, false))
3402       /* ???  For addresses involving volatile objects or types operand_equal_p
3403          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3404 	 getting invariant gimple addresses here, so can use
3405 	 get_addr_base_and_unit_offset to do this comparison.  */
3406       && !(TREE_CODE (currval) == ADDR_EXPR
3407 	   && TREE_CODE (to) == ADDR_EXPR
3408 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3409 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3410 	   && known_eq (coff, toff)))
3411     {
3412       if (dump_file && (dump_flags & TDF_DETAILS))
3413 	fprintf (dump_file, " (changed)\n");
3414 
3415       /* If we equate two SSA names we have to make the side-band info
3416          of the leader conservative (and remember whatever original value
3417 	 was present).  */
3418       if (TREE_CODE (to) == SSA_NAME)
3419 	{
3420 	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3421 	      && SSA_NAME_RANGE_INFO (to))
3422 	    {
3423 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3424 		  || dominated_by_p_w_unex
3425 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3426 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3427 		/* Keep the info from the dominator.  */
3428 		;
3429 	      else
3430 		{
3431 		  /* Save old info.  */
3432 		  if (! VN_INFO (to)->info.range_info)
3433 		    {
3434 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3435 		      VN_INFO (to)->range_info_anti_range_p
3436 			= SSA_NAME_ANTI_RANGE_P (to);
3437 		    }
3438 		  /* Rather than allocating memory and unioning the info
3439 		     just clear it.  */
3440 		  if (dump_file && (dump_flags & TDF_DETAILS))
3441 		    {
3442 		      fprintf (dump_file, "clearing range info of ");
3443 		      print_generic_expr (dump_file, to);
3444 		      fprintf (dump_file, "\n");
3445 		    }
3446 		  SSA_NAME_RANGE_INFO (to) = NULL;
3447 		}
3448 	    }
3449 	  else if (POINTER_TYPE_P (TREE_TYPE (to))
3450 		   && SSA_NAME_PTR_INFO (to))
3451 	    {
3452 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3453 		  || dominated_by_p_w_unex
3454 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3455 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3456 		/* Keep the info from the dominator.  */
3457 		;
3458 	      else if (! SSA_NAME_PTR_INFO (from)
3459 		       /* Handle the case of trivially equivalent info.  */
3460 		       || memcmp (SSA_NAME_PTR_INFO (to),
3461 				  SSA_NAME_PTR_INFO (from),
3462 				  sizeof (ptr_info_def)) != 0)
3463 		{
3464 		  /* Save old info.  */
3465 		  if (! VN_INFO (to)->info.ptr_info)
3466 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3467 		  /* Rather than allocating memory and unioning the info
3468 		     just clear it.  */
3469 		  if (dump_file && (dump_flags & TDF_DETAILS))
3470 		    {
3471 		      fprintf (dump_file, "clearing points-to info of ");
3472 		      print_generic_expr (dump_file, to);
3473 		      fprintf (dump_file, "\n");
3474 		    }
3475 		  SSA_NAME_PTR_INFO (to) = NULL;
3476 		}
3477 	    }
3478 	}
3479 
3480       VN_INFO (from)->valnum = to;
3481       return true;
3482     }
3483   if (dump_file && (dump_flags & TDF_DETAILS))
3484     fprintf (dump_file, "\n");
3485   return false;
3486 }
3487 
3488 /* Mark as processed all the definitions in the defining stmt of USE, or
3489    the USE itself.  */
3490 
3491 static void
3492 mark_use_processed (tree use)
3493 {
3494   ssa_op_iter iter;
3495   def_operand_p defp;
3496   gimple *stmt = SSA_NAME_DEF_STMT (use);
3497 
3498   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3499     {
3500       VN_INFO (use)->use_processed = true;
3501       return;
3502     }
3503 
3504   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3505     {
3506       tree def = DEF_FROM_PTR (defp);
3507 
3508       VN_INFO (def)->use_processed = true;
3509     }
3510 }
3511 
3512 /* Value number all definitions in STMT to themselves.
3513    Return true if a value number changed.  */
3514 
3515 static bool
3516 defs_to_varying (gimple *stmt)
3517 {
3518   bool changed = false;
3519   ssa_op_iter iter;
3520   def_operand_p defp;
3521 
3522   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3523     {
3524       tree def = DEF_FROM_PTR (defp);
3525       changed |= set_ssa_val_to (def, def);
3526     }
3527   return changed;
3528 }
3529 
3530 /* Visit a copy between LHS and RHS, return true if the value number
3531    changed.  */
3532 
3533 static bool
3534 visit_copy (tree lhs, tree rhs)
3535 {
3536   /* Valueize.  */
3537   rhs = SSA_VAL (rhs);
3538 
3539   return set_ssa_val_to (lhs, rhs);
3540 }
3541 
3542 /* Lookup a value for OP in type WIDE_TYPE where the value in the type
3543    of OP is the same.  */
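/* For example, with OP an unsigned short SSA name and WIDE_TYPE being
   unsigned int, this returns a value already known to equal
   (unsigned int) OP: a recorded widening of OP, the wider value OP was
   truncated from, or, for an INTEGER_CST, the extended constant.
   NULL_TREE is returned when nothing suitable is available.  */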
3544 
3545 static tree
3546 valueized_wider_op (tree wide_type, tree op)
3547 {
3548   if (TREE_CODE (op) == SSA_NAME)
3549     op = SSA_VAL (op);
3550 
3551   /* Either we have the op widened available.  */
3552   tree ops[3] = {};
3553   ops[0] = op;
3554   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3555 				       wide_type, ops, NULL);
3556   if (tem)
3557     return tem;
3558 
3559   /* Or the op is truncated from some existing value.  */
3560   if (TREE_CODE (op) == SSA_NAME)
3561     {
3562       gimple *def = SSA_NAME_DEF_STMT (op);
3563       if (is_gimple_assign (def)
3564 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3565 	{
3566 	  tem = gimple_assign_rhs1 (def);
3567 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3568 	    {
3569 	      if (TREE_CODE (tem) == SSA_NAME)
3570 		tem = SSA_VAL (tem);
3571 	      return tem;
3572 	    }
3573 	}
3574     }
3575 
3576   /* For constants simply extend it.  */
3577   if (TREE_CODE (op) == INTEGER_CST)
3578     return wide_int_to_tree (wide_type, wi::to_wide (op));
3579 
3580   return NULL_TREE;
3581 }
3582 
3583 /* Visit a nary operator RHS, value number it, and return true if the
3584    value number of LHS has changed as a result.  */
3585 
3586 static bool
3587 visit_nary_op (tree lhs, gassign *stmt)
3588 {
3589   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3590   if (result)
3591     return set_ssa_val_to (lhs, result);
3592 
3593   /* Do some special pattern matching for redundancies of operations
3594      in different types.  */
3595   enum tree_code code = gimple_assign_rhs_code (stmt);
3596   tree type = TREE_TYPE (lhs);
3597   tree rhs1 = gimple_assign_rhs1 (stmt);
3598   switch (code)
3599     {
3600     CASE_CONVERT:
3601       /* Match arithmetic done in a different type where we can easily
3602          substitute the result from some earlier sign-changed or widened
3603 	 operation.  */
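      /* For example, given
	   utmp_1 = (unsigned short) (a_2 + b_3);
	   lhs_4 = (unsigned int) utmp_1;
	 an existing unsigned int computation of the widened a + b can be
	 reused by masking it with 0xffff; a mere sign-change of the same
	 precision can reuse it directly via a NOP_EXPR (both cases below).  */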
3604       if (INTEGRAL_TYPE_P (type)
3605 	  && TREE_CODE (rhs1) == SSA_NAME
3606 	  /* We only handle sign-changes or zero-extension -> & mask.  */
3607 	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3608 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3609 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3610 	{
3611 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3612 	  if (def
3613 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
3614 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
3615 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
3616 	    {
3617 	      tree ops[3] = {};
3618 	      /* Either we have the op widened available.  */
3619 	      ops[0] = valueized_wider_op (type,
3620 					   gimple_assign_rhs1 (def));
3621 	      if (ops[0])
3622 		ops[1] = valueized_wider_op (type,
3623 					     gimple_assign_rhs2 (def));
3624 	      if (ops[0] && ops[1])
3625 		{
3626 		  ops[0] = vn_nary_op_lookup_pieces
3627 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
3628 		  /* We have the wider operation available.  */
3629 		  if (ops[0]
3630 		      /* If the leader is a wrapping operation we can
3631 			 insert it for code hoisting w/o introducing
3632 			 undefined overflow.  If it is not it has to
3633 			 be available.  See PR86554.  */
3634 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3635 			  || TREE_CODE (ops[0]) != SSA_NAME
3636 			  || SSA_NAME_IS_DEFAULT_DEF (ops[0])
3637 			  || dominated_by_p_w_unex
3638 			       (gimple_bb (stmt),
3639 				gimple_bb (SSA_NAME_DEF_STMT (ops[0])))))
3640 		    {
3641 		      unsigned lhs_prec = TYPE_PRECISION (type);
3642 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3643 		      if (lhs_prec == rhs_prec)
3644 			{
3645 			  ops[1] = NULL_TREE;
3646 			  result = vn_nary_build_or_lookup (NOP_EXPR,
3647 							    type, ops);
3648 			  if (result)
3649 			    {
3650 			      bool changed = set_ssa_val_to (lhs, result);
3651 			      vn_nary_op_insert_stmt (stmt, result);
3652 			      return changed;
3653 			    }
3654 			}
3655 		      else
3656 			{
3657 			  ops[1] = wide_int_to_tree (type,
3658 						     wi::mask (rhs_prec, false,
3659 							       lhs_prec));
3660 			  result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3661 							    TREE_TYPE (lhs),
3662 							    ops);
3663 			  if (result)
3664 			    {
3665 			      bool changed = set_ssa_val_to (lhs, result);
3666 			      vn_nary_op_insert_stmt (stmt, result);
3667 			      return changed;
3668 			    }
3669 			}
3670 		    }
3671 		}
3672 	    }
3673 	}
3674     default:;
3675     }
3676 
3677   bool changed = set_ssa_val_to (lhs, lhs);
3678   vn_nary_op_insert_stmt (stmt, lhs);
3679   return changed;
3680 }
3681 
3682 /* Visit a call STMT storing into LHS.  Return true if the value number
3683    of the LHS has changed as a result.  */
3684 
3685 static bool
3686 visit_reference_op_call (tree lhs, gcall *stmt)
3687 {
3688   bool changed = false;
3689   struct vn_reference_s vr1;
3690   vn_reference_t vnresult = NULL;
3691   tree vdef = gimple_vdef (stmt);
3692 
3693   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
3694   if (lhs && TREE_CODE (lhs) != SSA_NAME)
3695     lhs = NULL_TREE;
3696 
3697   vn_reference_lookup_call (stmt, &vnresult, &vr1);
3698   if (vnresult)
3699     {
3700       if (vnresult->result_vdef && vdef)
3701 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3702       else if (vdef)
3703 	/* If the call was discovered to be pure or const reflect
3704 	   that as far as possible.  */
3705 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3706 
3707       if (!vnresult->result && lhs)
3708 	vnresult->result = lhs;
3709 
3710       if (vnresult->result && lhs)
3711 	changed |= set_ssa_val_to (lhs, vnresult->result);
3712     }
3713   else
3714     {
3715       vn_reference_t vr2;
3716       vn_reference_s **slot;
3717       tree vdef_val = vdef;
3718       if (vdef)
3719 	{
3720 	  /* If we value numbered an indirect function to one not
3721 	     clobbering memory, value number its VDEF to its
3722 	     VUSE.  */
3723 	  tree fn = gimple_call_fn (stmt);
3724 	  if (fn && TREE_CODE (fn) == SSA_NAME)
3725 	    {
3726 	      fn = SSA_VAL (fn);
3727 	      if (TREE_CODE (fn) == ADDR_EXPR
3728 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3729 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3730 		      & (ECF_CONST | ECF_PURE)))
3731 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3732 	    }
3733 	  changed |= set_ssa_val_to (vdef, vdef_val);
3734 	}
3735       if (lhs)
3736 	changed |= set_ssa_val_to (lhs, lhs);
3737       vr2 = current_info->references_pool->allocate ();
3738       vr2->vuse = vr1.vuse;
3739       /* As we are not walking the virtual operand chain we know the
3740 	 shared_lookup_references are still original so we can re-use
3741 	 them here.  */
3742       vr2->operands = vr1.operands.copy ();
3743       vr2->type = vr1.type;
3744       vr2->set = vr1.set;
3745       vr2->hashcode = vr1.hashcode;
3746       vr2->result = lhs;
3747       vr2->result_vdef = vdef_val;
3748       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3749 							    INSERT);
3750       gcc_assert (!*slot);
3751       *slot = vr2;
3752     }
3753 
3754   return changed;
3755 }
3756 
3757 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3758    and return true if the value number of the LHS has changed as a result.  */
3759 
3760 static bool
3761 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3762 {
3763   bool changed = false;
3764   tree last_vuse;
3765   tree result;
3766 
3767   last_vuse = gimple_vuse (stmt);
3768   last_vuse_ptr = &last_vuse;
3769   result = vn_reference_lookup (op, gimple_vuse (stmt),
3770 				default_vn_walk_kind, NULL, true);
3771   last_vuse_ptr = NULL;
3772 
3773   /* We handle type-punning through unions by value-numbering based
3774      on offset and size of the access.  Be prepared to handle a
3775      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
3776   if (result
3777       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3778     {
3779       /* We will be setting the value number of lhs to the value number
3780 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3781 	 So first simplify and lookup this expression to see if it
3782 	 is already available.  */
3783       code_helper rcode = VIEW_CONVERT_EXPR;
3784       tree ops[3] = { result };
3785       result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3786     }
3787 
3788   if (result)
3789     changed = set_ssa_val_to (lhs, result);
3790   else
3791     {
3792       changed = set_ssa_val_to (lhs, lhs);
3793       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3794     }
3795 
3796   return changed;
3797 }
3798 
3799 
3800 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3801    and return true if the value number of the LHS has changed as a result.  */
3802 
3803 static bool
3804 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3805 {
3806   bool changed = false;
3807   vn_reference_t vnresult = NULL;
3808   tree assign;
3809   bool resultsame = false;
3810   tree vuse = gimple_vuse (stmt);
3811   tree vdef = gimple_vdef (stmt);
3812 
3813   if (TREE_CODE (op) == SSA_NAME)
3814     op = SSA_VAL (op);
3815 
3816   /* First we want to lookup using the *vuses* from the store and see
3817      if the last store to this location with the same address
3818      had the same value.
3819 
3820      The vuses represent the memory state before the store.  If the
3821      memory state, address, and value of the store is the same as the
3822      last store to this location, then this store will produce the
3823      same memory state as that store.
3824 
3825      In this case the vdef versions for this store are value numbered to those
3826      vuse versions, since they represent the same memory state after
3827      this store.
3828 
3829      Otherwise, the vdefs for the store are used when inserting into
3830      the table, since the store generates a new memory state.  */
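  /* For example, a repeated  *p_1 = x_2;  with no intervening store gives
     the second store a VDEF valued the same as its VUSE, which is what
     later allows the redundant store to be removed.  */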
3831 
3832   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3833   if (vnresult
3834       && vnresult->result)
3835     {
3836       tree result = vnresult->result;
3837       if (TREE_CODE (result) == SSA_NAME)
3838 	result = SSA_VAL (result);
3839       resultsame = expressions_equal_p (result, op);
3840       if (resultsame)
3841 	{
3842 	  /* If the TBAA state isn't compatible for downstream reads
3843 	     we cannot value-number the VDEFs the same.  */
3844 	  alias_set_type set = get_alias_set (lhs);
3845 	  if (vnresult->set != set
3846 	      && ! alias_set_subset_of (set, vnresult->set))
3847 	    resultsame = false;
3848 	}
3849     }
3850 
3851   if (!resultsame)
3852     {
3853       /* Only perform the following when being called from PRE
3854 	 which embeds tail merging.  */
3855       if (default_vn_walk_kind == VN_WALK)
3856 	{
3857 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3858 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3859 	  if (vnresult)
3860 	    {
3861 	      VN_INFO (vdef)->use_processed = true;
3862 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3863 	    }
3864 	}
3865 
3866       if (dump_file && (dump_flags & TDF_DETAILS))
3867 	{
3868 	  fprintf (dump_file, "No store match\n");
3869 	  fprintf (dump_file, "Value numbering store ");
3870 	  print_generic_expr (dump_file, lhs);
3871 	  fprintf (dump_file, " to ");
3872 	  print_generic_expr (dump_file, op);
3873 	  fprintf (dump_file, "\n");
3874 	}
3875       /* Have to set value numbers before insert, since insert is
3876 	 going to valueize the references in-place.  */
3877       if (vdef)
3878 	changed |= set_ssa_val_to (vdef, vdef);
3879 
3880       /* Do not insert structure copies into the tables.  */
3881       if (is_gimple_min_invariant (op)
3882 	  || is_gimple_reg (op))
3883         vn_reference_insert (lhs, op, vdef, NULL);
3884 
3885       /* Only perform the following when being called from PRE
3886 	 which embeds tail merging.  */
3887       if (default_vn_walk_kind == VN_WALK)
3888 	{
3889 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3890 	  vn_reference_insert (assign, lhs, vuse, vdef);
3891 	}
3892     }
3893   else
3894     {
3895       /* We had a match, so value number the vdef to have the value
3896 	 number of the vuse it came from.  */
3897 
3898       if (dump_file && (dump_flags & TDF_DETAILS))
3899 	fprintf (dump_file, "Store matched earlier value, "
3900 		 "value numbering store vdefs to matching vuses.\n");
3901 
3902       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3903     }
3904 
3905   return changed;
3906 }
3907 
3908 /* Visit and value number PHI, return true if the value number
3909    changed.  */
3910 
3911 static bool
3912 visit_phi (gimple *phi)
3913 {
3914   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3915   unsigned n_executable = 0;
3916   bool allsame = true;
3917   edge_iterator ei;
3918   edge e;
3919 
3920   /* TODO: We could check for this in init_scc_vn, and replace this
3921      with a gcc_assert.  */
3922   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3923     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3924 
3925   /* See if all non-TOP arguments have the same value.  TOP is
3926      equivalent to everything, so we can ignore it.  */
3927   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3928     if (e->flags & EDGE_EXECUTABLE)
3929       {
3930 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3931 
3932 	++n_executable;
3933 	if (TREE_CODE (def) == SSA_NAME)
3934 	  def = SSA_VAL (def);
3935 	if (def == VN_TOP)
3936 	  ;
3937 	/* Ignore undefined defs for sameval but record one.  */
3938 	else if (TREE_CODE (def) == SSA_NAME
3939 		 && ssa_undefined_value_p (def, false))
3940 	  seen_undef = def;
3941 	else if (sameval == VN_TOP)
3942 	  sameval = def;
3943 	else if (!expressions_equal_p (def, sameval))
3944 	  {
3945 	    allsame = false;
3946 	    break;
3947 	  }
3948       }
3949 
3950 
3951   /* If none of the edges was executable, keep the value-number at VN_TOP;
3952      if only a single edge is executable, use its value.  */
3953   if (n_executable <= 1)
3954     result = seen_undef ? seen_undef : sameval;
3955   /* If we saw only undefined values and VN_TOP use one of the
3956      undefined values.  */
3957   else if (sameval == VN_TOP)
3958     result = seen_undef ? seen_undef : sameval;
3959   /* First see if it is equivalent to a phi node in this block.  We prefer
3960      this as it allows IV elimination - see PRs 66502 and 67167.  */
3961   else if ((result = vn_phi_lookup (phi)))
3962     ;
3963   /* If all values are the same use that, unless we've seen undefined
3964      values as well and the value isn't constant.
3965      CCP/copyprop have the same restriction to not remove uninit warnings.  */
3966   else if (allsame
3967 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
3968     result = sameval;
3969   else
3970     {
3971       result = PHI_RESULT (phi);
3972       /* Only insert PHIs that are varying, for constant value numbers
3973          we mess up equivalences otherwise as we are only comparing
3974 	 the immediate controlling predicates.  */
3975       vn_phi_insert (phi, result);
3976     }
3977 
3978   return set_ssa_val_to (PHI_RESULT (phi), result);
3979 }
3980 
3981 /* Try to simplify RHS using equivalences and constant folding.  */
3982 
3983 static tree
3984 try_to_simplify (gassign *stmt)
3985 {
3986   enum tree_code code = gimple_assign_rhs_code (stmt);
3987   tree tem;
3988 
3989   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3990      in this case; there is no point in doing extra work.  */
3991   if (code == SSA_NAME)
3992     return NULL_TREE;
3993 
3994   /* First try constant folding based on our current lattice.  */
3995   mprts_hook = vn_lookup_simplify_result;
3996   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3997   mprts_hook = NULL;
3998   if (tem
3999       && (TREE_CODE (tem) == SSA_NAME
4000 	  || is_gimple_min_invariant (tem)))
4001     return tem;
4002 
4003   return NULL_TREE;
4004 }
4005 
4006 /* Visit and value number USE, return true if the value number
4007    changed.  */
4008 
4009 static bool
4010 visit_use (tree use)
4011 {
4012   bool changed = false;
4013   gimple *stmt = SSA_NAME_DEF_STMT (use);
4014 
4015   mark_use_processed (use);
4016 
4017   gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4018 	      && !SSA_NAME_IS_DEFAULT_DEF (use));
4019 
4020   if (dump_file && (dump_flags & TDF_DETAILS))
4021     {
4022       fprintf (dump_file, "Value numbering ");
4023       print_generic_expr (dump_file, use);
4024       fprintf (dump_file, " stmt = ");
4025       print_gimple_stmt (dump_file, stmt, 0);
4026     }
4027 
4028   if (gimple_code (stmt) == GIMPLE_PHI)
4029     changed = visit_phi (stmt);
4030   else if (gimple_has_volatile_ops (stmt))
4031     changed = defs_to_varying (stmt);
4032   else if (gassign *ass = dyn_cast <gassign *> (stmt))
4033     {
4034       enum tree_code code = gimple_assign_rhs_code (ass);
4035       tree lhs = gimple_assign_lhs (ass);
4036       tree rhs1 = gimple_assign_rhs1 (ass);
4037       tree simplified;
4038 
4039       /* Shortcut for copies. Simplifying copies is pointless,
4040 	 since we copy the expression and value they represent.  */
4041       if (code == SSA_NAME
4042 	  && TREE_CODE (lhs) == SSA_NAME)
4043 	{
4044 	  changed = visit_copy (lhs, rhs1);
4045 	  goto done;
4046 	}
4047       simplified = try_to_simplify (ass);
4048       if (simplified)
4049 	{
4050 	  if (dump_file && (dump_flags & TDF_DETAILS))
4051 	    {
4052 	      fprintf (dump_file, "RHS ");
4053 	      print_gimple_expr (dump_file, ass, 0);
4054 	      fprintf (dump_file, " simplified to ");
4055 	      print_generic_expr (dump_file, simplified);
4056 	      fprintf (dump_file, "\n");
4057 	    }
4058 	}
4059       /* Setting value numbers to constants will occasionally
4060 	 screw up phi congruence because constants are not
4061 	 uniquely associated with a single ssa name that can be
4062 	 looked up.  */
4063       if (simplified
4064 	  && is_gimple_min_invariant (simplified)
4065 	  && TREE_CODE (lhs) == SSA_NAME)
4066 	{
4067 	  changed = set_ssa_val_to (lhs, simplified);
4068 	  goto done;
4069 	}
4070       else if (simplified
4071 	       && TREE_CODE (simplified) == SSA_NAME
4072 	       && TREE_CODE (lhs) == SSA_NAME)
4073 	{
4074 	  changed = visit_copy (lhs, simplified);
4075 	  goto done;
4076 	}
4077 
4078       if ((TREE_CODE (lhs) == SSA_NAME
4079 	   /* We can substitute SSA_NAMEs that are live over
4080 	      abnormal edges with their constant value.  */
4081 	   && !(gimple_assign_copy_p (ass)
4082 		&& is_gimple_min_invariant (rhs1))
4083 	   && !(simplified
4084 		&& is_gimple_min_invariant (simplified))
4085 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4086 	  /* Stores or copies from SSA_NAMEs that are live over
4087 	     abnormal edges are a problem.  */
4088 	  || (code == SSA_NAME
4089 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4090 	changed = defs_to_varying (ass);
4091       else if (REFERENCE_CLASS_P (lhs)
4092 	       || DECL_P (lhs))
4093 	changed = visit_reference_op_store (lhs, rhs1, ass);
4094       else if (TREE_CODE (lhs) == SSA_NAME)
4095 	{
4096 	  if ((gimple_assign_copy_p (ass)
4097 	       && is_gimple_min_invariant (rhs1))
4098 	      || (simplified
4099 		  && is_gimple_min_invariant (simplified)))
4100 	    {
4101 	      if (simplified)
4102 		changed = set_ssa_val_to (lhs, simplified);
4103 	      else
4104 		changed = set_ssa_val_to (lhs, rhs1);
4105 	    }
4106 	  else
4107 	    {
4108 	      /* Visit the original statement.  */
4109 	      switch (vn_get_stmt_kind (ass))
4110 		{
4111 		case VN_NARY:
4112 		changed = visit_nary_op (lhs, ass);
4113 		break;
4114 		case VN_REFERENCE:
4115 		changed = visit_reference_op_load (lhs, rhs1, ass);
4116 		break;
4117 		default:
4118 		changed = defs_to_varying (ass);
4119 		break;
4120 		}
4121 	    }
4122 	}
4123       else
4124 	changed = defs_to_varying (ass);
4125     }
4126   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4127     {
4128       tree lhs = gimple_call_lhs (call_stmt);
4129       if (lhs && TREE_CODE (lhs) == SSA_NAME)
4130 	{
4131 	  /* Try constant folding based on our current lattice.  */
4132 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4133 							    vn_valueize);
4134 	  if (simplified)
4135 	    {
4136 	      if (dump_file && (dump_flags & TDF_DETAILS))
4137 		{
4138 		  fprintf (dump_file, "call ");
4139 		  print_gimple_expr (dump_file, call_stmt, 0);
4140 		  fprintf (dump_file, " simplified to ");
4141 		  print_generic_expr (dump_file, simplified);
4142 		  fprintf (dump_file, "\n");
4143 		}
4144 	    }
4145 	  /* Setting value numbers to constants will occasionally
4146 	     screw up phi congruence because constants are not
4147 	     uniquely associated with a single ssa name that can be
4148 	     looked up.  */
4149 	  if (simplified
4150 	      && is_gimple_min_invariant (simplified))
4151 	    {
4152 	      changed = set_ssa_val_to (lhs, simplified);
4153 	      if (gimple_vdef (call_stmt))
4154 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4155 					   SSA_VAL (gimple_vuse (call_stmt)));
4156 	      goto done;
4157 	    }
4158 	  else if (simplified
4159 		   && TREE_CODE (simplified) == SSA_NAME)
4160 	    {
4161 	      changed = visit_copy (lhs, simplified);
4162 	      if (gimple_vdef (call_stmt))
4163 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4164 					   SSA_VAL (gimple_vuse (call_stmt)));
4165 	      goto done;
4166 	    }
4167 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4168 	    {
4169 	      changed = defs_to_varying (call_stmt);
4170 	      goto done;
4171 	    }
4172 	}
4173 
4174       /* Pick up flags from a devirtualization target.  */
4175       tree fn = gimple_call_fn (stmt);
4176       int extra_fnflags = 0;
4177       if (fn && TREE_CODE (fn) == SSA_NAME)
4178 	{
4179 	  fn = SSA_VAL (fn);
4180 	  if (TREE_CODE (fn) == ADDR_EXPR
4181 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4182 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4183 	}
4184       if (!gimple_call_internal_p (call_stmt)
4185 	  && (/* Calls to the same function with the same vuse
4186 		 and the same operands do not necessarily return the same
4187 		 value, unless they're pure or const.  */
4188 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
4189 	       & (ECF_PURE | ECF_CONST))
4190 	      /* If calls have a vdef, subsequent calls won't have
4191 		 the same incoming vuse.  So, if 2 calls with vdef have the
4192 		 same vuse, we know they're not subsequent.
4193 		 We can value number 2 calls to the same function with the
4194 		 same vuse and the same operands which are not subsequent
4195 		 the same, because there is no code in the program that can
4196 		 compare the 2 values...  */
4197 	      || (gimple_vdef (call_stmt)
4198 		  /* ... unless the call returns a pointer which does
4199 		     not alias with anything else.  In which case the
4200 		     information that the values are distinct is encoded
4201 		     in the IL.  */
4202 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4203 		  /* Only perform the following when being called from PRE
4204 		     which embeds tail merging.  */
4205 		  && default_vn_walk_kind == VN_WALK)))
4206 	changed = visit_reference_op_call (lhs, call_stmt);
4207       else
4208 	changed = defs_to_varying (call_stmt);
4209     }
4210   else
4211     changed = defs_to_varying (stmt);
4212  done:
4213   return changed;
4214 }
4215 
4216 /* Compare two operands by reverse postorder index.  */
4217 
4218 static int
4219 compare_ops (const void *pa, const void *pb)
4220 {
4221   const tree opa = *((const tree *)pa);
4222   const tree opb = *((const tree *)pb);
4223   gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4224   gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4225   basic_block bba;
4226   basic_block bbb;
4227 
4228   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4229     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4230   else if (gimple_nop_p (opstmta))
4231     return -1;
4232   else if (gimple_nop_p (opstmtb))
4233     return 1;
4234 
4235   bba = gimple_bb (opstmta);
4236   bbb = gimple_bb (opstmtb);
4237 
4238   if (!bba && !bbb)
4239     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4240   else if (!bba)
4241     return -1;
4242   else if (!bbb)
4243     return 1;
4244 
4245   if (bba == bbb)
4246     {
4247       if (gimple_code (opstmta) == GIMPLE_PHI
4248 	  && gimple_code (opstmtb) == GIMPLE_PHI)
4249 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4250       else if (gimple_code (opstmta) == GIMPLE_PHI)
4251 	return -1;
4252       else if (gimple_code (opstmtb) == GIMPLE_PHI)
4253 	return 1;
4254       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4255         return gimple_uid (opstmta) - gimple_uid (opstmtb);
4256       else
4257 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4258     }
4259   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4260 }
4261 
4262 /* Sort an array containing members of a strongly connected component
4263    SCC so that the members are ordered by RPO number.
4264    This means that when the sort is complete, iterating through the
4265    array will give you the members in RPO order.  */
4266 
4267 static void
4268 sort_scc (vec<tree> scc)
4269 {
4270   scc.qsort (compare_ops);
4271 }
4272 
4273 /* Insert the no longer used nary ONARY to the hash INFO.  */
4274 
4275 static void
4276 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4277 {
4278   size_t size = sizeof_vn_nary_op (onary->length);
4279   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4280 					       &info->nary_obstack);
4281   memcpy (nary, onary, size);
4282   vn_nary_op_insert_into (nary, info->nary, false);
4283 }
4284 
4285 /* Insert the no longer used phi OPHI to the hash INFO.  */
4286 
4287 static void
4288 copy_phi (vn_phi_t ophi, vn_tables_t info)
4289 {
4290   vn_phi_t phi = info->phis_pool->allocate ();
4291   vn_phi_s **slot;
4292   memcpy (phi, ophi, sizeof (*phi));
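  /* The vector of PHI arguments is moved, not copied, by the memcpy;
     detach it from the old entry so it is not freed together with it.  */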
4293   ophi->phiargs.create (0);
4294   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4295   gcc_assert (!*slot);
4296   *slot = phi;
4297 }
4298 
4299 /* Insert the no longer used reference OREF to the hash INFO.  */
4300 
4301 static void
4302 copy_reference (vn_reference_t oref, vn_tables_t info)
4303 {
4304   vn_reference_t ref;
4305   vn_reference_s **slot;
4306   ref = info->references_pool->allocate ();
4307   memcpy (ref, oref, sizeof (*ref));
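  /* As in copy_phi, the operands vector moves with the memcpy; detach
     it from the old entry so it is not freed together with it.  */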
4308   oref->operands.create (0);
4309   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4310   if (*slot)
4311     free_reference (*slot);
4312   *slot = ref;
4313 }
4314 
4315 /* Process a strongly connected component in the SSA graph.  */
4316 
4317 static void
4318 process_scc (vec<tree> scc)
4319 {
4320   tree var;
4321   unsigned int i;
4322   unsigned int iterations = 0;
4323   bool changed = true;
4324   vn_nary_op_iterator_type hin;
4325   vn_phi_iterator_type hip;
4326   vn_reference_iterator_type hir;
4327   vn_nary_op_t nary;
4328   vn_phi_t phi;
4329   vn_reference_t ref;
4330 
4331   /* If the SCC has a single member, just visit it.  */
4332   if (scc.length () == 1)
4333     {
4334       tree use = scc[0];
4335       if (VN_INFO (use)->use_processed)
4336 	return;
4337       /* We need to make sure it doesn't form a cycle itself, which can
4338 	 happen for self-referential PHI nodes.  In that case we would
4339 	 end up inserting an expression with VN_TOP operands into the
4340 	 valid table which makes us derive bogus equivalences later.
4341 	 The cheapest way to check this is to assume it for all PHI nodes.  */
4342       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4343 	/* Fallthru to iteration.  */ ;
4344       else
4345 	{
4346 	  visit_use (use);
4347 	  return;
4348 	}
4349     }
4350 
4351   if (dump_file && (dump_flags & TDF_DETAILS))
4352     print_scc (dump_file, scc);
4353 
4354   /* Iterate over the SCC with the optimistic table until it stops
4355      changing.  */
4356   current_info = optimistic_info;
4357   while (changed)
4358     {
4359       changed = false;
4360       iterations++;
4361       if (dump_file && (dump_flags & TDF_DETAILS))
4362 	fprintf (dump_file, "Starting iteration %d\n", iterations);
4363       /* As we are value-numbering optimistically we have to
4364 	 clear the expression tables and the simplified expressions
4365 	 in each iteration until we converge.  */
4366       optimistic_info->nary->empty ();
4367       optimistic_info->phis->empty ();
4368       optimistic_info->references->empty ();
4369       obstack_free (&optimistic_info->nary_obstack, NULL);
4370       gcc_obstack_init (&optimistic_info->nary_obstack);
4371       optimistic_info->phis_pool->release ();
4372       optimistic_info->references_pool->release ();
4373       FOR_EACH_VEC_ELT (scc, i, var)
4374 	gcc_assert (!VN_INFO (var)->needs_insertion
4375 		    && VN_INFO (var)->expr == NULL);
4376       FOR_EACH_VEC_ELT (scc, i, var)
4377 	changed |= visit_use (var);
4378     }
4379 
4380   if (dump_file && (dump_flags & TDF_DETAILS))
4381     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4382   statistics_histogram_event (cfun, "SCC iterations", iterations);
4383 
4384   /* Finally, copy the contents of the no longer used optimistic
4385      table to the valid table.  */
4386   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4387     copy_nary (nary, valid_info);
4388   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4389     copy_phi (phi, valid_info);
4390   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4391 			       ref, vn_reference_t, hir)
4392     copy_reference (ref, valid_info);
4393 
4394   current_info = valid_info;
4395 }
4396 
4397 
4398 /* Pop the components of the found SCC for NAME off the SCC stack
4399    and process them.  If the SCC exceeds the resource limit its
4400    members are dropped to varying instead.  */
4401 
4402 static void
4403 extract_and_process_scc_for_name (tree name)
4404 {
4405   auto_vec<tree> scc;
4406   tree x;
4407 
4408   /* Found an SCC, pop the components off the SCC stack and
4409      process them.  */
4410   do
4411     {
4412       x = sccstack.pop ();
4413 
4414       VN_INFO (x)->on_sccstack = false;
4415       scc.safe_push (x);
4416     } while (x != name);
4417 
4418   /* Drop all defs in the SCC to varying in case an SCC turns out to be
4419      incredibly large.
4420      ???  Just switch to a non-optimistic mode that avoids any iteration.  */
4421   if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4422     {
4423       if (dump_file)
4424 	{
4425 	  print_scc (dump_file, scc);
4426 	  fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4427 		   "size %u exceeding %u\n", scc.length (),
4428 		   (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4429 	}
4430       tree var;
4431       unsigned i;
4432       FOR_EACH_VEC_ELT (scc, i, var)
4433 	{
4434 	  gimple *def = SSA_NAME_DEF_STMT (var);
4435 	  mark_use_processed (var);
4436 	  if (SSA_NAME_IS_DEFAULT_DEF (var)
4437 	      || gimple_code (def) == GIMPLE_PHI)
4438 	    set_ssa_val_to (var, var);
4439 	  else
4440 	    defs_to_varying (def);
4441 	}
4442       return;
4443     }
4444 
4445   if (scc.length () > 1)
4446     sort_scc (scc);
4447 
4448   process_scc (scc);
4449 }
4450 
4451 /* Depth first search on NAME to discover and process SCC's in the SSA
4452    graph.
4453    Execution of this algorithm relies on the fact that the SCC's are
4454    popped off the stack in topological order.
4455    Resource limits on SCC size are enforced in
4456    extract_and_process_scc_for_name, not here.  */
4457 
4458 static void
4459 DFS (tree name)
4460 {
4461   auto_vec<ssa_op_iter> itervec;
4462   auto_vec<tree> namevec;
4463   use_operand_p usep = NULL;
4464   gimple *defstmt;
4465   tree use;
4466   ssa_op_iter iter;
4467 
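  /* This is Tarjan's SCC algorithm made iterative: instead of recursing
     on an unvisited use we push the current walk state onto
     namevec/itervec, jump to start_over, and resume at continue_walking
     once that use has been fully processed.  */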
4468 start_over:
4469   /* SCC info */
4470   VN_INFO (name)->dfsnum = next_dfs_num++;
4471   VN_INFO (name)->visited = true;
4472   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4473 
4474   sccstack.safe_push (name);
4475   VN_INFO (name)->on_sccstack = true;
4476   defstmt = SSA_NAME_DEF_STMT (name);
4477 
4478   /* Recursively DFS on our operands, looking for SCC's.  */
4479   if (!gimple_nop_p (defstmt))
4480     {
4481       /* Push a new iterator.  */
4482       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4483 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4484       else
4485 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4486     }
4487   else
4488     clear_and_done_ssa_iter (&iter);
4489 
4490   while (1)
4491     {
4492       /* If we are done processing uses of a name, go up the stack
4493 	 of iterators and process SCCs as we found them.  */
4494       if (op_iter_done (&iter))
4495 	{
4496 	  /* See if we found an SCC.  */
4497 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4498 	    extract_and_process_scc_for_name (name);
4499 
4500 	  /* Check if we are done.  */
4501 	  if (namevec.is_empty ())
4502 	    return;
4503 
4504 	  /* Restore the last use walker and continue walking there.  */
4505 	  use = name;
4506 	  name = namevec.pop ();
4507 	  memcpy (&iter, &itervec.last (),
4508 		  sizeof (ssa_op_iter));
4509 	  itervec.pop ();
4510 	  goto continue_walking;
4511 	}
4512 
4513       use = USE_FROM_PTR (usep);
4514 
4515       /* Since we handle phi nodes, we will sometimes get
4516 	 invariants in the use expression.  */
4517       if (TREE_CODE (use) == SSA_NAME)
4518 	{
4519 	  if (! (VN_INFO (use)->visited))
4520 	    {
4521 	      /* Recurse by pushing the current use walking state on
4522 		 the stack and starting over.  */
4523 	      itervec.safe_push (iter);
4524 	      namevec.safe_push (name);
4525 	      name = use;
4526 	      goto start_over;
4527 
4528 continue_walking:
4529 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4530 					 VN_INFO (use)->low);
4531 	    }
4532 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4533 	      && VN_INFO (use)->on_sccstack)
4534 	    {
4535 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4536 					 VN_INFO (name)->low);
4537 	    }
4538 	}
4539 
4540       usep = op_iter_next_use (&iter);
4541     }
4542 }
4543 
4544 /* Allocate a value number table.  */
4545 
4546 static void
4547 allocate_vn_table (vn_tables_t table)
4548 {
4549   table->phis = new vn_phi_table_type (23);
4550   table->nary = new vn_nary_op_table_type (23);
4551   table->references = new vn_reference_table_type (23);
4552 
4553   gcc_obstack_init (&table->nary_obstack);
4554   table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4555   table->references_pool = new object_allocator<vn_reference_s>
4556     ("VN references");
4557 }
4558 
4559 /* Free a value number table.  */
4560 
4561 static void
4562 free_vn_table (vn_tables_t table)
4563 {
4564   delete table->phis;
4565   table->phis = NULL;
4566   delete table->nary;
4567   table->nary = NULL;
4568   delete table->references;
4569   table->references = NULL;
4570   obstack_free (&table->nary_obstack, NULL);
4571   delete table->phis_pool;
4572   delete table->references_pool;
4573 }
4574 
4575 static void
4576 init_scc_vn (void)
4577 {
4578   int j;
4579   int *rpo_numbers_temp;
4580 
4581   calculate_dominance_info (CDI_DOMINATORS);
4582   mark_dfs_back_edges ();
4583 
4584   sccstack.create (0);
4585   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4586 
4587   constant_value_ids = BITMAP_ALLOC (NULL);
4588 
4589   next_dfs_num = 1;
4590   next_value_id = 1;
4591 
4592   vn_ssa_aux_table.create (num_ssa_names + 1);
4593   /* The create above doesn't actually grow the vector to the right
4594      size, it just preallocates the space to do so.  */
4595   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4596   gcc_obstack_init (&vn_ssa_aux_obstack);
4597 
4598   shared_lookup_phiargs.create (0);
4599   shared_lookup_references.create (0);
4600   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4601   rpo_numbers_temp =
4602     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4603   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4604 
4605   /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb->index
4606      means that the i'th block in RPO order is bb.  We want to map block
4607      indices to RPO numbers, so we need to invert this array.  */
4608   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4609     rpo_numbers[rpo_numbers_temp[j]] = j;
4610 
4611   XDELETE (rpo_numbers_temp);
4612 
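  /* VN_TOP is essentially only compared by pointer identity, so any
     distinguished tree node would do; a VAR_DECL gives it a printable
     name in dumps.  */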
4613   VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4614 		       get_identifier ("VN_TOP"), error_mark_node);
4615 
4616   renumber_gimple_stmt_uids ();
4617 
4618   /* Create the valid and optimistic value numbering tables.  */
4619   valid_info = XCNEW (struct vn_tables_s);
4620   allocate_vn_table (valid_info);
4621   optimistic_info = XCNEW (struct vn_tables_s);
4622   allocate_vn_table (optimistic_info);
4623   current_info = valid_info;
4624 
4625   /* Create the VN_INFO structures, and initialize value numbers to
4626      VN_TOP, or to VARYING for default definitions.  */
4627   size_t i;
4628   tree name;
4629 
4630   FOR_EACH_SSA_NAME (i, name, cfun)
4631     {
4632       VN_INFO_GET (name)->valnum = VN_TOP;
4633       VN_INFO (name)->needs_insertion = false;
4634       VN_INFO (name)->expr = NULL;
4635       VN_INFO (name)->value_id = 0;
4636 
4637       if (!SSA_NAME_IS_DEFAULT_DEF (name))
4638 	continue;
4639 
4640       switch (TREE_CODE (SSA_NAME_VAR (name)))
4641 	{
4642 	case VAR_DECL:
4643 	  /* All undefined vars are VARYING.  */
4644 	  VN_INFO (name)->valnum = name;
4645 	  VN_INFO (name)->visited = true;
4646 	  break;
4647 
4648 	case PARM_DECL:
4649 	  /* Parameters are VARYING but we can record a condition
4650 	     if we know it is a non-NULL pointer.  */
4651 	  VN_INFO (name)->visited = true;
4652 	  VN_INFO (name)->valnum = name;
4653 	  if (POINTER_TYPE_P (TREE_TYPE (name))
4654 	      && nonnull_arg_p (SSA_NAME_VAR (name)))
4655 	    {
4656 	      tree ops[2];
4657 	      ops[0] = name;
4658 	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
4659 	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4660 					boolean_true_node, 0);
4661 	      if (dump_file && (dump_flags & TDF_DETAILS))
4662 		{
4663 		  fprintf (dump_file, "Recording ");
4664 		  print_generic_expr (dump_file, name, TDF_SLIM);
4665 		  fprintf (dump_file, " != 0\n");
4666 		}
4667 	    }
4668 	  break;
4669 
4670 	case RESULT_DECL:
4671 	  /* If the result is passed by invisible reference the default
4672 	     def is initialized, otherwise it's uninitialized.  Either
4673 	     way it is treated as varying.  */
4674 	  VN_INFO (name)->visited = true;
4675 	  VN_INFO (name)->valnum = name;
4676 	  break;
4677 
4678 	default:
4679 	  gcc_unreachable ();
4680 	}
4681     }
4682 }
4683 
4684 /* Restore SSA info that has been reset on value leaders.  */
4685 
4686 void
4687 scc_vn_restore_ssa_info (void)
4688 {
4689   unsigned i;
4690   tree name;
4691 
4692   FOR_EACH_SSA_NAME (i, name, cfun)
4693     {
4694       if (has_VN_INFO (name))
4695 	{
4696 	  if (VN_INFO (name)->needs_insertion)
4697 	    ;
4698 	  else if (POINTER_TYPE_P (TREE_TYPE (name))
4699 		   && VN_INFO (name)->info.ptr_info)
4700 	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4701 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4702 		   && VN_INFO (name)->info.range_info)
4703 	    {
4704 	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4705 	      SSA_NAME_ANTI_RANGE_P (name)
4706 		= VN_INFO (name)->range_info_anti_range_p;
4707 	    }
4708 	}
4709     }
4710 }
4711 
4712 void
4713 free_scc_vn (void)
4714 {
4715   size_t i;
4716   tree name;
4717 
4718   delete constant_to_value_id;
4719   constant_to_value_id = NULL;
4720   BITMAP_FREE (constant_value_ids);
4721   shared_lookup_phiargs.release ();
4722   shared_lookup_references.release ();
4723   XDELETEVEC (rpo_numbers);
4724 
4725   FOR_EACH_SSA_NAME (i, name, cfun)
4726     {
4727       if (has_VN_INFO (name)
4728 	  && VN_INFO (name)->needs_insertion)
4729 	release_ssa_name (name);
4730     }
4731   obstack_free (&vn_ssa_aux_obstack, NULL);
4732   vn_ssa_aux_table.release ();
4733 
4734   sccstack.release ();
4735   free_vn_table (valid_info);
4736   XDELETE (valid_info);
4737   free_vn_table (optimistic_info);
4738   XDELETE (optimistic_info);
4739 
4740   BITMAP_FREE (const_parms);
4741 }
4742 
4743 /* Set *ID according to RESULT.  */
4744 
4745 static void
4746 set_value_id_for_result (tree result, unsigned int *id)
4747 {
4748   if (result && TREE_CODE (result) == SSA_NAME)
4749     *id = VN_INFO (result)->value_id;
4750   else if (result && is_gimple_min_invariant (result))
4751     *id = get_or_alloc_constant_value_id (result);
4752   else
4753     *id = get_next_value_id ();
4754 }
4755 
4756 /* Set the value ids in the valid hash tables.  */
4757 
4758 static void
4759 set_hashtable_value_ids (void)
4760 {
4761   vn_nary_op_iterator_type hin;
4762   vn_phi_iterator_type hip;
4763   vn_reference_iterator_type hir;
4764   vn_nary_op_t vno;
4765   vn_reference_t vr;
4766   vn_phi_t vp;
4767 
4768   /* Now set the value ids of the things we had put in the hash
4769      table.  */
4770 
4771   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4772     set_value_id_for_result (vno->result, &vno->value_id);
4773 
4774   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4775     set_value_id_for_result (vp->result, &vp->value_id);
4776 
4777   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4778 			       hir)
4779     set_value_id_for_result (vr->result, &vr->value_id);
4780 }
4781 
4782 class sccvn_dom_walker : public dom_walker
4783 {
4784 public:
4785   sccvn_dom_walker ()
4786     : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4787 
4788   virtual edge before_dom_children (basic_block);
4789   virtual void after_dom_children (basic_block);
4790 
4791   void record_cond (basic_block,
4792 		    enum tree_code code, tree lhs, tree rhs, bool value);
4793   void record_conds (basic_block,
4794 		     enum tree_code code, tree lhs, tree rhs, bool value);
4795 
4796   auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4797     cond_stack;
4798 };
4799 
4800 /* Record a temporary condition for the BB and its dominated blocks.  */
4801 
4802 void
4803 sccvn_dom_walker::record_cond (basic_block bb,
4804 			       enum tree_code code, tree lhs, tree rhs,
4805 			       bool value)
4806 {
4807   tree ops[2] = { lhs, rhs };
4808   vn_nary_op_t old = NULL;
4809   if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4810     current_info->nary->remove_elt_with_hash (old, old->hashcode);
4811   vn_nary_op_t cond
4812     = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4813 				value
4814 				? boolean_true_node
4815 				: boolean_false_node, 0);
4816   if (dump_file && (dump_flags & TDF_DETAILS))
4817     {
4818       fprintf (dump_file, "Recording temporarily ");
4819       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4820       fprintf (dump_file, " %s ", get_tree_code_name (code));
4821       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4822       fprintf (dump_file, " == %s%s\n",
4823 	       value ? "true" : "false",
4824 	       old ? " (old entry saved)" : "");
4825     }
4826   cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4827 }
4828 
4829 /* Record temporary conditions for the BB and its dominated blocks
4830    according to LHS CODE RHS == VALUE and its dominated conditions.  */
4831 
4832 void
record_conds(basic_block bb,enum tree_code code,tree lhs,tree rhs,bool value)4833 sccvn_dom_walker::record_conds (basic_block bb,
4834 				enum tree_code code, tree lhs, tree rhs,
4835 				bool value)
4836 {
4837   /* Record the original condition.  */
4838   record_cond (bb, code, lhs, rhs, value);
4839 
4840   if (!value)
4841     return;
4842 
4843   /* Record dominated conditions if the condition is true.  Note that
4844      the inversion is already recorded.  */
4845   switch (code)
4846     {
4847     case LT_EXPR:
4848     case GT_EXPR:
4849       record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4850       record_cond (bb, NE_EXPR, lhs, rhs, true);
4851       record_cond (bb, EQ_EXPR, lhs, rhs, false);
4852       break;
4853 
4854     case EQ_EXPR:
4855       record_cond (bb, LE_EXPR, lhs, rhs, true);
4856       record_cond (bb, GE_EXPR, lhs, rhs, true);
4857       record_cond (bb, LT_EXPR, lhs, rhs, false);
4858       record_cond (bb, GT_EXPR, lhs, rhs, false);
4859       break;
4860 
4861     default:
4862       break;
4863     }
4864 }
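
/* For illustration (a sketch, not exercised by code in this file): if
   the edge into a block is guarded by

     if (a_1 < b_2)

   taken in the true direction, record_conds registers a_1 < b_2 == true
   together with the dominated predicates

     a_1 <= b_2 == true
     a_1 != b_2 == true
     a_1 == b_2 == false

   so that later lookups of any of those forms simplify directly.  */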

/* Restore expressions and values derived from conditionals.  */

void
sccvn_dom_walker::after_dom_children (basic_block bb)
{
  while (!cond_stack.is_empty ()
	 && cond_stack.last ().first == bb)
    {
      vn_nary_op_t cond = cond_stack.last ().second.first;
      vn_nary_op_t old = cond_stack.last ().second.second;
      current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
      if (old)
	vn_nary_op_insert_into (old, current_info->nary, false);
      cond_stack.pop ();
    }
}

/* Value number all statements in BB.  */

edge
sccvn_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB %d\n", bb->index);

  /* If we have a single predecessor record the equivalence from a
     possible condition on the predecessor edge.  */
  edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
  if (pred_e)
    {
      /* Check if there are multiple executable successor edges in
	 the source block.  Otherwise there is no additional info
	 to be recorded.  */
      edge_iterator ei;
      edge e2;
      FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
	if (e2 != pred_e
	    && e2->flags & EDGE_EXECUTABLE)
	  break;
      if (e2 && (e2->flags & EDGE_EXECUTABLE))
	{
	  gimple *stmt = last_stmt (pred_e->src);
	  if (stmt
	      && gimple_code (stmt) == GIMPLE_COND)
	    {
	      enum tree_code code = gimple_cond_code (stmt);
	      tree lhs = gimple_cond_lhs (stmt);
	      tree rhs = gimple_cond_rhs (stmt);
	      record_conds (bb, code, lhs, rhs,
			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
	      if (code != ERROR_MARK)
		record_conds (bb, code, lhs, rhs,
			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
	    }
	}
    }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      if (!VN_INFO (res)->visited)
	DFS (res);
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	if (!VN_INFO (op)->visited)
	  DFS (op);
    }

  /* Finally look at the last stmt.  */
  gimple *stmt = last_stmt (bb);
  if (!stmt)
    return NULL;

  enum gimple_code code = gimple_code (stmt);
  if (code != GIMPLE_COND
      && code != GIMPLE_SWITCH
      && code != GIMPLE_GOTO)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
      print_gimple_stmt (dump_file, stmt, 0);
    }

  /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
     if value-numbering can prove they are not reachable.  Handling
     computed gotos is also possible.  */
  tree val;
  switch (code)
    {
    case GIMPLE_COND:
      {
	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
	val = gimple_simplify (gimple_cond_code (stmt),
			       boolean_type_node, lhs, rhs,
			       NULL, vn_valueize);
	/* If that didn't simplify to a constant see if we have recorded
	   temporary expressions from taken edges.  */
	if (!val || TREE_CODE (val) != INTEGER_CST)
	  {
	    tree ops[2];
	    ops[0] = lhs;
	    ops[1] = rhs;
	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
					    boolean_type_node, ops, NULL);
	  }
	break;
      }
    case GIMPLE_SWITCH:
      val = gimple_switch_index (as_a <gswitch *> (stmt));
      break;
    case GIMPLE_GOTO:
      val = gimple_goto_dest (stmt);
      break;
    default:
      gcc_unreachable ();
    }
  if (!val)
    return NULL;

  edge taken = find_taken_edge (bb, vn_valueize (val));
  if (!taken)
    return NULL;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
	     "not executable\n", bb->index, bb->index, taken->dest->index);

  return taken;
}
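
/* A rough example of the edge pruning above (illustrative only): given

     if (x_1 > 10)
       {
	 if (x_1 != 10)
	   ...
       }

   visiting the true successor records x_1 > 10 == true together with
   the dominated condition x_1 != 10 == true, so the lookup for the
   inner GIMPLE_COND yields boolean_true_node, find_taken_edge resolves
   it and the false edge gets marked as not executable.  */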

/* Do SCCVN.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
   oracle walking during the VN process.  */

void
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();

  /* Collect pointers we know point to readonly memory.  */
  const_parms = BITMAP_ALLOC (NULL);
  tree fnspec = lookup_attribute ("fn spec",
				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
  if (fnspec)
    {
      fnspec = TREE_VALUE (TREE_VALUE (fnspec));
      i = 1;
      for (tree arg = DECL_ARGUMENTS (cfun->decl);
	   arg; arg = DECL_CHAIN (arg), ++i)
	{
	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
	    {
	      tree name = ssa_default_def (cfun, arg);
	      if (name)
		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
	    }
	}
    }
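
  /* For example (illustrative only): an internal "fn spec" string like
     ".R" describes the return value with character 0 and the first
     parameter with character 1, which is why the index starts at 1
     above; 'R' or 'r' say the parameter points to memory that is only
     read, so its default definition ends up in CONST_PARMS.  */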

  /* Walk all blocks in dominator order, value-numbering stmts
     SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, "\n");
	    }
	}
    }
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
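
/* Treating VN_TOP as equal to anything is what lets optimistic
   iteration over SSA cycles converge.  Roughly, in

     # x_1 = PHI <0(entry), x_2(latch)>
     x_2 = x_1;

   the backedge argument starts out as VN_TOP, the PHI optimistically
   gets the value 0 and the next iteration confirms it, instead of
   immediately declaring x_1 varying.  */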


/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}


class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  tree eliminate_avail (tree op);
  void eliminate_push_avail (tree op);
  tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	/* The following are the address bases.  */
	case SSA_NAME:
	  return true;
	case ADDR_EXPR:
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}

/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}

/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
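
/* To illustrate the availability tracking (a sketch with hypothetical
   names): AVAIL is indexed by the SSA version of the value number and
   holds the current leader, AVAIL_STACK records what to undo.  If i_3
   is the leader for a value and j_7 with the same value is pushed, the
   shadowed i_3 goes onto AVAIL_STACK and j_7 becomes the leader; if
   there was no previous leader, the pushed name itself is saved.  The
   unwind in after_dom_children below pops entries up to the NULL_TREE
   block marker, clearing the slot when the popped entry is still the
   current leader and re-installing the shadowed leader otherwise, so
   leaders stay valid for the current dominator path only.  */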

/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }
  else
    {
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      VN_INFO_GET (res)->valnum = val;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
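
/* A minimal example of eliminate_insert (hypothetical names): if SCCVN
   recorded that value VAL is computed by the not-yet-inserted expression

     _tmp = (unsigned int) x_1;

   and x_1 has an available leader, the conversion is materialized right
   before *GSI and the caller then makes the new SSA name the leader for
   VAL via eliminate_push_avail.  */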


/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->preds)
    if (e->flags & EDGE_EXECUTABLE)
      break;
  if (! e)
    return NULL;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourself, it's not an elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree sprime = NULL_TREE;
      gimple *stmt = gsi_stmt (gsi);
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME
	  && !gimple_has_volatile_ops (stmt)
	  /* See PR43491.  Do not replace a global register variable when
	     it is the RHS of an assignment.  Do replace local register
	     variables since gcc does not guarantee a local variable will
	     be allocated in a register.
	     ???  The fix isn't effective here.  This should instead
	     be ensured by not value-numbering them the same but treating
	     them like volatiles?  */
	  && !(gimple_assign_single_p (stmt)
	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
		   && is_global_var (gimple_assign_rhs1 (stmt)))))
	{
	  sprime = eliminate_avail (lhs);
	  if (!sprime)
	    {
	      /* If there is no existing usable leader but SCCVN thinks
		 it has an expression it wants to use as replacement,
		 insert that.  */
	      tree val = VN_INFO (lhs)->valnum;
	      if (val != VN_TOP
		  && TREE_CODE (val) == SSA_NAME
		  && VN_INFO (val)->needs_insertion
		  && VN_INFO (val)->expr != NULL
		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
		eliminate_push_avail (sprime);
	    }

	  /* If this now constitutes a copy duplicate points-to
	     and range info appropriately.  This is especially
	     important for inserted code.  See tree-ssa-copy.c
	     for similar code.  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME)
	    {
	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && VN_INFO_PTR_INFO (lhs)
		  && ! VN_INFO_PTR_INFO (sprime))
		{
		  duplicate_ssa_name_ptr_info (sprime,
					       VN_INFO_PTR_INFO (lhs));
		  if (b != sprime_b)
		    mark_ptr_info_alignment_unknown
			(SSA_NAME_PTR_INFO (sprime));
		}
	      else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		       && VN_INFO_RANGE_INFO (lhs)
		       && ! VN_INFO_RANGE_INFO (sprime)
		       && b == sprime_b)
		duplicate_ssa_name_range_info (sprime,
					       VN_INFO_RANGE_TYPE (lhs),
					       VN_INFO_RANGE_INFO (lhs));
	    }

	  /* Inhibit the use of an inserted PHI on a loop header when
	     the address of the memory reference is a simple induction
	     variable.  In other cases the vectorizer won't do anything
	     anyway (either it's loop invariant or a complicated
	     expression).  */
	  if (sprime
	      && TREE_CODE (sprime) == SSA_NAME
	      && do_pre
	      && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	      && loop_outer (b->loop_father)
	      && has_zero_uses (sprime)
	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	      && gimple_assign_load_p (stmt))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	      basic_block def_bb = gimple_bb (def_stmt);
	      if (gimple_code (def_stmt) == GIMPLE_PHI
		  && def_bb->loop_father->header == def_bb)
		{
		  loop_p loop = def_bb->loop_father;
		  ssa_op_iter iter;
		  tree op;
		  bool found = false;
		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		    {
		      affine_iv iv;
		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		      if (def_bb
			  && flow_bb_inside_loop_p (loop, def_bb)
			  && simple_iv (loop, loop, op, &iv, true))
			{
			  found = true;
			  break;
			}
		    }
		  if (found)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Not replacing ");
			  print_gimple_expr (dump_file, stmt, 0);
			  fprintf (dump_file, " with ");
			  print_generic_expr (dump_file, sprime);
			  fprintf (dump_file, " which would add a loop"
				   " carried dependence to loop %d\n",
				   loop->num);
			}
		      /* Don't keep sprime available.  */
		      sprime = NULL_TREE;
		    }
		}
	    }

	  if (sprime)
	    {
	      /* If we can propagate the value computed for LHS into
		 all uses don't bother doing anything with this stmt.  */
	      if (may_propagate_copy (lhs, sprime))
		{
		  /* Mark it for removal.  */
		  to_remove.safe_push (stmt);

		  /* ???  Don't count copy/constant propagations.  */
		  if (gimple_assign_single_p (stmt)
		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
			  || gimple_assign_rhs1 (stmt) == sprime))
		    continue;

		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " in all uses of ");
		      print_gimple_stmt (dump_file, stmt, 0);
		    }

		  eliminations++;
		  continue;
		}

	      /* If this is an assignment from our leader (which
		 happens in the case the value-number is a constant)
		 then there is nothing to do.  */
	      if (gimple_assign_single_p (stmt)
		  && sprime == gimple_assign_rhs1 (stmt))
		continue;

	      /* Else replace its RHS.  */
	      bool can_make_abnormal_goto
		  = is_gimple_call (stmt)
		  && stmt_can_make_abnormal_goto (stmt);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      gimple *orig_stmt = stmt;
	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
					      TREE_TYPE (sprime)))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      tree vdef = gimple_vdef (stmt);
	      tree vuse = gimple_vuse (stmt);
	      propagate_tree_value_into_stmt (&gsi, sprime);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
	      if (vdef != gimple_vdef (stmt))
		VN_INFO (vdef)->valnum = vuse;

	      /* If we removed EH side-effects from the statement, clean
		 its EH information.  */
	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
		{
		  bitmap_set_bit (need_eh_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed EH side-effects.\n");
		}

	      /* Likewise for AB side-effects.  */
	      if (can_make_abnormal_goto
		  && !stmt_can_make_abnormal_goto (stmt))
		{
		  bitmap_set_bit (need_ab_cleanup,
				  gimple_bb (stmt)->index);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "  Removed AB side-effects.\n");
		}

	      continue;
	    }
	}

      /* If the statement is a scalar store, see if the expression
	 has the same value number as its rhs.  If so, the store is
	 dead.  */
      if (gimple_assign_single_p (stmt)
	  && !gimple_has_volatile_ops (stmt)
	  && !is_gimple_reg (gimple_assign_lhs (stmt))
	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
	{
	  tree val;
	  tree rhs = gimple_assign_rhs1 (stmt);
	  vn_reference_t vnresult;
	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
				     &vnresult, false);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = VN_INFO (rhs)->valnum;
	  if (val
	      && operand_equal_p (val, rhs, 0))
	    {
	      /* We can only remove the later store if the former aliases
		 at least all accesses the later one does or if the store
		 was to readonly memory storing the same value.  */
	      alias_set_type set = get_alias_set (lhs);
	      if (! vnresult
		  || vnresult->set == set
		  || alias_set_subset_of (set, vnresult->set))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Deleted redundant store ");
		      print_gimple_stmt (dump_file, stmt, 0);
		    }

		  /* Queue stmt for removal.  */
		  to_remove.safe_push (stmt);
		  continue;
		}
	    }
	}
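
      /* For example (illustrative, assuming no aliasing store in
	 between):

	   *p_1 = x_2;
	   ...
	   *p_1 = x_2;

	 the second store looks up the current value of *p_1, finds it
	 equal to x_2's value number and is queued for removal above.  */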

      /* If this is a control statement where value numbering left some
	 outgoing edges unexecuted, force the condition to be consistent
	 with that.  */
      if (gcond *cond = dyn_cast <gcond *> (stmt))
	{
	  if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	      ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Removing unexecutable edge from ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}
	      if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
		  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
		gimple_cond_make_true (cond);
	      else
		gimple_cond_make_false (cond);
	      update_stmt (cond);
	      el_todo |= TODO_cleanup_cfg;
	      continue;
	    }
	}

      bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
      bool was_noreturn = (is_gimple_call (stmt)
			   && gimple_call_noreturn_p (stmt));
      tree vdef = gimple_vdef (stmt);
      tree vuse = gimple_vuse (stmt);

      /* If we didn't replace the whole stmt (or propagate the result
	 into all uses), replace all uses on this stmt with their
	 leaders.  */
      bool modified = false;
      use_operand_p use_p;
      ssa_op_iter iter;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  /* ???  The call code above leaves stmt operands un-updated.  */
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  tree sprime = eliminate_avail (use);
	  if (sprime && sprime != use
	      && may_propagate_copy (use, sprime)
	      /* We substitute into debug stmts to avoid excessive
		 debug temporaries created by removed stmts, but we need
		 to avoid doing so for inserted sprimes as we never want
		 to create debug temporaries for them.  */
	      && (!inserted_exprs
		  || TREE_CODE (sprime) != SSA_NAME
		  || !is_gimple_debug (stmt)
		  || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
	    {
	      propagate_value (use_p, sprime);
	      modified = true;
	    }
	}

      /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
	 into which is a requirement for the IPA devirt machinery.  */
      gimple *old_stmt = stmt;
      if (modified)
	{
	  /* If a formerly non-invariant ADDR_EXPR is turned into an
	     invariant one it was on a separate stmt.  */
	  if (gimple_assign_single_p (stmt)
	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
	  gimple_stmt_iterator prev = gsi;
	  gsi_prev (&prev);
	  if (fold_stmt (&gsi))
	    {
	      /* fold_stmt may have created new stmts in between
		 the previous stmt and the folded stmt.  Mark
		 all defs created there as varying to not confuse
		 the SCCVN machinery as we're using that even during
		 elimination.  */
	      if (gsi_end_p (prev))
		prev = gsi_start_bb (b);
	      else
		gsi_next (&prev);
	      if (gsi_stmt (prev) != gsi_stmt (gsi))
		do
		  {
		    tree def;
		    ssa_op_iter dit;
		    FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					       dit, SSA_OP_ALL_DEFS)
		      /* As existing DEFs may move between stmts
			 we have to guard VN_INFO_GET.  */
		      if (! has_VN_INFO (def))
			VN_INFO_GET (def)->valnum = def;
		    if (gsi_stmt (prev) == gsi_stmt (gsi))
		      break;
		    gsi_next (&prev);
		  }
		while (1);
	    }
	  stmt = gsi_stmt (gsi);
	  /* In case we folded the stmt away schedule the NOP for removal.  */
	  if (gimple_nop_p (stmt))
	    to_remove.safe_push (stmt);
	}

      /* Visit indirect calls and turn them into direct calls if
	 possible using the devirtualization machinery.  Do this before
	 checking for required EH/abnormal/noreturn cleanup as devirt
	 may expose more of those.  */
      if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
	{
	  tree fn = gimple_call_fn (call_stmt);
	  if (fn
	      && flag_devirtualize
	      && virtual_method_call_p (fn))
	    {
	      tree otr_type = obj_type_ref_class (fn);
	      unsigned HOST_WIDE_INT otr_tok
		= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	      tree instance;
	      ipa_polymorphic_call_context context (current_function_decl,
						    fn, stmt, &instance);
	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
					otr_type, stmt);
	      bool final;
	      vec <cgraph_node *> targets
		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
						     otr_tok, context, &final);
	      if (dump_file)
		dump_possible_polymorphic_call_targets (dump_file,
							obj_type_ref_class (fn),
							otr_tok, context);
	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
		{
		  tree fn;
		  if (targets.length () == 1)
		    fn = targets[0]->decl;
		  else
		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  if (dump_enabled_p ())
		    {
		      location_t loc = gimple_location (stmt);
		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				       "converting indirect call to "
				       "function %s\n",
				       lang_hooks.decl_printable_name (fn, 2));
		    }
		  gimple_call_set_fndecl (call_stmt, fn);
		  /* If changing the call to __builtin_unreachable
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (call_stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fn))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
			  == void_type_node))
		    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
		  maybe_remove_unused_call_args (cfun, call_stmt);
		  modified = true;
		}
	    }
	}

      if (modified)
	{
	  /* When changing a call into a noreturn call, cfg cleanup
	     is needed to fix up the noreturn call.  */
	  if (!was_noreturn
	      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	    to_fixup.safe_push (stmt);
	  /* When changing a condition or switch into one we know what
	     edge will be executed, schedule a cfg cleanup.  */
	  if ((gimple_code (stmt) == GIMPLE_COND
	       && (gimple_cond_true_p (as_a <gcond *> (stmt))
		   || gimple_cond_false_p (as_a <gcond *> (stmt))))
	      || (gimple_code (stmt) == GIMPLE_SWITCH
		  && TREE_CODE (gimple_switch_index
				  (as_a <gswitch *> (stmt))) == INTEGER_CST))
	    el_todo |= TODO_cleanup_cfg;
	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }
	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }
	  update_stmt (stmt);
	  if (vdef != gimple_vdef (stmt))
	    VN_INFO (vdef)->valnum = vuse;
	}

      /* Make new values available - for fully redundant LHS we
	 continue with the next stmt above and skip this.  */
      def_operand_p defp;
      FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
	eliminate_push_avail (DEF_FROM_PTR (defp));
    }

  /* Replace destination PHI arguments.  */
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}
  return NULL;
}

/* Unwind the avail stack, making leaders that are no longer valid at
   this point of the walk no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}

/* Eliminate fully redundant computations.  */

unsigned int
vn_eliminate (bitmap inserted_exprs)
{
  eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
  el.avail.reserve (num_ssa_names);

  el.walk (cfun->cfg->x_entry_block_ptr);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!el.to_remove.is_empty ())
    {
      gimple *stmt = el.to_remove.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, true);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (el.need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (el.need_ab_cleanup, bb->index);
	  release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el.el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!el.to_fixup.is_empty ())
    {
      gimple *stmt = el.to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el.el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el.el_todo |= TODO_cleanup_cfg;

  statistics_counter_event (cfun, "Eliminated", el.eliminations);
  statistics_counter_event (cfun, "Insertions", el.insertions);

  return el.el_todo;
}


namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *)
{
  unsigned int todo = 0;

  run_scc_vn (VN_WALKREWRITE);

  /* Remove all the redundant expressions.  */
  todo |= vn_eliminate (NULL);

  scc_vn_restore_ssa_info ();
  free_scc_vn ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}