1 /* SCC value numbering for trees
2    Copyright (C) 2006-2018 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-sccvn.h"
73 
74 /* This algorithm is based on the SCC algorithm presented by Keith
75    Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
76    (http://citeseer.ist.psu.edu/41805.html).  In
77    straight-line code, it is equivalent to a regular hash-based value
78    numbering that is performed in reverse postorder.
79 
80    For code with cycles, there are two alternatives, both of which
81    require keeping the hashtables separate from the actual list of
82    value numbers for SSA names.
83 
84    1. Iterate value numbering in an RPO walk of the blocks, removing
85    all the entries from the hashtable after each iteration (but
86    keeping the SSA name->value number mapping between iterations).
87    Iterate until it does not change.
88 
89    2. Perform value numbering as part of an SCC walk on the SSA graph,
90    iterating only the cycles in the SSA graph until they do not change
91    (using a separate, optimistic hashtable for value numbering the SCC
92    operands).
93 
94    The second is not just faster in practice (because most SSA graph
95    cycles do not involve all the variables in the graph), it also has
96    some nice properties.
97 
98    One of these nice properties is that when we pop an SCC off the
99    stack, we are guaranteed to have processed all the operands coming from
100    *outside of that SCC*, so we do not need to do anything special to
101    ensure they have value numbers.
102 
103    Another nice property is that the SCC walk is done as part of a DFS
104    of the SSA graph, which makes it easy to perform combining and
105    simplifying operations at the same time.
106 
107    The code below is deliberately written in a way that makes it easy
108    to separate the SCC walk from the other work it does.
109 
110    In order to propagate constants through the code, we track which
111    expressions contain constants, and use those while folding.  In
112    theory, we could also track expressions whose value numbers are
113    replaced, in case we end up folding based on expression
114    identities.
115 
116    In order to value number memory, we assign value numbers to vuses.
117    This enables us to note that, for example, stores to the same
118    address of the same value from the same starting memory states are
119    equivalent.
120    TODO:
121 
122    1. We can iterate only the changing portions of the SCCs, but
123    I have not seen an SCC big enough for this to be a win.
124    2. If you differentiate between phi nodes for loops and phi nodes
125    for if-then-else, you can properly consider phi nodes in different
126    blocks for equivalence.
127    3. We could value number vuses in more cases, particularly, whole
128    structure copies.
129 */
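
/* As an illustrative sketch (made-up names, not taken from the paper
   cited above): for the loop-carried cycle

	 # i_1 = PHI <0(entry), i_3(latch)>
	 i_3 = i_1 + 0;

   the names i_1 and i_3 form a single SCC in the SSA graph.  The first
   optimistic iteration ignores the not-yet-visited backedge value
   (VN_TOP), so i_1 is optimistically valued 0, which folds i_3 to 0 as
   well; the next iteration confirms the assumption and both names end
   up with value number 0.  A single non-iterating, pessimistic pass
   could not break this cycle.  */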
130 
131 
132 static tree *last_vuse_ptr;
133 static vn_lookup_kind vn_walk_kind;
134 static vn_lookup_kind default_vn_walk_kind;
135 bitmap const_parms;
136 
137 /* vn_nary_op hashtable helpers.  */
138 
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 {
141   typedef vn_nary_op_s *compare_type;
142   static inline hashval_t hash (const vn_nary_op_s *);
143   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144 };
145 
146 /* Return the computed hashcode for nary operation VNO1.  */
147 
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 {
151   return vno1->hashcode;
152 }
153 
154 /* Compare nary operations VNO1 and VNO2 and return true if they are
155    equivalent.  */
156 
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 {
160   return vn_nary_op_eq (vno1, vno2);
161 }
162 
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165 
166 
167 /* vn_phi hashtable helpers.  */
168 
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 
172 struct vn_phi_hasher : pointer_hash <vn_phi_s>
173 {
174   static inline hashval_t hash (const vn_phi_s *);
175   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176   static inline void remove (vn_phi_s *);
177 };
178 
179 /* Return the computed hashcode for phi operation VP1.  */
180 
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 {
184   return vp1->hashcode;
185 }
186 
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
188 
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 {
192   return vn_phi_eq (vp1, vp2);
193 }
194 
195 /* Free a phi operation structure PHI.  */
196 
197 inline void
198 vn_phi_hasher::remove (vn_phi_s *phi)
199 {
200   phi->phiargs.release ();
201 }
202 
203 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
204 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
205 
206 
207 /* Compare two reference operands P1 and P2 for equality.  Return true if
208    they are equal, and false otherwise.  */
209 
210 static int
211 vn_reference_op_eq (const void *p1, const void *p2)
212 {
213   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
214   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
215 
216   return (vro1->opcode == vro2->opcode
217 	  /* We do not care for differences in type qualification.  */
218 	  && (vro1->type == vro2->type
219 	      || (vro1->type && vro2->type
220 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
221 					 TYPE_MAIN_VARIANT (vro2->type))))
222 	  && expressions_equal_p (vro1->op0, vro2->op0)
223 	  && expressions_equal_p (vro1->op1, vro2->op1)
224 	  && expressions_equal_p (vro1->op2, vro2->op2));
225 }
226 
227 /* Free a reference operation structure VR.  */
228 
229 static inline void
230 free_reference (vn_reference_s *vr)
231 {
232   vr->operands.release ();
233 }
234 
235 
236 /* vn_reference hashtable helpers.  */
237 
238 struct vn_reference_hasher : pointer_hash <vn_reference_s>
239 {
240   static inline hashval_t hash (const vn_reference_s *);
241   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
242   static inline void remove (vn_reference_s *);
243 };
244 
245 /* Return the hashcode for a given reference operation VR1.  */
246 
247 inline hashval_t
248 vn_reference_hasher::hash (const vn_reference_s *vr1)
249 {
250   return vr1->hashcode;
251 }
252 
253 inline bool
254 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
255 {
256   return vn_reference_eq (v, c);
257 }
258 
259 inline void
260 vn_reference_hasher::remove (vn_reference_s *v)
261 {
262   free_reference (v);
263 }
264 
265 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
266 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
267 
268 
269 /* The set of hashtables and alloc_pool's for their items.  */
270 
271 typedef struct vn_tables_s
272 {
273   vn_nary_op_table_type *nary;
274   vn_phi_table_type *phis;
275   vn_reference_table_type *references;
276   struct obstack nary_obstack;
277   object_allocator<vn_phi_s> *phis_pool;
278   object_allocator<vn_reference_s> *references_pool;
279 } *vn_tables_t;
280 
281 
282 /* vn_constant hashtable helpers.  */
283 
284 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
285 {
286   static inline hashval_t hash (const vn_constant_s *);
287   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
288 };
289 
290 /* Hash table hash function for vn_constant_t.  */
291 
292 inline hashval_t
293 vn_constant_hasher::hash (const vn_constant_s *vc1)
294 {
295   return vc1->hashcode;
296 }
297 
298 /* Hash table equality function for vn_constant_t.  */
299 
300 inline bool
301 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
302 {
303   if (vc1->hashcode != vc2->hashcode)
304     return false;
305 
306   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
307 }
308 
309 static hash_table<vn_constant_hasher> *constant_to_value_id;
310 static bitmap constant_value_ids;
311 
312 
313 /* Valid hashtables storing information we have proven to be
314    correct.  */
315 
316 static vn_tables_t valid_info;
317 
318 /* Optimistic hashtables storing information we are making assumptions about
319    during iterations.  */
320 
321 static vn_tables_t optimistic_info;
322 
323 /* Pointer to the set of hashtables that is currently being used.
324    Should always point to either the optimistic_info, or the
325    valid_info.  */
326 
327 static vn_tables_t current_info;
328 
329 
330 /* Reverse post order index for each basic block.  */
331 
332 static int *rpo_numbers;
333 
334 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
335 
336 /* Return the SSA value of the VUSE x, supporting released VDEFs
337    during elimination which will value-number the VDEF to the
338    associated VUSE (but not substitute in the whole lattice).  */
339 
340 static inline tree
341 vuse_ssa_val (tree x)
342 {
343   if (!x)
344     return NULL_TREE;
345 
346   do
347     {
348       tree tem = SSA_VAL (x);
349       /* stmt walking can walk over a backedge and reach code we didn't
350 	 value-number yet.  */
351       if (tem == VN_TOP)
352 	return x;
353       x = tem;
354     }
355   while (SSA_NAME_IN_FREE_LIST (x));
356 
357   return x;
358 }
359 
360 /* This represents the top of the VN lattice, which is the universal
361    value.  */
362 
363 tree VN_TOP;
364 
365 /* Unique counter for our value ids.  */
366 
367 static unsigned int next_value_id;
368 
369 /* Next DFS number and the stack for strongly connected component
370    detection. */
371 
372 static unsigned int next_dfs_num;
373 static vec<tree> sccstack;
374 
375 
376 
377 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
378    are allocated on an obstack for locality reasons, and to free them
379    without looping over the vec.  */
380 
381 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
382 static struct obstack vn_ssa_aux_obstack;
383 
384 /* Return whether there is value numbering information for a given SSA name.  */
385 
386 bool
387 has_VN_INFO (tree name)
388 {
389   if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
390     return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
391   return false;
392 }
393 
394 /* Return the value numbering information for a given SSA name.  */
395 
396 vn_ssa_aux_t
397 VN_INFO (tree name)
398 {
399   vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
400   gcc_checking_assert (res);
401   return res;
402 }
403 
404 /* Set the value numbering info for a given SSA name to a given
405    value.  */
406 
407 static inline void
408 VN_INFO_SET (tree name, vn_ssa_aux_t value)
409 {
410   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
411 }
412 
413 /* Initialize the value numbering info for a given SSA name.
414    This should be called just once for every SSA name.  */
415 
416 vn_ssa_aux_t
417 VN_INFO_GET (tree name)
418 {
419   vn_ssa_aux_t newinfo;
420 
421   gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
422 	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
423   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
424   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
425   if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
426     vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
427   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
428   return newinfo;
429 }
430 
431 
432 /* Return the vn_kind the expression computed by the stmt should be
433    associated with.  */
434 
435 enum vn_kind
436 vn_get_stmt_kind (gimple *stmt)
437 {
438   switch (gimple_code (stmt))
439     {
440     case GIMPLE_CALL:
441       return VN_REFERENCE;
442     case GIMPLE_PHI:
443       return VN_PHI;
444     case GIMPLE_ASSIGN:
445       {
446 	enum tree_code code = gimple_assign_rhs_code (stmt);
447 	tree rhs1 = gimple_assign_rhs1 (stmt);
448 	switch (get_gimple_rhs_class (code))
449 	  {
450 	  case GIMPLE_UNARY_RHS:
451 	  case GIMPLE_BINARY_RHS:
452 	  case GIMPLE_TERNARY_RHS:
453 	    return VN_NARY;
454 	  case GIMPLE_SINGLE_RHS:
455 	    switch (TREE_CODE_CLASS (code))
456 	      {
457 	      case tcc_reference:
458 		/* VOP-less references can go through unary case.  */
459 		if ((code == REALPART_EXPR
460 		     || code == IMAGPART_EXPR
461 		     || code == VIEW_CONVERT_EXPR
462 		     || code == BIT_FIELD_REF)
463 		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
464 		  return VN_NARY;
465 
466 		/* Fallthrough.  */
467 	      case tcc_declaration:
468 		return VN_REFERENCE;
469 
470 	      case tcc_constant:
471 		return VN_CONSTANT;
472 
473 	      default:
474 		if (code == ADDR_EXPR)
475 		  return (is_gimple_min_invariant (rhs1)
476 			  ? VN_CONSTANT : VN_REFERENCE);
477 		else if (code == CONSTRUCTOR)
478 		  return VN_NARY;
479 		return VN_NONE;
480 	      }
481 	  default:
482 	    return VN_NONE;
483 	  }
484       }
485     default:
486       return VN_NONE;
487     }
488 }
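
/* For instance (an illustrative sketch with made-up names, not an
   exhaustive list), the classification above maps

	 x_1 = y_2 + z_3;	   to VN_NARY,
	 x_1 = MEM[p_2];	   to VN_REFERENCE,
	 x_1 = PHI <a_2, b_3>	   to VN_PHI,
	 x_1 = 42;		   to VN_CONSTANT,

   and anything it cannot classify to VN_NONE.  */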
489 
490 /* Lookup a value id for CONSTANT and return it.  If it does not
491    exist, return 0.  */
492 
493 unsigned int
494 get_constant_value_id (tree constant)
495 {
496   vn_constant_s **slot;
497   struct vn_constant_s vc;
498 
499   vc.hashcode = vn_hash_constant_with_type (constant);
500   vc.constant = constant;
501   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
502   if (slot)
503     return (*slot)->value_id;
504   return 0;
505 }
506 
507 /* Lookup a value id for CONSTANT, and if it does not exist, create a
508    new one and return it.  If it does exist, return it.  */
509 
510 unsigned int
511 get_or_alloc_constant_value_id (tree constant)
512 {
513   vn_constant_s **slot;
514   struct vn_constant_s vc;
515   vn_constant_t vcp;
516 
517   vc.hashcode = vn_hash_constant_with_type (constant);
518   vc.constant = constant;
519   slot = constant_to_value_id->find_slot (&vc, INSERT);
520   if (*slot)
521     return (*slot)->value_id;
522 
523   vcp = XNEW (struct vn_constant_s);
524   vcp->hashcode = vc.hashcode;
525   vcp->constant = constant;
526   vcp->value_id = get_next_value_id ();
527   *slot = vcp;
528   bitmap_set_bit (constant_value_ids, vcp->value_id);
529   return vcp->value_id;
530 }
531 
532 /* Return true if V is a value id for a constant.  */
533 
534 bool
535 value_id_constant_p (unsigned int v)
536 {
537   return bitmap_bit_p (constant_value_ids, v);
538 }
539 
540 /* Compute the hash for a reference operand VRO1.  */
541 
542 static void
543 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
544 {
545   hstate.add_int (vro1->opcode);
546   if (vro1->op0)
547     inchash::add_expr (vro1->op0, hstate);
548   if (vro1->op1)
549     inchash::add_expr (vro1->op1, hstate);
550   if (vro1->op2)
551     inchash::add_expr (vro1->op2, hstate);
552 }
553 
554 /* Compute a hash for the reference operation VR1 and return it.  */
555 
556 static hashval_t
557 vn_reference_compute_hash (const vn_reference_t vr1)
558 {
559   inchash::hash hstate;
560   hashval_t result;
561   int i;
562   vn_reference_op_t vro;
563   poly_int64 off = -1;
564   bool deref = false;
565 
566   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
567     {
568       if (vro->opcode == MEM_REF)
569 	deref = true;
570       else if (vro->opcode != ADDR_EXPR)
571 	deref = false;
572       if (maybe_ne (vro->off, -1))
573 	{
574 	  if (known_eq (off, -1))
575 	    off = 0;
576 	  off += vro->off;
577 	}
578       else
579 	{
580 	  if (maybe_ne (off, -1)
581 	      && maybe_ne (off, 0))
582 	    hstate.add_poly_int (off);
583 	  off = -1;
584 	  if (deref
585 	      && vro->opcode == ADDR_EXPR)
586 	    {
587 	      if (vro->op0)
588 		{
589 		  tree op = TREE_OPERAND (vro->op0, 0);
590 		  hstate.add_int (TREE_CODE (op));
591 		  inchash::add_expr (op, hstate);
592 		}
593 	    }
594 	  else
595 	    vn_reference_op_compute_hash (vro, hstate);
596 	}
597     }
598   result = hstate.end ();
599   /* ??? We would ICE later if we hash instead of adding that in. */
600   if (vr1->vuse)
601     result += SSA_NAME_VERSION (vr1->vuse);
602 
603   return result;
604 }
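
/* A sketch of the offset handling above (made-up names): runs of
   operands whose constant offsets are known are summed into OFF and
   only the total is mixed into the hash.  An access spelled s.a.b,
   with b at total byte offset 8, and the equivalent MEM[&s, 8] of the
   same type therefore produce the same hash value and can then be
   unified by vn_reference_eq, which accumulates offsets the same
   way.  */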
605 
606 /* Return true if reference operations VR1 and VR2 are equivalent.  This
607    means they have the same set of operands and vuses.  */
608 
609 bool
610 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
611 {
612   unsigned i, j;
613 
614   /* Early out if this is not a hash collision.  */
615   if (vr1->hashcode != vr2->hashcode)
616     return false;
617 
618   /* The VOP needs to be the same.  */
619   if (vr1->vuse != vr2->vuse)
620     return false;
621 
622   /* If the operands are the same we are done.  */
623   if (vr1->operands == vr2->operands)
624     return true;
625 
626   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
627     return false;
628 
629   if (INTEGRAL_TYPE_P (vr1->type)
630       && INTEGRAL_TYPE_P (vr2->type))
631     {
632       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
633 	return false;
634     }
635   else if (INTEGRAL_TYPE_P (vr1->type)
636 	   && (TYPE_PRECISION (vr1->type)
637 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
638     return false;
639   else if (INTEGRAL_TYPE_P (vr2->type)
640 	   && (TYPE_PRECISION (vr2->type)
641 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
642     return false;
643 
644   i = 0;
645   j = 0;
646   do
647     {
648       poly_int64 off1 = 0, off2 = 0;
649       vn_reference_op_t vro1, vro2;
650       vn_reference_op_s tem1, tem2;
651       bool deref1 = false, deref2 = false;
652       for (; vr1->operands.iterate (i, &vro1); i++)
653 	{
654 	  if (vro1->opcode == MEM_REF)
655 	    deref1 = true;
656 	  /* Do not look through a storage order barrier.  */
657 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
658 	    return false;
659 	  if (known_eq (vro1->off, -1))
660 	    break;
661 	  off1 += vro1->off;
662 	}
663       for (; vr2->operands.iterate (j, &vro2); j++)
664 	{
665 	  if (vro2->opcode == MEM_REF)
666 	    deref2 = true;
667 	  /* Do not look through a storage order barrier.  */
668 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
669 	    return false;
670 	  if (known_eq (vro2->off, -1))
671 	    break;
672 	  off2 += vro2->off;
673 	}
674       if (maybe_ne (off1, off2))
675 	return false;
676       if (deref1 && vro1->opcode == ADDR_EXPR)
677 	{
678 	  memset (&tem1, 0, sizeof (tem1));
679 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
680 	  tem1.type = TREE_TYPE (tem1.op0);
681 	  tem1.opcode = TREE_CODE (tem1.op0);
682 	  vro1 = &tem1;
683 	  deref1 = false;
684 	}
685       if (deref2 && vro2->opcode == ADDR_EXPR)
686 	{
687 	  memset (&tem2, 0, sizeof (tem2));
688 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
689 	  tem2.type = TREE_TYPE (tem2.op0);
690 	  tem2.opcode = TREE_CODE (tem2.op0);
691 	  vro2 = &tem2;
692 	  deref2 = false;
693 	}
694       if (deref1 != deref2)
695 	return false;
696       if (!vn_reference_op_eq (vro1, vro2))
697 	return false;
698       ++j;
699       ++i;
700     }
701   while (vr1->operands.length () != i
702 	 || vr2->operands.length () != j);
703 
704   return true;
705 }
706 
707 /* Copy the operations present in load/store REF into RESULT, a vector of
708    vn_reference_op_s's.  */
709 
710 static void
711 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
712 {
713   if (TREE_CODE (ref) == TARGET_MEM_REF)
714     {
715       vn_reference_op_s temp;
716 
717       result->reserve (3);
718 
719       memset (&temp, 0, sizeof (temp));
720       temp.type = TREE_TYPE (ref);
721       temp.opcode = TREE_CODE (ref);
722       temp.op0 = TMR_INDEX (ref);
723       temp.op1 = TMR_STEP (ref);
724       temp.op2 = TMR_OFFSET (ref);
725       temp.off = -1;
726       temp.clique = MR_DEPENDENCE_CLIQUE (ref);
727       temp.base = MR_DEPENDENCE_BASE (ref);
728       result->quick_push (temp);
729 
730       memset (&temp, 0, sizeof (temp));
731       temp.type = NULL_TREE;
732       temp.opcode = ERROR_MARK;
733       temp.op0 = TMR_INDEX2 (ref);
734       temp.off = -1;
735       result->quick_push (temp);
736 
737       memset (&temp, 0, sizeof (temp));
738       temp.type = NULL_TREE;
739       temp.opcode = TREE_CODE (TMR_BASE (ref));
740       temp.op0 = TMR_BASE (ref);
741       temp.off = -1;
742       result->quick_push (temp);
743       return;
744     }
745 
746   /* For non-calls, store the information that makes up the address.  */
747   tree orig = ref;
748   while (ref)
749     {
750       vn_reference_op_s temp;
751 
752       memset (&temp, 0, sizeof (temp));
753       temp.type = TREE_TYPE (ref);
754       temp.opcode = TREE_CODE (ref);
755       temp.off = -1;
756 
757       switch (temp.opcode)
758 	{
759 	case MODIFY_EXPR:
760 	  temp.op0 = TREE_OPERAND (ref, 1);
761 	  break;
762 	case WITH_SIZE_EXPR:
763 	  temp.op0 = TREE_OPERAND (ref, 1);
764 	  temp.off = 0;
765 	  break;
766 	case MEM_REF:
767 	  /* The base address gets its own vn_reference_op_s structure.  */
768 	  temp.op0 = TREE_OPERAND (ref, 1);
769 	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
770 	    temp.off = -1;
771 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
772 	  temp.base = MR_DEPENDENCE_BASE (ref);
773 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 	  break;
775 	case BIT_FIELD_REF:
776 	  /* Record bits, position and storage order.  */
777 	  temp.op0 = TREE_OPERAND (ref, 1);
778 	  temp.op1 = TREE_OPERAND (ref, 2);
779 	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
780 	    temp.off = -1;
781 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
782 	  break;
783 	case COMPONENT_REF:
784 	  /* The field decl is enough to unambiguously specify the field;
785 	     a matching type is not necessary and a mismatching type
786 	     is always a spurious difference.  */
787 	  temp.type = NULL_TREE;
788 	  temp.op0 = TREE_OPERAND (ref, 1);
789 	  temp.op1 = TREE_OPERAND (ref, 2);
790 	  {
791 	    tree this_offset = component_ref_field_offset (ref);
792 	    if (this_offset
793 		&& poly_int_tree_p (this_offset))
794 	      {
795 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
796 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
797 		  {
798 		    poly_offset_int off
799 		      = (wi::to_poly_offset (this_offset)
800 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
801 		    /* Prohibit value-numbering zero offset components
802 		       of addresses the same before the pass folding
803 		       __builtin_object_size had a chance to run
804 		       (checking cfun->after_inlining does the
805 		       trick here).  */
806 		    if (TREE_CODE (orig) != ADDR_EXPR
807 			|| maybe_ne (off, 0)
808 			|| cfun->after_inlining)
809 		      off.to_shwi (&temp.off);
810 		  }
811 	      }
812 	  }
813 	  break;
814 	case ARRAY_RANGE_REF:
815 	case ARRAY_REF:
816 	  {
817 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
818 	    /* Record index as operand.  */
819 	    temp.op0 = TREE_OPERAND (ref, 1);
820 	    /* Always record lower bounds and element size.  */
821 	    temp.op1 = array_ref_low_bound (ref);
822 	    /* But record element size in units of the type alignment.  */
823 	    temp.op2 = TREE_OPERAND (ref, 3);
824 	    temp.align = eltype->type_common.align;
825 	    if (! temp.op2)
826 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
827 				     size_int (TYPE_ALIGN_UNIT (eltype)));
828 	    if (poly_int_tree_p (temp.op0)
829 		&& poly_int_tree_p (temp.op1)
830 		&& TREE_CODE (temp.op2) == INTEGER_CST)
831 	      {
832 		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
833 					- wi::to_poly_offset (temp.op1))
834 				       * wi::to_offset (temp.op2)
835 				       * vn_ref_op_align_unit (&temp));
836 		off.to_shwi (&temp.off);
837 	      }
838 	  }
839 	  break;
840 	case VAR_DECL:
841 	  if (DECL_HARD_REGISTER (ref))
842 	    {
843 	      temp.op0 = ref;
844 	      break;
845 	    }
846 	  /* Fallthru.  */
847 	case PARM_DECL:
848 	case CONST_DECL:
849 	case RESULT_DECL:
850 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
851 	     when valueizing MEM[ptr] with ptr = &decl.  */
852 	  temp.opcode = MEM_REF;
853 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
854 	  temp.off = 0;
855 	  result->safe_push (temp);
856 	  temp.opcode = ADDR_EXPR;
857 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
858 	  temp.type = TREE_TYPE (temp.op0);
859 	  temp.off = -1;
860 	  break;
861 	case STRING_CST:
862 	case INTEGER_CST:
863 	case COMPLEX_CST:
864 	case VECTOR_CST:
865 	case REAL_CST:
866 	case FIXED_CST:
867 	case CONSTRUCTOR:
868 	case SSA_NAME:
869 	  temp.op0 = ref;
870 	  break;
871 	case ADDR_EXPR:
872 	  if (is_gimple_min_invariant (ref))
873 	    {
874 	      temp.op0 = ref;
875 	      break;
876 	    }
877 	  break;
878 	  /* These are only interesting for their operands, their
879 	     existence, and their type.  They will never be the last
880 	     ref in the chain of references (i.e. they require an
881 	     operand), so we don't have to put anything
882 	     for op* as it will be handled by the iteration.  */
883 	case REALPART_EXPR:
884 	  temp.off = 0;
885 	  break;
886 	case VIEW_CONVERT_EXPR:
887 	  temp.off = 0;
888 	  temp.reverse = storage_order_barrier_p (ref);
889 	  break;
890 	case IMAGPART_EXPR:
891 	  /* This is only interesting for its constant offset.  */
892 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
893 	  break;
894 	default:
895 	  gcc_unreachable ();
896 	}
897       result->safe_push (temp);
898 
899       if (REFERENCE_CLASS_P (ref)
900 	  || TREE_CODE (ref) == MODIFY_EXPR
901 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
902 	  || (TREE_CODE (ref) == ADDR_EXPR
903 	      && !is_gimple_min_invariant (ref)))
904 	ref = TREE_OPERAND (ref, 0);
905       else
906 	ref = NULL_TREE;
907     }
908 }
909 
910 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
911    operands in *OPS, the reference alias set SET and the reference type TYPE.
912    Return true if something useful was produced.  */
913 
914 bool
915 ao_ref_init_from_vn_reference (ao_ref *ref,
916 			       alias_set_type set, tree type,
917 			       vec<vn_reference_op_s> ops)
918 {
919   vn_reference_op_t op;
920   unsigned i;
921   tree base = NULL_TREE;
922   tree *op0_p = &base;
923   poly_offset_int offset = 0;
924   poly_offset_int max_size;
925   poly_offset_int size = -1;
926   tree size_tree = NULL_TREE;
927   alias_set_type base_alias_set = -1;
928 
929   /* First get the final access size from just the outermost expression.  */
930   op = &ops[0];
931   if (op->opcode == COMPONENT_REF)
932     size_tree = DECL_SIZE (op->op0);
933   else if (op->opcode == BIT_FIELD_REF)
934     size_tree = op->op0;
935   else
936     {
937       machine_mode mode = TYPE_MODE (type);
938       if (mode == BLKmode)
939 	size_tree = TYPE_SIZE (type);
940       else
941 	size = GET_MODE_BITSIZE (mode);
942     }
943   if (size_tree != NULL_TREE
944       && poly_int_tree_p (size_tree))
945     size = wi::to_poly_offset (size_tree);
946 
947   /* Initially, maxsize is the same as the accessed element size.
948      In the following it will only grow (or become -1).  */
949   max_size = size;
950 
951   /* Compute cumulative bit-offset for nested component-refs and array-refs,
952      and find the ultimate containing object.  */
953   FOR_EACH_VEC_ELT (ops, i, op)
954     {
955       switch (op->opcode)
956 	{
957 	/* These may be in the reference ops, but we cannot do anything
958 	   sensible with them here.  */
959 	case ADDR_EXPR:
960 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
961 	  if (base != NULL_TREE
962 	      && TREE_CODE (base) == MEM_REF
963 	      && op->op0
964 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
965 	    {
966 	      vn_reference_op_t pop = &ops[i-1];
967 	      base = TREE_OPERAND (op->op0, 0);
968 	      if (known_eq (pop->off, -1))
969 		{
970 		  max_size = -1;
971 		  offset = 0;
972 		}
973 	      else
974 		offset += pop->off * BITS_PER_UNIT;
975 	      op0_p = NULL;
976 	      break;
977 	    }
978 	  /* Fallthru.  */
979 	case CALL_EXPR:
980 	  return false;
981 
982 	/* Record the base objects.  */
983 	case MEM_REF:
984 	  base_alias_set = get_deref_alias_set (op->op0);
985 	  *op0_p = build2 (MEM_REF, op->type,
986 			   NULL_TREE, op->op0);
987 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
988 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
989 	  op0_p = &TREE_OPERAND (*op0_p, 0);
990 	  break;
991 
992 	case VAR_DECL:
993 	case PARM_DECL:
994 	case RESULT_DECL:
995 	case SSA_NAME:
996 	  *op0_p = op->op0;
997 	  op0_p = NULL;
998 	  break;
999 
1000 	/* And now the usual component-reference style ops.  */
1001 	case BIT_FIELD_REF:
1002 	  offset += wi::to_offset (op->op1);
1003 	  break;
1004 
1005 	case COMPONENT_REF:
1006 	  {
1007 	    tree field = op->op0;
1008 	    /* We do not have a complete COMPONENT_REF tree here so we
1009 	       cannot use component_ref_field_offset.  Do the interesting
1010 	       parts manually.  */
1011 	    tree this_offset = DECL_FIELD_OFFSET (field);
1012 
1013 	    if (op->op1 || !poly_int_tree_p (this_offset))
1014 	      max_size = -1;
1015 	    else
1016 	      {
1017 		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1018 					   << LOG2_BITS_PER_UNIT);
1019 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1020 		offset += woffset;
1021 	      }
1022 	    break;
1023 	  }
1024 
1025 	case ARRAY_RANGE_REF:
1026 	case ARRAY_REF:
1027 	  /* We recorded the lower bound and the element size.  */
1028 	  if (!poly_int_tree_p (op->op0)
1029 	      || !poly_int_tree_p (op->op1)
1030 	      || TREE_CODE (op->op2) != INTEGER_CST)
1031 	    max_size = -1;
1032 	  else
1033 	    {
1034 	      poly_offset_int woffset
1035 		= wi::sext (wi::to_poly_offset (op->op0)
1036 			    - wi::to_poly_offset (op->op1),
1037 			    TYPE_PRECISION (TREE_TYPE (op->op0)));
1038 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1039 	      woffset <<= LOG2_BITS_PER_UNIT;
1040 	      offset += woffset;
1041 	    }
1042 	  break;
1043 
1044 	case REALPART_EXPR:
1045 	  break;
1046 
1047 	case IMAGPART_EXPR:
1048 	  offset += size;
1049 	  break;
1050 
1051 	case VIEW_CONVERT_EXPR:
1052 	  break;
1053 
1054 	case STRING_CST:
1055 	case INTEGER_CST:
1056 	case COMPLEX_CST:
1057 	case VECTOR_CST:
1058 	case REAL_CST:
1059 	case CONSTRUCTOR:
1060 	case CONST_DECL:
1061 	  return false;
1062 
1063 	default:
1064 	  return false;
1065 	}
1066     }
1067 
1068   if (base == NULL_TREE)
1069     return false;
1070 
1071   ref->ref = NULL_TREE;
1072   ref->base = base;
1073   ref->ref_alias_set = set;
1074   if (base_alias_set != -1)
1075     ref->base_alias_set = base_alias_set;
1076   else
1077     ref->base_alias_set = get_alias_set (base);
1078   /* We discount volatiles from value-numbering elsewhere.  */
1079   ref->volatile_p = false;
1080 
1081   if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1082     {
1083       ref->offset = 0;
1084       ref->size = -1;
1085       ref->max_size = -1;
1086       return true;
1087     }
1088 
1089   if (!offset.to_shwi (&ref->offset))
1090     {
1091       ref->offset = 0;
1092       ref->max_size = -1;
1093       return true;
1094     }
1095 
1096   if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1097     ref->max_size = -1;
1098 
1099   return true;
1100 }
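
/* A usage sketch of the conversion above (made-up names): the operand
   vector for MEM[&s, 0].b, with field b a 32-bit integer at byte
   offset 8, yields an ao_ref with base s, offset 64 bits, size 32 bits
   and max_size 32 bits, ready for the alias oracle to disambiguate
   against other memory accesses.  */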
1101 
1102 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1103    vn_reference_op_s's.  */
1104 
1105 static void
1106 copy_reference_ops_from_call (gcall *call,
1107 			      vec<vn_reference_op_s> *result)
1108 {
1109   vn_reference_op_s temp;
1110   unsigned i;
1111   tree lhs = gimple_call_lhs (call);
1112   int lr;
1113 
1114   /* If two calls have a different non-SSA lhs, vdef value numbers should be
1115      different.  By adding the lhs here in the vector, we ensure that the
1116      hashcode is different, guaranteeing a different value number.  */
1117   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1118     {
1119       memset (&temp, 0, sizeof (temp));
1120       temp.opcode = MODIFY_EXPR;
1121       temp.type = TREE_TYPE (lhs);
1122       temp.op0 = lhs;
1123       temp.off = -1;
1124       result->safe_push (temp);
1125     }
1126 
1127   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1128   memset (&temp, 0, sizeof (temp));
1129   temp.type = gimple_call_return_type (call);
1130   temp.opcode = CALL_EXPR;
1131   temp.op0 = gimple_call_fn (call);
1132   temp.op1 = gimple_call_chain (call);
1133   if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1134     temp.op2 = size_int (lr);
1135   temp.off = -1;
1136   if (gimple_call_with_bounds_p (call))
1137     temp.with_bounds = 1;
1138   result->safe_push (temp);
1139 
1140   /* Copy the call arguments.  As they can be references as well,
1141      just chain them together.  */
1142   for (i = 0; i < gimple_call_num_args (call); ++i)
1143     {
1144       tree callarg = gimple_call_arg (call, i);
1145       copy_reference_ops_from_ref (callarg, result);
1146     }
1147 }
1148 
1149 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1150    *I_P to point to the last element of the replacement.  */
1151 static bool
1152 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1153 			    unsigned int *i_p)
1154 {
1155   unsigned int i = *i_p;
1156   vn_reference_op_t op = &(*ops)[i];
1157   vn_reference_op_t mem_op = &(*ops)[i - 1];
1158   tree addr_base;
1159   poly_int64 addr_offset = 0;
1160 
1161   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1162      from .foo.bar to the preceding MEM_REF offset and replace the
1163      address with &OBJ.  */
1164   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1165 					     &addr_offset);
1166   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1167   if (addr_base != TREE_OPERAND (op->op0, 0))
1168     {
1169       poly_offset_int off
1170 	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1171 				  SIGNED)
1172 	   + addr_offset);
1173       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1174       op->op0 = build_fold_addr_expr (addr_base);
1175       if (tree_fits_shwi_p (mem_op->op0))
1176 	mem_op->off = tree_to_shwi (mem_op->op0);
1177       else
1178 	mem_op->off = -1;
1179       return true;
1180     }
1181   return false;
1182 }
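
/* For example (a sketch with made-up names): if the operands describe
   MEM[&s.b, 4] and field b lives at byte offset 8 in s, the constant
   part of the address is folded into the preceding MEM_REF offset and
   the ADDR_EXPR is replaced by &s, leaving the operands of
   MEM[&s, 12].  */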
1183 
1184 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1185    *I_P to point to the last element of the replacement.  */
1186 static bool
1187 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1188 				     unsigned int *i_p)
1189 {
1190   unsigned int i = *i_p;
1191   vn_reference_op_t op = &(*ops)[i];
1192   vn_reference_op_t mem_op = &(*ops)[i - 1];
1193   gimple *def_stmt;
1194   enum tree_code code;
1195   poly_offset_int off;
1196 
1197   def_stmt = SSA_NAME_DEF_STMT (op->op0);
1198   if (!is_gimple_assign (def_stmt))
1199     return false;
1200 
1201   code = gimple_assign_rhs_code (def_stmt);
1202   if (code != ADDR_EXPR
1203       && code != POINTER_PLUS_EXPR)
1204     return false;
1205 
1206   off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1207 
1208   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1209      from .foo.bar to the preceding MEM_REF offset and replace the
1210      address with &OBJ.  */
1211   if (code == ADDR_EXPR)
1212     {
1213       tree addr, addr_base;
1214       poly_int64 addr_offset;
1215 
1216       addr = gimple_assign_rhs1 (def_stmt);
1217       addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1218 						 &addr_offset);
1219       /* If that didn't work because the address isn't invariant, propagate
1220          the reference tree from the address operation in case the current
1221 	 dereference isn't offsetted.  */
1222       if (!addr_base
1223 	  && *i_p == ops->length () - 1
1224 	  && known_eq (off, 0)
1225 	  /* This makes us disable this transform for PRE where the
1226 	     reference ops might be also used for code insertion which
1227 	     is invalid.  */
1228 	  && default_vn_walk_kind == VN_WALKREWRITE)
1229 	{
1230 	  auto_vec<vn_reference_op_s, 32> tem;
1231 	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1232 	  /* Make sure to preserve TBAA info.  The only objects not
1233 	     wrapped in MEM_REFs that can have their address taken are
1234 	     STRING_CSTs.  */
1235 	  if (tem.length () >= 2
1236 	      && tem[tem.length () - 2].opcode == MEM_REF)
1237 	    {
1238 	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1239 	      new_mem_op->op0
1240 		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
1241 				    wi::to_poly_wide (new_mem_op->op0));
1242 	    }
1243 	  else
1244 	    gcc_assert (tem.last ().opcode == STRING_CST);
1245 	  ops->pop ();
1246 	  ops->pop ();
1247 	  ops->safe_splice (tem);
1248 	  --*i_p;
1249 	  return true;
1250 	}
1251       if (!addr_base
1252 	  || TREE_CODE (addr_base) != MEM_REF
1253 	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1254 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1255 	return false;
1256 
1257       off += addr_offset;
1258       off += mem_ref_offset (addr_base);
1259       op->op0 = TREE_OPERAND (addr_base, 0);
1260     }
1261   else
1262     {
1263       tree ptr, ptroff;
1264       ptr = gimple_assign_rhs1 (def_stmt);
1265       ptroff = gimple_assign_rhs2 (def_stmt);
1266       if (TREE_CODE (ptr) != SSA_NAME
1267 	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1268 	  || TREE_CODE (ptroff) != INTEGER_CST)
1269 	return false;
1270 
1271       off += wi::to_offset (ptroff);
1272       op->op0 = ptr;
1273     }
1274 
1275   mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1276   if (tree_fits_shwi_p (mem_op->op0))
1277     mem_op->off = tree_to_shwi (mem_op->op0);
1278   else
1279     mem_op->off = -1;
1280   if (TREE_CODE (op->op0) == SSA_NAME)
1281     op->op0 = SSA_VAL (op->op0);
1282   if (TREE_CODE (op->op0) != SSA_NAME)
1283     op->opcode = TREE_CODE (op->op0);
1284 
1285   /* And recurse.  */
1286   if (TREE_CODE (op->op0) == SSA_NAME)
1287     vn_reference_maybe_forwprop_address (ops, i_p);
1288   else if (TREE_CODE (op->op0) == ADDR_EXPR)
1289     vn_reference_fold_indirect (ops, i_p);
1290   return true;
1291 }
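
/* A sketch of the forward-propagation above (made-up names): given

	 p_1 = q_2 + 16;
	 ... = MEM[p_1, 4];

   the SSA name p_1 in the reference operands is replaced by q_2 and
   the constant 16 is folded into the MEM_REF offset, yielding the
   operands of MEM[q_2, 20].  The ADDR_EXPR case likewise folds a
   definition like p_1 = &r_3->f into the preceding MEM_REF.  */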
1292 
1293 /* Optimize the reference REF to a constant if possible or return
1294    NULL_TREE if not.  */
1295 
1296 tree
1297 fully_constant_vn_reference_p (vn_reference_t ref)
1298 {
1299   vec<vn_reference_op_s> operands = ref->operands;
1300   vn_reference_op_t op;
1301 
1302   /* Try to simplify the translated expression if it is
1303      a call to a builtin function with at most two arguments.  */
1304   op = &operands[0];
1305   if (op->opcode == CALL_EXPR
1306       && TREE_CODE (op->op0) == ADDR_EXPR
1307       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1308       && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1309       && operands.length () >= 2
1310       && operands.length () <= 3)
1311     {
1312       vn_reference_op_t arg0, arg1 = NULL;
1313       bool anyconst = false;
1314       arg0 = &operands[1];
1315       if (operands.length () > 2)
1316 	arg1 = &operands[2];
1317       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1318 	  || (arg0->opcode == ADDR_EXPR
1319 	      && is_gimple_min_invariant (arg0->op0)))
1320 	anyconst = true;
1321       if (arg1
1322 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1323 	      || (arg1->opcode == ADDR_EXPR
1324 		  && is_gimple_min_invariant (arg1->op0))))
1325 	anyconst = true;
1326       if (anyconst)
1327 	{
1328 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1329 					 arg1 ? 2 : 1,
1330 					 arg0->op0,
1331 					 arg1 ? arg1->op0 : NULL);
1332 	  if (folded
1333 	      && TREE_CODE (folded) == NOP_EXPR)
1334 	    folded = TREE_OPERAND (folded, 0);
1335 	  if (folded
1336 	      && is_gimple_min_invariant (folded))
1337 	    return folded;
1338 	}
1339     }
1340 
1341   /* Simplify reads from constants or constant initializers.  */
1342   else if (BITS_PER_UNIT == 8
1343 	   && is_gimple_reg_type (ref->type)
1344 	   && (!INTEGRAL_TYPE_P (ref->type)
1345 	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1346     {
1347       poly_int64 off = 0;
1348       HOST_WIDE_INT size;
1349       if (INTEGRAL_TYPE_P (ref->type))
1350 	size = TYPE_PRECISION (ref->type);
1351       else
1352 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1353       if (size % BITS_PER_UNIT != 0
1354 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1355 	return NULL_TREE;
1356       size /= BITS_PER_UNIT;
1357       unsigned i;
1358       for (i = 0; i < operands.length (); ++i)
1359 	{
1360 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1361 	    {
1362 	      ++i;
1363 	      break;
1364 	    }
1365 	  if (known_eq (operands[i].off, -1))
1366 	    return NULL_TREE;
1367 	  off += operands[i].off;
1368 	  if (operands[i].opcode == MEM_REF)
1369 	    {
1370 	      ++i;
1371 	      break;
1372 	    }
1373 	}
1374       vn_reference_op_t base = &operands[--i];
1375       tree ctor = error_mark_node;
1376       tree decl = NULL_TREE;
1377       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1378 	ctor = base->op0;
1379       else if (base->opcode == MEM_REF
1380 	       && base[1].opcode == ADDR_EXPR
1381 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1382 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1383 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1384 	{
1385 	  decl = TREE_OPERAND (base[1].op0, 0);
1386 	  if (TREE_CODE (decl) == STRING_CST)
1387 	    ctor = decl;
1388 	  else
1389 	    ctor = ctor_for_folding (decl);
1390 	}
1391       if (ctor == NULL_TREE)
1392 	return build_zero_cst (ref->type);
1393       else if (ctor != error_mark_node)
1394 	{
1395 	  HOST_WIDE_INT const_off;
1396 	  if (decl)
1397 	    {
1398 	      tree res = fold_ctor_reference (ref->type, ctor,
1399 					      off * BITS_PER_UNIT,
1400 					      size * BITS_PER_UNIT, decl);
1401 	      if (res)
1402 		{
1403 		  STRIP_USELESS_TYPE_CONVERSION (res);
1404 		  if (is_gimple_min_invariant (res))
1405 		    return res;
1406 		}
1407 	    }
1408 	  else if (off.is_constant (&const_off))
1409 	    {
1410 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1411 	      int len = native_encode_expr (ctor, buf, size, const_off);
1412 	      if (len > 0)
1413 		return native_interpret_expr (ref->type, buf, len);
1414 	    }
1415 	}
1416     }
1417 
1418   return NULL_TREE;
1419 }
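
/* For illustration (a sketch assuming BITS_PER_UNIT == 8, made-up
   names): a load whose operands resolve to MEM[&tab, 8] where

	 static const int tab[4] = { 10, 20, 30, 40 };

   finds the initializer via ctor_for_folding and is simplified to the
   constant 30; a read from a decl whose constructor is known to be
   empty folds to a zero constant of the reference type.  */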
1420 
1421 /* Return true if OPS contain a storage order barrier.  */
1422 
1423 static bool
1424 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1425 {
1426   vn_reference_op_t op;
1427   unsigned i;
1428 
1429   FOR_EACH_VEC_ELT (ops, i, op)
1430     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1431       return true;
1432 
1433   return false;
1434 }
1435 
1436 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1437    structures into their value numbers.  This is done in-place, and
1438    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1439    whether any operands were valueized.  */
1440 
1441 static vec<vn_reference_op_s>
1442 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1443 {
1444   vn_reference_op_t vro;
1445   unsigned int i;
1446 
1447   *valueized_anything = false;
1448 
1449   FOR_EACH_VEC_ELT (orig, i, vro)
1450     {
1451       if (vro->opcode == SSA_NAME
1452 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1453 	{
1454 	  tree tem = SSA_VAL (vro->op0);
1455 	  if (tem != vro->op0)
1456 	    {
1457 	      *valueized_anything = true;
1458 	      vro->op0 = tem;
1459 	    }
1460 	  /* If it transforms from an SSA_NAME to a constant, update
1461 	     the opcode.  */
1462 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1463 	    vro->opcode = TREE_CODE (vro->op0);
1464 	}
1465       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1466 	{
1467 	  tree tem = SSA_VAL (vro->op1);
1468 	  if (tem != vro->op1)
1469 	    {
1470 	      *valueized_anything = true;
1471 	      vro->op1 = tem;
1472 	    }
1473 	}
1474       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1475 	{
1476 	  tree tem = SSA_VAL (vro->op2);
1477 	  if (tem != vro->op2)
1478 	    {
1479 	      *valueized_anything = true;
1480 	      vro->op2 = tem;
1481 	    }
1482 	}
1483       /* If it transforms from an SSA_NAME to an address, fold with
1484 	 a preceding indirect reference.  */
1485       if (i > 0
1486 	  && vro->op0
1487 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1488 	  && orig[i - 1].opcode == MEM_REF)
1489 	{
1490 	  if (vn_reference_fold_indirect (&orig, &i))
1491 	    *valueized_anything = true;
1492 	}
1493       else if (i > 0
1494 	       && vro->opcode == SSA_NAME
1495 	       && orig[i - 1].opcode == MEM_REF)
1496 	{
1497 	  if (vn_reference_maybe_forwprop_address (&orig, &i))
1498 	    *valueized_anything = true;
1499 	}
1500       /* If it transforms a non-constant ARRAY_REF into a constant
1501 	 one, adjust the constant offset.  */
1502       else if (vro->opcode == ARRAY_REF
1503 	       && known_eq (vro->off, -1)
1504 	       && poly_int_tree_p (vro->op0)
1505 	       && poly_int_tree_p (vro->op1)
1506 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1507 	{
1508 	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1509 				  - wi::to_poly_offset (vro->op1))
1510 				 * wi::to_offset (vro->op2)
1511 				 * vn_ref_op_align_unit (vro));
1512 	  off.to_shwi (&vro->off);
1513 	}
1514     }
1515 
1516   return orig;
1517 }
1518 
1519 static vec<vn_reference_op_s>
1520 valueize_refs (vec<vn_reference_op_s> orig)
1521 {
1522   bool tem;
1523   return valueize_refs_1 (orig, &tem);
1524 }
1525 
1526 static vec<vn_reference_op_s> shared_lookup_references;
1527 
1528 /* Create a vector of vn_reference_op_s structures from REF, a
1529    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1530    this function.  *VALUEIZED_ANYTHING will specify whether any
1531    operands were valueized.  */
1532 
1533 static vec<vn_reference_op_s>
1534 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1535 {
1536   if (!ref)
1537     return vNULL;
1538   shared_lookup_references.truncate (0);
1539   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1540   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1541 					      valueized_anything);
1542   return shared_lookup_references;
1543 }
1544 
1545 /* Create a vector of vn_reference_op_s structures from CALL, a
1546    call statement.  The vector is shared among all callers of
1547    this function.  */
1548 
1549 static vec<vn_reference_op_s>
1550 valueize_shared_reference_ops_from_call (gcall *call)
1551 {
1552   if (!call)
1553     return vNULL;
1554   shared_lookup_references.truncate (0);
1555   copy_reference_ops_from_call (call, &shared_lookup_references);
1556   shared_lookup_references = valueize_refs (shared_lookup_references);
1557   return shared_lookup_references;
1558 }
1559 
1560 /* Lookup a SCCVN reference operation VR in the current hash table.
1561    Returns the resulting value number if it exists in the hash table,
1562    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1563    vn_reference_t stored in the hashtable if something is found.  */
1564 
1565 static tree
1566 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1567 {
1568   vn_reference_s **slot;
1569   hashval_t hash;
1570 
1571   hash = vr->hashcode;
1572   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1573   if (!slot && current_info == optimistic_info)
1574     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1575   if (slot)
1576     {
1577       if (vnresult)
1578 	*vnresult = (vn_reference_t)*slot;
1579       return ((vn_reference_t)*slot)->result;
1580     }
1581 
1582   return NULL_TREE;
1583 }
1584 
1585 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1586    with the current VUSE and performs the expression lookup.  */
1587 
1588 static void *
1589 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1590 		       unsigned int cnt, void *vr_)
1591 {
1592   vn_reference_t vr = (vn_reference_t)vr_;
1593   vn_reference_s **slot;
1594   hashval_t hash;
1595 
1596   /* This bounds the stmt walks we perform on reference lookups
1597      to O(1) instead of O(N) where N is the number of dominating
1598      stores.  */
1599   if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1600     return (void *)-1;
1601 
1602   if (last_vuse_ptr)
1603     *last_vuse_ptr = vuse;
1604 
1605   /* Fixup vuse and hash.  */
1606   if (vr->vuse)
1607     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1608   vr->vuse = vuse_ssa_val (vuse);
1609   if (vr->vuse)
1610     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1611 
1612   hash = vr->hashcode;
1613   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1614   if (!slot && current_info == optimistic_info)
1615     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1616   if (slot)
1617     return *slot;
1618 
1619   return NULL;
1620 }
1621 
1622 /* Lookup an existing or insert a new vn_reference entry into the
1623    value table for the VUSE, SET, TYPE, OPERANDS reference which
1624    has the value VALUE which is either a constant or an SSA name.  */
1625 
1626 static vn_reference_t
1627 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1628 					  alias_set_type set,
1629 					  tree type,
1630 					  vec<vn_reference_op_s,
1631 					        va_heap> operands,
1632 					  tree value)
1633 {
1634   vn_reference_s vr1;
1635   vn_reference_t result;
1636   unsigned value_id;
1637   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1638   vr1.operands = operands;
1639   vr1.type = type;
1640   vr1.set = set;
1641   vr1.hashcode = vn_reference_compute_hash (&vr1);
1642   if (vn_reference_lookup_1 (&vr1, &result))
1643     return result;
1644   if (TREE_CODE (value) == SSA_NAME)
1645     value_id = VN_INFO (value)->value_id;
1646   else
1647     value_id = get_or_alloc_constant_value_id (value);
1648   return vn_reference_insert_pieces (vuse, set, type,
1649 				     operands.copy (), value, value_id);
1650 }
1651 
1652 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1653 
1654 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
1655 
1656 static tree
1657 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1658 {
1659   if (!rcode.is_tree_code ())
1660     return NULL_TREE;
1661   tree *ops = ops_;
1662   unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1663   if (rcode == CONSTRUCTOR
1664       /* ???  We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
1665          while GIMPLE / match-and-simplify sees CONSTRUCTOR as a GENERIC tree.  */
1666       && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1667     {
1668       length = CONSTRUCTOR_NELTS (ops_[0]);
1669       ops = XALLOCAVEC (tree, length);
1670       for (unsigned i = 0; i < length; ++i)
1671 	ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1672     }
1673   vn_nary_op_t vnresult = NULL;
1674   return vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1675 				   type, ops, &vnresult);
1676 }
1677 
1678 /* Return a value-number for RCODE OPS... either by looking up an existing
1679    value-number for the simplified result or by inserting the operation if
1680    INSERT is true.  */
1681 
1682 static tree
1683 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1684 			   bool insert)
1685 {
1686   tree result = NULL_TREE;
1687   /* We will be creating a value number for
1688        RCODE (OPS...).
1689      So first simplify and lookup this expression to see if it
1690      is already available.  */
1691   mprts_hook = vn_lookup_simplify_result;
1692   bool res = false;
1693   switch (TREE_CODE_LENGTH ((tree_code) rcode))
1694     {
1695     case 1:
1696       res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1697       break;
1698     case 2:
1699       res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1700       break;
1701     case 3:
1702       res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1703       break;
1704     }
1705   mprts_hook = NULL;
1706   gimple *new_stmt = NULL;
1707   if (res
1708       && gimple_simplified_result_is_gimple_val (rcode, ops))
1709     /* The expression is already available.  */
1710     result = ops[0];
1711   else
1712     {
1713       tree val = vn_lookup_simplify_result (rcode, type, ops);
1714       if (!val && insert)
1715 	{
1716 	  gimple_seq stmts = NULL;
1717 	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1718 	  if (result)
1719 	    {
1720 	      gcc_assert (gimple_seq_singleton_p (stmts));
1721 	      new_stmt = gimple_seq_first_stmt (stmts);
1722 	    }
1723 	}
1724       else
1725 	/* The expression is already available.  */
1726 	result = val;
1727     }
1728   if (new_stmt)
1729     {
1730       /* The expression is not yet available, value-number lhs to
1731 	 the new SSA_NAME we created.  */
1732       /* Initialize value-number information properly.  */
1733       VN_INFO_GET (result)->valnum = result;
1734       VN_INFO (result)->value_id = get_next_value_id ();
1735       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1736 					  new_stmt);
1737       VN_INFO (result)->needs_insertion = true;
1738       /* ???  PRE phi-translation inserts NARYs without corresponding
1739          SSA name result.  Re-use those but set their result according
1740 	 to the stmt we just built.  */
1741       vn_nary_op_t nary = NULL;
1742       vn_nary_op_lookup_stmt (new_stmt, &nary);
1743       if (nary)
1744 	{
1745 	  gcc_assert (nary->result == NULL_TREE);
1746 	  nary->result = gimple_assign_lhs (new_stmt);
1747 	}
1748       /* As all "inserted" statements are singleton SCCs, insert
1749 	 to the valid table.  This is strictly needed to
1750 	 avoid re-generating new value SSA_NAMEs for the same
1751 	 expression during SCC iteration over and over (the
1752 	 optimistic table gets cleared after each iteration).
1753 	 We do not need to insert into the optimistic table, as
1754 	 lookups there will fall back to the valid table.  */
1755       else if (current_info == optimistic_info)
1756 	{
1757 	  current_info = valid_info;
1758 	  vn_nary_op_insert_stmt (new_stmt, result);
1759 	  current_info = optimistic_info;
1760 	}
1761       else
1762 	vn_nary_op_insert_stmt (new_stmt, result);
1763       if (dump_file && (dump_flags & TDF_DETAILS))
1764 	{
1765 	  fprintf (dump_file, "Inserting name ");
1766 	  print_generic_expr (dump_file, result);
1767 	  fprintf (dump_file, " for expression ");
1768 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1769 	  fprintf (dump_file, "\n");
1770 	}
1771     }
1772   return result;
1773 }
1774 
1775 /* Return a value-number for RCODE OPS... either by looking up an existing
1776    value-number for the simplified result or by inserting the operation.  */
1777 
1778 static tree
1779 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1780 {
1781   return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1782 }
1783 
1784 /* Try to simplify the n-ary operation NARY, i.e. the expression
1785    RCODE OPS... of type TYPE, and return its value if present.  */
1786 
1787 tree
1788 vn_nary_simplify (vn_nary_op_t nary)
1789 {
1790   if (nary->length > 3)
1791     return NULL_TREE;
1792   tree ops[3];
1793   memcpy (ops, nary->op, sizeof (tree) * nary->length);
1794   return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1795 }
1796 
1797 
1798 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1799    from the statement defining VUSE and if not successful tries to
1800    translate *REF and *VR through an aggregate copy at the definition
1801    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1802    of *REF and *VR.  If only disambiguation was performed then
1803    *DISAMBIGUATE_ONLY is set to true.  */
1804 
1805 static void *
1806 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1807 		       bool *disambiguate_only)
1808 {
1809   vn_reference_t vr = (vn_reference_t)vr_;
1810   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1811   tree base = ao_ref_base (ref);
1812   HOST_WIDE_INT offseti, maxsizei;
1813   static vec<vn_reference_op_s> lhs_ops;
1814   ao_ref lhs_ref;
1815   bool lhs_ref_ok = false;
1816   poly_int64 copy_size;
1817 
1818   /* If the reference is based on a parameter that was determined as
1819      pointing to readonly memory it doesn't change.  */
1820   if (TREE_CODE (base) == MEM_REF
1821       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1822       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1823       && bitmap_bit_p (const_parms,
1824 		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1825     {
1826       *disambiguate_only = true;
1827       return NULL;
1828     }
1829 
1830   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1831   if (is_gimple_assign (def_stmt))
1832     {
1833       tree lhs = gimple_assign_lhs (def_stmt);
1834       bool valueized_anything = false;
1835       /* Avoid re-allocation overhead.  */
1836       lhs_ops.truncate (0);
1837       copy_reference_ops_from_ref (lhs, &lhs_ops);
1838       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1839       if (valueized_anything)
1840 	{
1841 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1842 						      get_alias_set (lhs),
1843 						      TREE_TYPE (lhs), lhs_ops);
1844 	  if (lhs_ref_ok
1845 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1846 	    {
1847 	      *disambiguate_only = true;
1848 	      return NULL;
1849 	    }
1850 	}
1851       else
1852 	{
1853 	  ao_ref_init (&lhs_ref, lhs);
1854 	  lhs_ref_ok = true;
1855 	}
1856 
1857       /* If we reach a clobbering statement try to skip it and see if
1858          we find a VN result with exactly the same value as the
1859 	 possible clobber.  In this case we can ignore the clobber
1860 	 and return the found value.
1861 	 Note that we don't need to worry about partial overlapping
1862 	 accesses as we then can use TBAA to disambiguate against the
1863 	 clobbering statement when looking up a load (thus the
1864 	 VN_WALKREWRITE guard).  */
1865       if (vn_walk_kind == VN_WALKREWRITE
1866 	  && is_gimple_reg_type (TREE_TYPE (lhs))
1867 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
1868 	  /* The overlap restriction breaks down when either access
1869 	     alias-set is zero.  Still for accesses of the size of
1870 	     an addressable unit there can be no overlaps.  Overlaps
1871 	     between different union members are not an issue since
1872 	     activation of a union member via a store makes the
1873 	     values of untouched bytes unspecified.  */
1874 	  && (known_eq (ref->size, BITS_PER_UNIT)
1875 	      || (get_alias_set (lhs) != 0
1876 		  && ao_ref_alias_set (ref) != 0)))
1877 	{
1878 	  tree *saved_last_vuse_ptr = last_vuse_ptr;
1879 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
1880 	  last_vuse_ptr = NULL;
1881 	  tree saved_vuse = vr->vuse;
1882 	  hashval_t saved_hashcode = vr->hashcode;
1883 	  void *res = vn_reference_lookup_2 (ref,
1884 					     gimple_vuse (def_stmt), 0, vr);
1885 	  /* Need to restore vr->vuse and vr->hashcode.  */
1886 	  vr->vuse = saved_vuse;
1887 	  vr->hashcode = saved_hashcode;
1888 	  last_vuse_ptr = saved_last_vuse_ptr;
1889 	  if (res && res != (void *)-1)
1890 	    {
1891 	      vn_reference_t vnresult = (vn_reference_t) res;
1892 	      if (vnresult->result
1893 		  && operand_equal_p (vnresult->result,
1894 				      gimple_assign_rhs1 (def_stmt), 0))
1895 		return res;
1896 	    }
1897 	}
1898     }
1899   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1900 	   && gimple_call_num_args (def_stmt) <= 4)
1901     {
1902       /* For builtin calls valueize its arguments and call the
1903          alias oracle again.  Valueization may improve points-to
1904 	 info of pointers and constify size and position arguments.
1905 	 Originally this was motivated by PR61034 which has
1906 	 conditional calls to free falsely clobbering ref because
1907 	 of imprecise points-to info of the argument.  */
1908       tree oldargs[4];
1909       bool valueized_anything = false;
1910       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1911 	{
1912 	  oldargs[i] = gimple_call_arg (def_stmt, i);
1913 	  tree val = vn_valueize (oldargs[i]);
1914 	  if (val != oldargs[i])
1915 	    {
1916 	      gimple_call_set_arg (def_stmt, i, val);
1917 	      valueized_anything = true;
1918 	    }
1919 	}
1920       if (valueized_anything)
1921 	{
1922 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1923 					       ref);
1924 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1925 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1926 	  if (!res)
1927 	    {
1928 	      *disambiguate_only = true;
1929 	      return NULL;
1930 	    }
1931 	}
1932     }
1933 
1934   if (*disambiguate_only)
1935     return (void *)-1;
1936 
1937   /* If we cannot constrain the size of the reference we cannot
1938      test if anything kills it.  */
1939   if (!ref->max_size_known_p ())
1940     return (void *)-1;
1941 
1942   poly_int64 offset = ref->offset;
1943   poly_int64 maxsize = ref->max_size;
1944 
1945   /* We can't deduce anything useful from clobbers.  */
1946   if (gimple_clobber_p (def_stmt))
1947     return (void *)-1;
1948 
1949   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1950      from that definition.
1951      1) Memset.  */
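  /* For example (an illustrative sketch, not taken from the code below):
       memset (&a, 0, sizeof (a));
       ... = a.f;
     A read of a.f that is fully covered by the zeroing memset can be
     value-numbered to zero of the read's type.  Only a literal zero
     store value and a constant length are handled here.  */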
1952   if (is_gimple_reg_type (vr->type)
1953       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1954       && integer_zerop (gimple_call_arg (def_stmt, 1))
1955       && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
1956       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1957     {
1958       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1959       tree base2;
1960       poly_int64 offset2, size2, maxsize2;
1961       bool reverse;
1962       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1963 				       &reverse);
1964       tree len = gimple_call_arg (def_stmt, 2);
1965       if (known_size_p (maxsize2)
1966 	  && operand_equal_p (base, base2, 0)
1967 	  && known_subrange_p (offset, maxsize, offset2,
1968 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
1969 	{
1970 	  tree val = build_zero_cst (vr->type);
1971 	  return vn_reference_lookup_or_insert_for_pieces
1972 	           (vuse, vr->set, vr->type, vr->operands, val);
1973 	}
1974     }
1975 
1976   /* 2) Assignment from an empty CONSTRUCTOR.  */
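  /* E.g. a store like a = (struct S){}; allows a covered read of a to
     be value-numbered to zero (illustrative example).  */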
1977   else if (is_gimple_reg_type (vr->type)
1978 	   && gimple_assign_single_p (def_stmt)
1979 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1980 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1981     {
1982       tree base2;
1983       poly_int64 offset2, size2, maxsize2;
1984       bool reverse;
1985       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1986 				       &offset2, &size2, &maxsize2, &reverse);
1987       if (known_size_p (maxsize2)
1988 	  && known_eq (maxsize2, size2)
1989 	  && operand_equal_p (base, base2, 0)
1990 	  && known_subrange_p (offset, maxsize, offset2, size2))
1991 	{
1992 	  tree val = build_zero_cst (vr->type);
1993 	  return vn_reference_lookup_or_insert_for_pieces
1994 	           (vuse, vr->set, vr->type, vr->operands, val);
1995 	}
1996     }
1997 
1998   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1999      routines to extract the assigned bits.  */
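  /* E.g. after a store of the constant 0x11223344 to a word, a narrower
     read contained in it can be computed by encoding the constant into a
     byte buffer with native_encode_expr and re-interpreting the bytes
     covering the read with native_interpret_expr (illustrative sketch).  */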
2000   else if (known_eq (ref->size, maxsize)
2001 	   && is_gimple_reg_type (vr->type)
2002 	   && !contains_storage_order_barrier_p (vr->operands)
2003 	   && gimple_assign_single_p (def_stmt)
2004 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2005 	   /* native_encode and native_decode operate on arrays of bytes
2006 	      and so fundamentally need a compile-time size and offset.  */
2007 	   && maxsize.is_constant (&maxsizei)
2008 	   && maxsizei % BITS_PER_UNIT == 0
2009 	   && offset.is_constant (&offseti)
2010 	   && offseti % BITS_PER_UNIT == 0
2011 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2012 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2013 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2014     {
2015       tree base2;
2016       HOST_WIDE_INT offset2, size2;
2017       bool reverse;
2018       base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2019 					   &offset2, &size2, &reverse);
2020       if (base2
2021 	  && !reverse
2022 	  && size2 % BITS_PER_UNIT == 0
2023 	  && offset2 % BITS_PER_UNIT == 0
2024 	  && operand_equal_p (base, base2, 0)
2025 	  && known_subrange_p (offseti, maxsizei, offset2, size2))
2026 	{
2027 	  /* We support up to 512-bit values (for V8DFmode).  */
2028 	  unsigned char buffer[64];
2029 	  int len;
2030 
2031 	  tree rhs = gimple_assign_rhs1 (def_stmt);
2032 	  if (TREE_CODE (rhs) == SSA_NAME)
2033 	    rhs = SSA_VAL (rhs);
2034 	  len = native_encode_expr (rhs,
2035 				    buffer, sizeof (buffer),
2036 				    (offseti - offset2) / BITS_PER_UNIT);
2037 	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2038 	    {
2039 	      tree type = vr->type;
2040 	      /* Make sure to interpret in a type that has a range
2041 	         covering the whole access size.  */
2042 	      if (INTEGRAL_TYPE_P (vr->type)
2043 		  && maxsizei != TYPE_PRECISION (vr->type))
2044 		type = build_nonstandard_integer_type (maxsizei,
2045 						       TYPE_UNSIGNED (type));
2046 	      tree val = native_interpret_expr (type, buffer,
2047 						maxsizei / BITS_PER_UNIT);
2048 	      /* If we chop off bits because the type's precision doesn't
2049 		 match the memory access size, this is ok when optimizing
2050 		 reads but not when called from the DSE code during
2051 		 elimination.  */
2052 	      if (val
2053 		  && type != vr->type)
2054 		{
2055 		  if (! int_fits_type_p (val, vr->type))
2056 		    val = NULL_TREE;
2057 		  else
2058 		    val = fold_convert (vr->type, val);
2059 		}
2060 
2061 	      if (val)
2062 		return vn_reference_lookup_or_insert_for_pieces
2063 			 (vuse, vr->set, vr->type, vr->operands, val);
2064 	    }
2065 	}
2066     }
2067 
2068   /* 4) Assignment from an SSA name whose definition we may be able
2069      to access pieces from.  */
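  /* The covered part of the stored SSA value is extracted by looking up
     or building a BIT_FIELD_REF <val, size, offset> via the n-ary
     value-numbering machinery (vn_nary_build_or_lookup).  */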
2070   else if (known_eq (ref->size, maxsize)
2071 	   && is_gimple_reg_type (vr->type)
2072 	   && !contains_storage_order_barrier_p (vr->operands)
2073 	   && gimple_assign_single_p (def_stmt)
2074 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2075     {
2076       tree base2;
2077       poly_int64 offset2, size2, maxsize2;
2078       bool reverse;
2079       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2080 				       &offset2, &size2, &maxsize2,
2081 				       &reverse);
2082       tree def_rhs = gimple_assign_rhs1 (def_stmt);
2083       if (!reverse
2084 	  && known_size_p (maxsize2)
2085 	  && known_eq (maxsize2, size2)
2086 	  && operand_equal_p (base, base2, 0)
2087 	  && known_subrange_p (offset, maxsize, offset2, size2)
2088 	  /* ???  We can't handle bitfield precision extracts without
2089 	     either using an alternate type for the BIT_FIELD_REF and
2090 	     then doing a conversion or possibly adjusting the offset
2091 	     according to endianness.  */
2092 	  && (! INTEGRAL_TYPE_P (vr->type)
2093 	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2094 	  && multiple_p (ref->size, BITS_PER_UNIT)
2095 	  && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2096 	      || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2097 	{
2098 	  code_helper rcode = BIT_FIELD_REF;
2099 	  tree ops[3];
2100 	  ops[0] = SSA_VAL (def_rhs);
2101 	  ops[1] = bitsize_int (ref->size);
2102 	  ops[2] = bitsize_int (offset - offset2);
2103 	  tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2104 	  if (val
2105 	      && (TREE_CODE (val) != SSA_NAME
2106 		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2107 	    {
2108 	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2109 		  (vuse, vr->set, vr->type, vr->operands, val);
2110 	      return res;
2111 	    }
2112 	}
2113     }
2114 
2115   /* 5) For aggregate copies translate the reference through them if
2116      the copy kills ref.  */
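  /* E.g. with
       b = a;
       ... = b.f;
     the lookup of b.f is rewritten into a lookup of a.f and the walk
     continues from the VUSE of the copy (illustrative example).  */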
2117   else if (vn_walk_kind == VN_WALKREWRITE
2118 	   && gimple_assign_single_p (def_stmt)
2119 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
2120 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2121 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2122     {
2123       tree base2;
2124       int i, j, k;
2125       auto_vec<vn_reference_op_s> rhs;
2126       vn_reference_op_t vro;
2127       ao_ref r;
2128 
2129       if (!lhs_ref_ok)
2130 	return (void *)-1;
2131 
2132       /* See if the assignment kills REF.  */
2133       base2 = ao_ref_base (&lhs_ref);
2134       if (!lhs_ref.max_size_known_p ()
2135 	  || (base != base2
2136 	      && (TREE_CODE (base) != MEM_REF
2137 		  || TREE_CODE (base2) != MEM_REF
2138 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2139 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2140 					  TREE_OPERAND (base2, 1))))
2141 	  || !stmt_kills_ref_p (def_stmt, ref))
2142 	return (void *)-1;
2143 
2144       /* Find the common base of ref and the lhs.  lhs_ops already
2145          contains valueized operands for the lhs.  */
2146       i = vr->operands.length () - 1;
2147       j = lhs_ops.length () - 1;
2148       while (j >= 0 && i >= 0
2149 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2150 	{
2151 	  i--;
2152 	  j--;
2153 	}
2154 
2155       /* ???  The innermost op should always be a MEM_REF and we already
2156          checked that the assignment to the lhs kills vr.  Thus for
2157 	 aggregate copies using char[] types the vn_reference_op_eq
2158 	 may fail when comparing types for compatibility.  But we really
2159 	 don't care here - further lookups with the rewritten operands
2160 	 will simply fail if we messed up types too badly.  */
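      /* If the innermost MEM_REFs of VR and the LHS agree except for a
	 known constant offset, record the difference so it can be
	 applied to the inner MEM_REF of the RHS below.  */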
2161       poly_int64 extra_off = 0;
2162       if (j == 0 && i >= 0
2163 	  && lhs_ops[0].opcode == MEM_REF
2164 	  && maybe_ne (lhs_ops[0].off, -1))
2165 	{
2166 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2167 	    i--, j--;
2168 	  else if (vr->operands[i].opcode == MEM_REF
2169 		   && maybe_ne (vr->operands[i].off, -1))
2170 	    {
2171 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
2172 	      i--, j--;
2173 	    }
2174 	}
2175 
2176       /* i now points to the first additional op.
2177 	 ???  LHS may not be completely contained in VR, one or more
2178 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
2179 	 try handling outermost VIEW_CONVERT_EXPRs.  */
2180       if (j != -1)
2181 	return (void *)-1;
2182 
2183       /* Punt if the additional ops contain a storage order barrier.  */
2184       for (k = i; k >= 0; k--)
2185 	{
2186 	  vro = &vr->operands[k];
2187 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2188 	    return (void *)-1;
2189 	}
2190 
2191       /* Now re-write REF to be based on the rhs of the assignment.  */
2192       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2193 
2194       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
2195       if (maybe_ne (extra_off, 0))
2196 	{
2197 	  if (rhs.length () < 2
2198 	      || rhs[0].opcode != MEM_REF
2199 	      || known_eq (rhs[0].off, -1))
2200 	    return (void *)-1;
2201 	  rhs[0].off += extra_off;
2202 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2203 					build_int_cst (TREE_TYPE (rhs[0].op0),
2204 						       extra_off));
2205 	}
2206 
2207       /* We need to prepend vr->operands[0..i] to rhs.  */
2208       vec<vn_reference_op_s> old = vr->operands;
2209       if (i + 1 + rhs.length () > vr->operands.length ())
2210 	vr->operands.safe_grow (i + 1 + rhs.length ());
2211       else
2212 	vr->operands.truncate (i + 1 + rhs.length ());
2213       FOR_EACH_VEC_ELT (rhs, j, vro)
2214 	vr->operands[i + 1 + j] = *vro;
2215       vr->operands = valueize_refs (vr->operands);
2216       if (old == shared_lookup_references)
2217 	shared_lookup_references = vr->operands;
2218       vr->hashcode = vn_reference_compute_hash (vr);
2219 
2220       /* Try folding the new reference to a constant.  */
2221       tree val = fully_constant_vn_reference_p (vr);
2222       if (val)
2223 	return vn_reference_lookup_or_insert_for_pieces
2224 		 (vuse, vr->set, vr->type, vr->operands, val);
2225 
2226       /* Adjust *ref from the new operands.  */
2227       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2228 	return (void *)-1;
2229       /* This can happen with bitfields.  */
2230       if (maybe_ne (ref->size, r.size))
2231 	return (void *)-1;
2232       *ref = r;
2233 
2234       /* Do not update last seen VUSE after translating.  */
2235       last_vuse_ptr = NULL;
2236 
2237       /* Keep looking for the adjusted *REF / VR pair.  */
2238       return NULL;
2239     }
2240 
2241   /* 6) For memcpy copies translate the reference through them if
2242      the copy kills ref.  */
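  /* E.g. with
       memcpy (&b, &a, sizeof (b));
       ... = b.f;
     the read of b.f is re-expressed as a MEM_REF based on &a at the
     corresponding offset (illustrative example).  */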
2243   else if (vn_walk_kind == VN_WALKREWRITE
2244 	   && is_gimple_reg_type (vr->type)
2245 	   /* ???  Handle BCOPY as well.  */
2246 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2247 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2248 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2249 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2250 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2251 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2252 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2253 	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2254     {
2255       tree lhs, rhs;
2256       ao_ref r;
2257       poly_int64 rhs_offset, lhs_offset;
2258       vn_reference_op_s op;
2259       poly_uint64 mem_offset;
2260       poly_int64 at, byte_maxsize;
2261 
2262       /* Only handle non-variable, addressable refs.  */
2263       if (maybe_ne (ref->size, maxsize)
2264 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
2265 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2266 	return (void *)-1;
2267 
2268       /* Extract a pointer base and an offset for the destination.  */
2269       lhs = gimple_call_arg (def_stmt, 0);
2270       lhs_offset = 0;
2271       if (TREE_CODE (lhs) == SSA_NAME)
2272 	{
2273 	  lhs = SSA_VAL (lhs);
2274 	  if (TREE_CODE (lhs) == SSA_NAME)
2275 	    {
2276 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2277 	      if (gimple_assign_single_p (def_stmt)
2278 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2279 		lhs = gimple_assign_rhs1 (def_stmt);
2280 	    }
2281 	}
2282       if (TREE_CODE (lhs) == ADDR_EXPR)
2283 	{
2284 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2285 						    &lhs_offset);
2286 	  if (!tem)
2287 	    return (void *)-1;
2288 	  if (TREE_CODE (tem) == MEM_REF
2289 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2290 	    {
2291 	      lhs = TREE_OPERAND (tem, 0);
2292 	      if (TREE_CODE (lhs) == SSA_NAME)
2293 		lhs = SSA_VAL (lhs);
2294 	      lhs_offset += mem_offset;
2295 	    }
2296 	  else if (DECL_P (tem))
2297 	    lhs = build_fold_addr_expr (tem);
2298 	  else
2299 	    return (void *)-1;
2300 	}
2301       if (TREE_CODE (lhs) != SSA_NAME
2302 	  && TREE_CODE (lhs) != ADDR_EXPR)
2303 	return (void *)-1;
2304 
2305       /* Extract a pointer base and an offset for the source.  */
2306       rhs = gimple_call_arg (def_stmt, 1);
2307       rhs_offset = 0;
2308       if (TREE_CODE (rhs) == SSA_NAME)
2309 	rhs = SSA_VAL (rhs);
2310       if (TREE_CODE (rhs) == ADDR_EXPR)
2311 	{
2312 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2313 						    &rhs_offset);
2314 	  if (!tem)
2315 	    return (void *)-1;
2316 	  if (TREE_CODE (tem) == MEM_REF
2317 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2318 	    {
2319 	      rhs = TREE_OPERAND (tem, 0);
2320 	      rhs_offset += mem_offset;
2321 	    }
2322 	  else if (DECL_P (tem)
2323 		   || TREE_CODE (tem) == STRING_CST)
2324 	    rhs = build_fold_addr_expr (tem);
2325 	  else
2326 	    return (void *)-1;
2327 	}
2328       if (TREE_CODE (rhs) != SSA_NAME
2329 	  && TREE_CODE (rhs) != ADDR_EXPR)
2330 	return (void *)-1;
2331 
2332       /* The bases of the destination and the reference have to agree.  */
2333       if (TREE_CODE (base) == MEM_REF)
2334 	{
2335 	  if (TREE_OPERAND (base, 0) != lhs
2336 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2337 	    return (void *) -1;
2338 	  at += mem_offset;
2339 	}
2340       else if (!DECL_P (base)
2341 	       || TREE_CODE (lhs) != ADDR_EXPR
2342 	       || TREE_OPERAND (lhs, 0) != base)
2343 	return (void *)-1;
2344 
2345       /* If the access is completely outside of the memcpy destination
2346 	 area there is no aliasing.  */
2347       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2348 	return NULL;
2349       /* And the access has to be contained within the memcpy destination.  */
2350       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2351 	return (void *)-1;
2352 
2353       /* Make room for 2 operands in the new reference.  */
2354       if (vr->operands.length () < 2)
2355 	{
2356 	  vec<vn_reference_op_s> old = vr->operands;
2357 	  vr->operands.safe_grow_cleared (2);
2358 	  if (old == shared_lookup_references)
2359 	    shared_lookup_references = vr->operands;
2360 	}
2361       else
2362 	vr->operands.truncate (2);
2363 
2364       /* The looked-through reference is a simple MEM_REF.  */
2365       memset (&op, 0, sizeof (op));
2366       op.type = vr->type;
2367       op.opcode = MEM_REF;
2368       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2369       op.off = at - lhs_offset + rhs_offset;
2370       vr->operands[0] = op;
2371       op.type = TREE_TYPE (rhs);
2372       op.opcode = TREE_CODE (rhs);
2373       op.op0 = rhs;
2374       op.off = -1;
2375       vr->operands[1] = op;
2376       vr->hashcode = vn_reference_compute_hash (vr);
2377 
2378       /* Try folding the new reference to a constant.  */
2379       tree val = fully_constant_vn_reference_p (vr);
2380       if (val)
2381 	return vn_reference_lookup_or_insert_for_pieces
2382 		 (vuse, vr->set, vr->type, vr->operands, val);
2383 
2384       /* Adjust *ref from the new operands.  */
2385       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2386 	return (void *)-1;
2387       /* This can happen with bitfields.  */
2388       if (maybe_ne (ref->size, r.size))
2389 	return (void *)-1;
2390       *ref = r;
2391 
2392       /* Do not update last seen VUSE after translating.  */
2393       last_vuse_ptr = NULL;
2394 
2395       /* Keep looking for the adjusted *REF / VR pair.  */
2396       return NULL;
2397     }
2398 
2399   /* Bail out and stop walking.  */
2400   return (void *)-1;
2401 }
2402 
2403 /* Return a reference op vector from OP that can be used for
2404    vn_reference_lookup_pieces.  The caller is responsible for releasing
2405    the vector.  */
2406 
2407 vec<vn_reference_op_s>
2408 vn_reference_operands_for_lookup (tree op)
2409 {
2410   bool valueized;
2411   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2412 }
2413 
2414 /* Lookup a reference operation by its parts, in the current hash table.
2415    Returns the resulting value number if it exists in the hash table,
2416    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2417    vn_reference_t stored in the hashtable if something is found.  */
2418 
2419 tree
2420 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2421 			    vec<vn_reference_op_s> operands,
2422 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2423 {
2424   struct vn_reference_s vr1;
2425   vn_reference_t tmp;
2426   tree cst;
2427 
2428   if (!vnresult)
2429     vnresult = &tmp;
2430   *vnresult = NULL;
2431 
2432   vr1.vuse = vuse_ssa_val (vuse);
2433   shared_lookup_references.truncate (0);
2434   shared_lookup_references.safe_grow (operands.length ());
2435   memcpy (shared_lookup_references.address (),
2436 	  operands.address (),
2437 	  sizeof (vn_reference_op_s)
2438 	  * operands.length ());
2439   vr1.operands = operands = shared_lookup_references
2440     = valueize_refs (shared_lookup_references);
2441   vr1.type = type;
2442   vr1.set = set;
2443   vr1.hashcode = vn_reference_compute_hash (&vr1);
2444   if ((cst = fully_constant_vn_reference_p (&vr1)))
2445     return cst;
2446 
2447   vn_reference_lookup_1 (&vr1, vnresult);
2448   if (!*vnresult
2449       && kind != VN_NOWALK
2450       && vr1.vuse)
2451     {
2452       ao_ref r;
2453       vn_walk_kind = kind;
2454       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2455 	*vnresult =
2456 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2457 						  vn_reference_lookup_2,
2458 						  vn_reference_lookup_3,
2459 						  vuse_ssa_val, &vr1);
2460       gcc_checking_assert (vr1.operands == shared_lookup_references);
2461     }
2462 
2463   if (*vnresult)
2464      return (*vnresult)->result;
2465 
2466   return NULL_TREE;
2467 }
2468 
2469 /* Lookup OP in the current hash table, and return the resulting value
2470    number if it exists in the hash table.  Return NULL_TREE if it does
2471    not exist in the hash table or if the result field of the structure
2472    was NULL.  VNRESULT will be filled in with the vn_reference_t
2473    stored in the hashtable if one exists.  When TBAA_P is false assume
2474    we are looking up a store and treat it as having alias-set zero.  */
2475 
2476 tree
2477 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2478 		     vn_reference_t *vnresult, bool tbaa_p)
2479 {
2480   vec<vn_reference_op_s> operands;
2481   struct vn_reference_s vr1;
2482   tree cst;
2483   bool valueized_anything;
2484 
2485   if (vnresult)
2486     *vnresult = NULL;
2487 
2488   vr1.vuse = vuse_ssa_val (vuse);
2489   vr1.operands = operands
2490     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2491   vr1.type = TREE_TYPE (op);
2492   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2493   vr1.hashcode = vn_reference_compute_hash (&vr1);
2494   if ((cst = fully_constant_vn_reference_p (&vr1)))
2495     return cst;
2496 
2497   if (kind != VN_NOWALK
2498       && vr1.vuse)
2499     {
2500       vn_reference_t wvnresult;
2501       ao_ref r;
2502       /* Make sure to use a valueized reference if we valueized anything.
2503          Otherwise preserve the full reference for advanced TBAA.  */
2504       if (!valueized_anything
2505 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2506 					     vr1.operands))
2507 	ao_ref_init (&r, op);
2508       if (! tbaa_p)
2509 	r.ref_alias_set = r.base_alias_set = 0;
2510       vn_walk_kind = kind;
2511       wvnresult =
2512 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2513 						vn_reference_lookup_2,
2514 						vn_reference_lookup_3,
2515 						vuse_ssa_val, &vr1);
2516       gcc_checking_assert (vr1.operands == shared_lookup_references);
2517       if (wvnresult)
2518 	{
2519 	  if (vnresult)
2520 	    *vnresult = wvnresult;
2521 	  return wvnresult->result;
2522 	}
2523 
2524       return NULL_TREE;
2525     }
2526 
2527   return vn_reference_lookup_1 (&vr1, vnresult);
2528 }
2529 
2530 /* Lookup CALL in the current hash table and return the entry in
2531    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2532 
2533 void
2534 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2535 			  vn_reference_t vr)
2536 {
2537   if (vnresult)
2538     *vnresult = NULL;
2539 
2540   tree vuse = gimple_vuse (call);
2541 
2542   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2543   vr->operands = valueize_shared_reference_ops_from_call (call);
2544   vr->type = gimple_expr_type (call);
2545   vr->set = 0;
2546   vr->hashcode = vn_reference_compute_hash (vr);
2547   vn_reference_lookup_1 (vr, vnresult);
2548 }
2549 
2550 /* Insert OP into the current hash table with a value number of
2551    RESULT, and return the resulting reference structure we created.  */
2552 
2553 static vn_reference_t
2554 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2555 {
2556   vn_reference_s **slot;
2557   vn_reference_t vr1;
2558   bool tem;
2559 
2560   vr1 = current_info->references_pool->allocate ();
2561   if (TREE_CODE (result) == SSA_NAME)
2562     vr1->value_id = VN_INFO (result)->value_id;
2563   else
2564     vr1->value_id = get_or_alloc_constant_value_id (result);
2565   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2566   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2567   vr1->type = TREE_TYPE (op);
2568   vr1->set = get_alias_set (op);
2569   vr1->hashcode = vn_reference_compute_hash (vr1);
2570   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2571   vr1->result_vdef = vdef;
2572 
2573   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2574 							INSERT);
2575 
2576   /* Because we lookup stores using vuses, and value number failures
2577      using the vdefs (see visit_reference_op_store for how and why),
2578      it's possible that on failure we may try to insert an already
2579      inserted store.  This is not wrong, there is no ssa name for a
2580      store that we could use as a differentiator anyway.  Thus, unlike
2581      the other lookup functions, you cannot gcc_assert (!*slot)
2582      here.  */
2583 
2584   /* But free the old slot in case of a collision.  */
2585   if (*slot)
2586     free_reference (*slot);
2587 
2588   *slot = vr1;
2589   return vr1;
2590 }
2591 
2592 /* Insert a reference by its pieces into the current hash table with
2593    a value number of RESULT.  Return the resulting reference
2594    structure we created.  */
2595 
2596 vn_reference_t
2597 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2598 			    vec<vn_reference_op_s> operands,
2599 			    tree result, unsigned int value_id)
2600 
2601 {
2602   vn_reference_s **slot;
2603   vn_reference_t vr1;
2604 
2605   vr1 = current_info->references_pool->allocate ();
2606   vr1->value_id = value_id;
2607   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2608   vr1->operands = valueize_refs (operands);
2609   vr1->type = type;
2610   vr1->set = set;
2611   vr1->hashcode = vn_reference_compute_hash (vr1);
2612   if (result && TREE_CODE (result) == SSA_NAME)
2613     result = SSA_VAL (result);
2614   vr1->result = result;
2615 
2616   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2617 							INSERT);
2618 
2619   /* At this point we should have all the things inserted that we have
2620      seen before, and we should never try inserting something that
2621      already exists.  */
2622   gcc_assert (!*slot);
2623   if (*slot)
2624     free_reference (*slot);
2625 
2626   *slot = vr1;
2627   return vr1;
2628 }
2629 
2630 /* Compute and return the hash value for nary operation VNO1.  */
2631 
2632 static hashval_t
2633 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2634 {
2635   inchash::hash hstate;
2636   unsigned i;
2637 
2638   for (i = 0; i < vno1->length; ++i)
2639     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2640       vno1->op[i] = SSA_VAL (vno1->op[i]);
2641 
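  /* Canonicalize the operand order of commutative operations and of
     comparisons (swapping the operands and adjusting the comparison
     code) so that equivalent expressions such as a + b and b + a, or
     a < b and b > a, hash identically.  */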
2642   if (((vno1->length == 2
2643 	&& commutative_tree_code (vno1->opcode))
2644        || (vno1->length == 3
2645 	   && commutative_ternary_tree_code (vno1->opcode)))
2646       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2647     std::swap (vno1->op[0], vno1->op[1]);
2648   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2649 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2650     {
2651       std::swap (vno1->op[0], vno1->op[1]);
2652       vno1->opcode = swap_tree_comparison  (vno1->opcode);
2653     }
2654 
2655   hstate.add_int (vno1->opcode);
2656   for (i = 0; i < vno1->length; ++i)
2657     inchash::add_expr (vno1->op[i], hstate);
2658 
2659   return hstate.end ();
2660 }
2661 
2662 /* Compare nary operations VNO1 and VNO2 and return true if they are
2663    equivalent.  */
2664 
2665 bool
2666 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2667 {
2668   unsigned i;
2669 
2670   if (vno1->hashcode != vno2->hashcode)
2671     return false;
2672 
2673   if (vno1->length != vno2->length)
2674     return false;
2675 
2676   if (vno1->opcode != vno2->opcode
2677       || !types_compatible_p (vno1->type, vno2->type))
2678     return false;
2679 
2680   for (i = 0; i < vno1->length; ++i)
2681     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2682       return false;
2683 
2684   /* BIT_INSERT_EXPR has an implicit operand, the type precision
2685      of op1.  Need to check to make sure they are the same.  */
2686   if (vno1->opcode == BIT_INSERT_EXPR
2687       && TREE_CODE (vno1->op[1]) == INTEGER_CST
2688       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2689 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2690     return false;
2691 
2692   return true;
2693 }
2694 
2695 /* Initialize VNO from the pieces provided.  */
2696 
2697 static void
2698 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2699 			     enum tree_code code, tree type, tree *ops)
2700 {
2701   vno->opcode = code;
2702   vno->length = length;
2703   vno->type = type;
2704   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2705 }
2706 
2707 /* Initialize VNO from OP.  */
2708 
2709 static void
2710 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2711 {
2712   unsigned i;
2713 
2714   vno->opcode = TREE_CODE (op);
2715   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2716   vno->type = TREE_TYPE (op);
2717   for (i = 0; i < vno->length; ++i)
2718     vno->op[i] = TREE_OPERAND (op, i);
2719 }
2720 
2721 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2722 
2723 static unsigned int
2724 vn_nary_length_from_stmt (gimple *stmt)
2725 {
2726   switch (gimple_assign_rhs_code (stmt))
2727     {
2728     case REALPART_EXPR:
2729     case IMAGPART_EXPR:
2730     case VIEW_CONVERT_EXPR:
2731       return 1;
2732 
2733     case BIT_FIELD_REF:
2734       return 3;
2735 
2736     case CONSTRUCTOR:
2737       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2738 
2739     default:
2740       return gimple_num_ops (stmt) - 1;
2741     }
2742 }
2743 
2744 /* Initialize VNO from STMT.  */
2745 
2746 static void
2747 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2748 {
2749   unsigned i;
2750 
2751   vno->opcode = gimple_assign_rhs_code (stmt);
2752   vno->type = gimple_expr_type (stmt);
2753   switch (vno->opcode)
2754     {
2755     case REALPART_EXPR:
2756     case IMAGPART_EXPR:
2757     case VIEW_CONVERT_EXPR:
2758       vno->length = 1;
2759       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2760       break;
2761 
2762     case BIT_FIELD_REF:
2763       vno->length = 3;
2764       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2765       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2766       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2767       break;
2768 
2769     case CONSTRUCTOR:
2770       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2771       for (i = 0; i < vno->length; ++i)
2772 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2773       break;
2774 
2775     default:
2776       gcc_checking_assert (!gimple_assign_single_p (stmt));
2777       vno->length = gimple_num_ops (stmt) - 1;
2778       for (i = 0; i < vno->length; ++i)
2779 	vno->op[i] = gimple_op (stmt, i + 1);
2780     }
2781 }
2782 
2783 /* Compute the hashcode for VNO and look for it in the hash table;
2784    return the resulting value number if it exists in the hash table.
2785    Return NULL_TREE if it does not exist in the hash table or if the
2786    result field of the operation is NULL.  VNRESULT will contain the
2787    vn_nary_op_t from the hashtable if it exists.  */
2788 
2789 static tree
2790 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2791 {
2792   vn_nary_op_s **slot;
2793 
2794   if (vnresult)
2795     *vnresult = NULL;
2796 
2797   vno->hashcode = vn_nary_op_compute_hash (vno);
2798   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2799 						  NO_INSERT);
2800   if (!slot && current_info == optimistic_info)
2801     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2802 						  NO_INSERT);
2803   if (!slot)
2804     return NULL_TREE;
2805   if (vnresult)
2806     *vnresult = *slot;
2807   return (*slot)->result;
2808 }
2809 
2810 /* Lookup an n-ary operation by its pieces and return the resulting value
2811    number if it exists in the hash table.  Return NULL_TREE if it does
2812    not exist in the hash table or if the result field of the operation
2813    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2814    if it exists.  */
2815 
2816 tree
2817 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2818 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2819 {
2820   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2821 				  sizeof_vn_nary_op (length));
2822   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2823   return vn_nary_op_lookup_1 (vno1, vnresult);
2824 }
2825 
2826 /* Lookup OP in the current hash table, and return the resulting value
2827    number if it exists in the hash table.  Return NULL_TREE if it does
2828    not exist in the hash table or if the result field of the operation
2829    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2830    if it exists.  */
2831 
2832 tree
2833 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2834 {
2835   vn_nary_op_t vno1
2836     = XALLOCAVAR (struct vn_nary_op_s,
2837 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2838   init_vn_nary_op_from_op (vno1, op);
2839   return vn_nary_op_lookup_1 (vno1, vnresult);
2840 }
2841 
2842 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2843    value number if it exists in the hash table.  Return NULL_TREE if
2844    it does not exist in the hash table.  VNRESULT will contain the
2845    vn_nary_op_t from the hashtable if it exists.  */
2846 
2847 tree
2848 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2849 {
2850   vn_nary_op_t vno1
2851     = XALLOCAVAR (struct vn_nary_op_s,
2852 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2853   init_vn_nary_op_from_stmt (vno1, stmt);
2854   return vn_nary_op_lookup_1 (vno1, vnresult);
2855 }
2856 
2857 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2858 
2859 static vn_nary_op_t
2860 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2861 {
2862   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2863 }
2864 
2865 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2866    obstack.  */
2867 
2868 static vn_nary_op_t
2869 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2870 {
2871   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2872 					       &current_info->nary_obstack);
2873 
2874   vno1->value_id = value_id;
2875   vno1->length = length;
2876   vno1->result = result;
2877 
2878   return vno1;
2879 }
2880 
2881 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2882    VNO->HASHCODE first.  */
2883 
2884 static vn_nary_op_t
2885 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2886 			bool compute_hash)
2887 {
2888   vn_nary_op_s **slot;
2889 
2890   if (compute_hash)
2891     vno->hashcode = vn_nary_op_compute_hash (vno);
2892 
2893   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2894   /* While we do not want to insert things twice it's awkward to
2895      avoid it in the case where visit_nary_op pattern-matches stuff
2896      and ends up simplifying the replacement to itself.  We then
2897      get two inserts, one from visit_nary_op and one from
2898      vn_nary_build_or_lookup.
2899      So allow inserts with the same value number.  */
2900   if (*slot && (*slot)->result == vno->result)
2901     return *slot;
2902 
2903   gcc_assert (!*slot);
2904 
2905   *slot = vno;
2906   return vno;
2907 }
2908 
2909 /* Insert an n-ary operation into the current hash table using its
2910    pieces.  Return the vn_nary_op_t structure we created and put in
2911    the hashtable.  */
2912 
2913 vn_nary_op_t
2914 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2915 			  tree type, tree *ops,
2916 			  tree result, unsigned int value_id)
2917 {
2918   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2919   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2920   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2921 }
2922 
2923 /* Insert OP into the current hash table with a value number of
2924    RESULT.  Return the vn_nary_op_t structure we created and put in
2925    the hashtable.  */
2926 
2927 vn_nary_op_t
2928 vn_nary_op_insert (tree op, tree result)
2929 {
2930   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2931   vn_nary_op_t vno1;
2932 
2933   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2934   init_vn_nary_op_from_op (vno1, op);
2935   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2936 }
2937 
2938 /* Insert the rhs of STMT into the current hash table with a value number of
2939    RESULT.  */
2940 
2941 static vn_nary_op_t
2942 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2943 {
2944   vn_nary_op_t vno1
2945     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2946 			result, VN_INFO (result)->value_id);
2947   init_vn_nary_op_from_stmt (vno1, stmt);
2948   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2949 }
2950 
2951 /* Compute a hashcode for PHI operation VP1 and return it.  */
2952 
2953 static inline hashval_t
2954 vn_phi_compute_hash (vn_phi_t vp1)
2955 {
2956   inchash::hash hstate (vp1->phiargs.length () > 2
2957 			? vp1->block->index : vp1->phiargs.length ());
2958   tree phi1op;
2959   tree type;
2960   edge e;
2961   edge_iterator ei;
2962 
2963   /* If all PHI arguments are constants we need to distinguish
2964      the PHI node via its type.  */
2965   type = vp1->type;
2966   hstate.merge_hash (vn_hash_type (type));
2967 
2968   FOR_EACH_EDGE (e, ei, vp1->block->preds)
2969     {
2970       /* Don't hash backedge values; they need to be handled as VN_TOP
2971          for optimistic value-numbering.  */
2972       if (e->flags & EDGE_DFS_BACK)
2973 	continue;
2974 
2975       phi1op = vp1->phiargs[e->dest_idx];
2976       if (phi1op == VN_TOP)
2977 	continue;
2978       inchash::add_expr (phi1op, hstate);
2979     }
2980 
2981   return hstate.end ();
2982 }
2983 
2984 
2985 /* Return true if COND1 and COND2 represent the same condition, set
2986    *INVERTED_P if one needs to be inverted to make it the same as
2987    the other.  */
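/* For example, a < b matches b > a with the operands swapped, and
   matches a >= b (when NaNs do not interfere) with *INVERTED_P set.  */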
2988 
2989 static bool
2990 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2991 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2992 {
2993   enum tree_code code1 = gimple_cond_code (cond1);
2994   enum tree_code code2 = gimple_cond_code (cond2);
2995 
2996   *inverted_p = false;
2997   if (code1 == code2)
2998     ;
2999   else if (code1 == swap_tree_comparison (code2))
3000     std::swap (lhs2, rhs2);
3001   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3002     *inverted_p = true;
3003   else if (code1 == invert_tree_comparison
3004 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3005     {
3006       std::swap (lhs2, rhs2);
3007       *inverted_p = true;
3008     }
3009   else
3010     return false;
3011 
3012   return ((expressions_equal_p (lhs1, lhs2)
3013 	   && expressions_equal_p (rhs1, rhs2))
3014 	  || (commutative_tree_code (code1)
3015 	      && expressions_equal_p (lhs1, rhs2)
3016 	      && expressions_equal_p (rhs1, lhs2)));
3017 }
3018 
3019 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
3020 
3021 static int
3022 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3023 {
3024   if (vp1->hashcode != vp2->hashcode)
3025     return false;
3026 
3027   if (vp1->block != vp2->block)
3028     {
3029       if (vp1->phiargs.length () != vp2->phiargs.length ())
3030 	return false;
3031 
3032       switch (vp1->phiargs.length ())
3033 	{
3034 	case 1:
3035 	  /* Single-arg PHIs are just copies.  */
3036 	  break;
3037 
3038 	case 2:
3039 	  {
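	    /* Two-argument PHIs in different blocks can still be
	       equivalent when both blocks are controlled by the same
	       (or inverted) condition and the arguments coming in on
	       the corresponding true/false edges are equal; the checks
	       below verify exactly that.  */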
3040 	    /* Rule out backedges into the PHI.  */
3041 	    if (vp1->block->loop_father->header == vp1->block
3042 		|| vp2->block->loop_father->header == vp2->block)
3043 	      return false;
3044 
3045 	    /* If the PHI nodes do not have compatible types
3046 	       they are not the same.  */
3047 	    if (!types_compatible_p (vp1->type, vp2->type))
3048 	      return false;
3049 
3050 	    basic_block idom1
3051 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3052 	    basic_block idom2
3053 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3054 	    /* If the immediate dominators end in switch stmts, multiple
3055 	       values may end up in the same PHI arg via intermediate
3056 	       CFG merges.  */
3057 	    if (EDGE_COUNT (idom1->succs) != 2
3058 		|| EDGE_COUNT (idom2->succs) != 2)
3059 	      return false;
3060 
3061 	    /* Verify the controlling stmt is the same.  */
3062 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3063 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3064 	    if (! last1 || ! last2)
3065 	      return false;
3066 	    bool inverted_p;
3067 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3068 				      last2, vp2->cclhs, vp2->ccrhs,
3069 				      &inverted_p))
3070 	      return false;
3071 
3072 	    /* Get at true/false controlled edges into the PHI.  */
3073 	    edge te1, te2, fe1, fe2;
3074 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
3075 						       &te1, &fe1)
3076 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
3077 							  &te2, &fe2))
3078 	      return false;
3079 
3080 	    /* Swap edges if the second condition is the inverted of the
3081 	       first.  */
3082 	    if (inverted_p)
3083 	      std::swap (te2, fe2);
3084 
3085 	    /* ???  Handle VN_TOP specially.  */
3086 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3087 				       vp2->phiargs[te2->dest_idx])
3088 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3089 					  vp2->phiargs[fe2->dest_idx]))
3090 	      return false;
3091 
3092 	    return true;
3093 	  }
3094 
3095 	default:
3096 	  return false;
3097 	}
3098     }
3099 
3100   /* If the PHI nodes do not have compatible types
3101      they are not the same.  */
3102   if (!types_compatible_p (vp1->type, vp2->type))
3103     return false;
3104 
3105   /* Any phi in the same block will have its arguments in the
3106      same edge order, because of how we store phi nodes.  */
3107   int i;
3108   tree phi1op;
3109   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3110     {
3111       tree phi2op = vp2->phiargs[i];
3112       if (phi1op == VN_TOP || phi2op == VN_TOP)
3113 	continue;
3114       if (!expressions_equal_p (phi1op, phi2op))
3115 	return false;
3116     }
3117 
3118   return true;
3119 }
3120 
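/* Shared vector of PHI arguments, re-used by vn_phi_lookup to avoid
   re-allocating an argument vector for every lookup.  */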
3121 static vec<tree> shared_lookup_phiargs;
3122 
3123 /* Lookup PHI in the current hash table, and return the resulting
3124    value number if it exists in the hash table.  Return NULL_TREE if
3125    it does not exist in the hash table. */
3126 
3127 static tree
3128 vn_phi_lookup (gimple *phi)
3129 {
3130   vn_phi_s **slot;
3131   struct vn_phi_s vp1;
3132   edge e;
3133   edge_iterator ei;
3134 
3135   shared_lookup_phiargs.truncate (0);
3136   shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3137 
3138   /* Canonicalize the SSA_NAME's to their value number.  */
3139   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3140     {
3141       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3142       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3143       shared_lookup_phiargs[e->dest_idx] = def;
3144     }
3145   vp1.type = TREE_TYPE (gimple_phi_result (phi));
3146   vp1.phiargs = shared_lookup_phiargs;
3147   vp1.block = gimple_bb (phi);
3148   /* Extract values of the controlling condition.  */
3149   vp1.cclhs = NULL_TREE;
3150   vp1.ccrhs = NULL_TREE;
3151   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3152   if (EDGE_COUNT (idom1->succs) == 2)
3153     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3154       {
3155 	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3156 	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3157       }
3158   vp1.hashcode = vn_phi_compute_hash (&vp1);
3159   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3160 						  NO_INSERT);
3161   if (!slot && current_info == optimistic_info)
3162     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3163 						  NO_INSERT);
3164   if (!slot)
3165     return NULL_TREE;
3166   return (*slot)->result;
3167 }
3168 
3169 /* Insert PHI into the current hash table with a value number of
3170    RESULT.  */
3171 
3172 static vn_phi_t
3173 vn_phi_insert (gimple *phi, tree result)
3174 {
3175   vn_phi_s **slot;
3176   vn_phi_t vp1 = current_info->phis_pool->allocate ();
3177   vec<tree> args = vNULL;
3178   edge e;
3179   edge_iterator ei;
3180 
3181   args.safe_grow (gimple_phi_num_args (phi));
3182 
3183   /* Canonicalize the SSA_NAME's to their value number.  */
3184   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3185     {
3186       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3187       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3188       args[e->dest_idx] = def;
3189     }
3190   vp1->value_id = VN_INFO (result)->value_id;
3191   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3192   vp1->phiargs = args;
3193   vp1->block = gimple_bb (phi);
3194   /* Extract values of the controlling condition.  */
3195   vp1->cclhs = NULL_TREE;
3196   vp1->ccrhs = NULL_TREE;
3197   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3198   if (EDGE_COUNT (idom1->succs) == 2)
3199     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3200       {
3201 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3202 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3203       }
3204   vp1->result = result;
3205   vp1->hashcode = vn_phi_compute_hash (vp1);
3206 
3207   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3208 
3209   /* Because we iterate over phi operations more than once, it's
3210      possible the slot might already exist here, hence no assert.  */
3211   *slot = vp1;
3212   return vp1;
3213 }
3214 
3215 
3216 /* Print set of components in strongly connected component SCC to OUT. */
3217 
3218 static void
3219 print_scc (FILE *out, vec<tree> scc)
3220 {
3221   tree var;
3222   unsigned int i;
3223 
3224   fprintf (out, "SCC consists of %u:", scc.length ());
3225   FOR_EACH_VEC_ELT (scc, i, var)
3226     {
3227       fprintf (out, " ");
3228       print_generic_expr (out, var);
3229     }
3230   fprintf (out, "\n");
3231 }
3232 
3233 /* Return true if BB1 is dominated by BB2 taking into account edges
3234    that are not executable.  */
3235 
3236 static bool
3237 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3238 {
3239   edge_iterator ei;
3240   edge e;
3241 
3242   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3243     return true;
3244 
3245   /* Before iterating we'd like to know if there exists an
3246      (executable) path from bb2 to bb1 at all; if not we can
3247      directly return false.  For now simply iterate once.  */
3248 
3249   /* Iterate to the single executable bb1 predecessor.  */
3250   if (EDGE_COUNT (bb1->preds) > 1)
3251     {
3252       edge prede = NULL;
3253       FOR_EACH_EDGE (e, ei, bb1->preds)
3254 	if (e->flags & EDGE_EXECUTABLE)
3255 	  {
3256 	    if (prede)
3257 	      {
3258 		prede = NULL;
3259 		break;
3260 	      }
3261 	    prede = e;
3262 	  }
3263       if (prede)
3264 	{
3265 	  bb1 = prede->src;
3266 
3267 	  /* Re-do the dominance check with changed bb1.  */
3268 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3269 	    return true;
3270 	}
3271     }
3272 
3273   /* Iterate to the single executable bb2 successor.  */
3274   edge succe = NULL;
3275   FOR_EACH_EDGE (e, ei, bb2->succs)
3276     if (e->flags & EDGE_EXECUTABLE)
3277       {
3278 	if (succe)
3279 	  {
3280 	    succe = NULL;
3281 	    break;
3282 	  }
3283 	succe = e;
3284       }
3285   if (succe)
3286     {
3287       /* Verify the reached block is only reached through succe.
3288 	 If there is only one edge we can spare us the dominator
3289 	 check and iterate directly.  */
3290       if (EDGE_COUNT (succe->dest->preds) > 1)
3291 	{
3292 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3293 	    if (e != succe
3294 		&& (e->flags & EDGE_EXECUTABLE))
3295 	      {
3296 		succe = NULL;
3297 		break;
3298 	      }
3299 	}
3300       if (succe)
3301 	{
3302 	  bb2 = succe->dest;
3303 
3304 	  /* Re-do the dominance check with changed bb2.  */
3305 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3306 	    return true;
3307 	}
3308     }
3309 
3310   /* We could now iterate updating bb1 / bb2.  */
3311   return false;
3312 }
3313 
3314 /* Set the value number of FROM to TO, return true if it has changed
3315    as a result.  */
3316 
3317 static inline bool
3318 set_ssa_val_to (tree from, tree to)
3319 {
3320   tree currval = SSA_VAL (from);
3321   poly_int64 toff, coff;
3322 
3323   /* The only things we allow as value numbers are ssa_names
3324      and invariants.  So assert that here.  We don't allow VN_TOP
3325      as visiting a stmt should produce a value-number other than
3326      that.
3327      ???  Still VN_TOP can happen for unreachable code, so force
3328      it to varying in that case.  Not all code is prepared to
3329      get VN_TOP on valueization.  */
3330   if (to == VN_TOP)
3331     {
3332       if (dump_file && (dump_flags & TDF_DETAILS))
3333 	fprintf (dump_file, "Forcing value number to varying on "
3334 		 "receiving VN_TOP\n");
3335       to = from;
3336     }
3337 
3338   gcc_assert (to != NULL_TREE
3339 	      && ((TREE_CODE (to) == SSA_NAME
3340 		   && (to == from || SSA_VAL (to) == to))
3341 		  || is_gimple_min_invariant (to)));
3342 
3343   if (from != to)
3344     {
3345       if (currval == from)
3346 	{
3347 	  if (dump_file && (dump_flags & TDF_DETAILS))
3348 	    {
3349 	      fprintf (dump_file, "Not changing value number of ");
3350 	      print_generic_expr (dump_file, from);
3351 	      fprintf (dump_file, " from VARYING to ");
3352 	      print_generic_expr (dump_file, to);
3353 	      fprintf (dump_file, "\n");
3354 	    }
3355 	  return false;
3356 	}
3357       else if (currval != VN_TOP
3358 	       && ! is_gimple_min_invariant (currval)
3359 	       && is_gimple_min_invariant (to))
3360 	{
3361 	  if (dump_file && (dump_flags & TDF_DETAILS))
3362 	    {
3363 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3364 		       "value number of ");
3365 	      print_generic_expr (dump_file, from);
3366 	      fprintf (dump_file, " from ");
3367 	      print_generic_expr (dump_file, currval);
3368 	      fprintf (dump_file, " (non-constant) to ");
3369 	      print_generic_expr (dump_file, to);
3370 	      fprintf (dump_file, " (constant)\n");
3371 	    }
3372 	  to = from;
3373 	}
3374       else if (TREE_CODE (to) == SSA_NAME
3375 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3376 	to = from;
3377     }
3378 
3379   if (dump_file && (dump_flags & TDF_DETAILS))
3380     {
3381       fprintf (dump_file, "Setting value number of ");
3382       print_generic_expr (dump_file, from);
3383       fprintf (dump_file, " to ");
3384       print_generic_expr (dump_file, to);
3385     }
3386 
3387   if (currval != to
3388       && !operand_equal_p (currval, to, 0)
3389       /* Different undefined SSA names are not actually different.  See
3390          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
3391       && !(TREE_CODE (currval) == SSA_NAME
3392 	   && TREE_CODE (to) == SSA_NAME
3393 	   && ssa_undefined_value_p (currval, false)
3394 	   && ssa_undefined_value_p (to, false))
3395       /* ???  For addresses involving volatile objects or types operand_equal_p
3396          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3397 	 getting invariant gimple addresses here, so can use
3398 	 get_addr_base_and_unit_offset to do this comparison.  */
3399       && !(TREE_CODE (currval) == ADDR_EXPR
3400 	   && TREE_CODE (to) == ADDR_EXPR
3401 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3402 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3403 	   && known_eq (coff, toff)))
3404     {
3405       if (dump_file && (dump_flags & TDF_DETAILS))
3406 	fprintf (dump_file, " (changed)\n");
3407 
3408       /* If we equate two SSA names we have to make the side-band info
3409          of the leader conservative (and remember whatever original value
3410 	 was present).  */
3411       if (TREE_CODE (to) == SSA_NAME)
3412 	{
3413 	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3414 	      && SSA_NAME_RANGE_INFO (to))
3415 	    {
3416 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3417 		  || dominated_by_p_w_unex
3418 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3419 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3420 		/* Keep the info from the dominator.  */
3421 		;
3422 	      else
3423 		{
3424 		  /* Save old info.  */
3425 		  if (! VN_INFO (to)->info.range_info)
3426 		    {
3427 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3428 		      VN_INFO (to)->range_info_anti_range_p
3429 			= SSA_NAME_ANTI_RANGE_P (to);
3430 		    }
3431 		  /* Rather than allocating memory and unioning the info
3432 		     just clear it.  */
3433 		  if (dump_file && (dump_flags & TDF_DETAILS))
3434 		    {
3435 		      fprintf (dump_file, "clearing range info of ");
3436 		      print_generic_expr (dump_file, to);
3437 		      fprintf (dump_file, "\n");
3438 		    }
3439 		  SSA_NAME_RANGE_INFO (to) = NULL;
3440 		}
3441 	    }
3442 	  else if (POINTER_TYPE_P (TREE_TYPE (to))
3443 		   && SSA_NAME_PTR_INFO (to))
3444 	    {
3445 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3446 		  || dominated_by_p_w_unex
3447 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3448 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3449 		/* Keep the info from the dominator.  */
3450 		;
3451 	      else if (! SSA_NAME_PTR_INFO (from)
3452 		       /* Handle the case of trivially equivalent info.  */
3453 		       || memcmp (SSA_NAME_PTR_INFO (to),
3454 				  SSA_NAME_PTR_INFO (from),
3455 				  sizeof (ptr_info_def)) != 0)
3456 		{
3457 		  /* Save old info.  */
3458 		  if (! VN_INFO (to)->info.ptr_info)
3459 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3460 		  /* Rather than allocating memory and unioning the info
3461 		     just clear it.  */
3462 		  if (dump_file && (dump_flags & TDF_DETAILS))
3463 		    {
3464 		      fprintf (dump_file, "clearing points-to info of ");
3465 		      print_generic_expr (dump_file, to);
3466 		      fprintf (dump_file, "\n");
3467 		    }
3468 		  SSA_NAME_PTR_INFO (to) = NULL;
3469 		}
3470 	    }
3471 	}
3472 
3473       VN_INFO (from)->valnum = to;
3474       return true;
3475     }
3476   if (dump_file && (dump_flags & TDF_DETAILS))
3477     fprintf (dump_file, "\n");
3478   return false;
3479 }
3480 
3481 /* Mark as processed all the definitions in the defining stmt of USE, or
3482    the USE itself.  */
3483 
3484 static void
3485 mark_use_processed (tree use)
3486 {
3487   ssa_op_iter iter;
3488   def_operand_p defp;
3489   gimple *stmt = SSA_NAME_DEF_STMT (use);
3490 
3491   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3492     {
3493       VN_INFO (use)->use_processed = true;
3494       return;
3495     }
3496 
3497   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3498     {
3499       tree def = DEF_FROM_PTR (defp);
3500 
3501       VN_INFO (def)->use_processed = true;
3502     }
3503 }
3504 
3505 /* Value number all definitions in STMT to themselves.
3506    Return true if a value number changed.  */
3507 
3508 static bool
3509 defs_to_varying (gimple *stmt)
3510 {
3511   bool changed = false;
3512   ssa_op_iter iter;
3513   def_operand_p defp;
3514 
3515   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3516     {
3517       tree def = DEF_FROM_PTR (defp);
3518       changed |= set_ssa_val_to (def, def);
3519     }
3520   return changed;
3521 }
3522 
3523 /* Visit a copy between LHS and RHS, return true if the value number
3524    changed.  */
3525 
3526 static bool
3527 visit_copy (tree lhs, tree rhs)
3528 {
3529   /* Valueize.  */
3530   rhs = SSA_VAL (rhs);
3531 
3532   return set_ssa_val_to (lhs, rhs);
3533 }
3534 
3535 /* Lookup a value for OP in type WIDE_TYPE whose value, seen in the
3536    type of OP, is the same as OP.  */
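/* For instance (a sketch, names illustrative only): given  int j_2  and
   short i_3 = (short) j_2,  looking up i_3 in type int either finds an
   already value-numbered (int) i_3, or strips the truncation and yields
   j_2; an INTEGER_CST operand is simply extended to the wide type.  */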
3537 
3538 static tree
3539 valueized_wider_op (tree wide_type, tree op)
3540 {
3541   if (TREE_CODE (op) == SSA_NAME)
3542     op = SSA_VAL (op);
3543 
3544   /* Either we have the op widened available.  */
3545   tree ops[3] = {};
3546   ops[0] = op;
3547   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3548 				       wide_type, ops, NULL);
3549   if (tem)
3550     return tem;
3551 
3552   /* Or the op is truncated from some existing value.  */
3553   if (TREE_CODE (op) == SSA_NAME)
3554     {
3555       gimple *def = SSA_NAME_DEF_STMT (op);
3556       if (is_gimple_assign (def)
3557 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3558 	{
3559 	  tem = gimple_assign_rhs1 (def);
3560 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3561 	    {
3562 	      if (TREE_CODE (tem) == SSA_NAME)
3563 		tem = SSA_VAL (tem);
3564 	      return tem;
3565 	    }
3566 	}
3567     }
3568 
3569   /* For constants simply extend it.  */
3570   if (TREE_CODE (op) == INTEGER_CST)
3571     return wide_int_to_tree (wide_type, wi::to_wide (op));
3572 
3573   return NULL_TREE;
3574 }
3575 
3576 /* Visit a nary operator RHS, value number it, and return true if the
3577    value number of LHS has changed as a result.  */
3578 
3579 static bool
3580 visit_nary_op (tree lhs, gassign *stmt)
3581 {
3582   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3583   if (result)
3584     return set_ssa_val_to (lhs, result);
3585 
3586   /* Do some special pattern matching for redundancies of operations
3587      in different types.  */
3588   enum tree_code code = gimple_assign_rhs_code (stmt);
3589   tree type = TREE_TYPE (lhs);
3590   tree rhs1 = gimple_assign_rhs1 (stmt);
3591   switch (code)
3592     {
3593     CASE_CONVERT:
3594       /* Match arithmetic done in a different type where we can easily
3595          substitute the result from some earlier sign-changed or widened
3596 	 operation.  */
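      /* A sketch of such a redundancy (SSA names illustrative only):
	   unsigned short _1 = a_2 + b_3;
	   int _4 = (int) _1;
	 given an already value-numbered  int _5 = (int) a_2 + (int) b_3
	 we can value _4 as _5 & 0xffff; for a pure sign-change no
	 masking is needed and _4 becomes _5 directly.  */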
3597       if (INTEGRAL_TYPE_P (type)
3598 	  && TREE_CODE (rhs1) == SSA_NAME
3599 	  /* We only handle sign-changes or zero-extension -> & mask.  */
3600 	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3601 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3602 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3603 	{
3604 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3605 	  if (def
3606 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
3607 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
3608 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
3609 	    {
3610 	      tree ops[3] = {};
3611 	      /* Either we have the op widened available.  */
3612 	      ops[0] = valueized_wider_op (type,
3613 					   gimple_assign_rhs1 (def));
3614 	      if (ops[0])
3615 		ops[1] = valueized_wider_op (type,
3616 					     gimple_assign_rhs2 (def));
3617 	      if (ops[0] && ops[1])
3618 		{
3619 		  ops[0] = vn_nary_op_lookup_pieces
3620 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
3621 		  /* We have wider operation available.  */
3622 		  if (ops[0]
3623 		      /* If the leader is a wrapping operation we can
3624 			 insert it for code hoisting w/o introducing
3625 			 undefined overflow.  If it is not it has to
3626 			 be available.  See PR86554.  */
3627 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3628 			  || TREE_CODE (ops[0]) != SSA_NAME
3629 			  || SSA_NAME_IS_DEFAULT_DEF (ops[0])
3630 			  || dominated_by_p_w_unex
3631 			       (gimple_bb (stmt),
3632 				gimple_bb (SSA_NAME_DEF_STMT (ops[0])))))
3633 		    {
3634 		      unsigned lhs_prec = TYPE_PRECISION (type);
3635 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3636 		      if (lhs_prec == rhs_prec)
3637 			{
3638 			  ops[1] = NULL_TREE;
3639 			  result = vn_nary_build_or_lookup (NOP_EXPR,
3640 							    type, ops);
3641 			  if (result)
3642 			    {
3643 			      bool changed = set_ssa_val_to (lhs, result);
3644 			      vn_nary_op_insert_stmt (stmt, result);
3645 			      return changed;
3646 			    }
3647 			}
3648 		      else
3649 			{
3650 			  ops[1] = wide_int_to_tree (type,
3651 						     wi::mask (rhs_prec, false,
3652 							       lhs_prec));
3653 			  result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3654 							    TREE_TYPE (lhs),
3655 							    ops);
3656 			  if (result)
3657 			    {
3658 			      bool changed = set_ssa_val_to (lhs, result);
3659 			      vn_nary_op_insert_stmt (stmt, result);
3660 			      return changed;
3661 			    }
3662 			}
3663 		    }
3664 		}
3665 	    }
3666 	}
3667     default:;
3668     }
3669 
3670   bool changed = set_ssa_val_to (lhs, lhs);
3671   vn_nary_op_insert_stmt (stmt, lhs);
3672   return changed;
3673 }
3674 
3675 /* Visit a call STMT storing into LHS.  Return true if the value number
3676    of the LHS has changed as a result.  */
3677 
3678 static bool
3679 visit_reference_op_call (tree lhs, gcall *stmt)
3680 {
3681   bool changed = false;
3682   struct vn_reference_s vr1;
3683   vn_reference_t vnresult = NULL;
3684   tree vdef = gimple_vdef (stmt);
3685 
3686   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
3687   if (lhs && TREE_CODE (lhs) != SSA_NAME)
3688     lhs = NULL_TREE;
3689 
3690   vn_reference_lookup_call (stmt, &vnresult, &vr1);
3691   if (vnresult)
3692     {
3693       if (vnresult->result_vdef && vdef)
3694 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3695       else if (vdef)
3696 	/* If the call was discovered to be pure or const reflect
3697 	   that as far as possible.  */
3698 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3699 
3700       if (!vnresult->result && lhs)
3701 	vnresult->result = lhs;
3702 
3703       if (vnresult->result && lhs)
3704 	changed |= set_ssa_val_to (lhs, vnresult->result);
3705     }
3706   else
3707     {
3708       vn_reference_t vr2;
3709       vn_reference_s **slot;
3710       tree vdef_val = vdef;
3711       if (vdef)
3712 	{
3713 	  /* If we value numbered an indirect call's function to
3714 	     one not clobbering memory, value number its VDEF to its
3715 	     VUSE.  */
3716 	  tree fn = gimple_call_fn (stmt);
3717 	  if (fn && TREE_CODE (fn) == SSA_NAME)
3718 	    {
3719 	      fn = SSA_VAL (fn);
3720 	      if (TREE_CODE (fn) == ADDR_EXPR
3721 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3722 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3723 		      & (ECF_CONST | ECF_PURE)))
3724 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3725 	    }
3726 	  changed |= set_ssa_val_to (vdef, vdef_val);
3727 	}
3728       if (lhs)
3729 	changed |= set_ssa_val_to (lhs, lhs);
3730       vr2 = current_info->references_pool->allocate ();
3731       vr2->vuse = vr1.vuse;
3732       /* As we are not walking the virtual operand chain we know the
3733 	 shared_lookup_references are still original so we can re-use
3734 	 them here.  */
3735       vr2->operands = vr1.operands.copy ();
3736       vr2->type = vr1.type;
3737       vr2->set = vr1.set;
3738       vr2->hashcode = vr1.hashcode;
3739       vr2->result = lhs;
3740       vr2->result_vdef = vdef_val;
3741       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3742 							    INSERT);
3743       gcc_assert (!*slot);
3744       *slot = vr2;
3745     }
3746 
3747   return changed;
3748 }
3749 
3750 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3751    and return true if the value number of the LHS has changed as a result.  */
3752 
3753 static bool
3754 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3755 {
3756   bool changed = false;
3757   tree last_vuse;
3758   tree result;
3759 
3760   last_vuse = gimple_vuse (stmt);
3761   last_vuse_ptr = &last_vuse;
3762   result = vn_reference_lookup (op, gimple_vuse (stmt),
3763 				default_vn_walk_kind, NULL, true);
3764   last_vuse_ptr = NULL;
3765 
3766   /* We handle type-punning through unions by value-numbering based
3767      on offset and size of the access.  Be prepared to handle a
3768      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
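  /* For instance (a sketch, names illustrative only), if the accessed
     bytes were last known to hold the int value i_1 and we now load a
     float from them, the load is valued as VIEW_CONVERT_EXPR<float>(i_1)
     rather than as i_1 itself.  */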
3769   if (result
3770       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3771     {
3772       /* We will be setting the value number of lhs to the value number
3773 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3774 	 So first simplify and lookup this expression to see if it
3775 	 is already available.  */
3776       code_helper rcode = VIEW_CONVERT_EXPR;
3777       tree ops[3] = { result };
3778       result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3779     }
3780 
3781   if (result)
3782     changed = set_ssa_val_to (lhs, result);
3783   else
3784     {
3785       changed = set_ssa_val_to (lhs, lhs);
3786       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3787     }
3788 
3789   return changed;
3790 }
3791 
3792 
3793 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3794    and return true if the value number of the LHS has changed as a result.  */
3795 
3796 static bool
3797 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3798 {
3799   bool changed = false;
3800   vn_reference_t vnresult = NULL;
3801   tree assign;
3802   bool resultsame = false;
3803   tree vuse = gimple_vuse (stmt);
3804   tree vdef = gimple_vdef (stmt);
3805 
3806   if (TREE_CODE (op) == SSA_NAME)
3807     op = SSA_VAL (op);
3808 
3809   /* First we want to lookup using the *vuses* from the store and see
3810      if the last store to this location with the same address
3811      had the same value.
3812 
3813      The vuses represent the memory state before the store.  If the
3814      memory state, address, and value of the store are the same as the
3815      last store to this location, then this store will produce the
3816      same memory state as that store.
3817 
3818      In this case the vdef versions for this store are value numbered to those
3819      vuse versions, since they represent the same memory state after
3820      this store.
3821 
3822      Otherwise, the vdefs for the store are used when inserting into
3823      the table, since the store generates a new memory state.  */
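  /* For example (a sketch, SSA names illustrative only), given
	# .MEM_3 = VDEF <.MEM_1>
	*p_2 = x_4;
	...
	# .MEM_5 = VDEF <.MEM_3>
	*p_2 = x_4;
     the second store leaves memory unchanged, so .MEM_5 is value
     numbered to .MEM_3, the VUSE of that store.  */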
3824 
3825   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3826   if (vnresult
3827       && vnresult->result)
3828     {
3829       tree result = vnresult->result;
3830       if (TREE_CODE (result) == SSA_NAME)
3831 	result = SSA_VAL (result);
3832       resultsame = expressions_equal_p (result, op);
3833       if (resultsame)
3834 	{
3835 	  /* If the TBAA state isn't compatible for downstream reads
3836 	     we cannot value-number the VDEFs the same.  */
3837 	  alias_set_type set = get_alias_set (lhs);
3838 	  if (vnresult->set != set
3839 	      && ! alias_set_subset_of (set, vnresult->set))
3840 	    resultsame = false;
3841 	}
3842     }
3843 
3844   if (!resultsame)
3845     {
3846       /* Only perform the following when being called from PRE
3847 	 which embeds tail merging.  */
3848       if (default_vn_walk_kind == VN_WALK)
3849 	{
3850 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3851 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3852 	  if (vnresult)
3853 	    {
3854 	      VN_INFO (vdef)->use_processed = true;
3855 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3856 	    }
3857 	}
3858 
3859       if (dump_file && (dump_flags & TDF_DETAILS))
3860 	{
3861 	  fprintf (dump_file, "No store match\n");
3862 	  fprintf (dump_file, "Value numbering store ");
3863 	  print_generic_expr (dump_file, lhs);
3864 	  fprintf (dump_file, " to ");
3865 	  print_generic_expr (dump_file, op);
3866 	  fprintf (dump_file, "\n");
3867 	}
3868       /* Have to set value numbers before insert, since insert is
3869 	 going to valueize the references in-place.  */
3870       if (vdef)
3871 	changed |= set_ssa_val_to (vdef, vdef);
3872 
3873       /* Do not insert structure copies into the tables.  */
3874       if (is_gimple_min_invariant (op)
3875 	  || is_gimple_reg (op))
3876         vn_reference_insert (lhs, op, vdef, NULL);
3877 
3878       /* Only perform the following when being called from PRE
3879 	 which embeds tail merging.  */
3880       if (default_vn_walk_kind == VN_WALK)
3881 	{
3882 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3883 	  vn_reference_insert (assign, lhs, vuse, vdef);
3884 	}
3885     }
3886   else
3887     {
3888       /* We had a match, so value number the vdef to have the value
3889 	 number of the vuse it came from.  */
3890 
3891       if (dump_file && (dump_flags & TDF_DETAILS))
3892 	fprintf (dump_file, "Store matched earlier value, "
3893 		 "value numbering store vdefs to matching vuses.\n");
3894 
3895       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3896     }
3897 
3898   return changed;
3899 }
3900 
3901 /* Visit and value number PHI, return true if the value number
3902    changed.  */
3903 
3904 static bool
3905 visit_phi (gimple *phi)
3906 {
3907   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
3908   unsigned n_executable = 0;
3909   bool allsame = true;
3910   edge_iterator ei;
3911   edge e;
3912 
3913   /* TODO: We could check for this in init_scc_vn, and replace this
3914      with a gcc_assert.  */
3915   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3916     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3917 
3918   /* See if all non-TOP arguments have the same value.  TOP is
3919      equivalent to everything, so we can ignore it.  */
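  /* E.g. (a sketch, names illustrative) both x_4 = PHI <x_1(2), x_1(3)>
     and x_4 = PHI <x_1(2), VN_TOP(3)> typically get the value number
     x_1, because a VN_TOP argument places no constraint on the result.  */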
3920   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3921     if (e->flags & EDGE_EXECUTABLE)
3922       {
3923 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3924 
3925 	++n_executable;
3926 	if (TREE_CODE (def) == SSA_NAME)
3927 	  def = SSA_VAL (def);
3928 	if (def == VN_TOP)
3929 	  ;
3930 	/* Ignore undefined defs for sameval but record one.  */
3931 	else if (TREE_CODE (def) == SSA_NAME
3932 		 && ssa_undefined_value_p (def, false))
3933 	  seen_undef = def;
3934 	else if (sameval == VN_TOP)
3935 	  sameval = def;
3936 	else if (!expressions_equal_p (def, sameval))
3937 	  {
3938 	    allsame = false;
3939 	    break;
3940 	  }
3941       }
3942 
3943 
3944   /* If none of the edges was executable keep the value-number at VN_TOP;
3945      if only a single edge is executable use its value.  */
3946   if (n_executable <= 1)
3947     result = seen_undef ? seen_undef : sameval;
3948   /* If we saw only undefined values and VN_TOP use one of the
3949      undefined values.  */
3950   else if (sameval == VN_TOP)
3951     result = seen_undef ? seen_undef : sameval;
3952   /* First see if it is equivalent to a phi node in this block.  We prefer
3953      this as it allows IV elimination - see PRs 66502 and 67167.  */
3954   else if ((result = vn_phi_lookup (phi)))
3955     ;
3956   /* If all values are the same use that, unless we've seen undefined
3957      values as well and the value isn't constant.
3958      CCP/copyprop have the same restriction to not remove uninit warnings.  */
3959   else if (allsame
3960 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
3961     result = sameval;
3962   else
3963     {
3964       result = PHI_RESULT (phi);
3965       /* Only insert PHIs that are varying; for constant value numbers
3966          we mess up equivalences otherwise as we are only comparing
3967 	 the immediate controlling predicates.  */
3968       vn_phi_insert (phi, result);
3969     }
3970 
3971   return set_ssa_val_to (PHI_RESULT (phi), result);
3972 }
3973 
3974 /* Try to simplify RHS using equivalences and constant folding.  */
3975 
3976 static tree
3977 try_to_simplify (gassign *stmt)
3978 {
3979   enum tree_code code = gimple_assign_rhs_code (stmt);
3980   tree tem;
3981 
3982   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3983      in this case; there is no point in doing extra work.  */
3984   if (code == SSA_NAME)
3985     return NULL_TREE;
3986 
3987   /* First try constant folding based on our current lattice.  */
3988   mprts_hook = vn_lookup_simplify_result;
3989   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3990   mprts_hook = NULL;
3991   if (tem
3992       && (TREE_CODE (tem) == SSA_NAME
3993 	  || is_gimple_min_invariant (tem)))
3994     return tem;
3995 
3996   return NULL_TREE;
3997 }
3998 
3999 /* Visit and value number USE, return true if the value number
4000    changed. */
4001 
4002 static bool
4003 visit_use (tree use)
4004 {
4005   bool changed = false;
4006   gimple *stmt = SSA_NAME_DEF_STMT (use);
4007 
4008   mark_use_processed (use);
4009 
4010   gcc_assert (!SSA_NAME_IN_FREE_LIST (use)
4011 	      && !SSA_NAME_IS_DEFAULT_DEF (use));
4012 
4013   if (dump_file && (dump_flags & TDF_DETAILS))
4014     {
4015       fprintf (dump_file, "Value numbering ");
4016       print_generic_expr (dump_file, use);
4017       fprintf (dump_file, " stmt = ");
4018       print_gimple_stmt (dump_file, stmt, 0);
4019     }
4020 
4021   if (gimple_code (stmt) == GIMPLE_PHI)
4022     changed = visit_phi (stmt);
4023   else if (gimple_has_volatile_ops (stmt))
4024     changed = defs_to_varying (stmt);
4025   else if (gassign *ass = dyn_cast <gassign *> (stmt))
4026     {
4027       enum tree_code code = gimple_assign_rhs_code (ass);
4028       tree lhs = gimple_assign_lhs (ass);
4029       tree rhs1 = gimple_assign_rhs1 (ass);
4030       tree simplified;
4031 
4032       /* Shortcut for copies. Simplifying copies is pointless,
4033 	 since we copy the expression and value they represent.  */
4034       if (code == SSA_NAME
4035 	  && TREE_CODE (lhs) == SSA_NAME)
4036 	{
4037 	  changed = visit_copy (lhs, rhs1);
4038 	  goto done;
4039 	}
4040       simplified = try_to_simplify (ass);
4041       if (simplified)
4042 	{
4043 	  if (dump_file && (dump_flags & TDF_DETAILS))
4044 	    {
4045 	      fprintf (dump_file, "RHS ");
4046 	      print_gimple_expr (dump_file, ass, 0);
4047 	      fprintf (dump_file, " simplified to ");
4048 	      print_generic_expr (dump_file, simplified);
4049 	      fprintf (dump_file, "\n");
4050 	    }
4051 	}
4052       /* Setting value numbers to constants will occasionally
4053 	 screw up phi congruence because constants are not
4054 	 uniquely associated with a single ssa name that can be
4055 	 looked up.  */
4056       if (simplified
4057 	  && is_gimple_min_invariant (simplified)
4058 	  && TREE_CODE (lhs) == SSA_NAME)
4059 	{
4060 	  changed = set_ssa_val_to (lhs, simplified);
4061 	  goto done;
4062 	}
4063       else if (simplified
4064 	       && TREE_CODE (simplified) == SSA_NAME
4065 	       && TREE_CODE (lhs) == SSA_NAME)
4066 	{
4067 	  changed = visit_copy (lhs, simplified);
4068 	  goto done;
4069 	}
4070 
4071       if ((TREE_CODE (lhs) == SSA_NAME
4072 	   /* We can substitute SSA_NAMEs that are live over
4073 	      abnormal edges with their constant value.  */
4074 	   && !(gimple_assign_copy_p (ass)
4075 		&& is_gimple_min_invariant (rhs1))
4076 	   && !(simplified
4077 		&& is_gimple_min_invariant (simplified))
4078 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4079 	  /* Stores or copies from SSA_NAMEs that are live over
4080 	     abnormal edges are a problem.  */
4081 	  || (code == SSA_NAME
4082 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4083 	changed = defs_to_varying (ass);
4084       else if (REFERENCE_CLASS_P (lhs)
4085 	       || DECL_P (lhs))
4086 	changed = visit_reference_op_store (lhs, rhs1, ass);
4087       else if (TREE_CODE (lhs) == SSA_NAME)
4088 	{
4089 	  if ((gimple_assign_copy_p (ass)
4090 	       && is_gimple_min_invariant (rhs1))
4091 	      || (simplified
4092 		  && is_gimple_min_invariant (simplified)))
4093 	    {
4094 	      if (simplified)
4095 		changed = set_ssa_val_to (lhs, simplified);
4096 	      else
4097 		changed = set_ssa_val_to (lhs, rhs1);
4098 	    }
4099 	  else
4100 	    {
4101 	      /* Visit the original statement.  */
4102 	      switch (vn_get_stmt_kind (ass))
4103 		{
4104 		case VN_NARY:
4105 		changed = visit_nary_op (lhs, ass);
4106 		break;
4107 		case VN_REFERENCE:
4108 		changed = visit_reference_op_load (lhs, rhs1, ass);
4109 		break;
4110 		default:
4111 		changed = defs_to_varying (ass);
4112 		break;
4113 		}
4114 	    }
4115 	}
4116       else
4117 	changed = defs_to_varying (ass);
4118     }
4119   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4120     {
4121       tree lhs = gimple_call_lhs (call_stmt);
4122       if (lhs && TREE_CODE (lhs) == SSA_NAME)
4123 	{
4124 	  /* Try constant folding based on our current lattice.  */
4125 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4126 							    vn_valueize);
4127 	  if (simplified)
4128 	    {
4129 	      if (dump_file && (dump_flags & TDF_DETAILS))
4130 		{
4131 		  fprintf (dump_file, "call ");
4132 		  print_gimple_expr (dump_file, call_stmt, 0);
4133 		  fprintf (dump_file, " simplified to ");
4134 		  print_generic_expr (dump_file, simplified);
4135 		  fprintf (dump_file, "\n");
4136 		}
4137 	    }
4138 	  /* Setting value numbers to constants will occasionally
4139 	     screw up phi congruence because constants are not
4140 	     uniquely associated with a single ssa name that can be
4141 	     looked up.  */
4142 	  if (simplified
4143 	      && is_gimple_min_invariant (simplified))
4144 	    {
4145 	      changed = set_ssa_val_to (lhs, simplified);
4146 	      if (gimple_vdef (call_stmt))
4147 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4148 					   SSA_VAL (gimple_vuse (call_stmt)));
4149 	      goto done;
4150 	    }
4151 	  else if (simplified
4152 		   && TREE_CODE (simplified) == SSA_NAME)
4153 	    {
4154 	      changed = visit_copy (lhs, simplified);
4155 	      if (gimple_vdef (call_stmt))
4156 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4157 					   SSA_VAL (gimple_vuse (call_stmt)));
4158 	      goto done;
4159 	    }
4160 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4161 	    {
4162 	      changed = defs_to_varying (call_stmt);
4163 	      goto done;
4164 	    }
4165 	}
4166 
4167       /* Pick up flags from a devirtualization target.  */
4168       tree fn = gimple_call_fn (stmt);
4169       int extra_fnflags = 0;
4170       if (fn && TREE_CODE (fn) == SSA_NAME)
4171 	{
4172 	  fn = SSA_VAL (fn);
4173 	  if (TREE_CODE (fn) == ADDR_EXPR
4174 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4175 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4176 	}
4177       if (!gimple_call_internal_p (call_stmt)
4178 	  && (/* Calls to the same function with the same vuse
4179 		 and the same operands do not necessarily return the same
4180 		 value, unless they're pure or const.  */
4181 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
4182 	       & (ECF_PURE | ECF_CONST))
4183 	      /* If calls have a vdef, subsequent calls won't have
4184 		 the same incoming vuse.  So, if 2 calls with vdef have the
4185 		 same vuse, we know they're not subsequent.
4186 		 We can value number 2 calls to the same function with the
4187 		 same vuse and the same operands which are not subsequent
4188 		 the same, because there is no code in the program that can
4189 		 compare the 2 values...  */
4190 	      || (gimple_vdef (call_stmt)
4191 		  /* ... unless the call returns a pointer which does
4192 		     not alias with anything else.  In which case the
4193 		     information that the values are distinct is encoded
4194 		     in the IL.  */
4195 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4196 		  /* Only perform the following when being called from PRE
4197 		     which embeds tail merging.  */
4198 		  && default_vn_walk_kind == VN_WALK)))
4199 	changed = visit_reference_op_call (lhs, call_stmt);
4200       else
4201 	changed = defs_to_varying (call_stmt);
4202     }
4203   else
4204     changed = defs_to_varying (stmt);
4205  done:
4206   return changed;
4207 }
4208 
4209 /* Compare two operands by reverse postorder index.  */
4210 
4211 static int
4212 compare_ops (const void *pa, const void *pb)
4213 {
4214   const tree opa = *((const tree *)pa);
4215   const tree opb = *((const tree *)pb);
4216   gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4217   gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4218   basic_block bba;
4219   basic_block bbb;
4220 
4221   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4222     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4223   else if (gimple_nop_p (opstmta))
4224     return -1;
4225   else if (gimple_nop_p (opstmtb))
4226     return 1;
4227 
4228   bba = gimple_bb (opstmta);
4229   bbb = gimple_bb (opstmtb);
4230 
4231   if (!bba && !bbb)
4232     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4233   else if (!bba)
4234     return -1;
4235   else if (!bbb)
4236     return 1;
4237 
4238   if (bba == bbb)
4239     {
4240       if (gimple_code (opstmta) == GIMPLE_PHI
4241 	  && gimple_code (opstmtb) == GIMPLE_PHI)
4242 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4243       else if (gimple_code (opstmta) == GIMPLE_PHI)
4244 	return -1;
4245       else if (gimple_code (opstmtb) == GIMPLE_PHI)
4246 	return 1;
4247       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4248         return gimple_uid (opstmta) - gimple_uid (opstmtb);
4249       else
4250 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4251     }
4252   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4253 }
4254 
4255 /* Sort an array containing members of a strongly connected component
4256    SCC so that the members are ordered by RPO number.
4257    This means that when the sort is complete, iterating through the
4258    array will give you the members in RPO order.  */
4259 
4260 static void
4261 sort_scc (vec<tree> scc)
4262 {
4263   scc.qsort (compare_ops);
4264 }
4265 
4266 /* Insert the no longer used nary ONARY into the hash INFO.  */
4267 
4268 static void
4269 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4270 {
4271   size_t size = sizeof_vn_nary_op (onary->length);
4272   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4273 					       &info->nary_obstack);
4274   memcpy (nary, onary, size);
4275   vn_nary_op_insert_into (nary, info->nary, false);
4276 }
4277 
4278 /* Insert the no longer used phi OPHI into the hash INFO.  */
4279 
4280 static void
4281 copy_phi (vn_phi_t ophi, vn_tables_t info)
4282 {
4283   vn_phi_t phi = info->phis_pool->allocate ();
4284   vn_phi_s **slot;
4285   memcpy (phi, ophi, sizeof (*phi));
4286   ophi->phiargs.create (0);
4287   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4288   gcc_assert (!*slot);
4289   *slot = phi;
4290 }
4291 
4292 /* Insert the no longer used reference OREF into the hash INFO.  */
4293 
4294 static void
4295 copy_reference (vn_reference_t oref, vn_tables_t info)
4296 {
4297   vn_reference_t ref;
4298   vn_reference_s **slot;
4299   ref = info->references_pool->allocate ();
4300   memcpy (ref, oref, sizeof (*ref));
4301   oref->operands.create (0);
4302   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4303   if (*slot)
4304     free_reference (*slot);
4305   *slot = ref;
4306 }
4307 
4308 /* Process a strongly connected component in the SSA graph.  */
4309 
4310 static void
4311 process_scc (vec<tree> scc)
4312 {
4313   tree var;
4314   unsigned int i;
4315   unsigned int iterations = 0;
4316   bool changed = true;
4317   vn_nary_op_iterator_type hin;
4318   vn_phi_iterator_type hip;
4319   vn_reference_iterator_type hir;
4320   vn_nary_op_t nary;
4321   vn_phi_t phi;
4322   vn_reference_t ref;
4323 
4324   /* If the SCC has a single member, just visit it.  */
4325   if (scc.length () == 1)
4326     {
4327       tree use = scc[0];
4328       if (VN_INFO (use)->use_processed)
4329 	return;
4330       /* We need to make sure it doesn't form a cycle itself, which can
4331 	 happen for self-referential PHI nodes.  In that case we would
4332 	 end up inserting an expression with VN_TOP operands into the
4333 	 valid table which makes us derive bogus equivalences later.
4334 	 The cheapest way to check this is to assume it for all PHI nodes.  */
4335       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4336 	/* Fallthru to iteration.  */ ;
4337       else
4338 	{
4339 	  visit_use (use);
4340 	  return;
4341 	}
4342     }
4343 
4344   if (dump_file && (dump_flags & TDF_DETAILS))
4345     print_scc (dump_file, scc);
4346 
4347   /* Iterate over the SCC with the optimistic table until it stops
4348      changing.  */
4349   current_info = optimistic_info;
4350   while (changed)
4351     {
4352       changed = false;
4353       iterations++;
4354       if (dump_file && (dump_flags & TDF_DETAILS))
4355 	fprintf (dump_file, "Starting iteration %d\n", iterations);
4356       /* As we are value-numbering optimistically we have to
4357 	 clear the expression tables and the simplified expressions
4358 	 in each iteration until we converge.  */
4359       optimistic_info->nary->empty ();
4360       optimistic_info->phis->empty ();
4361       optimistic_info->references->empty ();
4362       obstack_free (&optimistic_info->nary_obstack, NULL);
4363       gcc_obstack_init (&optimistic_info->nary_obstack);
4364       optimistic_info->phis_pool->release ();
4365       optimistic_info->references_pool->release ();
4366       FOR_EACH_VEC_ELT (scc, i, var)
4367 	gcc_assert (!VN_INFO (var)->needs_insertion
4368 		    && VN_INFO (var)->expr == NULL);
4369       FOR_EACH_VEC_ELT (scc, i, var)
4370 	changed |= visit_use (var);
4371     }
4372 
4373   if (dump_file && (dump_flags & TDF_DETAILS))
4374     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4375   statistics_histogram_event (cfun, "SCC iterations", iterations);
4376 
4377   /* Finally, copy the contents of the no longer used optimistic
4378      table to the valid table.  */
4379   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4380     copy_nary (nary, valid_info);
4381   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4382     copy_phi (phi, valid_info);
4383   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4384 			       ref, vn_reference_t, hir)
4385     copy_reference (ref, valid_info);
4386 
4387   current_info = valid_info;
4388 }
4389 
4390 
4391 /* Pop the components of the found SCC for NAME off the SCC stack
4392    and process them.  If the SCC exceeds the size limit, give up
4393    and value all its members varying instead.  */
4394 
4395 static void
4396 extract_and_process_scc_for_name (tree name)
4397 {
4398   auto_vec<tree> scc;
4399   tree x;
4400 
4401   /* Found an SCC, pop the components off the SCC stack and
4402      process them.  */
4403   do
4404     {
4405       x = sccstack.pop ();
4406 
4407       VN_INFO (x)->on_sccstack = false;
4408       scc.safe_push (x);
4409     } while (x != name);
4410 
4411   /* Drop all defs in the SCC to varying in case an SCC turns out to be
4412      incredibly large.
4413      ???  Just switch to a non-optimistic mode that avoids any iteration.  */
4414   if (scc.length () > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4415     {
4416       if (dump_file)
4417 	{
4418 	  print_scc (dump_file, scc);
4419 	  fprintf (dump_file, "WARNING: Giving up value-numbering SCC due to "
4420 		   "size %u exceeding %u\n", scc.length (),
4421 		   (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4422 	}
4423       tree var;
4424       unsigned i;
4425       FOR_EACH_VEC_ELT (scc, i, var)
4426 	{
4427 	  gimple *def = SSA_NAME_DEF_STMT (var);
4428 	  mark_use_processed (var);
4429 	  if (SSA_NAME_IS_DEFAULT_DEF (var)
4430 	      || gimple_code (def) == GIMPLE_PHI)
4431 	    set_ssa_val_to (var, var);
4432 	  else
4433 	    defs_to_varying (def);
4434 	}
4435       return;
4436     }
4437 
4438   if (scc.length () > 1)
4439     sort_scc (scc);
4440 
4441   process_scc (scc);
4442 }
4443 
4444 /* Depth first search on NAME to discover and process SCC's in the SSA
4445    graph.
4446    Execution of this algorithm relies on the fact that the SCC's are
4447    popped off the stack in topological order.  */
4450 
4451 static void
4452 DFS (tree name)
4453 {
4454   auto_vec<ssa_op_iter> itervec;
4455   auto_vec<tree> namevec;
4456   use_operand_p usep = NULL;
4457   gimple *defstmt;
4458   tree use;
4459   ssa_op_iter iter;
4460 
4461 start_over:
4462   /* SCC info */
4463   VN_INFO (name)->dfsnum = next_dfs_num++;
4464   VN_INFO (name)->visited = true;
4465   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4466 
4467   sccstack.safe_push (name);
4468   VN_INFO (name)->on_sccstack = true;
4469   defstmt = SSA_NAME_DEF_STMT (name);
4470 
4471   /* Recursively DFS on our operands, looking for SCC's.  */
4472   if (!gimple_nop_p (defstmt))
4473     {
4474       /* Push a new iterator.  */
4475       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4476 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4477       else
4478 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4479     }
4480   else
4481     clear_and_done_ssa_iter (&iter);
4482 
4483   while (1)
4484     {
4485       /* If we are done processing uses of a name, go up the stack
4486 	 of iterators and process SCCs as we found them.  */
4487       if (op_iter_done (&iter))
4488 	{
4489 	  /* See if we found an SCC.  */
4490 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4491 	    extract_and_process_scc_for_name (name);
4492 
4493 	  /* Check if we are done.  */
4494 	  if (namevec.is_empty ())
4495 	    return;
4496 
4497 	  /* Restore the last use walker and continue walking there.  */
4498 	  use = name;
4499 	  name = namevec.pop ();
4500 	  memcpy (&iter, &itervec.last (),
4501 		  sizeof (ssa_op_iter));
4502 	  itervec.pop ();
4503 	  goto continue_walking;
4504 	}
4505 
4506       use = USE_FROM_PTR (usep);
4507 
4508       /* Since we handle phi nodes, we will sometimes get
4509 	 invariants in the use expression.  */
4510       if (TREE_CODE (use) == SSA_NAME)
4511 	{
4512 	  if (! (VN_INFO (use)->visited))
4513 	    {
4514 	      /* Recurse by pushing the current use walking state on
4515 		 the stack and starting over.  */
4516 	      itervec.safe_push (iter);
4517 	      namevec.safe_push (name);
4518 	      name = use;
4519 	      goto start_over;
4520 
4521 continue_walking:
4522 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4523 					 VN_INFO (use)->low);
4524 	    }
4525 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4526 	      && VN_INFO (use)->on_sccstack)
4527 	    {
4528 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4529 					 VN_INFO (name)->low);
4530 	    }
4531 	}
4532 
4533       usep = op_iter_next_use (&iter);
4534     }
4535 }
4536 
4537 /* Allocate a value number table.  */
4538 
4539 static void
4540 allocate_vn_table (vn_tables_t table)
4541 {
4542   table->phis = new vn_phi_table_type (23);
4543   table->nary = new vn_nary_op_table_type (23);
4544   table->references = new vn_reference_table_type (23);
4545 
4546   gcc_obstack_init (&table->nary_obstack);
4547   table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4548   table->references_pool = new object_allocator<vn_reference_s>
4549     ("VN references");
4550 }
4551 
4552 /* Free a value number table.  */
4553 
4554 static void
4555 free_vn_table (vn_tables_t table)
4556 {
4557   delete table->phis;
4558   table->phis = NULL;
4559   delete table->nary;
4560   table->nary = NULL;
4561   delete table->references;
4562   table->references = NULL;
4563   obstack_free (&table->nary_obstack, NULL);
4564   delete table->phis_pool;
4565   delete table->references_pool;
4566 }
4567 
4568 static void
4569 init_scc_vn (void)
4570 {
4571   int j;
4572   int *rpo_numbers_temp;
4573 
4574   calculate_dominance_info (CDI_DOMINATORS);
4575   mark_dfs_back_edges ();
4576 
4577   sccstack.create (0);
4578   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4579 
4580   constant_value_ids = BITMAP_ALLOC (NULL);
4581 
4582   next_dfs_num = 1;
4583   next_value_id = 1;
4584 
4585   vn_ssa_aux_table.create (num_ssa_names + 1);
4586   /* VEC_alloc doesn't actually grow it to the right size; it just
4587      preallocates the space to do so.  */
4588   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4589   gcc_obstack_init (&vn_ssa_aux_obstack);
4590 
4591   shared_lookup_phiargs.create (0);
4592   shared_lookup_references.create (0);
4593   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4594   rpo_numbers_temp =
4595     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4596   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4597 
4598   /* rpo_numbers_temp is an array in RPO order: rpo_numbers_temp[i] = bb
4599      means that the i'th block in RPO order is bb.  We want to map bb
4600      indices to RPO numbers, so we need to invert this array.  */
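  /* E.g. (indices illustrative) if rpo_numbers_temp is {5, 3, 4} then
     block 5 comes first in RPO order and we end up with
     rpo_numbers[5] = 0, rpo_numbers[3] = 1 and rpo_numbers[4] = 2.  */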
4601   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4602     rpo_numbers[rpo_numbers_temp[j]] = j;
4603 
4604   XDELETE (rpo_numbers_temp);
4605 
4606   VN_TOP = build_decl (UNKNOWN_LOCATION, VAR_DECL,
4607 		       get_identifier ("VN_TOP"), error_mark_node);
4608 
4609   renumber_gimple_stmt_uids ();
4610 
4611   /* Create the valid and optimistic value numbering tables.  */
4612   valid_info = XCNEW (struct vn_tables_s);
4613   allocate_vn_table (valid_info);
4614   optimistic_info = XCNEW (struct vn_tables_s);
4615   allocate_vn_table (optimistic_info);
4616   current_info = valid_info;
4617 
4618   /* Create the VN_INFO structures, and initialize value numbers to
4619      TOP or VARYING for parameters.  */
4620   size_t i;
4621   tree name;
4622 
4623   FOR_EACH_SSA_NAME (i, name, cfun)
4624     {
4625       VN_INFO_GET (name)->valnum = VN_TOP;
4626       VN_INFO (name)->needs_insertion = false;
4627       VN_INFO (name)->expr = NULL;
4628       VN_INFO (name)->value_id = 0;
4629 
4630       if (!SSA_NAME_IS_DEFAULT_DEF (name))
4631 	continue;
4632 
4633       switch (TREE_CODE (SSA_NAME_VAR (name)))
4634 	{
4635 	case VAR_DECL:
4636 	  /* All undefined vars are VARYING.  */
4637 	  VN_INFO (name)->valnum = name;
4638 	  VN_INFO (name)->visited = true;
4639 	  break;
4640 
4641 	case PARM_DECL:
4642 	  /* Parameters are VARYING but we can record a condition
4643 	     if we know it is a non-NULL pointer.  */
4644 	  VN_INFO (name)->visited = true;
4645 	  VN_INFO (name)->valnum = name;
4646 	  if (POINTER_TYPE_P (TREE_TYPE (name))
4647 	      && nonnull_arg_p (SSA_NAME_VAR (name)))
4648 	    {
4649 	      tree ops[2];
4650 	      ops[0] = name;
4651 	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
4652 	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4653 					boolean_true_node, 0);
4654 	      if (dump_file && (dump_flags & TDF_DETAILS))
4655 		{
4656 		  fprintf (dump_file, "Recording ");
4657 		  print_generic_expr (dump_file, name, TDF_SLIM);
4658 		  fprintf (dump_file, " != 0\n");
4659 		}
4660 	    }
4661 	  break;
4662 
4663 	case RESULT_DECL:
4664 	  /* If the result is passed by invisible reference the default
4665 	     def is initialized, otherwise it's uninitialized.  Either
4666 	     way it is varying.  */
4667 	  VN_INFO (name)->visited = true;
4668 	  VN_INFO (name)->valnum = name;
4669 	  break;
4670 
4671 	default:
4672 	  gcc_unreachable ();
4673 	}
4674     }
4675 }
4676 
4677 /* Restore SSA info that has been reset on value leaders.  */
4678 
4679 void
4680 scc_vn_restore_ssa_info (void)
4681 {
4682   unsigned i;
4683   tree name;
4684 
4685   FOR_EACH_SSA_NAME (i, name, cfun)
4686     {
4687       if (has_VN_INFO (name))
4688 	{
4689 	  if (VN_INFO (name)->needs_insertion)
4690 	    ;
4691 	  else if (POINTER_TYPE_P (TREE_TYPE (name))
4692 		   && VN_INFO (name)->info.ptr_info)
4693 	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4694 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4695 		   && VN_INFO (name)->info.range_info)
4696 	    {
4697 	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4698 	      SSA_NAME_ANTI_RANGE_P (name)
4699 		= VN_INFO (name)->range_info_anti_range_p;
4700 	    }
4701 	}
4702     }
4703 }
4704 
4705 void
4706 free_scc_vn (void)
4707 {
4708   size_t i;
4709   tree name;
4710 
4711   delete constant_to_value_id;
4712   constant_to_value_id = NULL;
4713   BITMAP_FREE (constant_value_ids);
4714   shared_lookup_phiargs.release ();
4715   shared_lookup_references.release ();
4716   XDELETEVEC (rpo_numbers);
4717 
4718   FOR_EACH_SSA_NAME (i, name, cfun)
4719     {
4720       if (has_VN_INFO (name)
4721 	  && VN_INFO (name)->needs_insertion)
4722 	release_ssa_name (name);
4723     }
4724   obstack_free (&vn_ssa_aux_obstack, NULL);
4725   vn_ssa_aux_table.release ();
4726 
4727   sccstack.release ();
4728   free_vn_table (valid_info);
4729   XDELETE (valid_info);
4730   free_vn_table (optimistic_info);
4731   XDELETE (optimistic_info);
4732 
4733   BITMAP_FREE (const_parms);
4734 }
4735 
4736 /* Set *ID according to RESULT.  */
4737 
4738 static void
4739 set_value_id_for_result (tree result, unsigned int *id)
4740 {
4741   if (result && TREE_CODE (result) == SSA_NAME)
4742     *id = VN_INFO (result)->value_id;
4743   else if (result && is_gimple_min_invariant (result))
4744     *id = get_or_alloc_constant_value_id (result);
4745   else
4746     *id = get_next_value_id ();
4747 }
4748 
4749 /* Set the value ids in the valid hash tables.  */
4750 
4751 static void
4752 set_hashtable_value_ids (void)
4753 {
4754   vn_nary_op_iterator_type hin;
4755   vn_phi_iterator_type hip;
4756   vn_reference_iterator_type hir;
4757   vn_nary_op_t vno;
4758   vn_reference_t vr;
4759   vn_phi_t vp;
4760 
4761   /* Now set the value ids of the things we had put in the hash
4762      table.  */
4763 
4764   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4765     set_value_id_for_result (vno->result, &vno->value_id);
4766 
4767   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4768     set_value_id_for_result (vp->result, &vp->value_id);
4769 
4770   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4771 			       hir)
4772     set_value_id_for_result (vr->result, &vr->value_id);
4773 }
4774 
4775 class sccvn_dom_walker : public dom_walker
4776 {
4777 public:
4778   sccvn_dom_walker ()
4779     : dom_walker (CDI_DOMINATORS, REACHABLE_BLOCKS), cond_stack (0) {}
4780 
4781   virtual edge before_dom_children (basic_block);
4782   virtual void after_dom_children (basic_block);
4783 
4784   void record_cond (basic_block,
4785 		    enum tree_code code, tree lhs, tree rhs, bool value);
4786   void record_conds (basic_block,
4787 		     enum tree_code code, tree lhs, tree rhs, bool value);
4788 
4789   auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4790     cond_stack;
4791 };
4792 
4793 /* Record a temporary condition for the BB and its dominated blocks.  */
4794 
4795 void
4796 sccvn_dom_walker::record_cond (basic_block bb,
4797 			       enum tree_code code, tree lhs, tree rhs,
4798 			       bool value)
4799 {
4800   tree ops[2] = { lhs, rhs };
4801   vn_nary_op_t old = NULL;
4802   if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4803     current_info->nary->remove_elt_with_hash (old, old->hashcode);
4804   vn_nary_op_t cond
4805     = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4806 				value
4807 				? boolean_true_node
4808 				: boolean_false_node, 0);
4809   if (dump_file && (dump_flags & TDF_DETAILS))
4810     {
4811       fprintf (dump_file, "Recording temporarily ");
4812       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4813       fprintf (dump_file, " %s ", get_tree_code_name (code));
4814       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4815       fprintf (dump_file, " == %s%s\n",
4816 	       value ? "true" : "false",
4817 	       old ? " (old entry saved)" : "");
4818     }
4819   cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4820 }
4821 
4822 /* Record temporary conditions for the BB and its dominated blocks
4823    according to LHS CODE RHS == VALUE, together with the conditions it implies.  */
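/* E.g. (a sketch, names illustrative) recording a_1 < b_2 == true also
   records a_1 <= b_2 == true, a_1 != b_2 == true and
   a_1 == b_2 == false.  */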
4824 
4825 void
4826 sccvn_dom_walker::record_conds (basic_block bb,
4827 				enum tree_code code, tree lhs, tree rhs,
4828 				bool value)
4829 {
4830   /* Record the original condition.  */
4831   record_cond (bb, code, lhs, rhs, value);
4832 
4833   if (!value)
4834     return;
4835 
4836   /* Record dominated conditions if the condition is true.  Note that
4837      the inversion is already recorded.  */
4838   switch (code)
4839     {
4840     case LT_EXPR:
4841     case GT_EXPR:
4842       record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4843       record_cond (bb, NE_EXPR, lhs, rhs, true);
4844       record_cond (bb, EQ_EXPR, lhs, rhs, false);
4845       break;
4846 
4847     case EQ_EXPR:
4848       record_cond (bb, LE_EXPR, lhs, rhs, true);
4849       record_cond (bb, GE_EXPR, lhs, rhs, true);
4850       record_cond (bb, LT_EXPR, lhs, rhs, false);
4851       record_cond (bb, GT_EXPR, lhs, rhs, false);
4852       break;
4853 
4854     default:
4855       break;
4856     }
4857 }
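
/* For example, when a block's single predecessor edge is the true edge
   of if (a < b), the caller records a < b == true here, which also
   records a <= b == true, a != b == true and a == b == false; the
   inverted a >= b == false is recorded by a separate call.  */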
4858 
4859 /* Restore expressions and values derived from conditionals.  */
4860 
4861 void
4862 sccvn_dom_walker::after_dom_children (basic_block bb)
4863 {
4864   while (!cond_stack.is_empty ()
4865 	 && cond_stack.last ().first == bb)
4866     {
4867       vn_nary_op_t cond = cond_stack.last ().second.first;
4868       vn_nary_op_t old = cond_stack.last ().second.second;
4869       current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4870       if (old)
4871 	vn_nary_op_insert_into (old, current_info->nary, false);
4872       cond_stack.pop ();
4873     }
4874 }
4875 
4876 /* Value number all statements in BB.  */
4877 
4878 edge
4879 sccvn_dom_walker::before_dom_children (basic_block bb)
4880 {
4881   if (dump_file && (dump_flags & TDF_DETAILS))
4882     fprintf (dump_file, "Visiting BB %d\n", bb->index);
4883 
4884   /* If we have a single predecessor record the equivalence from a
4885      possible condition on the predecessor edge.  */
4886   edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
4887   if (pred_e)
4888     {
4889       /* Check if there are multiple executable successor edges in
4890 	 the source block.  Otherwise there is no additional info
4891 	 to be recorded.  */
4892       edge_iterator ei;
4893       edge e2;
4894       FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4895 	if (e2 != pred_e
4896 	    && e2->flags & EDGE_EXECUTABLE)
4897 	  break;
4898       if (e2 && (e2->flags & EDGE_EXECUTABLE))
4899 	{
4900 	  gimple *stmt = last_stmt (pred_e->src);
4901 	  if (stmt
4902 	      && gimple_code (stmt) == GIMPLE_COND)
4903 	    {
4904 	      enum tree_code code = gimple_cond_code (stmt);
4905 	      tree lhs = gimple_cond_lhs (stmt);
4906 	      tree rhs = gimple_cond_rhs (stmt);
4907 	      record_conds (bb, code, lhs, rhs,
4908 			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4909 	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
4910 	      if (code != ERROR_MARK)
4911 		record_conds (bb, code, lhs, rhs,
4912 			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4913 	    }
4914 	}
4915     }
4916 
4917   /* Value-number all defs in the basic-block.  */
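  /* DFS walks the SSA use-def graph starting from the definition and
     value-numbers the strongly-connected component it belongs to,
     iterating cycles to a fixpoint as described at the top of the
     file.  */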
4918   for (gphi_iterator gsi = gsi_start_phis (bb);
4919        !gsi_end_p (gsi); gsi_next (&gsi))
4920     {
4921       gphi *phi = gsi.phi ();
4922       tree res = PHI_RESULT (phi);
4923       if (!VN_INFO (res)->visited)
4924 	DFS (res);
4925     }
4926   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4927        !gsi_end_p (gsi); gsi_next (&gsi))
4928     {
4929       ssa_op_iter i;
4930       tree op;
4931       FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4932 	if (!VN_INFO (op)->visited)
4933 	  DFS (op);
4934     }
4935 
4936   /* Finally look at the last stmt.  */
4937   gimple *stmt = last_stmt (bb);
4938   if (!stmt)
4939     return NULL;
4940 
4941   enum gimple_code code = gimple_code (stmt);
4942   if (code != GIMPLE_COND
4943       && code != GIMPLE_SWITCH
4944       && code != GIMPLE_GOTO)
4945     return NULL;
4946 
4947   if (dump_file && (dump_flags & TDF_DETAILS))
4948     {
4949       fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4950       print_gimple_stmt (dump_file, stmt, 0);
4951     }
4952 
4953   /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4954      if value-numbering can prove they are not reachable.  Handling
4955      computed gotos is also possible.  */
4956   tree val;
4957   switch (code)
4958     {
4959     case GIMPLE_COND:
4960       {
4961 	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4962 	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4963 	val = gimple_simplify (gimple_cond_code (stmt),
4964 			       boolean_type_node, lhs, rhs,
4965 			       NULL, vn_valueize);
4966 	/* If that didn't simplify to a constant see if we have recorded
4967 	   temporary expressions from taken edges.  */
4968 	if (!val || TREE_CODE (val) != INTEGER_CST)
4969 	  {
4970 	    tree ops[2];
4971 	    ops[0] = lhs;
4972 	    ops[1] = rhs;
4973 	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4974 					    boolean_type_node, ops, NULL);
4975 	  }
4976 	break;
4977       }
4978     case GIMPLE_SWITCH:
4979       val = gimple_switch_index (as_a <gswitch *> (stmt));
4980       break;
4981     case GIMPLE_GOTO:
4982       val = gimple_goto_dest (stmt);
4983       break;
4984     default:
4985       gcc_unreachable ();
4986     }
4987   if (!val)
4988     return NULL;
4989 
4990   edge taken = find_taken_edge (bb, vn_valueize (val));
4991   if (!taken)
4992     return NULL;
4993 
4994   if (dump_file && (dump_flags & TDF_DETAILS))
4995     fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4996 	     "not executable\n", bb->index, bb->index, taken->dest->index);
4997 
4998   return taken;
4999 }
5000 
5001 /* Do SCCVN over the whole function, driven by a dominator walk over
5002    all blocks.  DEFAULT_VN_WALK_KIND_ specifies how we use the alias
5003    oracle walking during the VN process.  */
5004 
5005 void
5006 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
5007 {
5008   size_t i;
5009 
5010   default_vn_walk_kind = default_vn_walk_kind_;
5011 
5012   init_scc_vn ();
5013 
5014   /* Collect pointers we know point to readonly memory.  */
5015   const_parms = BITMAP_ALLOC (NULL);
5016   tree fnspec = lookup_attribute ("fn spec",
5017 				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
5018   if (fnspec)
5019     {
5020       fnspec = TREE_VALUE (TREE_VALUE (fnspec));
5021       i = 1;
5022       for (tree arg = DECL_ARGUMENTS (cfun->decl);
5023 	   arg; arg = DECL_CHAIN (arg), ++i)
5024 	{
5025 	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5026 	    break;
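	  /* In the "fn spec" string 'R' and 'r' mark pointer arguments
	     whose pointed-to memory the function does not write.  */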
5027 	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
5028 	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
5029 	    {
5030 	      tree name = ssa_default_def (cfun, arg);
5031 	      if (name)
5032 		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5033 	    }
5034 	}
5035     }
5036 
5037   /* Walk all blocks in dominator order, value-numbering stmts
5038      SSA defs and decide whether outgoing edges are not executable.  */
5039   sccvn_dom_walker walker;
5040   walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5041 
5042   /* Initialize the value ids and prune out remaining VN_TOPs
5043      from dead code.  */
5044   tree name;
5045   FOR_EACH_SSA_NAME (i, name, cfun)
5046     {
5047       vn_ssa_aux_t info = VN_INFO (name);
5048       if (!info->visited
5049 	  || info->valnum == VN_TOP)
5050 	info->valnum = name;
5051       if (info->valnum == name)
5052 	info->value_id = get_next_value_id ();
5053       else if (is_gimple_min_invariant (info->valnum))
5054 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
5055     }
5056 
5057   /* Propagate value ids along the valnum chains.  */
5058   FOR_EACH_SSA_NAME (i, name, cfun)
5059     {
5060       vn_ssa_aux_t info = VN_INFO (name);
5061       if (TREE_CODE (info->valnum) == SSA_NAME
5062 	  && info->valnum != name
5063 	  && info->value_id != VN_INFO (info->valnum)->value_id)
5064 	info->value_id = VN_INFO (info->valnum)->value_id;
5065     }
5066 
5067   set_hashtable_value_ids ();
5068 
5069   if (dump_file && (dump_flags & TDF_DETAILS))
5070     {
5071       fprintf (dump_file, "Value numbers:\n");
5072       FOR_EACH_SSA_NAME (i, name, cfun)
5073 	{
5074 	  if (VN_INFO (name)->visited
5075 	      && SSA_VAL (name) != name)
5076 	    {
5077 	      print_generic_expr (dump_file, name);
5078 	      fprintf (dump_file, " = ");
5079 	      print_generic_expr (dump_file, SSA_VAL (name));
5080 	      fprintf (dump_file, "\n");
5081 	    }
5082 	}
5083     }
5084 }
5085 
5086 /* Return the maximum value id we have ever seen.  */
5087 
5088 unsigned int
5089 get_max_value_id (void)
5090 {
5091   return next_value_id;
5092 }
5093 
5094 /* Return the next unique value id.  */
5095 
5096 unsigned int
5097 get_next_value_id (void)
5098 {
5099   return next_value_id++;
5100 }
5101 
5102 
5103 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5104 
5105 bool
5106 expressions_equal_p (tree e1, tree e2)
5107 {
5108   /* The obvious case.  */
5109   if (e1 == e2)
5110     return true;
5111 
5112   /* If either one is VN_TOP consider them equal.  */
5113   if (e1 == VN_TOP || e2 == VN_TOP)
5114     return true;
5115 
5116   /* If only one of them is null, they cannot be equal.  */
5117   if (!e1 || !e2)
5118     return false;
5119 
5120   /* Now perform the actual comparison.  */
5121   if (TREE_CODE (e1) == TREE_CODE (e2)
5122       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5123     return true;
5124 
5125   return false;
5126 }
5127 
5128 
5129 /* Return true if the nary operation NARY may trap.  This is a copy
5130    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5131 
5132 bool
5133 vn_nary_may_trap (vn_nary_op_t nary)
5134 {
5135   tree type;
5136   tree rhs2 = NULL_TREE;
5137   bool honor_nans = false;
5138   bool honor_snans = false;
5139   bool fp_operation = false;
5140   bool honor_trapv = false;
5141   bool handled, ret;
5142   unsigned i;
5143 
5144   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5145       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5146       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5147     {
5148       type = nary->type;
5149       fp_operation = FLOAT_TYPE_P (type);
5150       if (fp_operation)
5151 	{
5152 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5153 	  honor_snans = flag_signaling_nans != 0;
5154 	}
5155       else if (INTEGRAL_TYPE_P (type)
5156 	       && TYPE_OVERFLOW_TRAPS (type))
5157 	honor_trapv = true;
5158     }
5159   if (nary->length >= 2)
5160     rhs2 = nary->op[1];
5161   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5162 				       honor_trapv,
5163 				       honor_nans, honor_snans, rhs2,
5164 				       &handled);
5165   if (handled
5166       && ret)
5167     return true;
5168 
5169   for (i = 0; i < nary->length; ++i)
5170     if (tree_could_trap_p (nary->op[i]))
5171       return true;
5172 
5173   return false;
5174 }
5175 
5176 
5177 class eliminate_dom_walker : public dom_walker
5178 {
5179 public:
5180   eliminate_dom_walker (cdi_direction, bitmap);
5181   ~eliminate_dom_walker ();
5182 
5183   virtual edge before_dom_children (basic_block);
5184   virtual void after_dom_children (basic_block);
5185 
5186   tree eliminate_avail (tree op);
5187   void eliminate_push_avail (tree op);
5188   tree eliminate_insert (gimple_stmt_iterator *gsi, tree val);
5189 
5190   bool do_pre;
5191   unsigned int el_todo;
5192   unsigned int eliminations;
5193   unsigned int insertions;
5194 
5195   /* SSA names that had their defs inserted by PRE if do_pre.  */
5196   bitmap inserted_exprs;
5197 
5198   /* Blocks with statements that have had their EH properties changed.  */
5199   bitmap need_eh_cleanup;
5200 
5201   /* Blocks with statements that have had their AB properties changed.  */
5202   bitmap need_ab_cleanup;
5203 
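  /* Statements queued for removal and noreturn calls queued for
     fixup; both are processed after the domwalk in vn_eliminate.  */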
5204   auto_vec<gimple *> to_remove;
5205   auto_vec<gimple *> to_fixup;
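  /* Current leader for each value, indexed by the SSA version of the
     value's representative name, with an unwind stack used by
     after_dom_children.  */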
5206   auto_vec<tree> avail;
5207   auto_vec<tree> avail_stack;
5208 };
5209 
5210 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5211 					    bitmap inserted_exprs_)
5212   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5213     el_todo (0), eliminations (0), insertions (0),
5214     inserted_exprs (inserted_exprs_)
5215 {
5216   need_eh_cleanup = BITMAP_ALLOC (NULL);
5217   need_ab_cleanup = BITMAP_ALLOC (NULL);
5218 }
5219 
5220 eliminate_dom_walker::~eliminate_dom_walker ()
5221 {
5222   BITMAP_FREE (need_eh_cleanup);
5223   BITMAP_FREE (need_ab_cleanup);
5224 }
5225 
5226 /* Return a leader for OP that is available at the current point of the
5227    eliminate domwalk.  */
5228 
5229 tree
5230 eliminate_dom_walker::eliminate_avail (tree op)
5231 {
5232   tree valnum = VN_INFO (op)->valnum;
5233   if (TREE_CODE (valnum) == SSA_NAME)
5234     {
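      /* SSA names with a default definition are their own leader and
	 available everywhere.  */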
5235       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5236 	return valnum;
5237       if (avail.length () > SSA_NAME_VERSION (valnum))
5238 	return avail[SSA_NAME_VERSION (valnum)];
5239     }
5240   else if (is_gimple_min_invariant (valnum))
5241     return valnum;
5242   return NULL_TREE;
5243 }
5244 
5245 /* At the current point of the eliminate domwalk make OP available.  */
5246 
5247 void
5248 eliminate_dom_walker::eliminate_push_avail (tree op)
5249 {
5250   tree valnum = VN_INFO (op)->valnum;
5251   if (TREE_CODE (valnum) == SSA_NAME)
5252     {
5253       if (avail.length () <= SSA_NAME_VERSION (valnum))
5254 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5255       tree pushop = op;
5256       if (avail[SSA_NAME_VERSION (valnum)])
5257 	pushop = avail[SSA_NAME_VERSION (valnum)];
5258       avail_stack.safe_push (pushop);
5259       avail[SSA_NAME_VERSION (valnum)] = op;
5260     }
5261 }
5262 
5263 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
5264    the leader for the expression if insertion was successful.  */
5265 
5266 tree
5267 eliminate_dom_walker::eliminate_insert (gimple_stmt_iterator *gsi, tree val)
5268 {
5269   /* We can insert a sequence with a single assignment only.  */
5270   gimple_seq stmts = VN_INFO (val)->expr;
5271   if (!gimple_seq_singleton_p (stmts))
5272     return NULL_TREE;
5273   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5274   if (!stmt
5275       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5276 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5277 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5278 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5279 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5280     return NULL_TREE;
5281 
5282   tree op = gimple_assign_rhs1 (stmt);
5283   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5284       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5285     op = TREE_OPERAND (op, 0);
5286   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (op) : op;
5287   if (!leader)
5288     return NULL_TREE;
5289 
5290   tree res;
5291   stmts = NULL;
5292   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5293     res = gimple_build (&stmts, BIT_FIELD_REF,
5294 			TREE_TYPE (val), leader,
5295 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5296 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5297   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5298     res = gimple_build (&stmts, BIT_AND_EXPR,
5299 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5300   else
5301     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5302 			TREE_TYPE (val), leader);
5303   if (TREE_CODE (res) != SSA_NAME
5304       || SSA_NAME_IS_DEFAULT_DEF (res)
5305       || gimple_bb (SSA_NAME_DEF_STMT (res)))
5306     {
5307       gimple_seq_discard (stmts);
5308 
5309       /* During propagation we have to treat SSA info conservatively
5310          and thus we can end up simplifying the inserted expression
5311 	 at elimination time to something not defined in stmts.  */
5312       /* But then this is a redundancy we failed to detect.  Which means
5313          res now has two values.  That doesn't play well with how
5314 	 we track availability here, so give up.  */
5315       if (dump_file && (dump_flags & TDF_DETAILS))
5316 	{
5317 	  if (TREE_CODE (res) == SSA_NAME)
5318 	    res = eliminate_avail (res);
5319 	  if (res)
5320 	    {
5321 	      fprintf (dump_file, "Failed to insert expression for value ");
5322 	      print_generic_expr (dump_file, val);
5323 	      fprintf (dump_file, " which is really fully redundant to ");
5324 	      print_generic_expr (dump_file, res);
5325 	      fprintf (dump_file, "\n");
5326 	    }
5327 	}
5328 
5329       return NULL_TREE;
5330     }
5331   else
5332     {
5333       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5334       VN_INFO_GET (res)->valnum = val;
5335     }
5336 
5337   insertions++;
5338   if (dump_file && (dump_flags & TDF_DETAILS))
5339     {
5340       fprintf (dump_file, "Inserted ");
5341       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5342     }
5343 
5344   return res;
5345 }
5346 
5347 
5348 
5349 /* Perform elimination for the basic-block B during the domwalk.  */
5350 
5351 edge
5352 eliminate_dom_walker::before_dom_children (basic_block b)
5353 {
5354   /* Mark new bb.  */
5355   avail_stack.safe_push (NULL_TREE);
5356 
5357   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
5358   edge_iterator ei;
5359   edge e;
5360   FOR_EACH_EDGE (e, ei, b->preds)
5361     if (e->flags & EDGE_EXECUTABLE)
5362       break;
5363   if (! e)
5364     return NULL;
5365 
5366   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5367     {
5368       gphi *phi = gsi.phi ();
5369       tree res = PHI_RESULT (phi);
5370 
5371       if (virtual_operand_p (res))
5372 	{
5373 	  gsi_next (&gsi);
5374 	  continue;
5375 	}
5376 
5377       tree sprime = eliminate_avail (res);
5378       if (sprime
5379 	  && sprime != res)
5380 	{
5381 	  if (dump_file && (dump_flags & TDF_DETAILS))
5382 	    {
5383 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
5384 	      print_generic_expr (dump_file, res);
5385 	      fprintf (dump_file, " with ");
5386 	      print_generic_expr (dump_file, sprime);
5387 	      fprintf (dump_file, "\n");
5388 	    }
5389 
5390 	  /* If we inserted this PHI node ourself, it's not an elimination.  */
5391 	  if (! inserted_exprs
5392 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5393 	    eliminations++;
5394 
5395 	  /* If we will propagate into all uses don't bother to do
5396 	     anything.  */
5397 	  if (may_propagate_copy (res, sprime))
5398 	    {
5399 	      /* Mark the PHI for removal.  */
5400 	      to_remove.safe_push (phi);
5401 	      gsi_next (&gsi);
5402 	      continue;
5403 	    }
5404 
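	  /* Otherwise remove the PHI and keep the value live via an
	     explicit assignment RES = SPRIME at the start of the block.  */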
5405 	  remove_phi_node (&gsi, false);
5406 
5407 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5408 	    sprime = fold_convert (TREE_TYPE (res), sprime);
5409 	  gimple *stmt = gimple_build_assign (res, sprime);
5410 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5411 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5412 	  continue;
5413 	}
5414 
5415       eliminate_push_avail (res);
5416       gsi_next (&gsi);
5417     }
5418 
5419   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5420        !gsi_end_p (gsi);
5421        gsi_next (&gsi))
5422     {
5423       tree sprime = NULL_TREE;
5424       gimple *stmt = gsi_stmt (gsi);
5425       tree lhs = gimple_get_lhs (stmt);
5426       if (lhs && TREE_CODE (lhs) == SSA_NAME
5427 	  && !gimple_has_volatile_ops (stmt)
5428 	  /* See PR43491.  Do not replace a global register variable when
5429 	     it is the RHS of an assignment.  Do replace local register
5430 	     variables since gcc does not guarantee a local variable will
5431 	     be allocated in a register.
5432 	     ???  The fix isn't effective here.  This should instead
5433 	     be ensured by not value-numbering them the same but treating
5434 	     them like volatiles?  */
5435 	  && !(gimple_assign_single_p (stmt)
5436 	       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5437 		   && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5438 		   && is_global_var (gimple_assign_rhs1 (stmt)))))
5439 	{
5440 	  sprime = eliminate_avail (lhs);
5441 	  if (!sprime)
5442 	    {
5443 	      /* If there is no existing usable leader but SCCVN thinks
5444 		 it has an expression it wants to use as replacement,
5445 		 insert that.  */
5446 	      tree val = VN_INFO (lhs)->valnum;
5447 	      if (val != VN_TOP
5448 		  && TREE_CODE (val) == SSA_NAME
5449 		  && VN_INFO (val)->needs_insertion
5450 		  && VN_INFO (val)->expr != NULL
5451 		  && (sprime = eliminate_insert (&gsi, val)) != NULL_TREE)
5452 		eliminate_push_avail (sprime);
5453 	    }
5454 
5455 	  /* If this now constitutes a copy duplicate points-to
5456 	     and range info appropriately.  This is especially
5457 	     important for inserted code.  See tree-ssa-copy.c
5458 	     for similar code.  */
5459 	  if (sprime
5460 	      && TREE_CODE (sprime) == SSA_NAME)
5461 	    {
5462 	      basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5463 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
5464 		  && VN_INFO_PTR_INFO (lhs)
5465 		  && ! VN_INFO_PTR_INFO (sprime))
5466 		{
5467 		  duplicate_ssa_name_ptr_info (sprime,
5468 					       VN_INFO_PTR_INFO (lhs));
5469 		  if (b != sprime_b)
5470 		    mark_ptr_info_alignment_unknown
5471 			(SSA_NAME_PTR_INFO (sprime));
5472 		}
5473 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5474 		       && VN_INFO_RANGE_INFO (lhs)
5475 		       && ! VN_INFO_RANGE_INFO (sprime)
5476 		       && b == sprime_b)
5477 		duplicate_ssa_name_range_info (sprime,
5478 					       VN_INFO_RANGE_TYPE (lhs),
5479 					       VN_INFO_RANGE_INFO (lhs));
5480 	    }
5481 
5482 	  /* Inhibit the use of an inserted PHI on a loop header when
5483 	     the address of the memory reference is a simple induction
5484 	     variable.  In other cases the vectorizer won't do anything
5485 	     anyway (either it's loop invariant or a complicated
5486 	     expression).  */
5487 	  if (sprime
5488 	      && TREE_CODE (sprime) == SSA_NAME
5489 	      && do_pre
5490 	      && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5491 	      && loop_outer (b->loop_father)
5492 	      && has_zero_uses (sprime)
5493 	      && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5494 	      && gimple_assign_load_p (stmt))
5495 	    {
5496 	      gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5497 	      basic_block def_bb = gimple_bb (def_stmt);
5498 	      if (gimple_code (def_stmt) == GIMPLE_PHI
5499 		  && def_bb->loop_father->header == def_bb)
5500 		{
5501 		  loop_p loop = def_bb->loop_father;
5502 		  ssa_op_iter iter;
5503 		  tree op;
5504 		  bool found = false;
5505 		  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5506 		    {
5507 		      affine_iv iv;
5508 		      def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5509 		      if (def_bb
5510 			  && flow_bb_inside_loop_p (loop, def_bb)
5511 			  && simple_iv (loop, loop, op, &iv, true))
5512 			{
5513 			  found = true;
5514 			  break;
5515 			}
5516 		    }
5517 		  if (found)
5518 		    {
5519 		      if (dump_file && (dump_flags & TDF_DETAILS))
5520 			{
5521 			  fprintf (dump_file, "Not replacing ");
5522 			  print_gimple_expr (dump_file, stmt, 0);
5523 			  fprintf (dump_file, " with ");
5524 			  print_generic_expr (dump_file, sprime);
5525 			  fprintf (dump_file, " which would add a loop"
5526 				   " carried dependence to loop %d\n",
5527 				   loop->num);
5528 			}
5529 		      /* Don't keep sprime available.  */
5530 		      sprime = NULL_TREE;
5531 		    }
5532 		}
5533 	    }
5534 
5535 	  if (sprime)
5536 	    {
5537 	      /* If we can propagate the value computed for LHS into
5538 		 all uses don't bother doing anything with this stmt.  */
5539 	      if (may_propagate_copy (lhs, sprime))
5540 		{
5541 		  /* Mark it for removal.  */
5542 		  to_remove.safe_push (stmt);
5543 
5544 		  /* ???  Don't count copy/constant propagations.  */
5545 		  if (gimple_assign_single_p (stmt)
5546 		      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5547 			  || gimple_assign_rhs1 (stmt) == sprime))
5548 		    continue;
5549 
5550 		  if (dump_file && (dump_flags & TDF_DETAILS))
5551 		    {
5552 		      fprintf (dump_file, "Replaced ");
5553 		      print_gimple_expr (dump_file, stmt, 0);
5554 		      fprintf (dump_file, " with ");
5555 		      print_generic_expr (dump_file, sprime);
5556 		      fprintf (dump_file, " in all uses of ");
5557 		      print_gimple_stmt (dump_file, stmt, 0);
5558 		    }
5559 
5560 		  eliminations++;
5561 		  continue;
5562 		}
5563 
5564 	      /* If this is an assignment from our leader (which
5565 	         happens when the value-number is a constant)
5566 		 then there is nothing to do.  */
5567 	      if (gimple_assign_single_p (stmt)
5568 		  && sprime == gimple_assign_rhs1 (stmt))
5569 		continue;
5570 
5571 	      /* Else replace its RHS.  */
5572 	      bool can_make_abnormal_goto
5573 		  = is_gimple_call (stmt)
5574 		  && stmt_can_make_abnormal_goto (stmt);
5575 
5576 	      if (dump_file && (dump_flags & TDF_DETAILS))
5577 		{
5578 		  fprintf (dump_file, "Replaced ");
5579 		  print_gimple_expr (dump_file, stmt, 0);
5580 		  fprintf (dump_file, " with ");
5581 		  print_generic_expr (dump_file, sprime);
5582 		  fprintf (dump_file, " in ");
5583 		  print_gimple_stmt (dump_file, stmt, 0);
5584 		}
5585 
5586 	      eliminations++;
5587 	      gimple *orig_stmt = stmt;
5588 	      if (!useless_type_conversion_p (TREE_TYPE (lhs),
5589 					      TREE_TYPE (sprime)))
5590 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
5591 	      tree vdef = gimple_vdef (stmt);
5592 	      tree vuse = gimple_vuse (stmt);
5593 	      propagate_tree_value_into_stmt (&gsi, sprime);
5594 	      stmt = gsi_stmt (gsi);
5595 	      update_stmt (stmt);
5596 	      if (vdef != gimple_vdef (stmt))
5597 		VN_INFO (vdef)->valnum = vuse;
5598 
5599 	      /* If we removed EH side-effects from the statement, clean
5600 		 its EH information.  */
5601 	      if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5602 		{
5603 		  bitmap_set_bit (need_eh_cleanup,
5604 				  gimple_bb (stmt)->index);
5605 		  if (dump_file && (dump_flags & TDF_DETAILS))
5606 		    fprintf (dump_file, "  Removed EH side-effects.\n");
5607 		}
5608 
5609 	      /* Likewise for AB side-effects.  */
5610 	      if (can_make_abnormal_goto
5611 		  && !stmt_can_make_abnormal_goto (stmt))
5612 		{
5613 		  bitmap_set_bit (need_ab_cleanup,
5614 				  gimple_bb (stmt)->index);
5615 		  if (dump_file && (dump_flags & TDF_DETAILS))
5616 		    fprintf (dump_file, "  Removed AB side-effects.\n");
5617 		}
5618 
5619 	      continue;
5620 	    }
5621 	}
5622 
5623       /* If the statement is a scalar store, see if the expression
5624          has the same value number as its rhs.  If so, the store is
5625          dead.  */
5626       if (gimple_assign_single_p (stmt)
5627 	  && !gimple_has_volatile_ops (stmt)
5628 	  && !is_gimple_reg (gimple_assign_lhs (stmt))
5629 	  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5630 	      || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5631 	{
5632 	  tree val;
5633 	  tree rhs = gimple_assign_rhs1 (stmt);
5634 	  vn_reference_t vnresult;
5635 	  val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5636 				     &vnresult, false);
5637 	  if (TREE_CODE (rhs) == SSA_NAME)
5638 	    rhs = VN_INFO (rhs)->valnum;
5639 	  if (val
5640 	      && operand_equal_p (val, rhs, 0))
5641 	    {
5642 	      /* We can only remove the later store if the former aliases
5643 		 at least all accesses the later one does or if the store
5644 		 was to readonly memory storing the same value.  */
5645 	      alias_set_type set = get_alias_set (lhs);
5646 	      if (! vnresult
5647 		  || vnresult->set == set
5648 		  || alias_set_subset_of (set, vnresult->set))
5649 		{
5650 		  if (dump_file && (dump_flags & TDF_DETAILS))
5651 		    {
5652 		      fprintf (dump_file, "Deleted redundant store ");
5653 		      print_gimple_stmt (dump_file, stmt, 0);
5654 		    }
5655 
5656 		  /* Queue stmt for removal.  */
5657 		  to_remove.safe_push (stmt);
5658 		  continue;
5659 		}
5660 	    }
5661 	}
5662 
5663       /* If this is a control statement value numbering left edges
5664 	 unexecuted on force the condition in a way consistent with
5665 	 that.  */
5666       if (gcond *cond = dyn_cast <gcond *> (stmt))
5667 	{
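	  /* If exactly one of the two successor edges was found
	     executable by value numbering, fold the condition so the
	     CFG cleanup removes the other edge.  */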
5668 	  if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5669 	      ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5670 	    {
5671               if (dump_file && (dump_flags & TDF_DETAILS))
5672                 {
5673                   fprintf (dump_file, "Removing unexecutable edge from ");
5674 		  print_gimple_stmt (dump_file, stmt, 0);
5675                 }
5676 	      if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5677 		  == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5678 		gimple_cond_make_true (cond);
5679 	      else
5680 		gimple_cond_make_false (cond);
5681 	      update_stmt (cond);
5682 	      el_todo |= TODO_cleanup_cfg;
5683 	      continue;
5684 	    }
5685 	}
5686 
5687       bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5688       bool was_noreturn = (is_gimple_call (stmt)
5689 			   && gimple_call_noreturn_p (stmt));
5690       tree vdef = gimple_vdef (stmt);
5691       tree vuse = gimple_vuse (stmt);
5692 
5693       /* If we didn't replace the whole stmt (or propagate the result
5694          into all uses), replace all uses on this stmt with their
5695 	 leaders.  */
5696       bool modified = false;
5697       use_operand_p use_p;
5698       ssa_op_iter iter;
5699       FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5700 	{
5701 	  tree use = USE_FROM_PTR (use_p);
5702 	  /* ???  The call code above leaves stmt operands un-updated.  */
5703 	  if (TREE_CODE (use) != SSA_NAME)
5704 	    continue;
5705 	  tree sprime = eliminate_avail (use);
5706 	  if (sprime && sprime != use
5707 	      && may_propagate_copy (use, sprime)
5708 	      /* We substitute into debug stmts to avoid excessive
5709 	         debug temporaries created by removed stmts, but we need
5710 		 to avoid doing so for inserted sprimes as we never want
5711 		 to create debug temporaries for them.  */
5712 	      && (!inserted_exprs
5713 		  || TREE_CODE (sprime) != SSA_NAME
5714 		  || !is_gimple_debug (stmt)
5715 		  || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5716 	    {
5717 	      propagate_value (use_p, sprime);
5718 	      modified = true;
5719 	    }
5720 	}
5721 
5722       /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5723          into, which is a requirement for the IPA devirt machinery.  */
5724       gimple *old_stmt = stmt;
5725       if (modified)
5726 	{
5727 	  /* If a formerly non-invariant ADDR_EXPR is turned into an
5728 	     invariant one it was on a separate stmt.  */
5729 	  if (gimple_assign_single_p (stmt)
5730 	      && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5731 	    recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5732 	  gimple_stmt_iterator prev = gsi;
5733 	  gsi_prev (&prev);
5734 	  if (fold_stmt (&gsi))
5735 	    {
5736 	      /* fold_stmt may have created new stmts in between
5737 		 the previous stmt and the folded stmt.  Mark
5738 		 all defs created there as varying to not confuse
5739 		 the SCCVN machinery as we're using that even during
5740 		 elimination.  */
5741 	      if (gsi_end_p (prev))
5742 		prev = gsi_start_bb (b);
5743 	      else
5744 		gsi_next (&prev);
5745 	      if (gsi_stmt (prev) != gsi_stmt (gsi))
5746 		do
5747 		  {
5748 		    tree def;
5749 		    ssa_op_iter dit;
5750 		    FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5751 					       dit, SSA_OP_ALL_DEFS)
5752 		      /* As existing DEFs may move between stmts
5753 			 we have to guard VN_INFO_GET.  */
5754 		      if (! has_VN_INFO (def))
5755 			VN_INFO_GET (def)->valnum = def;
5756 		    if (gsi_stmt (prev) == gsi_stmt (gsi))
5757 		      break;
5758 		    gsi_next (&prev);
5759 		  }
5760 		while (1);
5761 	    }
5762 	  stmt = gsi_stmt (gsi);
5763 	  /* In case we folded the stmt away schedule the NOP for removal.  */
5764 	  if (gimple_nop_p (stmt))
5765 	    to_remove.safe_push (stmt);
5766 	}
5767 
5768       /* Visit indirect calls and turn them into direct calls if
5769 	 possible using the devirtualization machinery.  Do this before
5770 	 checking for required EH/abnormal/noreturn cleanup as devirt
5771 	 may expose more of those.  */
5772       if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5773 	{
5774 	  tree fn = gimple_call_fn (call_stmt);
5775 	  if (fn
5776 	      && flag_devirtualize
5777 	      && virtual_method_call_p (fn))
5778 	    {
5779 	      tree otr_type = obj_type_ref_class (fn);
5780 	      unsigned HOST_WIDE_INT otr_tok
5781 		= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5782 	      tree instance;
5783 	      ipa_polymorphic_call_context context (current_function_decl,
5784 						    fn, stmt, &instance);
5785 	      context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5786 					otr_type, stmt);
5787 	      bool final;
5788 	      vec <cgraph_node *> targets
5789 		= possible_polymorphic_call_targets (obj_type_ref_class (fn),
5790 						     otr_tok, context, &final);
5791 	      if (dump_file)
5792 		dump_possible_polymorphic_call_targets (dump_file,
5793 							obj_type_ref_class (fn),
5794 							otr_tok, context);
5795 	      if (final && targets.length () <= 1 && dbg_cnt (devirt))
5796 		{
5797 		  tree fn;
5798 		  if (targets.length () == 1)
5799 		    fn = targets[0]->decl;
5800 		  else
5801 		    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5802 		  if (dump_enabled_p ())
5803 		    {
5804 		      location_t loc = gimple_location (stmt);
5805 		      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
5806 				       "converting indirect call to "
5807 				       "function %s\n",
5808 				       lang_hooks.decl_printable_name (fn, 2));
5809 		    }
5810 		  gimple_call_set_fndecl (call_stmt, fn);
5811 		  /* If changing the call to __builtin_unreachable
5812 		     or similar noreturn function, adjust gimple_call_fntype
5813 		     too.  */
5814 		  if (gimple_call_noreturn_p (call_stmt)
5815 		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5816 		      && TYPE_ARG_TYPES (TREE_TYPE (fn))
5817 		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5818 			  == void_type_node))
5819 		    gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5820 		  maybe_remove_unused_call_args (cfun, call_stmt);
5821 		  modified = true;
5822 		}
5823 	    }
5824 	}
5825 
5826       if (modified)
5827 	{
5828 	  /* When changing a call into a noreturn call, cfg cleanup
5829 	     is needed to fix up the noreturn call.  */
5830 	  if (!was_noreturn
5831 	      && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5832 	    to_fixup.safe_push (stmt);
5833 	  /* When changing a condition or switch into one we know what
5834 	     edge will be executed, schedule a cfg cleanup.  */
5835 	  if ((gimple_code (stmt) == GIMPLE_COND
5836 	       && (gimple_cond_true_p (as_a <gcond *> (stmt))
5837 		   || gimple_cond_false_p (as_a <gcond *> (stmt))))
5838 	      || (gimple_code (stmt) == GIMPLE_SWITCH
5839 		  && TREE_CODE (gimple_switch_index
5840 				  (as_a <gswitch *> (stmt))) == INTEGER_CST))
5841 	    el_todo |= TODO_cleanup_cfg;
5842 	  /* If we removed EH side-effects from the statement, clean
5843 	     its EH information.  */
5844 	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5845 	    {
5846 	      bitmap_set_bit (need_eh_cleanup,
5847 			      gimple_bb (stmt)->index);
5848 	      if (dump_file && (dump_flags & TDF_DETAILS))
5849 		fprintf (dump_file, "  Removed EH side-effects.\n");
5850 	    }
5851 	  /* Likewise for AB side-effects.  */
5852 	  if (can_make_abnormal_goto
5853 	      && !stmt_can_make_abnormal_goto (stmt))
5854 	    {
5855 	      bitmap_set_bit (need_ab_cleanup,
5856 			      gimple_bb (stmt)->index);
5857 	      if (dump_file && (dump_flags & TDF_DETAILS))
5858 		fprintf (dump_file, "  Removed AB side-effects.\n");
5859 	    }
5860 	  update_stmt (stmt);
5861 	  if (vdef != gimple_vdef (stmt))
5862 	    VN_INFO (vdef)->valnum = vuse;
5863 	}
5864 
5865       /* Make new values available - for fully redundant LHS we
5866          continue with the next stmt above and skip this.  */
5867       def_operand_p defp;
5868       FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5869 	eliminate_push_avail (DEF_FROM_PTR (defp));
5870     }
5871 
5872   /* Replace destination PHI arguments.  */
5873   FOR_EACH_EDGE (e, ei, b->succs)
5874     if (e->flags & EDGE_EXECUTABLE)
5875       for (gphi_iterator gsi = gsi_start_phis (e->dest);
5876 	   !gsi_end_p (gsi);
5877 	   gsi_next (&gsi))
5878 	{
5879 	  gphi *phi = gsi.phi ();
5880 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5881 	  tree arg = USE_FROM_PTR (use_p);
5882 	  if (TREE_CODE (arg) != SSA_NAME
5883 	      || virtual_operand_p (arg))
5884 	    continue;
5885 	  tree sprime = eliminate_avail (arg);
5886 	  if (sprime && may_propagate_copy (arg, sprime))
5887 	    propagate_value (use_p, sprime);
5888 	}
5889   return NULL;
5890 }
5891 
5892 /* Make leaders recorded in the just-left block no longer available.  */
5893 
5894 void
5895 eliminate_dom_walker::after_dom_children (basic_block)
5896 {
5897   tree entry;
5898   while ((entry = avail_stack.pop ()) != NULL_TREE)
5899     {
5900       tree valnum = VN_INFO (entry)->valnum;
5901       tree old = avail[SSA_NAME_VERSION (valnum)];
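      /* If the popped entry is still the current leader it was pushed
	 for a value with no previous leader, so clear the slot.
	 Otherwise the entry is the previous leader to restore.  */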
5902       if (old == entry)
5903 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5904       else
5905 	avail[SSA_NAME_VERSION (valnum)] = entry;
5906     }
5907 }
5908 
5909 /* Eliminate fully redundant computations.  */
5910 
5911 unsigned int
5912 vn_eliminate (bitmap inserted_exprs)
5913 {
5914   eliminate_dom_walker el (CDI_DOMINATORS, inserted_exprs);
5915   el.avail.reserve (num_ssa_names);
5916 
5917   el.walk (cfun->cfg->x_entry_block_ptr);
5918 
5919   /* We cannot remove stmts during BB walk, especially not release SSA
5920      names there as this confuses the VN machinery.  The stmts ending
5921      up in to_remove are either stores or simple copies.
5922      Remove stmts in reverse order to make debug stmt creation possible.  */
5923   while (!el.to_remove.is_empty ())
5924     {
5925       gimple *stmt = el.to_remove.pop ();
5926 
5927       if (dump_file && (dump_flags & TDF_DETAILS))
5928 	{
5929 	  fprintf (dump_file, "Removing dead stmt ");
5930 	  print_gimple_stmt (dump_file, stmt, 0, 0);
5931 	}
5932 
5933       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5934       if (gimple_code (stmt) == GIMPLE_PHI)
5935 	remove_phi_node (&gsi, true);
5936       else
5937 	{
5938 	  basic_block bb = gimple_bb (stmt);
5939 	  unlink_stmt_vdef (stmt);
5940 	  if (gsi_remove (&gsi, true))
5941 	    bitmap_set_bit (el.need_eh_cleanup, bb->index);
5942 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5943 	    bitmap_set_bit (el.need_ab_cleanup, bb->index);
5944 	  release_defs (stmt);
5945 	}
5946 
5947       /* Removing a stmt may expose a forwarder block.  */
5948       el.el_todo |= TODO_cleanup_cfg;
5949     }
5950 
5951   /* Fixup stmts that became noreturn calls.  This may require splitting
5952      blocks and thus isn't possible during the dominator walk.  Do this
5953      in reverse order so we don't inadvertently remove a stmt we want to
5954      fixup by visiting a dominating now noreturn call first.  */
5955   while (!el.to_fixup.is_empty ())
5956     {
5957       gimple *stmt = el.to_fixup.pop ();
5958 
5959       if (dump_file && (dump_flags & TDF_DETAILS))
5960 	{
5961 	  fprintf (dump_file, "Fixing up noreturn call ");
5962 	  print_gimple_stmt (dump_file, stmt, 0);
5963 	}
5964 
5965       if (fixup_noreturn_call (stmt))
5966 	el.el_todo |= TODO_cleanup_cfg;
5967     }
5968 
5969   bool do_eh_cleanup = !bitmap_empty_p (el.need_eh_cleanup);
5970   bool do_ab_cleanup = !bitmap_empty_p (el.need_ab_cleanup);
5971 
5972   if (do_eh_cleanup)
5973     gimple_purge_all_dead_eh_edges (el.need_eh_cleanup);
5974 
5975   if (do_ab_cleanup)
5976     gimple_purge_all_dead_abnormal_call_edges (el.need_ab_cleanup);
5977 
5978   if (do_eh_cleanup || do_ab_cleanup)
5979     el.el_todo |= TODO_cleanup_cfg;
5980 
5981   statistics_counter_event (cfun, "Eliminated", el.eliminations);
5982   statistics_counter_event (cfun, "Insertions", el.insertions);
5983 
5984   return el.el_todo;
5985 }
5986 
5987 
5988 namespace {
5989 
5990 const pass_data pass_data_fre =
5991 {
5992   GIMPLE_PASS, /* type */
5993   "fre", /* name */
5994   OPTGROUP_NONE, /* optinfo_flags */
5995   TV_TREE_FRE, /* tv_id */
5996   ( PROP_cfg | PROP_ssa ), /* properties_required */
5997   0, /* properties_provided */
5998   0, /* properties_destroyed */
5999   0, /* todo_flags_start */
6000   0, /* todo_flags_finish */
6001 };
6002 
6003 class pass_fre : public gimple_opt_pass
6004 {
6005 public:
6006   pass_fre (gcc::context *ctxt)
6007     : gimple_opt_pass (pass_data_fre, ctxt)
6008   {}
6009 
6010   /* opt_pass methods: */
6011   opt_pass * clone () { return new pass_fre (m_ctxt); }
6012   virtual bool gate (function *) { return flag_tree_fre != 0; }
6013   virtual unsigned int execute (function *);
6014 
6015 }; // class pass_fre
6016 
6017 unsigned int
6018 pass_fre::execute (function *)
6019 {
6020   unsigned int todo = 0;
6021 
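  /* Value-number the whole function.  VN_WALKREWRITE selects the most
     aggressive use of the alias oracle walk during the VN process.  */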
6022   run_scc_vn (VN_WALKREWRITE);
6023 
6024   /* Remove all the redundant expressions.  */
6025   todo |= vn_eliminate (NULL);
6026 
6027   scc_vn_restore_ssa_info ();
6028   free_scc_vn ();
6029 
6030   return todo;
6031 }
6032 
6033 } // anon namespace
6034 
6035 gimple_opt_pass *
6036 make_pass_fre (gcc::context *ctxt)
6037 {
6038   return new pass_fre (ctxt);
6039 }
6040