1 /* SCC value numbering for trees
2    Copyright (C) 2006-2016 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-ssa-sccvn.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 
63 /* This algorithm is based on the SCC algorithm presented by Keith
64    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
65    (http://citeseer.ist.psu.edu/41805.html).  In
66 straight-line code, it is equivalent to a regular hash-based value
67    numbering that is performed in reverse postorder.
68 
69    For code with cycles, there are two alternatives, both of which
70    require keeping the hashtables separate from the actual list of
71    value numbers for SSA names.
72 
73    1. Iterate value numbering in an RPO walk of the blocks, removing
74    all the entries from the hashtable after each iteration (but
75    keeping the SSA name->value number mapping between iterations).
76    Iterate until it does not change.
77 
78    2. Perform value numbering as part of an SCC walk on the SSA graph,
79    iterating only the cycles in the SSA graph until they do not change
80    (using a separate, optimistic hashtable for value numbering the SCC
81    operands).
82 
83    The second is not just faster in practice (because most SSA graph
84    cycles do not involve all the variables in the graph), it also has
85    some nice properties.
86 
87    One of these nice properties is that when we pop an SCC off the
88    stack, we are guaranteed to have processed all the operands coming from
89    *outside of that SCC*, so we do not need to do anything special to
90    ensure they have value numbers.
91 
92    Another nice property is that the SCC walk is done as part of a DFS
93    of the SSA graph, which makes it easy to perform combining and
94    simplifying operations at the same time.
95 
96    The code below is deliberately written in a way that makes it easy
97    to separate the SCC walk from the other work it does.
98 
99    In order to propagate constants through the code, we track which
100    expressions contain constants, and use those while folding.  In
101    theory, we could also track expressions whose value numbers are
102    replaced, in case we end up folding based on expression
103    identities.
104 
105    In order to value number memory, we assign value numbers to vuses.
106    This enables us to note that, for example, stores to the same
107    address of the same value from the same starting memory states are
108    equivalent.
109    TODO:
110 
111    1. We can iterate only the changing portions of the SCC's, but
112    I have not seen an SCC big enough for this to be a win.
113    2. If you differentiate between phi nodes for loops and phi nodes
114    for if-then-else, you can properly consider phi nodes in different
115    blocks for equivalence.
116    3. We could value number vuses in more cases, particularly, whole
117    structure copies.
118 */
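/* Illustrative sketch (not part of the original comment): for straight-line
   GIMPLE such as

     a_1 = x_2 + 1;
     b_3 = x_2 + 1;
     c_4 = a_1 - b_3;

   a hash-based walk in reverse postorder gives a_1 and b_3 the same value
   number, which in turn allows c_4 to be valued to the constant 0.  For a
   cycle such as

     # i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 1;

   the SCC {i_1, i_2} is iterated using the optimistic table until the
   value numbers no longer change.  */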
119 
120 
121 static tree *last_vuse_ptr;
122 static vn_lookup_kind vn_walk_kind;
123 static vn_lookup_kind default_vn_walk_kind;
124 bitmap const_parms;
125 
126 /* vn_nary_op hashtable helpers.  */
127 
128 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
129 {
130   typedef vn_nary_op_s *compare_type;
131   static inline hashval_t hash (const vn_nary_op_s *);
132   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
133 };
134 
135 /* Return the computed hashcode for nary operation P1.  */
136 
137 inline hashval_t
138 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
139 {
140   return vno1->hashcode;
141 }
142 
143 /* Compare nary operations P1 and P2 and return true if they are
144    equivalent.  */
145 
146 inline bool
147 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
148 {
149   return vn_nary_op_eq (vno1, vno2);
150 }
151 
152 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
153 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
154 
155 
156 /* vn_phi hashtable helpers.  */
157 
158 static int
159 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
160 
161 struct vn_phi_hasher : pointer_hash <vn_phi_s>
162 {
163   static inline hashval_t hash (const vn_phi_s *);
164   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
165   static inline void remove (vn_phi_s *);
166 };
167 
168 /* Return the computed hashcode for phi operation P1.  */
169 
170 inline hashval_t
171 vn_phi_hasher::hash (const vn_phi_s *vp1)
172 {
173   return vp1->hashcode;
174 }
175 
176 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
177 
178 inline bool
179 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
180 {
181   return vn_phi_eq (vp1, vp2);
182 }
183 
184 /* Free a phi operation structure VP.  */
185 
186 inline void
187 vn_phi_hasher::remove (vn_phi_s *phi)
188 {
189   phi->phiargs.release ();
190 }
191 
192 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
193 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
194 
195 
196 /* Compare two reference operands P1 and P2 for equality.  Return true if
197    they are equal, and false otherwise.  */
198 
199 static int
200 vn_reference_op_eq (const void *p1, const void *p2)
201 {
202   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
203   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
204 
205   return (vro1->opcode == vro2->opcode
206 	  /* We do not care for differences in type qualification.  */
207 	  && (vro1->type == vro2->type
208 	      || (vro1->type && vro2->type
209 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
210 					 TYPE_MAIN_VARIANT (vro2->type))))
211 	  && expressions_equal_p (vro1->op0, vro2->op0)
212 	  && expressions_equal_p (vro1->op1, vro2->op1)
213 	  && expressions_equal_p (vro1->op2, vro2->op2));
214 }
215 
216 /* Free a reference operation structure VP.  */
217 
218 static inline void
219 free_reference (vn_reference_s *vr)
220 {
221   vr->operands.release ();
222 }
223 
224 
225 /* vn_reference hashtable helpers.  */
226 
227 struct vn_reference_hasher : pointer_hash <vn_reference_s>
228 {
229   static inline hashval_t hash (const vn_reference_s *);
230   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
231   static inline void remove (vn_reference_s *);
232 };
233 
234 /* Return the hashcode for a given reference operation P1.  */
235 
236 inline hashval_t
237 vn_reference_hasher::hash (const vn_reference_s *vr1)
238 {
239   return vr1->hashcode;
240 }
241 
242 inline bool
243 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
244 {
245   return vn_reference_eq (v, c);
246 }
247 
248 inline void
249 vn_reference_hasher::remove (vn_reference_s *v)
250 {
251   free_reference (v);
252 }
253 
254 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
255 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
256 
257 
258 /* The set of hashtables and alloc_pool's for their items.  */
259 
260 typedef struct vn_tables_s
261 {
262   vn_nary_op_table_type *nary;
263   vn_phi_table_type *phis;
264   vn_reference_table_type *references;
265   struct obstack nary_obstack;
266   object_allocator<vn_phi_s> *phis_pool;
267   object_allocator<vn_reference_s> *references_pool;
268 } *vn_tables_t;
269 
270 
271 /* vn_constant hashtable helpers.  */
272 
273 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
274 {
275   static inline hashval_t hash (const vn_constant_s *);
276   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
277 };
278 
279 /* Hash table hash function for vn_constant_t.  */
280 
281 inline hashval_t
282 vn_constant_hasher::hash (const vn_constant_s *vc1)
283 {
284   return vc1->hashcode;
285 }
286 
287 /* Hash table equality function for vn_constant_t.  */
288 
289 inline bool
290 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
291 {
292   if (vc1->hashcode != vc2->hashcode)
293     return false;
294 
295   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
296 }
297 
298 static hash_table<vn_constant_hasher> *constant_to_value_id;
299 static bitmap constant_value_ids;
300 
301 
302 /* Valid hashtables storing information we have proven to be
303    correct.  */
304 
305 static vn_tables_t valid_info;
306 
307 /* Optimistic hashtables storing information we are making assumptions about
308    during iterations.  */
309 
310 static vn_tables_t optimistic_info;
311 
312 /* Pointer to the set of hashtables that is currently being used.
313    Should always point to either the optimistic_info, or the
314    valid_info.  */
315 
316 static vn_tables_t current_info;
317 
318 
319 /* Reverse post order index for each basic block.  */
320 
321 static int *rpo_numbers;
322 
323 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
324 
325 /* Return the SSA value of the VUSE x, supporting released VDEFs
326    during elimination which will value-number the VDEF to the
327    associated VUSE (but not substitute in the whole lattice).  */
328 
329 static inline tree
330 vuse_ssa_val (tree x)
331 {
332   if (!x)
333     return NULL_TREE;
334 
335   do
336     {
337       x = SSA_VAL (x);
338     }
339   while (SSA_NAME_IN_FREE_LIST (x));
340 
341   return x;
342 }
343 
344 /* This represents the top of the VN lattice, which is the universal
345    value.  */
346 
347 tree VN_TOP;
348 
349 /* Unique counter for our value ids.  */
350 
351 static unsigned int next_value_id;
352 
353 /* Next DFS number and the stack for strongly connected component
354    detection. */
355 
356 static unsigned int next_dfs_num;
357 static vec<tree> sccstack;
358 
359 
360 
361 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
362    are allocated on an obstack for locality reasons, and to free them
363    without looping over the vec.  */
364 
365 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
366 static struct obstack vn_ssa_aux_obstack;
367 
368 /* Return whether there is value numbering information for a given SSA name.  */
369 
370 bool
371 has_VN_INFO (tree name)
372 {
373   if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
374     return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
375   return false;
376 }
377 
378 /* Return the value numbering information for a given SSA name.  */
379 
380 vn_ssa_aux_t
381 VN_INFO (tree name)
382 {
383   vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
384   gcc_checking_assert (res);
385   return res;
386 }
387 
388 /* Set the value numbering info for a given SSA name to a given
389    value.  */
390 
391 static inline void
392 VN_INFO_SET (tree name, vn_ssa_aux_t value)
393 {
394   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
395 }
396 
397 /* Initialize the value numbering info for a given SSA name.
398    This should be called just once for every SSA name.  */
399 
400 vn_ssa_aux_t
401 VN_INFO_GET (tree name)
402 {
403   vn_ssa_aux_t newinfo;
404 
405   gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
406 	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
407   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
408   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
409   if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
410     vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
411   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
412   return newinfo;
413 }
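/* Minimal usage sketch (illustrative only): initialization code typically
   allocates the info and seeds the lattice with

     vn_ssa_aux_t info = VN_INFO_GET (name);
     info->valnum = VN_TOP;

   and later code reads the result back via VN_INFO (name)->valnum.  */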
414 
415 
416 /* Return the vn_kind the expression computed by the stmt should be
417    associated with.  */
418 
419 enum vn_kind
420 vn_get_stmt_kind (gimple *stmt)
421 {
422   switch (gimple_code (stmt))
423     {
424     case GIMPLE_CALL:
425       return VN_REFERENCE;
426     case GIMPLE_PHI:
427       return VN_PHI;
428     case GIMPLE_ASSIGN:
429       {
430 	enum tree_code code = gimple_assign_rhs_code (stmt);
431 	tree rhs1 = gimple_assign_rhs1 (stmt);
432 	switch (get_gimple_rhs_class (code))
433 	  {
434 	  case GIMPLE_UNARY_RHS:
435 	  case GIMPLE_BINARY_RHS:
436 	  case GIMPLE_TERNARY_RHS:
437 	    return VN_NARY;
438 	  case GIMPLE_SINGLE_RHS:
439 	    switch (TREE_CODE_CLASS (code))
440 	      {
441 	      case tcc_reference:
442 		/* VOP-less references can go through unary case.  */
443 		if ((code == REALPART_EXPR
444 		     || code == IMAGPART_EXPR
445 		     || code == VIEW_CONVERT_EXPR
446 		     || code == BIT_FIELD_REF)
447 		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
448 		  return VN_NARY;
449 
450 		/* Fallthrough.  */
451 	      case tcc_declaration:
452 		return VN_REFERENCE;
453 
454 	      case tcc_constant:
455 		return VN_CONSTANT;
456 
457 	      default:
458 		if (code == ADDR_EXPR)
459 		  return (is_gimple_min_invariant (rhs1)
460 			  ? VN_CONSTANT : VN_REFERENCE);
461 		else if (code == CONSTRUCTOR)
462 		  return VN_NARY;
463 		return VN_NONE;
464 	      }
465 	  default:
466 	    return VN_NONE;
467 	  }
468       }
469     default:
470       return VN_NONE;
471     }
472 }
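/* For example (illustrative): "x_1 = y_2 + z_3" is classified as VN_NARY,
   a load such as "x_1 = a.b" or a call is VN_REFERENCE, a PHI node is
   VN_PHI, and "x_1 = 42" is VN_CONSTANT.  */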
473 
474 /* Lookup a value id for CONSTANT and return it.  If it does not
475    exist returns 0.  */
476 
477 unsigned int
478 get_constant_value_id (tree constant)
479 {
480   vn_constant_s **slot;
481   struct vn_constant_s vc;
482 
483   vc.hashcode = vn_hash_constant_with_type (constant);
484   vc.constant = constant;
485   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
486   if (slot)
487     return (*slot)->value_id;
488   return 0;
489 }
490 
491 /* Lookup a value id for CONSTANT, and if it does not exist, create a
492    new one and return it.  If it does exist, return it.  */
493 
494 unsigned int
495 get_or_alloc_constant_value_id (tree constant)
496 {
497   vn_constant_s **slot;
498   struct vn_constant_s vc;
499   vn_constant_t vcp;
500 
501   vc.hashcode = vn_hash_constant_with_type (constant);
502   vc.constant = constant;
503   slot = constant_to_value_id->find_slot (&vc, INSERT);
504   if (*slot)
505     return (*slot)->value_id;
506 
507   vcp = XNEW (struct vn_constant_s);
508   vcp->hashcode = vc.hashcode;
509   vcp->constant = constant;
510   vcp->value_id = get_next_value_id ();
511   *slot = vcp;
512   bitmap_set_bit (constant_value_ids, vcp->value_id);
513   return vcp->value_id;
514 }
515 
516 /* Return true if V is a value id for a constant.  */
517 
518 bool
519 value_id_constant_p (unsigned int v)
520 {
521   return bitmap_bit_p (constant_value_ids, v);
522 }
523 
524 /* Compute the hash for a reference operand VRO1.  */
525 
526 static void
527 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
528 {
529   hstate.add_int (vro1->opcode);
530   if (vro1->op0)
531     inchash::add_expr (vro1->op0, hstate);
532   if (vro1->op1)
533     inchash::add_expr (vro1->op1, hstate);
534   if (vro1->op2)
535     inchash::add_expr (vro1->op2, hstate);
536 }
537 
538 /* Compute a hash for the reference operation VR1 and return it.  */
539 
540 static hashval_t
541 vn_reference_compute_hash (const vn_reference_t vr1)
542 {
543   inchash::hash hstate;
544   hashval_t result;
545   int i;
546   vn_reference_op_t vro;
547   HOST_WIDE_INT off = -1;
548   bool deref = false;
549 
550   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
551     {
552       if (vro->opcode == MEM_REF)
553 	deref = true;
554       else if (vro->opcode != ADDR_EXPR)
555 	deref = false;
556       if (vro->off != -1)
557 	{
558 	  if (off == -1)
559 	    off = 0;
560 	  off += vro->off;
561 	}
562       else
563 	{
564 	  if (off != -1
565 	      && off != 0)
566 	    hstate.add_int (off);
567 	  off = -1;
568 	  if (deref
569 	      && vro->opcode == ADDR_EXPR)
570 	    {
571 	      if (vro->op0)
572 		{
573 		  tree op = TREE_OPERAND (vro->op0, 0);
574 		  hstate.add_int (TREE_CODE (op));
575 		  inchash::add_expr (op, hstate);
576 		}
577 	    }
578 	  else
579 	    vn_reference_op_compute_hash (vro, hstate);
580 	}
581     }
582   result = hstate.end ();
583   /* ??? We would ICE later if we hash instead of adding that in. */
584   if (vr1->vuse)
585     result += SSA_NAME_VERSION (vr1->vuse);
586 
587   return result;
588 }
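/* Note (an illustrative reading, not a separate guarantee): a dereference
   of an ADDR_EXPR is hashed via the underlying decl rather than the
   address operand, and adjacent known constant offsets are summed before
   being mixed in, so equal accesses that merely distribute their constant
   offset differently over the operands can still hash identically.  */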
589 
590 /* Return true if reference operations VR1 and VR2 are equivalent.  This
591    means they have the same set of operands and vuses.  */
592 
593 bool
594 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
595 {
596   unsigned i, j;
597 
598   /* Early out if this is not a hash collision.  */
599   if (vr1->hashcode != vr2->hashcode)
600     return false;
601 
602   /* The VOP needs to be the same.  */
603   if (vr1->vuse != vr2->vuse)
604     return false;
605 
606   /* If the operands are the same we are done.  */
607   if (vr1->operands == vr2->operands)
608     return true;
609 
610   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
611     return false;
612 
613   if (INTEGRAL_TYPE_P (vr1->type)
614       && INTEGRAL_TYPE_P (vr2->type))
615     {
616       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
617 	return false;
618     }
619   else if (INTEGRAL_TYPE_P (vr1->type)
620 	   && (TYPE_PRECISION (vr1->type)
621 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
622     return false;
623   else if (INTEGRAL_TYPE_P (vr2->type)
624 	   && (TYPE_PRECISION (vr2->type)
625 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
626     return false;
627 
628   i = 0;
629   j = 0;
630   do
631     {
632       HOST_WIDE_INT off1 = 0, off2 = 0;
633       vn_reference_op_t vro1, vro2;
634       vn_reference_op_s tem1, tem2;
635       bool deref1 = false, deref2 = false;
636       for (; vr1->operands.iterate (i, &vro1); i++)
637 	{
638 	  if (vro1->opcode == MEM_REF)
639 	    deref1 = true;
640 	  /* Do not look through a storage order barrier.  */
641 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
642 	    return false;
643 	  if (vro1->off == -1)
644 	    break;
645 	  off1 += vro1->off;
646 	}
647       for (; vr2->operands.iterate (j, &vro2); j++)
648 	{
649 	  if (vro2->opcode == MEM_REF)
650 	    deref2 = true;
651 	  /* Do not look through a storage order barrier.  */
652 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
653 	    return false;
654 	  if (vro2->off == -1)
655 	    break;
656 	  off2 += vro2->off;
657 	}
658       if (off1 != off2)
659 	return false;
660       if (deref1 && vro1->opcode == ADDR_EXPR)
661 	{
662 	  memset (&tem1, 0, sizeof (tem1));
663 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
664 	  tem1.type = TREE_TYPE (tem1.op0);
665 	  tem1.opcode = TREE_CODE (tem1.op0);
666 	  vro1 = &tem1;
667 	  deref1 = false;
668 	}
669       if (deref2 && vro2->opcode == ADDR_EXPR)
670 	{
671 	  memset (&tem2, 0, sizeof (tem2));
672 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
673 	  tem2.type = TREE_TYPE (tem2.op0);
674 	  tem2.opcode = TREE_CODE (tem2.op0);
675 	  vro2 = &tem2;
676 	  deref2 = false;
677 	}
678       if (deref1 != deref2)
679 	return false;
680       if (!vn_reference_op_eq (vro1, vro2))
681 	return false;
682       ++j;
683       ++i;
684     }
685   while (vr1->operands.length () != i
686 	 || vr2->operands.length () != j);
687 
688   return true;
689 }
690 
691 /* Copy the operations present in load/store REF into RESULT, a vector of
692    vn_reference_op_s's.  */
693 
694 static void
695 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
696 {
697   if (TREE_CODE (ref) == TARGET_MEM_REF)
698     {
699       vn_reference_op_s temp;
700 
701       result->reserve (3);
702 
703       memset (&temp, 0, sizeof (temp));
704       temp.type = TREE_TYPE (ref);
705       temp.opcode = TREE_CODE (ref);
706       temp.op0 = TMR_INDEX (ref);
707       temp.op1 = TMR_STEP (ref);
708       temp.op2 = TMR_OFFSET (ref);
709       temp.off = -1;
710       temp.clique = MR_DEPENDENCE_CLIQUE (ref);
711       temp.base = MR_DEPENDENCE_BASE (ref);
712       result->quick_push (temp);
713 
714       memset (&temp, 0, sizeof (temp));
715       temp.type = NULL_TREE;
716       temp.opcode = ERROR_MARK;
717       temp.op0 = TMR_INDEX2 (ref);
718       temp.off = -1;
719       result->quick_push (temp);
720 
721       memset (&temp, 0, sizeof (temp));
722       temp.type = NULL_TREE;
723       temp.opcode = TREE_CODE (TMR_BASE (ref));
724       temp.op0 = TMR_BASE (ref);
725       temp.off = -1;
726       result->quick_push (temp);
727       return;
728     }
729 
730   /* For non-calls, store the information that makes up the address.  */
731   tree orig = ref;
732   while (ref)
733     {
734       vn_reference_op_s temp;
735 
736       memset (&temp, 0, sizeof (temp));
737       temp.type = TREE_TYPE (ref);
738       temp.opcode = TREE_CODE (ref);
739       temp.off = -1;
740 
741       switch (temp.opcode)
742 	{
743 	case MODIFY_EXPR:
744 	  temp.op0 = TREE_OPERAND (ref, 1);
745 	  break;
746 	case WITH_SIZE_EXPR:
747 	  temp.op0 = TREE_OPERAND (ref, 1);
748 	  temp.off = 0;
749 	  break;
750 	case MEM_REF:
751 	  /* The base address gets its own vn_reference_op_s structure.  */
752 	  temp.op0 = TREE_OPERAND (ref, 1);
753 	    {
754 	      offset_int off = mem_ref_offset (ref);
755 	      if (wi::fits_shwi_p (off))
756 		temp.off = off.to_shwi ();
757 	    }
758 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
759 	  temp.base = MR_DEPENDENCE_BASE (ref);
760 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
761 	  break;
762 	case BIT_FIELD_REF:
763 	  /* Record bits, position and storage order.  */
764 	  temp.op0 = TREE_OPERAND (ref, 1);
765 	  temp.op1 = TREE_OPERAND (ref, 2);
766 	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
767 	    {
768 	      HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
769 	      if (off % BITS_PER_UNIT == 0)
770 		temp.off = off / BITS_PER_UNIT;
771 	    }
772 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
773 	  break;
774 	case COMPONENT_REF:
775 	  /* The field decl is enough to unambiguously specify the field,
776 	     a matching type is not necessary and a mismatching type
777 	     is always a spurious difference.  */
778 	  temp.type = NULL_TREE;
779 	  temp.op0 = TREE_OPERAND (ref, 1);
780 	  temp.op1 = TREE_OPERAND (ref, 2);
781 	  {
782 	    tree this_offset = component_ref_field_offset (ref);
783 	    if (this_offset
784 		&& TREE_CODE (this_offset) == INTEGER_CST)
785 	      {
786 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
787 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
788 		  {
789 		    offset_int off
790 		      = (wi::to_offset (this_offset)
791 			 + wi::lrshift (wi::to_offset (bit_offset),
792 					LOG2_BITS_PER_UNIT));
793 		    if (wi::fits_shwi_p (off)
794 			/* Prohibit value-numbering zero offset components
795 			   of addresses the same before the pass folding
796 			   __builtin_object_size had a chance to run
797 			   (checking cfun->after_inlining does the
798 			   trick here).  */
799 			&& (TREE_CODE (orig) != ADDR_EXPR
800 			    || off != 0
801 			    || cfun->after_inlining))
802 		      temp.off = off.to_shwi ();
803 		  }
804 	      }
805 	  }
806 	  break;
807 	case ARRAY_RANGE_REF:
808 	case ARRAY_REF:
809 	  /* Record index as operand.  */
810 	  temp.op0 = TREE_OPERAND (ref, 1);
811 	  /* Always record lower bounds and element size.  */
812 	  temp.op1 = array_ref_low_bound (ref);
813 	  temp.op2 = array_ref_element_size (ref);
814 	  if (TREE_CODE (temp.op0) == INTEGER_CST
815 	      && TREE_CODE (temp.op1) == INTEGER_CST
816 	      && TREE_CODE (temp.op2) == INTEGER_CST)
817 	    {
818 	      offset_int off = ((wi::to_offset (temp.op0)
819 				 - wi::to_offset (temp.op1))
820 				* wi::to_offset (temp.op2));
821 	      if (wi::fits_shwi_p (off))
822 		temp.off = off.to_shwi();
823 	    }
824 	  break;
825 	case VAR_DECL:
826 	  if (DECL_HARD_REGISTER (ref))
827 	    {
828 	      temp.op0 = ref;
829 	      break;
830 	    }
831 	  /* Fallthru.  */
832 	case PARM_DECL:
833 	case CONST_DECL:
834 	case RESULT_DECL:
835 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
836 	     when valueizing MEM[ptr] with ptr = &decl.  */
837 	  temp.opcode = MEM_REF;
838 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
839 	  temp.off = 0;
840 	  result->safe_push (temp);
841 	  temp.opcode = ADDR_EXPR;
842 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
843 	  temp.type = TREE_TYPE (temp.op0);
844 	  temp.off = -1;
845 	  break;
846 	case STRING_CST:
847 	case INTEGER_CST:
848 	case COMPLEX_CST:
849 	case VECTOR_CST:
850 	case REAL_CST:
851 	case FIXED_CST:
852 	case CONSTRUCTOR:
853 	case SSA_NAME:
854 	  temp.op0 = ref;
855 	  break;
856 	case ADDR_EXPR:
857 	  if (is_gimple_min_invariant (ref))
858 	    {
859 	      temp.op0 = ref;
860 	      break;
861 	    }
862 	  break;
863 	  /* These are only interesting for their operands, their
864 	     existence, and their type.  They will never be the last
865 	     ref in the chain of references (i.e. they require an
866 	     operand), so we don't have to put anything
867 	     for op* as it will be handled by the iteration.  */
868 	case REALPART_EXPR:
869 	  temp.off = 0;
870 	  break;
871 	case VIEW_CONVERT_EXPR:
872 	  temp.off = 0;
873 	  temp.reverse = storage_order_barrier_p (ref);
874 	  break;
875 	case IMAGPART_EXPR:
876 	  /* This is only interesting for its constant offset.  */
877 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
878 	  break;
879 	default:
880 	  gcc_unreachable ();
881 	}
882       result->safe_push (temp);
883 
884       if (REFERENCE_CLASS_P (ref)
885 	  || TREE_CODE (ref) == MODIFY_EXPR
886 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
887 	  || (TREE_CODE (ref) == ADDR_EXPR
888 	      && !is_gimple_min_invariant (ref)))
889 	ref = TREE_OPERAND (ref, 0);
890       else
891 	ref = NULL_TREE;
892     }
893 }
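/* As an illustrative example of the decomposition above, a reference such
   as "a.b[i_1]" is pushed outermost-first, roughly as

     { ARRAY_REF,     op0 = i_1, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL for b }
     { MEM_REF,       op0 = 0 }
     { ADDR_EXPR,     op0 = &a }

   where the last two entries are the canonical MEM[&a] form of the decl
   access and the off fields cache constant byte offsets where known.  */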
894 
895 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
896    operands in *OPS, the reference alias set SET and the reference type TYPE.
897    Return true if something useful was produced.  */
898 
899 bool
900 ao_ref_init_from_vn_reference (ao_ref *ref,
901 			       alias_set_type set, tree type,
902 			       vec<vn_reference_op_s> ops)
903 {
904   vn_reference_op_t op;
905   unsigned i;
906   tree base = NULL_TREE;
907   tree *op0_p = &base;
908   offset_int offset = 0;
909   offset_int max_size;
910   offset_int size = -1;
911   tree size_tree = NULL_TREE;
912   alias_set_type base_alias_set = -1;
913 
914   /* First get the final access size from just the outermost expression.  */
915   op = &ops[0];
916   if (op->opcode == COMPONENT_REF)
917     size_tree = DECL_SIZE (op->op0);
918   else if (op->opcode == BIT_FIELD_REF)
919     size_tree = op->op0;
920   else
921     {
922       machine_mode mode = TYPE_MODE (type);
923       if (mode == BLKmode)
924 	size_tree = TYPE_SIZE (type);
925       else
926 	size = int (GET_MODE_BITSIZE (mode));
927     }
928   if (size_tree != NULL_TREE
929       && TREE_CODE (size_tree) == INTEGER_CST)
930     size = wi::to_offset (size_tree);
931 
932   /* Initially, maxsize is the same as the accessed element size.
933      In the following it will only grow (or become -1).  */
934   max_size = size;
935 
936   /* Compute cumulative bit-offset for nested component-refs and array-refs,
937      and find the ultimate containing object.  */
938   FOR_EACH_VEC_ELT (ops, i, op)
939     {
940       switch (op->opcode)
941 	{
942 	/* These may be in the reference ops, but we cannot do anything
943 	   sensible with them here.  */
944 	case ADDR_EXPR:
945 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
946 	  if (base != NULL_TREE
947 	      && TREE_CODE (base) == MEM_REF
948 	      && op->op0
949 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
950 	    {
951 	      vn_reference_op_t pop = &ops[i-1];
952 	      base = TREE_OPERAND (op->op0, 0);
953 	      if (pop->off == -1)
954 		{
955 		  max_size = -1;
956 		  offset = 0;
957 		}
958 	      else
959 		offset += pop->off * BITS_PER_UNIT;
960 	      op0_p = NULL;
961 	      break;
962 	    }
963 	  /* Fallthru.  */
964 	case CALL_EXPR:
965 	  return false;
966 
967 	/* Record the base objects.  */
968 	case MEM_REF:
969 	  base_alias_set = get_deref_alias_set (op->op0);
970 	  *op0_p = build2 (MEM_REF, op->type,
971 			   NULL_TREE, op->op0);
972 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
973 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
974 	  op0_p = &TREE_OPERAND (*op0_p, 0);
975 	  break;
976 
977 	case VAR_DECL:
978 	case PARM_DECL:
979 	case RESULT_DECL:
980 	case SSA_NAME:
981 	  *op0_p = op->op0;
982 	  op0_p = NULL;
983 	  break;
984 
985 	/* And now the usual component-reference style ops.  */
986 	case BIT_FIELD_REF:
987 	  offset += wi::to_offset (op->op1);
988 	  break;
989 
990 	case COMPONENT_REF:
991 	  {
992 	    tree field = op->op0;
993 	    /* We do not have a complete COMPONENT_REF tree here so we
994 	       cannot use component_ref_field_offset.  Do the interesting
995 	       parts manually.  */
996 	    tree this_offset = DECL_FIELD_OFFSET (field);
997 
998 	    if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
999 	      max_size = -1;
1000 	    else
1001 	      {
1002 		offset_int woffset = wi::lshift (wi::to_offset (this_offset),
1003 						 LOG2_BITS_PER_UNIT);
1004 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1005 		offset += woffset;
1006 	      }
1007 	    break;
1008 	  }
1009 
1010 	case ARRAY_RANGE_REF:
1011 	case ARRAY_REF:
1012 	  /* We recorded the lower bound and the element size.  */
1013 	  if (TREE_CODE (op->op0) != INTEGER_CST
1014 	      || TREE_CODE (op->op1) != INTEGER_CST
1015 	      || TREE_CODE (op->op2) != INTEGER_CST)
1016 	    max_size = -1;
1017 	  else
1018 	    {
1019 	      offset_int woffset
1020 		= wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1021 			    TYPE_PRECISION (TREE_TYPE (op->op0)));
1022 	      woffset *= wi::to_offset (op->op2);
1023 	      woffset = wi::lshift (woffset, LOG2_BITS_PER_UNIT);
1024 	      offset += woffset;
1025 	    }
1026 	  break;
1027 
1028 	case REALPART_EXPR:
1029 	  break;
1030 
1031 	case IMAGPART_EXPR:
1032 	  offset += size;
1033 	  break;
1034 
1035 	case VIEW_CONVERT_EXPR:
1036 	  break;
1037 
1038 	case STRING_CST:
1039 	case INTEGER_CST:
1040 	case COMPLEX_CST:
1041 	case VECTOR_CST:
1042 	case REAL_CST:
1043 	case CONSTRUCTOR:
1044 	case CONST_DECL:
1045 	  return false;
1046 
1047 	default:
1048 	  return false;
1049 	}
1050     }
1051 
1052   if (base == NULL_TREE)
1053     return false;
1054 
1055   ref->ref = NULL_TREE;
1056   ref->base = base;
1057   ref->ref_alias_set = set;
1058   if (base_alias_set != -1)
1059     ref->base_alias_set = base_alias_set;
1060   else
1061     ref->base_alias_set = get_alias_set (base);
1062   /* We discount volatiles from value-numbering elsewhere.  */
1063   ref->volatile_p = false;
1064 
1065   if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1066     {
1067       ref->offset = 0;
1068       ref->size = -1;
1069       ref->max_size = -1;
1070       return true;
1071     }
1072 
1073   ref->size = size.to_shwi ();
1074 
1075   if (!wi::fits_shwi_p (offset))
1076     {
1077       ref->offset = 0;
1078       ref->max_size = -1;
1079       return true;
1080     }
1081 
1082   ref->offset = offset.to_shwi ();
1083 
1084   if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1085     ref->max_size = -1;
1086   else
1087     ref->max_size = max_size.to_shwi ();
1088 
1089   return true;
1090 }
1091 
1092 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1093    vn_reference_op_s's.  */
1094 
1095 static void
1096 copy_reference_ops_from_call (gcall *call,
1097 			      vec<vn_reference_op_s> *result)
1098 {
1099   vn_reference_op_s temp;
1100   unsigned i;
1101   tree lhs = gimple_call_lhs (call);
1102   int lr;
1103 
1104   /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1105      different.  By adding the lhs here in the vector, we ensure that the
1106      hashcode is different, guaranteeing a different value number.  */
1107   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1108     {
1109       memset (&temp, 0, sizeof (temp));
1110       temp.opcode = MODIFY_EXPR;
1111       temp.type = TREE_TYPE (lhs);
1112       temp.op0 = lhs;
1113       temp.off = -1;
1114       result->safe_push (temp);
1115     }
1116 
1117   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1118   memset (&temp, 0, sizeof (temp));
1119   temp.type = gimple_call_return_type (call);
1120   temp.opcode = CALL_EXPR;
1121   temp.op0 = gimple_call_fn (call);
1122   temp.op1 = gimple_call_chain (call);
1123   if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1124     temp.op2 = size_int (lr);
1125   temp.off = -1;
1126   if (gimple_call_with_bounds_p (call))
1127     temp.with_bounds = 1;
1128   result->safe_push (temp);
1129 
1130   /* Copy the call arguments.  As they can be references as well,
1131      just chain them together.  */
1132   for (i = 0; i < gimple_call_num_args (call); ++i)
1133     {
1134       tree callarg = gimple_call_arg (call, i);
1135       copy_reference_ops_from_ref (callarg, result);
1136     }
1137 }
1138 
1139 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1140    *I_P to point to the last element of the replacement.  */
1141 static bool
1142 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1143 			    unsigned int *i_p)
1144 {
1145   unsigned int i = *i_p;
1146   vn_reference_op_t op = &(*ops)[i];
1147   vn_reference_op_t mem_op = &(*ops)[i - 1];
1148   tree addr_base;
1149   HOST_WIDE_INT addr_offset = 0;
1150 
1151   /* The only thing we have to do is, starting from &OBJ.foo.bar, add the
1152      offset of .foo.bar to the preceding MEM_REF offset and replace the
1153      address with &OBJ.  */
1154   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1155 					     &addr_offset);
1156   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1157   if (addr_base != TREE_OPERAND (op->op0, 0))
1158     {
1159       offset_int off = offset_int::from (mem_op->op0, SIGNED);
1160       off += addr_offset;
1161       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1162       op->op0 = build_fold_addr_expr (addr_base);
1163       if (tree_fits_shwi_p (mem_op->op0))
1164 	mem_op->off = tree_to_shwi (mem_op->op0);
1165       else
1166 	mem_op->off = -1;
1167       return true;
1168     }
1169   return false;
1170 }
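/* Illustrative example: given the operand pair

     { MEM_REF,   op0 = 4 }
     { ADDR_EXPR, op0 = &a.b }

   with b at byte offset 8 within a, this rewrites the pair to a MEM_REF
   with offset 12 and address &a, so equivalent accesses spelled through
   different component addresses get identical operands.  */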
1171 
1172 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1173    *I_P to point to the last element of the replacement.  */
1174 static bool
1175 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1176 				     unsigned int *i_p)
1177 {
1178   unsigned int i = *i_p;
1179   vn_reference_op_t op = &(*ops)[i];
1180   vn_reference_op_t mem_op = &(*ops)[i - 1];
1181   gimple *def_stmt;
1182   enum tree_code code;
1183   offset_int off;
1184 
1185   def_stmt = SSA_NAME_DEF_STMT (op->op0);
1186   if (!is_gimple_assign (def_stmt))
1187     return false;
1188 
1189   code = gimple_assign_rhs_code (def_stmt);
1190   if (code != ADDR_EXPR
1191       && code != POINTER_PLUS_EXPR)
1192     return false;
1193 
1194   off = offset_int::from (mem_op->op0, SIGNED);
1195 
1196   /* The only thing we have to do is, starting from &OBJ.foo.bar, add the
1197      offset of .foo.bar to the preceding MEM_REF offset and replace the
1198      address with &OBJ.  */
1199   if (code == ADDR_EXPR)
1200     {
1201       tree addr, addr_base;
1202       HOST_WIDE_INT addr_offset;
1203 
1204       addr = gimple_assign_rhs1 (def_stmt);
1205       addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1206 						 &addr_offset);
1207       /* If that didn't work because the address isn't invariant propagate
1208          the reference tree from the address operation in case the current
1209 	 dereference isn't offsetted.  */
1210       if (!addr_base
1211 	  && *i_p == ops->length () - 1
1212 	  && off == 0
1213 	  /* This disables the transform for PRE, where the reference
1214 	     ops might also be used for code insertion, which would be
1215 	     invalid.  */
1216 	  && default_vn_walk_kind == VN_WALKREWRITE)
1217 	{
1218 	  auto_vec<vn_reference_op_s, 32> tem;
1219 	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1220 	  /* Make sure to preserve TBAA info.  The only objects not
1221 	     wrapped in MEM_REFs that can have their address taken are
1222 	     STRING_CSTs.  */
1223 	  if (tem.length () >= 2
1224 	      && tem[tem.length () - 2].opcode == MEM_REF)
1225 	    {
1226 	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1227 	      new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1228 						  new_mem_op->op0);
1229 	    }
1230 	  else
1231 	    gcc_assert (tem.last ().opcode == STRING_CST);
1232 	  ops->pop ();
1233 	  ops->pop ();
1234 	  ops->safe_splice (tem);
1235 	  --*i_p;
1236 	  return true;
1237 	}
1238       if (!addr_base
1239 	  || TREE_CODE (addr_base) != MEM_REF)
1240 	return false;
1241 
1242       off += addr_offset;
1243       off += mem_ref_offset (addr_base);
1244       op->op0 = TREE_OPERAND (addr_base, 0);
1245     }
1246   else
1247     {
1248       tree ptr, ptroff;
1249       ptr = gimple_assign_rhs1 (def_stmt);
1250       ptroff = gimple_assign_rhs2 (def_stmt);
1251       if (TREE_CODE (ptr) != SSA_NAME
1252 	  || TREE_CODE (ptroff) != INTEGER_CST)
1253 	return false;
1254 
1255       off += wi::to_offset (ptroff);
1256       op->op0 = ptr;
1257     }
1258 
1259   mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1260   if (tree_fits_shwi_p (mem_op->op0))
1261     mem_op->off = tree_to_shwi (mem_op->op0);
1262   else
1263     mem_op->off = -1;
1264   if (TREE_CODE (op->op0) == SSA_NAME)
1265     op->op0 = SSA_VAL (op->op0);
1266   if (TREE_CODE (op->op0) != SSA_NAME)
1267     op->opcode = TREE_CODE (op->op0);
1268 
1269   /* And recurse.  */
1270   if (TREE_CODE (op->op0) == SSA_NAME)
1271     vn_reference_maybe_forwprop_address (ops, i_p);
1272   else if (TREE_CODE (op->op0) == ADDR_EXPR)
1273     vn_reference_fold_indirect (ops, i_p);
1274   return true;
1275 }
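/* Illustrative example: with

     p_1 = q_2 + 16;
     ... = MEM[p_1, 4];

   the pointer arithmetic is forwarded into the reference, yielding
   MEM[q_2, 20]; likewise an ADDR_EXPR definition whose base is itself a
   MEM_REF has its offset folded into the access.  The recursion at the
   end retries in case the new address can be simplified further.  */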
1276 
1277 /* Optimize the reference REF to a constant if possible or return
1278    NULL_TREE if not.  */
1279 
1280 tree
1281 fully_constant_vn_reference_p (vn_reference_t ref)
1282 {
1283   vec<vn_reference_op_s> operands = ref->operands;
1284   vn_reference_op_t op;
1285 
1286   /* Try to simplify the translated expression if it is
1287      a call to a builtin function with at most two arguments.  */
1288   op = &operands[0];
1289   if (op->opcode == CALL_EXPR
1290       && TREE_CODE (op->op0) == ADDR_EXPR
1291       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1292       && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1293       && operands.length () >= 2
1294       && operands.length () <= 3)
1295     {
1296       vn_reference_op_t arg0, arg1 = NULL;
1297       bool anyconst = false;
1298       arg0 = &operands[1];
1299       if (operands.length () > 2)
1300 	arg1 = &operands[2];
1301       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1302 	  || (arg0->opcode == ADDR_EXPR
1303 	      && is_gimple_min_invariant (arg0->op0)))
1304 	anyconst = true;
1305       if (arg1
1306 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1307 	      || (arg1->opcode == ADDR_EXPR
1308 		  && is_gimple_min_invariant (arg1->op0))))
1309 	anyconst = true;
1310       if (anyconst)
1311 	{
1312 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1313 					 arg1 ? 2 : 1,
1314 					 arg0->op0,
1315 					 arg1 ? arg1->op0 : NULL);
1316 	  if (folded
1317 	      && TREE_CODE (folded) == NOP_EXPR)
1318 	    folded = TREE_OPERAND (folded, 0);
1319 	  if (folded
1320 	      && is_gimple_min_invariant (folded))
1321 	    return folded;
1322 	}
1323     }
1324 
1325   /* Simplify reads from constants or constant initializers.  */
1326   else if (BITS_PER_UNIT == 8
1327 	   && is_gimple_reg_type (ref->type)
1328 	   && (!INTEGRAL_TYPE_P (ref->type)
1329 	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1330     {
1331       HOST_WIDE_INT off = 0;
1332       HOST_WIDE_INT size;
1333       if (INTEGRAL_TYPE_P (ref->type))
1334 	size = TYPE_PRECISION (ref->type);
1335       else
1336 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1337       if (size % BITS_PER_UNIT != 0
1338 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1339 	return NULL_TREE;
1340       size /= BITS_PER_UNIT;
1341       unsigned i;
1342       for (i = 0; i < operands.length (); ++i)
1343 	{
1344 	  if (operands[i].off == -1)
1345 	    return NULL_TREE;
1346 	  off += operands[i].off;
1347 	  if (operands[i].opcode == MEM_REF)
1348 	    {
1349 	      ++i;
1350 	      break;
1351 	    }
1352 	}
1353       vn_reference_op_t base = &operands[--i];
1354       tree ctor = error_mark_node;
1355       tree decl = NULL_TREE;
1356       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1357 	ctor = base->op0;
1358       else if (base->opcode == MEM_REF
1359 	       && base[1].opcode == ADDR_EXPR
1360 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1361 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1362 	{
1363 	  decl = TREE_OPERAND (base[1].op0, 0);
1364 	  ctor = ctor_for_folding (decl);
1365 	}
1366       if (ctor == NULL_TREE)
1367 	return build_zero_cst (ref->type);
1368       else if (ctor != error_mark_node)
1369 	{
1370 	  if (decl)
1371 	    {
1372 	      tree res = fold_ctor_reference (ref->type, ctor,
1373 					      off * BITS_PER_UNIT,
1374 					      size * BITS_PER_UNIT, decl);
1375 	      if (res)
1376 		{
1377 		  STRIP_USELESS_TYPE_CONVERSION (res);
1378 		  if (is_gimple_min_invariant (res))
1379 		    return res;
1380 		}
1381 	    }
1382 	  else
1383 	    {
1384 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1385 	      int len = native_encode_expr (ctor, buf, size, off);
1386 	      if (len > 0)
1387 		return native_interpret_expr (ref->type, buf, len);
1388 	    }
1389 	}
1390     }
1391 
1392   return NULL_TREE;
1393 }
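/* Illustrative example: a read of "c[2]" from

     static const int c[4] = { 1, 2, 3, 5 };

   takes the second branch above; ctor_for_folding returns the initializer
   of c and fold_ctor_reference extracts the constant 3
   (native_encode_expr/native_interpret_expr handle the non-decl constant
   case).  */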
1394 
1395 /* Return true if OPS contain a storage order barrier.  */
1396 
1397 static bool
1398 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1399 {
1400   vn_reference_op_t op;
1401   unsigned i;
1402 
1403   FOR_EACH_VEC_ELT (ops, i, op)
1404     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1405       return true;
1406 
1407   return false;
1408 }
1409 
1410 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1411    structures into their value numbers.  This is done in-place, and
1412    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1413    whether any operands were valueized.  */
1414 
1415 static vec<vn_reference_op_s>
1416 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1417 {
1418   vn_reference_op_t vro;
1419   unsigned int i;
1420 
1421   *valueized_anything = false;
1422 
1423   FOR_EACH_VEC_ELT (orig, i, vro)
1424     {
1425       if (vro->opcode == SSA_NAME
1426 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1427 	{
1428 	  tree tem = SSA_VAL (vro->op0);
1429 	  if (tem != vro->op0)
1430 	    {
1431 	      *valueized_anything = true;
1432 	      vro->op0 = tem;
1433 	    }
1434 	  /* If it transforms from an SSA_NAME to a constant, update
1435 	     the opcode.  */
1436 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1437 	    vro->opcode = TREE_CODE (vro->op0);
1438 	}
1439       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1440 	{
1441 	  tree tem = SSA_VAL (vro->op1);
1442 	  if (tem != vro->op1)
1443 	    {
1444 	      *valueized_anything = true;
1445 	      vro->op1 = tem;
1446 	    }
1447 	}
1448       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1449 	{
1450 	  tree tem = SSA_VAL (vro->op2);
1451 	  if (tem != vro->op2)
1452 	    {
1453 	      *valueized_anything = true;
1454 	      vro->op2 = tem;
1455 	    }
1456 	}
1457       /* If it transforms from an SSA_NAME to an address, fold with
1458 	 a preceding indirect reference.  */
1459       if (i > 0
1460 	  && vro->op0
1461 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1462 	  && orig[i - 1].opcode == MEM_REF)
1463 	{
1464 	  if (vn_reference_fold_indirect (&orig, &i))
1465 	    *valueized_anything = true;
1466 	}
1467       else if (i > 0
1468 	       && vro->opcode == SSA_NAME
1469 	       && orig[i - 1].opcode == MEM_REF)
1470 	{
1471 	  if (vn_reference_maybe_forwprop_address (&orig, &i))
1472 	    *valueized_anything = true;
1473 	}
1474       /* If it transforms a non-constant ARRAY_REF into a constant
1475 	 one, adjust the constant offset.  */
1476       else if (vro->opcode == ARRAY_REF
1477 	       && vro->off == -1
1478 	       && TREE_CODE (vro->op0) == INTEGER_CST
1479 	       && TREE_CODE (vro->op1) == INTEGER_CST
1480 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1481 	{
1482 	  offset_int off = ((wi::to_offset (vro->op0)
1483 			     - wi::to_offset (vro->op1))
1484 			    * wi::to_offset (vro->op2));
1485 	  if (wi::fits_shwi_p (off))
1486 	    vro->off = off.to_shwi ();
1487 	}
1488     }
1489 
1490   return orig;
1491 }
1492 
1493 static vec<vn_reference_op_s>
1494 valueize_refs (vec<vn_reference_op_s> orig)
1495 {
1496   bool tem;
1497   return valueize_refs_1 (orig, &tem);
1498 }
1499 
1500 static vec<vn_reference_op_s> shared_lookup_references;
1501 
1502 /* Create a vector of vn_reference_op_s structures from REF, a
1503    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1504    this function.  *VALUEIZED_ANYTHING will specify whether any
1505    operands were valueized.  */
1506 
1507 static vec<vn_reference_op_s>
1508 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1509 {
1510   if (!ref)
1511     return vNULL;
1512   shared_lookup_references.truncate (0);
1513   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1514   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1515 					      valueized_anything);
1516   return shared_lookup_references;
1517 }
1518 
1519 /* Create a vector of vn_reference_op_s structures from CALL, a
1520    call statement.  The vector is shared among all callers of
1521    this function.  */
1522 
1523 static vec<vn_reference_op_s>
1524 valueize_shared_reference_ops_from_call (gcall *call)
1525 {
1526   if (!call)
1527     return vNULL;
1528   shared_lookup_references.truncate (0);
1529   copy_reference_ops_from_call (call, &shared_lookup_references);
1530   shared_lookup_references = valueize_refs (shared_lookup_references);
1531   return shared_lookup_references;
1532 }
1533 
1534 /* Lookup a SCCVN reference operation VR in the current hash table.
1535    Returns the resulting value number if it exists in the hash table,
1536    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1537    vn_reference_t stored in the hashtable if something is found.  */
1538 
1539 static tree
1540 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1541 {
1542   vn_reference_s **slot;
1543   hashval_t hash;
1544 
1545   hash = vr->hashcode;
1546   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1547   if (!slot && current_info == optimistic_info)
1548     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1549   if (slot)
1550     {
1551       if (vnresult)
1552 	*vnresult = (vn_reference_t)*slot;
1553       return ((vn_reference_t)*slot)->result;
1554     }
1555 
1556   return NULL_TREE;
1557 }
1558 
1559 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1560    with the current VUSE and performs the expression lookup.  */
1561 
1562 static void *
1563 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1564 		       unsigned int cnt, void *vr_)
1565 {
1566   vn_reference_t vr = (vn_reference_t)vr_;
1567   vn_reference_s **slot;
1568   hashval_t hash;
1569 
1570   /* This bounds the stmt walks we perform on reference lookups
1571      to O(1) instead of O(N) where N is the number of dominating
1572      stores.  */
1573   if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1574     return (void *)-1;
1575 
1576   if (last_vuse_ptr)
1577     *last_vuse_ptr = vuse;
1578 
1579   /* Fixup vuse and hash.  */
1580   if (vr->vuse)
1581     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1582   vr->vuse = vuse_ssa_val (vuse);
1583   if (vr->vuse)
1584     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1585 
1586   hash = vr->hashcode;
1587   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1588   if (!slot && current_info == optimistic_info)
1589     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1590   if (slot)
1591     return *slot;
1592 
1593   return NULL;
1594 }
1595 
1596 /* Lookup an existing or insert a new vn_reference entry into the
1597    value table for the VUSE, SET, TYPE, OPERANDS reference which
1598    has the value VALUE which is either a constant or an SSA name.  */
1599 
1600 static vn_reference_t
1601 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1602 					  alias_set_type set,
1603 					  tree type,
1604 					  vec<vn_reference_op_s,
1605 					        va_heap> operands,
1606 					  tree value)
1607 {
1608   vn_reference_s vr1;
1609   vn_reference_t result;
1610   unsigned value_id;
1611   vr1.vuse = vuse;
1612   vr1.operands = operands;
1613   vr1.type = type;
1614   vr1.set = set;
1615   vr1.hashcode = vn_reference_compute_hash (&vr1);
1616   if (vn_reference_lookup_1 (&vr1, &result))
1617     return result;
1618   if (TREE_CODE (value) == SSA_NAME)
1619     value_id = VN_INFO (value)->value_id;
1620   else
1621     value_id = get_or_alloc_constant_value_id (value);
1622   return vn_reference_insert_pieces (vuse, set, type,
1623 				     operands.copy (), value, value_id);
1624 }
1625 
1626 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1627    from the statement defining VUSE and if not successful tries to
1628    translate *REFP and VR_ through an aggregate copy at the definition
1629    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1630    of *REF and *VR.  If only disambiguation was performed then
1631    *DISAMBIGUATE_ONLY is set to true.  */
1632 
1633 static void *
1634 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1635 		       bool *disambiguate_only)
1636 {
1637   vn_reference_t vr = (vn_reference_t)vr_;
1638   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1639   tree base = ao_ref_base (ref);
1640   HOST_WIDE_INT offset, maxsize;
1641   static vec<vn_reference_op_s>
1642     lhs_ops = vNULL;
1643   ao_ref lhs_ref;
1644   bool lhs_ref_ok = false;
1645 
1646   /* If the reference is based on a parameter that was determined to
1647      point to readonly memory, it doesn't change.  */
1648   if (TREE_CODE (base) == MEM_REF
1649       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1650       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1651       && bitmap_bit_p (const_parms,
1652 		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1653     {
1654       *disambiguate_only = true;
1655       return NULL;
1656     }
1657 
1658   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1659   if (is_gimple_assign (def_stmt))
1660     {
1661       tree lhs = gimple_assign_lhs (def_stmt);
1662       bool valueized_anything = false;
1663       /* Avoid re-allocation overhead.  */
1664       lhs_ops.truncate (0);
1665       copy_reference_ops_from_ref (lhs, &lhs_ops);
1666       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1667       if (valueized_anything)
1668 	{
1669 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1670 						      get_alias_set (lhs),
1671 						      TREE_TYPE (lhs), lhs_ops);
1672 	  if (lhs_ref_ok
1673 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1674 	    {
1675 	      *disambiguate_only = true;
1676 	      return NULL;
1677 	    }
1678 	}
1679       else
1680 	{
1681 	  ao_ref_init (&lhs_ref, lhs);
1682 	  lhs_ref_ok = true;
1683 	}
1684     }
1685   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1686 	   && gimple_call_num_args (def_stmt) <= 4)
1687     {
1688       /* For builtin calls, valueize their arguments and call the
1689          alias oracle again.  Valueization may improve points-to
1690 	 info of pointers and constify size and position arguments.
1691 	 Originally this was motivated by PR61034 which has
1692 	 conditional calls to free falsely clobbering ref because
1693 	 of imprecise points-to info of the argument.  */
1694       tree oldargs[4];
1695       bool valueized_anything = false;
1696       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1697 	{
1698 	  oldargs[i] = gimple_call_arg (def_stmt, i);
1699 	  if (TREE_CODE (oldargs[i]) == SSA_NAME
1700 	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
1701 	    {
1702 	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1703 	      valueized_anything = true;
1704 	    }
1705 	}
1706       if (valueized_anything)
1707 	{
1708 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1709 					       ref);
1710 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1711 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1712 	  if (!res)
1713 	    {
1714 	      *disambiguate_only = true;
1715 	      return NULL;
1716 	    }
1717 	}
1718     }
1719 
1720   if (*disambiguate_only)
1721     return (void *)-1;
1722 
1723   offset = ref->offset;
1724   maxsize = ref->max_size;
1725 
1726   /* If we cannot constrain the size of the reference we cannot
1727      test if anything kills it.  */
1728   if (maxsize == -1)
1729     return (void *)-1;
1730 
1731   /* We can't deduce anything useful from clobbers.  */
1732   if (gimple_clobber_p (def_stmt))
1733     return (void *)-1;
1734 
1735   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1736      from that definition.
1737      1) Memset.  */
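  /* For instance (an informal sketch):

	 memset (&a, 0, sizeof (a));
	 ... = a.f;

     the read of a.f is fully covered by the zeroing memset, so it can
     be value-numbered to a zero constant of the read's type.  */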
1738   if (is_gimple_reg_type (vr->type)
1739       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1740       && integer_zerop (gimple_call_arg (def_stmt, 1))
1741       && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1742       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1743     {
1744       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1745       tree base2;
1746       HOST_WIDE_INT offset2, size2, maxsize2;
1747       bool reverse;
1748       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1749 				       &reverse);
1750       size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1751       if ((unsigned HOST_WIDE_INT)size2 / 8
1752 	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1753 	  && maxsize2 != -1
1754 	  && operand_equal_p (base, base2, 0)
1755 	  && offset2 <= offset
1756 	  && offset2 + size2 >= offset + maxsize)
1757 	{
1758 	  tree val = build_zero_cst (vr->type);
1759 	  return vn_reference_lookup_or_insert_for_pieces
1760 	           (vuse, vr->set, vr->type, vr->operands, val);
1761 	}
1762     }
1763 
1764   /* 2) Assignment from an empty CONSTRUCTOR.  */
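  /* Informal sketch: a GIMPLE aggregate clearing like

	 a = {};
	 ... = a.f;

     lets the read of a.f be value-numbered to zero, provided the
     cleared store covers the read completely.  */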
1765   else if (is_gimple_reg_type (vr->type)
1766 	   && gimple_assign_single_p (def_stmt)
1767 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1768 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1769     {
1770       tree base2;
1771       HOST_WIDE_INT offset2, size2, maxsize2;
1772       bool reverse;
1773       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1774 				       &offset2, &size2, &maxsize2, &reverse);
1775       if (maxsize2 != -1
1776 	  && operand_equal_p (base, base2, 0)
1777 	  && offset2 <= offset
1778 	  && offset2 + size2 >= offset + maxsize)
1779 	{
1780 	  tree val = build_zero_cst (vr->type);
1781 	  return vn_reference_lookup_or_insert_for_pieces
1782 	           (vuse, vr->set, vr->type, vr->operands, val);
1783 	}
1784     }
1785 
1786   /* 3) Assignment from a constant.  We can use fold's native encode/interpret
1787      routines to extract the assigned bits.  */
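  /* Rough illustration (hypothetical code): for

	 union { double d; long long i; } u;
	 u.d = 1.0;
	 ... = u.i;

     the constant 1.0 is encoded into its byte representation with
     native_encode_expr and the read bytes re-interpreted in the type
     of the access with native_interpret_expr (a sketch; the exact
     conditions are checked below).  */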
1788   else if (vn_walk_kind == VN_WALKREWRITE
1789 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1790 	   && ref->size == maxsize
1791 	   && maxsize % BITS_PER_UNIT == 0
1792 	   && offset % BITS_PER_UNIT == 0
1793 	   && is_gimple_reg_type (vr->type)
1794 	   && !contains_storage_order_barrier_p (vr->operands)
1795 	   && gimple_assign_single_p (def_stmt)
1796 	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1797     {
1798       tree base2;
1799       HOST_WIDE_INT offset2, size2, maxsize2;
1800       bool reverse;
1801       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1802 				       &offset2, &size2, &maxsize2, &reverse);
1803       if (!reverse
1804 	  && maxsize2 != -1
1805 	  && maxsize2 == size2
1806 	  && size2 % BITS_PER_UNIT == 0
1807 	  && offset2 % BITS_PER_UNIT == 0
1808 	  && operand_equal_p (base, base2, 0)
1809 	  && offset2 <= offset
1810 	  && offset2 + size2 >= offset + maxsize)
1811 	{
1812 	  /* We support up to 512-bit values (for V8DFmode).  */
1813 	  unsigned char buffer[64];
1814 	  int len;
1815 
1816 	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1817 				    buffer, sizeof (buffer));
1818 	  if (len > 0)
1819 	    {
1820 	      tree type = vr->type;
1821 	      /* Make sure to interpret in a type that has a range
1822 	         covering the whole access size.  */
1823 	      if (INTEGRAL_TYPE_P (vr->type)
1824 		  && ref->size != TYPE_PRECISION (vr->type))
1825 		type = build_nonstandard_integer_type (ref->size,
1826 						       TYPE_UNSIGNED (type));
1827 	      tree val = native_interpret_expr (type,
1828 						buffer
1829 						+ ((offset - offset2)
1830 						   / BITS_PER_UNIT),
1831 						ref->size / BITS_PER_UNIT);
1832 	      /* If we chop off bits because the type's precision doesn't
1833 		 match the memory access size, this is ok when optimizing
1834 		 reads but not when called from the DSE code during
1835 		 elimination.  */
1836 	      if (val
1837 		  && type != vr->type)
1838 		{
1839 		  if (! int_fits_type_p (val, vr->type))
1840 		    val = NULL_TREE;
1841 		  else
1842 		    val = fold_convert (vr->type, val);
1843 		}
1844 
1845 	      if (val)
1846 		return vn_reference_lookup_or_insert_for_pieces
1847 			 (vuse, vr->set, vr->type, vr->operands, val);
1848 	    }
1849 	}
1850     }
1851 
1852   /* 4) Assignment from an SSA name whose definition we may be able
1853      to access pieces from.  */
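  /* Sketch: for GIMPLE like

	 c_1 = COMPLEX_EXPR <a_2, b_3>;
	 mem = c_1;
	 ... = REALPART_EXPR <mem>;

     the read of the real part can be value-numbered to a_2 (and the
     imaginary part to b_3) when the pieces line up exactly.  */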
1854   else if (ref->size == maxsize
1855 	   && is_gimple_reg_type (vr->type)
1856 	   && !contains_storage_order_barrier_p (vr->operands)
1857 	   && gimple_assign_single_p (def_stmt)
1858 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1859     {
1860       tree rhs1 = gimple_assign_rhs1 (def_stmt);
1861       gimple *def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1862       if (is_gimple_assign (def_stmt2)
1863 	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1864 	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1865 	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1866 	{
1867 	  tree base2;
1868 	  HOST_WIDE_INT offset2, size2, maxsize2, off;
1869 	  bool reverse;
1870 	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1871 					   &offset2, &size2, &maxsize2,
1872 					   &reverse);
1873 	  off = offset - offset2;
1874 	  if (!reverse
1875 	      && maxsize2 != -1
1876 	      && maxsize2 == size2
1877 	      && operand_equal_p (base, base2, 0)
1878 	      && offset2 <= offset
1879 	      && offset2 + size2 >= offset + maxsize)
1880 	    {
1881 	      tree val = NULL_TREE;
1882 	      HOST_WIDE_INT elsz
1883 		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1884 	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1885 		{
1886 		  if (off == 0)
1887 		    val = gimple_assign_rhs1 (def_stmt2);
1888 		  else if (off == elsz)
1889 		    val = gimple_assign_rhs2 (def_stmt2);
1890 		}
1891 	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1892 		       && off % elsz == 0)
1893 		{
1894 		  tree ctor = gimple_assign_rhs1 (def_stmt2);
1895 		  unsigned i = off / elsz;
1896 		  if (i < CONSTRUCTOR_NELTS (ctor))
1897 		    {
1898 		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1899 		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1900 			{
1901 			  if (TREE_CODE (TREE_TYPE (elt->value))
1902 			      != VECTOR_TYPE)
1903 			    val = elt->value;
1904 			}
1905 		    }
1906 		}
1907 	      if (val)
1908 		return vn_reference_lookup_or_insert_for_pieces
1909 		         (vuse, vr->set, vr->type, vr->operands, val);
1910 	    }
1911 	}
1912     }
1913 
1914   /* 5) For aggregate copies translate the reference through them if
1915      the copy kills ref.  */
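  /* Sketch of the translation: given

	 a = b;
	 ... = a.x.y;

     and the copy killing the looked-up reference, a.x.y is rewritten
     to b.x.y and the walk continues, so an earlier store to b.x.y can
     supply the value.  */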
1916   else if (vn_walk_kind == VN_WALKREWRITE
1917 	   && gimple_assign_single_p (def_stmt)
1918 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
1919 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1920 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1921     {
1922       tree base2;
1923       HOST_WIDE_INT maxsize2;
1924       int i, j, k;
1925       auto_vec<vn_reference_op_s> rhs;
1926       vn_reference_op_t vro;
1927       ao_ref r;
1928 
1929       if (!lhs_ref_ok)
1930 	return (void *)-1;
1931 
1932       /* See if the assignment kills REF.  */
1933       base2 = ao_ref_base (&lhs_ref);
1934       maxsize2 = lhs_ref.max_size;
1935       if (maxsize2 == -1
1936 	  || (base != base2
1937 	      && (TREE_CODE (base) != MEM_REF
1938 		  || TREE_CODE (base2) != MEM_REF
1939 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
1940 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
1941 					  TREE_OPERAND (base2, 1))))
1942 	  || !stmt_kills_ref_p (def_stmt, ref))
1943 	return (void *)-1;
1944 
1945       /* Find the common base of ref and the lhs.  lhs_ops already
1946          contains valueized operands for the lhs.  */
1947       i = vr->operands.length () - 1;
1948       j = lhs_ops.length () - 1;
1949       while (j >= 0 && i >= 0
1950 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
1951 	{
1952 	  i--;
1953 	  j--;
1954 	}
1955 
1956       /* ???  The innermost op should always be a MEM_REF and we already
1957          checked that the assignment to the lhs kills vr.  Thus for
1958 	 aggregate copies using char[] types the vn_reference_op_eq
1959 	 may fail when comparing types for compatibility.  But we really
1960 	 don't care here - further lookups with the rewritten operands
1961 	 will simply fail if we messed up types too badly.  */
1962       HOST_WIDE_INT extra_off = 0;
1963       if (j == 0 && i >= 0
1964 	  && lhs_ops[0].opcode == MEM_REF
1965 	  && lhs_ops[0].off != -1)
1966 	{
1967 	  if (lhs_ops[0].off == vr->operands[i].off)
1968 	    i--, j--;
1969 	  else if (vr->operands[i].opcode == MEM_REF
1970 		   && vr->operands[i].off != -1)
1971 	    {
1972 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
1973 	      i--, j--;
1974 	    }
1975 	}
1976 
1977       /* i now points to the first additional op.
1978 	 ???  LHS may not be completely contained in VR, one or more
1979 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
1980 	 try handling outermost VIEW_CONVERT_EXPRs.  */
1981       if (j != -1)
1982 	return (void *)-1;
1983 
1984       /* Punt if the additional ops contain a storage order barrier.  */
1985       for (k = i; k >= 0; k--)
1986 	{
1987 	  vro = &vr->operands[k];
1988 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
1989 	    return (void *)-1;
1990 	}
1991 
1992       /* Now re-write REF to be based on the rhs of the assignment.  */
1993       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1994 
1995       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
1996       if (extra_off != 0)
1997 	{
1998 	  if (rhs.length () < 2
1999 	      || rhs[0].opcode != MEM_REF
2000 	      || rhs[0].off == -1)
2001 	    return (void *)-1;
2002 	  rhs[0].off += extra_off;
2003 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2004 					build_int_cst (TREE_TYPE (rhs[0].op0),
2005 						       extra_off));
2006 	}
2007 
2008       /* We need to prepend vr->operands[0..i] to rhs.  */
2009       vec<vn_reference_op_s> old = vr->operands;
2010       if (i + 1 + rhs.length () > vr->operands.length ())
2011 	vr->operands.safe_grow (i + 1 + rhs.length ());
2012       else
2013 	vr->operands.truncate (i + 1 + rhs.length ());
2014       FOR_EACH_VEC_ELT (rhs, j, vro)
2015 	vr->operands[i + 1 + j] = *vro;
2016       vr->operands = valueize_refs (vr->operands);
2017       if (old == shared_lookup_references)
2018 	shared_lookup_references = vr->operands;
2019       vr->hashcode = vn_reference_compute_hash (vr);
2020 
2021       /* Try folding the new reference to a constant.  */
2022       tree val = fully_constant_vn_reference_p (vr);
2023       if (val)
2024 	return vn_reference_lookup_or_insert_for_pieces
2025 		 (vuse, vr->set, vr->type, vr->operands, val);
2026 
2027       /* Adjust *ref from the new operands.  */
2028       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2029 	return (void *)-1;
2030       /* This can happen with bitfields.  */
2031       if (ref->size != r.size)
2032 	return (void *)-1;
2033       *ref = r;
2034 
2035       /* Do not update last seen VUSE after translating.  */
2036       last_vuse_ptr = NULL;
2037 
2038       /* Keep looking for the adjusted *REF / VR pair.  */
2039       return NULL;
2040     }
2041 
2042   /* 6) For memcpy copies translate the reference through them if
2043      the copy kills ref.  */
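  /* Sketch: given

	 memcpy (&a, &b, sizeof (a));
	 ... = a.i;

     the reference is rewritten to a MEM_REF based on the source &b at
     the corresponding offset and the walk continues from there.  */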
2044   else if (vn_walk_kind == VN_WALKREWRITE
2045 	   && is_gimple_reg_type (vr->type)
2046 	   /* ???  Handle BCOPY as well.  */
2047 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2048 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2049 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2050 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2051 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2052 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2053 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2054 	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2055     {
2056       tree lhs, rhs;
2057       ao_ref r;
2058       HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2059       vn_reference_op_s op;
2060       HOST_WIDE_INT at;
2061 
2062       /* Only handle non-variable, addressable refs.  */
2063       if (ref->size != maxsize
2064 	  || offset % BITS_PER_UNIT != 0
2065 	  || ref->size % BITS_PER_UNIT != 0)
2066 	return (void *)-1;
2067 
2068       /* Extract a pointer base and an offset for the destination.  */
2069       lhs = gimple_call_arg (def_stmt, 0);
2070       lhs_offset = 0;
2071       if (TREE_CODE (lhs) == SSA_NAME)
2072 	{
2073 	  lhs = SSA_VAL (lhs);
2074 	  if (TREE_CODE (lhs) == SSA_NAME)
2075 	    {
2076 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2077 	      if (gimple_assign_single_p (def_stmt)
2078 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2079 		lhs = gimple_assign_rhs1 (def_stmt);
2080 	    }
2081 	}
2082       if (TREE_CODE (lhs) == ADDR_EXPR)
2083 	{
2084 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2085 						    &lhs_offset);
2086 	  if (!tem)
2087 	    return (void *)-1;
2088 	  if (TREE_CODE (tem) == MEM_REF
2089 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2090 	    {
2091 	      lhs = TREE_OPERAND (tem, 0);
2092 	      if (TREE_CODE (lhs) == SSA_NAME)
2093 		lhs = SSA_VAL (lhs);
2094 	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2095 	    }
2096 	  else if (DECL_P (tem))
2097 	    lhs = build_fold_addr_expr (tem);
2098 	  else
2099 	    return (void *)-1;
2100 	}
2101       if (TREE_CODE (lhs) != SSA_NAME
2102 	  && TREE_CODE (lhs) != ADDR_EXPR)
2103 	return (void *)-1;
2104 
2105       /* Extract a pointer base and an offset for the source.  */
2106       rhs = gimple_call_arg (def_stmt, 1);
2107       rhs_offset = 0;
2108       if (TREE_CODE (rhs) == SSA_NAME)
2109 	rhs = SSA_VAL (rhs);
2110       if (TREE_CODE (rhs) == ADDR_EXPR)
2111 	{
2112 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2113 						    &rhs_offset);
2114 	  if (!tem)
2115 	    return (void *)-1;
2116 	  if (TREE_CODE (tem) == MEM_REF
2117 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2118 	    {
2119 	      rhs = TREE_OPERAND (tem, 0);
2120 	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2121 	    }
2122 	  else if (DECL_P (tem))
2123 	    rhs = build_fold_addr_expr (tem);
2124 	  else
2125 	    return (void *)-1;
2126 	}
2127       if (TREE_CODE (rhs) != SSA_NAME
2128 	  && TREE_CODE (rhs) != ADDR_EXPR)
2129 	return (void *)-1;
2130 
2131       copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2132 
2133       /* The bases of the destination and the reference have to agree.  */
2134       if ((TREE_CODE (base) != MEM_REF
2135 	   && !DECL_P (base))
2136 	  || (TREE_CODE (base) == MEM_REF
2137 	      && (TREE_OPERAND (base, 0) != lhs
2138 		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2139 	  || (DECL_P (base)
2140 	      && (TREE_CODE (lhs) != ADDR_EXPR
2141 		  || TREE_OPERAND (lhs, 0) != base)))
2142 	return (void *)-1;
2143 
2144       at = offset / BITS_PER_UNIT;
2145       if (TREE_CODE (base) == MEM_REF)
2146 	at += tree_to_uhwi (TREE_OPERAND (base, 1));
2147       /* If the access is completely outside of the memcpy destination
2148 	 area there is no aliasing.  */
2149       if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2150 	  || lhs_offset + copy_size <= at)
2151 	return NULL;
2152       /* And the access has to be contained within the memcpy destination.  */
2153       if (lhs_offset > at
2154 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2155 	return (void *)-1;
2156 
2157       /* Make room for 2 operands in the new reference.  */
2158       if (vr->operands.length () < 2)
2159 	{
2160 	  vec<vn_reference_op_s> old = vr->operands;
2161 	  vr->operands.safe_grow_cleared (2);
2162 	  if (old == shared_lookup_references)
2163 	    shared_lookup_references = vr->operands;
2164 	}
2165       else
2166 	vr->operands.truncate (2);
2167 
2168       /* The looked-through reference is a simple MEM_REF.  */
2169       memset (&op, 0, sizeof (op));
2170       op.type = vr->type;
2171       op.opcode = MEM_REF;
2172       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2173       op.off = at - lhs_offset + rhs_offset;
2174       vr->operands[0] = op;
2175       op.type = TREE_TYPE (rhs);
2176       op.opcode = TREE_CODE (rhs);
2177       op.op0 = rhs;
2178       op.off = -1;
2179       vr->operands[1] = op;
2180       vr->hashcode = vn_reference_compute_hash (vr);
2181 
2182       /* Adjust *ref from the new operands.  */
2183       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2184 	return (void *)-1;
2185       /* This can happen with bitfields.  */
2186       if (ref->size != r.size)
2187 	return (void *)-1;
2188       *ref = r;
2189 
2190       /* Do not update last seen VUSE after translating.  */
2191       last_vuse_ptr = NULL;
2192 
2193       /* Keep looking for the adjusted *REF / VR pair.  */
2194       return NULL;
2195     }
2196 
2197   /* Bail out and stop walking.  */
2198   return (void *)-1;
2199 }
2200 
2201 /* Lookup a reference operation by its parts in the current hash table.
2202    Returns the resulting value number if it exists in the hash table,
2203    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2204    vn_reference_t stored in the hashtable if something is found.  */
2205 
2206 tree
2207 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2208 			    vec<vn_reference_op_s> operands,
2209 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2210 {
2211   struct vn_reference_s vr1;
2212   vn_reference_t tmp;
2213   tree cst;
2214 
2215   if (!vnresult)
2216     vnresult = &tmp;
2217   *vnresult = NULL;
2218 
2219   vr1.vuse = vuse_ssa_val (vuse);
2220   shared_lookup_references.truncate (0);
2221   shared_lookup_references.safe_grow (operands.length ());
2222   memcpy (shared_lookup_references.address (),
2223 	  operands.address (),
2224 	  sizeof (vn_reference_op_s)
2225 	  * operands.length ());
2226   vr1.operands = operands = shared_lookup_references
2227     = valueize_refs (shared_lookup_references);
2228   vr1.type = type;
2229   vr1.set = set;
2230   vr1.hashcode = vn_reference_compute_hash (&vr1);
2231   if ((cst = fully_constant_vn_reference_p (&vr1)))
2232     return cst;
2233 
2234   vn_reference_lookup_1 (&vr1, vnresult);
2235   if (!*vnresult
2236       && kind != VN_NOWALK
2237       && vr1.vuse)
2238     {
2239       ao_ref r;
2240       vn_walk_kind = kind;
2241       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2242 	*vnresult =
2243 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2244 						  vn_reference_lookup_2,
2245 						  vn_reference_lookup_3,
2246 						  vuse_ssa_val, &vr1);
2247       gcc_checking_assert (vr1.operands == shared_lookup_references);
2248     }
2249 
2250   if (*vnresult)
2251      return (*vnresult)->result;
2252 
2253   return NULL_TREE;
2254 }
2255 
2256 /* Lookup OP in the current hash table, and return the resulting value
2257    number if it exists in the hash table.  Return NULL_TREE if it does
2258    not exist in the hash table or if the result field of the structure
2259    was NULL.  VNRESULT will be filled in with the vn_reference_t
2260    stored in the hashtable if one exists.  When TBAA_P is false assume
2261    we are looking up a store and treat it as having alias-set zero.  */
2262 
2263 tree
2264 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2265 		     vn_reference_t *vnresult, bool tbaa_p)
2266 {
2267   vec<vn_reference_op_s> operands;
2268   struct vn_reference_s vr1;
2269   tree cst;
2270   bool valueized_anything;
2271 
2272   if (vnresult)
2273     *vnresult = NULL;
2274 
2275   vr1.vuse = vuse_ssa_val (vuse);
2276   vr1.operands = operands
2277     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2278   vr1.type = TREE_TYPE (op);
2279   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2280   vr1.hashcode = vn_reference_compute_hash (&vr1);
2281   if ((cst = fully_constant_vn_reference_p (&vr1)))
2282     return cst;
2283 
2284   if (kind != VN_NOWALK
2285       && vr1.vuse)
2286     {
2287       vn_reference_t wvnresult;
2288       ao_ref r;
2289       /* Make sure to use a valueized reference if we valueized anything.
2290          Otherwise preserve the full reference for advanced TBAA.  */
2291       if (!valueized_anything
2292 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2293 					     vr1.operands))
2294 	ao_ref_init (&r, op);
2295       if (! tbaa_p)
2296 	r.ref_alias_set = r.base_alias_set = 0;
2297       vn_walk_kind = kind;
2298       wvnresult =
2299 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2300 						vn_reference_lookup_2,
2301 						vn_reference_lookup_3,
2302 						vuse_ssa_val, &vr1);
2303       gcc_checking_assert (vr1.operands == shared_lookup_references);
2304       if (wvnresult)
2305 	{
2306 	  if (vnresult)
2307 	    *vnresult = wvnresult;
2308 	  return wvnresult->result;
2309 	}
2310 
2311       return NULL_TREE;
2312     }
2313 
2314   return vn_reference_lookup_1 (&vr1, vnresult);
2315 }
2316 
2317 /* Lookup CALL in the current hash table and return the entry in
2318    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2319 
2320 void
2321 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2322 			  vn_reference_t vr)
2323 {
2324   if (vnresult)
2325     *vnresult = NULL;
2326 
2327   tree vuse = gimple_vuse (call);
2328 
2329   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2330   vr->operands = valueize_shared_reference_ops_from_call (call);
2331   vr->type = gimple_expr_type (call);
2332   vr->set = 0;
2333   vr->hashcode = vn_reference_compute_hash (vr);
2334   vn_reference_lookup_1 (vr, vnresult);
2335 }
2336 
2337 /* Insert OP into the current hash table with a value number of
2338    RESULT, and return the resulting reference structure we created.  */
2339 
2340 static vn_reference_t
2341 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2342 {
2343   vn_reference_s **slot;
2344   vn_reference_t vr1;
2345   bool tem;
2346 
2347   vr1 = current_info->references_pool->allocate ();
2348   if (TREE_CODE (result) == SSA_NAME)
2349     vr1->value_id = VN_INFO (result)->value_id;
2350   else
2351     vr1->value_id = get_or_alloc_constant_value_id (result);
2352   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2353   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2354   vr1->type = TREE_TYPE (op);
2355   vr1->set = get_alias_set (op);
2356   vr1->hashcode = vn_reference_compute_hash (vr1);
2357   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2358   vr1->result_vdef = vdef;
2359 
2360   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2361 							INSERT);
2362 
2363   /* Because we lookup stores using vuses, and value number failures
2364      using the vdefs (see visit_reference_op_store for how and why),
2365      it's possible that on failure we may try to insert an already
2366      inserted store.  This is not wrong, there is no ssa name for a
2367      store that we could use as a differentiator anyway.  Thus, unlike
2368      the other lookup functions, you cannot gcc_assert (!*slot)
2369      here.  */
2370 
2371   /* But free the old slot in case of a collision.  */
2372   if (*slot)
2373     free_reference (*slot);
2374 
2375   *slot = vr1;
2376   return vr1;
2377 }
2378 
2379 /* Insert a reference by its pieces into the current hash table with
2380    a value number of RESULT.  Return the resulting reference
2381    structure we created.  */
2382 
2383 vn_reference_t
2384 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2385 			    vec<vn_reference_op_s> operands,
2386 			    tree result, unsigned int value_id)
2387 
2388 {
2389   vn_reference_s **slot;
2390   vn_reference_t vr1;
2391 
2392   vr1 = current_info->references_pool->allocate ();
2393   vr1->value_id = value_id;
2394   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2395   vr1->operands = valueize_refs (operands);
2396   vr1->type = type;
2397   vr1->set = set;
2398   vr1->hashcode = vn_reference_compute_hash (vr1);
2399   if (result && TREE_CODE (result) == SSA_NAME)
2400     result = SSA_VAL (result);
2401   vr1->result = result;
2402 
2403   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2404 							INSERT);
2405 
2406   /* At this point we should have all the things inserted that we have
2407      seen before, and we should never try inserting something that
2408      already exists.  */
2409   gcc_assert (!*slot);
2410   if (*slot)
2411     free_reference (*slot);
2412 
2413   *slot = vr1;
2414   return vr1;
2415 }
2416 
2417 /* Compute and return the hash value for n-ary operation VNO1.  */
2418 
2419 static hashval_t
2420 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2421 {
2422   inchash::hash hstate;
2423   unsigned i;
2424 
2425   for (i = 0; i < vno1->length; ++i)
2426     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2427       vno1->op[i] = SSA_VAL (vno1->op[i]);
2428 
2429   if (((vno1->length == 2
2430 	&& commutative_tree_code (vno1->opcode))
2431        || (vno1->length == 3
2432 	   && commutative_ternary_tree_code (vno1->opcode)))
2433       && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2434     std::swap (vno1->op[0], vno1->op[1]);
2435   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2436 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2437     {
2438       std::swap (vno1->op[0], vno1->op[1]);
2439       vno1->opcode = swap_tree_comparison  (vno1->opcode);
2440     }
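  /* With the canonicalization above, e.g. a_1 + b_2 and b_2 + a_1 hash
     (and later compare) equal, and a_1 < b_2 is brought into the same
     form as b_2 > a_1 by also swapping the comparison code (an
     informal illustration).  */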
2441 
2442   hstate.add_int (vno1->opcode);
2443   for (i = 0; i < vno1->length; ++i)
2444     inchash::add_expr (vno1->op[i], hstate);
2445 
2446   return hstate.end ();
2447 }
2448 
2449 /* Compare nary operations VNO1 and VNO2 and return true if they are
2450    equivalent.  */
2451 
2452 bool
2453 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2454 {
2455   unsigned i;
2456 
2457   if (vno1->hashcode != vno2->hashcode)
2458     return false;
2459 
2460   if (vno1->length != vno2->length)
2461     return false;
2462 
2463   if (vno1->opcode != vno2->opcode
2464       || !types_compatible_p (vno1->type, vno2->type))
2465     return false;
2466 
2467   for (i = 0; i < vno1->length; ++i)
2468     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2469       return false;
2470 
2471   return true;
2472 }
2473 
2474 /* Initialize VNO from the pieces provided.  */
2475 
2476 static void
2477 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2478 			     enum tree_code code, tree type, tree *ops)
2479 {
2480   vno->opcode = code;
2481   vno->length = length;
2482   vno->type = type;
2483   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2484 }
2485 
2486 /* Initialize VNO from OP.  */
2487 
2488 static void
2489 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2490 {
2491   unsigned i;
2492 
2493   vno->opcode = TREE_CODE (op);
2494   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2495   vno->type = TREE_TYPE (op);
2496   for (i = 0; i < vno->length; ++i)
2497     vno->op[i] = TREE_OPERAND (op, i);
2498 }
2499 
2500 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2501 
2502 static unsigned int
2503 vn_nary_length_from_stmt (gimple *stmt)
2504 {
2505   switch (gimple_assign_rhs_code (stmt))
2506     {
2507     case REALPART_EXPR:
2508     case IMAGPART_EXPR:
2509     case VIEW_CONVERT_EXPR:
2510       return 1;
2511 
2512     case BIT_FIELD_REF:
2513       return 3;
2514 
2515     case CONSTRUCTOR:
2516       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2517 
2518     default:
2519       return gimple_num_ops (stmt) - 1;
2520     }
2521 }
2522 
2523 /* Initialize VNO from STMT.  */
2524 
2525 static void
2526 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2527 {
2528   unsigned i;
2529 
2530   vno->opcode = gimple_assign_rhs_code (stmt);
2531   vno->type = gimple_expr_type (stmt);
2532   switch (vno->opcode)
2533     {
2534     case REALPART_EXPR:
2535     case IMAGPART_EXPR:
2536     case VIEW_CONVERT_EXPR:
2537       vno->length = 1;
2538       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2539       break;
2540 
2541     case BIT_FIELD_REF:
2542       vno->length = 3;
2543       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2544       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2545       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2546       break;
2547 
2548     case CONSTRUCTOR:
2549       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2550       for (i = 0; i < vno->length; ++i)
2551 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2552       break;
2553 
2554     default:
2555       gcc_checking_assert (!gimple_assign_single_p (stmt));
2556       vno->length = gimple_num_ops (stmt) - 1;
2557       for (i = 0; i < vno->length; ++i)
2558 	vno->op[i] = gimple_op (stmt, i + 1);
2559     }
2560 }
2561 
2562 /* Compute the hashcode for VNO and look for it in the hash table;
2563    return the resulting value number if it exists in the hash table.
2564    Return NULL_TREE if it does not exist in the hash table or if the
2565    result field of the operation is NULL.  VNRESULT will contain the
2566    vn_nary_op_t from the hashtable if it exists.  */
2567 
2568 static tree
2569 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2570 {
2571   vn_nary_op_s **slot;
2572 
2573   if (vnresult)
2574     *vnresult = NULL;
2575 
2576   vno->hashcode = vn_nary_op_compute_hash (vno);
2577   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2578 						  NO_INSERT);
2579   if (!slot && current_info == optimistic_info)
2580     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2581 						  NO_INSERT);
2582   if (!slot)
2583     return NULL_TREE;
2584   if (vnresult)
2585     *vnresult = *slot;
2586   return (*slot)->result;
2587 }
2588 
2589 /* Lookup an n-ary operation by its pieces and return the resulting value
2590    number if it exists in the hash table.  Return NULL_TREE if it does
2591    not exist in the hash table or if the result field of the operation
2592    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2593    if it exists.  */
2594 
2595 tree
2596 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2597 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2598 {
2599   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2600 				  sizeof_vn_nary_op (length));
2601   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2602   return vn_nary_op_lookup_1 (vno1, vnresult);
2603 }
2604 
2605 /* Lookup OP in the current hash table, and return the resulting value
2606    number if it exists in the hash table.  Return NULL_TREE if it does
2607    not exist in the hash table or if the result field of the operation
2608    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2609    if it exists.  */
2610 
2611 tree
2612 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2613 {
2614   vn_nary_op_t vno1
2615     = XALLOCAVAR (struct vn_nary_op_s,
2616 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2617   init_vn_nary_op_from_op (vno1, op);
2618   return vn_nary_op_lookup_1 (vno1, vnresult);
2619 }
2620 
2621 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2622    value number if it exists in the hash table.  Return NULL_TREE if
2623    it does not exist in the hash table.  VNRESULT will contain the
2624    vn_nary_op_t from the hashtable if it exists.  */
2625 
2626 tree
2627 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2628 {
2629   vn_nary_op_t vno1
2630     = XALLOCAVAR (struct vn_nary_op_s,
2631 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2632   init_vn_nary_op_from_stmt (vno1, stmt);
2633   return vn_nary_op_lookup_1 (vno1, vnresult);
2634 }
2635 
2636 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
2637 
2638 static tree
2639 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops)
2640 {
2641   if (!rcode.is_tree_code ())
2642     return NULL_TREE;
2643   vn_nary_op_t vnresult = NULL;
2644   return vn_nary_op_lookup_pieces (TREE_CODE_LENGTH ((tree_code) rcode),
2645 				   (tree_code) rcode, type, ops, &vnresult);
2646 }
2647 
2648 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2649 
2650 static vn_nary_op_t
2651 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2652 {
2653   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2654 }
2655 
2656 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2657    obstack.  */
2658 
2659 static vn_nary_op_t
2660 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2661 {
2662   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2663 					       &current_info->nary_obstack);
2664 
2665   vno1->value_id = value_id;
2666   vno1->length = length;
2667   vno1->result = result;
2668 
2669   return vno1;
2670 }
2671 
2672 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2673    VNO->HASHCODE first.  */
2674 
2675 static vn_nary_op_t
2676 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2677 			bool compute_hash)
2678 {
2679   vn_nary_op_s **slot;
2680 
2681   if (compute_hash)
2682     vno->hashcode = vn_nary_op_compute_hash (vno);
2683 
2684   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2685   gcc_assert (!*slot);
2686 
2687   *slot = vno;
2688   return vno;
2689 }
2690 
2691 /* Insert an n-ary operation into the current hash table using its
2692    pieces.  Return the vn_nary_op_t structure we created and put in
2693    the hashtable.  */
2694 
2695 vn_nary_op_t
2696 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2697 			  tree type, tree *ops,
2698 			  tree result, unsigned int value_id)
2699 {
2700   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2701   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2702   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2703 }
2704 
2705 /* Insert OP into the current hash table with a value number of
2706    RESULT.  Return the vn_nary_op_t structure we created and put in
2707    the hashtable.  */
2708 
2709 vn_nary_op_t
2710 vn_nary_op_insert (tree op, tree result)
2711 {
2712   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2713   vn_nary_op_t vno1;
2714 
2715   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2716   init_vn_nary_op_from_op (vno1, op);
2717   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2718 }
2719 
2720 /* Insert the rhs of STMT into the current hash table with a value number of
2721    RESULT.  */
2722 
2723 static vn_nary_op_t
2724 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2725 {
2726   vn_nary_op_t vno1
2727     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2728 			result, VN_INFO (result)->value_id);
2729   init_vn_nary_op_from_stmt (vno1, stmt);
2730   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2731 }
2732 
2733 /* Compute a hashcode for PHI operation VP1 and return it.  */
2734 
2735 static inline hashval_t
2736 vn_phi_compute_hash (vn_phi_t vp1)
2737 {
2738   inchash::hash hstate (vp1->phiargs.length () > 2
2739 			? vp1->block->index : vp1->phiargs.length ());
2740   tree phi1op;
2741   tree type;
2742   edge e;
2743   edge_iterator ei;
2744 
2745   /* If all PHI arguments are constants we need to distinguish
2746      the PHI node via its type.  */
2747   type = vp1->type;
2748   hstate.merge_hash (vn_hash_type (type));
2749 
2750   FOR_EACH_EDGE (e, ei, vp1->block->preds)
2751     {
2752       /* Don't hash backedge values; they need to be handled as VN_TOP
2753          for optimistic value-numbering.  */
2754       if (e->flags & EDGE_DFS_BACK)
2755 	continue;
2756 
2757       phi1op = vp1->phiargs[e->dest_idx];
2758       if (phi1op == VN_TOP)
2759 	continue;
2760       inchash::add_expr (phi1op, hstate);
2761     }
2762 
2763   return hstate.end ();
2764 }
2765 
2766 
2767 /* Return true if COND1 and COND2 represent the same condition; set
2768    *INVERTED_P if one needs to be inverted to make it the same as
2769    the other.  */
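/* For example (informally): a_1 < b_2 and b_2 > a_1 represent the same
   condition, while a_1 < b_2 and a_1 >= b_2 are the same with
   *INVERTED_P set (only valid when NaNs cannot be involved).  */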
2770 
2771 static bool
2772 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2773 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2774 {
2775   enum tree_code code1 = gimple_cond_code (cond1);
2776   enum tree_code code2 = gimple_cond_code (cond2);
2777 
2778   *inverted_p = false;
2779   if (code1 == code2)
2780     ;
2781   else if (code1 == swap_tree_comparison (code2))
2782     std::swap (lhs2, rhs2);
2783   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2784     *inverted_p = true;
2785   else if (code1 == invert_tree_comparison
2786 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2787     {
2788       std::swap (lhs2, rhs2);
2789       *inverted_p = true;
2790     }
2791   else
2792     return false;
2793 
2794   return ((expressions_equal_p (lhs1, lhs2)
2795 	   && expressions_equal_p (rhs1, rhs2))
2796 	  || (commutative_tree_code (code1)
2797 	      && expressions_equal_p (lhs1, rhs2)
2798 	      && expressions_equal_p (rhs1, lhs2)));
2799 }
2800 
2801 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
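/* An illustrative example (hypothetical GIMPLE): with two diamonds

     if (a_1 < b_2) ... x_3 = PHI <c_4, d_5>
     if (a_1 < b_2) ... y_6 = PHI <c_4, d_5>

   controlled by equivalent conditions, the second PHI can be found
   equal to the first even though it is in a different block, provided
   the arguments on the corresponding true/false edges match (the
   precise constraints are checked below).  */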
2802 
2803 static int
2804 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2805 {
2806   if (vp1->hashcode != vp2->hashcode)
2807     return false;
2808 
2809   if (vp1->block != vp2->block)
2810     {
2811       if (vp1->phiargs.length () != vp2->phiargs.length ())
2812 	return false;
2813 
2814       switch (vp1->phiargs.length ())
2815 	{
2816 	case 1:
2817 	  /* Single-arg PHIs are just copies.  */
2818 	  break;
2819 
2820 	case 2:
2821 	  {
2822 	    /* Rule out backedges into the PHI.  */
2823 	    if (vp1->block->loop_father->header == vp1->block
2824 		|| vp2->block->loop_father->header == vp2->block)
2825 	      return false;
2826 
2827 	    /* If the PHI nodes do not have compatible types
2828 	       they are not the same.  */
2829 	    if (!types_compatible_p (vp1->type, vp2->type))
2830 	      return false;
2831 
2832 	    basic_block idom1
2833 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2834 	    basic_block idom2
2835 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
2836 	    /* If the immediate dominators end in switch stmts, multiple
2837 	       values may end up in the same PHI arg via intermediate
2838 	       CFG merges.  */
2839 	    if (EDGE_COUNT (idom1->succs) != 2
2840 		|| EDGE_COUNT (idom2->succs) != 2)
2841 	      return false;
2842 
2843 	    /* Verify the controlling stmt is the same.  */
2844 	    gimple *last1 = last_stmt (idom1);
2845 	    gimple *last2 = last_stmt (idom2);
2846 	    if (gimple_code (last1) != GIMPLE_COND
2847 		|| gimple_code (last2) != GIMPLE_COND)
2848 	      return false;
2849 	    bool inverted_p;
2850 	    if (! cond_stmts_equal_p (as_a <gcond *> (last1),
2851 				      vp1->cclhs, vp1->ccrhs,
2852 				      as_a <gcond *> (last2),
2853 				      vp2->cclhs, vp2->ccrhs,
2854 				      &inverted_p))
2855 	      return false;
2856 
2857 	    /* Get at true/false controlled edges into the PHI.  */
2858 	    edge te1, te2, fe1, fe2;
2859 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
2860 						       &te1, &fe1)
2861 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
2862 							  &te2, &fe2))
2863 	      return false;
2864 
2865 	    /* Swap edges if the second condition is the inverted of the
2866 	       first.  */
2867 	    if (inverted_p)
2868 	      std::swap (te2, fe2);
2869 
2870 	    /* ???  Handle VN_TOP specially.  */
2871 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
2872 				       vp2->phiargs[te2->dest_idx])
2873 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
2874 					  vp2->phiargs[fe2->dest_idx]))
2875 	      return false;
2876 
2877 	    return true;
2878 	  }
2879 
2880 	default:
2881 	  return false;
2882 	}
2883     }
2884 
2885   /* If the PHI nodes do not have compatible types
2886      they are not the same.  */
2887   if (!types_compatible_p (vp1->type, vp2->type))
2888     return false;
2889 
2890   /* Any phi in the same block will have its arguments in the
2891      same edge order, because of how we store phi nodes.  */
2892   int i;
2893   tree phi1op;
2894   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2895     {
2896       tree phi2op = vp2->phiargs[i];
2897       if (phi1op == VN_TOP || phi2op == VN_TOP)
2898 	continue;
2899       if (!expressions_equal_p (phi1op, phi2op))
2900 	return false;
2901     }
2902 
2903   return true;
2904 }
2905 
2906 static vec<tree> shared_lookup_phiargs;
2907 
2908 /* Lookup PHI in the current hash table, and return the resulting
2909    value number if it exists in the hash table.  Return NULL_TREE if
2910    it does not exist in the hash table.  */
2911 
2912 static tree
2913 vn_phi_lookup (gimple *phi)
2914 {
2915   vn_phi_s **slot;
2916   struct vn_phi_s vp1;
2917   edge e;
2918   edge_iterator ei;
2919 
2920   shared_lookup_phiargs.truncate (0);
2921   shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
2922 
2923   /* Canonicalize the SSA_NAME's to their value number.  */
2924   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2925     {
2926       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2927       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2928       shared_lookup_phiargs[e->dest_idx] = def;
2929     }
2930   vp1.type = TREE_TYPE (gimple_phi_result (phi));
2931   vp1.phiargs = shared_lookup_phiargs;
2932   vp1.block = gimple_bb (phi);
2933   /* Extract values of the controlling condition.  */
2934   vp1.cclhs = NULL_TREE;
2935   vp1.ccrhs = NULL_TREE;
2936   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
2937   if (EDGE_COUNT (idom1->succs) == 2)
2938     if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
2939       {
2940 	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
2941 	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
2942       }
2943   vp1.hashcode = vn_phi_compute_hash (&vp1);
2944   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2945 						  NO_INSERT);
2946   if (!slot && current_info == optimistic_info)
2947     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
2948 						  NO_INSERT);
2949   if (!slot)
2950     return NULL_TREE;
2951   return (*slot)->result;
2952 }
2953 
2954 /* Insert PHI into the current hash table with a value number of
2955    RESULT.  */
2956 
2957 static vn_phi_t
2958 vn_phi_insert (gimple *phi, tree result)
2959 {
2960   vn_phi_s **slot;
2961   vn_phi_t vp1 = current_info->phis_pool->allocate ();
2962   vec<tree> args = vNULL;
2963   edge e;
2964   edge_iterator ei;
2965 
2966   args.safe_grow (gimple_phi_num_args (phi));
2967 
2968   /* Canonicalize the SSA_NAME's to their value number.  */
2969   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
2970     {
2971       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
2972       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2973       args[e->dest_idx] = def;
2974     }
2975   vp1->value_id = VN_INFO (result)->value_id;
2976   vp1->type = TREE_TYPE (gimple_phi_result (phi));
2977   vp1->phiargs = args;
2978   vp1->block = gimple_bb (phi);
2979   /* Extract values of the controlling condition.  */
2980   vp1->cclhs = NULL_TREE;
2981   vp1->ccrhs = NULL_TREE;
2982   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
2983   if (EDGE_COUNT (idom1->succs) == 2)
2984     if (gcond *last1 = dyn_cast <gcond *> (last_stmt (idom1)))
2985       {
2986 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
2987 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
2988       }
2989   vp1->result = result;
2990   vp1->hashcode = vn_phi_compute_hash (vp1);
2991 
2992   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
2993 
2994   /* Because we iterate over phi operations more than once, it's
2995      possible the slot might already exist here, hence no assert.  */
2996   *slot = vp1;
2997   return vp1;
2998 }
2999 
3000 
3001 /* Print the set of components in strongly connected component SCC to OUT.  */
3002 
3003 static void
3004 print_scc (FILE *out, vec<tree> scc)
3005 {
3006   tree var;
3007   unsigned int i;
3008 
3009   fprintf (out, "SCC consists of:");
3010   FOR_EACH_VEC_ELT (scc, i, var)
3011     {
3012       fprintf (out, " ");
3013       print_generic_expr (out, var, 0);
3014     }
3015   fprintf (out, "\n");
3016 }
3017 
3018 /* Return true if BB1 is dominated by BB2 taking into account edges
3019    that are not executable.  */
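/* Informally: even if BB2 does not dominate BB1 in the plain CFG, it
   may do so once edges known to be not executable are ignored, e.g.
   when the only executable predecessor edge of BB1 comes from a block
   dominated by BB2.  Only a single refinement step in each direction
   is performed below.  */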
3020 
3021 static bool
3022 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3023 {
3024   edge_iterator ei;
3025   edge e;
3026 
3027   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3028     return true;
3029 
3030   /* Before iterating we'd like to know if there exists an
3031      (executable) path from bb2 to bb1 at all; if not we can
3032      directly return false.  For now simply iterate once.  */
3033 
3034   /* Iterate to the single executable bb1 predecessor.  */
3035   if (EDGE_COUNT (bb1->preds) > 1)
3036     {
3037       edge prede = NULL;
3038       FOR_EACH_EDGE (e, ei, bb1->preds)
3039 	if (e->flags & EDGE_EXECUTABLE)
3040 	  {
3041 	    if (prede)
3042 	      {
3043 		prede = NULL;
3044 		break;
3045 	      }
3046 	    prede = e;
3047 	  }
3048       if (prede)
3049 	{
3050 	  bb1 = prede->src;
3051 
3052 	  /* Re-do the dominance check with changed bb1.  */
3053 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3054 	    return true;
3055 	}
3056     }
3057 
3058   /* Iterate to the single executable bb2 successor.  */
3059   edge succe = NULL;
3060   FOR_EACH_EDGE (e, ei, bb2->succs)
3061     if (e->flags & EDGE_EXECUTABLE)
3062       {
3063 	if (succe)
3064 	  {
3065 	    succe = NULL;
3066 	    break;
3067 	  }
3068 	succe = e;
3069       }
3070   if (succe)
3071     {
3072       /* Verify the reached block is only reached through succe.
3073 	 If there is only one edge we can spare us the dominator
3074 	 check and iterate directly.  */
3075       if (EDGE_COUNT (succe->dest->preds) > 1)
3076 	{
3077 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3078 	    if (e != succe
3079 		&& (e->flags & EDGE_EXECUTABLE))
3080 	      {
3081 		succe = NULL;
3082 		break;
3083 	      }
3084 	}
3085       if (succe)
3086 	{
3087 	  bb2 = succe->dest;
3088 
3089 	  /* Re-do the dominance check with changed bb2.  */
3090 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3091 	    return true;
3092 	}
3093     }
3094 
3095   /* We could now iterate updating bb1 / bb2.  */
3096   return false;
3097 }
3098 
3099 /* Set the value number of FROM to TO, return true if it has changed
3100    as a result.  */
3101 
3102 static inline bool
3103 set_ssa_val_to (tree from, tree to)
3104 {
3105   tree currval = SSA_VAL (from);
3106   HOST_WIDE_INT toff, coff;
3107 
3108   /* The only thing we allow as value numbers are ssa_names
3109      and invariants.  So assert that here.  We don't allow VN_TOP
3110      as visiting a stmt should produce a value-number other than
3111      that.
3112      ???  Still VN_TOP can happen for unreachable code, so force
3113      it to varying in that case.  Not all code is prepared to
3114      get VN_TOP on valueization.  */
3115   if (to == VN_TOP)
3116     {
3117       if (dump_file && (dump_flags & TDF_DETAILS))
3118 	fprintf (dump_file, "Forcing value number to varying on "
3119 		 "receiving VN_TOP\n");
3120       to = from;
3121     }
3122 
3123   gcc_assert (to != NULL_TREE
3124 	      && ((TREE_CODE (to) == SSA_NAME
3125 		   && (to == from || SSA_VAL (to) == to))
3126 		  || is_gimple_min_invariant (to)));
3127 
3128   if (from != to)
3129     {
3130       if (currval == from)
3131 	{
3132 	  if (dump_file && (dump_flags & TDF_DETAILS))
3133 	    {
3134 	      fprintf (dump_file, "Not changing value number of ");
3135 	      print_generic_expr (dump_file, from, 0);
3136 	      fprintf (dump_file, " from VARYING to ");
3137 	      print_generic_expr (dump_file, to, 0);
3138 	      fprintf (dump_file, "\n");
3139 	    }
3140 	  return false;
3141 	}
3142       else if (currval != VN_TOP
3143 	       && ! is_gimple_min_invariant (currval)
3144 	       && is_gimple_min_invariant (to))
3145 	{
3146 	  if (dump_file && (dump_flags & TDF_DETAILS))
3147 	    {
3148 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3149 		       "value number of ");
3150 	      print_generic_expr (dump_file, from, 0);
3151 	      fprintf (dump_file, " from ");
3152 	      print_generic_expr (dump_file, currval, 0);
3153 	      fprintf (dump_file, " (non-constant) to ");
3154 	      print_generic_expr (dump_file, to, 0);
3155 	      fprintf (dump_file, " (constant)\n");
3156 	    }
3157 	  to = from;
3158 	}
3159       else if (TREE_CODE (to) == SSA_NAME
3160 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3161 	to = from;
3162     }
3163 
3164   if (dump_file && (dump_flags & TDF_DETAILS))
3165     {
3166       fprintf (dump_file, "Setting value number of ");
3167       print_generic_expr (dump_file, from, 0);
3168       fprintf (dump_file, " to ");
3169       print_generic_expr (dump_file, to, 0);
3170     }
3171 
3172   if (currval != to
3173       && !operand_equal_p (currval, to, 0)
3174       /* ???  For addresses involving volatile objects or types operand_equal_p
3175          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3176 	 getting invariant gimple addresses here, so can use
3177 	 get_addr_base_and_unit_offset to do this comparison.  */
3178       && !(TREE_CODE (currval) == ADDR_EXPR
3179 	   && TREE_CODE (to) == ADDR_EXPR
3180 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3181 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3182 	   && coff == toff))
3183     {
3184       /* If we equate two SSA names we have to make the side-band info
3185          of the leader conservative (and remember whatever original value
3186 	 was present).  */
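      /* For instance (informal): if i_2 with range [0, 9] is equated to
	 the leader i_3 with range [0, 99], i_3's info is kept when
	 i_3's definition dominates i_2's; if instead i_2's definition
	 dominates, i_3's range is replaced by i_2's [0, 9] after saving
	 the original; otherwise the info is conservatively dropped.  */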
3187       if (TREE_CODE (to) == SSA_NAME)
3188 	{
3189 	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3190 	      && SSA_NAME_RANGE_INFO (to))
3191 	    {
3192 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3193 		  || dominated_by_p_w_unex
3194 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3195 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3196 		/* Keep the info from the dominator.  */
3197 		;
3198 	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
3199 		       || dominated_by_p_w_unex
3200 			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
3201 			     gimple_bb (SSA_NAME_DEF_STMT (from))))
3202 		{
3203 		  /* Save old info.  */
3204 		  if (! VN_INFO (to)->info.range_info)
3205 		    {
3206 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3207 		      VN_INFO (to)->range_info_anti_range_p
3208 			= SSA_NAME_ANTI_RANGE_P (to);
3209 		    }
3210 		  /* Use that from the dominator.  */
3211 		  SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3212 		  SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3213 		}
3214 	      else
3215 		{
3216 		  /* Save old info.  */
3217 		  if (! VN_INFO (to)->info.range_info)
3218 		    {
3219 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3220 		      VN_INFO (to)->range_info_anti_range_p
3221 			= SSA_NAME_ANTI_RANGE_P (to);
3222 		    }
3223 		  /* Rather than allocating memory and unioning the info
3224 		     just clear it.  */
3225 		  SSA_NAME_RANGE_INFO (to) = NULL;
3226 		}
3227 	    }
3228 	  else if (POINTER_TYPE_P (TREE_TYPE (to))
3229 		   && SSA_NAME_PTR_INFO (to))
3230 	    {
3231 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3232 		  || dominated_by_p_w_unex
3233 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3234 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3235 		/* Keep the info from the dominator.  */
3236 		;
3237 	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
3238 		       || dominated_by_p_w_unex
3239 			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
3240 			     gimple_bb (SSA_NAME_DEF_STMT (from))))
3241 		{
3242 		  /* Save old info.  */
3243 		  if (! VN_INFO (to)->info.ptr_info)
3244 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3245 		  /* Use that from the dominator.  */
3246 		  SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3247 		}
3248 	      else if (! SSA_NAME_PTR_INFO (from)
3249 		       /* Handle the case of trivially equivalent info.  */
3250 		       || memcmp (SSA_NAME_PTR_INFO (to),
3251 				  SSA_NAME_PTR_INFO (from),
3252 				  sizeof (ptr_info_def)) != 0)
3253 		{
3254 		  /* Save old info.  */
3255 		  if (! VN_INFO (to)->info.ptr_info)
3256 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3257 		  /* Rather than allocating memory and unioning the info
3258 		     just clear it.  */
3259 		  SSA_NAME_PTR_INFO (to) = NULL;
3260 		}
3261 	    }
3262 	}
3263 
3264       VN_INFO (from)->valnum = to;
3265       if (dump_file && (dump_flags & TDF_DETAILS))
3266 	fprintf (dump_file, " (changed)\n");
3267       return true;
3268     }
3269   if (dump_file && (dump_flags & TDF_DETAILS))
3270     fprintf (dump_file, "\n");
3271   return false;
3272 }
3273 
3274 /* Mark as processed all the definitions in the defining stmt of USE, or
3275    the USE itself.  */
3276 
3277 static void
3278 mark_use_processed (tree use)
3279 {
3280   ssa_op_iter iter;
3281   def_operand_p defp;
3282   gimple *stmt = SSA_NAME_DEF_STMT (use);
3283 
3284   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3285     {
3286       VN_INFO (use)->use_processed = true;
3287       return;
3288     }
3289 
3290   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3291     {
3292       tree def = DEF_FROM_PTR (defp);
3293 
3294       VN_INFO (def)->use_processed = true;
3295     }
3296 }
3297 
3298 /* Value number all definitions in STMT to themselves.
3299    Return true if a value number changed. */
3300 
3301 static bool
3302 defs_to_varying (gimple *stmt)
3303 {
3304   bool changed = false;
3305   ssa_op_iter iter;
3306   def_operand_p defp;
3307 
3308   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3309     {
3310       tree def = DEF_FROM_PTR (defp);
3311       changed |= set_ssa_val_to (def, def);
3312     }
3313   return changed;
3314 }
3315 
3316 /* Visit a copy between LHS and RHS, return true if the value number
3317    changed.  */
3318 
3319 static bool
3320 visit_copy (tree lhs, tree rhs)
3321 {
3322   /* Valueize.  */
3323   rhs = SSA_VAL (rhs);
3324 
3325   return set_ssa_val_to (lhs, rhs);
3326 }
3327 
3328 /* Visit a nary operator RHS, value number it, and return true if the
3329    value number of LHS has changed as a result.  */
3330 
3331 static bool
3332 visit_nary_op (tree lhs, gimple *stmt)
3333 {
3334   bool changed = false;
3335   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3336 
3337   if (result)
3338     changed = set_ssa_val_to (lhs, result);
3339   else
3340     {
3341       changed = set_ssa_val_to (lhs, lhs);
3342       vn_nary_op_insert_stmt (stmt, lhs);
3343     }
3344 
3345   return changed;
3346 }
3347 
3348 /* Visit a call STMT storing into LHS.  Return true if the value number
3349    of the LHS has changed as a result.  */
3350 
3351 static bool
3352 visit_reference_op_call (tree lhs, gcall *stmt)
3353 {
3354   bool changed = false;
3355   struct vn_reference_s vr1;
3356   vn_reference_t vnresult = NULL;
3357   tree vdef = gimple_vdef (stmt);
3358 
3359   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
3360   if (lhs && TREE_CODE (lhs) != SSA_NAME)
3361     lhs = NULL_TREE;
3362 
3363   vn_reference_lookup_call (stmt, &vnresult, &vr1);
3364   if (vnresult)
3365     {
3366       if (vnresult->result_vdef && vdef)
3367 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3368       else if (vdef)
3369 	/* If the call was discovered to be pure or const reflect
3370 	   that as far as possible.  */
3371 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3372 
3373       if (!vnresult->result && lhs)
3374 	vnresult->result = lhs;
3375 
3376       if (vnresult->result && lhs)
3377 	changed |= set_ssa_val_to (lhs, vnresult->result);
3378     }
3379   else
3380     {
3381       vn_reference_t vr2;
3382       vn_reference_s **slot;
3383       if (vdef)
3384 	changed |= set_ssa_val_to (vdef, vdef);
3385       if (lhs)
3386 	changed |= set_ssa_val_to (lhs, lhs);
3387       vr2 = current_info->references_pool->allocate ();
3388       vr2->vuse = vr1.vuse;
3389       /* As we are not walking the virtual operand chain we know the
3390 	 shared_lookup_references are still original so we can re-use
3391 	 them here.  */
3392       vr2->operands = vr1.operands.copy ();
3393       vr2->type = vr1.type;
3394       vr2->set = vr1.set;
3395       vr2->hashcode = vr1.hashcode;
3396       vr2->result = lhs;
3397       vr2->result_vdef = vdef;
3398       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3399 							    INSERT);
3400       gcc_assert (!*slot);
3401       *slot = vr2;
3402     }
3403 
3404   return changed;
3405 }
3406 
3407 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3408    and return true if the value number of the LHS has changed as a result.  */
3409 
3410 static bool
3411 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3412 {
3413   bool changed = false;
3414   tree last_vuse;
3415   tree result;
3416 
3417   last_vuse = gimple_vuse (stmt);
3418   last_vuse_ptr = &last_vuse;
3419   result = vn_reference_lookup (op, gimple_vuse (stmt),
3420 				default_vn_walk_kind, NULL, true);
3421   last_vuse_ptr = NULL;
3422 
3423   /* We handle type-punning through unions by value-numbering based
3424      on offset and size of the access.  Be prepared to handle a
3425      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
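
  /* Illustrative sketch of such a punned access (hypothetical source):

       union { int i; float f; } u;
       u.i = k_1;
       x_2 = u.f;

     The load of u.f has the same offset and size as the store to u.i, so
     it can reuse the stored value, but only wrapped in
     VIEW_CONVERT_EXPR <float> (k_1) because the types differ.  */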
3426   if (result
3427       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3428     {
3429       /* We will be setting the value number of lhs to the value number
3430 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3431 	 So first simplify and lookup this expression to see if it
3432 	 is already available.  */
3433       mprts_hook = vn_lookup_simplify_result;
3434       code_helper rcode = VIEW_CONVERT_EXPR;
3435       tree ops[3] = { result };
3436       bool res = gimple_resimplify1 (NULL, &rcode, TREE_TYPE (op), ops,
3437 				     vn_valueize);
3438       mprts_hook = NULL;
3439       gimple *new_stmt = NULL;
3440       if (res
3441 	  && gimple_simplified_result_is_gimple_val (rcode, ops))
3442 	/* The expression is already available.  */
3443 	result = ops[0];
3444       else
3445 	{
3446 	  tree val = vn_lookup_simplify_result (rcode, TREE_TYPE (op), ops);
3447 	  if (!val)
3448 	    {
3449 	      gimple_seq stmts = NULL;
3450 	      result = maybe_push_res_to_seq (rcode, TREE_TYPE (op), ops,
3451 					      &stmts);
3452 	      if (result)
3453 		{
3454 		  gcc_assert (gimple_seq_singleton_p (stmts));
3455 		  new_stmt = gimple_seq_first_stmt (stmts);
3456 		}
3457 	    }
3458 	  else
3459 	    /* The expression is already available.  */
3460 	    result = val;
3461 	}
3462       if (new_stmt)
3463 	{
3464 	  /* The expression is not yet available, value-number lhs to
3465 	     the new SSA_NAME we created.  */
3466 	  /* Initialize value-number information properly.  */
3467 	  VN_INFO_GET (result)->valnum = result;
3468 	  VN_INFO (result)->value_id = get_next_value_id ();
3469 	  gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
3470 					      new_stmt);
3471 	  VN_INFO (result)->needs_insertion = true;
3472 	  /* As all "inserted" statements are singleton SCCs, insert
3473 	     to the valid table.  This is strictly needed to
3474 	     avoid re-generating new value SSA_NAMEs for the same
3475 	     expression during SCC iteration over and over (the
3476 	     optimistic table gets cleared after each iteration).
3477 	     We do not need to insert into the optimistic table, as
3478 	     lookups there will fall back to the valid table.  */
3479 	  if (current_info == optimistic_info)
3480 	    {
3481 	      current_info = valid_info;
3482 	      vn_nary_op_insert_stmt (new_stmt, result);
3483 	      current_info = optimistic_info;
3484 	    }
3485 	  else
3486 	    vn_nary_op_insert_stmt (new_stmt, result);
3487 	  if (dump_file && (dump_flags & TDF_DETAILS))
3488 	    {
3489 	      fprintf (dump_file, "Inserting name ");
3490 	      print_generic_expr (dump_file, result, 0);
3491 	      fprintf (dump_file, " for expression ");
3492 	      print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
3493 	      fprintf (dump_file, "\n");
3494 	    }
3495 	}
3496     }
3497 
3498   if (result)
3499     changed = set_ssa_val_to (lhs, result);
3500   else
3501     {
3502       changed = set_ssa_val_to (lhs, lhs);
3503       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3504     }
3505 
3506   return changed;
3507 }
3508 
3509 
3510 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3511    and return true if the value number of the LHS has changed as a result.  */
3512 
3513 static bool
3514 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3515 {
3516   bool changed = false;
3517   vn_reference_t vnresult = NULL;
3518   tree assign;
3519   bool resultsame = false;
3520   tree vuse = gimple_vuse (stmt);
3521   tree vdef = gimple_vdef (stmt);
3522 
3523   if (TREE_CODE (op) == SSA_NAME)
3524     op = SSA_VAL (op);
3525 
3526   /* First we want to look up using the *vuses* from the store and see
3527      whether the last store to this location with the same address
3528      had the same value.
3529 
3530      The vuses represent the memory state before the store.  If the
3531      memory state, address, and value of the store are the same as those
3532      of the last store to this location, then this store will produce the
3533      same memory state as that store.
3534 
3535      In this case the vdef versions for this store are value numbered to those
3536      vuse versions, since they represent the same memory state after
3537      this store.
3538 
3539      Otherwise, the vdefs for the store are used when inserting into
3540      the table, since the store generates a new memory state.  */
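
  /* Illustrative sketch (not from a real testcase):

       # .MEM_3 = VDEF <.MEM_2>
       a = x_1;
       # .MEM_4 = VDEF <.MEM_3>
       a = x_1;

     The second store writes the same value to the same location in the
     memory state produced by the first, so .MEM_4 is value numbered to
     .MEM_3 and the second store is redundant.  */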
3541 
3542   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3543   if (vnresult
3544       && vnresult->result)
3545     {
3546       tree result = vnresult->result;
3547       if (TREE_CODE (result) == SSA_NAME)
3548 	result = SSA_VAL (result);
3549       resultsame = expressions_equal_p (result, op);
3550       if (resultsame)
3551 	{
3552 	  /* If the TBAA state isn't compatible for downstream reads
3553 	     we cannot value-number the VDEFs the same.  */
3554 	  alias_set_type set = get_alias_set (lhs);
3555 	  if (vnresult->set != set
3556 	      && ! alias_set_subset_of (set, vnresult->set))
3557 	    resultsame = false;
3558 	}
3559     }
3560 
3561   if (!resultsame)
3562     {
3563       /* Only perform the following when being called from PRE
3564 	 which embeds tail merging.  */
3565       if (default_vn_walk_kind == VN_WALK)
3566 	{
3567 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3568 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3569 	  if (vnresult)
3570 	    {
3571 	      VN_INFO (vdef)->use_processed = true;
3572 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3573 	    }
3574 	}
3575 
3576       if (dump_file && (dump_flags & TDF_DETAILS))
3577 	{
3578 	  fprintf (dump_file, "No store match\n");
3579 	  fprintf (dump_file, "Value numbering store ");
3580 	  print_generic_expr (dump_file, lhs, 0);
3581 	  fprintf (dump_file, " to ");
3582 	  print_generic_expr (dump_file, op, 0);
3583 	  fprintf (dump_file, "\n");
3584 	}
3585       /* Have to set value numbers before insert, since insert is
3586 	 going to valueize the references in-place.  */
3587       if (vdef)
3588 	changed |= set_ssa_val_to (vdef, vdef);
3589 
3590       /* Do not insert structure copies into the tables.  */
3591       if (is_gimple_min_invariant (op)
3592 	  || is_gimple_reg (op))
3593         vn_reference_insert (lhs, op, vdef, NULL);
3594 
3595       /* Only perform the following when being called from PRE
3596 	 which embeds tail merging.  */
3597       if (default_vn_walk_kind == VN_WALK)
3598 	{
3599 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3600 	  vn_reference_insert (assign, lhs, vuse, vdef);
3601 	}
3602     }
3603   else
3604     {
3605       /* We had a match, so value number the vdef to have the value
3606 	 number of the vuse it came from.  */
3607 
3608       if (dump_file && (dump_flags & TDF_DETAILS))
3609 	fprintf (dump_file, "Store matched earlier value, "
3610 		 "value numbering store vdefs to matching vuses.\n");
3611 
3612       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3613     }
3614 
3615   return changed;
3616 }
3617 
3618 /* Visit and value number PHI, return true if the value number
3619    changed.  */
3620 
3621 static bool
3622 visit_phi (gimple *phi)
3623 {
3624   bool changed = false;
3625   tree result;
3626   tree sameval = VN_TOP;
3627   bool allsame = true;
3628   unsigned n_executable = 0;
3629 
3630   /* TODO: We could check for this in init_sccvn, and replace this
3631      with a gcc_assert.  */
3632   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3633     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3634 
3635   /* See if all non-TOP arguments have the same value.  TOP is
3636      equivalent to everything, so we can ignore it.  */
3637   edge_iterator ei;
3638   edge e;
3639   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3640     if (e->flags & EDGE_EXECUTABLE)
3641       {
3642 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3643 
3644 	++n_executable;
3645 	if (TREE_CODE (def) == SSA_NAME)
3646 	  def = SSA_VAL (def);
3647 	if (def == VN_TOP)
3648 	  continue;
3649 	if (sameval == VN_TOP)
3650 	  sameval = def;
3651 	else if (!expressions_equal_p (def, sameval))
3652 	  {
3653 	    allsame = false;
3654 	    break;
3655 	  }
3656       }
3657 
3658   /* If none of the edges was executable or all incoming values are
3659      undefined keep the value-number at VN_TOP.  If only a single edge
3660      is executable use its value.  */
3661   if (sameval == VN_TOP
3662       || n_executable == 1)
3663     return set_ssa_val_to (PHI_RESULT (phi), sameval);
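
  /* Illustrative sketch: for

       x_4 = PHI <x_2(3), x_2(4), x_3(5)>

     where the edge from block 5 was found not executable, all executable
     arguments have the value of x_2, so allsame is true and x_4 can be
     value numbered to that value below (unless an equivalent PHI in this
     block is found first).  */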
3664 
3665   /* First see if it is equivalent to a phi node in this block.  We prefer
3666      this as it allows IV elimination - see PRs 66502 and 67167.  */
3667   result = vn_phi_lookup (phi);
3668   if (result)
3669     changed = set_ssa_val_to (PHI_RESULT (phi), result);
3670   /* Otherwise all value numbered to the same value, the phi node has that
3671      value.  */
3672   else if (allsame)
3673     changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3674   else
3675     {
3676       vn_phi_insert (phi, PHI_RESULT (phi));
3677       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3678     }
3679 
3680   return changed;
3681 }
3682 
3683 /* Try to simplify RHS using equivalences and constant folding.  */
3684 
3685 static tree
3686 try_to_simplify (gassign *stmt)
3687 {
3688   enum tree_code code = gimple_assign_rhs_code (stmt);
3689   tree tem;
3690 
3691   /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
3692      in this case; there is no point in doing extra work.  */
3693   if (code == SSA_NAME)
3694     return NULL_TREE;
3695 
3696   /* First try constant folding based on our current lattice.  */
3697   mprts_hook = vn_lookup_simplify_result;
3698   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3699   mprts_hook = NULL;
3700   if (tem
3701       && (TREE_CODE (tem) == SSA_NAME
3702 	  || is_gimple_min_invariant (tem)))
3703     return tem;
3704 
3705   return NULL_TREE;
3706 }
3707 
3708 /* Visit and value number USE, return true if the value number
3709    changed. */
3710 
3711 static bool
3712 visit_use (tree use)
3713 {
3714   bool changed = false;
3715   gimple *stmt = SSA_NAME_DEF_STMT (use);
3716 
3717   mark_use_processed (use);
3718 
3719   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3720   if (dump_file && (dump_flags & TDF_DETAILS)
3721       && !SSA_NAME_IS_DEFAULT_DEF (use))
3722     {
3723       fprintf (dump_file, "Value numbering ");
3724       print_generic_expr (dump_file, use, 0);
3725       fprintf (dump_file, " stmt = ");
3726       print_gimple_stmt (dump_file, stmt, 0, 0);
3727     }
3728 
3729   /* Handle uninitialized uses.  */
3730   if (SSA_NAME_IS_DEFAULT_DEF (use))
3731     changed = set_ssa_val_to (use, use);
3732   else if (gimple_code (stmt) == GIMPLE_PHI)
3733     changed = visit_phi (stmt);
3734   else if (gimple_has_volatile_ops (stmt))
3735     changed = defs_to_varying (stmt);
3736   else if (gassign *ass = dyn_cast <gassign *> (stmt))
3737     {
3738       enum tree_code code = gimple_assign_rhs_code (ass);
3739       tree lhs = gimple_assign_lhs (ass);
3740       tree rhs1 = gimple_assign_rhs1 (ass);
3741       tree simplified;
3742 
3743       /* Shortcut for copies. Simplifying copies is pointless,
3744 	 since we copy the expression and value they represent.  */
3745       if (code == SSA_NAME
3746 	  && TREE_CODE (lhs) == SSA_NAME)
3747 	{
3748 	  changed = visit_copy (lhs, rhs1);
3749 	  goto done;
3750 	}
3751       simplified = try_to_simplify (ass);
3752       if (simplified)
3753 	{
3754 	  if (dump_file && (dump_flags & TDF_DETAILS))
3755 	    {
3756 	      fprintf (dump_file, "RHS ");
3757 	      print_gimple_expr (dump_file, ass, 0, 0);
3758 	      fprintf (dump_file, " simplified to ");
3759 	      print_generic_expr (dump_file, simplified, 0);
3760 	      fprintf (dump_file, "\n");
3761 	    }
3762 	}
3763       /* Setting value numbers to constants will occasionally
3764 	 screw up phi congruence because constants are not
3765 	 uniquely associated with a single ssa name that can be
3766 	 looked up.  */
3767       if (simplified
3768 	  && is_gimple_min_invariant (simplified)
3769 	  && TREE_CODE (lhs) == SSA_NAME)
3770 	{
3771 	  changed = set_ssa_val_to (lhs, simplified);
3772 	  goto done;
3773 	}
3774       else if (simplified
3775 	       && TREE_CODE (simplified) == SSA_NAME
3776 	       && TREE_CODE (lhs) == SSA_NAME)
3777 	{
3778 	  changed = visit_copy (lhs, simplified);
3779 	  goto done;
3780 	}
3781 
3782       if ((TREE_CODE (lhs) == SSA_NAME
3783 	   /* We can substitute SSA_NAMEs that are live over
3784 	      abnormal edges with their constant value.  */
3785 	   && !(gimple_assign_copy_p (ass)
3786 		&& is_gimple_min_invariant (rhs1))
3787 	   && !(simplified
3788 		&& is_gimple_min_invariant (simplified))
3789 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3790 	  /* Stores or copies from SSA_NAMEs that are live over
3791 	     abnormal edges are a problem.  */
3792 	  || (code == SSA_NAME
3793 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3794 	changed = defs_to_varying (ass);
3795       else if (REFERENCE_CLASS_P (lhs)
3796 	       || DECL_P (lhs))
3797 	changed = visit_reference_op_store (lhs, rhs1, ass);
3798       else if (TREE_CODE (lhs) == SSA_NAME)
3799 	{
3800 	  if ((gimple_assign_copy_p (ass)
3801 	       && is_gimple_min_invariant (rhs1))
3802 	      || (simplified
3803 		  && is_gimple_min_invariant (simplified)))
3804 	    {
3805 	      if (simplified)
3806 		changed = set_ssa_val_to (lhs, simplified);
3807 	      else
3808 		changed = set_ssa_val_to (lhs, rhs1);
3809 	    }
3810 	  else
3811 	    {
3812 	      /* Visit the original statement.  */
3813 	      switch (vn_get_stmt_kind (ass))
3814 		{
3815 		case VN_NARY:
3816 		changed = visit_nary_op (lhs, ass);
3817 		break;
3818 		case VN_REFERENCE:
3819 		changed = visit_reference_op_load (lhs, rhs1, ass);
3820 		break;
3821 		default:
3822 		changed = defs_to_varying (ass);
3823 		break;
3824 		}
3825 	    }
3826 	}
3827       else
3828 	changed = defs_to_varying (ass);
3829     }
3830   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
3831     {
3832       tree lhs = gimple_call_lhs (call_stmt);
3833       if (lhs && TREE_CODE (lhs) == SSA_NAME)
3834 	{
3835 	  /* Try constant folding based on our current lattice.  */
3836 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
3837 							    vn_valueize);
3838 	  if (simplified)
3839 	    {
3840 	      if (dump_file && (dump_flags & TDF_DETAILS))
3841 		{
3842 		  fprintf (dump_file, "call ");
3843 		  print_gimple_expr (dump_file, call_stmt, 0, 0);
3844 		  fprintf (dump_file, " simplified to ");
3845 		  print_generic_expr (dump_file, simplified, 0);
3846 		  fprintf (dump_file, "\n");
3847 		}
3848 	    }
3849 	  /* Setting value numbers to constants will occasionally
3850 	     screw up phi congruence because constants are not
3851 	     uniquely associated with a single ssa name that can be
3852 	     looked up.  */
3853 	  if (simplified
3854 	      && is_gimple_min_invariant (simplified))
3855 	    {
3856 	      changed = set_ssa_val_to (lhs, simplified);
3857 	      if (gimple_vdef (call_stmt))
3858 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3859 					   SSA_VAL (gimple_vuse (call_stmt)));
3860 	      goto done;
3861 	    }
3862 	  else if (simplified
3863 		   && TREE_CODE (simplified) == SSA_NAME)
3864 	    {
3865 	      changed = visit_copy (lhs, simplified);
3866 	      if (gimple_vdef (call_stmt))
3867 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
3868 					   SSA_VAL (gimple_vuse (call_stmt)));
3869 	      goto done;
3870 	    }
3871 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3872 	    {
3873 	      changed = defs_to_varying (call_stmt);
3874 	      goto done;
3875 	    }
3876 	}
3877 
3878       if (!gimple_call_internal_p (call_stmt)
3879 	  && (/* Calls to the same function with the same vuse
3880 		 and the same operands do not necessarily return the same
3881 		 value, unless they're pure or const.  */
3882 	      gimple_call_flags (call_stmt) & (ECF_PURE | ECF_CONST)
3883 	      /* If calls have a vdef, subsequent calls won't have
3884 		 the same incoming vuse.  So, if 2 calls with vdef have the
3885 		 same vuse, we know they're not subsequent.
3886 		 We can value number two calls to the same function with the
3887 		 same vuse and the same operands that are not subsequent to
3888 		 each other the same, because there is no code in the program
3889 		 that can compare the two values...  */
3890 	      || (gimple_vdef (call_stmt)
3891 		  /* ... unless the call returns a pointer which does
3892 		     not alias with anything else.  In that case the
3893 		     information that the values are distinct is encoded
3894 		     in the IL.  */
3895 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
3896 		  /* Only perform the following when being called from PRE
3897 		     which embeds tail merging.  */
3898 		  && default_vn_walk_kind == VN_WALK)))
3899 	changed = visit_reference_op_call (lhs, call_stmt);
3900       else
3901 	changed = defs_to_varying (call_stmt);
3902     }
3903   else
3904     changed = defs_to_varying (stmt);
3905  done:
3906   return changed;
3907 }
3908 
3909 /* Compare two operands by reverse postorder index.  */
3910 
3911 static int
3912 compare_ops (const void *pa, const void *pb)
3913 {
3914   const tree opa = *((const tree *)pa);
3915   const tree opb = *((const tree *)pb);
3916   gimple *opstmta = SSA_NAME_DEF_STMT (opa);
3917   gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
3918   basic_block bba;
3919   basic_block bbb;
3920 
3921   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3922     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3923   else if (gimple_nop_p (opstmta))
3924     return -1;
3925   else if (gimple_nop_p (opstmtb))
3926     return 1;
3927 
3928   bba = gimple_bb (opstmta);
3929   bbb = gimple_bb (opstmtb);
3930 
3931   if (!bba && !bbb)
3932     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3933   else if (!bba)
3934     return -1;
3935   else if (!bbb)
3936     return 1;
3937 
3938   if (bba == bbb)
3939     {
3940       if (gimple_code (opstmta) == GIMPLE_PHI
3941 	  && gimple_code (opstmtb) == GIMPLE_PHI)
3942 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3943       else if (gimple_code (opstmta) == GIMPLE_PHI)
3944 	return -1;
3945       else if (gimple_code (opstmtb) == GIMPLE_PHI)
3946 	return 1;
3947       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3948         return gimple_uid (opstmta) - gimple_uid (opstmtb);
3949       else
3950 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3951     }
3952   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3953 }
3954 
3955 /* Sort an array containing members of a strongly connected component
3956    SCC so that the members are ordered by RPO number.
3957    This means that when the sort is complete, iterating through the
3958    array will give you the members in RPO order.  */
3959 
3960 static void
3961 sort_scc (vec<tree> scc)
3962 {
3963   scc.qsort (compare_ops);
3964 }
3965 
3966 /* Insert the no longer used nary ONARY to the hash INFO.  */
3967 
3968 static void
3969 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3970 {
3971   size_t size = sizeof_vn_nary_op (onary->length);
3972   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3973 					       &info->nary_obstack);
3974   memcpy (nary, onary, size);
3975   vn_nary_op_insert_into (nary, info->nary, false);
3976 }
3977 
3978 /* Insert the no longer used phi OPHI to the hash INFO.  */
3979 
3980 static void
3981 copy_phi (vn_phi_t ophi, vn_tables_t info)
3982 {
3983   vn_phi_t phi = info->phis_pool->allocate ();
3984   vn_phi_s **slot;
3985   memcpy (phi, ophi, sizeof (*phi));
3986   ophi->phiargs.create (0);
3987   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
3988   gcc_assert (!*slot);
3989   *slot = phi;
3990 }
3991 
3992 /* Insert the no longer used reference OREF to the hash INFO.  */
3993 
3994 static void
3995 copy_reference (vn_reference_t oref, vn_tables_t info)
3996 {
3997   vn_reference_t ref;
3998   vn_reference_s **slot;
3999   ref = info->references_pool->allocate ();
4000   memcpy (ref, oref, sizeof (*ref));
4001   oref->operands.create (0);
4002   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4003   if (*slot)
4004     free_reference (*slot);
4005   *slot = ref;
4006 }
4007 
4008 /* Process a strongly connected component in the SSA graph.  */
4009 
4010 static void
4011 process_scc (vec<tree> scc)
4012 {
4013   tree var;
4014   unsigned int i;
4015   unsigned int iterations = 0;
4016   bool changed = true;
4017   vn_nary_op_iterator_type hin;
4018   vn_phi_iterator_type hip;
4019   vn_reference_iterator_type hir;
4020   vn_nary_op_t nary;
4021   vn_phi_t phi;
4022   vn_reference_t ref;
4023 
4024   /* If the SCC has a single member, just visit it.  */
4025   if (scc.length () == 1)
4026     {
4027       tree use = scc[0];
4028       if (VN_INFO (use)->use_processed)
4029 	return;
4030       /* We need to make sure it doesn't form a cycle itself, which can
4031 	 happen for self-referential PHI nodes.  In that case we would
4032 	 end up inserting an expression with VN_TOP operands into the
4033 	 valid table which makes us derive bogus equivalences later.
4034 	 The cheapest way to check this is to assume it for all PHI nodes.  */
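
      /* An illustrative case of such a self-referential PHI (hypothetical):

	   i_1 = PHI <0(2), i_1(3)>

	 Even though {i_1} is a singleton SCC it forms a cycle, so we fall
	 through and iterate it like any other SCC.  */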
4035       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4036 	/* Fallthru to iteration.  */ ;
4037       else
4038 	{
4039 	  visit_use (use);
4040 	  return;
4041 	}
4042     }
4043 
4044   if (dump_file && (dump_flags & TDF_DETAILS))
4045     print_scc (dump_file, scc);
4046 
4047   /* Iterate over the SCC with the optimistic table until it stops
4048      changing.  */
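
  /* Illustrative sketch of the iteration (hypothetical SCC):

       i_1 = PHI <0(2), i_2(3)>
       i_2 = i_1 + 1;

     The first iteration optimistically values i_1 to 0 (the only non-TOP
     argument) and thus i_2 to 1; the second iteration retracts that,
     leaving i_1 and i_2 as their own values, and a further iteration
     confirms nothing changes any more.  */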
4049   current_info = optimistic_info;
4050   while (changed)
4051     {
4052       changed = false;
4053       iterations++;
4054       if (dump_file && (dump_flags & TDF_DETAILS))
4055 	fprintf (dump_file, "Starting iteration %d\n", iterations);
4056       /* As we are value-numbering optimistically we have to
4057 	 clear the expression tables and the simplified expressions
4058 	 in each iteration until we converge.  */
4059       optimistic_info->nary->empty ();
4060       optimistic_info->phis->empty ();
4061       optimistic_info->references->empty ();
4062       obstack_free (&optimistic_info->nary_obstack, NULL);
4063       gcc_obstack_init (&optimistic_info->nary_obstack);
4064       optimistic_info->phis_pool->release ();
4065       optimistic_info->references_pool->release ();
4066       FOR_EACH_VEC_ELT (scc, i, var)
4067 	gcc_assert (!VN_INFO (var)->needs_insertion
4068 		    && VN_INFO (var)->expr == NULL);
4069       FOR_EACH_VEC_ELT (scc, i, var)
4070 	changed |= visit_use (var);
4071     }
4072 
4073   if (dump_file && (dump_flags & TDF_DETAILS))
4074     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4075   statistics_histogram_event (cfun, "SCC iterations", iterations);
4076 
4077   /* Finally, copy the contents of the no longer used optimistic
4078      table to the valid table.  */
4079   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4080     copy_nary (nary, valid_info);
4081   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4082     copy_phi (phi, valid_info);
4083   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4084 			       ref, vn_reference_t, hir)
4085     copy_reference (ref, valid_info);
4086 
4087   current_info = valid_info;
4088 }
4089 
4090 
4091 /* Pop the components of the found SCC for NAME off the SCC stack
4092    and process them.  Returns true if all went well, false if
4093    we run into resource limits.  */
4094 
4095 static bool
4096 extract_and_process_scc_for_name (tree name)
4097 {
4098   auto_vec<tree> scc;
4099   tree x;
4100 
4101   /* Found an SCC, pop the components off the SCC stack and
4102      process them.  */
4103   do
4104     {
4105       x = sccstack.pop ();
4106 
4107       VN_INFO (x)->on_sccstack = false;
4108       scc.safe_push (x);
4109     } while (x != name);
4110 
4111   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
4112   if (scc.length ()
4113       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4114     {
4115       if (dump_file)
4116 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4117 		 "SCC size %u exceeding %u\n", scc.length (),
4118 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4119 
4120       return false;
4121     }
4122 
4123   if (scc.length () > 1)
4124     sort_scc (scc);
4125 
4126   process_scc (scc);
4127 
4128   return true;
4129 }
4130 
4131 /* Depth first search on NAME to discover and process SCC's in the SSA
4132    graph.
4133    Execution of this algorithm relies on the fact that the SCC's are
4134    popped off the stack in topological order.
4135    Returns true if successful, false if we stopped processing SCC's due
4136    to resource constraints.  */
4137 
4138 static bool
4139 DFS (tree name)
4140 {
4141   vec<ssa_op_iter> itervec = vNULL;
4142   vec<tree> namevec = vNULL;
4143   use_operand_p usep = NULL;
4144   gimple *defstmt;
4145   tree use;
4146   ssa_op_iter iter;
4147 
4148 start_over:
4149   /* SCC info */
4150   VN_INFO (name)->dfsnum = next_dfs_num++;
4151   VN_INFO (name)->visited = true;
4152   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4153 
4154   sccstack.safe_push (name);
4155   VN_INFO (name)->on_sccstack = true;
4156   defstmt = SSA_NAME_DEF_STMT (name);
4157 
4158   /* Recursively DFS on our operands, looking for SCC's.  */
4159   if (!gimple_nop_p (defstmt))
4160     {
4161       /* Push a new iterator.  */
4162       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4163 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4164       else
4165 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4166     }
4167   else
4168     clear_and_done_ssa_iter (&iter);
4169 
4170   while (1)
4171     {
4172       /* If we are done processing uses of a name, go up the stack
4173 	 of iterators and process SCCs as we found them.  */
4174       if (op_iter_done (&iter))
4175 	{
4176 	  /* See if we found an SCC.  */
4177 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4178 	    if (!extract_and_process_scc_for_name (name))
4179 	      {
4180 		namevec.release ();
4181 		itervec.release ();
4182 		return false;
4183 	      }
4184 
4185 	  /* Check if we are done.  */
4186 	  if (namevec.is_empty ())
4187 	    {
4188 	      namevec.release ();
4189 	      itervec.release ();
4190 	      return true;
4191 	    }
4192 
4193 	  /* Restore the last use walker and continue walking there.  */
4194 	  use = name;
4195 	  name = namevec.pop ();
4196 	  memcpy (&iter, &itervec.last (),
4197 		  sizeof (ssa_op_iter));
4198 	  itervec.pop ();
4199 	  goto continue_walking;
4200 	}
4201 
4202       use = USE_FROM_PTR (usep);
4203 
4204       /* Since we handle phi nodes, we will sometimes get
4205 	 invariants in the use expression.  */
4206       if (TREE_CODE (use) == SSA_NAME)
4207 	{
4208 	  if (! (VN_INFO (use)->visited))
4209 	    {
4210 	      /* Recurse by pushing the current use walking state on
4211 		 the stack and starting over.  */
4212 	      itervec.safe_push (iter);
4213 	      namevec.safe_push (name);
4214 	      name = use;
4215 	      goto start_over;
4216 
4217 continue_walking:
4218 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4219 					 VN_INFO (use)->low);
4220 	    }
4221 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4222 	      && VN_INFO (use)->on_sccstack)
4223 	    {
4224 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4225 					 VN_INFO (name)->low);
4226 	    }
4227 	}
4228 
4229       usep = op_iter_next_use (&iter);
4230     }
4231 }
4232 
4233 /* Allocate a value number table.  */
4234 
4235 static void
4236 allocate_vn_table (vn_tables_t table)
4237 {
4238   table->phis = new vn_phi_table_type (23);
4239   table->nary = new vn_nary_op_table_type (23);
4240   table->references = new vn_reference_table_type (23);
4241 
4242   gcc_obstack_init (&table->nary_obstack);
4243   table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4244   table->references_pool = new object_allocator<vn_reference_s>
4245     ("VN references");
4246 }
4247 
4248 /* Free a value number table.  */
4249 
4250 static void
4251 free_vn_table (vn_tables_t table)
4252 {
4253   delete table->phis;
4254   table->phis = NULL;
4255   delete table->nary;
4256   table->nary = NULL;
4257   delete table->references;
4258   table->references = NULL;
4259   obstack_free (&table->nary_obstack, NULL);
4260   delete table->phis_pool;
4261   delete table->references_pool;
4262 }
4263 
4264 static void
4265 init_scc_vn (void)
4266 {
4267   size_t i;
4268   int j;
4269   int *rpo_numbers_temp;
4270 
4271   calculate_dominance_info (CDI_DOMINATORS);
4272   mark_dfs_back_edges ();
4273 
4274   sccstack.create (0);
4275   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4276 
4277   constant_value_ids = BITMAP_ALLOC (NULL);
4278 
4279   next_dfs_num = 1;
4280   next_value_id = 1;
4281 
4282   vn_ssa_aux_table.create (num_ssa_names + 1);
4283   /* VEC_alloc doesn't actually grow it to the right size, it just
4284      preallocates the space to do so.  */
4285   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4286   gcc_obstack_init (&vn_ssa_aux_obstack);
4287 
4288   shared_lookup_phiargs.create (0);
4289   shared_lookup_references.create (0);
4290   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4291   rpo_numbers_temp =
4292     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4293   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4294 
4295   /* rpo_numbers_temp is an array in RPO order: rpo[i] = bb means that
4296      the i'th block in RPO order is bb.  We want to map bb's to RPO
4297      numbers, so we need to invert this mapping.  */
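  /* For example, if rpo_numbers_temp is { 2, 3, 4 } then block 2 gets RPO
     number 0, block 3 gets 1 and block 4 gets 2.  */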
4298   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4299     rpo_numbers[rpo_numbers_temp[j]] = j;
4300 
4301   XDELETE (rpo_numbers_temp);
4302 
4303   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4304 
4305   renumber_gimple_stmt_uids ();
4306 
4307   /* Create the valid and optimistic value numbering tables.  */
4308   valid_info = XCNEW (struct vn_tables_s);
4309   allocate_vn_table (valid_info);
4310   optimistic_info = XCNEW (struct vn_tables_s);
4311   allocate_vn_table (optimistic_info);
4312   current_info = valid_info;
4313 
4314   /* Create the VN_INFO structures, and initialize value numbers to
4315      TOP or VARYING for parameters.  */
4316   for (i = 1; i < num_ssa_names; i++)
4317     {
4318       tree name = ssa_name (i);
4319       if (!name)
4320 	continue;
4321 
4322       VN_INFO_GET (name)->valnum = VN_TOP;
4323       VN_INFO (name)->needs_insertion = false;
4324       VN_INFO (name)->expr = NULL;
4325       VN_INFO (name)->value_id = 0;
4326 
4327       if (!SSA_NAME_IS_DEFAULT_DEF (name))
4328 	continue;
4329 
4330       switch (TREE_CODE (SSA_NAME_VAR (name)))
4331 	{
4332 	case VAR_DECL:
4333 	  /* Undefined vars keep TOP.  */
4334 	  break;
4335 
4336 	case PARM_DECL:
4337 	  /* Parameters are VARYING but we can record a condition
4338 	     if we know it is a non-NULL pointer.  */
4339 	  VN_INFO (name)->visited = true;
4340 	  VN_INFO (name)->valnum = name;
4341 	  if (POINTER_TYPE_P (TREE_TYPE (name))
4342 	      && nonnull_arg_p (SSA_NAME_VAR (name)))
4343 	    {
4344 	      tree ops[2];
4345 	      ops[0] = name;
4346 	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
4347 	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4348 					boolean_true_node, 0);
4349 	      if (dump_file && (dump_flags & TDF_DETAILS))
4350 		{
4351 		  fprintf (dump_file, "Recording ");
4352 		  print_generic_expr (dump_file, name, TDF_SLIM);
4353 		  fprintf (dump_file, " != 0\n");
4354 		}
4355 	    }
4356 	  break;
4357 
4358 	case RESULT_DECL:
4359 	  /* If the result is passed by invisible reference the default
4360 	     def is initialized, otherwise it's uninitialized.  */
4361 	  if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4362 	    {
4363 	      VN_INFO (name)->visited = true;
4364 	      VN_INFO (name)->valnum = name;
4365 	    }
4366 	  break;
4367 
4368 	default:
4369 	  gcc_unreachable ();
4370 	}
4371     }
4372 }
4373 
4374 /* Restore SSA info that has been reset on value leaders.  */
4375 
4376 void
4377 scc_vn_restore_ssa_info (void)
4378 {
4379   for (unsigned i = 0; i < num_ssa_names; i++)
4380     {
4381       tree name = ssa_name (i);
4382       if (name
4383 	  && has_VN_INFO (name))
4384 	{
4385 	  if (VN_INFO (name)->needs_insertion)
4386 	    ;
4387 	  else if (POINTER_TYPE_P (TREE_TYPE (name))
4388 		   && VN_INFO (name)->info.ptr_info)
4389 	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4390 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4391 		   && VN_INFO (name)->info.range_info)
4392 	    {
4393 	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4394 	      SSA_NAME_ANTI_RANGE_P (name)
4395 		= VN_INFO (name)->range_info_anti_range_p;
4396 	    }
4397 	}
4398     }
4399 }
4400 
4401 void
4402 free_scc_vn (void)
4403 {
4404   size_t i;
4405 
4406   delete constant_to_value_id;
4407   constant_to_value_id = NULL;
4408   BITMAP_FREE (constant_value_ids);
4409   shared_lookup_phiargs.release ();
4410   shared_lookup_references.release ();
4411   XDELETEVEC (rpo_numbers);
4412 
4413   for (i = 0; i < num_ssa_names; i++)
4414     {
4415       tree name = ssa_name (i);
4416       if (name
4417 	  && has_VN_INFO (name)
4418 	  && VN_INFO (name)->needs_insertion)
4419 	release_ssa_name (name);
4420     }
4421   obstack_free (&vn_ssa_aux_obstack, NULL);
4422   vn_ssa_aux_table.release ();
4423 
4424   sccstack.release ();
4425   free_vn_table (valid_info);
4426   XDELETE (valid_info);
4427   free_vn_table (optimistic_info);
4428   XDELETE (optimistic_info);
4429 
4430   BITMAP_FREE (const_parms);
4431 }
4432 
4433 /* Set *ID according to RESULT.  */
4434 
4435 static void
4436 set_value_id_for_result (tree result, unsigned int *id)
4437 {
4438   if (result && TREE_CODE (result) == SSA_NAME)
4439     *id = VN_INFO (result)->value_id;
4440   else if (result && is_gimple_min_invariant (result))
4441     *id = get_or_alloc_constant_value_id (result);
4442   else
4443     *id = get_next_value_id ();
4444 }
4445 
4446 /* Set the value ids in the valid hash tables.  */
4447 
4448 static void
4449 set_hashtable_value_ids (void)
4450 {
4451   vn_nary_op_iterator_type hin;
4452   vn_phi_iterator_type hip;
4453   vn_reference_iterator_type hir;
4454   vn_nary_op_t vno;
4455   vn_reference_t vr;
4456   vn_phi_t vp;
4457 
4458   /* Now set the value ids of the things we had put in the hash
4459      table.  */
4460 
4461   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4462     set_value_id_for_result (vno->result, &vno->value_id);
4463 
4464   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4465     set_value_id_for_result (vp->result, &vp->value_id);
4466 
4467   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4468 			       hir)
4469     set_value_id_for_result (vr->result, &vr->value_id);
4470 }
4471 
4472 class sccvn_dom_walker : public dom_walker
4473 {
4474 public:
4475   sccvn_dom_walker ()
4476     : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (vNULL) {}
4477   ~sccvn_dom_walker ();
4478 
4479   virtual edge before_dom_children (basic_block);
4480   virtual void after_dom_children (basic_block);
4481 
4482   void record_cond (basic_block,
4483 		    enum tree_code code, tree lhs, tree rhs, bool value);
4484   void record_conds (basic_block,
4485 		     enum tree_code code, tree lhs, tree rhs, bool value);
4486 
4487   bool fail;
4488   vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4489     cond_stack;
4490 };
4491 
4492 sccvn_dom_walker::~sccvn_dom_walker ()
4493 {
4494   cond_stack.release ();
4495 }
4496 
4497 /* Record a temporary condition for the BB and its dominated blocks.  */
4498 
4499 void
4500 sccvn_dom_walker::record_cond (basic_block bb,
4501 			       enum tree_code code, tree lhs, tree rhs,
4502 			       bool value)
4503 {
4504   tree ops[2] = { lhs, rhs };
4505   vn_nary_op_t old = NULL;
4506   if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4507     current_info->nary->remove_elt_with_hash (old, old->hashcode);
4508   vn_nary_op_t cond
4509     = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4510 				value
4511 				? boolean_true_node
4512 				: boolean_false_node, 0);
4513   if (dump_file && (dump_flags & TDF_DETAILS))
4514     {
4515       fprintf (dump_file, "Recording temporarily ");
4516       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4517       fprintf (dump_file, " %s ", get_tree_code_name (code));
4518       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4519       fprintf (dump_file, " == %s%s\n",
4520 	       value ? "true" : "false",
4521 	       old ? " (old entry saved)" : "");
4522     }
4523   cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4524 }
4525 
4526 /* Record temporary conditions for the BB and its dominated blocks
4527    according to LHS CODE RHS == VALUE and its dominated conditions.  */
4528 
4529 void
4530 sccvn_dom_walker::record_conds (basic_block bb,
4531 				enum tree_code code, tree lhs, tree rhs,
4532 				bool value)
4533 {
4534   /* Record the original condition.  */
4535   record_cond (bb, code, lhs, rhs, value);
4536 
4537   if (!value)
4538     return;
4539 
4540   /* Record dominated conditions if the condition is true.  Note that
4541      the inversion is already recorded.  */
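  /* For example, from LHS < RHS being true we also record LHS <= RHS and
     LHS != RHS as true and LHS == RHS as false below.  */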
4542   switch (code)
4543     {
4544     case LT_EXPR:
4545     case GT_EXPR:
4546       record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4547       record_cond (bb, NE_EXPR, lhs, rhs, true);
4548       record_cond (bb, EQ_EXPR, lhs, rhs, false);
4549       break;
4550 
4551     case EQ_EXPR:
4552       record_cond (bb, LE_EXPR, lhs, rhs, true);
4553       record_cond (bb, GE_EXPR, lhs, rhs, true);
4554       record_cond (bb, LT_EXPR, lhs, rhs, false);
4555       record_cond (bb, GT_EXPR, lhs, rhs, false);
4556       break;
4557 
4558     default:
4559       break;
4560     }
4561 }
4562 
4563 /* Restore expressions and values derived from conditionals.  */
4564 
4565 void
4566 sccvn_dom_walker::after_dom_children (basic_block bb)
4567 {
4568   while (!cond_stack.is_empty ()
4569 	 && cond_stack.last ().first == bb)
4570     {
4571       vn_nary_op_t cond = cond_stack.last ().second.first;
4572       vn_nary_op_t old = cond_stack.last ().second.second;
4573       current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4574       if (old)
4575 	vn_nary_op_insert_into (old, current_info->nary, false);
4576       cond_stack.pop ();
4577     }
4578 }
4579 
4580 /* Value number all statements in BB.  */
4581 
4582 edge
4583 sccvn_dom_walker::before_dom_children (basic_block bb)
4584 {
4585   edge e;
4586   edge_iterator ei;
4587 
4588   if (fail)
4589     return NULL;
4590 
4591   if (dump_file && (dump_flags & TDF_DETAILS))
4592     fprintf (dump_file, "Visiting BB %d\n", bb->index);
4593 
4594   /* If we have a single predecessor record the equivalence from a
4595      possible condition on the predecessor edge.  */
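
  /* Illustrative sketch: if BB is reached only through the true edge of

       if (x_1 == 42)

     then x_1 == 42 is temporarily recorded as true (and the inverted
     condition as false) for BB and the blocks it dominates.  */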
4596   edge pred_e = NULL;
4597   FOR_EACH_EDGE (e, ei, bb->preds)
4598     {
4599       /* Ignore simple backedges into this block so that conditions can
4600          still be recorded in loop headers.  */
4601       if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4602 	continue;
4603       if (! pred_e)
4604 	pred_e = e;
4605       else
4606 	{
4607 	  pred_e = NULL;
4608 	  break;
4609 	}
4610     }
4611   if (pred_e)
4612     {
4613       /* Check if there are multiple executable successor edges in
4614 	 the source block.  Otherwise there is no additional info
4615 	 to be recorded.  */
4616       edge e2;
4617       FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4618 	if (e2 != pred_e
4619 	    && e2->flags & EDGE_EXECUTABLE)
4620 	  break;
4621       if (e2 && (e2->flags & EDGE_EXECUTABLE))
4622 	{
4623 	  gimple *stmt = last_stmt (pred_e->src);
4624 	  if (stmt
4625 	      && gimple_code (stmt) == GIMPLE_COND)
4626 	    {
4627 	      enum tree_code code = gimple_cond_code (stmt);
4628 	      tree lhs = gimple_cond_lhs (stmt);
4629 	      tree rhs = gimple_cond_rhs (stmt);
4630 	      record_conds (bb, code, lhs, rhs,
4631 			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4632 	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
4633 	      if (code != ERROR_MARK)
4634 		record_conds (bb, code, lhs, rhs,
4635 			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4636 	    }
4637 	}
4638     }
4639 
4640   /* Value-number all defs in the basic-block.  */
4641   for (gphi_iterator gsi = gsi_start_phis (bb);
4642        !gsi_end_p (gsi); gsi_next (&gsi))
4643     {
4644       gphi *phi = gsi.phi ();
4645       tree res = PHI_RESULT (phi);
4646       if (!VN_INFO (res)->visited
4647 	  && !DFS (res))
4648 	{
4649 	  fail = true;
4650 	  return NULL;
4651 	}
4652     }
4653   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4654        !gsi_end_p (gsi); gsi_next (&gsi))
4655     {
4656       ssa_op_iter i;
4657       tree op;
4658       FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4659 	if (!VN_INFO (op)->visited
4660 	    && !DFS (op))
4661 	  {
4662 	    fail = true;
4663 	    return NULL;
4664 	  }
4665     }
4666 
4667   /* Finally look at the last stmt.  */
4668   gimple *stmt = last_stmt (bb);
4669   if (!stmt)
4670     return NULL;
4671 
4672   enum gimple_code code = gimple_code (stmt);
4673   if (code != GIMPLE_COND
4674       && code != GIMPLE_SWITCH
4675       && code != GIMPLE_GOTO)
4676     return NULL;
4677 
4678   if (dump_file && (dump_flags & TDF_DETAILS))
4679     {
4680       fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4681       print_gimple_stmt (dump_file, stmt, 0, 0);
4682     }
4683 
4684   /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4685      if value-numbering can prove they are not reachable.  Handling
4686      computed gotos is also possible.  */
4687   tree val;
4688   switch (code)
4689     {
4690     case GIMPLE_COND:
4691       {
4692 	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4693 	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4694 	val = gimple_simplify (gimple_cond_code (stmt),
4695 			       boolean_type_node, lhs, rhs,
4696 			       NULL, vn_valueize);
4697 	/* If that didn't simplify to a constant see if we have recorded
4698 	   temporary expressions from taken edges.  */
4699 	if (!val || TREE_CODE (val) != INTEGER_CST)
4700 	  {
4701 	    tree ops[2];
4702 	    ops[0] = lhs;
4703 	    ops[1] = rhs;
4704 	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4705 					    boolean_type_node, ops, NULL);
4706 	  }
4707 	break;
4708       }
4709     case GIMPLE_SWITCH:
4710       val = gimple_switch_index (as_a <gswitch *> (stmt));
4711       break;
4712     case GIMPLE_GOTO:
4713       val = gimple_goto_dest (stmt);
4714       break;
4715     default:
4716       gcc_unreachable ();
4717     }
4718   if (!val)
4719     return NULL;
4720 
4721   edge taken = find_taken_edge (bb, vn_valueize (val));
4722   if (!taken)
4723     return NULL;
4724 
4725   if (dump_file && (dump_flags & TDF_DETAILS))
4726     fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4727 	     "not executable\n", bb->index, bb->index, taken->dest->index);
4728 
4729   return taken;
4730 }
4731 
4732 /* Do SCCVN.  Returns true if it finished, false if we bailed out
4733    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4734    how we use the alias oracle walking during the VN process.  */
4735 
4736 bool
4737 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4738 {
4739   size_t i;
4740 
4741   default_vn_walk_kind = default_vn_walk_kind_;
4742 
4743   init_scc_vn ();
4744 
4745   /* Collect pointers we know point to readonly memory.  */
4746   const_parms = BITMAP_ALLOC (NULL);
4747   tree fnspec = lookup_attribute ("fn spec",
4748 				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4749   if (fnspec)
4750     {
4751       fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4752       i = 1;
4753       for (tree arg = DECL_ARGUMENTS (cfun->decl);
4754 	   arg; arg = DECL_CHAIN (arg), ++i)
4755 	{
4756 	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4757 	    break;
4758 	  if (TREE_STRING_POINTER (fnspec)[i]  == 'R'
4759 	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
4760 	    {
4761 	      tree name = ssa_default_def (cfun, arg);
4762 	      if (name)
4763 		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
4764 	    }
4765 	}
4766     }
4767 
4768   /* Walk all blocks in dominator order, value-numbering stmts
4769      SSA defs and decide whether outgoing edges are not executable.  */
4770   sccvn_dom_walker walker;
4771   walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4772   if (walker.fail)
4773     {
4774       scc_vn_restore_ssa_info ();
4775       free_scc_vn ();
4776       return false;
4777     }
4778 
4779   /* Initialize the value ids and prune out remaining VN_TOPs
4780      from dead code.  */
4781   for (i = 1; i < num_ssa_names; ++i)
4782     {
4783       tree name = ssa_name (i);
4784       vn_ssa_aux_t info;
4785       if (!name)
4786 	continue;
4787       info = VN_INFO (name);
4788       if (!info->visited)
4789 	info->valnum = name;
4790       if (info->valnum == name
4791 	  || info->valnum == VN_TOP)
4792 	info->value_id = get_next_value_id ();
4793       else if (is_gimple_min_invariant (info->valnum))
4794 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
4795     }
4796 
4797   /* Propagate.  */
4798   for (i = 1; i < num_ssa_names; ++i)
4799     {
4800       tree name = ssa_name (i);
4801       vn_ssa_aux_t info;
4802       if (!name)
4803 	continue;
4804       info = VN_INFO (name);
4805       if (TREE_CODE (info->valnum) == SSA_NAME
4806 	  && info->valnum != name
4807 	  && info->value_id != VN_INFO (info->valnum)->value_id)
4808 	info->value_id = VN_INFO (info->valnum)->value_id;
4809     }
4810 
4811   set_hashtable_value_ids ();
4812 
4813   if (dump_file && (dump_flags & TDF_DETAILS))
4814     {
4815       fprintf (dump_file, "Value numbers:\n");
4816       for (i = 0; i < num_ssa_names; i++)
4817 	{
4818 	  tree name = ssa_name (i);
4819 	  if (name
4820 	      && VN_INFO (name)->visited
4821 	      && SSA_VAL (name) != name)
4822 	    {
4823 	      print_generic_expr (dump_file, name, 0);
4824 	      fprintf (dump_file, " = ");
4825 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
4826 	      fprintf (dump_file, "\n");
4827 	    }
4828 	}
4829     }
4830 
4831   return true;
4832 }
4833 
4834 /* Return the maximum value id we have ever seen.  */
4835 
4836 unsigned int
4837 get_max_value_id (void)
4838 {
4839   return next_value_id;
4840 }
4841 
4842 /* Return the next unique value id.  */
4843 
4844 unsigned int
4845 get_next_value_id (void)
4846 {
4847   return next_value_id++;
4848 }
4849 
4850 
4851 /* Compare two expressions E1 and E2 and return true if they are equal.  */
4852 
4853 bool
4854 expressions_equal_p (tree e1, tree e2)
4855 {
4856   /* The obvious case.  */
4857   if (e1 == e2)
4858     return true;
4859 
4860   /* If either one is VN_TOP consider them equal.  */
4861   if (e1 == VN_TOP || e2 == VN_TOP)
4862     return true;
4863 
4864   /* If only one of them is null, they cannot be equal.  */
4865   if (!e1 || !e2)
4866     return false;
4867 
4868   /* Now perform the actual comparison.  */
4869   if (TREE_CODE (e1) == TREE_CODE (e2)
4870       && operand_equal_p (e1, e2, OEP_PURE_SAME))
4871     return true;
4872 
4873   return false;
4874 }
4875 
4876 
4877 /* Return true if the nary operation NARY may trap.  This is a copy
4878    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4879 
4880 bool
4881 vn_nary_may_trap (vn_nary_op_t nary)
4882 {
4883   tree type;
4884   tree rhs2 = NULL_TREE;
4885   bool honor_nans = false;
4886   bool honor_snans = false;
4887   bool fp_operation = false;
4888   bool honor_trapv = false;
4889   bool handled, ret;
4890   unsigned i;
4891 
4892   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4893       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4894       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4895     {
4896       type = nary->type;
4897       fp_operation = FLOAT_TYPE_P (type);
4898       if (fp_operation)
4899 	{
4900 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4901 	  honor_snans = flag_signaling_nans != 0;
4902 	}
4903       else if (INTEGRAL_TYPE_P (type)
4904 	       && TYPE_OVERFLOW_TRAPS (type))
4905 	honor_trapv = true;
4906     }
4907   if (nary->length >= 2)
4908     rhs2 = nary->op[1];
4909   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4910 				       honor_trapv,
4911 				       honor_nans, honor_snans, rhs2,
4912 				       &handled);
4913   if (handled
4914       && ret)
4915     return true;
4916 
4917   for (i = 0; i < nary->length; ++i)
4918     if (tree_could_trap_p (nary->op[i]))
4919       return true;
4920 
4921   return false;
4922 }
4923