/* SCC value numbering for trees
   Copyright (C) 2006-2014 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "hash-table.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "alloc-pool.h"
#include "flags.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight-line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).
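
   For example (an illustrative fragment, not taken from the paper),
   in the loop-carried names

     i_1 = PHI <0, i_3>
     j_2 = PHI <0, j_4>
     i_3 = i_1 + 1
     j_4 = j_2 + 1

   the four names form one SCC.  Optimistically assuming i_1 == j_2
   lets the two additions receive the same value number, and
   re-iterating the SCC confirms the assumption; a single pessimistic
   pass over this cycle could not prove i_3 == j_4.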

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
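
   For instance (an illustrative GIMPLE fragment), given

     # .MEM_2 = VDEF <.MEM_1>
     *p_3 = x_4;
     ...
     # VUSE <.MEM_2>
     tem_5 = *p_3;

   the load uses the memory state .MEM_2 that the store produced, so
   the load can be value numbered to x_4.
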
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/


/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : typed_noop_remove <vn_nary_op_s>
{
  typedef vn_nary_op_s value_type;
  typedef vn_nary_op_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Return the computed hashcode for nary operation VNO1.  */

inline hashval_t
vn_nary_op_hasher::hash (const value_type *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const value_type *vno1, const compare_type *vno2)
{
  return vn_nary_op_eq (vno1, vno2);
}

typedef hash_table <vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher
{
  typedef vn_phi_s value_type;
  typedef vn_phi_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for phi operation VP1.  */

inline hashval_t
vn_phi_hasher::hash (const value_type *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const value_type *vp1, const compare_type *vp2)
{
  return vn_phi_eq (vp1, vp2);
}

/* Free a phi operation structure PHI.  */

inline void
vn_phi_hasher::remove (value_type *phi)
{
  phi->phiargs.release ();
}

typedef hash_table <vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher
{
  typedef vn_reference_s value_type;
  typedef vn_reference_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the hashcode for a given reference operation VR1.  */

inline hashval_t
vn_reference_hasher::hash (const value_type *vr1)
{
  return vr1->hashcode;
}

/* Compare the reference operations V and C for equality.  */

inline bool
vn_reference_hasher::equal (const value_type *v, const compare_type *c)
{
  return vn_reference_eq (v, c);
}

/* Free the reference operation V.  */

inline void
vn_reference_hasher::remove (value_type *v)
{
  free_reference (v);
}

typedef hash_table <vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;


/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type nary;
  vn_phi_table_type phis;
  vn_reference_table_type references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : typed_free_remove <vn_constant_s>
{
  typedef vn_constant_s value_type;
  typedef vn_constant_s compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const value_type *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const value_type *vc1, const compare_type *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table <vn_constant_hasher> constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
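
/* As an illustrative example (hypothetical SSA names): if a_1 has
   value number b_2 and b_2 is defined by

     b_2 = c_3 + d_4;

   then vn_get_expr_for (a_1) builds, caches and returns the GENERIC
   expression c_3 + d_4, which callers can use for further
   simplification.  */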

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
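
/* For example (hypothetical statements):

     x_1 = y_2 + 1;        -> VN_NARY
     x_1 = MEM[p_2];       -> VN_REFERENCE
     x_1 = PHI <a_2, b_3>  -> VN_PHI
     x_1 = 42;             -> VN_CONSTANT

   The REALPART_EXPR/IMAGPART_EXPR/VIEW_CONVERT_EXPR/BIT_FIELD_REF
   special case above lets VOP-less reference codes on SSA names be
   treated as VN_NARY.  */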

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id.find_slot_with_hash (&vc, vc.hashcode, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
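
/* To illustrate the offset handling above (a hypothetical example):
   for a reference like a.b.c where .b and .c sit at known constant
   offsets, the individual operand records contribute one summed
   offset to the hash rather than being hashed one by one, so the
   reference hashes the same as an equivalent MEM_REF access with the
   combined constant offset.  */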

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
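
/* As an illustrative consequence of the offset-based comparison above,
   the operand vectors for the hypothetical pair

     MEM[(struct S *)&s + 4]    and    s.f

   compare equal when the field f sits at byte offset 4 and the types
   agree: both sides sum their constant offsets before any
   operand-by-operand comparison takes place.  */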

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = tree_to_double_int (this_offset)
			+ tree_to_double_int (bit_offset)
			.rshift (BITS_PER_UNIT == 8
				   ? 3 : exact_log2 (BITS_PER_UNIT));
		    if (off.fits_shwi ()
			/* Prohibit value-numbering zero offset components
			   of addresses the same before the pass folding
			   __builtin_object_size had a chance to run
			   (checking cfun->after_inlining does the
			   trick here).  */
			&& (TREE_CODE (orig) != ADDR_EXPR
			    || !off.is_zero ()
			    || cfun->after_inlining))
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off += -tree_to_double_int (temp.op1);
	      off *= tree_to_double_int (temp.op2);
	      if (off.fits_shwi ())
		temp.off = off.low;
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
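
/* For example (an illustrative decomposition), the reference a.b[i_1]
   is copied as roughly the following operand records, outermost first:

     { ARRAY_REF, op0 = i_1, op1 = lower bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL b }
     { MEM_REF, op0 = 0 }
     { ADDR_EXPR, op0 = &a }

   where the decl a was canonicalized to MEM[&a] as described above.  */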

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (size_tree))
	size = -1;
      else
	size = tree_to_uhwi (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_to_shwi (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !tree_fits_uhwi_p (DECL_FIELD_OFFSET (field)))
	      max_size = -1;
	    else
	      {
		offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!tree_fits_shwi_p (op->op0)
	      || !tree_fits_shwi_p (op->op1)
	      || !tree_fits_shwi_p (op->op2))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
	      hindex -= tree_to_shwi (op->op1);
	      hindex *= tree_to_shwi (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
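
/* A short illustrative example: for operands describing MEM[p_1] of a
   32-bit integer type, the ao_ref is initialized with base MEM[p_1],
   offset 0 and size/max_size 32, which suffices for the alias oracle
   even though no GENERIC tree for the full reference exists
   (ref->ref stays NULL_TREE).  */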

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If two calls have a different non-SSA LHS, vdef value numbers should be
     different.  By adding the LHS here to the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
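
/* For example (an illustrative call with a non-SSA aggregate LHS),
   res = foo (a_1, *p_2) pushes a MODIFY_EXPR record for res, then a
   CALL_EXPR record carrying the callee and any static chain, then the
   operand records of each argument, chained as for any other
   reference.  */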

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
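
/* An illustrative example: for operands describing MEM[&s.f, 8] where
   the field f sits at byte offset 4 in s, the ADDR_EXPR operand is
   rewritten to &s and the preceding MEM_REF offset becomes 12, i.e.
   the access is canonicalized to MEM[&s, 12].  */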

/* Fold the address computation feeding the MEM_REF preceding position
   *I_P in a vn_reference_op_s vector *OPS: if the SSA name operand at
   *I_P is defined by an ADDR_EXPR or POINTER_PLUS_EXPR, fold its
   constant offset into the MEM_REF offset.  Updates *I_P to point to
   the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
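
/* An illustrative example: with p_1 = q_2 + 16 and operands describing
   MEM[p_1, 4], the POINTER_PLUS_EXPR is folded into the constant
   offset, yielding MEM[q_2, 20]; ADDR_EXPR definitions are handled
   analogously and may enable vn_reference_fold_indirect above via the
   recursion at the end.  */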

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && tree_int_cst_sgn (op->op0) >= 0
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
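
/* An illustrative example of the string case above: a character-mode
   read of "abc"[1] is folded to the constant 'b'; similarly a builtin
   call with at least one constant argument may fold through
   build_call_expr.  */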

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off += -tree_to_double_int (vro->op1);
	  off *= tree_to_double_int (vro->op2);
	  if (off.fits_shwi ())
	    vro->off = off.low;
	}
    }

  return orig;
}
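
/* An illustrative example: if i_1 has value number 2, the operand
   record for a[i_1] is rewritten in place to a[2], and since the
   recorded lower bound and element size are constants, the record now
   gets a known constant offset as well.  */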

/* Valueize the reference operands in ORIG, discarding the flag that
   says whether anything was changed.  */

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = current_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->references.find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					        va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }
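
  /* For instance (illustrative): a memset (&a, 0, sizeof (a)) followed
     by a load of a.i lets the load be value numbered to zero, provided
     the memset covers the whole extent being read.  */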

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }
  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
		         (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
	{
	  tree base2;
	  HOST_WIDE_INT offset2, size2, maxsize2, off;
	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &maxsize2);
	  off = offset - offset2;
	  if (maxsize2 != -1
	      && maxsize2 == size2
	      && operand_equal_p (base, base2, 0)
	      && offset2 <= offset
	      && offset2 + size2 >= offset + maxsize)
	    {
	      tree val = NULL_TREE;
	      HOST_WIDE_INT elsz
		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
		{
		  if (off == 0)
		    val = gimple_assign_rhs1 (def_stmt2);
		  else if (off == elsz)
		    val = gimple_assign_rhs2 (def_stmt2);
		}
	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
		       && off % elsz == 0)
		{
		  tree ctor = gimple_assign_rhs1 (def_stmt2);
		  unsigned i = off / elsz;
		  if (i < CONSTRUCTOR_NELTS (ctor))
		    {
		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
			{
			  if (TREE_CODE (TREE_TYPE (elt->value))
			      != VECTOR_TYPE)
			    val = elt->value;
			}
		    }
		}
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
		         (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }
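  /* E.g. for a complex store followed by a component load (sketch):

       c_2 = COMPLEX_EXPR <r_3, i_4>;
       x = c_2;
       ...
       tem_5 = __imag x;

     OFF equals the element size for the imaginary part, so tem_5
     value-numbers to i_4; the CONSTRUCTOR case analogously extracts a
     single (non-vector) element from a vector store.  */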

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ???  The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && lhs_ops[0].off != -1
	  && (lhs_ops[0].off == vr->operands[i].off))
	i--, j--;

      /* i now points to the first additional op.
	 ???  LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow (i + 1 + rhs.length ());
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = vNULL;
	}
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
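  /* The net effect for an aggregate copy (sketch):

       b = a;
       ...
       tem_1 = b.f;

     continues the walk as if the lookup had been for a.f, so an
     earlier store to a.f can provide the value.  */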

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += tree_to_uhwi (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references.create (0);
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF at the
	 corresponding byte offset from the memcpy source pointer.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
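  /* Likewise for the memcpy case (sketch):

       memcpy (&b, &a, sizeof (b));
       ...
       tem_1 = b.f;

     is continued as a lookup of a MEM_REF at the corresponding byte
     offset from the source pointer &a.  */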

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	vr1.operands.release ();
    }

  if (*vnresult)
     return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	vr1.operands.release ();
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}


/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
						       INSERT);

  /* Because we look up stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = current_info->references.find_slot_with_hash (vr1, vr1->hashcode,
						       INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

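  /* After the canonicalization above, commutative expressions such as
     a_1 + b_2 and b_2 + a_1 compute the same hash and later compare
     equal in vn_nary_op_eq.  */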
  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = current_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->nary.find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->result;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = table.find_slot_with_hash (vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  result += vn_hash_type (type);
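  /* E.g. two PHIs <1, 2> in the same block, one of type int and one
     of type long, would otherwise be indistinguishable by their
     arguments alone.  */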

  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
	{
	  tree phi2op = vp2->phiargs[i];
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}

static vec<tree> shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  vn_phi_s **slot;
  struct vn_phi_s vp1;
  unsigned i;

  shared_lookup_phiargs.truncate (0);

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs.safe_push (def);
    }
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = current_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = valid_info->phis.find_slot_with_hash (&vp1, vp1.hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  vec<tree> args = vNULL;

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args.safe_push (def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = current_info->phis.find_slot_with_hash (vp1, vp1->hashcode, INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);
  HOST_WIDE_INT toff, coff;

  if (from != to)
    {
      if (currval == from)
	{
	  /* Once a name has gone VARYING (its value number is itself)
	     it must stay there; moving back up the lattice could cause
	     oscillation.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from, 0);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to, 0);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  /* The only things we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* ???  For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && coff == toff))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
    }
}

/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* The copy may have a more interesting constant-filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
  VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;

  /* And finally valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, &vnresult);

  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	{
	  changed |= set_ssa_val_to (lhs, vnresult->result);

	  if (VN_INFO (vnresult->result)->has_constants)
	    VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      vn_reference_s **slot;
      vn_reference_t vr2;
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef;
      slot = current_info->references.find_slot_with_hash (vr2, vr2->hashcode,
							   INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  default_vn_walk_kind, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
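  /* E.g. for a union accessed once as int and once as float (sketch):

       u.i = i_1;
       tem_2 = u.f;

     the offset/size based lookup finds i_1 and tem_2 is then
     value-numbered to VIEW_CONVERT_EXPR <float> (i_1).  */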
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result)
	{
	  result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
				       "vntemp");
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}


/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* First we want to look up using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
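  /* For example (sketch of the virtual operands):

       # .MEM_3 = VDEF <.MEM_2>
       a.f = x_1;
       ...
       # .MEM_5 = VDEF <.MEM_3>
       a.f = x_1;

     The second store is redundant, so .MEM_5 gets the value number
     of .MEM_3 and the store can later be removed.  */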

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
      if (vnresult)
	{
	  VN_INFO (vdef)->use_processed = true;
	  return set_ssa_val_to (vdef, vnresult->result_vdef);
	}
    }

  if (!result || !resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	{
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_insert (assign, lhs, vuse, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
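  /* This is where optimistic value numbering helps: for

       i_3 = PHI <0(entry), i_7(latch)>

     with i_7 still VN_TOP (not yet visited in this SCC iteration),
     the PHI optimistically gets the value 0, to be confirmed or
     discarded on the next iteration (sketch).  */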
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	{
	  sameval = def;
	}
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    return set_ssa_val_to (PHI_RESULT (phi), sameval);

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    changed = set_ssa_val_to (PHI_RESULT (phi), result);
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  tree tem;

  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_TERNARY_RHS:
      tem = gimple_assign_rhs3 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
	tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))
	return true;
      /* Fallthru.  */

    case GIMPLE_BINARY_RHS:
      tem = gimple_assign_rhs2 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
	tem = SSA_VAL (tem);
      if (is_gimple_min_invariant (tem))
	return true;
      /* Fallthru.  */

    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case GIMPLE_UNARY_RHS:
      tem = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (tem) == SSA_NAME)
	tem = SSA_VAL (tem);
      return is_gimple_min_invariant (tem);

    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
      TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
      /* Fallthru.  */
    case tcc_unary:
      TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
      break;
    default:;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME
      && (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (code) == tcc_comparison
	  || code == COMPLEX_EXPR))
    op0 = valueize_expr (vn_get_expr_for (op0));

  op1 = vn_valueize (op1);
  if (TREE_CODE (op1) == SSA_NAME
      && (VN_INFO (op1)->has_constants
	  || code == COMPLEX_EXPR))
    op1 = valueize_expr (vn_get_expr_for (op1));

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && tree_fits_uhwi_p (op1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
				    TREE_OPERAND (op0, 0),
				    tree_to_uhwi (op1));
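  /* E.g. a POINTER_PLUS_EXPR of &a and 4 becomes an invariant address
     of the form &MEM[&a, 4] (sketch), which later lookups can match
     directly instead of chasing the pointer SSA definition.  */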
3218 
3219   /* Avoid folding if nothing changed.  */
3220   if (op0 == gimple_assign_rhs1 (stmt)
3221       && op1 == gimple_assign_rhs2 (stmt))
3222     return NULL_TREE;
3223 
3224   fold_defer_overflow_warnings ();
3225 
3226   result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3227   if (result)
3228     STRIP_USELESS_TYPE_CONVERSION (result);
3229 
3230   fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3231 				  stmt, 0);
3232 
3233   /* Make sure result is not a complex expression consisting
3234      of operators of operators (IE (a + b) + (a + c))
3235      Otherwise, we will end up with unbounded expressions if
3236      fold does anything at all.  */
3237   if (result && valid_gimple_rhs_p (result))
3238     return result;
3239 
3240   return NULL_TREE;
3241 }
3242 
3243 /* Simplify the unary expression RHS, and return the result if
3244    simplified. */
3245 
3246 static tree
simplify_unary_expression(gimple stmt)3247 simplify_unary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* We handle some tcc_reference codes here that are all
     GIMPLE_ASSIGN_SINGLE codes.  */
  if (code == REALPART_EXPR
      || code == IMAGPART_EXPR
      || code == VIEW_CONVERT_EXPR
      || code == BIT_FIELD_REF)
    op0 = TREE_OPERAND (op0, 0);

  if (TREE_CODE (op0) != SSA_NAME)
    return NULL_TREE;

  orig_op0 = op0;
  op0 = vn_valueize (op0);
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else if (CONVERT_EXPR_CODE_P (code)
	       || code == REALPART_EXPR
	       || code == IMAGPART_EXPR
	       || code == VIEW_CONVERT_EXPR
	       || code == BIT_FIELD_REF)
	{
	  /* We want to do tree-combining on conversion-like expressions.
	     Make sure we feed only SSA_NAMEs or constants to fold though.  */
	  tree tem = valueize_expr (vn_get_expr_for (op0));
	  if (UNARY_CLASS_P (tem)
	      || BINARY_CLASS_P (tem)
	      || TREE_CODE (tem) == VIEW_CONVERT_EXPR
	      || TREE_CODE (tem) == SSA_NAME
	      || TREE_CODE (tem) == CONSTRUCTOR
	      || is_gimple_min_invariant (tem))
	    op0 = tem;
	}
    }

  /* Avoid folding if nothing changed, but remember the expression.  */
  if (op0 == orig_op0)
    return NULL_TREE;

  if (code == BIT_FIELD_REF)
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
    }
  else
    result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (valid_gimple_rhs_p (result))
	return result;
    }

  return NULL_TREE;
}

/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gimple stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case; there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  /* If that didn't work try combining multiple statements.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Fallthrough for some unary codes that can operate on registers.  */
      if (!(code == REALPART_EXPR
	    || code == IMAGPART_EXPR
	    || code == VIEW_CONVERT_EXPR
	    || code == BIT_FIELD_REF))
	break;
      /* We could do a little more with unary ops, if they expand
	 into binary ops, but it's debatable whether it is worth it.  */
    case tcc_unary:
      return simplify_unary_expression (stmt);

    case tcc_comparison:
    case tcc_binary:
      return simplify_binary_expression (stmt);

    default:
      break;
    }

  return NULL_TREE;
}

/* Visit and value number USE, return true if the value number
   changed.  */

static bool
visit_use (tree use)
{
  bool changed = false;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  mark_use_processed (use);

  gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
  if (dump_file && (dump_flags & TDF_DETAILS)
      && !SSA_NAME_IS_DEFAULT_DEF (use))
    {
      fprintf (dump_file, "Value numbering ");
      print_generic_expr (dump_file, use, 0);
      fprintf (dump_file, " stmt = ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  /* Handle uninitialized uses.  */
  if (SSA_NAME_IS_DEFAULT_DEF (use))
    changed = set_ssa_val_to (use, use);
  else
    {
      if (gimple_code (stmt) == GIMPLE_PHI)
	changed = visit_phi (stmt);
      else if (gimple_has_volatile_ops (stmt))
	changed = defs_to_varying (stmt);
      else if (is_gimple_assign (stmt))
	{
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  tree lhs = gimple_assign_lhs (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  tree simplified;

	  /* Shortcut for copies.  Simplifying copies is pointless,
	     since we copy the expression and value they represent.  */
	  if (code == SSA_NAME
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, rhs1);
	      goto done;
	    }
	  simplified = try_to_simplify (stmt);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "RHS ");
		  print_gimple_expr (dump_file, stmt, 0, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified, 0);
		  if (TREE_CODE (lhs) == SSA_NAME)
		    fprintf (dump_file, " has constants %d\n",
			     expr_has_constants (simplified));
		  else
		    fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified)
	      && TREE_CODE (lhs) == SSA_NAME)
	    {
	      VN_INFO (lhs)->expr = simplified;
	      VN_INFO (lhs)->has_constants = true;
	      changed = set_ssa_val_to (lhs, simplified);
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME
		   && TREE_CODE (lhs) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      goto done;
	    }
	  else if (simplified)
	    {
	      if (TREE_CODE (lhs) == SSA_NAME)
		{
		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
		  /* We have to unshare the expression or else
		     valueizing may change the IL stream.  */
		  VN_INFO (lhs)->expr = unshare_expr (simplified);
		}
	    }
	  else if (stmt_has_constants (stmt)
		   && TREE_CODE (lhs) == SSA_NAME)
	    VN_INFO (lhs)->has_constants = true;
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      /* We reset expr and constantness here because we may
		 have been value numbering optimistically, and
		 iterating.  They may become non-constant in this case,
		 even if they were optimistically constant.  */

	      VN_INFO (lhs)->has_constants = false;
	      VN_INFO (lhs)->expr = NULL_TREE;
	    }

	  if ((TREE_CODE (lhs) == SSA_NAME
	       /* We can substitute SSA_NAMEs that are live over
		  abnormal edges with their constant value.  */
	       && !(gimple_assign_copy_p (stmt)
		    && is_gimple_min_invariant (rhs1))
	       && !(simplified
		    && is_gimple_min_invariant (simplified))
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	      /* Stores or copies from SSA_NAMEs that are live over
		 abnormal edges are a problem.  */
	      || (code == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	    changed = defs_to_varying (stmt);
	  else if (REFERENCE_CLASS_P (lhs)
		   || DECL_P (lhs))
	    changed = visit_reference_op_store (lhs, rhs1, stmt);
	  else if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      if ((gimple_assign_copy_p (stmt)
		   && is_gimple_min_invariant (rhs1))
		  || (simplified
		      && is_gimple_min_invariant (simplified)))
		{
		  VN_INFO (lhs)->has_constants = true;
		  if (simplified)
		    changed = set_ssa_val_to (lhs, simplified);
		  else
		    changed = set_ssa_val_to (lhs, rhs1);
		}
	      else
		{
		  /* First try to lookup the simplified expression.  */
		  if (simplified)
		    {
		      enum gimple_rhs_class rhs_class;

		      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
		      if ((rhs_class == GIMPLE_UNARY_RHS
			   || rhs_class == GIMPLE_BINARY_RHS
			   || rhs_class == GIMPLE_TERNARY_RHS)
			  && valid_gimple_rhs_p (simplified))
			{
			  tree result = vn_nary_op_lookup (simplified, NULL);
			  if (result)
			    {
			      changed = set_ssa_val_to (lhs, result);
			      goto done;
			    }
			}
		    }

		  /* Otherwise visit the original statement.  */
		  switch (vn_get_stmt_kind (stmt))
		    {
		    case VN_NARY:
		      changed = visit_nary_op (lhs, stmt);
		      break;
		    case VN_REFERENCE:
		      changed = visit_reference_op_load (lhs, rhs1, stmt);
		      break;
		    default:
		      changed = defs_to_varying (stmt);
		      break;
		    }
		}
	    }
	  else
	    changed = defs_to_varying (stmt);
	}
      else if (is_gimple_call (stmt))
	{
	  tree lhs = gimple_call_lhs (stmt);

	  /* ???  We could try to simplify calls.  */

	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
	    {
	      if (stmt_has_constants (stmt))
		VN_INFO (lhs)->has_constants = true;
	      else
		{
		  /* We reset expr and constantness here because we may
		     have been value numbering optimistically, and
		     iterating.  They may become non-constant in this case,
		     even if they were optimistically constant.  */
		  VN_INFO (lhs)->has_constants = false;
		  VN_INFO (lhs)->expr = NULL_TREE;
		}

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
		{
		  changed = defs_to_varying (stmt);
		  goto done;
		}
	    }

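	  /* Illustrative example: two const calls t1_2 = f (x_1) and
	     t2_3 = f (x_1) seeing the same incoming vuse receive the
	     same value number via visit_reference_op_call below.  */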
	  if (!gimple_call_internal_p (stmt)
	      && (/* Calls to the same function with the same vuse
		     and the same operands do not necessarily return the same
		     value, unless they're pure or const.  */
		  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
		  /* If calls have a vdef, subsequent calls won't have
		     the same incoming vuse.  So, if two calls with vdef have
		     the same vuse, we know they're not subsequent.
		     We can therefore value number two such calls to the same
		     function with the same vuse and the same operands the
		     same, because there is no code in the program that can
		     compare the two values...  */
		  || (gimple_vdef (stmt)
		      /* ... unless the call returns a pointer which does
			 not alias with anything else.  In which case the
			 information that the values are distinct is encoded
			 in the IL.  */
		      && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
	    changed = visit_reference_op_call (lhs, stmt);
	  else
	    changed = defs_to_varying (stmt);
	}
      else
	changed = defs_to_varying (stmt);
    }
 done:
  return changed;
}

/* Compare two operands by reverse postorder index.  Returns a
   negative, zero or positive value as required by qsort.  */

static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
	  && gimple_code (opstmtb) == GIMPLE_PHI)
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
      else if (gimple_code (opstmta) == GIMPLE_PHI)
	return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
	return 1;
      else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
	return gimple_uid (opstmta) - gimple_uid (opstmtb);
      else
	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
    }
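  /* Definitions in different blocks: order by the blocks' RPO numbers.  */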
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}

/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (vec<tree> scc)
{
  scc.qsort (compare_ops);
}

/* Insert the no-longer-used nary ONARY into the hash tables INFO.  */

static void
copy_nary (vn_nary_op_t onary, vn_tables_t info)
{
  size_t size = sizeof_vn_nary_op (onary->length);
  vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
					       &info->nary_obstack);
  memcpy (nary, onary, size);
  vn_nary_op_insert_into (nary, info->nary, false);
}

/* Insert the no-longer-used phi OPHI into the hash tables INFO.  */

static void
copy_phi (vn_phi_t ophi, vn_tables_t info)
{
  vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
  vn_phi_s **slot;
  memcpy (phi, ophi, sizeof (*phi));
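  /* The phi argument vector is now owned by the copied entry; give the
     old entry a fresh empty vector so the arguments are not freed with it.  */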
  ophi->phiargs.create (0);
  slot = info->phis.find_slot_with_hash (phi, phi->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = phi;
}

/* Insert the no-longer-used reference OREF into the hash tables INFO.  */

static void
copy_reference (vn_reference_t oref, vn_tables_t info)
{
  vn_reference_t ref;
  vn_reference_s **slot;
  ref = (vn_reference_t) pool_alloc (info->references_pool);
  memcpy (ref, oref, sizeof (*ref));
  oref->operands.create (0);
  slot = info->references.find_slot_with_hash (ref, ref->hashcode, INSERT);
  if (*slot)
    free_reference (*slot);
  *slot = ref;
}

/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (vec<tree> scc)
{
  tree var;
  unsigned int i;
  unsigned int iterations = 0;
  bool changed = true;
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t nary;
  vn_phi_t phi;
  vn_reference_t ref;

  /* If the SCC has a single member, just visit it.  */
  if (scc.length () == 1)
    {
      tree use = scc[0];
      if (VN_INFO (use)->use_processed)
	return;
      /* We need to make sure it doesn't form a cycle itself, which can
	 happen for self-referential PHI nodes.  In that case we would
	 end up inserting an expression with VN_TOP operands into the
	 valid table which makes us derive bogus equivalences later.
	 The cheapest way to check this is to assume it for all PHI nodes.  */
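      /* E.g. a degenerate PHI such as
	   x_1 = PHI <x_1 (latch), 0 (entry)>
	 is a single-member SCC that still references itself.  */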
      if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
	/* Fallthru to iteration.  */ ;
      else
	{
	  visit_use (use);
	  return;
	}
    }

  /* Iterate over the SCC with the optimistic table until it stops
     changing.  */
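  /* Sketch of why this converges: visiting a use only moves its value
     down the lattice (VN_TOP, then a constant or SSA name, then
     varying), so each member can change only a bounded number of
     times.  */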
  current_info = optimistic_info;
  while (changed)
    {
      changed = false;
      iterations++;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Starting iteration %d\n", iterations);
      /* As we are value-numbering optimistically we have to
	 clear the expression tables and the simplified expressions
	 in each iteration until we converge.  */
      optimistic_info->nary.empty ();
      optimistic_info->phis.empty ();
      optimistic_info->references.empty ();
      obstack_free (&optimistic_info->nary_obstack, NULL);
      gcc_obstack_init (&optimistic_info->nary_obstack);
      empty_alloc_pool (optimistic_info->phis_pool);
      empty_alloc_pool (optimistic_info->references_pool);
      FOR_EACH_VEC_ELT (scc, i, var)
	VN_INFO (var)->expr = NULL_TREE;
      FOR_EACH_VEC_ELT (scc, i, var)
	changed |= visit_use (var);
    }

  statistics_histogram_event (cfun, "SCC iterations", iterations);

  /* Finally, copy the contents of the no longer used optimistic
     table to the valid table.  */
  FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hin)
    copy_nary (nary, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hip)
    copy_phi (phi, valid_info);
  FOR_EACH_HASH_TABLE_ELEMENT (optimistic_info->references,
			       ref, vn_reference_t, hir)
    copy_reference (ref, valid_info);

  current_info = valid_info;
}


/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  auto_vec<tree> scc;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = sccstack.pop ();

      VN_INFO (x)->on_sccstack = false;
      scc.safe_push (x);
    } while (x != name);

  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
  if (scc.length ()
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
		 "SCC size %u exceeding %u\n", scc.length (),
		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));

      return false;
    }

  if (scc.length () > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  return true;
}

/* Depth first search on NAME to discover and process SCCs in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCCs are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCCs due
   to resource constraints.  */

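/* In recursive form, the walk below is a sketch of the classic Tarjan
   SCC scheme, which the explicit name/iterator stacks emulate to avoid
   deep recursion on large SSA graphs:

     DFS (name):
       dfsnum (name) = low (name) = next_dfs_num++;
       push name onto the SCC stack;
       foreach SSA_NAME use U of name's defining statement:
	 if U is not visited:
	   DFS (U);  low (name) = MIN (low (name), low (U));
	 else if U is on the SCC stack:
	   low (name) = MIN (low (name), dfsnum (U));
       if low (name) == dfsnum (name):
	 pop the SCC ending at name and process it.  */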
static bool
DFS (tree name)
{
  vec<ssa_op_iter> itervec = vNULL;
  vec<tree> namevec = vNULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info.  */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  sccstack.safe_push (name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCCs.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
	 of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
	{
	  /* See if we found an SCC.  */
	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
	    if (!extract_and_process_scc_for_name (name))
	      {
		namevec.release ();
		itervec.release ();
		return false;
	      }

	  /* Check if we are done.  */
	  if (namevec.is_empty ())
	    {
	      namevec.release ();
	      itervec.release ();
	      return true;
	    }

	  /* Restore the last use walker and continue walking there.  */
	  use = name;
	  name = namevec.pop ();
	  memcpy (&iter, &itervec.last (),
		  sizeof (ssa_op_iter));
	  itervec.pop ();
	  goto continue_walking;
	}

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
	 invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
	{
	  if (! (VN_INFO (use)->visited))
	    {
	      /* Recurse by pushing the current use walking state on
		 the stack and starting over.  */
	      itervec.safe_push (iter);
	      namevec.safe_push (name);
	      name = use;
	      goto start_over;

continue_walking:
	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
					 VN_INFO (use)->low);
	    }
	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
	      && VN_INFO (use)->on_sccstack)
	    {
	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
					 VN_INFO (name)->low);
	    }
	}

      usep = op_iter_next_use (&iter);
    }
}

/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis.create (23);
  table->nary.create (23);
  table->references.create (23);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
					sizeof (struct vn_phi_s),
					30);
  table->references_pool = create_alloc_pool ("VN references",
					      sizeof (struct vn_reference_s),
					      30);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  table->phis.dispose ();
  table->nary.dispose ();
  table->references.dispose ();
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}

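/* Initialize SCCVN: the per-SSA-name info, the RPO numbers and the
   valid and optimistic value numbering tables.  */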
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack.create (0);
  constant_to_value_id.create (23);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size; it just
     preallocates the space to do so.  */
  vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs.create (0);
  shared_lookup_references.create (0);
  rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
  rpo_numbers_temp =
    XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
     means that the i'th block in RPO order is bb.  We want to map bb's
     to RPO numbers, so we need to invert this array.  */
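  /* E.g. with rpo_numbers_temp = { 5, 2, 3 }, rpo_numbers[2] becomes 1.  */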
  for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
	{
	  VN_INFO_GET (name)->valnum = VN_TOP;
	  VN_INFO (name)->expr = NULL_TREE;
	  VN_INFO (name)->value_id = 0;
	}
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}

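/* Free the data structures allocated by init_scc_vn.  */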
void
free_scc_vn (void)
{
  size_t i;

  constant_to_value_id.dispose ();
  BITMAP_FREE (constant_value_ids);
  shared_lookup_phiargs.release ();
  shared_lookup_references.release ();
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->needs_insertion)
	release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  vn_ssa_aux_table.release ();

  sccstack.release ();
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}

/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->nary, vno, vn_nary_op_t, hin)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (valid_info->references, vr, vn_reference_t, hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
   how we use the alias oracle walking during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
	VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
	  && VN_INFO (name)->visited == false
	  && !has_zero_uses (name))
	if (!DFS (name))
	  {
	    free_scc_vn ();
	    return false;
	  }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate value ids along SSA name value chains.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
	continue;
      info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  if (name
	      && VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}


/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}