/* SCC value numbering for trees
   Copyright (C) 2006-2013 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
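
/* As a small worked example (purely illustrative, not used by the
   code below), consider the SSA cycle

     i_1 = PHI <0 (entry), i_2 (latch)>
     i_2 = i_1 + 0;

   The SCC walk visits {i_1, i_2} as a single component and iterates
   it using the optimistic table: assuming i_1 == 0 makes i_2 == 0,
   which in turn confirms the assumption for i_1, so both names end
   up value numbered to 0.  Alternative 1 above would reach the same
   fixpoint, just with more hashtable churn.  */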

/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;



/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
	   || code == IMAGPART_EXPR
	   || code == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
				      0)) == SSA_NAME)
	expr = fold_build1 (code,
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
	  && TREE_CODE
	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
	expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
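
/* For example (illustrative), given

     y_2 = x_1 + 1;
     z_3 = y_2;

   once z_3 value numbers to y_2, vn_get_expr_for (z_3) rebuilds the
   expression "x_1 + 1" from the defining statement of y_2 and caches
   it in the vn_ssa_aux entry for later folding.  */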

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
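
/* E.g. (illustrative) "x_1 = y_2 + z_3" is VN_NARY, "x_1 = a.b" is
   VN_REFERENCE, "x_1 = PHI <...>" is VN_PHI, "x_1 = 1" is VN_CONSTANT
   and "x_1 = &a" is VN_CONSTANT if the address is invariant but
   VN_REFERENCE otherwise.  */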

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  phi->phiargs.release ();
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  vr->operands.release ();
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
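
/* Note all occurrences of equal constants thus share one value id;
   e.g. (illustrative) every use of the integer constant 42 with a
   compatible type maps to a single id for which value_id_constant_p
   below holds.  */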

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (vro->off != -1)
	{
	  if (off == -1)
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (off != -1
	      && off != 0)
	    result = iterative_hash_hashval_t (off, result);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
		  result = iterative_hash_expr (op, result);
		}
	    }
	  else
	    result = vn_reference_op_compute_hash (vro, result);
	}
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
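
/* Note the ADDR_EXPR/MEM_REF special-casing above makes e.g.
   (illustrative) the operand list for MEM[&a_decl] hash like a direct
   access to a_decl, and runs of known constant offsets are folded
   into a single hashed value, so equivalent references spelled
   differently still collide in the table; vn_reference_eq below
   mirrors this canonicalization.  */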

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  if (vro1->off == -1)
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  if (vro2->off == -1)
	    break;
	  off2 += vro2->off;
	}
      if (off1 != off2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->safe_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& TREE_CODE (this_offset) == INTEGER_CST)
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    double_int off
		      = tree_to_double_int (this_offset)
			+ tree_to_double_int (bit_offset)
			  .arshift (BITS_PER_UNIT == 8
				    ? 3 : exact_log2 (BITS_PER_UNIT),
				    HOST_BITS_PER_DOUBLE_INT);
		    if (off.fits_shwi ())
		      temp.off = off.low;
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  /* Always record lower bounds and element size.  */
	  temp.op1 = array_ref_low_bound (ref);
	  temp.op2 = array_ref_element_size (ref);
	  if (TREE_CODE (temp.op0) == INTEGER_CST
	      && TREE_CODE (temp.op1) == INTEGER_CST
	      && TREE_CODE (temp.op2) == INTEGER_CST)
	    {
	      double_int off = tree_to_double_int (temp.op0);
	      off += -tree_to_double_int (temp.op1);
	      off *= tree_to_double_int (temp.op2);
	      if (off.fits_shwi ())
		temp.off = off.low;
	    }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build_fold_addr_expr (ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
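
/* For example (illustrative), the reference a.b[i_1] for a VAR_DECL
   'a' is decomposed outermost-first into roughly

     { ARRAY_REF <op0 = i_1>, COMPONENT_REF <op0 = FIELD_DECL b>,
       MEM_REF <op0 = 0>, ADDR_EXPR <op0 = &a> }

   where the trailing MEM_REF/ADDR_EXPR pair is the canonical
   MEM[&decl] form the VAR_DECL case above produces for the base.  */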

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
	size = -1;
      else
	size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (pop->off == -1)
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += tree_low_cst (op->op1, 0);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */

	    if (op->op1
		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
	      max_size = -1;
	    else
	      {
		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
			   * BITS_PER_UNIT);
		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!host_integerp (op->op0, 0)
	      || !host_integerp (op->op1, 0)
	      || !host_integerp (op->op2, 0))
	    max_size = -1;
	  else
	    {
	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
	      hindex -= TREE_INT_CST_LOW (op->op1);
	      hindex *= TREE_INT_CST_LOW (op->op2);
	      hindex *= BITS_PER_UNIT;
	      offset += hindex;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vNULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
	mem_op->off = -1;
    }
}
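
/* For example (illustrative), if s.f lives at byte offset 4 within s,
   the op pair for MEM[&s.f, 8] is rewritten to that of MEM[&s, 12]:
   the component offset migrates into the constant offset of the
   preceding MEM_REF operand.  */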

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF)
	return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || TREE_CODE (ptroff) != INTEGER_CST)
	return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
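
/* For example (illustrative), given

     ptr_2 = ptr_1 + 8;
     ... MEM[ptr_2, 4] ...

   the POINTER_PLUS_EXPR case above rewrites the ops to those of
   MEM[ptr_1, 12] and then recurses in case ptr_1 is itself defined
   by an address computation.  */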

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
	   && TREE_CODE (op->op0) == INTEGER_CST
	   && integer_zerop (op->op1)
	   && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
	  && (TYPE_MODE (op->type)
	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
	  && tree_int_cst_sgn (op->op0) >= 0
	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
	return build_int_cst_type (op->type,
				   (TREE_STRING_POINTER (arg0->op0)
				    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
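
/* For example (illustrative), a load of "abc"[1] - an ARRAY_REF with
   constant index over a STRING_CST - simplifies to the character
   constant 'b', and a builtin call whose arguments valueized to
   constants may fold to a constant via build_call_expr above.  */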

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && vro->off == -1
	       && TREE_CODE (vro->op0) == INTEGER_CST
	       && TREE_CODE (vro->op1) == INTEGER_CST
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  double_int off = tree_to_double_int (vro->op0);
	  off += -tree_to_double_int (vro->op1);
	  off *= tree_to_double_int (vro->op2);
	  if (off.fits_shwi ())
	    vro->off = off.low;
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;
  /* Fixup vuse and hash.  The hashcode of a reference adds in the
     SSA version of its vuse as a plain additive term (see the end of
     vn_reference_compute_hash), so it can be adjusted incrementally
     here when rewinding the vuse to an earlier dominating one.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
				   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					        va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
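
/* The def_stmt cases in vn_reference_lookup_3 below use this to record
   derived values, e.g. (illustrative) "the bytes of *p_1 read after
   memset (p_1, 0, n) are zero", so an identical later lookup hits the
   hashtable directly instead of walking the VUSE chain again.  */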

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vNULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
	{
	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
						      get_alias_set (lhs),
						      TREE_TYPE (lhs), lhs_ops);
	  if (lhs_ref_ok
	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
	    return NULL;
	}
      else
	{
	  ao_ref_init (&lhs_ref, lhs);
	  lhs_ref_ok = true;
	}
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
	  && maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }
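
  /* For example (illustrative), a load of a[2] after
     memset (&a, 0, sizeof (a)) is fully covered by the store range
     above and yields build_zero_cst of the load's type.  */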

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  tree val = build_zero_cst (vr->type);
	  return vn_reference_lookup_or_insert_for_pieces
	           (vuse, vr->set, vr->type, vr->operands, val);
	}
    }

  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   && ref->size == maxsize
	   && maxsize % BITS_PER_UNIT == 0
	   && offset % BITS_PER_UNIT == 0
	   && is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
	  && maxsize2 == size2
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && offset2 <= offset
	  && offset2 + size2 >= offset + maxsize)
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer));
	  if (len > 0)
	    {
	      tree val = native_interpret_expr (vr->type,
						buffer
						+ ((offset - offset2)
						   / BITS_PER_UNIT),
						ref->size / BITS_PER_UNIT);
	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
		         (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }

  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
1593   else if (ref->size == maxsize
1594 	   && is_gimple_reg_type (vr->type)
1595 	   && gimple_assign_single_p (def_stmt)
1596 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1597     {
1598       tree rhs1 = gimple_assign_rhs1 (def_stmt);
1599       gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1600       if (is_gimple_assign (def_stmt2)
1601 	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1602 	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1603 	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1604 	{
1605 	  tree base2;
1606 	  HOST_WIDE_INT offset2, size2, maxsize2, off;
1607 	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1608 					   &offset2, &size2, &maxsize2);
1609 	  off = offset - offset2;
1610 	  if (maxsize2 != -1
1611 	      && maxsize2 == size2
1612 	      && operand_equal_p (base, base2, 0)
1613 	      && offset2 <= offset
1614 	      && offset2 + size2 >= offset + maxsize)
1615 	    {
1616 	      tree val = NULL_TREE;
1617 	      HOST_WIDE_INT elsz
1618 		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1619 	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1620 		{
1621 		  if (off == 0)
1622 		    val = gimple_assign_rhs1 (def_stmt2);
1623 		  else if (off == elsz)
1624 		    val = gimple_assign_rhs2 (def_stmt2);
1625 		}
1626 	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1627 		       && off % elsz == 0)
1628 		{
1629 		  tree ctor = gimple_assign_rhs1 (def_stmt2);
1630 		  unsigned i = off / elsz;
1631 		  if (i < CONSTRUCTOR_NELTS (ctor))
1632 		    {
1633 		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1634 		      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
1635 			{
1636 			  if (TREE_CODE (TREE_TYPE (elt->value))
1637 			      != VECTOR_TYPE)
1638 			    val = elt->value;
1639 			}
1640 		    }
1641 		}
1642 	      if (val)
1643 		return vn_reference_lookup_or_insert_for_pieces
1644 		         (vuse, vr->set, vr->type, vr->operands, val);
1645 	    }
1646 	}
1647     }
1648 
1649   /* 5) For aggregate copies translate the reference through them if
1650      the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s> rhs = vNULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
	  || (base != base2 && !operand_equal_p (base, base2, 0))
	  || offset2 > offset
	  || offset2 + size2 < offset + maxsize)
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ???  The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && lhs_ops[0].off != -1
	  && (lhs_ops[0].off == vr->operands[i].off))
	i--, j--;

      /* i now points to the first additional op.
	 ???  LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Now rewrite REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to prepend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow (i + 1 + rhs.length ());
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references = vNULL;
	}
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
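  /* A sketch of the hypothetical situation this handles:

	 memcpy (&b, &a, sizeof (b));
	 ... = b.x;

     the load from b.x is translated into a MEM_REF based on &a at the
     corresponding byte offset, provided the copy fully covers the
     access.  */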
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
	  || offset % BITS_PER_UNIT != 0
	  || ref->size % BITS_PER_UNIT != 0)
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && host_integerp (TREE_OPERAND (tem, 1), 1))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
	    }
	  else if (DECL_P (tem))
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the reference have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
	   && !DECL_P (base))
	  || (TREE_CODE (base) == MEM_REF
	      && (TREE_OPERAND (base, 0) != lhs
		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
	  || (DECL_P (base)
	      && (TREE_CODE (lhs) != ADDR_EXPR
		  || TREE_OPERAND (lhs, 0) != base)))
	return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references
	      && vr->operands != old)
	    shared_lookup_references.create (0);
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	vr1.operands.release ();
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
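
/* A minimal usage sketch (hypothetical caller, error handling elided):

     vn_reference_t vnresult;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt), set, type,
					    operands, &vnresult, VN_WALK);

   Note that OPERANDS is copied into a shared scratch vector and
   valueized there, so the caller's vector is left untouched.  */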

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
	vr1.operands.release ();
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
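
/* A minimal usage sketch (hypothetical; OP is some memory reference
   tree):

     vn_reference_t vnresult = NULL;
     tree val = vn_reference_lookup (op, gimple_vuse (stmt),
				     VN_WALKREWRITE, &vnresult);

   With VN_WALKREWRITE the lookup may walk and translate through
   intervening stores; with VN_NOWALK only the hash tables are
   consulted.  */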

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}
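
/* Note that the operand canonicalization above makes commutative
   operations hash (and later compare) equal regardless of operand
   order; e.g. for the hypothetical statements

     x_1 = a_2 + b_3;
     y_4 = b_3 + a_2;

   both right-hand sides receive the same hashcode and land in the
   same hash table slot.  */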

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}

/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}

/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}

/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}

/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  void **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}

/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
					       &current_info->nary_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->result = result;

  return vno1;
}

/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
{
  void **slot;

  if (compute_hash)
    vno->hashcode = vn_nary_op_compute_hash (vno);

  slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vno;
  return vno;
}

/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, current_info->nary, true);
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  result += vn_hash_type (type);

  FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
    {
      if (phi1op == VN_TOP)
	continue;
      result = iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (vp1->type, vp2->type))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
	{
	  tree phi2op = vp2->phiargs[i];
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
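
/* For example, two PHIs in the same block such as the hypothetical

     x_1 = PHI <a_2(3), b_4(5)>;
     y_6 = PHI <a_2(3), b_4(5)>;

   compare equal and thus receive the same value number, while VN_TOP
   arguments act as wildcards that match any argument on the
   corresponding edge.  */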

static vec<tree> shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  shared_lookup_phiargs.truncate (0);

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      shared_lookup_phiargs.safe_push (def);
    }
  vp1.type = TREE_TYPE (gimple_phi_result (phi));
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  vec<tree> args = vNULL;

  /* Canonicalize the SSA_NAMEs to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      args.safe_push (def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, vec<tree> scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of:");
  FOR_EACH_VEC_ELT (scc, i, var)
    {
      fprintf (out, " ");
      print_generic_expr (out, var, 0);
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval = SSA_VAL (from);

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from, 0);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to, 0);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  /* The only things we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      VN_INFO (from)->valnum = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
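
/* The value lattice this maintains goes from VN_TOP (not yet visited)
   through constants and SSA names down to VARYING, which is
   represented by an SSA name being its own value.  As the early-out
   above shows, once FROM has become VARYING it is never changed back:
   a name optimistically numbered to the constant 1 in one SCC
   iteration may fall to VARYING in the next, but not vice versa.  */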

/* Mark as processed all the definitions in the defining stmt of USE, or
   the USE itself.  */

static void
mark_use_processed (tree use)
{
  ssa_op_iter iter;
  def_operand_p defp;
  gimple stmt = SSA_NAME_DEF_STMT (use);

  if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
    {
      VN_INFO (use)->use_processed = true;
      return;
    }

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
    }
}

/* Set the value number of all definitions in STMT to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant-filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = valueize_shared_reference_ops_from_call (stmt);
  vr1.type = gimple_expr_type (stmt);
  vr1.set = 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  vn_reference_lookup_1 (&vr1, &vnresult);

  if (vnresult)
    {
      if (vnresult->result_vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	{
	  changed |= set_ssa_val_to (lhs, vnresult->result);

	  if (VN_INFO (vnresult->result)->has_constants)
	    VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuse = vr1.vuse;
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL);
  last_vuse_ptr = NULL;

  /* If we have a VCE, try looking up its operand as it might be stored in
     a different type.  */
  if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
    result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
				  default_vn_walk_kind, NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result)
	{
	  result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
				       "vntemp");
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}


/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree result, assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);

  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
      if (vnresult)
	{
	  VN_INFO (vdef)->use_processed = true;
	  return set_ssa_val_to (vdef, vnresult->result_vdef);
	}
    }

  if (!result || !resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
      vn_reference_insert (assign, lhs, vuse, vdef);
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
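
/* As an example of the redundant-store detection above, in the
   hypothetical sequence

     *p_1 = x_2;
     ...no intervening aliasing store...
     *p_1 = x_2;

   the second store finds the first via the vuse lookup, RESULTSAME is
   true, and its vdef is value numbered to its vuse, recording that the
   store does not change memory so later phases can remove it.  */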

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	sameval = def;
      else if (!expressions_equal_p (def, sameval))
	{
	  allsame = false;
	  break;
	}
    }

  /* If all arguments value numbered to the same value, the phi node
     has that value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}
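
/* E.g. for the hypothetical PHI

     x_1 = PHI <y_2(3), y_2(4)>

   all arguments value number to y_2, so x_1 becomes a copy of y_2
   without consulting the PHI hash table; the hash table only comes
   into play for PHIs whose arguments differ.  */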

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
  return false;
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_TERNARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMEs in EXPR with their value numbers, and return the
   result.  This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
      TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
      /* Fallthru.  */
    case tcc_unary:
      TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
      break;
    default:;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
  if (TREE_CODE (op0) == SSA_NAME)
    {
      if (VN_INFO (op0)->has_constants
	  || TREE_CODE_CLASS (code) == tcc_comparison
	  || code == COMPLEX_EXPR)
	op0 = valueize_expr (vn_get_expr_for (op0));
      else
	op0 = vn_valueize (op0);
    }

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (VN_INFO (op1)->has_constants
	  || code == COMPLEX_EXPR)
	op1 = valueize_expr (vn_get_expr_for (op1));
      else
	op1 = vn_valueize (op1);
    }

  /* Pointer plus constant can be represented as invariant address.
     Do so to allow further propagation, see also tree forwprop.  */
  if (code == POINTER_PLUS_EXPR
      && host_integerp (op1, 1)
      && TREE_CODE (op0) == ADDR_EXPR
      && is_gimple_min_invariant (op0))
    return build_invariant_address (TREE_TYPE (op0),
				    TREE_OPERAND (op0, 0),
				    TREE_INT_CST_LOW (op1));

  /* Avoid folding if nothing changed.  */
  if (op0 == gimple_assign_rhs1 (stmt)
      && op1 == gimple_assign_rhs2 (stmt))
    return NULL_TREE;

  fold_defer_overflow_warnings ();

  result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
  if (result)
    STRIP_USELESS_TYPE_CONVERSION (result);

  fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
				  stmt, 0);

  /* Make sure the result is not a complex expression consisting
     of operators of operators (i.e. (a + b) + (a + c)).
     Otherwise, we will end up with unbounded expressions if
     fold does anything at all.  */
  if (result && valid_gimple_rhs_p (result))
    return result;

  return NULL_TREE;
}
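
/* A sketch of the constant combining this enables (hypothetical):

     a_1 = b_2 + 1;
     c_3 = a_1 + 2;

   when value numbering c_3, the recorded expression b_2 + 1 is
   substituted for a_1 and fold_binary may produce b_2 + 3, which then
   receives a value number of its own.  */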
3178 
3179 /* Simplify the unary expression RHS, and return the result if
3180    simplified. */
3181 
3182 static tree
simplify_unary_expression(gimple stmt)3183 simplify_unary_expression (gimple stmt)
3184 {
3185   tree result = NULL_TREE;
3186   tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3187   enum tree_code code = gimple_assign_rhs_code (stmt);
3188 
3189   /* We handle some tcc_reference codes here that are all
3190      GIMPLE_ASSIGN_SINGLE codes.  */
3191   if (code == REALPART_EXPR
3192       || code == IMAGPART_EXPR
3193       || code == VIEW_CONVERT_EXPR
3194       || code == BIT_FIELD_REF)
3195     op0 = TREE_OPERAND (op0, 0);
3196 
3197   if (TREE_CODE (op0) != SSA_NAME)
3198     return NULL_TREE;
3199 
3200   orig_op0 = op0;
3201   if (VN_INFO (op0)->has_constants)
3202     op0 = valueize_expr (vn_get_expr_for (op0));
3203   else if (CONVERT_EXPR_CODE_P (code)
3204 	   || code == REALPART_EXPR
3205 	   || code == IMAGPART_EXPR
3206 	   || code == VIEW_CONVERT_EXPR
3207 	   || code == BIT_FIELD_REF)
3208     {
3209       /* We want to do tree-combining on conversion-like expressions.
3210          Make sure we feed only SSA_NAMEs or constants to fold though.  */
3211       tree tem = valueize_expr (vn_get_expr_for (op0));
3212       if (UNARY_CLASS_P (tem)
3213 	  || BINARY_CLASS_P (tem)
3214 	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3215 	  || TREE_CODE (tem) == SSA_NAME
3216 	  || TREE_CODE (tem) == CONSTRUCTOR
3217 	  || is_gimple_min_invariant (tem))
3218 	op0 = tem;
3219     }
3220 
3221   /* Avoid folding if nothing changed, but remember the expression.  */
3222   if (op0 == orig_op0)
3223     return NULL_TREE;
3224 
3225   if (code == BIT_FIELD_REF)
3226     {
3227       tree rhs = gimple_assign_rhs1 (stmt);
3228       result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3229 			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3230     }
3231   else
3232     result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3233   if (result)
3234     {
3235       STRIP_USELESS_TYPE_CONVERSION (result);
3236       if (valid_gimple_rhs_p (result))
3237         return result;
3238     }
3239 
3240   return NULL_TREE;
3241 }
3242 
3243 /* Try to simplify RHS using equivalences and constant folding.  */
3244 
3245 static tree
try_to_simplify(gimple stmt)3246 try_to_simplify (gimple stmt)
3247 {
3248   enum tree_code code = gimple_assign_rhs_code (stmt);
3249   tree tem;
3250 
3251   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
3252      in this case, there is no point in doing extra work.  */
3253   if (code == SSA_NAME)
3254     return NULL_TREE;
3255 
3256   /* First try constant folding based on our current lattice.  */
3257   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3258   if (tem
3259       && (TREE_CODE (tem) == SSA_NAME
3260 	  || is_gimple_min_invariant (tem)))
3261     return tem;
3262 
3263   /* If that didn't work try combining multiple statements.  */
3264   switch (TREE_CODE_CLASS (code))
3265     {
3266     case tcc_reference:
3267       /* Fallthrough for some unary codes that can operate on registers.  */
3268       if (!(code == REALPART_EXPR
3269 	    || code == IMAGPART_EXPR
3270 	    || code == VIEW_CONVERT_EXPR
3271 	    || code == BIT_FIELD_REF))
3272 	break;
3273       /* We could do a little more with unary ops, if they expand
3274 	 into binary ops, but it's debatable whether it is worth it. */
3275     case tcc_unary:
3276       return simplify_unary_expression (stmt);
3277 
3278     case tcc_comparison:
3279     case tcc_binary:
3280       return simplify_binary_expression (stmt);
3281 
3282     default:
3283       break;
3284     }
3285 
3286   return NULL_TREE;
3287 }
3288 
3289 /* Visit and value number USE, return true if the value number
3290    changed. */
3291 
3292 static bool
visit_use(tree use)3293 visit_use (tree use)
3294 {
3295   bool changed = false;
3296   gimple stmt = SSA_NAME_DEF_STMT (use);
3297 
3298   mark_use_processed (use);
3299 
3300   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3301   if (dump_file && (dump_flags & TDF_DETAILS)
3302       && !SSA_NAME_IS_DEFAULT_DEF (use))
3303     {
3304       fprintf (dump_file, "Value numbering ");
3305       print_generic_expr (dump_file, use, 0);
3306       fprintf (dump_file, " stmt = ");
3307       print_gimple_stmt (dump_file, stmt, 0, 0);
3308     }
3309 
3310   /* Handle uninitialized uses.  */
3311   if (SSA_NAME_IS_DEFAULT_DEF (use))
3312     changed = set_ssa_val_to (use, use);
3313   else
3314     {
3315       if (gimple_code (stmt) == GIMPLE_PHI)
3316 	changed = visit_phi (stmt);
3317       else if (gimple_has_volatile_ops (stmt))
3318 	changed = defs_to_varying (stmt);
3319       else if (is_gimple_assign (stmt))
3320 	{
3321 	  enum tree_code code = gimple_assign_rhs_code (stmt);
3322 	  tree lhs = gimple_assign_lhs (stmt);
3323 	  tree rhs1 = gimple_assign_rhs1 (stmt);
3324 	  tree simplified;
3325 
3326 	  /* Shortcut for copies.  Simplifying copies is pointless,
3327 	     since we copy the expression and value they represent.  */
3328 	  if (code == SSA_NAME
3329 	      && TREE_CODE (lhs) == SSA_NAME)
3330 	    {
3331 	      changed = visit_copy (lhs, rhs1);
3332 	      goto done;
3333 	    }
3334 	  simplified = try_to_simplify (stmt);
3335 	  if (simplified)
3336 	    {
3337 	      if (dump_file && (dump_flags & TDF_DETAILS))
3338 		{
3339 		  fprintf (dump_file, "RHS ");
3340 		  print_gimple_expr (dump_file, stmt, 0, 0);
3341 		  fprintf (dump_file, " simplified to ");
3342 		  print_generic_expr (dump_file, simplified, 0);
3343 		  if (TREE_CODE (lhs) == SSA_NAME)
3344 		    fprintf (dump_file, " has constants %d\n",
3345 			     expr_has_constants (simplified));
3346 		  else
3347 		    fprintf (dump_file, "\n");
3348 		}
3349 	    }
3350 	  /* Setting value numbers to constants will occasionally
3351 	     screw up phi congruence because constants are not
3352 	     uniquely associated with a single ssa name that can be
3353 	     looked up.  */
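	  /* (Schematic illustration: if x_1 and y_2 both receive the
	     value number 0, "the SSA name whose value is 0" is no
	     longer unique, which is what can confuse PHI congruence.)  */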
3354 	  if (simplified
3355 	      && is_gimple_min_invariant (simplified)
3356 	      && TREE_CODE (lhs) == SSA_NAME)
3357 	    {
3358 	      VN_INFO (lhs)->expr = simplified;
3359 	      VN_INFO (lhs)->has_constants = true;
3360 	      changed = set_ssa_val_to (lhs, simplified);
3361 	      goto done;
3362 	    }
3363 	  else if (simplified
3364 		   && TREE_CODE (simplified) == SSA_NAME
3365 		   && TREE_CODE (lhs) == SSA_NAME)
3366 	    {
3367 	      changed = visit_copy (lhs, simplified);
3368 	      goto done;
3369 	    }
3370 	  else if (simplified)
3371 	    {
3372 	      if (TREE_CODE (lhs) == SSA_NAME)
3373 		{
3374 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3375 		  /* We have to unshare the expression or else
3376 		     valueizing may change the IL stream.  */
3377 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
3378 		}
3379 	    }
3380 	  else if (stmt_has_constants (stmt)
3381 		   && TREE_CODE (lhs) == SSA_NAME)
3382 	    VN_INFO (lhs)->has_constants = true;
3383 	  else if (TREE_CODE (lhs) == SSA_NAME)
3384 	    {
3385 	      /* We reset expr and constantness here because we may
3386 		 have been value numbering optimistically, and
3387 		 iterating.  They may become non-constant in this case,
3388 		 even if they were optimistically constant.  */
3389 
3390 	      VN_INFO (lhs)->has_constants = false;
3391 	      VN_INFO (lhs)->expr = NULL_TREE;
3392 	    }
3393 
3394 	  if ((TREE_CODE (lhs) == SSA_NAME
3395 	       /* We can substitute SSA_NAMEs that are live over
3396 		  abnormal edges with their constant value.  */
3397 	       && !(gimple_assign_copy_p (stmt)
3398 		    && is_gimple_min_invariant (rhs1))
3399 	       && !(simplified
3400 		    && is_gimple_min_invariant (simplified))
3401 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3402 	      /* Stores or copies from SSA_NAMEs that are live over
3403 		 abnormal edges are a problem.  */
3404 	      || (code == SSA_NAME
3405 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3406 	    changed = defs_to_varying (stmt);
3407 	  else if (REFERENCE_CLASS_P (lhs)
3408 		   || DECL_P (lhs))
3409 	    changed = visit_reference_op_store (lhs, rhs1, stmt);
3410 	  else if (TREE_CODE (lhs) == SSA_NAME)
3411 	    {
3412 	      if ((gimple_assign_copy_p (stmt)
3413 		   && is_gimple_min_invariant (rhs1))
3414 		  || (simplified
3415 		      && is_gimple_min_invariant (simplified)))
3416 		{
3417 		  VN_INFO (lhs)->has_constants = true;
3418 		  if (simplified)
3419 		    changed = set_ssa_val_to (lhs, simplified);
3420 		  else
3421 		    changed = set_ssa_val_to (lhs, rhs1);
3422 		}
3423 	      else
3424 		{
3425 		  /* First try to lookup the simplified expression.  */
3426 		  if (simplified)
3427 		    {
3428 		      enum gimple_rhs_class rhs_class;
3429 
3430 
3431 		      rhs_class = get_gimple_rhs_class (TREE_CODE (simplified));
3432 		      if ((rhs_class == GIMPLE_UNARY_RHS
3433 			   || rhs_class == GIMPLE_BINARY_RHS
3434 			   || rhs_class == GIMPLE_TERNARY_RHS)
3435 			  && valid_gimple_rhs_p (simplified))
3436 			{
3437 			  tree result = vn_nary_op_lookup (simplified, NULL);
3438 			  if (result)
3439 			    {
3440 			      changed = set_ssa_val_to (lhs, result);
3441 			      goto done;
3442 			    }
3443 			}
3444 		    }
3445 
3446 		  /* Otherwise visit the original statement.  */
3447 		  switch (vn_get_stmt_kind (stmt))
3448 		    {
3449 		    case VN_NARY:
3450 		      changed = visit_nary_op (lhs, stmt);
3451 		      break;
3452 		    case VN_REFERENCE:
3453 		      changed = visit_reference_op_load (lhs, rhs1, stmt);
3454 		      break;
3455 		    default:
3456 		      changed = defs_to_varying (stmt);
3457 		      break;
3458 		    }
3459 		}
3460 	    }
3461 	  else
3462 	    changed = defs_to_varying (stmt);
3463 	}
3464       else if (is_gimple_call (stmt))
3465 	{
3466 	  tree lhs = gimple_call_lhs (stmt);
3467 
3468 	  /* ???  We could try to simplify calls.  */
3469 
3470 	  if (lhs && TREE_CODE (lhs) == SSA_NAME)
3471 	    {
3472 	      if (stmt_has_constants (stmt))
3473 		VN_INFO (lhs)->has_constants = true;
3474 	      else
3475 		{
3476 		  /* We reset expr and constantness here because we may
3477 		     have been value numbering optimistically, and
3478 		     iterating.  They may become non-constant in this case,
3479 		     even if they were optimistically constant.  */
3480 		  VN_INFO (lhs)->has_constants = false;
3481 		  VN_INFO (lhs)->expr = NULL_TREE;
3482 		}
3483 
3484 	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3485 		{
3486 		  changed = defs_to_varying (stmt);
3487 		  goto done;
3488 		}
3489 	    }
3490 
3491 	  if (!gimple_call_internal_p (stmt)
3492 	      && (/* Calls to the same function with the same vuse
3493 		     and the same operands do not necessarily return the same
3494 		     value, unless they're pure or const.  */
3495 		  gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3496 	  /* If calls have a vdef, subsequent calls won't have
3497 	     the same incoming vuse.  So, if two calls with vdef have the
3498 	     same vuse, we know they're not subsequent.
3499 	     We can give two such calls to the same function with the
3500 	     same vuse and the same operands the same value number,
3501 	     because there is no code in the program that could
3502 	     compare the two values...  */
3503 		  || (gimple_vdef (stmt)
3504 	      /* ... unless the call returns a pointer which does
3505 	         not alias with anything else.  In that case the
3506 		 information that the values are distinct is encoded
3507 		 in the IL.  */
3508 		      && !(gimple_call_return_flags (stmt) & ERF_NOALIAS))))
3509 	    changed = visit_reference_op_call (lhs, stmt);
3510 	  else
3511 	    changed = defs_to_varying (stmt);
3512 	}
3513       else
3514 	changed = defs_to_varying (stmt);
3515     }
3516  done:
3517   return changed;
3518 }
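
/* In outline, visit_use dispatches as follows (a summary of the code
   above, not a separate algorithm):

     default definition -> value number is the name itself
     PHI                -> visit_phi
     volatile operands  -> defs_to_varying
     assignment         -> try_to_simplify, then visit_nary_op or
                           visit_reference_op_{load,store} as appropriate
     call               -> visit_reference_op_call when pure/const or
                           when its vdef keeps vuses distinguishable
     everything else    -> defs_to_varying  */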
3519 
3520 /* Compare two operands by reverse postorder index.  */
3521 
3522 static int
3523 compare_ops (const void *pa, const void *pb)
3524 {
3525   const tree opa = *((const tree *)pa);
3526   const tree opb = *((const tree *)pb);
3527   gimple opstmta = SSA_NAME_DEF_STMT (opa);
3528   gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3529   basic_block bba;
3530   basic_block bbb;
3531 
3532   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3533     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3534   else if (gimple_nop_p (opstmta))
3535     return -1;
3536   else if (gimple_nop_p (opstmtb))
3537     return 1;
3538 
3539   bba = gimple_bb (opstmta);
3540   bbb = gimple_bb (opstmtb);
3541 
3542   if (!bba && !bbb)
3543     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3544   else if (!bba)
3545     return -1;
3546   else if (!bbb)
3547     return 1;
3548 
3549   if (bba == bbb)
3550     {
3551       if (gimple_code (opstmta) == GIMPLE_PHI
3552 	  && gimple_code (opstmtb) == GIMPLE_PHI)
3553 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3554       else if (gimple_code (opstmta) == GIMPLE_PHI)
3555 	return -1;
3556       else if (gimple_code (opstmtb) == GIMPLE_PHI)
3557 	return 1;
3558       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3559         return gimple_uid (opstmta) - gimple_uid (opstmtb);
3560       else
3561 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3562     }
3563   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3564 }
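
/* For example (schematic): with rpo_numbers[bb2->index] == 0 and
   rpo_numbers[bb3->index] == 1, any name defined in bb2 sorts before
   one defined in bb3; within a single block PHI results sort first,
   then ordinary statements by their uids.  */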
3565 
3566 /* Sort an array containing members of a strongly connected component
3567    SCC so that the members are ordered by RPO number.
3568    This means that when the sort is complete, iterating through the
3569    array will give you the members in RPO order.  */
3570 
3571 static void
3572 sort_scc (vec<tree> scc)
3573 {
3574   scc.qsort (compare_ops);
3575 }
3576 
3577 /* Insert the no longer used nary ONARY into the hash tables in INFO.  */
3578 
3579 static void
3580 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3581 {
3582   size_t size = sizeof_vn_nary_op (onary->length);
3583   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3584 					       &info->nary_obstack);
3585   memcpy (nary, onary, size);
3586   vn_nary_op_insert_into (nary, info->nary, false);
3587 }
3588 
3589 /* Insert the no longer used phi OPHI into the hash tables in INFO.  */
3590 
3591 static void
3592 copy_phi (vn_phi_t ophi, vn_tables_t info)
3593 {
3594   vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3595   void **slot;
3596   memcpy (phi, ophi, sizeof (*phi));
3597   ophi->phiargs.create (0);
3598   slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3599   gcc_assert (!*slot);
3600   *slot = phi;
3601 }
3602 
3603 /* Insert the no longer used reference OREF into the hash tables in INFO.  */
3604 
3605 static void
3606 copy_reference (vn_reference_t oref, vn_tables_t info)
3607 {
3608   vn_reference_t ref;
3609   void **slot;
3610   ref = (vn_reference_t) pool_alloc (info->references_pool);
3611   memcpy (ref, oref, sizeof (*ref));
3612   oref->operands.create (0);
3613   slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3614 				   INSERT);
3615   if (*slot)
3616     free_reference (*slot);
3617   *slot = ref;
3618 }
3619 
3620 /* Process a strongly connected component in the SSA graph.  */
3621 
3622 static void
3623 process_scc (vec<tree> scc)
3624 {
3625   tree var;
3626   unsigned int i;
3627   unsigned int iterations = 0;
3628   bool changed = true;
3629   htab_iterator hi;
3630   vn_nary_op_t nary;
3631   vn_phi_t phi;
3632   vn_reference_t ref;
3633 
3634   /* If the SCC has a single member, just visit it.  */
3635   if (scc.length () == 1)
3636     {
3637       tree use = scc[0];
3638       if (VN_INFO (use)->use_processed)
3639 	return;
3640       /* We need to make sure it doesn't form a cycle itself, which can
3641 	 happen for self-referential PHI nodes.  In that case we would
3642 	 end up inserting an expression with VN_TOP operands into the
3643 	 valid table which makes us derive bogus equivalences later.
3644 	 The cheapest way to check this is to assume it for all PHI nodes.  */
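      /* Schematically, such a self-referential PHI looks like
	   x_1 = PHI <x_1(latch), 0(preheader)>
	 where x_1 appears among its own arguments.  */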
3645       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3646 	/* Fallthru to iteration.  */ ;
3647       else
3648 	{
3649 	  visit_use (use);
3650 	  return;
3651 	}
3652     }
3653 
3654   /* Iterate over the SCC with the optimistic table until it stops
3655      changing.  */
3656   current_info = optimistic_info;
3657   while (changed)
3658     {
3659       changed = false;
3660       iterations++;
3661       if (dump_file && (dump_flags & TDF_DETAILS))
3662 	fprintf (dump_file, "Starting iteration %d\n", iterations);
3663       /* As we are value-numbering optimistically we have to
3664 	 clear the expression tables and the simplified expressions
3665 	 in each iteration until we converge.  */
3666       htab_empty (optimistic_info->nary);
3667       htab_empty (optimistic_info->phis);
3668       htab_empty (optimistic_info->references);
3669       obstack_free (&optimistic_info->nary_obstack, NULL);
3670       gcc_obstack_init (&optimistic_info->nary_obstack);
3671       empty_alloc_pool (optimistic_info->phis_pool);
3672       empty_alloc_pool (optimistic_info->references_pool);
3673       FOR_EACH_VEC_ELT (scc, i, var)
3674 	VN_INFO (var)->expr = NULL_TREE;
3675       FOR_EACH_VEC_ELT (scc, i, var)
3676 	changed |= visit_use (var);
3677     }
3678 
3679   statistics_histogram_event (cfun, "SCC iterations", iterations);
3680 
3681   /* Finally, copy the contents of the no longer used optimistic
3682      table to the valid table.  */
3683   FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3684     copy_nary (nary, valid_info);
3685   FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3686     copy_phi (phi, valid_info);
3687   FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3688     copy_reference (ref, valid_info);
3689 
3690   current_info = valid_info;
3691 }
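
/* Schematic view of the iteration above (pseudocode, not a separate
   implementation):

     do
       {
	 clear the optimistic tables and the members' cached expressions;
	 for each member of the SCC in RPO order:
	   changed |= visit_use (member);
       }
     while (changed);
     copy the surviving optimistic entries into the valid tables;  */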
3692 
3693 
3694 /* Pop the components of the found SCC for NAME off the SCC stack
3695    and process them.  Returns true if all went well, false if
3696    we run into resource limits.  */
3697 
3698 static bool
3699 extract_and_process_scc_for_name (tree name)
3700 {
3701   vec<tree> scc = vNULL;
3702   tree x;
3703 
3704   /* Found an SCC, pop the components off the SCC stack and
3705      process them.  */
3706   do
3707     {
3708       x = sccstack.pop ();
3709 
3710       VN_INFO (x)->on_sccstack = false;
3711       scc.safe_push (x);
3712     } while (x != name);
3713 
3714   /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
3715   if (scc.length ()
3716       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3717     {
3718       if (dump_file)
3719 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3720 		 "SCC size %u exceeding %u\n", scc.length (),
3721 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3722 
3723       scc.release ();
3724       return false;
3725     }
3726 
3727   if (scc.length () > 1)
3728     sort_scc (scc);
3729 
3730   if (dump_file && (dump_flags & TDF_DETAILS))
3731     print_scc (dump_file, scc);
3732 
3733   process_scc (scc);
3734 
3735   scc.release ();
3736 
3737   return true;
3738 }
3739 
3740 /* Depth-first search on NAME to discover and process SCCs in the SSA
3741    graph.
3742    Execution of this algorithm relies on the fact that the SCCs are
3743    popped off the stack in topological order.
3744    Returns true if successful, false if we stopped processing SCCs due
3745    to resource constraints.  */
3746 
3747 static bool
3748 DFS (tree name)
3749 {
3750   vec<ssa_op_iter> itervec = vNULL;
3751   vec<tree> namevec = vNULL;
3752   use_operand_p usep = NULL;
3753   gimple defstmt;
3754   tree use;
3755   ssa_op_iter iter;
3756 
3757 start_over:
3758   /* SCC info */
3759   VN_INFO (name)->dfsnum = next_dfs_num++;
3760   VN_INFO (name)->visited = true;
3761   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3762 
3763   sccstack.safe_push (name);
3764   VN_INFO (name)->on_sccstack = true;
3765   defstmt = SSA_NAME_DEF_STMT (name);
3766 
3767   /* Recursively DFS on our operands, looking for SCC's.  */
3768   if (!gimple_nop_p (defstmt))
3769     {
3770       /* Push a new iterator.  */
3771       if (gimple_code (defstmt) == GIMPLE_PHI)
3772 	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3773       else
3774 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3775     }
3776   else
3777     clear_and_done_ssa_iter (&iter);
3778 
3779   while (1)
3780     {
3781       /* If we are done processing uses of a name, go up the stack
3782 	 of iterators and process SCCs as we found them.  */
3783       if (op_iter_done (&iter))
3784 	{
3785 	  /* See if we found an SCC.  */
3786 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3787 	    if (!extract_and_process_scc_for_name (name))
3788 	      {
3789 		namevec.release ();
3790 		itervec.release ();
3791 		return false;
3792 	      }
3793 
3794 	  /* Check if we are done.  */
3795 	  if (namevec.is_empty ())
3796 	    {
3797 	      namevec.release ();
3798 	      itervec.release ();
3799 	      return true;
3800 	    }
3801 
3802 	  /* Restore the last use walker and continue walking there.  */
3803 	  use = name;
3804 	  name = namevec.pop ();
3805 	  memcpy (&iter, &itervec.last (),
3806 		  sizeof (ssa_op_iter));
3807 	  itervec.pop ();
3808 	  goto continue_walking;
3809 	}
3810 
3811       use = USE_FROM_PTR (usep);
3812 
3813       /* Since we handle phi nodes, we will sometimes get
3814 	 invariants in the use expression.  */
3815       if (TREE_CODE (use) == SSA_NAME)
3816 	{
3817 	  if (! (VN_INFO (use)->visited))
3818 	    {
3819 	      /* Recurse by pushing the current use walking state on
3820 		 the stack and starting over.  */
3821 	      itervec.safe_push (iter);
3822 	      namevec.safe_push (name);
3823 	      name = use;
3824 	      goto start_over;
3825 
3826 continue_walking:
3827 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3828 					 VN_INFO (use)->low);
3829 	    }
3830 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3831 	      && VN_INFO (use)->on_sccstack)
3832 	    {
3833 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3834 					 VN_INFO (name)->low);
3835 	    }
3836 	}
3837 
3838       usep = op_iter_next_use (&iter);
3839     }
3840 }
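
/* The loop above is an explicit-stack rendering of Tarjan's SCC
   algorithm; the recursive version it mirrors is, schematically:

     dfs (name):
       name.dfsnum = name.low = next_dfs_num++; push name on sccstack;
       for each SSA_NAME use in name's defining statement:
	 if use is unvisited:
	   dfs (use); name.low = min (name.low, use.low);
	 else if use is on sccstack:
	   name.low = min (name.low, use.dfsnum);
       if name.low == name.dfsnum:
	 pop the stack down to name and process that SCC.  */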
3841 
3842 /* Allocate a value number table.  */
3843 
3844 static void
3845 allocate_vn_table (vn_tables_t table)
3846 {
3847   table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3848   table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3849   table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3850 				   free_reference);
3851 
3852   gcc_obstack_init (&table->nary_obstack);
3853   table->phis_pool = create_alloc_pool ("VN phis",
3854 					sizeof (struct vn_phi_s),
3855 					30);
3856   table->references_pool = create_alloc_pool ("VN references",
3857 					      sizeof (struct vn_reference_s),
3858 					      30);
3859 }
3860 
3861 /* Free a value number table.  */
3862 
3863 static void
3864 free_vn_table (vn_tables_t table)
3865 {
3866   htab_delete (table->phis);
3867   htab_delete (table->nary);
3868   htab_delete (table->references);
3869   obstack_free (&table->nary_obstack, NULL);
3870   free_alloc_pool (table->phis_pool);
3871   free_alloc_pool (table->references_pool);
3872 }
3873 
3874 static void
3875 init_scc_vn (void)
3876 {
3877   size_t i;
3878   int j;
3879   int *rpo_numbers_temp;
3880 
3881   calculate_dominance_info (CDI_DOMINATORS);
3882   sccstack.create (0);
3883   constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3884 				  free);
3885 
3886   constant_value_ids = BITMAP_ALLOC (NULL);
3887 
3888   next_dfs_num = 1;
3889   next_value_id = 1;
3890 
3891   vn_ssa_aux_table.create (num_ssa_names + 1);
3892   /* The create above doesn't actually grow the vector to the right
3893      size; it just preallocates the space to do so.  */
3894   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3895   gcc_obstack_init (&vn_ssa_aux_obstack);
3896 
3897   shared_lookup_phiargs.create (0);
3898   shared_lookup_references.create (0);
3899   rpo_numbers = XNEWVEC (int, last_basic_block);
3900   rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3901   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3902 
3903   /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = b
3904      means that the i'th block in RPO order has index b.  We want to map
3905      block indices to RPO numbers, so we need to invert this array.  */
3906   for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3907     rpo_numbers[rpo_numbers_temp[j]] = j;
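  /* E.g. if rpo_numbers_temp is {2, 4, 3}, block 2 gets RPO number 0,
     block 4 gets 1 and block 3 gets 2 (schematic).  */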
3908 
3909   XDELETE (rpo_numbers_temp);
3910 
3911   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3912 
3913   /* Create the VN_INFO structures, and initialize value numbers to
3914      TOP.  */
3915   for (i = 0; i < num_ssa_names; i++)
3916     {
3917       tree name = ssa_name (i);
3918       if (name)
3919 	{
3920 	  VN_INFO_GET (name)->valnum = VN_TOP;
3921 	  VN_INFO (name)->expr = NULL_TREE;
3922 	  VN_INFO (name)->value_id = 0;
3923 	}
3924     }
3925 
3926   renumber_gimple_stmt_uids ();
3927 
3928   /* Create the valid and optimistic value numbering tables.  */
3929   valid_info = XCNEW (struct vn_tables_s);
3930   allocate_vn_table (valid_info);
3931   optimistic_info = XCNEW (struct vn_tables_s);
3932   allocate_vn_table (optimistic_info);
3933 }
3934 
3935 void
3936 free_scc_vn (void)
3937 {
3938   size_t i;
3939 
3940   htab_delete (constant_to_value_id);
3941   BITMAP_FREE (constant_value_ids);
3942   shared_lookup_phiargs.release ();
3943   shared_lookup_references.release ();
3944   XDELETEVEC (rpo_numbers);
3945 
3946   for (i = 0; i < num_ssa_names; i++)
3947     {
3948       tree name = ssa_name (i);
3949       if (name
3950 	  && VN_INFO (name)->needs_insertion)
3951 	release_ssa_name (name);
3952     }
3953   obstack_free (&vn_ssa_aux_obstack, NULL);
3954   vn_ssa_aux_table.release ();
3955 
3956   sccstack.release ();
3957   free_vn_table (valid_info);
3958   XDELETE (valid_info);
3959   free_vn_table (optimistic_info);
3960   XDELETE (optimistic_info);
3961 }
3962 
3963 /* Set *ID according to RESULT.  */
3964 
3965 static void
3966 set_value_id_for_result (tree result, unsigned int *id)
3967 {
3968   if (result && TREE_CODE (result) == SSA_NAME)
3969     *id = VN_INFO (result)->value_id;
3970   else if (result && is_gimple_min_invariant (result))
3971     *id = get_or_alloc_constant_value_id (result);
3972   else
3973     *id = get_next_value_id ();
3974 }
3975 
3976 /* Set the value ids in the valid hash tables.  */
3977 
3978 static void
3979 set_hashtable_value_ids (void)
3980 {
3981   htab_iterator hi;
3982   vn_nary_op_t vno;
3983   vn_reference_t vr;
3984   vn_phi_t vp;
3985 
3986   /* Now set the value ids of the things we had put in the hash
3987      table.  */
3988 
3989   FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3990 			 vno, vn_nary_op_t, hi)
3991     set_value_id_for_result (vno->result, &vno->value_id);
3992 
3993   FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3994 			 vp, vn_phi_t, hi)
3995     set_value_id_for_result (vp->result, &vp->value_id);
3996 
3997   FOR_EACH_HTAB_ELEMENT (valid_info->references,
3998 			 vr, vn_reference_t, hi)
3999     set_value_id_for_result (vr->result, &vr->value_id);
4000 }
4001 
4002 /* Do SCCVN.  Returns true if it finished, false if we bailed out
4003    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4004    how we use the alias oracle when walking memory during the VN process.  */
4005 
4006 bool
4007 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4008 {
4009   size_t i;
4010   tree param;
4011 
4012   default_vn_walk_kind = default_vn_walk_kind_;
4013 
4014   init_scc_vn ();
4015   current_info = valid_info;
4016 
4017   for (param = DECL_ARGUMENTS (current_function_decl);
4018        param;
4019        param = DECL_CHAIN (param))
4020     {
4021       tree def = ssa_default_def (cfun, param);
4022       if (def)
4023 	VN_INFO (def)->valnum = def;
4024     }
4025 
4026   for (i = 1; i < num_ssa_names; ++i)
4027     {
4028       tree name = ssa_name (i);
4029       if (name
4030 	  && VN_INFO (name)->visited == false
4031 	  && !has_zero_uses (name))
4032 	if (!DFS (name))
4033 	  {
4034 	    free_scc_vn ();
4035 	    return false;
4036 	  }
4037     }
4038 
4039   /* Initialize the value ids.  */
4040 
4041   for (i = 1; i < num_ssa_names; ++i)
4042     {
4043       tree name = ssa_name (i);
4044       vn_ssa_aux_t info;
4045       if (!name)
4046 	continue;
4047       info = VN_INFO (name);
4048       if (info->valnum == name
4049 	  || info->valnum == VN_TOP)
4050 	info->value_id = get_next_value_id ();
4051       else if (is_gimple_min_invariant (info->valnum))
4052 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
4053     }
4054 
4055   /* Propagate.  */
4056   for (i = 1; i < num_ssa_names; ++i)
4057     {
4058       tree name = ssa_name (i);
4059       vn_ssa_aux_t info;
4060       if (!name)
4061 	continue;
4062       info = VN_INFO (name);
4063       if (TREE_CODE (info->valnum) == SSA_NAME
4064 	  && info->valnum != name
4065 	  && info->value_id != VN_INFO (info->valnum)->value_id)
4066 	info->value_id = VN_INFO (info->valnum)->value_id;
4067     }
4068 
4069   set_hashtable_value_ids ();
4070 
4071   if (dump_file && (dump_flags & TDF_DETAILS))
4072     {
4073       fprintf (dump_file, "Value numbers:\n");
4074       for (i = 0; i < num_ssa_names; i++)
4075 	{
4076 	  tree name = ssa_name (i);
4077 	  if (name
4078 	      && VN_INFO (name)->visited
4079 	      && SSA_VAL (name) != name)
4080 	    {
4081 	      print_generic_expr (dump_file, name, 0);
4082 	      fprintf (dump_file, " = ");
4083 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
4084 	      fprintf (dump_file, "\n");
4085 	    }
4086 	}
4087     }
4088 
4089   return true;
4090 }
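
/* A client pass uses this roughly as follows (schematic; PRE/FRE
   follow this pattern):

     if (!run_scc_vn (VN_WALKREWRITE))
       return 0;   // bailed out; the tables were already freed
     ... consult SSA_VAL and the valid tables to remove redundancies ...
     free_scc_vn ();  */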
4091 
4092 /* Return the maximum value id we have ever seen.  */
4093 
4094 unsigned int
4095 get_max_value_id (void)
4096 {
4097   return next_value_id;
4098 }
4099 
4100 /* Return the next unique value id.  */
4101 
4102 unsigned int
4103 get_next_value_id (void)
4104 {
4105   return next_value_id++;
4106 }
4107 
4108 
4109 /* Compare two expressions E1 and E2 and return true if they are equal.  */
4110 
4111 bool
4112 expressions_equal_p (tree e1, tree e2)
4113 {
4114   /* The obvious case.  */
4115   if (e1 == e2)
4116     return true;
4117 
4118   /* If only one of them is null, they cannot be equal.  */
4119   if (!e1 || !e2)
4120     return false;
4121 
4122   /* Now perform the actual comparison.  */
4123   if (TREE_CODE (e1) == TREE_CODE (e2)
4124       && operand_equal_p (e1, e2, OEP_PURE_SAME))
4125     return true;
4126 
4127   return false;
4128 }
4129 
4130 
4131 /* Return true if the nary operation NARY may trap.  This is a copy
4132    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4133 
4134 bool
4135 vn_nary_may_trap (vn_nary_op_t nary)
4136 {
4137   tree type;
4138   tree rhs2 = NULL_TREE;
4139   bool honor_nans = false;
4140   bool honor_snans = false;
4141   bool fp_operation = false;
4142   bool honor_trapv = false;
4143   bool handled, ret;
4144   unsigned i;
4145 
4146   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4147       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4148       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4149     {
4150       type = nary->type;
4151       fp_operation = FLOAT_TYPE_P (type);
4152       if (fp_operation)
4153 	{
4154 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4155 	  honor_snans = flag_signaling_nans != 0;
4156 	}
4157       else if (INTEGRAL_TYPE_P (type)
4158 	       && TYPE_OVERFLOW_TRAPS (type))
4159 	honor_trapv = true;
4160     }
4161   if (nary->length >= 2)
4162     rhs2 = nary->op[1];
4163   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4164 				       honor_trapv,
4165 				       honor_nans, honor_snans, rhs2,
4166 				       &handled);
4167   if (handled
4168       && ret)
4169     return true;
4170 
4171   for (i = 0; i < nary->length; ++i)
4172     if (tree_could_trap_p (nary->op[i]))
4173       return true;
4174 
4175   return false;
4176 }
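
/* For example (schematic consequence of the checks above): a
   TRUNC_DIV_EXPR whose second operand is not a nonzero constant may
   trap, and a floating-point comparison may trap when NaNs are
   honored; either makes vn_nary_may_trap return true.  */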
4177