1 /* SCC value numbering for trees
2    Copyright (C) 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Daniel Berlin <dan@dberlin.org>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
48 
49 /* This algorithm is based on the SCC algorithm presented by Keith
50    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51    (http://citeseer.ist.psu.edu/41805.html).  In
52    straight line code, it is equivalent to a regular hash based value
53    numbering that is performed in reverse postorder.
54 
55    For code with cycles, there are two alternatives, both of which
56    require keeping the hashtables separate from the actual list of
57    value numbers for SSA names.
58 
59    1. Iterate value numbering in an RPO walk of the blocks, removing
60    all the entries from the hashtable after each iteration (but
61    keeping the SSA name->value number mapping between iterations).
62    Iterate until it does not change.
63 
64    2. Perform value numbering as part of an SCC walk on the SSA graph,
65    iterating only the cycles in the SSA graph until they do not change
66    (using a separate, optimistic hashtable for value numbering the SCC
67    operands).
68 
69    The second is not just faster in practice (because most SSA graph
70    cycles do not involve all the variables in the graph), it also has
71    some nice properties.
72 
73    One of these nice properties is that when we pop an SCC off the
74    stack, we are guaranteed to have processed all the operands coming from
75    *outside of that SCC*, so we do not need to do anything special to
76    ensure they have value numbers.
77 
78    Another nice property is that the SCC walk is done as part of a DFS
79    of the SSA graph, which makes it easy to perform combining and
80    simplifying operations at the same time.
81 
82    The code below is deliberately written in a way that makes it easy
83    to separate the SCC walk from the other work it does.
84 
85    In order to propagate constants through the code, we track which
86    expressions contain constants, and use those while folding.  In
87    theory, we could also track expressions whose value numbers are
88    replaced, in case we end up folding based on expression
89    identities.
90 
91    In order to value number memory, we assign value numbers to vuses.
92    This enables us to note that, for example, stores to the same
93    address of the same value from the same starting memory states are
94    equivalent.
95    TODO:
96 
97    1. We can iterate only the changing portions of the SCC's, but
98    I have not seen an SCC big enough for this to be a win.
99    2. If you differentiate between phi nodes for loops and phi nodes
100    for if-then-else, you can properly consider phi nodes in different
101    blocks for equivalence.
102    3. We could value number vuses in more cases, particularly, whole
103    structure copies.
104 */
105 
106 /* The set of hashtables and alloc_pool's for their items.  */
107 
108 typedef struct vn_tables_s
109 {
110   htab_t nary;
111   htab_t phis;
112   htab_t references;
113   struct obstack nary_obstack;
114   alloc_pool phis_pool;
115   alloc_pool references_pool;
116 } *vn_tables_t;
117 
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
120 
121 
122 /* Valid hashtables storing information we have proven to be
123    correct.  */
124 
125 static vn_tables_t valid_info;
126 
127 /* Optimistic hashtables storing information we are making assumptions about
128    during iterations.  */
129 
130 static vn_tables_t optimistic_info;
131 
132 /* Pointer to the set of hashtables that is currently being used.
133    Should always point to either the optimistic_info, or the
134    valid_info.  */
135 
136 static vn_tables_t current_info;
137 
138 
139 /* Reverse post order index for each basic block.  */
140 
141 static int *rpo_numbers;
142 
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
144 
145 /* This represents the top of the VN lattice, which is the universal
146    value.  */
147 
148 tree VN_TOP;
149 
150 /* Unique counter for our value ids.  */
151 
152 static unsigned int next_value_id;
153 
154 /* Next DFS number and the stack for strongly connected component
155    detection. */
156 
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
159 
160 
161 DEF_VEC_P(vn_ssa_aux_t);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
163 
164 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
165    are allocated on an obstack for locality reasons, and to free them
166    without looping over the VEC.  */
167 
168 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
169 static struct obstack vn_ssa_aux_obstack;
170 
171 /* Return the value numbering information for a given SSA name.  */
172 
173 vn_ssa_aux_t
174 VN_INFO (tree name)
175 {
176   vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
177 				SSA_NAME_VERSION (name));
178   gcc_checking_assert (res);
179   return res;
180 }
181 
182 /* Set the value numbering info for a given SSA name to a given
183    value.  */
184 
185 static inline void
186 VN_INFO_SET (tree name, vn_ssa_aux_t value)
187 {
188   VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
189 	       SSA_NAME_VERSION (name), value);
190 }
191 
192 /* Initialize the value numbering info for a given SSA name.
193    This should be called just once for every SSA name.  */
194 
195 vn_ssa_aux_t
196 VN_INFO_GET (tree name)
197 {
198   vn_ssa_aux_t newinfo;
199 
200   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
201   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
202   if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
203     VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
204 		   SSA_NAME_VERSION (name) + 1);
205   VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
206 	       SSA_NAME_VERSION (name), newinfo);
207   return newinfo;
208 }
209 
210 
211 /* Get the representative expression for the SSA_NAME NAME.  Returns
212    the representative SSA_NAME if there is no expression associated with it.  */
213 
214 tree
215 vn_get_expr_for (tree name)
216 {
217   vn_ssa_aux_t vn = VN_INFO (name);
218   gimple def_stmt;
219   tree expr = NULL_TREE;
220   enum tree_code code;
221 
222   if (vn->valnum == VN_TOP)
223     return name;
224 
225   /* If the value-number is a constant it is the representative
226      expression.  */
227   if (TREE_CODE (vn->valnum) != SSA_NAME)
228     return vn->valnum;
229 
230   /* Get to the information of the value of this SSA_NAME.  */
231   vn = VN_INFO (vn->valnum);
232 
233   /* If the value-number is a constant it is the representative
234      expression.  */
235   if (TREE_CODE (vn->valnum) != SSA_NAME)
236     return vn->valnum;
237 
238   /* Else if we have an expression, return it.  */
239   if (vn->expr != NULL_TREE)
240     return vn->expr;
241 
242   /* Otherwise use the defining statement to build the expression.  */
243   def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
244 
245   /* If the value number is not an assignment use it directly.  */
246   if (!is_gimple_assign (def_stmt))
247     return vn->valnum;
248 
249   /* FIXME tuples.  This is incomplete and likely will miss some
250      simplifications.  */
251   code = gimple_assign_rhs_code (def_stmt);
252   switch (TREE_CODE_CLASS (code))
253     {
254     case tcc_reference:
255       if ((code == REALPART_EXPR
256 	   || code == IMAGPART_EXPR
257 	   || code == VIEW_CONVERT_EXPR)
258 	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
259 				      0)) == SSA_NAME)
260 	expr = fold_build1 (code,
261 			    gimple_expr_type (def_stmt),
262 			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
263       break;
264 
265     case tcc_unary:
266       expr = fold_build1 (code,
267 			  gimple_expr_type (def_stmt),
268 			  gimple_assign_rhs1 (def_stmt));
269       break;
270 
271     case tcc_binary:
272       expr = fold_build2 (code,
273 			  gimple_expr_type (def_stmt),
274 			  gimple_assign_rhs1 (def_stmt),
275 			  gimple_assign_rhs2 (def_stmt));
276       break;
277 
278     case tcc_exceptional:
279       if (code == CONSTRUCTOR
280 	  && TREE_CODE
281 	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
282 	expr = gimple_assign_rhs1 (def_stmt);
283       break;
284 
285     default:;
286     }
287   if (expr == NULL_TREE)
288     return vn->valnum;
289 
290   /* Cache the expression.  */
291   vn->expr = expr;
292 
293   return expr;
294 }
295 
296 
297 /* Free a phi operation structure VP.  */
298 
299 static void
300 free_phi (void *vp)
301 {
302   vn_phi_t phi = (vn_phi_t) vp;
303   VEC_free (tree, heap, phi->phiargs);
304 }
305 
306 /* Free a reference operation structure VP.  */
307 
308 static void
309 free_reference (void *vp)
310 {
311   vn_reference_t vr = (vn_reference_t) vp;
312   VEC_free (vn_reference_op_s, heap, vr->operands);
313 }
314 
315 /* Hash table equality function for vn_constant_t.  */
316 
317 static int
318 vn_constant_eq (const void *p1, const void *p2)
319 {
320   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
321   const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
322 
323   if (vc1->hashcode != vc2->hashcode)
324     return false;
325 
326   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
327 }
328 
329 /* Hash table hash function for vn_constant_t.  */
330 
331 static hashval_t
332 vn_constant_hash (const void *p1)
333 {
334   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
335   return vc1->hashcode;
336 }
337 
338 /* Lookup a value id for CONSTANT and return it.  If it does not
339    exist returns 0.  */
340 
341 unsigned int
342 get_constant_value_id (tree constant)
343 {
344   void **slot;
345   struct vn_constant_s vc;
346 
347   vc.hashcode = vn_hash_constant_with_type (constant);
348   vc.constant = constant;
349   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
350 				   vc.hashcode, NO_INSERT);
351   if (slot)
352     return ((vn_constant_t)*slot)->value_id;
353   return 0;
354 }
355 
356 /* Lookup a value id for CONSTANT, and if it does not exist, create a
357    new one and return it.  If it does exist, return it.  */
358 
359 unsigned int
360 get_or_alloc_constant_value_id (tree constant)
361 {
362   void **slot;
363   struct vn_constant_s vc;
364   vn_constant_t vcp;
365 
366   vc.hashcode = vn_hash_constant_with_type (constant);
367   vc.constant = constant;
368   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
369 				   vc.hashcode, INSERT);
370   if (*slot)
371     return ((vn_constant_t)*slot)->value_id;
372 
373   vcp = XNEW (struct vn_constant_s);
374   vcp->hashcode = vc.hashcode;
375   vcp->constant = constant;
376   vcp->value_id = get_next_value_id ();
377   *slot = (void *) vcp;
378   bitmap_set_bit (constant_value_ids, vcp->value_id);
379   return vcp->value_id;
380 }
381 
382 /* Return true if V is a value id for a constant.  */
383 
384 bool
385 value_id_constant_p (unsigned int v)
386 {
387   return bitmap_bit_p (constant_value_ids, v);
388 }
389 
390 /* Compare two reference operands P1 and P2 for equality.  Return true if
391    they are equal, and false otherwise.  */
392 
393 static int
394 vn_reference_op_eq (const void *p1, const void *p2)
395 {
396   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
397   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
398 
399   return (vro1->opcode == vro2->opcode
400 	  /* We do not care for differences in type qualification.  */
401 	  && (vro1->type == vro2->type
402 	      || (vro1->type && vro2->type
403 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
404 					 TYPE_MAIN_VARIANT (vro2->type))))
405 	  && expressions_equal_p (vro1->op0, vro2->op0)
406 	  && expressions_equal_p (vro1->op1, vro2->op1)
407 	  && expressions_equal_p (vro1->op2, vro2->op2));
408 }
409 
410 /* Compute the hash for a reference operand VRO1.  */
411 
412 static hashval_t
413 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
414 {
415   result = iterative_hash_hashval_t (vro1->opcode, result);
416   if (vro1->op0)
417     result = iterative_hash_expr (vro1->op0, result);
418   if (vro1->op1)
419     result = iterative_hash_expr (vro1->op1, result);
420   if (vro1->op2)
421     result = iterative_hash_expr (vro1->op2, result);
422   return result;
423 }
424 
425 /* Return the hashcode for a given reference operation P1.  */
426 
427 static hashval_t
428 vn_reference_hash (const void *p1)
429 {
430   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
431   return vr1->hashcode;
432 }
433 
434 /* Compute a hash for the reference operation VR1 and return it.  */
435 
436 hashval_t
437 vn_reference_compute_hash (const vn_reference_t vr1)
438 {
439   hashval_t result = 0;
440   int i;
441   vn_reference_op_t vro;
442   HOST_WIDE_INT off = -1;
443   bool deref = false;
444 
445   FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
446     {
447       if (vro->opcode == MEM_REF)
448 	deref = true;
449       else if (vro->opcode != ADDR_EXPR)
450 	deref = false;
451       if (vro->off != -1)
452 	{
453 	  if (off == -1)
454 	    off = 0;
455 	  off += vro->off;
456 	}
457       else
458 	{
459 	  if (off != -1
460 	      && off != 0)
461 	    result = iterative_hash_hashval_t (off, result);
462 	  off = -1;
463 	  if (deref
464 	      && vro->opcode == ADDR_EXPR)
465 	    {
466 	      if (vro->op0)
467 		{
468 		  tree op = TREE_OPERAND (vro->op0, 0);
469 		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
470 		  result = iterative_hash_expr (op, result);
471 		}
472 	    }
473 	  else
474 	    result = vn_reference_op_compute_hash (vro, result);
475 	}
476     }
477   if (vr1->vuse)
478     result += SSA_NAME_VERSION (vr1->vuse);
479 
480   return result;
481 }
482 
483 /* Return true if reference operations P1 and P2 are equivalent.  This
484    means they have the same set of operands and vuses.  */
485 
486 int
487 vn_reference_eq (const void *p1, const void *p2)
488 {
489   unsigned i, j;
490 
491   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
492   const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
493   if (vr1->hashcode != vr2->hashcode)
494     return false;
495 
496   /* Early out if this is not a hash collision.  */
497   if (vr1->hashcode != vr2->hashcode)
498     return false;
499 
500   /* The VOP needs to be the same.  */
501   if (vr1->vuse != vr2->vuse)
502     return false;
503 
504   /* If the operands are the same we are done.  */
505   if (vr1->operands == vr2->operands)
506     return true;
507 
508   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
509     return false;
510 
511   if (INTEGRAL_TYPE_P (vr1->type)
512       && INTEGRAL_TYPE_P (vr2->type))
513     {
514       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
515 	return false;
516     }
517   else if (INTEGRAL_TYPE_P (vr1->type)
518 	   && (TYPE_PRECISION (vr1->type)
519 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
520     return false;
521   else if (INTEGRAL_TYPE_P (vr2->type)
522 	   && (TYPE_PRECISION (vr2->type)
523 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
524     return false;
525 
526   i = 0;
527   j = 0;
528   do
529     {
530       HOST_WIDE_INT off1 = 0, off2 = 0;
531       vn_reference_op_t vro1, vro2;
532       vn_reference_op_s tem1, tem2;
533       bool deref1 = false, deref2 = false;
534       for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
535 	{
536 	  if (vro1->opcode == MEM_REF)
537 	    deref1 = true;
538 	  if (vro1->off == -1)
539 	    break;
540 	  off1 += vro1->off;
541 	}
542       for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
543 	{
544 	  if (vro2->opcode == MEM_REF)
545 	    deref2 = true;
546 	  if (vro2->off == -1)
547 	    break;
548 	  off2 += vro2->off;
549 	}
550       if (off1 != off2)
551 	return false;
552       if (deref1 && vro1->opcode == ADDR_EXPR)
553 	{
554 	  memset (&tem1, 0, sizeof (tem1));
555 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
556 	  tem1.type = TREE_TYPE (tem1.op0);
557 	  tem1.opcode = TREE_CODE (tem1.op0);
558 	  vro1 = &tem1;
559 	  deref1 = false;
560 	}
561       if (deref2 && vro2->opcode == ADDR_EXPR)
562 	{
563 	  memset (&tem2, 0, sizeof (tem2));
564 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
565 	  tem2.type = TREE_TYPE (tem2.op0);
566 	  tem2.opcode = TREE_CODE (tem2.op0);
567 	  vro2 = &tem2;
568 	  deref2 = false;
569 	}
570       if (deref1 != deref2)
571 	return false;
572       if (!vn_reference_op_eq (vro1, vro2))
573 	return false;
574       ++j;
575       ++i;
576     }
577   while (VEC_length (vn_reference_op_s, vr1->operands) != i
578 	 || VEC_length (vn_reference_op_s, vr2->operands) != j);
579 
580   return true;
581 }
582 
583 /* Copy the operations present in load/store REF into RESULT, a vector of
584    vn_reference_op_s's.  */
585 
586 void
587 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
588 {
589   if (TREE_CODE (ref) == TARGET_MEM_REF)
590     {
591       vn_reference_op_s temp;
592 
593       memset (&temp, 0, sizeof (temp));
594       temp.type = TREE_TYPE (ref);
595       temp.opcode = TREE_CODE (ref);
596       temp.op0 = TMR_INDEX (ref);
597       temp.op1 = TMR_STEP (ref);
598       temp.op2 = TMR_OFFSET (ref);
599       temp.off = -1;
600       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
601 
602       memset (&temp, 0, sizeof (temp));
603       temp.type = NULL_TREE;
604       temp.opcode = ERROR_MARK;
605       temp.op0 = TMR_INDEX2 (ref);
606       temp.off = -1;
607       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
608 
609       memset (&temp, 0, sizeof (temp));
610       temp.type = NULL_TREE;
611       temp.opcode = TREE_CODE (TMR_BASE (ref));
612       temp.op0 = TMR_BASE (ref);
613       temp.off = -1;
614       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
615       return;
616     }
617 
618   /* For non-calls, store the information that makes up the address.  */
619 
620   while (ref)
621     {
622       vn_reference_op_s temp;
623 
624       memset (&temp, 0, sizeof (temp));
625       temp.type = TREE_TYPE (ref);
626       temp.opcode = TREE_CODE (ref);
627       temp.off = -1;
628 
629       switch (temp.opcode)
630 	{
631 	case WITH_SIZE_EXPR:
632 	  temp.op0 = TREE_OPERAND (ref, 1);
633 	  temp.off = 0;
634 	  break;
635 	case MEM_REF:
636 	  /* The base address gets its own vn_reference_op_s structure.  */
637 	  temp.op0 = TREE_OPERAND (ref, 1);
638 	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
639 	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
640 	  break;
641 	case BIT_FIELD_REF:
642 	  /* Record bits and position.  */
643 	  temp.op0 = TREE_OPERAND (ref, 1);
644 	  temp.op1 = TREE_OPERAND (ref, 2);
645 	  break;
646 	case COMPONENT_REF:
647 	  /* The field decl is enough to unambiguously specify the field,
648 	     a matching type is not necessary and a mismatching type
649 	     is always a spurious difference.  */
650 	  temp.type = NULL_TREE;
651 	  temp.op0 = TREE_OPERAND (ref, 1);
652 	  temp.op1 = TREE_OPERAND (ref, 2);
653 	  {
654 	    tree this_offset = component_ref_field_offset (ref);
655 	    if (this_offset
656 		&& TREE_CODE (this_offset) == INTEGER_CST)
657 	      {
658 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
659 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
660 		  {
661 		    double_int off
662 		      = double_int_add (tree_to_double_int (this_offset),
663 					double_int_rshift
664 					  (tree_to_double_int (bit_offset),
665 					   BITS_PER_UNIT == 8
666 					   ? 3 : exact_log2 (BITS_PER_UNIT),
667 					   HOST_BITS_PER_DOUBLE_INT, true));
668 		    if (double_int_fits_in_shwi_p (off))
669 		      temp.off = off.low;
670 		  }
671 	      }
672 	  }
673 	  break;
674 	case ARRAY_RANGE_REF:
675 	case ARRAY_REF:
676 	  /* Record index as operand.  */
677 	  temp.op0 = TREE_OPERAND (ref, 1);
678 	  /* Always record lower bounds and element size.  */
679 	  temp.op1 = array_ref_low_bound (ref);
680 	  temp.op2 = array_ref_element_size (ref);
681 	  if (TREE_CODE (temp.op0) == INTEGER_CST
682 	      && TREE_CODE (temp.op1) == INTEGER_CST
683 	      && TREE_CODE (temp.op2) == INTEGER_CST)
684 	    {
685 	      double_int off = tree_to_double_int (temp.op0);
686 	      off = double_int_add (off,
687 				    double_int_neg
688 				      (tree_to_double_int (temp.op1)));
689 	      off = double_int_mul (off, tree_to_double_int (temp.op2));
690 	      if (double_int_fits_in_shwi_p (off))
691 		temp.off = off.low;
692 	    }
693 	  break;
694 	case VAR_DECL:
695 	  if (DECL_HARD_REGISTER (ref))
696 	    {
697 	      temp.op0 = ref;
698 	      break;
699 	    }
700 	  /* Fallthru.  */
701 	case PARM_DECL:
702 	case CONST_DECL:
703 	case RESULT_DECL:
704 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
705 	     when valueizing MEM[ptr] with ptr = &decl.  */
706 	  temp.opcode = MEM_REF;
707 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
708 	  temp.off = 0;
709 	  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
710 	  temp.opcode = ADDR_EXPR;
711 	  temp.op0 = build_fold_addr_expr (ref);
712 	  temp.type = TREE_TYPE (temp.op0);
713 	  temp.off = -1;
714 	  break;
715 	case STRING_CST:
716 	case INTEGER_CST:
717 	case COMPLEX_CST:
718 	case VECTOR_CST:
719 	case REAL_CST:
720 	case FIXED_CST:
721 	case CONSTRUCTOR:
722 	case SSA_NAME:
723 	  temp.op0 = ref;
724 	  break;
725 	case ADDR_EXPR:
726 	  if (is_gimple_min_invariant (ref))
727 	    {
728 	      temp.op0 = ref;
729 	      break;
730 	    }
731 	  /* Fallthrough.  */
732 	  /* These are only interesting for their operands, their
733 	     existence, and their type.  They will never be the last
734 	     ref in the chain of references (IE they require an
735 	     operand), so we don't have to put anything
736 	     for op* as it will be handled by the iteration  */
737 	case REALPART_EXPR:
738 	case VIEW_CONVERT_EXPR:
739 	  temp.off = 0;
740 	  break;
741 	case IMAGPART_EXPR:
742 	  /* This is only interesting for its constant offset.  */
743 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
744 	  break;
745 	default:
746 	  gcc_unreachable ();
747 	}
748       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
749 
750       if (REFERENCE_CLASS_P (ref)
751 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
752 	  || (TREE_CODE (ref) == ADDR_EXPR
753 	      && !is_gimple_min_invariant (ref)))
754 	ref = TREE_OPERAND (ref, 0);
755       else
756 	ref = NULL_TREE;
757     }
758 }
759 
760 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
761    operands in *OPS, the reference alias set SET and the reference type TYPE.
762    Return true if something useful was produced.  */
763 
764 bool
765 ao_ref_init_from_vn_reference (ao_ref *ref,
766 			       alias_set_type set, tree type,
767 			       VEC (vn_reference_op_s, heap) *ops)
768 {
769   vn_reference_op_t op;
770   unsigned i;
771   tree base = NULL_TREE;
772   tree *op0_p = &base;
773   HOST_WIDE_INT offset = 0;
774   HOST_WIDE_INT max_size;
775   HOST_WIDE_INT size = -1;
776   tree size_tree = NULL_TREE;
777   alias_set_type base_alias_set = -1;
778 
779   /* First get the final access size from just the outermost expression.  */
780   op = VEC_index (vn_reference_op_s, ops, 0);
781   if (op->opcode == COMPONENT_REF)
782     size_tree = DECL_SIZE (op->op0);
783   else if (op->opcode == BIT_FIELD_REF)
784     size_tree = op->op0;
785   else
786     {
787       enum machine_mode mode = TYPE_MODE (type);
788       if (mode == BLKmode)
789 	size_tree = TYPE_SIZE (type);
790       else
791         size = GET_MODE_BITSIZE (mode);
792     }
793   if (size_tree != NULL_TREE)
794     {
795       if (!host_integerp (size_tree, 1))
796 	size = -1;
797       else
798 	size = TREE_INT_CST_LOW (size_tree);
799     }
800 
801   /* Initially, maxsize is the same as the accessed element size.
802      In the following it will only grow (or become -1).  */
803   max_size = size;
804 
805   /* Compute cumulative bit-offset for nested component-refs and array-refs,
806      and find the ultimate containing object.  */
807   FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
808     {
809       switch (op->opcode)
810 	{
811 	/* These may be in the reference ops, but we cannot do anything
812 	   sensible with them here.  */
813 	case ADDR_EXPR:
814 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
815 	  if (base != NULL_TREE
816 	      && TREE_CODE (base) == MEM_REF
817 	      && op->op0
818 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
819 	    {
820 	      vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
821 	      base = TREE_OPERAND (op->op0, 0);
822 	      if (pop->off == -1)
823 		{
824 		  max_size = -1;
825 		  offset = 0;
826 		}
827 	      else
828 		offset += pop->off * BITS_PER_UNIT;
829 	      op0_p = NULL;
830 	      break;
831 	    }
832 	  /* Fallthru.  */
833 	case CALL_EXPR:
834 	  return false;
835 
836 	/* Record the base objects.  */
837 	case MEM_REF:
838 	  base_alias_set = get_deref_alias_set (op->op0);
839 	  *op0_p = build2 (MEM_REF, op->type,
840 			   NULL_TREE, op->op0);
841 	  op0_p = &TREE_OPERAND (*op0_p, 0);
842 	  break;
843 
844 	case VAR_DECL:
845 	case PARM_DECL:
846 	case RESULT_DECL:
847 	case SSA_NAME:
848 	  *op0_p = op->op0;
849 	  op0_p = NULL;
850 	  break;
851 
852 	/* And now the usual component-reference style ops.  */
853 	case BIT_FIELD_REF:
854 	  offset += tree_low_cst (op->op1, 0);
855 	  break;
856 
857 	case COMPONENT_REF:
858 	  {
859 	    tree field = op->op0;
860 	    /* We do not have a complete COMPONENT_REF tree here so we
861 	       cannot use component_ref_field_offset.  Do the interesting
862 	       parts manually.  */
863 
864 	    if (op->op1
865 		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
866 	      max_size = -1;
867 	    else
868 	      {
869 		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
870 			   * BITS_PER_UNIT);
871 		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
872 	      }
873 	    break;
874 	  }
875 
876 	case ARRAY_RANGE_REF:
877 	case ARRAY_REF:
878 	  /* We recorded the lower bound and the element size.  */
879 	  if (!host_integerp (op->op0, 0)
880 	      || !host_integerp (op->op1, 0)
881 	      || !host_integerp (op->op2, 0))
882 	    max_size = -1;
883 	  else
884 	    {
885 	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
886 	      hindex -= TREE_INT_CST_LOW (op->op1);
887 	      hindex *= TREE_INT_CST_LOW (op->op2);
888 	      hindex *= BITS_PER_UNIT;
889 	      offset += hindex;
890 	    }
891 	  break;
892 
893 	case REALPART_EXPR:
894 	  break;
895 
896 	case IMAGPART_EXPR:
897 	  offset += size;
898 	  break;
899 
900 	case VIEW_CONVERT_EXPR:
901 	  break;
902 
903 	case STRING_CST:
904 	case INTEGER_CST:
905 	case COMPLEX_CST:
906 	case VECTOR_CST:
907 	case REAL_CST:
908 	case CONSTRUCTOR:
909 	case CONST_DECL:
910 	  return false;
911 
912 	default:
913 	  return false;
914 	}
915     }
916 
917   if (base == NULL_TREE)
918     return false;
919 
920   ref->ref = NULL_TREE;
921   ref->base = base;
922   ref->offset = offset;
923   ref->size = size;
924   ref->max_size = max_size;
925   ref->ref_alias_set = set;
926   if (base_alias_set != -1)
927     ref->base_alias_set = base_alias_set;
928   else
929     ref->base_alias_set = get_alias_set (base);
930   /* We discount volatiles from value-numbering elsewhere.  */
931   ref->volatile_p = false;
932 
933   return true;
934 }
935 
936 /* Copy the operations present in load/store/call REF into RESULT, a vector of
937    vn_reference_op_s's.  */
938 
939 void
940 copy_reference_ops_from_call (gimple call,
941 			      VEC(vn_reference_op_s, heap) **result)
942 {
943   vn_reference_op_s temp;
944   unsigned i;
945 
946   /* Copy the type, opcode, function being called and static chain.  */
947   memset (&temp, 0, sizeof (temp));
948   temp.type = gimple_call_return_type (call);
949   temp.opcode = CALL_EXPR;
950   temp.op0 = gimple_call_fn (call);
951   temp.op1 = gimple_call_chain (call);
952   temp.off = -1;
953   VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
954 
955   /* Copy the call arguments.  As they can be references as well,
956      just chain them together.  */
957   for (i = 0; i < gimple_call_num_args (call); ++i)
958     {
959       tree callarg = gimple_call_arg (call, i);
960       copy_reference_ops_from_ref (callarg, result);
961     }
962 }
963 
964 /* Create a vector of vn_reference_op_s structures from REF, a
965    REFERENCE_CLASS_P tree.  The vector is not shared. */
966 
967 static VEC(vn_reference_op_s, heap) *
968 create_reference_ops_from_ref (tree ref)
969 {
970   VEC (vn_reference_op_s, heap) *result = NULL;
971 
972   copy_reference_ops_from_ref (ref, &result);
973   return result;
974 }
975 
976 /* Create a vector of vn_reference_op_s structures from CALL, a
977    call statement.  The vector is not shared.  */
978 
979 static VEC(vn_reference_op_s, heap) *
980 create_reference_ops_from_call (gimple call)
981 {
982   VEC (vn_reference_op_s, heap) *result = NULL;
983 
984   copy_reference_ops_from_call (call, &result);
985   return result;
986 }
987 
988 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
989    *I_P to point to the last element of the replacement.  */
990 void
991 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
992 			    unsigned int *i_p)
993 {
994   unsigned int i = *i_p;
995   vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
996   vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
997   tree addr_base;
998   HOST_WIDE_INT addr_offset;
999 
1000   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1001      from .foo.bar to the preceeding MEM_REF offset and replace the
1002      address with &OBJ.  */
1003   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1004 					     &addr_offset);
1005   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1006   if (addr_base != op->op0)
1007     {
1008       double_int off = tree_to_double_int (mem_op->op0);
1009       off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1010       off = double_int_add (off, shwi_to_double_int (addr_offset));
1011       mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1012       op->op0 = build_fold_addr_expr (addr_base);
1013       if (host_integerp (mem_op->op0, 0))
1014 	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1015       else
1016 	mem_op->off = -1;
1017     }
1018 }
1019 
1020 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1021    *I_P to point to the last element of the replacement.  */
1022 static void
1023 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
1024 				     unsigned int *i_p)
1025 {
1026   unsigned int i = *i_p;
1027   vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
1028   vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1029   gimple def_stmt;
1030   enum tree_code code;
1031   double_int off;
1032 
1033   def_stmt = SSA_NAME_DEF_STMT (op->op0);
1034   if (!is_gimple_assign (def_stmt))
1035     return;
1036 
1037   code = gimple_assign_rhs_code (def_stmt);
1038   if (code != ADDR_EXPR
1039       && code != POINTER_PLUS_EXPR)
1040     return;
1041 
1042   off = tree_to_double_int (mem_op->op0);
1043   off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1044 
1045   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1046      from .foo.bar to the preceeding MEM_REF offset and replace the
1047      address with &OBJ.  */
1048   if (code == ADDR_EXPR)
1049     {
1050       tree addr, addr_base;
1051       HOST_WIDE_INT addr_offset;
1052 
1053       addr = gimple_assign_rhs1 (def_stmt);
1054       addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1055 						 &addr_offset);
1056       if (!addr_base
1057 	  || TREE_CODE (addr_base) != MEM_REF)
1058 	return;
1059 
1060       off = double_int_add (off, shwi_to_double_int (addr_offset));
1061       off = double_int_add (off, mem_ref_offset (addr_base));
1062       op->op0 = TREE_OPERAND (addr_base, 0);
1063     }
1064   else
1065     {
1066       tree ptr, ptroff;
1067       ptr = gimple_assign_rhs1 (def_stmt);
1068       ptroff = gimple_assign_rhs2 (def_stmt);
1069       if (TREE_CODE (ptr) != SSA_NAME
1070 	  || TREE_CODE (ptroff) != INTEGER_CST)
1071 	return;
1072 
1073       off = double_int_add (off, tree_to_double_int (ptroff));
1074       op->op0 = ptr;
1075     }
1076 
1077   mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1078   if (host_integerp (mem_op->op0, 0))
1079     mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1080   else
1081     mem_op->off = -1;
1082   if (TREE_CODE (op->op0) == SSA_NAME)
1083     op->op0 = SSA_VAL (op->op0);
1084   if (TREE_CODE (op->op0) != SSA_NAME)
1085     op->opcode = TREE_CODE (op->op0);
1086 
1087   /* And recurse.  */
1088   if (TREE_CODE (op->op0) == SSA_NAME)
1089     vn_reference_maybe_forwprop_address (ops, i_p);
1090   else if (TREE_CODE (op->op0) == ADDR_EXPR)
1091     vn_reference_fold_indirect (ops, i_p);
1092 }
1093 
1094 /* Optimize the reference REF to a constant if possible or return
1095    NULL_TREE if not.  */
1096 
1097 tree
1098 fully_constant_vn_reference_p (vn_reference_t ref)
1099 {
1100   VEC (vn_reference_op_s, heap) *operands = ref->operands;
1101   vn_reference_op_t op;
1102 
1103   /* Try to simplify the translated expression if it is
1104      a call to a builtin function with at most two arguments.  */
1105   op = VEC_index (vn_reference_op_s, operands, 0);
1106   if (op->opcode == CALL_EXPR
1107       && TREE_CODE (op->op0) == ADDR_EXPR
1108       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1109       && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1110       && VEC_length (vn_reference_op_s, operands) >= 2
1111       && VEC_length (vn_reference_op_s, operands) <= 3)
1112     {
1113       vn_reference_op_t arg0, arg1 = NULL;
1114       bool anyconst = false;
1115       arg0 = VEC_index (vn_reference_op_s, operands, 1);
1116       if (VEC_length (vn_reference_op_s, operands) > 2)
1117 	arg1 = VEC_index (vn_reference_op_s, operands, 2);
1118       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1119 	  || (arg0->opcode == ADDR_EXPR
1120 	      && is_gimple_min_invariant (arg0->op0)))
1121 	anyconst = true;
1122       if (arg1
1123 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1124 	      || (arg1->opcode == ADDR_EXPR
1125 		  && is_gimple_min_invariant (arg1->op0))))
1126 	anyconst = true;
1127       if (anyconst)
1128 	{
1129 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1130 					 arg1 ? 2 : 1,
1131 					 arg0->op0,
1132 					 arg1 ? arg1->op0 : NULL);
1133 	  if (folded
1134 	      && TREE_CODE (folded) == NOP_EXPR)
1135 	    folded = TREE_OPERAND (folded, 0);
1136 	  if (folded
1137 	      && is_gimple_min_invariant (folded))
1138 	    return folded;
1139 	}
1140     }
1141 
1142   /* Simplify reads from constant strings.  */
1143   else if (op->opcode == ARRAY_REF
1144 	   && TREE_CODE (op->op0) == INTEGER_CST
1145 	   && integer_zerop (op->op1)
1146 	   && VEC_length (vn_reference_op_s, operands) == 2)
1147     {
1148       vn_reference_op_t arg0;
1149       arg0 = VEC_index (vn_reference_op_s, operands, 1);
1150       if (arg0->opcode == STRING_CST
1151 	  && (TYPE_MODE (op->type)
1152 	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1153 	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1154 	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1155 	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1156 	return build_int_cst_type (op->type,
1157 				   (TREE_STRING_POINTER (arg0->op0)
1158 				    [TREE_INT_CST_LOW (op->op0)]));
1159     }
1160 
1161   return NULL_TREE;
1162 }
1163 
1164 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1165    structures into their value numbers.  This is done in-place, and
1166    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1167    whether any operands were valueized.  */
1168 
1169 static VEC (vn_reference_op_s, heap) *
1170 valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
1171 {
1172   vn_reference_op_t vro;
1173   unsigned int i;
1174 
1175   *valueized_anything = false;
1176 
1177   FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
1178     {
1179       if (vro->opcode == SSA_NAME
1180 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1181 	{
1182 	  tree tem = SSA_VAL (vro->op0);
1183 	  if (tem != vro->op0)
1184 	    {
1185 	      *valueized_anything = true;
1186 	      vro->op0 = tem;
1187 	    }
1188 	  /* If it transforms from an SSA_NAME to a constant, update
1189 	     the opcode.  */
1190 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1191 	    vro->opcode = TREE_CODE (vro->op0);
1192 	}
1193       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1194 	{
1195 	  tree tem = SSA_VAL (vro->op1);
1196 	  if (tem != vro->op1)
1197 	    {
1198 	      *valueized_anything = true;
1199 	      vro->op1 = tem;
1200 	    }
1201 	}
1202       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1203 	{
1204 	  tree tem = SSA_VAL (vro->op2);
1205 	  if (tem != vro->op2)
1206 	    {
1207 	      *valueized_anything = true;
1208 	      vro->op2 = tem;
1209 	    }
1210 	}
1211       /* If it transforms from an SSA_NAME to an address, fold with
1212 	 a preceding indirect reference.  */
1213       if (i > 0
1214 	  && vro->op0
1215 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1216 	  && VEC_index (vn_reference_op_s,
1217 			orig, i - 1)->opcode == MEM_REF)
1218 	vn_reference_fold_indirect (&orig, &i);
1219       else if (i > 0
1220 	       && vro->opcode == SSA_NAME
1221 	       && VEC_index (vn_reference_op_s,
1222 			     orig, i - 1)->opcode == MEM_REF)
1223 	vn_reference_maybe_forwprop_address (&orig, &i);
1224       /* If it transforms a non-constant ARRAY_REF into a constant
1225 	 one, adjust the constant offset.  */
1226       else if (vro->opcode == ARRAY_REF
1227 	       && vro->off == -1
1228 	       && TREE_CODE (vro->op0) == INTEGER_CST
1229 	       && TREE_CODE (vro->op1) == INTEGER_CST
1230 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1231 	{
1232 	  double_int off = tree_to_double_int (vro->op0);
1233 	  off = double_int_add (off,
1234 				double_int_neg
1235 				  (tree_to_double_int (vro->op1)));
1236 	  off = double_int_mul (off, tree_to_double_int (vro->op2));
1237 	  if (double_int_fits_in_shwi_p (off))
1238 	    vro->off = off.low;
1239 	}
1240     }
1241 
1242   return orig;
1243 }
1244 
1245 static VEC (vn_reference_op_s, heap) *
1246 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1247 {
1248   bool tem;
1249   return valueize_refs_1 (orig, &tem);
1250 }
1251 
1252 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1253 
1254 /* Create a vector of vn_reference_op_s structures from REF, a
1255    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1256    this function.  *VALUEIZED_ANYTHING will specify whether any
1257    operands were valueized.  */
1258 
1259 static VEC(vn_reference_op_s, heap) *
1260 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1261 {
1262   if (!ref)
1263     return NULL;
1264   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1265   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1266   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1267 					      valueized_anything);
1268   return shared_lookup_references;
1269 }
1270 
1271 /* Create a vector of vn_reference_op_s structures from CALL, a
1272    call statement.  The vector is shared among all callers of
1273    this function.  */
1274 
1275 static VEC(vn_reference_op_s, heap) *
1276 valueize_shared_reference_ops_from_call (gimple call)
1277 {
1278   if (!call)
1279     return NULL;
1280   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1281   copy_reference_ops_from_call (call, &shared_lookup_references);
1282   shared_lookup_references = valueize_refs (shared_lookup_references);
1283   return shared_lookup_references;
1284 }
1285 
1286 /* Lookup a SCCVN reference operation VR in the current hash table.
1287    Returns the resulting value number if it exists in the hash table,
1288    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1289    vn_reference_t stored in the hashtable if something is found.  */
1290 
1291 static tree
1292 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1293 {
1294   void **slot;
1295   hashval_t hash;
1296 
1297   hash = vr->hashcode;
1298   slot = htab_find_slot_with_hash (current_info->references, vr,
1299 				   hash, NO_INSERT);
1300   if (!slot && current_info == optimistic_info)
1301     slot = htab_find_slot_with_hash (valid_info->references, vr,
1302 				     hash, NO_INSERT);
1303   if (slot)
1304     {
1305       if (vnresult)
1306 	*vnresult = (vn_reference_t)*slot;
1307       return ((vn_reference_t)*slot)->result;
1308     }
1309 
1310   return NULL_TREE;
1311 }
1312 
1313 static tree *last_vuse_ptr;
1314 static vn_lookup_kind vn_walk_kind;
1315 static vn_lookup_kind default_vn_walk_kind;
1316 
1317 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1318    with the current VUSE and performs the expression lookup.  */
1319 
1320 static void *
1321 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1322 {
1323   vn_reference_t vr = (vn_reference_t)vr_;
1324   void **slot;
1325   hashval_t hash;
1326 
1327   if (last_vuse_ptr)
1328     *last_vuse_ptr = vuse;
1329 
1330   /* Fixup vuse and hash.  */
1331   if (vr->vuse)
1332     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1333   vr->vuse = SSA_VAL (vuse);
1334   if (vr->vuse)
1335     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1336 
1337   hash = vr->hashcode;
1338   slot = htab_find_slot_with_hash (current_info->references, vr,
1339 				   hash, NO_INSERT);
1340   if (!slot && current_info == optimistic_info)
1341     slot = htab_find_slot_with_hash (valid_info->references, vr,
1342 				     hash, NO_INSERT);
1343   if (slot)
1344     return *slot;
1345 
1346   return NULL;
1347 }
1348 
1349 /* Lookup an existing or insert a new vn_reference entry into the
1350    value table for the VUSE, SET, TYPE, OPERANDS reference which
1351    has the value VALUE which is either a constant or an SSA name.  */
1352 
1353 static vn_reference_t
1354 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1355 					  alias_set_type set,
1356 					  tree type,
1357 					  VEC (vn_reference_op_s,
1358 					       heap) *operands,
1359 					  tree value)
1360 {
1361   struct vn_reference_s vr1;
1362   vn_reference_t result;
1363   unsigned value_id;
1364   vr1.vuse = vuse;
1365   vr1.operands = operands;
1366   vr1.type = type;
1367   vr1.set = set;
1368   vr1.hashcode = vn_reference_compute_hash (&vr1);
1369   if (vn_reference_lookup_1 (&vr1, &result))
1370     return result;
1371   if (TREE_CODE (value) == SSA_NAME)
1372     value_id = VN_INFO (value)->value_id;
1373   else
1374     value_id = get_or_alloc_constant_value_id (value);
1375   return vn_reference_insert_pieces (vuse, set, type,
1376 				     VEC_copy (vn_reference_op_s, heap,
1377 					       operands), value, value_id);
1378 }
1379 
1380 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1381    from the statement defining VUSE and if not successful tries to
1382    translate *REFP and VR_ through an aggregate copy at the defintion
1383    of VUSE.  */
1384 
1385 static void *
1386 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1387 {
1388   vn_reference_t vr = (vn_reference_t)vr_;
1389   gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1390   tree base;
1391   HOST_WIDE_INT offset, maxsize;
1392   static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
1393   ao_ref lhs_ref;
1394   bool lhs_ref_ok = false;
1395 
1396   /* First try to disambiguate after value-replacing in the definitions LHS.  */
1397   if (is_gimple_assign (def_stmt))
1398     {
1399       VEC (vn_reference_op_s, heap) *tem;
1400       tree lhs = gimple_assign_lhs (def_stmt);
1401       bool valueized_anything = false;
1402       /* Avoid re-allocation overhead.  */
1403       VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1404       copy_reference_ops_from_ref (lhs, &lhs_ops);
1405       tem = lhs_ops;
1406       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1407       gcc_assert (lhs_ops == tem);
1408       if (valueized_anything)
1409 	{
1410 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1411 						      get_alias_set (lhs),
1412 						      TREE_TYPE (lhs), lhs_ops);
1413 	  if (lhs_ref_ok
1414 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1415 	    return NULL;
1416 	}
1417       else
1418 	{
1419 	  ao_ref_init (&lhs_ref, lhs);
1420 	  lhs_ref_ok = true;
1421 	}
1422     }
1423 
1424   base = ao_ref_base (ref);
1425   offset = ref->offset;
1426   maxsize = ref->max_size;
1427 
1428   /* If we cannot constrain the size of the reference we cannot
1429      test if anything kills it.  */
1430   if (maxsize == -1)
1431     return (void *)-1;
1432 
1433   /* We can't deduce anything useful from clobbers.  */
1434   if (gimple_clobber_p (def_stmt))
1435     return (void *)-1;
1436 
1437   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1438      from that definition.
1439      1) Memset.  */
1440   if (is_gimple_reg_type (vr->type)
1441       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1442       && integer_zerop (gimple_call_arg (def_stmt, 1))
1443       && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1444       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1445     {
1446       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1447       tree base2;
1448       HOST_WIDE_INT offset2, size2, maxsize2;
1449       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1450       size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1451       if ((unsigned HOST_WIDE_INT)size2 / 8
1452 	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1453 	  && maxsize2 != -1
1454 	  && operand_equal_p (base, base2, 0)
1455 	  && offset2 <= offset
1456 	  && offset2 + size2 >= offset + maxsize)
1457 	{
1458 	  tree val = build_zero_cst (vr->type);
1459 	  return vn_reference_lookup_or_insert_for_pieces
1460 	           (vuse, vr->set, vr->type, vr->operands, val);
1461 	}
1462     }
1463 
1464   /* 2) Assignment from an empty CONSTRUCTOR.  */
1465   else if (is_gimple_reg_type (vr->type)
1466 	   && gimple_assign_single_p (def_stmt)
1467 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1468 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1469     {
1470       tree base2;
1471       HOST_WIDE_INT offset2, size2, maxsize2;
1472       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1473 				       &offset2, &size2, &maxsize2);
1474       if (maxsize2 != -1
1475 	  && operand_equal_p (base, base2, 0)
1476 	  && offset2 <= offset
1477 	  && offset2 + size2 >= offset + maxsize)
1478 	{
1479 	  tree val = build_zero_cst (vr->type);
1480 	  return vn_reference_lookup_or_insert_for_pieces
1481 	           (vuse, vr->set, vr->type, vr->operands, val);
1482 	}
1483     }
1484 
1485   /* 3) Assignment from a constant.  We can use folds native encode/interpret
1486      routines to extract the assigned bits.  */
1487   else if (vn_walk_kind == VN_WALKREWRITE
1488 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1489 	   && ref->size == maxsize
1490 	   && maxsize % BITS_PER_UNIT == 0
1491 	   && offset % BITS_PER_UNIT == 0
1492 	   && is_gimple_reg_type (vr->type)
1493 	   && gimple_assign_single_p (def_stmt)
1494 	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1495     {
1496       tree base2;
1497       HOST_WIDE_INT offset2, size2, maxsize2;
1498       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1499 				       &offset2, &size2, &maxsize2);
1500       if (maxsize2 != -1
1501 	  && maxsize2 == size2
1502 	  && size2 % BITS_PER_UNIT == 0
1503 	  && offset2 % BITS_PER_UNIT == 0
1504 	  && operand_equal_p (base, base2, 0)
1505 	  && offset2 <= offset
1506 	  && offset2 + size2 >= offset + maxsize)
1507 	{
1508 	  /* We support up to 512-bit values (for V8DFmode).  */
1509 	  unsigned char buffer[64];
1510 	  int len;
1511 
1512 	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1513 				    buffer, sizeof (buffer));
1514 	  if (len > 0)
1515 	    {
1516 	      tree val = native_interpret_expr (vr->type,
1517 						buffer
1518 						+ ((offset - offset2)
1519 						   / BITS_PER_UNIT),
1520 						ref->size / BITS_PER_UNIT);
1521 	      if (val)
1522 		return vn_reference_lookup_or_insert_for_pieces
1523 		         (vuse, vr->set, vr->type, vr->operands, val);
1524 	    }
1525 	}
1526     }
1527 
1528   /* 4) Assignment from an SSA name which definition we may be able
1529      to access pieces from.  */
1530   else if (ref->size == maxsize
1531 	   && is_gimple_reg_type (vr->type)
1532 	   && gimple_assign_single_p (def_stmt)
1533 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1534     {
1535       tree rhs1 = gimple_assign_rhs1 (def_stmt);
1536       gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1537       if (is_gimple_assign (def_stmt2)
1538 	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1539 	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1540 	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1541 	{
1542 	  tree base2;
1543 	  HOST_WIDE_INT offset2, size2, maxsize2, off;
1544 	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1545 					   &offset2, &size2, &maxsize2);
1546 	  off = offset - offset2;
1547 	  if (maxsize2 != -1
1548 	      && maxsize2 == size2
1549 	      && operand_equal_p (base, base2, 0)
1550 	      && offset2 <= offset
1551 	      && offset2 + size2 >= offset + maxsize)
1552 	    {
1553 	      tree val = NULL_TREE;
1554 	      HOST_WIDE_INT elsz
1555 		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1556 	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1557 		{
1558 		  if (off == 0)
1559 		    val = gimple_assign_rhs1 (def_stmt2);
1560 		  else if (off == elsz)
1561 		    val = gimple_assign_rhs2 (def_stmt2);
1562 		}
1563 	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1564 		       && off % elsz == 0)
1565 		{
1566 		  tree ctor = gimple_assign_rhs1 (def_stmt2);
1567 		  unsigned i = off / elsz;
1568 		  if (i < CONSTRUCTOR_NELTS (ctor))
1569 		    {
1570 		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1571 		      if (compare_tree_int (elt->index, i) == 0)
1572 			val = elt->value;
1573 		    }
1574 		}
1575 	      if (val)
1576 		return vn_reference_lookup_or_insert_for_pieces
1577 		         (vuse, vr->set, vr->type, vr->operands, val);
1578 	    }
1579 	}
1580     }
1581 
1582   /* 5) For aggregate copies translate the reference through them if
1583      the copy kills ref.  */
1584   else if (vn_walk_kind == VN_WALKREWRITE
1585 	   && gimple_assign_single_p (def_stmt)
1586 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
1587 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1588 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1589     {
1590       tree base2;
1591       HOST_WIDE_INT offset2, size2, maxsize2;
1592       int i, j;
1593       VEC (vn_reference_op_s, heap) *rhs = NULL;
1594       vn_reference_op_t vro;
1595       ao_ref r;
1596 
1597       if (!lhs_ref_ok)
1598 	return (void *)-1;
1599 
1600       /* See if the assignment kills REF.  */
1601       base2 = ao_ref_base (&lhs_ref);
1602       offset2 = lhs_ref.offset;
1603       size2 = lhs_ref.size;
1604       maxsize2 = lhs_ref.max_size;
1605       if (maxsize2 == -1
1606 	  || (base != base2 && !operand_equal_p (base, base2, 0))
1607 	  || offset2 > offset
1608 	  || offset2 + size2 < offset + maxsize)
1609 	return (void *)-1;
1610 
1611       /* Find the common base of ref and the lhs.  lhs_ops already
1612          contains valueized operands for the lhs.  */
1613       i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1614       j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
1615       while (j >= 0 && i >= 0
1616 	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1617 					       vr->operands, i),
1618 				    VEC_index (vn_reference_op_s, lhs_ops, j)))
1619 	{
1620 	  i--;
1621 	  j--;
1622 	}
1623 
1624       /* ???  The innermost op should always be a MEM_REF and we already
1625          checked that the assignment to the lhs kills vr.  Thus for
1626 	 aggregate copies using char[] types the vn_reference_op_eq
1627 	 may fail when comparing types for compatibility.  But we really
1628 	 don't care here - further lookups with the rewritten operands
1629 	 will simply fail if we messed up types too badly.  */
1630       if (j == 0 && i >= 0
1631 	  && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
1632 	  && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
1633 	  && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
1634 	      == VEC_index (vn_reference_op_s, vr->operands, i)->off))
1635 	i--, j--;
1636 
1637       /* i now points to the first additional op.
1638 	 ???  LHS may not be completely contained in VR, one or more
1639 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
1640 	 try handling outermost VIEW_CONVERT_EXPRs.  */
1641       if (j != -1)
1642 	return (void *)-1;
1643 
1644       /* Now re-write REF to be based on the rhs of the assignment.  */
1645       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1646       /* We need to pre-pend vr->operands[0..i] to rhs.  */
1647       if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1648 	  > VEC_length (vn_reference_op_s, vr->operands))
1649 	{
1650 	  VEC (vn_reference_op_s, heap) *old = vr->operands;
1651 	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1652 			 i + 1 + VEC_length (vn_reference_op_s, rhs));
1653 	  if (old == shared_lookup_references
1654 	      && vr->operands != old)
1655 	    shared_lookup_references = NULL;
1656 	}
1657       else
1658 	VEC_truncate (vn_reference_op_s, vr->operands,
1659 		      i + 1 + VEC_length (vn_reference_op_s, rhs));
1660       FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1661 	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1662       VEC_free (vn_reference_op_s, heap, rhs);
1663       vr->operands = valueize_refs (vr->operands);
1664       vr->hashcode = vn_reference_compute_hash (vr);
1665 
1666       /* Adjust *ref from the new operands.  */
1667       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1668 	return (void *)-1;
1669       /* This can happen with bitfields.  */
1670       if (ref->size != r.size)
1671 	return (void *)-1;
1672       *ref = r;
1673 
1674       /* Do not update last seen VUSE after translating.  */
1675       last_vuse_ptr = NULL;
1676 
1677       /* Keep looking for the adjusted *REF / VR pair.  */
1678       return NULL;
1679     }
1680 
1681   /* 6) For memcpy copies translate the reference through them if
1682      the copy kills ref.  */
1683   else if (vn_walk_kind == VN_WALKREWRITE
1684 	   && is_gimple_reg_type (vr->type)
1685 	   /* ???  Handle BCOPY as well.  */
1686 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1687 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1688 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1689 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1690 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1691 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1692 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1693 	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1694     {
1695       tree lhs, rhs;
1696       ao_ref r;
1697       HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1698       vn_reference_op_s op;
1699       HOST_WIDE_INT at;
1700 
1701 
1702       /* Only handle non-variable, addressable refs.  */
1703       if (ref->size != maxsize
1704 	  || offset % BITS_PER_UNIT != 0
1705 	  || ref->size % BITS_PER_UNIT != 0)
1706 	return (void *)-1;
1707 
1708       /* Extract a pointer base and an offset for the destination.  */
1709       lhs = gimple_call_arg (def_stmt, 0);
1710       lhs_offset = 0;
1711       if (TREE_CODE (lhs) == SSA_NAME)
1712 	lhs = SSA_VAL (lhs);
1713       if (TREE_CODE (lhs) == ADDR_EXPR)
1714 	{
1715 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1716 						    &lhs_offset);
1717 	  if (!tem)
1718 	    return (void *)-1;
1719 	  if (TREE_CODE (tem) == MEM_REF
1720 	      && host_integerp (TREE_OPERAND (tem, 1), 1))
1721 	    {
1722 	      lhs = TREE_OPERAND (tem, 0);
1723 	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1724 	    }
1725 	  else if (DECL_P (tem))
1726 	    lhs = build_fold_addr_expr (tem);
1727 	  else
1728 	    return (void *)-1;
1729 	}
1730       if (TREE_CODE (lhs) != SSA_NAME
1731 	  && TREE_CODE (lhs) != ADDR_EXPR)
1732 	return (void *)-1;
1733 
1734       /* Extract a pointer base and an offset for the source.  */
1735       rhs = gimple_call_arg (def_stmt, 1);
1736       rhs_offset = 0;
1737       if (TREE_CODE (rhs) == SSA_NAME)
1738 	rhs = SSA_VAL (rhs);
1739       if (TREE_CODE (rhs) == ADDR_EXPR)
1740 	{
1741 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1742 						    &rhs_offset);
1743 	  if (!tem)
1744 	    return (void *)-1;
1745 	  if (TREE_CODE (tem) == MEM_REF
1746 	      && host_integerp (TREE_OPERAND (tem, 1), 1))
1747 	    {
1748 	      rhs = TREE_OPERAND (tem, 0);
1749 	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1750 	    }
1751 	  else if (DECL_P (tem))
1752 	    rhs = build_fold_addr_expr (tem);
1753 	  else
1754 	    return (void *)-1;
1755 	}
1756       if (TREE_CODE (rhs) != SSA_NAME
1757 	  && TREE_CODE (rhs) != ADDR_EXPR)
1758 	return (void *)-1;
1759 
1760       copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1761 
1762       /* The bases of the destination and the references have to agree.  */
1763       if ((TREE_CODE (base) != MEM_REF
1764 	   && !DECL_P (base))
1765 	  || (TREE_CODE (base) == MEM_REF
1766 	      && (TREE_OPERAND (base, 0) != lhs
1767 		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
1768 	  || (DECL_P (base)
1769 	      && (TREE_CODE (lhs) != ADDR_EXPR
1770 		  || TREE_OPERAND (lhs, 0) != base)))
1771 	return (void *)-1;
1772 
1773       /* And the access has to be contained within the memcpy destination.  */
1774       at = offset / BITS_PER_UNIT;
1775       if (TREE_CODE (base) == MEM_REF)
1776 	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1777       if (lhs_offset > at
1778 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1779 	return (void *)-1;
1780 
1781       /* Make room for 2 operands in the new reference.  */
1782       if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1783 	{
1784 	  VEC (vn_reference_op_s, heap) *old = vr->operands;
1785 	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
1786 	  if (old == shared_lookup_references
1787 	      && vr->operands != old)
1788 	    shared_lookup_references = NULL;
1789 	}
1790       else
1791 	VEC_truncate (vn_reference_op_s, vr->operands, 2);
1792 
1793       /* The looked-through reference is a simple MEM_REF.  */
1794       memset (&op, 0, sizeof (op));
1795       op.type = vr->type;
1796       op.opcode = MEM_REF;
1797       op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1798       op.off = at - lhs_offset + rhs_offset;
1799       VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1800       op.type = TREE_TYPE (rhs);
1801       op.opcode = TREE_CODE (rhs);
1802       op.op0 = rhs;
1803       op.off = -1;
1804       VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1805       vr->hashcode = vn_reference_compute_hash (vr);
1806 
1807       /* Adjust *ref from the new operands.  */
1808       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1809 	return (void *)-1;
1810       /* This can happen with bitfields.  */
1811       if (ref->size != r.size)
1812 	return (void *)-1;
1813       *ref = r;
1814 
1815       /* Do not update last seen VUSE after translating.  */
1816       last_vuse_ptr = NULL;
1817 
1818       /* Keep looking for the adjusted *REF / VR pair.  */
1819       return NULL;
1820     }
1821 
1822   /* Bail out and stop walking.  */
1823   return (void *)-1;
1824 }
1825 
1826 /* Lookup a reference operation by it's parts, in the current hash table.
1827    Returns the resulting value number if it exists in the hash table,
1828    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1829    vn_reference_t stored in the hashtable if something is found.  */
1830 
1831 tree
1832 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1833 			    VEC (vn_reference_op_s, heap) *operands,
1834 			    vn_reference_t *vnresult, vn_lookup_kind kind)
1835 {
1836   struct vn_reference_s vr1;
1837   vn_reference_t tmp;
1838   tree cst;
1839 
1840   if (!vnresult)
1841     vnresult = &tmp;
1842   *vnresult = NULL;
1843 
1844   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1845   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1846   VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1847 		 VEC_length (vn_reference_op_s, operands));
1848   memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1849 	  VEC_address (vn_reference_op_s, operands),
1850 	  sizeof (vn_reference_op_s)
1851 	  * VEC_length (vn_reference_op_s, operands));
1852   vr1.operands = operands = shared_lookup_references
1853     = valueize_refs (shared_lookup_references);
1854   vr1.type = type;
1855   vr1.set = set;
1856   vr1.hashcode = vn_reference_compute_hash (&vr1);
1857   if ((cst = fully_constant_vn_reference_p (&vr1)))
1858     return cst;
1859 
1860   vn_reference_lookup_1 (&vr1, vnresult);
1861   if (!*vnresult
1862       && kind != VN_NOWALK
1863       && vr1.vuse)
1864     {
1865       ao_ref r;
1866       vn_walk_kind = kind;
1867       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1868 	*vnresult =
1869 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1870 						  vn_reference_lookup_2,
1871 						  vn_reference_lookup_3, &vr1);
1872       if (vr1.operands != operands)
1873 	VEC_free (vn_reference_op_s, heap, vr1.operands);
1874     }
1875 
1876   if (*vnresult)
1877      return (*vnresult)->result;
1878 
1879   return NULL_TREE;
1880 }
1881 
1882 /* Lookup OP in the current hash table, and return the resulting value
1883    number if it exists in the hash table.  Return NULL_TREE if it does
1884    not exist in the hash table or if the result field of the structure
1885    was NULL..  VNRESULT will be filled in with the vn_reference_t
1886    stored in the hashtable if one exists.  */
1887 
1888 tree
1889 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1890 		     vn_reference_t *vnresult)
1891 {
1892   VEC (vn_reference_op_s, heap) *operands;
1893   struct vn_reference_s vr1;
1894   tree cst;
1895   bool valuezied_anything;
1896 
1897   if (vnresult)
1898     *vnresult = NULL;
1899 
1900   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1901   vr1.operands = operands
1902     = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
1903   vr1.type = TREE_TYPE (op);
1904   vr1.set = get_alias_set (op);
1905   vr1.hashcode = vn_reference_compute_hash (&vr1);
1906   if ((cst = fully_constant_vn_reference_p (&vr1)))
1907     return cst;
1908 
1909   if (kind != VN_NOWALK
1910       && vr1.vuse)
1911     {
1912       vn_reference_t wvnresult;
1913       ao_ref r;
1914       /* Make sure to use a valueized reference if we valueized anything.
1915          Otherwise preserve the full reference for advanced TBAA.  */
1916       if (!valuezied_anything
1917 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1918 					     vr1.operands))
1919 	ao_ref_init (&r, op);
1920       vn_walk_kind = kind;
1921       wvnresult =
1922 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1923 						vn_reference_lookup_2,
1924 						vn_reference_lookup_3, &vr1);
1925       if (vr1.operands != operands)
1926 	VEC_free (vn_reference_op_s, heap, vr1.operands);
1927       if (wvnresult)
1928 	{
1929 	  if (vnresult)
1930 	    *vnresult = wvnresult;
1931 	  return wvnresult->result;
1932 	}
1933 
1934       return NULL_TREE;
1935     }
1936 
1937   return vn_reference_lookup_1 (&vr1, vnresult);
1938 }
1939 
1940 
1941 /* Insert OP into the current hash table with a value number of
1942    RESULT, and return the resulting reference structure we created.  */
1943 
1944 vn_reference_t
1945 vn_reference_insert (tree op, tree result, tree vuse)
1946 {
1947   void **slot;
1948   vn_reference_t vr1;
1949 
1950   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1951   if (TREE_CODE (result) == SSA_NAME)
1952     vr1->value_id = VN_INFO (result)->value_id;
1953   else
1954     vr1->value_id = get_or_alloc_constant_value_id (result);
1955   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1956   vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1957   vr1->type = TREE_TYPE (op);
1958   vr1->set = get_alias_set (op);
1959   vr1->hashcode = vn_reference_compute_hash (vr1);
1960   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1961 
1962   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1963 				   INSERT);
1964 
1965   /* Because we lookup stores using vuses, and value number failures
1966      using the vdefs (see visit_reference_op_store for how and why),
1967      it's possible that on failure we may try to insert an already
1968      inserted store.  This is not wrong, there is no ssa name for a
1969      store that we could use as a differentiator anyway.  Thus, unlike
1970      the other lookup functions, you cannot gcc_assert (!*slot)
1971      here.  */
1972 
1973   /* But free the old slot in case of a collision.  */
1974   if (*slot)
1975     free_reference (*slot);
1976 
1977   *slot = vr1;
1978   return vr1;
1979 }
1980 
1981 /* Insert a reference by it's pieces into the current hash table with
1982    a value number of RESULT.  Return the resulting reference
1983    structure we created.  */
1984 
1985 vn_reference_t
1986 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1987 			    VEC (vn_reference_op_s, heap) *operands,
1988 			    tree result, unsigned int value_id)
1989 
1990 {
1991   void **slot;
1992   vn_reference_t vr1;
1993 
1994   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1995   vr1->value_id = value_id;
1996   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1997   vr1->operands = valueize_refs (operands);
1998   vr1->type = type;
1999   vr1->set = set;
2000   vr1->hashcode = vn_reference_compute_hash (vr1);
2001   if (result && TREE_CODE (result) == SSA_NAME)
2002     result = SSA_VAL (result);
2003   vr1->result = result;
2004 
2005   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2006 				   INSERT);
2007 
2008   /* At this point we should have all the things inserted that we have
2009      seen before, and we should never try inserting something that
2010      already exists.  */
2011   gcc_assert (!*slot);
2012   if (*slot)
2013     free_reference (*slot);
2014 
2015   *slot = vr1;
2016   return vr1;
2017 }
2018 
2019 /* Compute and return the hash value for nary operation VBO1.  */
2020 
2021 hashval_t
2022 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2023 {
2024   hashval_t hash;
2025   unsigned i;
2026 
2027   for (i = 0; i < vno1->length; ++i)
2028     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2029       vno1->op[i] = SSA_VAL (vno1->op[i]);
2030 
2031   if (vno1->length == 2
2032       && commutative_tree_code (vno1->opcode)
2033       && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2034     {
2035       tree temp = vno1->op[0];
2036       vno1->op[0] = vno1->op[1];
2037       vno1->op[1] = temp;
2038     }
2039 
2040   hash = iterative_hash_hashval_t (vno1->opcode, 0);
2041   for (i = 0; i < vno1->length; ++i)
2042     hash = iterative_hash_expr (vno1->op[i], hash);
2043 
2044   return hash;
2045 }
2046 
2047 /* Return the computed hashcode for nary operation P1.  */
2048 
2049 static hashval_t
2050 vn_nary_op_hash (const void *p1)
2051 {
2052   const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2053   return vno1->hashcode;
2054 }
2055 
2056 /* Compare nary operations P1 and P2 and return true if they are
2057    equivalent.  */
2058 
2059 int
2060 vn_nary_op_eq (const void *p1, const void *p2)
2061 {
2062   const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2063   const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2064   unsigned i;
2065 
2066   if (vno1->hashcode != vno2->hashcode)
2067     return false;
2068 
2069   if (vno1->length != vno2->length)
2070     return false;
2071 
2072   if (vno1->opcode != vno2->opcode
2073       || !types_compatible_p (vno1->type, vno2->type))
2074     return false;
2075 
2076   for (i = 0; i < vno1->length; ++i)
2077     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2078       return false;
2079 
2080   return true;
2081 }
2082 
2083 /* Initialize VNO from the pieces provided.  */
2084 
2085 static void
2086 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2087 			     enum tree_code code, tree type, tree *ops)
2088 {
2089   vno->opcode = code;
2090   vno->length = length;
2091   vno->type = type;
2092   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2093 }
2094 
2095 /* Initialize VNO from OP.  */
2096 
2097 static void
2098 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2099 {
2100   unsigned i;
2101 
2102   vno->opcode = TREE_CODE (op);
2103   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2104   vno->type = TREE_TYPE (op);
2105   for (i = 0; i < vno->length; ++i)
2106     vno->op[i] = TREE_OPERAND (op, i);
2107 }
2108 
2109 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2110 
2111 static unsigned int
2112 vn_nary_length_from_stmt (gimple stmt)
2113 {
2114   switch (gimple_assign_rhs_code (stmt))
2115     {
2116     case REALPART_EXPR:
2117     case IMAGPART_EXPR:
2118     case VIEW_CONVERT_EXPR:
2119       return 1;
2120 
2121     case CONSTRUCTOR:
2122       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2123 
2124     default:
2125       return gimple_num_ops (stmt) - 1;
2126     }
2127 }
2128 
2129 /* Initialize VNO from STMT.  */
2130 
2131 static void
2132 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2133 {
2134   unsigned i;
2135 
2136   vno->opcode = gimple_assign_rhs_code (stmt);
2137   vno->type = gimple_expr_type (stmt);
2138   switch (vno->opcode)
2139     {
2140     case REALPART_EXPR:
2141     case IMAGPART_EXPR:
2142     case VIEW_CONVERT_EXPR:
2143       vno->length = 1;
2144       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2145       break;
2146 
2147     case CONSTRUCTOR:
2148       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2149       for (i = 0; i < vno->length; ++i)
2150 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2151       break;
2152 
2153     default:
2154       vno->length = gimple_num_ops (stmt) - 1;
2155       for (i = 0; i < vno->length; ++i)
2156 	vno->op[i] = gimple_op (stmt, i + 1);
2157     }
2158 }
2159 
2160 /* Compute the hashcode for VNO and look for it in the hash table;
2161    return the resulting value number if it exists in the hash table.
2162    Return NULL_TREE if it does not exist in the hash table or if the
2163    result field of the operation is NULL.  VNRESULT will contain the
2164    vn_nary_op_t from the hashtable if it exists.  */
2165 
2166 static tree
2167 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2168 {
2169   void **slot;
2170 
2171   if (vnresult)
2172     *vnresult = NULL;
2173 
2174   vno->hashcode = vn_nary_op_compute_hash (vno);
2175   slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2176 				   NO_INSERT);
2177   if (!slot && current_info == optimistic_info)
2178     slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2179 				     NO_INSERT);
2180   if (!slot)
2181     return NULL_TREE;
2182   if (vnresult)
2183     *vnresult = (vn_nary_op_t)*slot;
2184   return ((vn_nary_op_t)*slot)->result;
2185 }
2186 
2187 /* Lookup a n-ary operation by its pieces and return the resulting value
2188    number if it exists in the hash table.  Return NULL_TREE if it does
2189    not exist in the hash table or if the result field of the operation
2190    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2191    if it exists.  */
2192 
2193 tree
2194 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2195 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2196 {
2197   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2198 				  sizeof_vn_nary_op (length));
2199   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2200   return vn_nary_op_lookup_1 (vno1, vnresult);
2201 }
2202 
2203 /* Lookup OP in the current hash table, and return the resulting value
2204    number if it exists in the hash table.  Return NULL_TREE if it does
2205    not exist in the hash table or if the result field of the operation
2206    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2207    if it exists.  */
2208 
2209 tree
2210 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2211 {
2212   vn_nary_op_t vno1
2213     = XALLOCAVAR (struct vn_nary_op_s,
2214 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2215   init_vn_nary_op_from_op (vno1, op);
2216   return vn_nary_op_lookup_1 (vno1, vnresult);
2217 }
2218 
2219 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2220    value number if it exists in the hash table.  Return NULL_TREE if
2221    it does not exist in the hash table.  VNRESULT will contain the
2222    vn_nary_op_t from the hashtable if it exists.  */
2223 
2224 tree
2225 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2226 {
2227   vn_nary_op_t vno1
2228     = XALLOCAVAR (struct vn_nary_op_s,
2229 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2230   init_vn_nary_op_from_stmt (vno1, stmt);
2231   return vn_nary_op_lookup_1 (vno1, vnresult);
2232 }
2233 
2234 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2235 
2236 static vn_nary_op_t
2237 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2238 {
2239   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2240 }
2241 
2242 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2243    obstack.  */
2244 
2245 static vn_nary_op_t
2246 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2247 {
2248   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2249 					       &current_info->nary_obstack);
2250 
2251   vno1->value_id = value_id;
2252   vno1->length = length;
2253   vno1->result = result;
2254 
2255   return vno1;
2256 }
2257 
2258 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2259    VNO->HASHCODE first.  */
2260 
2261 static vn_nary_op_t
2262 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2263 {
2264   void **slot;
2265 
2266   if (compute_hash)
2267     vno->hashcode = vn_nary_op_compute_hash (vno);
2268 
2269   slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2270   gcc_assert (!*slot);
2271 
2272   *slot = vno;
2273   return vno;
2274 }
2275 
2276 /* Insert a n-ary operation into the current hash table using it's
2277    pieces.  Return the vn_nary_op_t structure we created and put in
2278    the hashtable.  */
2279 
2280 vn_nary_op_t
2281 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2282 			  tree type, tree *ops,
2283 			  tree result, unsigned int value_id)
2284 {
2285   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2286   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2287   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2288 }
2289 
2290 /* Insert OP into the current hash table with a value number of
2291    RESULT.  Return the vn_nary_op_t structure we created and put in
2292    the hashtable.  */
2293 
2294 vn_nary_op_t
2295 vn_nary_op_insert (tree op, tree result)
2296 {
2297   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2298   vn_nary_op_t vno1;
2299 
2300   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2301   init_vn_nary_op_from_op (vno1, op);
2302   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2303 }
2304 
2305 /* Insert the rhs of STMT into the current hash table with a value number of
2306    RESULT.  */
2307 
2308 vn_nary_op_t
2309 vn_nary_op_insert_stmt (gimple stmt, tree result)
2310 {
2311   vn_nary_op_t vno1
2312     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2313 			result, VN_INFO (result)->value_id);
2314   init_vn_nary_op_from_stmt (vno1, stmt);
2315   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2316 }
2317 
2318 /* Compute a hashcode for PHI operation VP1 and return it.  */
2319 
2320 static inline hashval_t
2321 vn_phi_compute_hash (vn_phi_t vp1)
2322 {
2323   hashval_t result;
2324   int i;
2325   tree phi1op;
2326   tree type;
2327 
2328   result = vp1->block->index;
2329 
2330   /* If all PHI arguments are constants we need to distinguish
2331      the PHI node via its type.  */
2332   type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2333   result += (INTEGRAL_TYPE_P (type)
2334 	     + (INTEGRAL_TYPE_P (type)
2335 		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2336 
2337   FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2338     {
2339       if (phi1op == VN_TOP)
2340 	continue;
2341       result = iterative_hash_expr (phi1op, result);
2342     }
2343 
2344   return result;
2345 }
2346 
2347 /* Return the computed hashcode for phi operation P1.  */
2348 
2349 static hashval_t
2350 vn_phi_hash (const void *p1)
2351 {
2352   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2353   return vp1->hashcode;
2354 }
2355 
2356 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
2357 
2358 static int
2359 vn_phi_eq (const void *p1, const void *p2)
2360 {
2361   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2362   const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2363 
2364   if (vp1->hashcode != vp2->hashcode)
2365     return false;
2366 
2367   if (vp1->block == vp2->block)
2368     {
2369       int i;
2370       tree phi1op;
2371 
2372       /* If the PHI nodes do not have compatible types
2373 	 they are not the same.  */
2374       if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2375 			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2376 	return false;
2377 
2378       /* Any phi in the same block will have it's arguments in the
2379 	 same edge order, because of how we store phi nodes.  */
2380       FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2381 	{
2382 	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
2383 	  if (phi1op == VN_TOP || phi2op == VN_TOP)
2384 	    continue;
2385 	  if (!expressions_equal_p (phi1op, phi2op))
2386 	    return false;
2387 	}
2388       return true;
2389     }
2390   return false;
2391 }
2392 
2393 static VEC(tree, heap) *shared_lookup_phiargs;
2394 
2395 /* Lookup PHI in the current hash table, and return the resulting
2396    value number if it exists in the hash table.  Return NULL_TREE if
2397    it does not exist in the hash table. */
2398 
2399 static tree
2400 vn_phi_lookup (gimple phi)
2401 {
2402   void **slot;
2403   struct vn_phi_s vp1;
2404   unsigned i;
2405 
2406   VEC_truncate (tree, shared_lookup_phiargs, 0);
2407 
2408   /* Canonicalize the SSA_NAME's to their value number.  */
2409   for (i = 0; i < gimple_phi_num_args (phi); i++)
2410     {
2411       tree def = PHI_ARG_DEF (phi, i);
2412       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2413       VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2414     }
2415   vp1.phiargs = shared_lookup_phiargs;
2416   vp1.block = gimple_bb (phi);
2417   vp1.hashcode = vn_phi_compute_hash (&vp1);
2418   slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2419 				   NO_INSERT);
2420   if (!slot && current_info == optimistic_info)
2421     slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2422 				     NO_INSERT);
2423   if (!slot)
2424     return NULL_TREE;
2425   return ((vn_phi_t)*slot)->result;
2426 }
2427 
2428 /* Insert PHI into the current hash table with a value number of
2429    RESULT.  */
2430 
2431 static vn_phi_t
2432 vn_phi_insert (gimple phi, tree result)
2433 {
2434   void **slot;
2435   vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2436   unsigned i;
2437   VEC (tree, heap) *args = NULL;
2438 
2439   /* Canonicalize the SSA_NAME's to their value number.  */
2440   for (i = 0; i < gimple_phi_num_args (phi); i++)
2441     {
2442       tree def = PHI_ARG_DEF (phi, i);
2443       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2444       VEC_safe_push (tree, heap, args, def);
2445     }
2446   vp1->value_id = VN_INFO (result)->value_id;
2447   vp1->phiargs = args;
2448   vp1->block = gimple_bb (phi);
2449   vp1->result = result;
2450   vp1->hashcode = vn_phi_compute_hash (vp1);
2451 
2452   slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2453 				   INSERT);
2454 
2455   /* Because we iterate over phi operations more than once, it's
2456      possible the slot might already exist here, hence no assert.*/
2457   *slot = vp1;
2458   return vp1;
2459 }
2460 
2461 
2462 /* Print set of components in strongly connected component SCC to OUT. */
2463 
2464 static void
2465 print_scc (FILE *out, VEC (tree, heap) *scc)
2466 {
2467   tree var;
2468   unsigned int i;
2469 
2470   fprintf (out, "SCC consists of:");
2471   FOR_EACH_VEC_ELT (tree, scc, i, var)
2472     {
2473       fprintf (out, " ");
2474       print_generic_expr (out, var, 0);
2475     }
2476   fprintf (out, "\n");
2477 }
2478 
2479 /* Set the value number of FROM to TO, return true if it has changed
2480    as a result.  */
2481 
2482 static inline bool
2483 set_ssa_val_to (tree from, tree to)
2484 {
2485   tree currval = SSA_VAL (from);
2486   HOST_WIDE_INT toff, coff;
2487 
2488   if (from != to)
2489     {
2490       if (currval == from)
2491 	{
2492 	  if (dump_file && (dump_flags & TDF_DETAILS))
2493 	    {
2494 	      fprintf (dump_file, "Not changing value number of ");
2495 	      print_generic_expr (dump_file, from, 0);
2496 	      fprintf (dump_file, " from VARYING to ");
2497 	      print_generic_expr (dump_file, to, 0);
2498 	      fprintf (dump_file, "\n");
2499 	    }
2500 	  return false;
2501 	}
2502       else if (TREE_CODE (to) == SSA_NAME
2503 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2504 	to = from;
2505     }
2506 
2507   /* The only thing we allow as value numbers are VN_TOP, ssa_names
2508      and invariants.  So assert that here.  */
2509   gcc_assert (to != NULL_TREE
2510 	      && (to == VN_TOP
2511 		  || TREE_CODE (to) == SSA_NAME
2512 		  || is_gimple_min_invariant (to)));
2513 
2514   if (dump_file && (dump_flags & TDF_DETAILS))
2515     {
2516       fprintf (dump_file, "Setting value number of ");
2517       print_generic_expr (dump_file, from, 0);
2518       fprintf (dump_file, " to ");
2519       print_generic_expr (dump_file, to, 0);
2520     }
2521 
2522   if (currval != to
2523       && !operand_equal_p (currval, to, OEP_PURE_SAME)
2524       /* ???  For addresses involving volatile objects or types operand_equal_p
2525 	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
2526 	 getting invariant gimple addresses here, so can use
2527 	 get_addr_base_and_unit_offset to do this comparison.  */
2528       && !(TREE_CODE (currval) == ADDR_EXPR
2529 	   && TREE_CODE (to) == ADDR_EXPR
2530 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
2531 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
2532 	   && coff == toff))
2533     {
2534       VN_INFO (from)->valnum = to;
2535       if (dump_file && (dump_flags & TDF_DETAILS))
2536 	fprintf (dump_file, " (changed)\n");
2537       return true;
2538     }
2539   if (dump_file && (dump_flags & TDF_DETAILS))
2540     fprintf (dump_file, "\n");
2541   return false;
2542 }
2543 
2544 /* Set all definitions in STMT to value number to themselves.
2545    Return true if a value number changed. */
2546 
2547 static bool
2548 defs_to_varying (gimple stmt)
2549 {
2550   bool changed = false;
2551   ssa_op_iter iter;
2552   def_operand_p defp;
2553 
2554   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2555     {
2556       tree def = DEF_FROM_PTR (defp);
2557 
2558       VN_INFO (def)->use_processed = true;
2559       changed |= set_ssa_val_to (def, def);
2560     }
2561   return changed;
2562 }
2563 
2564 static bool expr_has_constants (tree expr);
2565 static tree valueize_expr (tree expr);
2566 
2567 /* Visit a copy between LHS and RHS, return true if the value number
2568    changed.  */
2569 
2570 static bool
2571 visit_copy (tree lhs, tree rhs)
2572 {
2573   /* Follow chains of copies to their destination.  */
2574   while (TREE_CODE (rhs) == SSA_NAME
2575 	 && SSA_VAL (rhs) != rhs)
2576     rhs = SSA_VAL (rhs);
2577 
2578   /* The copy may have a more interesting constant filled expression
2579      (we don't, since we know our RHS is just an SSA name).  */
2580   if (TREE_CODE (rhs) == SSA_NAME)
2581     {
2582       VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2583       VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2584     }
2585 
2586   return set_ssa_val_to (lhs, rhs);
2587 }
2588 
2589 /* Visit a nary operator RHS, value number it, and return true if the
2590    value number of LHS has changed as a result.  */
2591 
2592 static bool
2593 visit_nary_op (tree lhs, gimple stmt)
2594 {
2595   bool changed = false;
2596   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2597 
2598   if (result)
2599     changed = set_ssa_val_to (lhs, result);
2600   else
2601     {
2602       changed = set_ssa_val_to (lhs, lhs);
2603       vn_nary_op_insert_stmt (stmt, lhs);
2604     }
2605 
2606   return changed;
2607 }
2608 
2609 /* Visit a call STMT storing into LHS.  Return true if the value number
2610    of the LHS has changed as a result.  */
2611 
2612 static bool
2613 visit_reference_op_call (tree lhs, gimple stmt)
2614 {
2615   bool changed = false;
2616   struct vn_reference_s vr1;
2617   tree result;
2618   tree vuse = gimple_vuse (stmt);
2619 
2620   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2621   vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2622   vr1.type = gimple_expr_type (stmt);
2623   vr1.set = 0;
2624   vr1.hashcode = vn_reference_compute_hash (&vr1);
2625   result = vn_reference_lookup_1 (&vr1, NULL);
2626   if (result)
2627     {
2628       changed = set_ssa_val_to (lhs, result);
2629       if (TREE_CODE (result) == SSA_NAME
2630 	  && VN_INFO (result)->has_constants)
2631 	VN_INFO (lhs)->has_constants = true;
2632     }
2633   else
2634     {
2635       void **slot;
2636       vn_reference_t vr2;
2637       changed = set_ssa_val_to (lhs, lhs);
2638       vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2639       vr2->vuse = vr1.vuse;
2640       vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2641       vr2->type = vr1.type;
2642       vr2->set = vr1.set;
2643       vr2->hashcode = vr1.hashcode;
2644       vr2->result = lhs;
2645       slot = htab_find_slot_with_hash (current_info->references,
2646 				       vr2, vr2->hashcode, INSERT);
2647       if (*slot)
2648 	free_reference (*slot);
2649       *slot = vr2;
2650     }
2651 
2652   return changed;
2653 }
2654 
2655 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2656    and return true if the value number of the LHS has changed as a result.  */
2657 
2658 static bool
2659 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2660 {
2661   bool changed = false;
2662   tree last_vuse;
2663   tree result;
2664 
2665   last_vuse = gimple_vuse (stmt);
2666   last_vuse_ptr = &last_vuse;
2667   result = vn_reference_lookup (op, gimple_vuse (stmt),
2668 				default_vn_walk_kind, NULL);
2669   last_vuse_ptr = NULL;
2670 
2671   /* If we have a VCE, try looking up its operand as it might be stored in
2672      a different type.  */
2673   if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2674     result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2675     				  default_vn_walk_kind, NULL);
2676 
2677   /* We handle type-punning through unions by value-numbering based
2678      on offset and size of the access.  Be prepared to handle a
2679      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
2680   if (result
2681       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2682     {
2683       /* We will be setting the value number of lhs to the value number
2684 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2685 	 So first simplify and lookup this expression to see if it
2686 	 is already available.  */
2687       tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2688       if ((CONVERT_EXPR_P (val)
2689 	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2690 	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2691         {
2692 	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2693 	  if ((CONVERT_EXPR_P (tem)
2694 	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2695 	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2696 						    TREE_TYPE (val), tem)))
2697 	    val = tem;
2698 	}
2699       result = val;
2700       if (!is_gimple_min_invariant (val)
2701 	  && TREE_CODE (val) != SSA_NAME)
2702 	result = vn_nary_op_lookup (val, NULL);
2703       /* If the expression is not yet available, value-number lhs to
2704 	 a new SSA_NAME we create.  */
2705       if (!result)
2706         {
2707 	  result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2708 	  /* Initialize value-number information properly.  */
2709 	  VN_INFO_GET (result)->valnum = result;
2710 	  VN_INFO (result)->value_id = get_next_value_id ();
2711 	  VN_INFO (result)->expr = val;
2712 	  VN_INFO (result)->has_constants = expr_has_constants (val);
2713 	  VN_INFO (result)->needs_insertion = true;
2714 	  /* As all "inserted" statements are singleton SCCs, insert
2715 	     to the valid table.  This is strictly needed to
2716 	     avoid re-generating new value SSA_NAMEs for the same
2717 	     expression during SCC iteration over and over (the
2718 	     optimistic table gets cleared after each iteration).
2719 	     We do not need to insert into the optimistic table, as
2720 	     lookups there will fall back to the valid table.  */
2721 	  if (current_info == optimistic_info)
2722 	    {
2723 	      current_info = valid_info;
2724 	      vn_nary_op_insert (val, result);
2725 	      current_info = optimistic_info;
2726 	    }
2727 	  else
2728 	    vn_nary_op_insert (val, result);
2729 	  if (dump_file && (dump_flags & TDF_DETAILS))
2730 	    {
2731 	      fprintf (dump_file, "Inserting name ");
2732 	      print_generic_expr (dump_file, result, 0);
2733 	      fprintf (dump_file, " for expression ");
2734 	      print_generic_expr (dump_file, val, 0);
2735 	      fprintf (dump_file, "\n");
2736 	    }
2737 	}
2738     }
2739 
2740   if (result)
2741     {
2742       changed = set_ssa_val_to (lhs, result);
2743       if (TREE_CODE (result) == SSA_NAME
2744 	  && VN_INFO (result)->has_constants)
2745 	{
2746 	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2747 	  VN_INFO (lhs)->has_constants = true;
2748 	}
2749     }
2750   else
2751     {
2752       changed = set_ssa_val_to (lhs, lhs);
2753       vn_reference_insert (op, lhs, last_vuse);
2754     }
2755 
2756   return changed;
2757 }
2758 
2759 
2760 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2761    and return true if the value number of the LHS has changed as a result.  */
2762 
2763 static bool
2764 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2765 {
2766   bool changed = false;
2767   tree result;
2768   bool resultsame = false;
2769 
2770   /* First we want to lookup using the *vuses* from the store and see
2771      if there the last store to this location with the same address
2772      had the same value.
2773 
2774      The vuses represent the memory state before the store.  If the
2775      memory state, address, and value of the store is the same as the
2776      last store to this location, then this store will produce the
2777      same memory state as that store.
2778 
2779      In this case the vdef versions for this store are value numbered to those
2780      vuse versions, since they represent the same memory state after
2781      this store.
2782 
2783      Otherwise, the vdefs for the store are used when inserting into
2784      the table, since the store generates a new memory state.  */
2785 
2786   result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2787 
2788   if (result)
2789     {
2790       if (TREE_CODE (result) == SSA_NAME)
2791 	result = SSA_VAL (result);
2792       if (TREE_CODE (op) == SSA_NAME)
2793 	op = SSA_VAL (op);
2794       resultsame = expressions_equal_p (result, op);
2795     }
2796 
2797   if (!result || !resultsame)
2798     {
2799       tree vdef;
2800 
2801       if (dump_file && (dump_flags & TDF_DETAILS))
2802 	{
2803 	  fprintf (dump_file, "No store match\n");
2804 	  fprintf (dump_file, "Value numbering store ");
2805 	  print_generic_expr (dump_file, lhs, 0);
2806 	  fprintf (dump_file, " to ");
2807 	  print_generic_expr (dump_file, op, 0);
2808 	  fprintf (dump_file, "\n");
2809 	}
2810       /* Have to set value numbers before insert, since insert is
2811 	 going to valueize the references in-place.  */
2812       if ((vdef = gimple_vdef (stmt)))
2813 	{
2814 	  VN_INFO (vdef)->use_processed = true;
2815 	  changed |= set_ssa_val_to (vdef, vdef);
2816 	}
2817 
2818       /* Do not insert structure copies into the tables.  */
2819       if (is_gimple_min_invariant (op)
2820 	  || is_gimple_reg (op))
2821         vn_reference_insert (lhs, op, vdef);
2822     }
2823   else
2824     {
2825       /* We had a match, so value number the vdef to have the value
2826 	 number of the vuse it came from.  */
2827       tree def, use;
2828 
2829       if (dump_file && (dump_flags & TDF_DETAILS))
2830 	fprintf (dump_file, "Store matched earlier value,"
2831 		 "value numbering store vdefs to matching vuses.\n");
2832 
2833       def = gimple_vdef (stmt);
2834       use = gimple_vuse (stmt);
2835 
2836       VN_INFO (def)->use_processed = true;
2837       changed |= set_ssa_val_to (def, SSA_VAL (use));
2838     }
2839 
2840   return changed;
2841 }
2842 
2843 /* Visit and value number PHI, return true if the value number
2844    changed.  */
2845 
2846 static bool
2847 visit_phi (gimple phi)
2848 {
2849   bool changed = false;
2850   tree result;
2851   tree sameval = VN_TOP;
2852   bool allsame = true;
2853   unsigned i;
2854 
2855   /* TODO: We could check for this in init_sccvn, and replace this
2856      with a gcc_assert.  */
2857   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2858     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2859 
2860   /* See if all non-TOP arguments have the same value.  TOP is
2861      equivalent to everything, so we can ignore it.  */
2862   for (i = 0; i < gimple_phi_num_args (phi); i++)
2863     {
2864       tree def = PHI_ARG_DEF (phi, i);
2865 
2866       if (TREE_CODE (def) == SSA_NAME)
2867 	def = SSA_VAL (def);
2868       if (def == VN_TOP)
2869 	continue;
2870       if (sameval == VN_TOP)
2871 	{
2872 	  sameval = def;
2873 	}
2874       else
2875 	{
2876 	  if (!expressions_equal_p (def, sameval))
2877 	    {
2878 	      allsame = false;
2879 	      break;
2880 	    }
2881 	}
2882     }
2883 
2884   /* If all value numbered to the same value, the phi node has that
2885      value.  */
2886   if (allsame)
2887     {
2888       if (is_gimple_min_invariant (sameval))
2889 	{
2890 	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
2891 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
2892 	}
2893       else
2894 	{
2895 	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
2896 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
2897 	}
2898 
2899       if (TREE_CODE (sameval) == SSA_NAME)
2900 	return visit_copy (PHI_RESULT (phi), sameval);
2901 
2902       return set_ssa_val_to (PHI_RESULT (phi), sameval);
2903     }
2904 
2905   /* Otherwise, see if it is equivalent to a phi node in this block.  */
2906   result = vn_phi_lookup (phi);
2907   if (result)
2908     {
2909       if (TREE_CODE (result) == SSA_NAME)
2910 	changed = visit_copy (PHI_RESULT (phi), result);
2911       else
2912 	changed = set_ssa_val_to (PHI_RESULT (phi), result);
2913     }
2914   else
2915     {
2916       vn_phi_insert (phi, PHI_RESULT (phi));
2917       VN_INFO (PHI_RESULT (phi))->has_constants = false;
2918       VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2919       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2920     }
2921 
2922   return changed;
2923 }
2924 
2925 /* Return true if EXPR contains constants.  */
2926 
2927 static bool
2928 expr_has_constants (tree expr)
2929 {
2930   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2931     {
2932     case tcc_unary:
2933       return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2934 
2935     case tcc_binary:
2936       return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2937 	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2938       /* Constants inside reference ops are rarely interesting, but
2939 	 it can take a lot of looking to find them.  */
2940     case tcc_reference:
2941     case tcc_declaration:
2942       return false;
2943     default:
2944       return is_gimple_min_invariant (expr);
2945     }
2946   return false;
2947 }
2948 
2949 /* Return true if STMT contains constants.  */
2950 
2951 static bool
2952 stmt_has_constants (gimple stmt)
2953 {
2954   if (gimple_code (stmt) != GIMPLE_ASSIGN)
2955     return false;
2956 
2957   switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2958     {
2959     case GIMPLE_UNARY_RHS:
2960       return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2961 
2962     case GIMPLE_BINARY_RHS:
2963       return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2964 	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2965     case GIMPLE_TERNARY_RHS:
2966       return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2967 	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2968 	      || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2969     case GIMPLE_SINGLE_RHS:
2970       /* Constants inside reference ops are rarely interesting, but
2971 	 it can take a lot of looking to find them.  */
2972       return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2973     default:
2974       gcc_unreachable ();
2975     }
2976   return false;
2977 }
2978 
2979 /* Replace SSA_NAMES in expr with their value numbers, and return the
2980    result.
2981    This is performed in place. */
2982 
2983 static tree
2984 valueize_expr (tree expr)
2985 {
2986   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2987     {
2988     case tcc_binary:
2989       TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2990       /* Fallthru.  */
2991     case tcc_unary:
2992       TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2993       break;
2994     default:;
2995     }
2996   return expr;
2997 }
2998 
2999 /* Simplify the binary expression RHS, and return the result if
3000    simplified. */
3001 
3002 static tree
3003 simplify_binary_expression (gimple stmt)
3004 {
3005   tree result = NULL_TREE;
3006   tree op0 = gimple_assign_rhs1 (stmt);
3007   tree op1 = gimple_assign_rhs2 (stmt);
3008   enum tree_code code = gimple_assign_rhs_code (stmt);
3009 
3010   /* This will not catch every single case we could combine, but will
3011      catch those with constants.  The goal here is to simultaneously
3012      combine constants between expressions, but avoid infinite
3013      expansion of expressions during simplification.  */
3014   if (TREE_CODE (op0) == SSA_NAME)
3015     {
3016       if (VN_INFO (op0)->has_constants
3017 	  || TREE_CODE_CLASS (code) == tcc_comparison
3018 	  || code == COMPLEX_EXPR)
3019 	op0 = valueize_expr (vn_get_expr_for (op0));
3020       else
3021 	op0 = vn_valueize (op0);
3022     }
3023 
3024   if (TREE_CODE (op1) == SSA_NAME)
3025     {
3026       if (VN_INFO (op1)->has_constants
3027 	  || code == COMPLEX_EXPR)
3028 	op1 = valueize_expr (vn_get_expr_for (op1));
3029       else
3030 	op1 = vn_valueize (op1);
3031     }
3032 
3033   /* Pointer plus constant can be represented as invariant address.
3034      Do so to allow further propatation, see also tree forwprop.  */
3035   if (code == POINTER_PLUS_EXPR
3036       && host_integerp (op1, 1)
3037       && TREE_CODE (op0) == ADDR_EXPR
3038       && is_gimple_min_invariant (op0))
3039     return build_invariant_address (TREE_TYPE (op0),
3040 				    TREE_OPERAND (op0, 0),
3041 				    TREE_INT_CST_LOW (op1));
3042 
3043   /* Avoid folding if nothing changed.  */
3044   if (op0 == gimple_assign_rhs1 (stmt)
3045       && op1 == gimple_assign_rhs2 (stmt))
3046     return NULL_TREE;
3047 
3048   fold_defer_overflow_warnings ();
3049 
3050   result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3051   if (result)
3052     STRIP_USELESS_TYPE_CONVERSION (result);
3053 
3054   fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3055 				  stmt, 0);
3056 
3057   /* Make sure result is not a complex expression consisting
3058      of operators of operators (IE (a + b) + (a + c))
3059      Otherwise, we will end up with unbounded expressions if
3060      fold does anything at all.  */
3061   if (result && valid_gimple_rhs_p (result))
3062     return result;
3063 
3064   return NULL_TREE;
3065 }
3066 
3067 /* Simplify the unary expression RHS, and return the result if
3068    simplified. */
3069 
3070 static tree
3071 simplify_unary_expression (gimple stmt)
3072 {
3073   tree result = NULL_TREE;
3074   tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3075   enum tree_code code = gimple_assign_rhs_code (stmt);
3076 
3077   /* We handle some tcc_reference codes here that are all
3078      GIMPLE_ASSIGN_SINGLE codes.  */
3079   if (code == REALPART_EXPR
3080       || code == IMAGPART_EXPR
3081       || code == VIEW_CONVERT_EXPR
3082       || code == BIT_FIELD_REF)
3083     op0 = TREE_OPERAND (op0, 0);
3084 
3085   if (TREE_CODE (op0) != SSA_NAME)
3086     return NULL_TREE;
3087 
3088   orig_op0 = op0;
3089   if (VN_INFO (op0)->has_constants)
3090     op0 = valueize_expr (vn_get_expr_for (op0));
3091   else if (CONVERT_EXPR_CODE_P (code)
3092 	   || code == REALPART_EXPR
3093 	   || code == IMAGPART_EXPR
3094 	   || code == VIEW_CONVERT_EXPR
3095 	   || code == BIT_FIELD_REF)
3096     {
3097       /* We want to do tree-combining on conversion-like expressions.
3098          Make sure we feed only SSA_NAMEs or constants to fold though.  */
3099       tree tem = valueize_expr (vn_get_expr_for (op0));
3100       if (UNARY_CLASS_P (tem)
3101 	  || BINARY_CLASS_P (tem)
3102 	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3103 	  || TREE_CODE (tem) == SSA_NAME
3104 	  || TREE_CODE (tem) == CONSTRUCTOR
3105 	  || is_gimple_min_invariant (tem))
3106 	op0 = tem;
3107     }
3108 
3109   /* Avoid folding if nothing changed, but remember the expression.  */
3110   if (op0 == orig_op0)
3111     return NULL_TREE;
3112 
3113   if (code == BIT_FIELD_REF)
3114     {
3115       tree rhs = gimple_assign_rhs1 (stmt);
3116       result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3117 			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3118     }
3119   else
3120     result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3121   if (result)
3122     {
3123       STRIP_USELESS_TYPE_CONVERSION (result);
3124       if (valid_gimple_rhs_p (result))
3125         return result;
3126     }
3127 
3128   return NULL_TREE;
3129 }
3130 
3131 /* Try to simplify RHS using equivalences and constant folding.  */
3132 
3133 static tree
3134 try_to_simplify (gimple stmt)
3135 {
3136   enum tree_code code = gimple_assign_rhs_code (stmt);
3137   tree tem;
3138 
3139   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
3140      in this case, there is no point in doing extra work.  */
3141   if (code == SSA_NAME)
3142     return NULL_TREE;
3143 
3144   /* First try constant folding based on our current lattice.  */
3145   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3146   if (tem
3147       && (TREE_CODE (tem) == SSA_NAME
3148 	  || is_gimple_min_invariant (tem)))
3149     return tem;
3150 
3151   /* If that didn't work try combining multiple statements.  */
3152   switch (TREE_CODE_CLASS (code))
3153     {
3154     case tcc_reference:
3155       /* Fallthrough for some unary codes that can operate on registers.  */
3156       if (!(code == REALPART_EXPR
3157 	    || code == IMAGPART_EXPR
3158 	    || code == VIEW_CONVERT_EXPR
3159 	    || code == BIT_FIELD_REF))
3160 	break;
3161       /* We could do a little more with unary ops, if they expand
3162 	 into binary ops, but it's debatable whether it is worth it. */
3163     case tcc_unary:
3164       return simplify_unary_expression (stmt);
3165 
3166     case tcc_comparison:
3167     case tcc_binary:
3168       return simplify_binary_expression (stmt);
3169 
3170     default:
3171       break;
3172     }
3173 
3174   return NULL_TREE;
3175 }
3176 
3177 /* Visit and value number USE, return true if the value number
3178    changed. */
3179 
3180 static bool
3181 visit_use (tree use)
3182 {
3183   bool changed = false;
3184   gimple stmt = SSA_NAME_DEF_STMT (use);
3185 
3186   VN_INFO (use)->use_processed = true;
3187 
3188   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3189   if (dump_file && (dump_flags & TDF_DETAILS)
3190       && !SSA_NAME_IS_DEFAULT_DEF (use))
3191     {
3192       fprintf (dump_file, "Value numbering ");
3193       print_generic_expr (dump_file, use, 0);
3194       fprintf (dump_file, " stmt = ");
3195       print_gimple_stmt (dump_file, stmt, 0, 0);
3196     }
3197 
3198   /* Handle uninitialized uses.  */
3199   if (SSA_NAME_IS_DEFAULT_DEF (use))
3200     changed = set_ssa_val_to (use, use);
3201   else
3202     {
3203       if (gimple_code (stmt) == GIMPLE_PHI)
3204 	changed = visit_phi (stmt);
3205       else if (!gimple_has_lhs (stmt)
3206 	       || gimple_has_volatile_ops (stmt))
3207 	changed = defs_to_varying (stmt);
3208       else if (is_gimple_assign (stmt))
3209 	{
3210 	  enum tree_code code = gimple_assign_rhs_code (stmt);
3211 	  tree lhs = gimple_assign_lhs (stmt);
3212 	  tree rhs1 = gimple_assign_rhs1 (stmt);
3213 	  tree simplified;
3214 
3215 	  /* Shortcut for copies. Simplifying copies is pointless,
3216 	     since we copy the expression and value they represent.  */
3217 	  if (code == SSA_NAME
3218 	      && TREE_CODE (lhs) == SSA_NAME)
3219 	    {
3220 	      changed = visit_copy (lhs, rhs1);
3221 	      goto done;
3222 	    }
3223 	  simplified = try_to_simplify (stmt);
3224 	  if (simplified)
3225 	    {
3226 	      if (dump_file && (dump_flags & TDF_DETAILS))
3227 		{
3228 		  fprintf (dump_file, "RHS ");
3229 		  print_gimple_expr (dump_file, stmt, 0, 0);
3230 		  fprintf (dump_file, " simplified to ");
3231 		  print_generic_expr (dump_file, simplified, 0);
3232 		  if (TREE_CODE (lhs) == SSA_NAME)
3233 		    fprintf (dump_file, " has constants %d\n",
3234 			     expr_has_constants (simplified));
3235 		  else
3236 		    fprintf (dump_file, "\n");
3237 		}
3238 	    }
3239 	  /* Setting value numbers to constants will occasionally
3240 	     screw up phi congruence because constants are not
3241 	     uniquely associated with a single ssa name that can be
3242 	     looked up.  */
3243 	  if (simplified
3244 	      && is_gimple_min_invariant (simplified)
3245 	      && TREE_CODE (lhs) == SSA_NAME)
3246 	    {
3247 	      VN_INFO (lhs)->expr = simplified;
3248 	      VN_INFO (lhs)->has_constants = true;
3249 	      changed = set_ssa_val_to (lhs, simplified);
3250 	      goto done;
3251 	    }
3252 	  else if (simplified
3253 		   && TREE_CODE (simplified) == SSA_NAME
3254 		   && TREE_CODE (lhs) == SSA_NAME)
3255 	    {
3256 	      changed = visit_copy (lhs, simplified);
3257 	      goto done;
3258 	    }
3259 	  else if (simplified)
3260 	    {
3261 	      if (TREE_CODE (lhs) == SSA_NAME)
3262 		{
3263 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3264 		  /* We have to unshare the expression or else
3265 		     valuizing may change the IL stream.  */
3266 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
3267 		}
3268 	    }
3269 	  else if (stmt_has_constants (stmt)
3270 		   && TREE_CODE (lhs) == SSA_NAME)
3271 	    VN_INFO (lhs)->has_constants = true;
3272 	  else if (TREE_CODE (lhs) == SSA_NAME)
3273 	    {
3274 	      /* We reset expr and constantness here because we may
3275 		 have been value numbering optimistically, and
3276 		 iterating. They may become non-constant in this case,
3277 		 even if they were optimistically constant. */
3278 
3279 	      VN_INFO (lhs)->has_constants = false;
3280 	      VN_INFO (lhs)->expr = NULL_TREE;
3281 	    }
3282 
3283 	  if ((TREE_CODE (lhs) == SSA_NAME
3284 	       /* We can substitute SSA_NAMEs that are live over
3285 		  abnormal edges with their constant value.  */
3286 	       && !(gimple_assign_copy_p (stmt)
3287 		    && is_gimple_min_invariant (rhs1))
3288 	       && !(simplified
3289 		    && is_gimple_min_invariant (simplified))
3290 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3291 	      /* Stores or copies from SSA_NAMEs that are live over
3292 		 abnormal edges are a problem.  */
3293 	      || (code == SSA_NAME
3294 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3295 	    changed = defs_to_varying (stmt);
3296 	  else if (REFERENCE_CLASS_P (lhs)
3297 		   || DECL_P (lhs))
3298 	    changed = visit_reference_op_store (lhs, rhs1, stmt);
3299 	  else if (TREE_CODE (lhs) == SSA_NAME)
3300 	    {
3301 	      if ((gimple_assign_copy_p (stmt)
3302 		   && is_gimple_min_invariant (rhs1))
3303 		  || (simplified
3304 		      && is_gimple_min_invariant (simplified)))
3305 		{
3306 		  VN_INFO (lhs)->has_constants = true;
3307 		  if (simplified)
3308 		    changed = set_ssa_val_to (lhs, simplified);
3309 		  else
3310 		    changed = set_ssa_val_to (lhs, rhs1);
3311 		}
3312 	      else
3313 		{
3314 		  switch (get_gimple_rhs_class (code))
3315 		    {
3316 		    case GIMPLE_UNARY_RHS:
3317 		    case GIMPLE_BINARY_RHS:
3318 		    case GIMPLE_TERNARY_RHS:
3319 		      changed = visit_nary_op (lhs, stmt);
3320 		      break;
3321 		    case GIMPLE_SINGLE_RHS:
3322 		      switch (TREE_CODE_CLASS (code))
3323 			{
3324 			case tcc_reference:
3325 			  /* VOP-less references can go through unary case.  */
3326 			  if ((code == REALPART_EXPR
3327 			       || code == IMAGPART_EXPR
3328 			       || code == VIEW_CONVERT_EXPR
3329 			       || code == BIT_FIELD_REF)
3330 			      && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3331 			    {
3332 			      changed = visit_nary_op (lhs, stmt);
3333 			      break;
3334 			    }
3335 			  /* Fallthrough.  */
3336 			case tcc_declaration:
3337 			  changed = visit_reference_op_load (lhs, rhs1, stmt);
3338 			  break;
3339 			default:
3340 			  if (code == ADDR_EXPR)
3341 			    {
3342 			      changed = visit_nary_op (lhs, stmt);
3343 			      break;
3344 			    }
3345 			  else if (code == CONSTRUCTOR)
3346 			    {
3347 			      changed = visit_nary_op (lhs, stmt);
3348 			      break;
3349 			    }
3350 			  changed = defs_to_varying (stmt);
3351 			}
3352 		      break;
3353 		    default:
3354 		      changed = defs_to_varying (stmt);
3355 		      break;
3356 		    }
3357 		}
3358 	    }
3359 	  else
3360 	    changed = defs_to_varying (stmt);
3361 	}
3362       else if (is_gimple_call (stmt))
3363 	{
3364 	  tree lhs = gimple_call_lhs (stmt);
3365 
3366 	  /* ???  We could try to simplify calls.  */
3367 
3368 	  if (stmt_has_constants (stmt)
3369 	      && TREE_CODE (lhs) == SSA_NAME)
3370 	    VN_INFO (lhs)->has_constants = true;
3371 	  else if (TREE_CODE (lhs) == SSA_NAME)
3372 	    {
3373 	      /* We reset expr and constantness here because we may
3374 		 have been value numbering optimistically, and
3375 		 iterating. They may become non-constant in this case,
3376 		 even if they were optimistically constant. */
3377 	      VN_INFO (lhs)->has_constants = false;
3378 	      VN_INFO (lhs)->expr = NULL_TREE;
3379 	    }
3380 
3381 	  if (TREE_CODE (lhs) == SSA_NAME
3382 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3383 	    changed = defs_to_varying (stmt);
3384 	  /* ???  We should handle stores from calls.  */
3385 	  else if (TREE_CODE (lhs) == SSA_NAME)
3386 	    {
3387 	      if (!gimple_call_internal_p (stmt)
3388 		  && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3389 		changed = visit_reference_op_call (lhs, stmt);
3390 	      else
3391 		changed = defs_to_varying (stmt);
3392 	    }
3393 	  else
3394 	    changed = defs_to_varying (stmt);
3395 	}
3396     }
3397  done:
3398   return changed;
3399 }
3400 
3401 /* Compare two operands by reverse postorder index */
3402 
3403 static int
3404 compare_ops (const void *pa, const void *pb)
3405 {
3406   const tree opa = *((const tree *)pa);
3407   const tree opb = *((const tree *)pb);
3408   gimple opstmta = SSA_NAME_DEF_STMT (opa);
3409   gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3410   basic_block bba;
3411   basic_block bbb;
3412 
3413   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3414     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3415   else if (gimple_nop_p (opstmta))
3416     return -1;
3417   else if (gimple_nop_p (opstmtb))
3418     return 1;
3419 
3420   bba = gimple_bb (opstmta);
3421   bbb = gimple_bb (opstmtb);
3422 
3423   if (!bba && !bbb)
3424     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3425   else if (!bba)
3426     return -1;
3427   else if (!bbb)
3428     return 1;
3429 
3430   if (bba == bbb)
3431     {
3432       if (gimple_code (opstmta) == GIMPLE_PHI
3433 	  && gimple_code (opstmtb) == GIMPLE_PHI)
3434 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3435       else if (gimple_code (opstmta) == GIMPLE_PHI)
3436 	return -1;
3437       else if (gimple_code (opstmtb) == GIMPLE_PHI)
3438 	return 1;
3439       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3440         return gimple_uid (opstmta) - gimple_uid (opstmtb);
3441       else
3442 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3443     }
3444   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3445 }
3446 
3447 /* Sort an array containing members of a strongly connected component
3448    SCC so that the members are ordered by RPO number.
3449    This means that when the sort is complete, iterating through the
3450    array will give you the members in RPO order.  */
3451 
3452 static void
3453 sort_scc (VEC (tree, heap) *scc)
3454 {
3455   VEC_qsort (tree, scc, compare_ops);
3456 }
3457 
3458 /* Insert the no longer used nary ONARY to the hash INFO.  */
3459 
3460 static void
3461 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3462 {
3463   size_t size = sizeof_vn_nary_op (onary->length);
3464   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3465 					       &info->nary_obstack);
3466   memcpy (nary, onary, size);
3467   vn_nary_op_insert_into (nary, info->nary, false);
3468 }
3469 
3470 /* Insert the no longer used phi OPHI to the hash INFO.  */
3471 
3472 static void
3473 copy_phi (vn_phi_t ophi, vn_tables_t info)
3474 {
3475   vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3476   void **slot;
3477   memcpy (phi, ophi, sizeof (*phi));
3478   ophi->phiargs = NULL;
3479   slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3480   gcc_assert (!*slot);
3481   *slot = phi;
3482 }
3483 
3484 /* Insert the no longer used reference OREF to the hash INFO.  */
3485 
3486 static void
3487 copy_reference (vn_reference_t oref, vn_tables_t info)
3488 {
3489   vn_reference_t ref;
3490   void **slot;
3491   ref = (vn_reference_t) pool_alloc (info->references_pool);
3492   memcpy (ref, oref, sizeof (*ref));
3493   oref->operands = NULL;
3494   slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3495 				   INSERT);
3496   if (*slot)
3497     free_reference (*slot);
3498   *slot = ref;
3499 }
3500 
3501 /* Process a strongly connected component in the SSA graph.  */
3502 
3503 static void
3504 process_scc (VEC (tree, heap) *scc)
3505 {
3506   tree var;
3507   unsigned int i;
3508   unsigned int iterations = 0;
3509   bool changed = true;
3510   htab_iterator hi;
3511   vn_nary_op_t nary;
3512   vn_phi_t phi;
3513   vn_reference_t ref;
3514 
3515   /* If the SCC has a single member, just visit it.  */
3516   if (VEC_length (tree, scc) == 1)
3517     {
3518       tree use = VEC_index (tree, scc, 0);
3519       if (VN_INFO (use)->use_processed)
3520 	return;
3521       /* We need to make sure it doesn't form a cycle itself, which can
3522 	 happen for self-referential PHI nodes.  In that case we would
3523 	 end up inserting an expression with VN_TOP operands into the
3524 	 valid table which makes us derive bogus equivalences later.
3525 	 The cheapest way to check this is to assume it for all PHI nodes.  */
3526       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3527 	/* Fallthru to iteration.  */ ;
3528       else
3529 	{
3530 	  visit_use (use);
3531 	  return;
3532 	}
3533     }
3534 
3535   /* Iterate over the SCC with the optimistic table until it stops
3536      changing.  */
3537   current_info = optimistic_info;
3538   while (changed)
3539     {
3540       changed = false;
3541       iterations++;
3542       if (dump_file && (dump_flags & TDF_DETAILS))
3543 	fprintf (dump_file, "Starting iteration %d\n", iterations);
3544       /* As we are value-numbering optimistically we have to
3545 	 clear the expression tables and the simplified expressions
3546 	 in each iteration until we converge.  */
3547       htab_empty (optimistic_info->nary);
3548       htab_empty (optimistic_info->phis);
3549       htab_empty (optimistic_info->references);
3550       obstack_free (&optimistic_info->nary_obstack, NULL);
3551       gcc_obstack_init (&optimistic_info->nary_obstack);
3552       empty_alloc_pool (optimistic_info->phis_pool);
3553       empty_alloc_pool (optimistic_info->references_pool);
3554       FOR_EACH_VEC_ELT (tree, scc, i, var)
3555 	VN_INFO (var)->expr = NULL_TREE;
3556       FOR_EACH_VEC_ELT (tree, scc, i, var)
3557 	changed |= visit_use (var);
3558     }
3559 
3560   statistics_histogram_event (cfun, "SCC iterations", iterations);
3561 
3562   /* Finally, copy the contents of the no longer used optimistic
3563      table to the valid table.  */
3564   FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3565     copy_nary (nary, valid_info);
3566   FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3567     copy_phi (phi, valid_info);
3568   FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3569     copy_reference (ref, valid_info);
3570 
3571   current_info = valid_info;
3572 }
3573 
3574 DEF_VEC_O(ssa_op_iter);
3575 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3576 
3577 /* Pop the components of the found SCC for NAME off the SCC stack
3578    and process them.  Returns true if all went well, false if
3579    we run into resource limits.  */
3580 
3581 static bool
3582 extract_and_process_scc_for_name (tree name)
3583 {
3584   VEC (tree, heap) *scc = NULL;
3585   tree x;
3586 
3587   /* Found an SCC, pop the components off the SCC stack and
3588      process them.  */
3589   do
3590     {
3591       x = VEC_pop (tree, sccstack);
3592 
3593       VN_INFO (x)->on_sccstack = false;
3594       VEC_safe_push (tree, heap, scc, x);
3595     } while (x != name);
3596 
3597   /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
3598   if (VEC_length (tree, scc)
3599       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3600     {
3601       if (dump_file)
3602 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3603 		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3604 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3605 
3606       VEC_free (tree, heap, scc);
3607       return false;
3608     }
3609 
3610   if (VEC_length (tree, scc) > 1)
3611     sort_scc (scc);
3612 
3613   if (dump_file && (dump_flags & TDF_DETAILS))
3614     print_scc (dump_file, scc);
3615 
3616   process_scc (scc);
3617 
3618   VEC_free (tree, heap, scc);
3619 
3620   return true;
3621 }
3622 
3623 /* Depth first search on NAME to discover and process SCC's in the SSA
3624    graph.
3625    Execution of this algorithm relies on the fact that the SCC's are
3626    popped off the stack in topological order.
3627    Returns true if successful, false if we stopped processing SCC's due
3628    to resource constraints.  */
3629 
3630 static bool
3631 DFS (tree name)
3632 {
3633   VEC(ssa_op_iter, heap) *itervec = NULL;
3634   VEC(tree, heap) *namevec = NULL;
3635   use_operand_p usep = NULL;
3636   gimple defstmt;
3637   tree use;
3638   ssa_op_iter iter;
3639 
3640 start_over:
3641   /* SCC info */
3642   VN_INFO (name)->dfsnum = next_dfs_num++;
3643   VN_INFO (name)->visited = true;
3644   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3645 
3646   VEC_safe_push (tree, heap, sccstack, name);
3647   VN_INFO (name)->on_sccstack = true;
3648   defstmt = SSA_NAME_DEF_STMT (name);
3649 
3650   /* Recursively DFS on our operands, looking for SCC's.  */
3651   if (!gimple_nop_p (defstmt))
3652     {
3653       /* Push a new iterator.  */
3654       if (gimple_code (defstmt) == GIMPLE_PHI)
3655 	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3656       else
3657 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3658     }
3659   else
3660     clear_and_done_ssa_iter (&iter);
3661 
3662   while (1)
3663     {
3664       /* If we are done processing uses of a name, go up the stack
3665 	 of iterators and process SCCs as we found them.  */
3666       if (op_iter_done (&iter))
3667 	{
3668 	  /* See if we found an SCC.  */
3669 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3670 	    if (!extract_and_process_scc_for_name (name))
3671 	      {
3672 		VEC_free (tree, heap, namevec);
3673 		VEC_free (ssa_op_iter, heap, itervec);
3674 		return false;
3675 	      }
3676 
3677 	  /* Check if we are done.  */
3678 	  if (VEC_empty (tree, namevec))
3679 	    {
3680 	      VEC_free (tree, heap, namevec);
3681 	      VEC_free (ssa_op_iter, heap, itervec);
3682 	      return true;
3683 	    }
3684 
3685 	  /* Restore the last use walker and continue walking there.  */
3686 	  use = name;
3687 	  name = VEC_pop (tree, namevec);
3688 	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3689 		  sizeof (ssa_op_iter));
3690 	  VEC_pop (ssa_op_iter, itervec);
3691 	  goto continue_walking;
3692 	}
3693 
3694       use = USE_FROM_PTR (usep);
3695 
3696       /* Since we handle phi nodes, we will sometimes get
3697 	 invariants in the use expression.  */
3698       if (TREE_CODE (use) == SSA_NAME)
3699 	{
3700 	  if (! (VN_INFO (use)->visited))
3701 	    {
3702 	      /* Recurse by pushing the current use walking state on
3703 		 the stack and starting over.  */
3704 	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3705 	      VEC_safe_push(tree, heap, namevec, name);
3706 	      name = use;
3707 	      goto start_over;
3708 
3709 continue_walking:
3710 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3711 					 VN_INFO (use)->low);
3712 	    }
3713 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3714 	      && VN_INFO (use)->on_sccstack)
3715 	    {
3716 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3717 					 VN_INFO (name)->low);
3718 	    }
3719 	}
3720 
3721       usep = op_iter_next_use (&iter);
3722     }
3723 }
3724 
3725 /* Allocate a value number table.  */
3726 
3727 static void
3728 allocate_vn_table (vn_tables_t table)
3729 {
3730   table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3731   table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3732   table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3733 				   free_reference);
3734 
3735   gcc_obstack_init (&table->nary_obstack);
3736   table->phis_pool = create_alloc_pool ("VN phis",
3737 					sizeof (struct vn_phi_s),
3738 					30);
3739   table->references_pool = create_alloc_pool ("VN references",
3740 					      sizeof (struct vn_reference_s),
3741 					      30);
3742 }
3743 
3744 /* Free a value number table.  */
3745 
3746 static void
3747 free_vn_table (vn_tables_t table)
3748 {
3749   htab_delete (table->phis);
3750   htab_delete (table->nary);
3751   htab_delete (table->references);
3752   obstack_free (&table->nary_obstack, NULL);
3753   free_alloc_pool (table->phis_pool);
3754   free_alloc_pool (table->references_pool);
3755 }
3756 
3757 static void
3758 init_scc_vn (void)
3759 {
3760   size_t i;
3761   int j;
3762   int *rpo_numbers_temp;
3763 
3764   calculate_dominance_info (CDI_DOMINATORS);
3765   sccstack = NULL;
3766   constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3767 				  free);
3768 
3769   constant_value_ids = BITMAP_ALLOC (NULL);
3770 
3771   next_dfs_num = 1;
3772   next_value_id = 1;
3773 
3774   vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3775   /* VEC_alloc doesn't actually grow it to the right size, it just
3776      preallocates the space to do so.  */
3777   VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3778   gcc_obstack_init (&vn_ssa_aux_obstack);
3779 
3780   shared_lookup_phiargs = NULL;
3781   shared_lookup_references = NULL;
3782   rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3783   rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3784   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3785 
3786   /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3787      the i'th block in RPO order is bb.  We want to map bb's to RPO
3788      numbers, so we need to rearrange this array.  */
3789   for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3790     rpo_numbers[rpo_numbers_temp[j]] = j;
3791 
3792   XDELETE (rpo_numbers_temp);
3793 
3794   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3795 
3796   /* Create the VN_INFO structures, and initialize value numbers to
3797      TOP.  */
3798   for (i = 0; i < num_ssa_names; i++)
3799     {
3800       tree name = ssa_name (i);
3801       if (name)
3802 	{
3803 	  VN_INFO_GET (name)->valnum = VN_TOP;
3804 	  VN_INFO (name)->expr = NULL_TREE;
3805 	  VN_INFO (name)->value_id = 0;
3806 	}
3807     }
3808 
3809   renumber_gimple_stmt_uids ();
3810 
3811   /* Create the valid and optimistic value numbering tables.  */
3812   valid_info = XCNEW (struct vn_tables_s);
3813   allocate_vn_table (valid_info);
3814   optimistic_info = XCNEW (struct vn_tables_s);
3815   allocate_vn_table (optimistic_info);
3816 }
3817 
3818 void
3819 free_scc_vn (void)
3820 {
3821   size_t i;
3822 
3823   htab_delete (constant_to_value_id);
3824   BITMAP_FREE (constant_value_ids);
3825   VEC_free (tree, heap, shared_lookup_phiargs);
3826   VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3827   XDELETEVEC (rpo_numbers);
3828 
3829   for (i = 0; i < num_ssa_names; i++)
3830     {
3831       tree name = ssa_name (i);
3832       if (name
3833 	  && VN_INFO (name)->needs_insertion)
3834 	release_ssa_name (name);
3835     }
3836   obstack_free (&vn_ssa_aux_obstack, NULL);
3837   VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3838 
3839   VEC_free (tree, heap, sccstack);
3840   free_vn_table (valid_info);
3841   XDELETE (valid_info);
3842   free_vn_table (optimistic_info);
3843   XDELETE (optimistic_info);
3844 }
3845 
3846 /* Set *ID if we computed something useful in RESULT.  */
3847 
3848 static void
3849 set_value_id_for_result (tree result, unsigned int *id)
3850 {
3851   if (result)
3852     {
3853       if (TREE_CODE (result) == SSA_NAME)
3854 	*id = VN_INFO (result)->value_id;
3855       else if (is_gimple_min_invariant (result))
3856 	*id = get_or_alloc_constant_value_id (result);
3857     }
3858 }
3859 
3860 /* Set the value ids in the valid hash tables.  */
3861 
3862 static void
3863 set_hashtable_value_ids (void)
3864 {
3865   htab_iterator hi;
3866   vn_nary_op_t vno;
3867   vn_reference_t vr;
3868   vn_phi_t vp;
3869 
3870   /* Now set the value ids of the things we had put in the hash
3871      table.  */
3872 
3873   FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3874 			 vno, vn_nary_op_t, hi)
3875     set_value_id_for_result (vno->result, &vno->value_id);
3876 
3877   FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3878 			 vp, vn_phi_t, hi)
3879     set_value_id_for_result (vp->result, &vp->value_id);
3880 
3881   FOR_EACH_HTAB_ELEMENT (valid_info->references,
3882 			 vr, vn_reference_t, hi)
3883     set_value_id_for_result (vr->result, &vr->value_id);
3884 }
3885 
3886 /* Do SCCVN.  Returns true if it finished, false if we bailed out
3887    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
3888    how we use the alias oracle walking during the VN process.  */
3889 
3890 bool
3891 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3892 {
3893   size_t i;
3894   tree param;
3895   bool changed = true;
3896 
3897   default_vn_walk_kind = default_vn_walk_kind_;
3898 
3899   init_scc_vn ();
3900   current_info = valid_info;
3901 
3902   for (param = DECL_ARGUMENTS (current_function_decl);
3903        param;
3904        param = DECL_CHAIN (param))
3905     {
3906       if (gimple_default_def (cfun, param) != NULL)
3907 	{
3908 	  tree def = gimple_default_def (cfun, param);
3909 	  VN_INFO (def)->valnum = def;
3910 	}
3911     }
3912 
3913   for (i = 1; i < num_ssa_names; ++i)
3914     {
3915       tree name = ssa_name (i);
3916       if (name
3917 	  && VN_INFO (name)->visited == false
3918 	  && !has_zero_uses (name))
3919 	if (!DFS (name))
3920 	  {
3921 	    free_scc_vn ();
3922 	    return false;
3923 	  }
3924     }
3925 
3926   /* Initialize the value ids.  */
3927 
3928   for (i = 1; i < num_ssa_names; ++i)
3929     {
3930       tree name = ssa_name (i);
3931       vn_ssa_aux_t info;
3932       if (!name)
3933 	continue;
3934       info = VN_INFO (name);
3935       if (info->valnum == name
3936 	  || info->valnum == VN_TOP)
3937 	info->value_id = get_next_value_id ();
3938       else if (is_gimple_min_invariant (info->valnum))
3939 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
3940     }
3941 
3942   /* Propagate until they stop changing.  */
3943   while (changed)
3944     {
3945       changed = false;
3946       for (i = 1; i < num_ssa_names; ++i)
3947 	{
3948 	  tree name = ssa_name (i);
3949 	  vn_ssa_aux_t info;
3950 	  if (!name)
3951 	    continue;
3952 	  info = VN_INFO (name);
3953 	  if (TREE_CODE (info->valnum) == SSA_NAME
3954 	      && info->valnum != name
3955 	      && info->value_id != VN_INFO (info->valnum)->value_id)
3956 	    {
3957 	      changed = true;
3958 	      info->value_id = VN_INFO (info->valnum)->value_id;
3959 	    }
3960 	}
3961     }
3962 
3963   set_hashtable_value_ids ();
3964 
3965   if (dump_file && (dump_flags & TDF_DETAILS))
3966     {
3967       fprintf (dump_file, "Value numbers:\n");
3968       for (i = 0; i < num_ssa_names; i++)
3969 	{
3970 	  tree name = ssa_name (i);
3971 	  if (name
3972 	      && VN_INFO (name)->visited
3973 	      && SSA_VAL (name) != name)
3974 	    {
3975 	      print_generic_expr (dump_file, name, 0);
3976 	      fprintf (dump_file, " = ");
3977 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
3978 	      fprintf (dump_file, "\n");
3979 	    }
3980 	}
3981     }
3982 
3983   return true;
3984 }
3985 
3986 /* Return the maximum value id we have ever seen.  */
3987 
3988 unsigned int
3989 get_max_value_id (void)
3990 {
3991   return next_value_id;
3992 }
3993 
3994 /* Return the next unique value id.  */
3995 
3996 unsigned int
3997 get_next_value_id (void)
3998 {
3999   return next_value_id++;
4000 }
4001 
4002 
4003 /* Compare two expressions E1 and E2 and return true if they are equal.  */
4004 
4005 bool
4006 expressions_equal_p (tree e1, tree e2)
4007 {
4008   /* The obvious case.  */
4009   if (e1 == e2)
4010     return true;
4011 
4012   /* If only one of them is null, they cannot be equal.  */
4013   if (!e1 || !e2)
4014     return false;
4015 
4016   /* Now perform the actual comparison.  */
4017   if (TREE_CODE (e1) == TREE_CODE (e2)
4018       && operand_equal_p (e1, e2, OEP_PURE_SAME))
4019     return true;
4020 
4021   return false;
4022 }
4023 
4024 
4025 /* Return true if the nary operation NARY may trap.  This is a copy
4026    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4027 
4028 bool
4029 vn_nary_may_trap (vn_nary_op_t nary)
4030 {
4031   tree type;
4032   tree rhs2 = NULL_TREE;
4033   bool honor_nans = false;
4034   bool honor_snans = false;
4035   bool fp_operation = false;
4036   bool honor_trapv = false;
4037   bool handled, ret;
4038   unsigned i;
4039 
4040   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4041       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4042       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4043     {
4044       type = nary->type;
4045       fp_operation = FLOAT_TYPE_P (type);
4046       if (fp_operation)
4047 	{
4048 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4049 	  honor_snans = flag_signaling_nans != 0;
4050 	}
4051       else if (INTEGRAL_TYPE_P (type)
4052 	       && TYPE_OVERFLOW_TRAPS (type))
4053 	honor_trapv = true;
4054     }
4055   if (nary->length >= 2)
4056     rhs2 = nary->op[1];
4057   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4058 				       honor_trapv,
4059 				       honor_nans, honor_snans, rhs2,
4060 				       &handled);
4061   if (handled
4062       && ret)
4063     return true;
4064 
4065   for (i = 0; i < nary->length; ++i)
4066     if (tree_could_trap_p (nary->op[i]))
4067       return true;
4068 
4069   return false;
4070 }
4071