1 /* SCC value numbering for trees
2    Copyright (C) 2006, 2007, 2008, 2009, 2010
3    Free Software Foundation, Inc.
4    Contributed by Daniel Berlin <dan@dberlin.org>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12 
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
16 GNU General Public License for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "basic-block.h"
28 #include "tree-pretty-print.h"
29 #include "gimple-pretty-print.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
32 #include "gimple.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "fibheap.h"
36 #include "hashtab.h"
37 #include "tree-iterator.h"
38 #include "alloc-pool.h"
39 #include "tree-pass.h"
40 #include "flags.h"
41 #include "bitmap.h"
42 #include "langhooks.h"
43 #include "cfgloop.h"
44 #include "params.h"
45 #include "tree-ssa-propagate.h"
46 #include "tree-ssa-sccvn.h"
47 #include "gimple-fold.h"
48 
49 /* This algorithm is based on the SCC algorithm presented by Keith
50    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51    (http://citeseer.ist.psu.edu/41805.html).  In
52    straight line code, it is equivalent to a regular hash based value
53    numbering that is performed in reverse postorder.
54 
55    For code with cycles, there are two alternatives, both of which
56    require keeping the hashtables separate from the actual list of
57    value numbers for SSA names.
58 
59    1. Iterate value numbering in an RPO walk of the blocks, removing
60    all the entries from the hashtable after each iteration (but
61    keeping the SSA name->value number mapping between iterations).
62    Iterate until it does not change.
63 
64    2. Perform value numbering as part of an SCC walk on the SSA graph,
65    iterating only the cycles in the SSA graph until they do not change
66    (using a separate, optimistic hashtable for value numbering the SCC
67    operands).
68 
69    The second is not just faster in practice (because most SSA graph
70    cycles do not involve all the variables in the graph), it also has
71    some nice properties.
72 
73    One of these nice properties is that when we pop an SCC off the
74    stack, we are guaranteed to have processed all the operands coming from
75    *outside of that SCC*, so we do not need to do anything special to
76    ensure they have value numbers.
77 
78    Another nice property is that the SCC walk is done as part of a DFS
79    of the SSA graph, which makes it easy to perform combining and
80    simplifying operations at the same time.
81 
82    The code below is deliberately written in a way that makes it easy
83    to separate the SCC walk from the other work it does.
84 
85    In order to propagate constants through the code, we track which
86    expressions contain constants, and use those while folding.  In
87    theory, we could also track expressions whose value numbers are
88    replaced, in case we end up folding based on expression
89    identities.
90 
91    In order to value number memory, we assign value numbers to vuses.
92    This enables us to note that, for example, stores to the same
93    address of the same value from the same starting memory states are
94    equivalent.
95    TODO:
96 
97    1. We can iterate only the changing portions of the SCC's, but
98    I have not seen an SCC big enough for this to be a win.
99    2. If you differentiate between phi nodes for loops and phi nodes
100    for if-then-else, you can properly consider phi nodes in different
101    blocks for equivalence.
102    3. We could value number vuses in more cases, particularly, whole
103    structure copies.
104 */
105 
106 /* The set of hashtables and alloc_pool's for their items.  */
107 
108 typedef struct vn_tables_s
109 {
110   htab_t nary;
111   htab_t phis;
112   htab_t references;
113   struct obstack nary_obstack;
114   alloc_pool phis_pool;
115   alloc_pool references_pool;
116 } *vn_tables_t;
117 
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
120 
121 
122 /* Valid hashtables storing information we have proven to be
123    correct.  */
124 
125 static vn_tables_t valid_info;
126 
127 /* Optimistic hashtables storing information we are making assumptions about
128    during iterations.  */
129 
130 static vn_tables_t optimistic_info;
131 
132 /* Pointer to the set of hashtables that is currently being used.
133    Should always point to either the optimistic_info, or the
134    valid_info.  */
135 
136 static vn_tables_t current_info;
137 
138 
139 /* Reverse post order index for each basic block.  */
140 
141 static int *rpo_numbers;
142 
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
144 
145 /* This represents the top of the VN lattice, which is the universal
146    value.  */
147 
148 tree VN_TOP;
149 
150 /* Unique counter for our value ids.  */
151 
152 static unsigned int next_value_id;
153 
154 /* Next DFS number and the stack for strongly connected component
155    detection. */
156 
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
159 
160 
161 DEF_VEC_P(vn_ssa_aux_t);
162 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
163 
164 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
165    are allocated on an obstack for locality reasons, and to free them
166    without looping over the VEC.  */
167 
168 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
169 static struct obstack vn_ssa_aux_obstack;
170 
171 /* Return the value numbering information for a given SSA name.  */
172 
173 vn_ssa_aux_t
174 VN_INFO (tree name)
175 {
176   vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
177 				SSA_NAME_VERSION (name));
178   gcc_checking_assert (res);
179   return res;
180 }
181 
182 /* Set the value numbering info for a given SSA name to a given
183    value.  */
184 
185 static inline void
186 VN_INFO_SET (tree name, vn_ssa_aux_t value)
187 {
188   VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
189 	       SSA_NAME_VERSION (name), value);
190 }
191 
192 /* Initialize the value numbering info for a given SSA name.
193    This should be called just once for every SSA name.  */
194 
195 vn_ssa_aux_t
196 VN_INFO_GET (tree name)
197 {
198   vn_ssa_aux_t newinfo;
199 
200   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
201   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
202   if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
203     VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
204 		   SSA_NAME_VERSION (name) + 1);
205   VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
206 	       SSA_NAME_VERSION (name), newinfo);
207   return newinfo;
208 }
209 
210 
211 /* Get the representative expression for the SSA_NAME NAME.  Returns
212    the representative SSA_NAME if there is no expression associated with it.  */
213 
214 tree
215 vn_get_expr_for (tree name)
216 {
217   vn_ssa_aux_t vn = VN_INFO (name);
218   gimple def_stmt;
219   tree expr = NULL_TREE;
220   enum tree_code code;
221 
222   if (vn->valnum == VN_TOP)
223     return name;
224 
225   /* If the value-number is a constant it is the representative
226      expression.  */
227   if (TREE_CODE (vn->valnum) != SSA_NAME)
228     return vn->valnum;
229 
230   /* Get to the information of the value of this SSA_NAME.  */
231   vn = VN_INFO (vn->valnum);
232 
233   /* If the value-number is a constant it is the representative
234      expression.  */
235   if (TREE_CODE (vn->valnum) != SSA_NAME)
236     return vn->valnum;
237 
238   /* Else if we have an expression, return it.  */
239   if (vn->expr != NULL_TREE)
240     return vn->expr;
241 
242   /* Otherwise use the defining statement to build the expression.  */
243   def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
244 
245   /* If the value number is not an assignment use it directly.  */
246   if (!is_gimple_assign (def_stmt))
247     return vn->valnum;
248 
249   /* FIXME tuples.  This is incomplete and likely will miss some
250      simplifications.  */
251   code = gimple_assign_rhs_code (def_stmt);
252   switch (TREE_CODE_CLASS (code))
253     {
254     case tcc_reference:
255       if ((code == REALPART_EXPR
256 	   || code == IMAGPART_EXPR
257 	   || code == VIEW_CONVERT_EXPR)
258 	  && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
259 				      0)) == SSA_NAME)
260 	expr = fold_build1 (code,
261 			    gimple_expr_type (def_stmt),
262 			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
263       break;
264 
265     case tcc_unary:
266       expr = fold_build1 (code,
267 			  gimple_expr_type (def_stmt),
268 			  gimple_assign_rhs1 (def_stmt));
269       break;
270 
271     case tcc_binary:
272       expr = fold_build2 (code,
273 			  gimple_expr_type (def_stmt),
274 			  gimple_assign_rhs1 (def_stmt),
275 			  gimple_assign_rhs2 (def_stmt));
276       break;
277 
278     case tcc_exceptional:
279       if (code == CONSTRUCTOR
280 	  && TREE_CODE
281 	       (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
282 	expr = gimple_assign_rhs1 (def_stmt);
283       break;
284 
285     default:;
286     }
287   if (expr == NULL_TREE)
288     return vn->valnum;
289 
290   /* Cache the expression.  */
291   vn->expr = expr;
292 
293   return expr;
294 }
295 
296 
297 /* Free a phi operation structure VP.  */
298 
299 static void
300 free_phi (void *vp)
301 {
302   vn_phi_t phi = (vn_phi_t) vp;
303   VEC_free (tree, heap, phi->phiargs);
304 }
305 
306 /* Free a reference operation structure VP.  */
307 
308 static void
309 free_reference (void *vp)
310 {
311   vn_reference_t vr = (vn_reference_t) vp;
312   VEC_free (vn_reference_op_s, heap, vr->operands);
313 }
314 
315 /* Hash table equality function for vn_constant_t.  */
316 
317 static int
318 vn_constant_eq (const void *p1, const void *p2)
319 {
320   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
321   const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
322 
323   if (vc1->hashcode != vc2->hashcode)
324     return false;
325 
326   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
327 }
328 
329 /* Hash table hash function for vn_constant_t.  */
330 
331 static hashval_t
332 vn_constant_hash (const void *p1)
333 {
334   const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
335   return vc1->hashcode;
336 }
337 
338 /* Lookup a value id for CONSTANT and return it.  If it does not
339    exist returns 0.  */
340 
341 unsigned int
342 get_constant_value_id (tree constant)
343 {
344   void **slot;
345   struct vn_constant_s vc;
346 
347   vc.hashcode = vn_hash_constant_with_type (constant);
348   vc.constant = constant;
349   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
350 				   vc.hashcode, NO_INSERT);
351   if (slot)
352     return ((vn_constant_t)*slot)->value_id;
353   return 0;
354 }
355 
356 /* Lookup a value id for CONSTANT, and if it does not exist, create a
357    new one and return it.  If it does exist, return it.  */
358 
359 unsigned int
360 get_or_alloc_constant_value_id (tree constant)
361 {
362   void **slot;
363   struct vn_constant_s vc;
364   vn_constant_t vcp;
365 
366   vc.hashcode = vn_hash_constant_with_type (constant);
367   vc.constant = constant;
368   slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
369 				   vc.hashcode, INSERT);
370   if (*slot)
371     return ((vn_constant_t)*slot)->value_id;
372 
373   vcp = XNEW (struct vn_constant_s);
374   vcp->hashcode = vc.hashcode;
375   vcp->constant = constant;
376   vcp->value_id = get_next_value_id ();
377   *slot = (void *) vcp;
378   bitmap_set_bit (constant_value_ids, vcp->value_id);
379   return vcp->value_id;
380 }
381 
382 /* Return true if V is a value id for a constant.  */
383 
384 bool
385 value_id_constant_p (unsigned int v)
386 {
387   return bitmap_bit_p (constant_value_ids, v);
388 }
389 
390 /* Compare two reference operands P1 and P2 for equality.  Return true if
391    they are equal, and false otherwise.  */
392 
393 static int
394 vn_reference_op_eq (const void *p1, const void *p2)
395 {
396   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
397   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
398 
399   return (vro1->opcode == vro2->opcode
400 	  /* We do not care for differences in type qualification.  */
401 	  && (vro1->type == vro2->type
402 	      || (vro1->type && vro2->type
403 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
404 					 TYPE_MAIN_VARIANT (vro2->type))))
405 	  && expressions_equal_p (vro1->op0, vro2->op0)
406 	  && expressions_equal_p (vro1->op1, vro2->op1)
407 	  && expressions_equal_p (vro1->op2, vro2->op2));
408 }
409 
410 /* Compute the hash for a reference operand VRO1.  */
411 
412 static hashval_t
413 vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
414 {
415   result = iterative_hash_hashval_t (vro1->opcode, result);
416   if (vro1->op0)
417     result = iterative_hash_expr (vro1->op0, result);
418   if (vro1->op1)
419     result = iterative_hash_expr (vro1->op1, result);
420   if (vro1->op2)
421     result = iterative_hash_expr (vro1->op2, result);
422   return result;
423 }
424 
425 /* Return the hashcode for a given reference operation P1.  */
426 
427 static hashval_t
428 vn_reference_hash (const void *p1)
429 {
430   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
431   return vr1->hashcode;
432 }
433 
434 /* Compute a hash for the reference operation VR1 and return it.  */
435 
436 hashval_t
437 vn_reference_compute_hash (const vn_reference_t vr1)
438 {
439   hashval_t result = 0;
440   int i;
441   vn_reference_op_t vro;
442   HOST_WIDE_INT off = -1;
443   bool deref = false;
444 
445   FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
446     {
447       if (vro->opcode == MEM_REF)
448 	deref = true;
449       else if (vro->opcode != ADDR_EXPR)
450 	deref = false;
451       if (vro->off != -1)
452 	{
453 	  if (off == -1)
454 	    off = 0;
455 	  off += vro->off;
456 	}
457       else
458 	{
459 	  if (off != -1
460 	      && off != 0)
461 	    result = iterative_hash_hashval_t (off, result);
462 	  off = -1;
463 	  if (deref
464 	      && vro->opcode == ADDR_EXPR)
465 	    {
466 	      if (vro->op0)
467 		{
468 		  tree op = TREE_OPERAND (vro->op0, 0);
469 		  result = iterative_hash_hashval_t (TREE_CODE (op), result);
470 		  result = iterative_hash_expr (op, result);
471 		}
472 	    }
473 	  else
474 	    result = vn_reference_op_compute_hash (vro, result);
475 	}
476     }
477   if (vr1->vuse)
478     result += SSA_NAME_VERSION (vr1->vuse);
479 
480   return result;
481 }
482 
483 /* Return true if reference operations P1 and P2 are equivalent.  This
484    means they have the same set of operands and vuses.  */
485 
486 int
487 vn_reference_eq (const void *p1, const void *p2)
488 {
489   unsigned i, j;
490 
491   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
492   const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
493   if (vr1->hashcode != vr2->hashcode)
494     return false;
495 
496   /* Early out if this is not a hash collision.  */
497   if (vr1->hashcode != vr2->hashcode)
498     return false;
499 
500   /* The VOP needs to be the same.  */
501   if (vr1->vuse != vr2->vuse)
502     return false;
503 
504   /* If the operands are the same we are done.  */
505   if (vr1->operands == vr2->operands)
506     return true;
507 
508   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
509     return false;
510 
511   if (INTEGRAL_TYPE_P (vr1->type)
512       && INTEGRAL_TYPE_P (vr2->type))
513     {
514       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
515 	return false;
516     }
517   else if (INTEGRAL_TYPE_P (vr1->type)
518 	   && (TYPE_PRECISION (vr1->type)
519 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
520     return false;
521   else if (INTEGRAL_TYPE_P (vr2->type)
522 	   && (TYPE_PRECISION (vr2->type)
523 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
524     return false;
525 
526   i = 0;
527   j = 0;
528   do
529     {
530       HOST_WIDE_INT off1 = 0, off2 = 0;
531       vn_reference_op_t vro1, vro2;
532       vn_reference_op_s tem1, tem2;
533       bool deref1 = false, deref2 = false;
534       for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
535 	{
536 	  if (vro1->opcode == MEM_REF)
537 	    deref1 = true;
538 	  if (vro1->off == -1)
539 	    break;
540 	  off1 += vro1->off;
541 	}
542       for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
543 	{
544 	  if (vro2->opcode == MEM_REF)
545 	    deref2 = true;
546 	  if (vro2->off == -1)
547 	    break;
548 	  off2 += vro2->off;
549 	}
550       if (off1 != off2)
551 	return false;
552       if (deref1 && vro1->opcode == ADDR_EXPR)
553 	{
554 	  memset (&tem1, 0, sizeof (tem1));
555 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
556 	  tem1.type = TREE_TYPE (tem1.op0);
557 	  tem1.opcode = TREE_CODE (tem1.op0);
558 	  vro1 = &tem1;
559 	  deref1 = false;
560 	}
561       if (deref2 && vro2->opcode == ADDR_EXPR)
562 	{
563 	  memset (&tem2, 0, sizeof (tem2));
564 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
565 	  tem2.type = TREE_TYPE (tem2.op0);
566 	  tem2.opcode = TREE_CODE (tem2.op0);
567 	  vro2 = &tem2;
568 	  deref2 = false;
569 	}
570       if (deref1 != deref2)
571 	return false;
572       if (!vn_reference_op_eq (vro1, vro2))
573 	return false;
574       ++j;
575       ++i;
576     }
577   while (VEC_length (vn_reference_op_s, vr1->operands) != i
578 	 || VEC_length (vn_reference_op_s, vr2->operands) != j);
579 
580   return true;
581 }
582 
583 /* Copy the operations present in load/store REF into RESULT, a vector of
584    vn_reference_op_s's.  */
585 
586 void
587 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
588 {
589   if (TREE_CODE (ref) == TARGET_MEM_REF)
590     {
591       vn_reference_op_s temp;
592 
593       memset (&temp, 0, sizeof (temp));
594       temp.type = TREE_TYPE (ref);
595       temp.opcode = TREE_CODE (ref);
596       temp.op0 = TMR_INDEX (ref);
597       temp.op1 = TMR_STEP (ref);
598       temp.op2 = TMR_OFFSET (ref);
599       temp.off = -1;
600       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
601 
602       memset (&temp, 0, sizeof (temp));
603       temp.type = NULL_TREE;
604       temp.opcode = ERROR_MARK;
605       temp.op0 = TMR_INDEX2 (ref);
606       temp.off = -1;
607       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
608 
609       memset (&temp, 0, sizeof (temp));
610       temp.type = NULL_TREE;
611       temp.opcode = TREE_CODE (TMR_BASE (ref));
612       temp.op0 = TMR_BASE (ref);
613       temp.off = -1;
614       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
615       return;
616     }
617 
618   /* For non-calls, store the information that makes up the address.  */
619 
620   while (ref)
621     {
622       vn_reference_op_s temp;
623 
624       memset (&temp, 0, sizeof (temp));
625       temp.type = TREE_TYPE (ref);
626       temp.opcode = TREE_CODE (ref);
627       temp.off = -1;
628 
629       switch (temp.opcode)
630 	{
631 	case WITH_SIZE_EXPR:
632 	  temp.op0 = TREE_OPERAND (ref, 1);
633 	  temp.off = 0;
634 	  break;
635 	case MEM_REF:
636 	  /* The base address gets its own vn_reference_op_s structure.  */
637 	  temp.op0 = TREE_OPERAND (ref, 1);
638 	  if (host_integerp (TREE_OPERAND (ref, 1), 0))
639 	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
640 	  break;
641 	case BIT_FIELD_REF:
642 	  /* Record bits and position.  */
643 	  temp.op0 = TREE_OPERAND (ref, 1);
644 	  temp.op1 = TREE_OPERAND (ref, 2);
645 	  break;
646 	case COMPONENT_REF:
647 	  /* The field decl is enough to unambiguously specify the field,
648 	     a matching type is not necessary and a mismatching type
649 	     is always a spurious difference.  */
650 	  temp.type = NULL_TREE;
651 	  temp.op0 = TREE_OPERAND (ref, 1);
652 	  temp.op1 = TREE_OPERAND (ref, 2);
653 	  {
654 	    tree this_offset = component_ref_field_offset (ref);
655 	    if (this_offset
656 		&& TREE_CODE (this_offset) == INTEGER_CST)
657 	      {
658 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
659 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
660 		  {
661 		    double_int off
662 		      = double_int_add (tree_to_double_int (this_offset),
663 					double_int_rshift
664 					  (tree_to_double_int (bit_offset),
665 					   BITS_PER_UNIT == 8
666 					   ? 3 : exact_log2 (BITS_PER_UNIT),
667 					   HOST_BITS_PER_DOUBLE_INT, true));
668 		    if (double_int_fits_in_shwi_p (off))
669 		      temp.off = off.low;
670 		  }
671 	      }
672 	  }
673 	  break;
674 	case ARRAY_RANGE_REF:
675 	case ARRAY_REF:
676 	  /* Record index as operand.  */
677 	  temp.op0 = TREE_OPERAND (ref, 1);
678 	  /* Always record lower bounds and element size.  */
679 	  temp.op1 = array_ref_low_bound (ref);
680 	  temp.op2 = array_ref_element_size (ref);
681 	  if (TREE_CODE (temp.op0) == INTEGER_CST
682 	      && TREE_CODE (temp.op1) == INTEGER_CST
683 	      && TREE_CODE (temp.op2) == INTEGER_CST)
684 	    {
685 	      double_int off = tree_to_double_int (temp.op0);
686 	      off = double_int_add (off,
687 				    double_int_neg
688 				      (tree_to_double_int (temp.op1)));
689 	      off = double_int_mul (off, tree_to_double_int (temp.op2));
690 	      if (double_int_fits_in_shwi_p (off))
691 		temp.off = off.low;
692 	    }
693 	  break;
694 	case VAR_DECL:
695 	  if (DECL_HARD_REGISTER (ref))
696 	    {
697 	      temp.op0 = ref;
698 	      break;
699 	    }
700 	  /* Fallthru.  */
701 	case PARM_DECL:
702 	case CONST_DECL:
703 	case RESULT_DECL:
704 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
705 	     when valueizing MEM[ptr] with ptr = &decl.  */
706 	  temp.opcode = MEM_REF;
707 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
708 	  temp.off = 0;
709 	  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
710 	  temp.opcode = ADDR_EXPR;
711 	  temp.op0 = build_fold_addr_expr (ref);
712 	  temp.type = TREE_TYPE (temp.op0);
713 	  temp.off = -1;
714 	  break;
715 	case STRING_CST:
716 	case INTEGER_CST:
717 	case COMPLEX_CST:
718 	case VECTOR_CST:
719 	case REAL_CST:
720 	case FIXED_CST:
721 	case CONSTRUCTOR:
722 	case SSA_NAME:
723 	  temp.op0 = ref;
724 	  break;
725 	case ADDR_EXPR:
726 	  if (is_gimple_min_invariant (ref))
727 	    {
728 	      temp.op0 = ref;
729 	      break;
730 	    }
731 	  /* Fallthrough.  */
732 	  /* These are only interesting for their operands, their
733 	     existence, and their type.  They will never be the last
734 	     ref in the chain of references (IE they require an
735 	     operand), so we don't have to put anything
736 	     for op* as it will be handled by the iteration  */
737 	case REALPART_EXPR:
738 	case VIEW_CONVERT_EXPR:
739 	  temp.off = 0;
740 	  break;
741 	case IMAGPART_EXPR:
742 	  /* This is only interesting for its constant offset.  */
743 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
744 	  break;
745 	default:
746 	  gcc_unreachable ();
747 	}
748       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
749 
750       if (REFERENCE_CLASS_P (ref)
751 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
752 	  || (TREE_CODE (ref) == ADDR_EXPR
753 	      && !is_gimple_min_invariant (ref)))
754 	ref = TREE_OPERAND (ref, 0);
755       else
756 	ref = NULL_TREE;
757     }
758 }
759 
760 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
761    operands in *OPS, the reference alias set SET and the reference type TYPE.
762    Return true if something useful was produced.  */
763 
764 bool
765 ao_ref_init_from_vn_reference (ao_ref *ref,
766 			       alias_set_type set, tree type,
767 			       VEC (vn_reference_op_s, heap) *ops)
768 {
769   vn_reference_op_t op;
770   unsigned i;
771   tree base = NULL_TREE;
772   tree *op0_p = &base;
773   HOST_WIDE_INT offset = 0;
774   HOST_WIDE_INT max_size;
775   HOST_WIDE_INT size = -1;
776   tree size_tree = NULL_TREE;
777   alias_set_type base_alias_set = -1;
778 
779   /* First get the final access size from just the outermost expression.  */
780   op = VEC_index (vn_reference_op_s, ops, 0);
781   if (op->opcode == COMPONENT_REF)
782     size_tree = DECL_SIZE (op->op0);
783   else if (op->opcode == BIT_FIELD_REF)
784     size_tree = op->op0;
785   else
786     {
787       enum machine_mode mode = TYPE_MODE (type);
788       if (mode == BLKmode)
789 	size_tree = TYPE_SIZE (type);
790       else
791         size = GET_MODE_BITSIZE (mode);
792     }
793   if (size_tree != NULL_TREE)
794     {
795       if (!host_integerp (size_tree, 1))
796 	size = -1;
797       else
798 	size = TREE_INT_CST_LOW (size_tree);
799     }
800 
801   /* Initially, maxsize is the same as the accessed element size.
802      In the following it will only grow (or become -1).  */
803   max_size = size;
804 
805   /* Compute cumulative bit-offset for nested component-refs and array-refs,
806      and find the ultimate containing object.  */
807   FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
808     {
809       switch (op->opcode)
810 	{
811 	/* These may be in the reference ops, but we cannot do anything
812 	   sensible with them here.  */
813 	case ADDR_EXPR:
814 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
815 	  if (base != NULL_TREE
816 	      && TREE_CODE (base) == MEM_REF
817 	      && op->op0
818 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
819 	    {
820 	      vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
821 	      base = TREE_OPERAND (op->op0, 0);
822 	      if (pop->off == -1)
823 		{
824 		  max_size = -1;
825 		  offset = 0;
826 		}
827 	      else
828 		offset += pop->off * BITS_PER_UNIT;
829 	      op0_p = NULL;
830 	      break;
831 	    }
832 	  /* Fallthru.  */
833 	case CALL_EXPR:
834 	  return false;
835 
836 	/* Record the base objects.  */
837 	case MEM_REF:
838 	  base_alias_set = get_deref_alias_set (op->op0);
839 	  *op0_p = build2 (MEM_REF, op->type,
840 			   NULL_TREE, op->op0);
841 	  op0_p = &TREE_OPERAND (*op0_p, 0);
842 	  break;
843 
844 	case VAR_DECL:
845 	case PARM_DECL:
846 	case RESULT_DECL:
847 	case SSA_NAME:
848 	  *op0_p = op->op0;
849 	  op0_p = NULL;
850 	  break;
851 
852 	/* And now the usual component-reference style ops.  */
853 	case BIT_FIELD_REF:
854 	  offset += tree_low_cst (op->op1, 0);
855 	  break;
856 
857 	case COMPONENT_REF:
858 	  {
859 	    tree field = op->op0;
860 	    /* We do not have a complete COMPONENT_REF tree here so we
861 	       cannot use component_ref_field_offset.  Do the interesting
862 	       parts manually.  */
863 
864 	    if (op->op1
865 		|| !host_integerp (DECL_FIELD_OFFSET (field), 1))
866 	      max_size = -1;
867 	    else
868 	      {
869 		offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
870 			   * BITS_PER_UNIT);
871 		offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
872 	      }
873 	    break;
874 	  }
875 
876 	case ARRAY_RANGE_REF:
877 	case ARRAY_REF:
878 	  /* We recorded the lower bound and the element size.  */
879 	  if (!host_integerp (op->op0, 0)
880 	      || !host_integerp (op->op1, 0)
881 	      || !host_integerp (op->op2, 0))
882 	    max_size = -1;
883 	  else
884 	    {
885 	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
886 	      hindex -= TREE_INT_CST_LOW (op->op1);
887 	      hindex *= TREE_INT_CST_LOW (op->op2);
888 	      hindex *= BITS_PER_UNIT;
889 	      offset += hindex;
890 	    }
891 	  break;
892 
893 	case REALPART_EXPR:
894 	  break;
895 
896 	case IMAGPART_EXPR:
897 	  offset += size;
898 	  break;
899 
900 	case VIEW_CONVERT_EXPR:
901 	  break;
902 
903 	case STRING_CST:
904 	case INTEGER_CST:
905 	case COMPLEX_CST:
906 	case VECTOR_CST:
907 	case REAL_CST:
908 	case CONSTRUCTOR:
909 	case CONST_DECL:
910 	  return false;
911 
912 	default:
913 	  return false;
914 	}
915     }
916 
917   if (base == NULL_TREE)
918     return false;
919 
920   ref->ref = NULL_TREE;
921   ref->base = base;
922   ref->offset = offset;
923   ref->size = size;
924   ref->max_size = max_size;
925   ref->ref_alias_set = set;
926   if (base_alias_set != -1)
927     ref->base_alias_set = base_alias_set;
928   else
929     ref->base_alias_set = get_alias_set (base);
930   /* We discount volatiles from value-numbering elsewhere.  */
931   ref->volatile_p = false;
932 
933   return true;
934 }
935 
936 /* Copy the operations present in load/store/call REF into RESULT, a vector of
937    vn_reference_op_s's.  */
938 
939 void
940 copy_reference_ops_from_call (gimple call,
941 			      VEC(vn_reference_op_s, heap) **result)
942 {
943   vn_reference_op_s temp;
944   unsigned i;
945 
946   /* Copy the type, opcode, function being called and static chain.  */
947   memset (&temp, 0, sizeof (temp));
948   temp.type = gimple_call_return_type (call);
949   temp.opcode = CALL_EXPR;
950   temp.op0 = gimple_call_fn (call);
951   temp.op1 = gimple_call_chain (call);
952   temp.off = -1;
953   VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
954 
955   /* Copy the call arguments.  As they can be references as well,
956      just chain them together.  */
957   for (i = 0; i < gimple_call_num_args (call); ++i)
958     {
959       tree callarg = gimple_call_arg (call, i);
960       copy_reference_ops_from_ref (callarg, result);
961     }
962 }
963 
964 /* Create a vector of vn_reference_op_s structures from REF, a
965    REFERENCE_CLASS_P tree.  The vector is not shared. */
966 
967 static VEC(vn_reference_op_s, heap) *
968 create_reference_ops_from_ref (tree ref)
969 {
970   VEC (vn_reference_op_s, heap) *result = NULL;
971 
972   copy_reference_ops_from_ref (ref, &result);
973   return result;
974 }
975 
976 /* Create a vector of vn_reference_op_s structures from CALL, a
977    call statement.  The vector is not shared.  */
978 
979 static VEC(vn_reference_op_s, heap) *
980 create_reference_ops_from_call (gimple call)
981 {
982   VEC (vn_reference_op_s, heap) *result = NULL;
983 
984   copy_reference_ops_from_call (call, &result);
985   return result;
986 }
987 
988 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
989    *I_P to point to the last element of the replacement.  */
990 void
991 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
992 			    unsigned int *i_p)
993 {
994   unsigned int i = *i_p;
995   vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
996   vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
997   tree addr_base;
998   HOST_WIDE_INT addr_offset;
999 
1000   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1001      from .foo.bar to the preceeding MEM_REF offset and replace the
1002      address with &OBJ.  */
1003   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1004 					     &addr_offset);
1005   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1006   if (addr_base != op->op0)
1007     {
1008       double_int off = tree_to_double_int (mem_op->op0);
1009       off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1010       off = double_int_add (off, shwi_to_double_int (addr_offset));
1011       mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1012       op->op0 = build_fold_addr_expr (addr_base);
1013       if (host_integerp (mem_op->op0, 0))
1014 	mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1015       else
1016 	mem_op->off = -1;
1017     }
1018 }
1019 
1020 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1021    *I_P to point to the last element of the replacement.  */
1022 static void
1023 vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
1024 				     unsigned int *i_p)
1025 {
1026   unsigned int i = *i_p;
1027   vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
1028   vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
1029   gimple def_stmt;
1030   enum tree_code code;
1031   double_int off;
1032 
1033   def_stmt = SSA_NAME_DEF_STMT (op->op0);
1034   if (!is_gimple_assign (def_stmt))
1035     return;
1036 
1037   code = gimple_assign_rhs_code (def_stmt);
1038   if (code != ADDR_EXPR
1039       && code != POINTER_PLUS_EXPR)
1040     return;
1041 
1042   off = tree_to_double_int (mem_op->op0);
1043   off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
1044 
1045   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1046      from .foo.bar to the preceeding MEM_REF offset and replace the
1047      address with &OBJ.  */
1048   if (code == ADDR_EXPR)
1049     {
1050       tree addr, addr_base;
1051       HOST_WIDE_INT addr_offset;
1052 
1053       addr = gimple_assign_rhs1 (def_stmt);
1054       addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1055 						 &addr_offset);
1056       if (!addr_base
1057 	  || TREE_CODE (addr_base) != MEM_REF)
1058 	return;
1059 
1060       off = double_int_add (off, shwi_to_double_int (addr_offset));
1061       off = double_int_add (off, mem_ref_offset (addr_base));
1062       op->op0 = TREE_OPERAND (addr_base, 0);
1063     }
1064   else
1065     {
1066       tree ptr, ptroff;
1067       ptr = gimple_assign_rhs1 (def_stmt);
1068       ptroff = gimple_assign_rhs2 (def_stmt);
1069       if (TREE_CODE (ptr) != SSA_NAME
1070 	  || TREE_CODE (ptroff) != INTEGER_CST)
1071 	return;
1072 
1073       off = double_int_add (off, tree_to_double_int (ptroff));
1074       op->op0 = ptr;
1075     }
1076 
1077   mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
1078   if (host_integerp (mem_op->op0, 0))
1079     mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
1080   else
1081     mem_op->off = -1;
1082   if (TREE_CODE (op->op0) == SSA_NAME)
1083     op->op0 = SSA_VAL (op->op0);
1084   if (TREE_CODE (op->op0) != SSA_NAME)
1085     op->opcode = TREE_CODE (op->op0);
1086 
1087   /* And recurse.  */
1088   if (TREE_CODE (op->op0) == SSA_NAME)
1089     vn_reference_maybe_forwprop_address (ops, i_p);
1090   else if (TREE_CODE (op->op0) == ADDR_EXPR)
1091     vn_reference_fold_indirect (ops, i_p);
1092 }
1093 
1094 /* Optimize the reference REF to a constant if possible or return
1095    NULL_TREE if not.  */
1096 
1097 tree
1098 fully_constant_vn_reference_p (vn_reference_t ref)
1099 {
1100   VEC (vn_reference_op_s, heap) *operands = ref->operands;
1101   vn_reference_op_t op;
1102 
1103   /* Try to simplify the translated expression if it is
1104      a call to a builtin function with at most two arguments.  */
1105   op = VEC_index (vn_reference_op_s, operands, 0);
1106   if (op->opcode == CALL_EXPR
1107       && TREE_CODE (op->op0) == ADDR_EXPR
1108       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1109       && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1110       && VEC_length (vn_reference_op_s, operands) >= 2
1111       && VEC_length (vn_reference_op_s, operands) <= 3)
1112     {
1113       vn_reference_op_t arg0, arg1 = NULL;
1114       bool anyconst = false;
1115       arg0 = VEC_index (vn_reference_op_s, operands, 1);
1116       if (VEC_length (vn_reference_op_s, operands) > 2)
1117 	arg1 = VEC_index (vn_reference_op_s, operands, 2);
1118       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1119 	  || (arg0->opcode == ADDR_EXPR
1120 	      && is_gimple_min_invariant (arg0->op0)))
1121 	anyconst = true;
1122       if (arg1
1123 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1124 	      || (arg1->opcode == ADDR_EXPR
1125 		  && is_gimple_min_invariant (arg1->op0))))
1126 	anyconst = true;
1127       if (anyconst)
1128 	{
1129 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1130 					 arg1 ? 2 : 1,
1131 					 arg0->op0,
1132 					 arg1 ? arg1->op0 : NULL);
1133 	  if (folded
1134 	      && TREE_CODE (folded) == NOP_EXPR)
1135 	    folded = TREE_OPERAND (folded, 0);
1136 	  if (folded
1137 	      && is_gimple_min_invariant (folded))
1138 	    return folded;
1139 	}
1140     }
1141 
1142   /* Simplify reads from constant strings.  */
1143   else if (op->opcode == ARRAY_REF
1144 	   && TREE_CODE (op->op0) == INTEGER_CST
1145 	   && integer_zerop (op->op1)
1146 	   && VEC_length (vn_reference_op_s, operands) == 2)
1147     {
1148       vn_reference_op_t arg0;
1149       arg0 = VEC_index (vn_reference_op_s, operands, 1);
1150       if (arg0->opcode == STRING_CST
1151 	  && (TYPE_MODE (op->type)
1152 	      == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
1153 	  && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
1154 	  && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
1155 	  && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
1156 	return build_int_cst_type (op->type,
1157 				   (TREE_STRING_POINTER (arg0->op0)
1158 				    [TREE_INT_CST_LOW (op->op0)]));
1159     }
1160 
1161   return NULL_TREE;
1162 }
1163 
1164 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1165    structures into their value numbers.  This is done in-place, and
1166    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1167    whether any operands were valueized.  */
1168 
1169 static VEC (vn_reference_op_s, heap) *
1170 valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
1171 {
1172   vn_reference_op_t vro;
1173   unsigned int i;
1174 
1175   *valueized_anything = false;
1176 
1177   FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
1178     {
1179       if (vro->opcode == SSA_NAME
1180 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1181 	{
1182 	  tree tem = SSA_VAL (vro->op0);
1183 	  if (tem != vro->op0)
1184 	    {
1185 	      *valueized_anything = true;
1186 	      vro->op0 = tem;
1187 	    }
1188 	  /* If it transforms from an SSA_NAME to a constant, update
1189 	     the opcode.  */
1190 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1191 	    vro->opcode = TREE_CODE (vro->op0);
1192 	}
1193       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1194 	{
1195 	  tree tem = SSA_VAL (vro->op1);
1196 	  if (tem != vro->op1)
1197 	    {
1198 	      *valueized_anything = true;
1199 	      vro->op1 = tem;
1200 	    }
1201 	}
1202       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1203 	{
1204 	  tree tem = SSA_VAL (vro->op2);
1205 	  if (tem != vro->op2)
1206 	    {
1207 	      *valueized_anything = true;
1208 	      vro->op2 = tem;
1209 	    }
1210 	}
1211       /* If it transforms from an SSA_NAME to an address, fold with
1212 	 a preceding indirect reference.  */
1213       if (i > 0
1214 	  && vro->op0
1215 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1216 	  && VEC_index (vn_reference_op_s,
1217 			orig, i - 1)->opcode == MEM_REF)
1218 	vn_reference_fold_indirect (&orig, &i);
1219       else if (i > 0
1220 	       && vro->opcode == SSA_NAME
1221 	       && VEC_index (vn_reference_op_s,
1222 			     orig, i - 1)->opcode == MEM_REF)
1223 	vn_reference_maybe_forwprop_address (&orig, &i);
1224       /* If it transforms a non-constant ARRAY_REF into a constant
1225 	 one, adjust the constant offset.  */
1226       else if (vro->opcode == ARRAY_REF
1227 	       && vro->off == -1
1228 	       && TREE_CODE (vro->op0) == INTEGER_CST
1229 	       && TREE_CODE (vro->op1) == INTEGER_CST
1230 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1231 	{
1232 	  double_int off = tree_to_double_int (vro->op0);
1233 	  off = double_int_add (off,
1234 				double_int_neg
1235 				  (tree_to_double_int (vro->op1)));
1236 	  off = double_int_mul (off, tree_to_double_int (vro->op2));
1237 	  if (double_int_fits_in_shwi_p (off))
1238 	    vro->off = off.low;
1239 	}
1240     }
1241 
1242   return orig;
1243 }
1244 
1245 static VEC (vn_reference_op_s, heap) *
1246 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
1247 {
1248   bool tem;
1249   return valueize_refs_1 (orig, &tem);
1250 }
1251 
1252 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
1253 
1254 /* Create a vector of vn_reference_op_s structures from REF, a
1255    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1256    this function.  *VALUEIZED_ANYTHING will specify whether any
1257    operands were valueized.  */
1258 
1259 static VEC(vn_reference_op_s, heap) *
1260 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1261 {
1262   if (!ref)
1263     return NULL;
1264   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1265   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1266   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1267 					      valueized_anything);
1268   return shared_lookup_references;
1269 }
1270 
1271 /* Create a vector of vn_reference_op_s structures from CALL, a
1272    call statement.  The vector is shared among all callers of
1273    this function.  */
1274 
1275 static VEC(vn_reference_op_s, heap) *
1276 valueize_shared_reference_ops_from_call (gimple call)
1277 {
1278   if (!call)
1279     return NULL;
1280   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1281   copy_reference_ops_from_call (call, &shared_lookup_references);
1282   shared_lookup_references = valueize_refs (shared_lookup_references);
1283   return shared_lookup_references;
1284 }
1285 
1286 /* Lookup a SCCVN reference operation VR in the current hash table.
1287    Returns the resulting value number if it exists in the hash table,
1288    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1289    vn_reference_t stored in the hashtable if something is found.  */
1290 
1291 static tree
1292 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1293 {
1294   void **slot;
1295   hashval_t hash;
1296 
1297   hash = vr->hashcode;
1298   slot = htab_find_slot_with_hash (current_info->references, vr,
1299 				   hash, NO_INSERT);
1300   if (!slot && current_info == optimistic_info)
1301     slot = htab_find_slot_with_hash (valid_info->references, vr,
1302 				     hash, NO_INSERT);
1303   if (slot)
1304     {
1305       if (vnresult)
1306 	*vnresult = (vn_reference_t)*slot;
1307       return ((vn_reference_t)*slot)->result;
1308     }
1309 
1310   return NULL_TREE;
1311 }
1312 
1313 static tree *last_vuse_ptr;
1314 static vn_lookup_kind vn_walk_kind;
1315 static vn_lookup_kind default_vn_walk_kind;
1316 
1317 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1318    with the current VUSE and performs the expression lookup.  */
1319 
1320 static void *
1321 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
1322 {
1323   vn_reference_t vr = (vn_reference_t)vr_;
1324   void **slot;
1325   hashval_t hash;
1326 
1327   if (last_vuse_ptr)
1328     *last_vuse_ptr = vuse;
1329 
1330   /* Fixup vuse and hash.  */
1331   if (vr->vuse)
1332     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1333   vr->vuse = SSA_VAL (vuse);
1334   if (vr->vuse)
1335     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1336 
1337   hash = vr->hashcode;
1338   slot = htab_find_slot_with_hash (current_info->references, vr,
1339 				   hash, NO_INSERT);
1340   if (!slot && current_info == optimistic_info)
1341     slot = htab_find_slot_with_hash (valid_info->references, vr,
1342 				     hash, NO_INSERT);
1343   if (slot)
1344     return *slot;
1345 
1346   return NULL;
1347 }
1348 
1349 /* Lookup an existing or insert a new vn_reference entry into the
1350    value table for the VUSE, SET, TYPE, OPERANDS reference which
1351    has the value VALUE which is either a constant or an SSA name.  */
1352 
1353 static vn_reference_t
1354 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1355 					  alias_set_type set,
1356 					  tree type,
1357 					  VEC (vn_reference_op_s,
1358 					       heap) *operands,
1359 					  tree value)
1360 {
1361   struct vn_reference_s vr1;
1362   vn_reference_t result;
1363   unsigned value_id;
1364   vr1.vuse = vuse;
1365   vr1.operands = operands;
1366   vr1.type = type;
1367   vr1.set = set;
1368   vr1.hashcode = vn_reference_compute_hash (&vr1);
1369   if (vn_reference_lookup_1 (&vr1, &result))
1370     return result;
1371   if (TREE_CODE (value) == SSA_NAME)
1372     value_id = VN_INFO (value)->value_id;
1373   else
1374     value_id = get_or_alloc_constant_value_id (value);
1375   return vn_reference_insert_pieces (vuse, set, type,
1376 				     VEC_copy (vn_reference_op_s, heap,
1377 					       operands), value, value_id);
1378 }
1379 
1380 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1381    from the statement defining VUSE and if not successful tries to
1382    translate *REFP and VR_ through an aggregate copy at the defintion
1383    of VUSE.  */
1384 
1385 static void *
1386 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
1387 {
1388   vn_reference_t vr = (vn_reference_t)vr_;
1389   gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
1390   tree base;
1391   HOST_WIDE_INT offset, maxsize;
1392   static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
1393   ao_ref lhs_ref;
1394   bool lhs_ref_ok = false;
1395 
1396   /* First try to disambiguate after value-replacing in the definitions LHS.  */
1397   if (is_gimple_assign (def_stmt))
1398     {
1399       VEC (vn_reference_op_s, heap) *tem;
1400       tree lhs = gimple_assign_lhs (def_stmt);
1401       bool valueized_anything = false;
1402       /* Avoid re-allocation overhead.  */
1403       VEC_truncate (vn_reference_op_s, lhs_ops, 0);
1404       copy_reference_ops_from_ref (lhs, &lhs_ops);
1405       tem = lhs_ops;
1406       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1407       gcc_assert (lhs_ops == tem);
1408       if (valueized_anything)
1409 	{
1410 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1411 						      get_alias_set (lhs),
1412 						      TREE_TYPE (lhs), lhs_ops);
1413 	  if (lhs_ref_ok
1414 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1415 	    return NULL;
1416 	}
1417       else
1418 	{
1419 	  ao_ref_init (&lhs_ref, lhs);
1420 	  lhs_ref_ok = true;
1421 	}
1422     }
1423 
1424   base = ao_ref_base (ref);
1425   offset = ref->offset;
1426   maxsize = ref->max_size;
1427 
1428   /* If we cannot constrain the size of the reference we cannot
1429      test if anything kills it.  */
1430   if (maxsize == -1)
1431     return (void *)-1;
1432 
1433   /* We can't deduce anything useful from clobbers.  */
1434   if (gimple_clobber_p (def_stmt))
1435     return (void *)-1;
1436 
1437   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1438      from that definition.
1439      1) Memset.  */
1440   if (is_gimple_reg_type (vr->type)
1441       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1442       && integer_zerop (gimple_call_arg (def_stmt, 1))
1443       && host_integerp (gimple_call_arg (def_stmt, 2), 1)
1444       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1445     {
1446       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1447       tree base2;
1448       HOST_WIDE_INT offset2, size2, maxsize2;
1449       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
1450       size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
1451       if ((unsigned HOST_WIDE_INT)size2 / 8
1452 	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
1453 	  && maxsize2 != -1
1454 	  && operand_equal_p (base, base2, 0)
1455 	  && offset2 <= offset
1456 	  && offset2 + size2 >= offset + maxsize)
1457 	{
1458 	  tree val = build_zero_cst (vr->type);
1459 	  return vn_reference_lookup_or_insert_for_pieces
1460 	           (vuse, vr->set, vr->type, vr->operands, val);
1461 	}
1462     }
1463 
1464   /* 2) Assignment from an empty CONSTRUCTOR.  */
1465   else if (is_gimple_reg_type (vr->type)
1466 	   && gimple_assign_single_p (def_stmt)
1467 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1468 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1469     {
1470       tree base2;
1471       HOST_WIDE_INT offset2, size2, maxsize2;
1472       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1473 				       &offset2, &size2, &maxsize2);
1474       if (maxsize2 != -1
1475 	  && operand_equal_p (base, base2, 0)
1476 	  && offset2 <= offset
1477 	  && offset2 + size2 >= offset + maxsize)
1478 	{
1479 	  tree val = build_zero_cst (vr->type);
1480 	  return vn_reference_lookup_or_insert_for_pieces
1481 	           (vuse, vr->set, vr->type, vr->operands, val);
1482 	}
1483     }
1484 
1485   /* 3) Assignment from a constant.  We can use folds native encode/interpret
1486      routines to extract the assigned bits.  */
1487   else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
1488 	   && ref->size == maxsize
1489 	   && maxsize % BITS_PER_UNIT == 0
1490 	   && offset % BITS_PER_UNIT == 0
1491 	   && is_gimple_reg_type (vr->type)
1492 	   && gimple_assign_single_p (def_stmt)
1493 	   && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
1494     {
1495       tree base2;
1496       HOST_WIDE_INT offset2, size2, maxsize2;
1497       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1498 				       &offset2, &size2, &maxsize2);
1499       if (maxsize2 != -1
1500 	  && maxsize2 == size2
1501 	  && size2 % BITS_PER_UNIT == 0
1502 	  && offset2 % BITS_PER_UNIT == 0
1503 	  && operand_equal_p (base, base2, 0)
1504 	  && offset2 <= offset
1505 	  && offset2 + size2 >= offset + maxsize)
1506 	{
1507 	  /* We support up to 512-bit values (for V8DFmode).  */
1508 	  unsigned char buffer[64];
1509 	  int len;
1510 
1511 	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1512 				    buffer, sizeof (buffer));
1513 	  if (len > 0)
1514 	    {
1515 	      tree val = native_interpret_expr (vr->type,
1516 						buffer
1517 						+ ((offset - offset2)
1518 						   / BITS_PER_UNIT),
1519 						ref->size / BITS_PER_UNIT);
1520 	      if (val)
1521 		return vn_reference_lookup_or_insert_for_pieces
1522 		         (vuse, vr->set, vr->type, vr->operands, val);
1523 	    }
1524 	}
1525     }
1526 
1527   /* 4) Assignment from an SSA name which definition we may be able
1528      to access pieces from.  */
1529   else if (ref->size == maxsize
1530 	   && is_gimple_reg_type (vr->type)
1531 	   && gimple_assign_single_p (def_stmt)
1532 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
1533     {
1534       tree rhs1 = gimple_assign_rhs1 (def_stmt);
1535       gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
1536       if (is_gimple_assign (def_stmt2)
1537 	  && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
1538 	      || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
1539 	  && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
1540 	{
1541 	  tree base2;
1542 	  HOST_WIDE_INT offset2, size2, maxsize2, off;
1543 	  base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1544 					   &offset2, &size2, &maxsize2);
1545 	  off = offset - offset2;
1546 	  if (maxsize2 != -1
1547 	      && maxsize2 == size2
1548 	      && operand_equal_p (base, base2, 0)
1549 	      && offset2 <= offset
1550 	      && offset2 + size2 >= offset + maxsize)
1551 	    {
1552 	      tree val = NULL_TREE;
1553 	      HOST_WIDE_INT elsz
1554 		= TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
1555 	      if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
1556 		{
1557 		  if (off == 0)
1558 		    val = gimple_assign_rhs1 (def_stmt2);
1559 		  else if (off == elsz)
1560 		    val = gimple_assign_rhs2 (def_stmt2);
1561 		}
1562 	      else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
1563 		       && off % elsz == 0)
1564 		{
1565 		  tree ctor = gimple_assign_rhs1 (def_stmt2);
1566 		  unsigned i = off / elsz;
1567 		  if (i < CONSTRUCTOR_NELTS (ctor))
1568 		    {
1569 		      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
1570 		      if (compare_tree_int (elt->index, i) == 0)
1571 			val = elt->value;
1572 		    }
1573 		}
1574 	      if (val)
1575 		return vn_reference_lookup_or_insert_for_pieces
1576 		         (vuse, vr->set, vr->type, vr->operands, val);
1577 	    }
1578 	}
1579     }
1580 
1581   /* 5) For aggregate copies translate the reference through them if
1582      the copy kills ref.  */
1583   else if (vn_walk_kind == VN_WALKREWRITE
1584 	   && gimple_assign_single_p (def_stmt)
1585 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
1586 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
1587 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
1588     {
1589       tree base2;
1590       HOST_WIDE_INT offset2, size2, maxsize2;
1591       int i, j;
1592       VEC (vn_reference_op_s, heap) *rhs = NULL;
1593       vn_reference_op_t vro;
1594       ao_ref r;
1595 
1596       if (!lhs_ref_ok)
1597 	return (void *)-1;
1598 
1599       /* See if the assignment kills REF.  */
1600       base2 = ao_ref_base (&lhs_ref);
1601       offset2 = lhs_ref.offset;
1602       size2 = lhs_ref.size;
1603       maxsize2 = lhs_ref.max_size;
1604       if (maxsize2 == -1
1605 	  || (base != base2 && !operand_equal_p (base, base2, 0))
1606 	  || offset2 > offset
1607 	  || offset2 + size2 < offset + maxsize)
1608 	return (void *)-1;
1609 
1610       /* Find the common base of ref and the lhs.  lhs_ops already
1611          contains valueized operands for the lhs.  */
1612       i = VEC_length (vn_reference_op_s, vr->operands) - 1;
1613       j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
1614       while (j >= 0 && i >= 0
1615 	     && vn_reference_op_eq (VEC_index (vn_reference_op_s,
1616 					       vr->operands, i),
1617 				    VEC_index (vn_reference_op_s, lhs_ops, j)))
1618 	{
1619 	  i--;
1620 	  j--;
1621 	}
1622 
1623       /* ???  The innermost op should always be a MEM_REF and we already
1624          checked that the assignment to the lhs kills vr.  Thus for
1625 	 aggregate copies using char[] types the vn_reference_op_eq
1626 	 may fail when comparing types for compatibility.  But we really
1627 	 don't care here - further lookups with the rewritten operands
1628 	 will simply fail if we messed up types too badly.  */
1629       if (j == 0 && i >= 0
1630 	  && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
1631 	  && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
1632 	  && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
1633 	      == VEC_index (vn_reference_op_s, vr->operands, i)->off))
1634 	i--, j--;
1635 
1636       /* i now points to the first additional op.
1637 	 ???  LHS may not be completely contained in VR, one or more
1638 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
1639 	 try handling outermost VIEW_CONVERT_EXPRs.  */
1640       if (j != -1)
1641 	return (void *)-1;
1642 
1643       /* Now re-write REF to be based on the rhs of the assignment.  */
1644       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
1645       /* We need to pre-pend vr->operands[0..i] to rhs.  */
1646       if (i + 1 + VEC_length (vn_reference_op_s, rhs)
1647 	  > VEC_length (vn_reference_op_s, vr->operands))
1648 	{
1649 	  VEC (vn_reference_op_s, heap) *old = vr->operands;
1650 	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
1651 			 i + 1 + VEC_length (vn_reference_op_s, rhs));
1652 	  if (old == shared_lookup_references
1653 	      && vr->operands != old)
1654 	    shared_lookup_references = NULL;
1655 	}
1656       else
1657 	VEC_truncate (vn_reference_op_s, vr->operands,
1658 		      i + 1 + VEC_length (vn_reference_op_s, rhs));
1659       FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
1660 	VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
1661       VEC_free (vn_reference_op_s, heap, rhs);
1662       vr->operands = valueize_refs (vr->operands);
1663       vr->hashcode = vn_reference_compute_hash (vr);
1664 
1665       /* Adjust *ref from the new operands.  */
1666       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1667 	return (void *)-1;
1668       /* This can happen with bitfields.  */
1669       if (ref->size != r.size)
1670 	return (void *)-1;
1671       *ref = r;
1672 
1673       /* Do not update last seen VUSE after translating.  */
1674       last_vuse_ptr = NULL;
1675 
1676       /* Keep looking for the adjusted *REF / VR pair.  */
1677       return NULL;
1678     }
1679 
1680   /* 6) For memcpy copies translate the reference through them if
1681      the copy kills ref.  */
1682   else if (vn_walk_kind == VN_WALKREWRITE
1683 	   && is_gimple_reg_type (vr->type)
1684 	   /* ???  Handle BCOPY as well.  */
1685 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
1686 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
1687 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
1688 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
1689 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
1690 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
1691 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
1692 	   && host_integerp (gimple_call_arg (def_stmt, 2), 1))
1693     {
1694       tree lhs, rhs;
1695       ao_ref r;
1696       HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
1697       vn_reference_op_s op;
1698       HOST_WIDE_INT at;
1699 
1700 
1701       /* Only handle non-variable, addressable refs.  */
1702       if (ref->size != maxsize
1703 	  || offset % BITS_PER_UNIT != 0
1704 	  || ref->size % BITS_PER_UNIT != 0)
1705 	return (void *)-1;
1706 
1707       /* Extract a pointer base and an offset for the destination.  */
1708       lhs = gimple_call_arg (def_stmt, 0);
1709       lhs_offset = 0;
1710       if (TREE_CODE (lhs) == SSA_NAME)
1711 	lhs = SSA_VAL (lhs);
1712       if (TREE_CODE (lhs) == ADDR_EXPR)
1713 	{
1714 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
1715 						    &lhs_offset);
1716 	  if (!tem)
1717 	    return (void *)-1;
1718 	  if (TREE_CODE (tem) == MEM_REF
1719 	      && host_integerp (TREE_OPERAND (tem, 1), 1))
1720 	    {
1721 	      lhs = TREE_OPERAND (tem, 0);
1722 	      lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1723 	    }
1724 	  else if (DECL_P (tem))
1725 	    lhs = build_fold_addr_expr (tem);
1726 	  else
1727 	    return (void *)-1;
1728 	}
1729       if (TREE_CODE (lhs) != SSA_NAME
1730 	  && TREE_CODE (lhs) != ADDR_EXPR)
1731 	return (void *)-1;
1732 
1733       /* Extract a pointer base and an offset for the source.  */
1734       rhs = gimple_call_arg (def_stmt, 1);
1735       rhs_offset = 0;
1736       if (TREE_CODE (rhs) == SSA_NAME)
1737 	rhs = SSA_VAL (rhs);
1738       if (TREE_CODE (rhs) == ADDR_EXPR)
1739 	{
1740 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
1741 						    &rhs_offset);
1742 	  if (!tem)
1743 	    return (void *)-1;
1744 	  if (TREE_CODE (tem) == MEM_REF
1745 	      && host_integerp (TREE_OPERAND (tem, 1), 1))
1746 	    {
1747 	      rhs = TREE_OPERAND (tem, 0);
1748 	      rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
1749 	    }
1750 	  else if (DECL_P (tem))
1751 	    rhs = build_fold_addr_expr (tem);
1752 	  else
1753 	    return (void *)-1;
1754 	}
1755       if (TREE_CODE (rhs) != SSA_NAME
1756 	  && TREE_CODE (rhs) != ADDR_EXPR)
1757 	return (void *)-1;
1758 
1759       copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
1760 
1761       /* The bases of the destination and the references have to agree.  */
1762       if ((TREE_CODE (base) != MEM_REF
1763 	   && !DECL_P (base))
1764 	  || (TREE_CODE (base) == MEM_REF
1765 	      && (TREE_OPERAND (base, 0) != lhs
1766 		  || !host_integerp (TREE_OPERAND (base, 1), 1)))
1767 	  || (DECL_P (base)
1768 	      && (TREE_CODE (lhs) != ADDR_EXPR
1769 		  || TREE_OPERAND (lhs, 0) != base)))
1770 	return (void *)-1;
1771 
1772       /* And the access has to be contained within the memcpy destination.  */
1773       at = offset / BITS_PER_UNIT;
1774       if (TREE_CODE (base) == MEM_REF)
1775 	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
1776       if (lhs_offset > at
1777 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
1778 	return (void *)-1;
1779 
1780       /* Make room for 2 operands in the new reference.  */
1781       if (VEC_length (vn_reference_op_s, vr->operands) < 2)
1782 	{
1783 	  VEC (vn_reference_op_s, heap) *old = vr->operands;
1784 	  VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
1785 	  if (old == shared_lookup_references
1786 	      && vr->operands != old)
1787 	    shared_lookup_references = NULL;
1788 	}
1789       else
1790 	VEC_truncate (vn_reference_op_s, vr->operands, 2);
1791 
1792       /* The looked-through reference is a simple MEM_REF.  */
1793       memset (&op, 0, sizeof (op));
1794       op.type = vr->type;
1795       op.opcode = MEM_REF;
1796       op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
1797       op.off = at - lhs_offset + rhs_offset;
1798       VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
1799       op.type = TREE_TYPE (rhs);
1800       op.opcode = TREE_CODE (rhs);
1801       op.op0 = rhs;
1802       op.off = -1;
1803       VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
1804       vr->hashcode = vn_reference_compute_hash (vr);
1805 
1806       /* Adjust *ref from the new operands.  */
1807       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
1808 	return (void *)-1;
1809       /* This can happen with bitfields.  */
1810       if (ref->size != r.size)
1811 	return (void *)-1;
1812       *ref = r;
1813 
1814       /* Do not update last seen VUSE after translating.  */
1815       last_vuse_ptr = NULL;
1816 
1817       /* Keep looking for the adjusted *REF / VR pair.  */
1818       return NULL;
1819     }
1820 
1821   /* Bail out and stop walking.  */
1822   return (void *)-1;
1823 }
1824 
1825 /* Lookup a reference operation by it's parts, in the current hash table.
1826    Returns the resulting value number if it exists in the hash table,
1827    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1828    vn_reference_t stored in the hashtable if something is found.  */
1829 
1830 tree
1831 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
1832 			    VEC (vn_reference_op_s, heap) *operands,
1833 			    vn_reference_t *vnresult, vn_lookup_kind kind)
1834 {
1835   struct vn_reference_s vr1;
1836   vn_reference_t tmp;
1837   tree cst;
1838 
1839   if (!vnresult)
1840     vnresult = &tmp;
1841   *vnresult = NULL;
1842 
1843   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1844   VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
1845   VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
1846 		 VEC_length (vn_reference_op_s, operands));
1847   memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
1848 	  VEC_address (vn_reference_op_s, operands),
1849 	  sizeof (vn_reference_op_s)
1850 	  * VEC_length (vn_reference_op_s, operands));
1851   vr1.operands = operands = shared_lookup_references
1852     = valueize_refs (shared_lookup_references);
1853   vr1.type = type;
1854   vr1.set = set;
1855   vr1.hashcode = vn_reference_compute_hash (&vr1);
1856   if ((cst = fully_constant_vn_reference_p (&vr1)))
1857     return cst;
1858 
1859   vn_reference_lookup_1 (&vr1, vnresult);
1860   if (!*vnresult
1861       && kind != VN_NOWALK
1862       && vr1.vuse)
1863     {
1864       ao_ref r;
1865       vn_walk_kind = kind;
1866       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
1867 	*vnresult =
1868 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1869 						  vn_reference_lookup_2,
1870 						  vn_reference_lookup_3, &vr1);
1871       if (vr1.operands != operands)
1872 	VEC_free (vn_reference_op_s, heap, vr1.operands);
1873     }
1874 
1875   if (*vnresult)
1876      return (*vnresult)->result;
1877 
1878   return NULL_TREE;
1879 }
1880 
1881 /* Lookup OP in the current hash table, and return the resulting value
1882    number if it exists in the hash table.  Return NULL_TREE if it does
1883    not exist in the hash table or if the result field of the structure
1884    was NULL..  VNRESULT will be filled in with the vn_reference_t
1885    stored in the hashtable if one exists.  */
1886 
1887 tree
1888 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1889 		     vn_reference_t *vnresult)
1890 {
1891   VEC (vn_reference_op_s, heap) *operands;
1892   struct vn_reference_s vr1;
1893   tree cst;
1894   bool valuezied_anything;
1895 
1896   if (vnresult)
1897     *vnresult = NULL;
1898 
1899   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1900   vr1.operands = operands
1901     = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
1902   vr1.type = TREE_TYPE (op);
1903   vr1.set = get_alias_set (op);
1904   vr1.hashcode = vn_reference_compute_hash (&vr1);
1905   if ((cst = fully_constant_vn_reference_p (&vr1)))
1906     return cst;
1907 
1908   if (kind != VN_NOWALK
1909       && vr1.vuse)
1910     {
1911       vn_reference_t wvnresult;
1912       ao_ref r;
1913       /* Make sure to use a valueized reference if we valueized anything.
1914          Otherwise preserve the full reference for advanced TBAA.  */
1915       if (!valuezied_anything
1916 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1917 					     vr1.operands))
1918 	ao_ref_init (&r, op);
1919       vn_walk_kind = kind;
1920       wvnresult =
1921 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1922 						vn_reference_lookup_2,
1923 						vn_reference_lookup_3, &vr1);
1924       if (vr1.operands != operands)
1925 	VEC_free (vn_reference_op_s, heap, vr1.operands);
1926       if (wvnresult)
1927 	{
1928 	  if (vnresult)
1929 	    *vnresult = wvnresult;
1930 	  return wvnresult->result;
1931 	}
1932 
1933       return NULL_TREE;
1934     }
1935 
1936   return vn_reference_lookup_1 (&vr1, vnresult);
1937 }
1938 
1939 
1940 /* Insert OP into the current hash table with a value number of
1941    RESULT, and return the resulting reference structure we created.  */
1942 
1943 vn_reference_t
1944 vn_reference_insert (tree op, tree result, tree vuse)
1945 {
1946   void **slot;
1947   vn_reference_t vr1;
1948 
1949   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1950   if (TREE_CODE (result) == SSA_NAME)
1951     vr1->value_id = VN_INFO (result)->value_id;
1952   else
1953     vr1->value_id = get_or_alloc_constant_value_id (result);
1954   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1955   vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1956   vr1->type = TREE_TYPE (op);
1957   vr1->set = get_alias_set (op);
1958   vr1->hashcode = vn_reference_compute_hash (vr1);
1959   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1960 
1961   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1962 				   INSERT);
1963 
1964   /* Because we lookup stores using vuses, and value number failures
1965      using the vdefs (see visit_reference_op_store for how and why),
1966      it's possible that on failure we may try to insert an already
1967      inserted store.  This is not wrong, there is no ssa name for a
1968      store that we could use as a differentiator anyway.  Thus, unlike
1969      the other lookup functions, you cannot gcc_assert (!*slot)
1970      here.  */
1971 
1972   /* But free the old slot in case of a collision.  */
1973   if (*slot)
1974     free_reference (*slot);
1975 
1976   *slot = vr1;
1977   return vr1;
1978 }
1979 
1980 /* Insert a reference by it's pieces into the current hash table with
1981    a value number of RESULT.  Return the resulting reference
1982    structure we created.  */
1983 
1984 vn_reference_t
1985 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1986 			    VEC (vn_reference_op_s, heap) *operands,
1987 			    tree result, unsigned int value_id)
1988 
1989 {
1990   void **slot;
1991   vn_reference_t vr1;
1992 
1993   vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1994   vr1->value_id = value_id;
1995   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1996   vr1->operands = valueize_refs (operands);
1997   vr1->type = type;
1998   vr1->set = set;
1999   vr1->hashcode = vn_reference_compute_hash (vr1);
2000   if (result && TREE_CODE (result) == SSA_NAME)
2001     result = SSA_VAL (result);
2002   vr1->result = result;
2003 
2004   slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2005 				   INSERT);
2006 
2007   /* At this point we should have all the things inserted that we have
2008      seen before, and we should never try inserting something that
2009      already exists.  */
2010   gcc_assert (!*slot);
2011   if (*slot)
2012     free_reference (*slot);
2013 
2014   *slot = vr1;
2015   return vr1;
2016 }
2017 
2018 /* Compute and return the hash value for nary operation VBO1.  */
2019 
2020 hashval_t
2021 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2022 {
2023   hashval_t hash;
2024   unsigned i;
2025 
2026   for (i = 0; i < vno1->length; ++i)
2027     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2028       vno1->op[i] = SSA_VAL (vno1->op[i]);
2029 
2030   if (vno1->length == 2
2031       && commutative_tree_code (vno1->opcode)
2032       && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2033     {
2034       tree temp = vno1->op[0];
2035       vno1->op[0] = vno1->op[1];
2036       vno1->op[1] = temp;
2037     }
2038 
2039   hash = iterative_hash_hashval_t (vno1->opcode, 0);
2040   for (i = 0; i < vno1->length; ++i)
2041     hash = iterative_hash_expr (vno1->op[i], hash);
2042 
2043   return hash;
2044 }
2045 
2046 /* Return the computed hashcode for nary operation P1.  */
2047 
2048 static hashval_t
2049 vn_nary_op_hash (const void *p1)
2050 {
2051   const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2052   return vno1->hashcode;
2053 }
2054 
2055 /* Compare nary operations P1 and P2 and return true if they are
2056    equivalent.  */
2057 
2058 int
2059 vn_nary_op_eq (const void *p1, const void *p2)
2060 {
2061   const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2062   const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2063   unsigned i;
2064 
2065   if (vno1->hashcode != vno2->hashcode)
2066     return false;
2067 
2068   if (vno1->length != vno2->length)
2069     return false;
2070 
2071   if (vno1->opcode != vno2->opcode
2072       || !types_compatible_p (vno1->type, vno2->type))
2073     return false;
2074 
2075   for (i = 0; i < vno1->length; ++i)
2076     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2077       return false;
2078 
2079   return true;
2080 }
2081 
2082 /* Initialize VNO from the pieces provided.  */
2083 
2084 static void
2085 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2086 			     enum tree_code code, tree type, tree *ops)
2087 {
2088   vno->opcode = code;
2089   vno->length = length;
2090   vno->type = type;
2091   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2092 }
2093 
2094 /* Initialize VNO from OP.  */
2095 
2096 static void
2097 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2098 {
2099   unsigned i;
2100 
2101   vno->opcode = TREE_CODE (op);
2102   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2103   vno->type = TREE_TYPE (op);
2104   for (i = 0; i < vno->length; ++i)
2105     vno->op[i] = TREE_OPERAND (op, i);
2106 }
2107 
2108 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2109 
2110 static unsigned int
2111 vn_nary_length_from_stmt (gimple stmt)
2112 {
2113   switch (gimple_assign_rhs_code (stmt))
2114     {
2115     case REALPART_EXPR:
2116     case IMAGPART_EXPR:
2117     case VIEW_CONVERT_EXPR:
2118       return 1;
2119 
2120     case CONSTRUCTOR:
2121       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2122 
2123     default:
2124       return gimple_num_ops (stmt) - 1;
2125     }
2126 }
2127 
2128 /* Initialize VNO from STMT.  */
2129 
2130 static void
2131 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2132 {
2133   unsigned i;
2134 
2135   vno->opcode = gimple_assign_rhs_code (stmt);
2136   vno->type = gimple_expr_type (stmt);
2137   switch (vno->opcode)
2138     {
2139     case REALPART_EXPR:
2140     case IMAGPART_EXPR:
2141     case VIEW_CONVERT_EXPR:
2142       vno->length = 1;
2143       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2144       break;
2145 
2146     case CONSTRUCTOR:
2147       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2148       for (i = 0; i < vno->length; ++i)
2149 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2150       break;
2151 
2152     default:
2153       vno->length = gimple_num_ops (stmt) - 1;
2154       for (i = 0; i < vno->length; ++i)
2155 	vno->op[i] = gimple_op (stmt, i + 1);
2156     }
2157 }
2158 
2159 /* Compute the hashcode for VNO and look for it in the hash table;
2160    return the resulting value number if it exists in the hash table.
2161    Return NULL_TREE if it does not exist in the hash table or if the
2162    result field of the operation is NULL.  VNRESULT will contain the
2163    vn_nary_op_t from the hashtable if it exists.  */
2164 
2165 static tree
2166 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2167 {
2168   void **slot;
2169 
2170   if (vnresult)
2171     *vnresult = NULL;
2172 
2173   vno->hashcode = vn_nary_op_compute_hash (vno);
2174   slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2175 				   NO_INSERT);
2176   if (!slot && current_info == optimistic_info)
2177     slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2178 				     NO_INSERT);
2179   if (!slot)
2180     return NULL_TREE;
2181   if (vnresult)
2182     *vnresult = (vn_nary_op_t)*slot;
2183   return ((vn_nary_op_t)*slot)->result;
2184 }
2185 
2186 /* Lookup a n-ary operation by its pieces and return the resulting value
2187    number if it exists in the hash table.  Return NULL_TREE if it does
2188    not exist in the hash table or if the result field of the operation
2189    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2190    if it exists.  */
2191 
2192 tree
2193 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2194 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2195 {
2196   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2197 				  sizeof_vn_nary_op (length));
2198   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2199   return vn_nary_op_lookup_1 (vno1, vnresult);
2200 }
2201 
2202 /* Lookup OP in the current hash table, and return the resulting value
2203    number if it exists in the hash table.  Return NULL_TREE if it does
2204    not exist in the hash table or if the result field of the operation
2205    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2206    if it exists.  */
2207 
2208 tree
2209 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2210 {
2211   vn_nary_op_t vno1
2212     = XALLOCAVAR (struct vn_nary_op_s,
2213 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2214   init_vn_nary_op_from_op (vno1, op);
2215   return vn_nary_op_lookup_1 (vno1, vnresult);
2216 }
2217 
2218 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2219    value number if it exists in the hash table.  Return NULL_TREE if
2220    it does not exist in the hash table.  VNRESULT will contain the
2221    vn_nary_op_t from the hashtable if it exists.  */
2222 
2223 tree
2224 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2225 {
2226   vn_nary_op_t vno1
2227     = XALLOCAVAR (struct vn_nary_op_s,
2228 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2229   init_vn_nary_op_from_stmt (vno1, stmt);
2230   return vn_nary_op_lookup_1 (vno1, vnresult);
2231 }
2232 
2233 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2234 
2235 static vn_nary_op_t
2236 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2237 {
2238   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2239 }
2240 
2241 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2242    obstack.  */
2243 
2244 static vn_nary_op_t
2245 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2246 {
2247   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2248 					       &current_info->nary_obstack);
2249 
2250   vno1->value_id = value_id;
2251   vno1->length = length;
2252   vno1->result = result;
2253 
2254   return vno1;
2255 }
2256 
2257 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2258    VNO->HASHCODE first.  */
2259 
2260 static vn_nary_op_t
2261 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2262 {
2263   void **slot;
2264 
2265   if (compute_hash)
2266     vno->hashcode = vn_nary_op_compute_hash (vno);
2267 
2268   slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2269   gcc_assert (!*slot);
2270 
2271   *slot = vno;
2272   return vno;
2273 }
2274 
2275 /* Insert a n-ary operation into the current hash table using it's
2276    pieces.  Return the vn_nary_op_t structure we created and put in
2277    the hashtable.  */
2278 
2279 vn_nary_op_t
2280 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2281 			  tree type, tree *ops,
2282 			  tree result, unsigned int value_id)
2283 {
2284   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2285   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2286   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2287 }
2288 
2289 /* Insert OP into the current hash table with a value number of
2290    RESULT.  Return the vn_nary_op_t structure we created and put in
2291    the hashtable.  */
2292 
2293 vn_nary_op_t
2294 vn_nary_op_insert (tree op, tree result)
2295 {
2296   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2297   vn_nary_op_t vno1;
2298 
2299   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2300   init_vn_nary_op_from_op (vno1, op);
2301   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2302 }
2303 
2304 /* Insert the rhs of STMT into the current hash table with a value number of
2305    RESULT.  */
2306 
2307 vn_nary_op_t
2308 vn_nary_op_insert_stmt (gimple stmt, tree result)
2309 {
2310   vn_nary_op_t vno1
2311     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2312 			result, VN_INFO (result)->value_id);
2313   init_vn_nary_op_from_stmt (vno1, stmt);
2314   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2315 }
2316 
2317 /* Compute a hashcode for PHI operation VP1 and return it.  */
2318 
2319 static inline hashval_t
2320 vn_phi_compute_hash (vn_phi_t vp1)
2321 {
2322   hashval_t result;
2323   int i;
2324   tree phi1op;
2325   tree type;
2326 
2327   result = vp1->block->index;
2328 
2329   /* If all PHI arguments are constants we need to distinguish
2330      the PHI node via its type.  */
2331   type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2332   result += (INTEGRAL_TYPE_P (type)
2333 	     + (INTEGRAL_TYPE_P (type)
2334 		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2335 
2336   FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2337     {
2338       if (phi1op == VN_TOP)
2339 	continue;
2340       result = iterative_hash_expr (phi1op, result);
2341     }
2342 
2343   return result;
2344 }
2345 
2346 /* Return the computed hashcode for phi operation P1.  */
2347 
2348 static hashval_t
2349 vn_phi_hash (const void *p1)
2350 {
2351   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2352   return vp1->hashcode;
2353 }
2354 
2355 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
2356 
2357 static int
2358 vn_phi_eq (const void *p1, const void *p2)
2359 {
2360   const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2361   const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2362 
2363   if (vp1->hashcode != vp2->hashcode)
2364     return false;
2365 
2366   if (vp1->block == vp2->block)
2367     {
2368       int i;
2369       tree phi1op;
2370 
2371       /* If the PHI nodes do not have compatible types
2372 	 they are not the same.  */
2373       if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2374 			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2375 	return false;
2376 
2377       /* Any phi in the same block will have it's arguments in the
2378 	 same edge order, because of how we store phi nodes.  */
2379       FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2380 	{
2381 	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
2382 	  if (phi1op == VN_TOP || phi2op == VN_TOP)
2383 	    continue;
2384 	  if (!expressions_equal_p (phi1op, phi2op))
2385 	    return false;
2386 	}
2387       return true;
2388     }
2389   return false;
2390 }
2391 
2392 static VEC(tree, heap) *shared_lookup_phiargs;
2393 
2394 /* Lookup PHI in the current hash table, and return the resulting
2395    value number if it exists in the hash table.  Return NULL_TREE if
2396    it does not exist in the hash table. */
2397 
2398 static tree
2399 vn_phi_lookup (gimple phi)
2400 {
2401   void **slot;
2402   struct vn_phi_s vp1;
2403   unsigned i;
2404 
2405   VEC_truncate (tree, shared_lookup_phiargs, 0);
2406 
2407   /* Canonicalize the SSA_NAME's to their value number.  */
2408   for (i = 0; i < gimple_phi_num_args (phi); i++)
2409     {
2410       tree def = PHI_ARG_DEF (phi, i);
2411       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2412       VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2413     }
2414   vp1.phiargs = shared_lookup_phiargs;
2415   vp1.block = gimple_bb (phi);
2416   vp1.hashcode = vn_phi_compute_hash (&vp1);
2417   slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2418 				   NO_INSERT);
2419   if (!slot && current_info == optimistic_info)
2420     slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2421 				     NO_INSERT);
2422   if (!slot)
2423     return NULL_TREE;
2424   return ((vn_phi_t)*slot)->result;
2425 }
2426 
2427 /* Insert PHI into the current hash table with a value number of
2428    RESULT.  */
2429 
2430 static vn_phi_t
2431 vn_phi_insert (gimple phi, tree result)
2432 {
2433   void **slot;
2434   vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2435   unsigned i;
2436   VEC (tree, heap) *args = NULL;
2437 
2438   /* Canonicalize the SSA_NAME's to their value number.  */
2439   for (i = 0; i < gimple_phi_num_args (phi); i++)
2440     {
2441       tree def = PHI_ARG_DEF (phi, i);
2442       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2443       VEC_safe_push (tree, heap, args, def);
2444     }
2445   vp1->value_id = VN_INFO (result)->value_id;
2446   vp1->phiargs = args;
2447   vp1->block = gimple_bb (phi);
2448   vp1->result = result;
2449   vp1->hashcode = vn_phi_compute_hash (vp1);
2450 
2451   slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2452 				   INSERT);
2453 
2454   /* Because we iterate over phi operations more than once, it's
2455      possible the slot might already exist here, hence no assert.*/
2456   *slot = vp1;
2457   return vp1;
2458 }
2459 
2460 
2461 /* Print set of components in strongly connected component SCC to OUT. */
2462 
2463 static void
2464 print_scc (FILE *out, VEC (tree, heap) *scc)
2465 {
2466   tree var;
2467   unsigned int i;
2468 
2469   fprintf (out, "SCC consists of:");
2470   FOR_EACH_VEC_ELT (tree, scc, i, var)
2471     {
2472       fprintf (out, " ");
2473       print_generic_expr (out, var, 0);
2474     }
2475   fprintf (out, "\n");
2476 }
2477 
2478 /* Set the value number of FROM to TO, return true if it has changed
2479    as a result.  */
2480 
2481 static inline bool
2482 set_ssa_val_to (tree from, tree to)
2483 {
2484   tree currval = SSA_VAL (from);
2485 
2486   if (from != to)
2487     {
2488       if (currval == from)
2489 	{
2490 	  if (dump_file && (dump_flags & TDF_DETAILS))
2491 	    {
2492 	      fprintf (dump_file, "Not changing value number of ");
2493 	      print_generic_expr (dump_file, from, 0);
2494 	      fprintf (dump_file, " from VARYING to ");
2495 	      print_generic_expr (dump_file, to, 0);
2496 	      fprintf (dump_file, "\n");
2497 	    }
2498 	  return false;
2499 	}
2500       else if (TREE_CODE (to) == SSA_NAME
2501 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2502 	to = from;
2503     }
2504 
2505   /* The only thing we allow as value numbers are VN_TOP, ssa_names
2506      and invariants.  So assert that here.  */
2507   gcc_assert (to != NULL_TREE
2508 	      && (to == VN_TOP
2509 		  || TREE_CODE (to) == SSA_NAME
2510 		  || is_gimple_min_invariant (to)));
2511 
2512   if (dump_file && (dump_flags & TDF_DETAILS))
2513     {
2514       fprintf (dump_file, "Setting value number of ");
2515       print_generic_expr (dump_file, from, 0);
2516       fprintf (dump_file, " to ");
2517       print_generic_expr (dump_file, to, 0);
2518     }
2519 
2520   if (currval != to  && !operand_equal_p (currval, to, OEP_PURE_SAME))
2521     {
2522       VN_INFO (from)->valnum = to;
2523       if (dump_file && (dump_flags & TDF_DETAILS))
2524 	fprintf (dump_file, " (changed)\n");
2525       return true;
2526     }
2527   if (dump_file && (dump_flags & TDF_DETAILS))
2528     fprintf (dump_file, "\n");
2529   return false;
2530 }
2531 
2532 /* Set all definitions in STMT to value number to themselves.
2533    Return true if a value number changed. */
2534 
2535 static bool
2536 defs_to_varying (gimple stmt)
2537 {
2538   bool changed = false;
2539   ssa_op_iter iter;
2540   def_operand_p defp;
2541 
2542   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2543     {
2544       tree def = DEF_FROM_PTR (defp);
2545 
2546       VN_INFO (def)->use_processed = true;
2547       changed |= set_ssa_val_to (def, def);
2548     }
2549   return changed;
2550 }
2551 
2552 static bool expr_has_constants (tree expr);
2553 static tree valueize_expr (tree expr);
2554 
2555 /* Visit a copy between LHS and RHS, return true if the value number
2556    changed.  */
2557 
2558 static bool
2559 visit_copy (tree lhs, tree rhs)
2560 {
2561   /* Follow chains of copies to their destination.  */
2562   while (TREE_CODE (rhs) == SSA_NAME
2563 	 && SSA_VAL (rhs) != rhs)
2564     rhs = SSA_VAL (rhs);
2565 
2566   /* The copy may have a more interesting constant filled expression
2567      (we don't, since we know our RHS is just an SSA name).  */
2568   if (TREE_CODE (rhs) == SSA_NAME)
2569     {
2570       VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2571       VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2572     }
2573 
2574   return set_ssa_val_to (lhs, rhs);
2575 }
2576 
2577 /* Visit a nary operator RHS, value number it, and return true if the
2578    value number of LHS has changed as a result.  */
2579 
2580 static bool
2581 visit_nary_op (tree lhs, gimple stmt)
2582 {
2583   bool changed = false;
2584   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2585 
2586   if (result)
2587     changed = set_ssa_val_to (lhs, result);
2588   else
2589     {
2590       changed = set_ssa_val_to (lhs, lhs);
2591       vn_nary_op_insert_stmt (stmt, lhs);
2592     }
2593 
2594   return changed;
2595 }
2596 
2597 /* Visit a call STMT storing into LHS.  Return true if the value number
2598    of the LHS has changed as a result.  */
2599 
2600 static bool
2601 visit_reference_op_call (tree lhs, gimple stmt)
2602 {
2603   bool changed = false;
2604   struct vn_reference_s vr1;
2605   tree result;
2606   tree vuse = gimple_vuse (stmt);
2607 
2608   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2609   vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2610   vr1.type = gimple_expr_type (stmt);
2611   vr1.set = 0;
2612   vr1.hashcode = vn_reference_compute_hash (&vr1);
2613   result = vn_reference_lookup_1 (&vr1, NULL);
2614   if (result)
2615     {
2616       changed = set_ssa_val_to (lhs, result);
2617       if (TREE_CODE (result) == SSA_NAME
2618 	  && VN_INFO (result)->has_constants)
2619 	VN_INFO (lhs)->has_constants = true;
2620     }
2621   else
2622     {
2623       void **slot;
2624       vn_reference_t vr2;
2625       changed = set_ssa_val_to (lhs, lhs);
2626       vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2627       vr2->vuse = vr1.vuse;
2628       vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2629       vr2->type = vr1.type;
2630       vr2->set = vr1.set;
2631       vr2->hashcode = vr1.hashcode;
2632       vr2->result = lhs;
2633       slot = htab_find_slot_with_hash (current_info->references,
2634 				       vr2, vr2->hashcode, INSERT);
2635       if (*slot)
2636 	free_reference (*slot);
2637       *slot = vr2;
2638     }
2639 
2640   return changed;
2641 }
2642 
2643 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2644    and return true if the value number of the LHS has changed as a result.  */
2645 
2646 static bool
2647 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2648 {
2649   bool changed = false;
2650   tree last_vuse;
2651   tree result;
2652 
2653   last_vuse = gimple_vuse (stmt);
2654   last_vuse_ptr = &last_vuse;
2655   result = vn_reference_lookup (op, gimple_vuse (stmt),
2656 				default_vn_walk_kind, NULL);
2657   last_vuse_ptr = NULL;
2658 
2659   /* If we have a VCE, try looking up its operand as it might be stored in
2660      a different type.  */
2661   if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2662     result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2663     				  default_vn_walk_kind, NULL);
2664 
2665   /* We handle type-punning through unions by value-numbering based
2666      on offset and size of the access.  Be prepared to handle a
2667      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
2668   if (result
2669       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2670     {
2671       /* We will be setting the value number of lhs to the value number
2672 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2673 	 So first simplify and lookup this expression to see if it
2674 	 is already available.  */
2675       tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2676       if ((CONVERT_EXPR_P (val)
2677 	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2678 	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2679         {
2680 	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2681 	  if ((CONVERT_EXPR_P (tem)
2682 	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2683 	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2684 						    TREE_TYPE (val), tem)))
2685 	    val = tem;
2686 	}
2687       result = val;
2688       if (!is_gimple_min_invariant (val)
2689 	  && TREE_CODE (val) != SSA_NAME)
2690 	result = vn_nary_op_lookup (val, NULL);
2691       /* If the expression is not yet available, value-number lhs to
2692 	 a new SSA_NAME we create.  */
2693       if (!result)
2694         {
2695 	  result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2696 	  /* Initialize value-number information properly.  */
2697 	  VN_INFO_GET (result)->valnum = result;
2698 	  VN_INFO (result)->value_id = get_next_value_id ();
2699 	  VN_INFO (result)->expr = val;
2700 	  VN_INFO (result)->has_constants = expr_has_constants (val);
2701 	  VN_INFO (result)->needs_insertion = true;
2702 	  /* As all "inserted" statements are singleton SCCs, insert
2703 	     to the valid table.  This is strictly needed to
2704 	     avoid re-generating new value SSA_NAMEs for the same
2705 	     expression during SCC iteration over and over (the
2706 	     optimistic table gets cleared after each iteration).
2707 	     We do not need to insert into the optimistic table, as
2708 	     lookups there will fall back to the valid table.  */
2709 	  if (current_info == optimistic_info)
2710 	    {
2711 	      current_info = valid_info;
2712 	      vn_nary_op_insert (val, result);
2713 	      current_info = optimistic_info;
2714 	    }
2715 	  else
2716 	    vn_nary_op_insert (val, result);
2717 	  if (dump_file && (dump_flags & TDF_DETAILS))
2718 	    {
2719 	      fprintf (dump_file, "Inserting name ");
2720 	      print_generic_expr (dump_file, result, 0);
2721 	      fprintf (dump_file, " for expression ");
2722 	      print_generic_expr (dump_file, val, 0);
2723 	      fprintf (dump_file, "\n");
2724 	    }
2725 	}
2726     }
2727 
2728   if (result)
2729     {
2730       changed = set_ssa_val_to (lhs, result);
2731       if (TREE_CODE (result) == SSA_NAME
2732 	  && VN_INFO (result)->has_constants)
2733 	{
2734 	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2735 	  VN_INFO (lhs)->has_constants = true;
2736 	}
2737     }
2738   else
2739     {
2740       changed = set_ssa_val_to (lhs, lhs);
2741       vn_reference_insert (op, lhs, last_vuse);
2742     }
2743 
2744   return changed;
2745 }
2746 
2747 
2748 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2749    and return true if the value number of the LHS has changed as a result.  */
2750 
2751 static bool
2752 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2753 {
2754   bool changed = false;
2755   tree result;
2756   bool resultsame = false;
2757 
2758   /* First we want to lookup using the *vuses* from the store and see
2759      if there the last store to this location with the same address
2760      had the same value.
2761 
2762      The vuses represent the memory state before the store.  If the
2763      memory state, address, and value of the store is the same as the
2764      last store to this location, then this store will produce the
2765      same memory state as that store.
2766 
2767      In this case the vdef versions for this store are value numbered to those
2768      vuse versions, since they represent the same memory state after
2769      this store.
2770 
2771      Otherwise, the vdefs for the store are used when inserting into
2772      the table, since the store generates a new memory state.  */
2773 
2774   result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2775 
2776   if (result)
2777     {
2778       if (TREE_CODE (result) == SSA_NAME)
2779 	result = SSA_VAL (result);
2780       if (TREE_CODE (op) == SSA_NAME)
2781 	op = SSA_VAL (op);
2782       resultsame = expressions_equal_p (result, op);
2783     }
2784 
2785   if (!result || !resultsame)
2786     {
2787       tree vdef;
2788 
2789       if (dump_file && (dump_flags & TDF_DETAILS))
2790 	{
2791 	  fprintf (dump_file, "No store match\n");
2792 	  fprintf (dump_file, "Value numbering store ");
2793 	  print_generic_expr (dump_file, lhs, 0);
2794 	  fprintf (dump_file, " to ");
2795 	  print_generic_expr (dump_file, op, 0);
2796 	  fprintf (dump_file, "\n");
2797 	}
2798       /* Have to set value numbers before insert, since insert is
2799 	 going to valueize the references in-place.  */
2800       if ((vdef = gimple_vdef (stmt)))
2801 	{
2802 	  VN_INFO (vdef)->use_processed = true;
2803 	  changed |= set_ssa_val_to (vdef, vdef);
2804 	}
2805 
2806       /* Do not insert structure copies into the tables.  */
2807       if (is_gimple_min_invariant (op)
2808 	  || is_gimple_reg (op))
2809         vn_reference_insert (lhs, op, vdef);
2810     }
2811   else
2812     {
2813       /* We had a match, so value number the vdef to have the value
2814 	 number of the vuse it came from.  */
2815       tree def, use;
2816 
2817       if (dump_file && (dump_flags & TDF_DETAILS))
2818 	fprintf (dump_file, "Store matched earlier value,"
2819 		 "value numbering store vdefs to matching vuses.\n");
2820 
2821       def = gimple_vdef (stmt);
2822       use = gimple_vuse (stmt);
2823 
2824       VN_INFO (def)->use_processed = true;
2825       changed |= set_ssa_val_to (def, SSA_VAL (use));
2826     }
2827 
2828   return changed;
2829 }
2830 
2831 /* Visit and value number PHI, return true if the value number
2832    changed.  */
2833 
2834 static bool
2835 visit_phi (gimple phi)
2836 {
2837   bool changed = false;
2838   tree result;
2839   tree sameval = VN_TOP;
2840   bool allsame = true;
2841   unsigned i;
2842 
2843   /* TODO: We could check for this in init_sccvn, and replace this
2844      with a gcc_assert.  */
2845   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2846     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2847 
2848   /* See if all non-TOP arguments have the same value.  TOP is
2849      equivalent to everything, so we can ignore it.  */
2850   for (i = 0; i < gimple_phi_num_args (phi); i++)
2851     {
2852       tree def = PHI_ARG_DEF (phi, i);
2853 
2854       if (TREE_CODE (def) == SSA_NAME)
2855 	def = SSA_VAL (def);
2856       if (def == VN_TOP)
2857 	continue;
2858       if (sameval == VN_TOP)
2859 	{
2860 	  sameval = def;
2861 	}
2862       else
2863 	{
2864 	  if (!expressions_equal_p (def, sameval))
2865 	    {
2866 	      allsame = false;
2867 	      break;
2868 	    }
2869 	}
2870     }
2871 
2872   /* If all value numbered to the same value, the phi node has that
2873      value.  */
2874   if (allsame)
2875     {
2876       if (is_gimple_min_invariant (sameval))
2877 	{
2878 	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
2879 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
2880 	}
2881       else
2882 	{
2883 	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
2884 	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
2885 	}
2886 
2887       if (TREE_CODE (sameval) == SSA_NAME)
2888 	return visit_copy (PHI_RESULT (phi), sameval);
2889 
2890       return set_ssa_val_to (PHI_RESULT (phi), sameval);
2891     }
2892 
2893   /* Otherwise, see if it is equivalent to a phi node in this block.  */
2894   result = vn_phi_lookup (phi);
2895   if (result)
2896     {
2897       if (TREE_CODE (result) == SSA_NAME)
2898 	changed = visit_copy (PHI_RESULT (phi), result);
2899       else
2900 	changed = set_ssa_val_to (PHI_RESULT (phi), result);
2901     }
2902   else
2903     {
2904       vn_phi_insert (phi, PHI_RESULT (phi));
2905       VN_INFO (PHI_RESULT (phi))->has_constants = false;
2906       VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2907       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2908     }
2909 
2910   return changed;
2911 }
2912 
2913 /* Return true if EXPR contains constants.  */
2914 
2915 static bool
2916 expr_has_constants (tree expr)
2917 {
2918   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2919     {
2920     case tcc_unary:
2921       return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2922 
2923     case tcc_binary:
2924       return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2925 	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2926       /* Constants inside reference ops are rarely interesting, but
2927 	 it can take a lot of looking to find them.  */
2928     case tcc_reference:
2929     case tcc_declaration:
2930       return false;
2931     default:
2932       return is_gimple_min_invariant (expr);
2933     }
2934   return false;
2935 }
2936 
2937 /* Return true if STMT contains constants.  */
2938 
2939 static bool
2940 stmt_has_constants (gimple stmt)
2941 {
2942   if (gimple_code (stmt) != GIMPLE_ASSIGN)
2943     return false;
2944 
2945   switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2946     {
2947     case GIMPLE_UNARY_RHS:
2948       return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2949 
2950     case GIMPLE_BINARY_RHS:
2951       return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2952 	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2953     case GIMPLE_TERNARY_RHS:
2954       return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2955 	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2956 	      || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2957     case GIMPLE_SINGLE_RHS:
2958       /* Constants inside reference ops are rarely interesting, but
2959 	 it can take a lot of looking to find them.  */
2960       return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2961     default:
2962       gcc_unreachable ();
2963     }
2964   return false;
2965 }
2966 
2967 /* Replace SSA_NAMES in expr with their value numbers, and return the
2968    result.
2969    This is performed in place. */
2970 
2971 static tree
2972 valueize_expr (tree expr)
2973 {
2974   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2975     {
2976     case tcc_binary:
2977       TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2978       /* Fallthru.  */
2979     case tcc_unary:
2980       TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2981       break;
2982     default:;
2983     }
2984   return expr;
2985 }
2986 
2987 /* Simplify the binary expression RHS, and return the result if
2988    simplified. */
2989 
2990 static tree
2991 simplify_binary_expression (gimple stmt)
2992 {
2993   tree result = NULL_TREE;
2994   tree op0 = gimple_assign_rhs1 (stmt);
2995   tree op1 = gimple_assign_rhs2 (stmt);
2996   enum tree_code code = gimple_assign_rhs_code (stmt);
2997 
2998   /* This will not catch every single case we could combine, but will
2999      catch those with constants.  The goal here is to simultaneously
3000      combine constants between expressions, but avoid infinite
3001      expansion of expressions during simplification.  */
3002   if (TREE_CODE (op0) == SSA_NAME)
3003     {
3004       if (VN_INFO (op0)->has_constants
3005 	  || TREE_CODE_CLASS (code) == tcc_comparison
3006 	  || code == COMPLEX_EXPR)
3007 	op0 = valueize_expr (vn_get_expr_for (op0));
3008       else
3009 	op0 = vn_valueize (op0);
3010     }
3011 
3012   if (TREE_CODE (op1) == SSA_NAME)
3013     {
3014       if (VN_INFO (op1)->has_constants
3015 	  || code == COMPLEX_EXPR)
3016 	op1 = valueize_expr (vn_get_expr_for (op1));
3017       else
3018 	op1 = vn_valueize (op1);
3019     }
3020 
3021   /* Pointer plus constant can be represented as invariant address.
3022      Do so to allow further propatation, see also tree forwprop.  */
3023   if (code == POINTER_PLUS_EXPR
3024       && host_integerp (op1, 1)
3025       && TREE_CODE (op0) == ADDR_EXPR
3026       && is_gimple_min_invariant (op0))
3027     return build_invariant_address (TREE_TYPE (op0),
3028 				    TREE_OPERAND (op0, 0),
3029 				    TREE_INT_CST_LOW (op1));
3030 
3031   /* Avoid folding if nothing changed.  */
3032   if (op0 == gimple_assign_rhs1 (stmt)
3033       && op1 == gimple_assign_rhs2 (stmt))
3034     return NULL_TREE;
3035 
3036   fold_defer_overflow_warnings ();
3037 
3038   result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3039   if (result)
3040     STRIP_USELESS_TYPE_CONVERSION (result);
3041 
3042   fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3043 				  stmt, 0);
3044 
3045   /* Make sure result is not a complex expression consisting
3046      of operators of operators (IE (a + b) + (a + c))
3047      Otherwise, we will end up with unbounded expressions if
3048      fold does anything at all.  */
3049   if (result && valid_gimple_rhs_p (result))
3050     return result;
3051 
3052   return NULL_TREE;
3053 }
3054 
3055 /* Simplify the unary expression RHS, and return the result if
3056    simplified. */
3057 
3058 static tree
3059 simplify_unary_expression (gimple stmt)
3060 {
3061   tree result = NULL_TREE;
3062   tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3063   enum tree_code code = gimple_assign_rhs_code (stmt);
3064 
3065   /* We handle some tcc_reference codes here that are all
3066      GIMPLE_ASSIGN_SINGLE codes.  */
3067   if (code == REALPART_EXPR
3068       || code == IMAGPART_EXPR
3069       || code == VIEW_CONVERT_EXPR
3070       || code == BIT_FIELD_REF)
3071     op0 = TREE_OPERAND (op0, 0);
3072 
3073   if (TREE_CODE (op0) != SSA_NAME)
3074     return NULL_TREE;
3075 
3076   orig_op0 = op0;
3077   if (VN_INFO (op0)->has_constants)
3078     op0 = valueize_expr (vn_get_expr_for (op0));
3079   else if (CONVERT_EXPR_CODE_P (code)
3080 	   || code == REALPART_EXPR
3081 	   || code == IMAGPART_EXPR
3082 	   || code == VIEW_CONVERT_EXPR
3083 	   || code == BIT_FIELD_REF)
3084     {
3085       /* We want to do tree-combining on conversion-like expressions.
3086          Make sure we feed only SSA_NAMEs or constants to fold though.  */
3087       tree tem = valueize_expr (vn_get_expr_for (op0));
3088       if (UNARY_CLASS_P (tem)
3089 	  || BINARY_CLASS_P (tem)
3090 	  || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3091 	  || TREE_CODE (tem) == SSA_NAME
3092 	  || TREE_CODE (tem) == CONSTRUCTOR
3093 	  || is_gimple_min_invariant (tem))
3094 	op0 = tem;
3095     }
3096 
3097   /* Avoid folding if nothing changed, but remember the expression.  */
3098   if (op0 == orig_op0)
3099     return NULL_TREE;
3100 
3101   if (code == BIT_FIELD_REF)
3102     {
3103       tree rhs = gimple_assign_rhs1 (stmt);
3104       result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3105 			     op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3106     }
3107   else
3108     result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3109   if (result)
3110     {
3111       STRIP_USELESS_TYPE_CONVERSION (result);
3112       if (valid_gimple_rhs_p (result))
3113         return result;
3114     }
3115 
3116   return NULL_TREE;
3117 }
3118 
3119 /* Try to simplify RHS using equivalences and constant folding.  */
3120 
3121 static tree
3122 try_to_simplify (gimple stmt)
3123 {
3124   enum tree_code code = gimple_assign_rhs_code (stmt);
3125   tree tem;
3126 
3127   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
3128      in this case, there is no point in doing extra work.  */
3129   if (code == SSA_NAME)
3130     return NULL_TREE;
3131 
3132   /* First try constant folding based on our current lattice.  */
3133   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3134   if (tem
3135       && (TREE_CODE (tem) == SSA_NAME
3136 	  || is_gimple_min_invariant (tem)))
3137     return tem;
3138 
3139   /* If that didn't work try combining multiple statements.  */
3140   switch (TREE_CODE_CLASS (code))
3141     {
3142     case tcc_reference:
3143       /* Fallthrough for some unary codes that can operate on registers.  */
3144       if (!(code == REALPART_EXPR
3145 	    || code == IMAGPART_EXPR
3146 	    || code == VIEW_CONVERT_EXPR
3147 	    || code == BIT_FIELD_REF))
3148 	break;
3149       /* We could do a little more with unary ops, if they expand
3150 	 into binary ops, but it's debatable whether it is worth it. */
3151     case tcc_unary:
3152       return simplify_unary_expression (stmt);
3153 
3154     case tcc_comparison:
3155     case tcc_binary:
3156       return simplify_binary_expression (stmt);
3157 
3158     default:
3159       break;
3160     }
3161 
3162   return NULL_TREE;
3163 }
3164 
3165 /* Visit and value number USE, return true if the value number
3166    changed. */
3167 
3168 static bool
3169 visit_use (tree use)
3170 {
3171   bool changed = false;
3172   gimple stmt = SSA_NAME_DEF_STMT (use);
3173 
3174   VN_INFO (use)->use_processed = true;
3175 
3176   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3177   if (dump_file && (dump_flags & TDF_DETAILS)
3178       && !SSA_NAME_IS_DEFAULT_DEF (use))
3179     {
3180       fprintf (dump_file, "Value numbering ");
3181       print_generic_expr (dump_file, use, 0);
3182       fprintf (dump_file, " stmt = ");
3183       print_gimple_stmt (dump_file, stmt, 0, 0);
3184     }
3185 
3186   /* Handle uninitialized uses.  */
3187   if (SSA_NAME_IS_DEFAULT_DEF (use))
3188     changed = set_ssa_val_to (use, use);
3189   else
3190     {
3191       if (gimple_code (stmt) == GIMPLE_PHI)
3192 	changed = visit_phi (stmt);
3193       else if (!gimple_has_lhs (stmt)
3194 	       || gimple_has_volatile_ops (stmt))
3195 	changed = defs_to_varying (stmt);
3196       else if (is_gimple_assign (stmt))
3197 	{
3198 	  enum tree_code code = gimple_assign_rhs_code (stmt);
3199 	  tree lhs = gimple_assign_lhs (stmt);
3200 	  tree rhs1 = gimple_assign_rhs1 (stmt);
3201 	  tree simplified;
3202 
3203 	  /* Shortcut for copies. Simplifying copies is pointless,
3204 	     since we copy the expression and value they represent.  */
3205 	  if (code == SSA_NAME
3206 	      && TREE_CODE (lhs) == SSA_NAME)
3207 	    {
3208 	      changed = visit_copy (lhs, rhs1);
3209 	      goto done;
3210 	    }
3211 	  simplified = try_to_simplify (stmt);
3212 	  if (simplified)
3213 	    {
3214 	      if (dump_file && (dump_flags & TDF_DETAILS))
3215 		{
3216 		  fprintf (dump_file, "RHS ");
3217 		  print_gimple_expr (dump_file, stmt, 0, 0);
3218 		  fprintf (dump_file, " simplified to ");
3219 		  print_generic_expr (dump_file, simplified, 0);
3220 		  if (TREE_CODE (lhs) == SSA_NAME)
3221 		    fprintf (dump_file, " has constants %d\n",
3222 			     expr_has_constants (simplified));
3223 		  else
3224 		    fprintf (dump_file, "\n");
3225 		}
3226 	    }
3227 	  /* Setting value numbers to constants will occasionally
3228 	     screw up phi congruence because constants are not
3229 	     uniquely associated with a single ssa name that can be
3230 	     looked up.  */
3231 	  if (simplified
3232 	      && is_gimple_min_invariant (simplified)
3233 	      && TREE_CODE (lhs) == SSA_NAME)
3234 	    {
3235 	      VN_INFO (lhs)->expr = simplified;
3236 	      VN_INFO (lhs)->has_constants = true;
3237 	      changed = set_ssa_val_to (lhs, simplified);
3238 	      goto done;
3239 	    }
3240 	  else if (simplified
3241 		   && TREE_CODE (simplified) == SSA_NAME
3242 		   && TREE_CODE (lhs) == SSA_NAME)
3243 	    {
3244 	      changed = visit_copy (lhs, simplified);
3245 	      goto done;
3246 	    }
3247 	  else if (simplified)
3248 	    {
3249 	      if (TREE_CODE (lhs) == SSA_NAME)
3250 		{
3251 		  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3252 		  /* We have to unshare the expression or else
3253 		     valuizing may change the IL stream.  */
3254 		  VN_INFO (lhs)->expr = unshare_expr (simplified);
3255 		}
3256 	    }
3257 	  else if (stmt_has_constants (stmt)
3258 		   && TREE_CODE (lhs) == SSA_NAME)
3259 	    VN_INFO (lhs)->has_constants = true;
3260 	  else if (TREE_CODE (lhs) == SSA_NAME)
3261 	    {
3262 	      /* We reset expr and constantness here because we may
3263 		 have been value numbering optimistically, and
3264 		 iterating. They may become non-constant in this case,
3265 		 even if they were optimistically constant. */
3266 
3267 	      VN_INFO (lhs)->has_constants = false;
3268 	      VN_INFO (lhs)->expr = NULL_TREE;
3269 	    }
3270 
3271 	  if ((TREE_CODE (lhs) == SSA_NAME
3272 	       /* We can substitute SSA_NAMEs that are live over
3273 		  abnormal edges with their constant value.  */
3274 	       && !(gimple_assign_copy_p (stmt)
3275 		    && is_gimple_min_invariant (rhs1))
3276 	       && !(simplified
3277 		    && is_gimple_min_invariant (simplified))
3278 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3279 	      /* Stores or copies from SSA_NAMEs that are live over
3280 		 abnormal edges are a problem.  */
3281 	      || (code == SSA_NAME
3282 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3283 	    changed = defs_to_varying (stmt);
3284 	  else if (REFERENCE_CLASS_P (lhs)
3285 		   || DECL_P (lhs))
3286 	    changed = visit_reference_op_store (lhs, rhs1, stmt);
3287 	  else if (TREE_CODE (lhs) == SSA_NAME)
3288 	    {
3289 	      if ((gimple_assign_copy_p (stmt)
3290 		   && is_gimple_min_invariant (rhs1))
3291 		  || (simplified
3292 		      && is_gimple_min_invariant (simplified)))
3293 		{
3294 		  VN_INFO (lhs)->has_constants = true;
3295 		  if (simplified)
3296 		    changed = set_ssa_val_to (lhs, simplified);
3297 		  else
3298 		    changed = set_ssa_val_to (lhs, rhs1);
3299 		}
3300 	      else
3301 		{
3302 		  switch (get_gimple_rhs_class (code))
3303 		    {
3304 		    case GIMPLE_UNARY_RHS:
3305 		    case GIMPLE_BINARY_RHS:
3306 		    case GIMPLE_TERNARY_RHS:
3307 		      changed = visit_nary_op (lhs, stmt);
3308 		      break;
3309 		    case GIMPLE_SINGLE_RHS:
3310 		      switch (TREE_CODE_CLASS (code))
3311 			{
3312 			case tcc_reference:
3313 			  /* VOP-less references can go through unary case.  */
3314 			  if ((code == REALPART_EXPR
3315 			       || code == IMAGPART_EXPR
3316 			       || code == VIEW_CONVERT_EXPR
3317 			       || code == BIT_FIELD_REF)
3318 			      && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3319 			    {
3320 			      changed = visit_nary_op (lhs, stmt);
3321 			      break;
3322 			    }
3323 			  /* Fallthrough.  */
3324 			case tcc_declaration:
3325 			  changed = visit_reference_op_load (lhs, rhs1, stmt);
3326 			  break;
3327 			default:
3328 			  if (code == ADDR_EXPR)
3329 			    {
3330 			      changed = visit_nary_op (lhs, stmt);
3331 			      break;
3332 			    }
3333 			  else if (code == CONSTRUCTOR)
3334 			    {
3335 			      changed = visit_nary_op (lhs, stmt);
3336 			      break;
3337 			    }
3338 			  changed = defs_to_varying (stmt);
3339 			}
3340 		      break;
3341 		    default:
3342 		      changed = defs_to_varying (stmt);
3343 		      break;
3344 		    }
3345 		}
3346 	    }
3347 	  else
3348 	    changed = defs_to_varying (stmt);
3349 	}
3350       else if (is_gimple_call (stmt))
3351 	{
3352 	  tree lhs = gimple_call_lhs (stmt);
3353 
3354 	  /* ???  We could try to simplify calls.  */
3355 
3356 	  if (stmt_has_constants (stmt)
3357 	      && TREE_CODE (lhs) == SSA_NAME)
3358 	    VN_INFO (lhs)->has_constants = true;
3359 	  else if (TREE_CODE (lhs) == SSA_NAME)
3360 	    {
3361 	      /* We reset expr and constantness here because we may
3362 		 have been value numbering optimistically, and
3363 		 iterating. They may become non-constant in this case,
3364 		 even if they were optimistically constant. */
3365 	      VN_INFO (lhs)->has_constants = false;
3366 	      VN_INFO (lhs)->expr = NULL_TREE;
3367 	    }
3368 
3369 	  if (TREE_CODE (lhs) == SSA_NAME
3370 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3371 	    changed = defs_to_varying (stmt);
3372 	  /* ???  We should handle stores from calls.  */
3373 	  else if (TREE_CODE (lhs) == SSA_NAME)
3374 	    {
3375 	      if (!gimple_call_internal_p (stmt)
3376 		  && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3377 		changed = visit_reference_op_call (lhs, stmt);
3378 	      else
3379 		changed = defs_to_varying (stmt);
3380 	    }
3381 	  else
3382 	    changed = defs_to_varying (stmt);
3383 	}
3384     }
3385  done:
3386   return changed;
3387 }
3388 
3389 /* Compare two operands by reverse postorder index */
3390 
3391 static int
3392 compare_ops (const void *pa, const void *pb)
3393 {
3394   const tree opa = *((const tree *)pa);
3395   const tree opb = *((const tree *)pb);
3396   gimple opstmta = SSA_NAME_DEF_STMT (opa);
3397   gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3398   basic_block bba;
3399   basic_block bbb;
3400 
3401   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3402     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3403   else if (gimple_nop_p (opstmta))
3404     return -1;
3405   else if (gimple_nop_p (opstmtb))
3406     return 1;
3407 
3408   bba = gimple_bb (opstmta);
3409   bbb = gimple_bb (opstmtb);
3410 
3411   if (!bba && !bbb)
3412     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3413   else if (!bba)
3414     return -1;
3415   else if (!bbb)
3416     return 1;
3417 
3418   if (bba == bbb)
3419     {
3420       if (gimple_code (opstmta) == GIMPLE_PHI
3421 	  && gimple_code (opstmtb) == GIMPLE_PHI)
3422 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3423       else if (gimple_code (opstmta) == GIMPLE_PHI)
3424 	return -1;
3425       else if (gimple_code (opstmtb) == GIMPLE_PHI)
3426 	return 1;
3427       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3428         return gimple_uid (opstmta) - gimple_uid (opstmtb);
3429       else
3430 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3431     }
3432   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3433 }
3434 
3435 /* Sort an array containing members of a strongly connected component
3436    SCC so that the members are ordered by RPO number.
3437    This means that when the sort is complete, iterating through the
3438    array will give you the members in RPO order.  */
3439 
3440 static void
3441 sort_scc (VEC (tree, heap) *scc)
3442 {
3443   VEC_qsort (tree, scc, compare_ops);
3444 }
3445 
3446 /* Insert the no longer used nary ONARY to the hash INFO.  */
3447 
3448 static void
3449 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3450 {
3451   size_t size = sizeof_vn_nary_op (onary->length);
3452   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3453 					       &info->nary_obstack);
3454   memcpy (nary, onary, size);
3455   vn_nary_op_insert_into (nary, info->nary, false);
3456 }
3457 
3458 /* Insert the no longer used phi OPHI to the hash INFO.  */
3459 
3460 static void
3461 copy_phi (vn_phi_t ophi, vn_tables_t info)
3462 {
3463   vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3464   void **slot;
3465   memcpy (phi, ophi, sizeof (*phi));
3466   ophi->phiargs = NULL;
3467   slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3468   gcc_assert (!*slot);
3469   *slot = phi;
3470 }
3471 
3472 /* Insert the no longer used reference OREF to the hash INFO.  */
3473 
3474 static void
3475 copy_reference (vn_reference_t oref, vn_tables_t info)
3476 {
3477   vn_reference_t ref;
3478   void **slot;
3479   ref = (vn_reference_t) pool_alloc (info->references_pool);
3480   memcpy (ref, oref, sizeof (*ref));
3481   oref->operands = NULL;
3482   slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3483 				   INSERT);
3484   if (*slot)
3485     free_reference (*slot);
3486   *slot = ref;
3487 }
3488 
3489 /* Process a strongly connected component in the SSA graph.  */
3490 
3491 static void
3492 process_scc (VEC (tree, heap) *scc)
3493 {
3494   tree var;
3495   unsigned int i;
3496   unsigned int iterations = 0;
3497   bool changed = true;
3498   htab_iterator hi;
3499   vn_nary_op_t nary;
3500   vn_phi_t phi;
3501   vn_reference_t ref;
3502 
3503   /* If the SCC has a single member, just visit it.  */
3504   if (VEC_length (tree, scc) == 1)
3505     {
3506       tree use = VEC_index (tree, scc, 0);
3507       if (VN_INFO (use)->use_processed)
3508 	return;
3509       /* We need to make sure it doesn't form a cycle itself, which can
3510 	 happen for self-referential PHI nodes.  In that case we would
3511 	 end up inserting an expression with VN_TOP operands into the
3512 	 valid table which makes us derive bogus equivalences later.
3513 	 The cheapest way to check this is to assume it for all PHI nodes.  */
3514       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3515 	/* Fallthru to iteration.  */ ;
3516       else
3517 	{
3518 	  visit_use (use);
3519 	  return;
3520 	}
3521     }
3522 
3523   /* Iterate over the SCC with the optimistic table until it stops
3524      changing.  */
3525   current_info = optimistic_info;
3526   while (changed)
3527     {
3528       changed = false;
3529       iterations++;
3530       if (dump_file && (dump_flags & TDF_DETAILS))
3531 	fprintf (dump_file, "Starting iteration %d\n", iterations);
3532       /* As we are value-numbering optimistically we have to
3533 	 clear the expression tables and the simplified expressions
3534 	 in each iteration until we converge.  */
3535       htab_empty (optimistic_info->nary);
3536       htab_empty (optimistic_info->phis);
3537       htab_empty (optimistic_info->references);
3538       obstack_free (&optimistic_info->nary_obstack, NULL);
3539       gcc_obstack_init (&optimistic_info->nary_obstack);
3540       empty_alloc_pool (optimistic_info->phis_pool);
3541       empty_alloc_pool (optimistic_info->references_pool);
3542       FOR_EACH_VEC_ELT (tree, scc, i, var)
3543 	VN_INFO (var)->expr = NULL_TREE;
3544       FOR_EACH_VEC_ELT (tree, scc, i, var)
3545 	changed |= visit_use (var);
3546     }
3547 
3548   statistics_histogram_event (cfun, "SCC iterations", iterations);
3549 
3550   /* Finally, copy the contents of the no longer used optimistic
3551      table to the valid table.  */
3552   FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3553     copy_nary (nary, valid_info);
3554   FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3555     copy_phi (phi, valid_info);
3556   FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3557     copy_reference (ref, valid_info);
3558 
3559   current_info = valid_info;
3560 }
3561 
3562 DEF_VEC_O(ssa_op_iter);
3563 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3564 
3565 /* Pop the components of the found SCC for NAME off the SCC stack
3566    and process them.  Returns true if all went well, false if
3567    we run into resource limits.  */
3568 
3569 static bool
3570 extract_and_process_scc_for_name (tree name)
3571 {
3572   VEC (tree, heap) *scc = NULL;
3573   tree x;
3574 
3575   /* Found an SCC, pop the components off the SCC stack and
3576      process them.  */
3577   do
3578     {
3579       x = VEC_pop (tree, sccstack);
3580 
3581       VN_INFO (x)->on_sccstack = false;
3582       VEC_safe_push (tree, heap, scc, x);
3583     } while (x != name);
3584 
3585   /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
3586   if (VEC_length (tree, scc)
3587       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3588     {
3589       if (dump_file)
3590 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3591 		 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3592 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3593 
3594       VEC_free (tree, heap, scc);
3595       return false;
3596     }
3597 
3598   if (VEC_length (tree, scc) > 1)
3599     sort_scc (scc);
3600 
3601   if (dump_file && (dump_flags & TDF_DETAILS))
3602     print_scc (dump_file, scc);
3603 
3604   process_scc (scc);
3605 
3606   VEC_free (tree, heap, scc);
3607 
3608   return true;
3609 }
3610 
3611 /* Depth first search on NAME to discover and process SCC's in the SSA
3612    graph.
3613    Execution of this algorithm relies on the fact that the SCC's are
3614    popped off the stack in topological order.
3615    Returns true if successful, false if we stopped processing SCC's due
3616    to resource constraints.  */
3617 
3618 static bool
3619 DFS (tree name)
3620 {
3621   VEC(ssa_op_iter, heap) *itervec = NULL;
3622   VEC(tree, heap) *namevec = NULL;
3623   use_operand_p usep = NULL;
3624   gimple defstmt;
3625   tree use;
3626   ssa_op_iter iter;
3627 
3628 start_over:
3629   /* SCC info */
3630   VN_INFO (name)->dfsnum = next_dfs_num++;
3631   VN_INFO (name)->visited = true;
3632   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3633 
3634   VEC_safe_push (tree, heap, sccstack, name);
3635   VN_INFO (name)->on_sccstack = true;
3636   defstmt = SSA_NAME_DEF_STMT (name);
3637 
3638   /* Recursively DFS on our operands, looking for SCC's.  */
3639   if (!gimple_nop_p (defstmt))
3640     {
3641       /* Push a new iterator.  */
3642       if (gimple_code (defstmt) == GIMPLE_PHI)
3643 	usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3644       else
3645 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3646     }
3647   else
3648     clear_and_done_ssa_iter (&iter);
3649 
3650   while (1)
3651     {
3652       /* If we are done processing uses of a name, go up the stack
3653 	 of iterators and process SCCs as we found them.  */
3654       if (op_iter_done (&iter))
3655 	{
3656 	  /* See if we found an SCC.  */
3657 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3658 	    if (!extract_and_process_scc_for_name (name))
3659 	      {
3660 		VEC_free (tree, heap, namevec);
3661 		VEC_free (ssa_op_iter, heap, itervec);
3662 		return false;
3663 	      }
3664 
3665 	  /* Check if we are done.  */
3666 	  if (VEC_empty (tree, namevec))
3667 	    {
3668 	      VEC_free (tree, heap, namevec);
3669 	      VEC_free (ssa_op_iter, heap, itervec);
3670 	      return true;
3671 	    }
3672 
3673 	  /* Restore the last use walker and continue walking there.  */
3674 	  use = name;
3675 	  name = VEC_pop (tree, namevec);
3676 	  memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3677 		  sizeof (ssa_op_iter));
3678 	  VEC_pop (ssa_op_iter, itervec);
3679 	  goto continue_walking;
3680 	}
3681 
3682       use = USE_FROM_PTR (usep);
3683 
3684       /* Since we handle phi nodes, we will sometimes get
3685 	 invariants in the use expression.  */
3686       if (TREE_CODE (use) == SSA_NAME)
3687 	{
3688 	  if (! (VN_INFO (use)->visited))
3689 	    {
3690 	      /* Recurse by pushing the current use walking state on
3691 		 the stack and starting over.  */
3692 	      VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3693 	      VEC_safe_push(tree, heap, namevec, name);
3694 	      name = use;
3695 	      goto start_over;
3696 
3697 continue_walking:
3698 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3699 					 VN_INFO (use)->low);
3700 	    }
3701 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3702 	      && VN_INFO (use)->on_sccstack)
3703 	    {
3704 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3705 					 VN_INFO (name)->low);
3706 	    }
3707 	}
3708 
3709       usep = op_iter_next_use (&iter);
3710     }
3711 }
3712 
3713 /* Allocate a value number table.  */
3714 
3715 static void
3716 allocate_vn_table (vn_tables_t table)
3717 {
3718   table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3719   table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3720   table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3721 				   free_reference);
3722 
3723   gcc_obstack_init (&table->nary_obstack);
3724   table->phis_pool = create_alloc_pool ("VN phis",
3725 					sizeof (struct vn_phi_s),
3726 					30);
3727   table->references_pool = create_alloc_pool ("VN references",
3728 					      sizeof (struct vn_reference_s),
3729 					      30);
3730 }
3731 
3732 /* Free a value number table.  */
3733 
3734 static void
3735 free_vn_table (vn_tables_t table)
3736 {
3737   htab_delete (table->phis);
3738   htab_delete (table->nary);
3739   htab_delete (table->references);
3740   obstack_free (&table->nary_obstack, NULL);
3741   free_alloc_pool (table->phis_pool);
3742   free_alloc_pool (table->references_pool);
3743 }
3744 
3745 static void
3746 init_scc_vn (void)
3747 {
3748   size_t i;
3749   int j;
3750   int *rpo_numbers_temp;
3751 
3752   calculate_dominance_info (CDI_DOMINATORS);
3753   sccstack = NULL;
3754   constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3755 				  free);
3756 
3757   constant_value_ids = BITMAP_ALLOC (NULL);
3758 
3759   next_dfs_num = 1;
3760   next_value_id = 1;
3761 
3762   vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3763   /* VEC_alloc doesn't actually grow it to the right size, it just
3764      preallocates the space to do so.  */
3765   VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3766   gcc_obstack_init (&vn_ssa_aux_obstack);
3767 
3768   shared_lookup_phiargs = NULL;
3769   shared_lookup_references = NULL;
3770   rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3771   rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3772   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3773 
3774   /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3775      the i'th block in RPO order is bb.  We want to map bb's to RPO
3776      numbers, so we need to rearrange this array.  */
3777   for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3778     rpo_numbers[rpo_numbers_temp[j]] = j;
3779 
3780   XDELETE (rpo_numbers_temp);
3781 
3782   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3783 
3784   /* Create the VN_INFO structures, and initialize value numbers to
3785      TOP.  */
3786   for (i = 0; i < num_ssa_names; i++)
3787     {
3788       tree name = ssa_name (i);
3789       if (name)
3790 	{
3791 	  VN_INFO_GET (name)->valnum = VN_TOP;
3792 	  VN_INFO (name)->expr = NULL_TREE;
3793 	  VN_INFO (name)->value_id = 0;
3794 	}
3795     }
3796 
3797   renumber_gimple_stmt_uids ();
3798 
3799   /* Create the valid and optimistic value numbering tables.  */
3800   valid_info = XCNEW (struct vn_tables_s);
3801   allocate_vn_table (valid_info);
3802   optimistic_info = XCNEW (struct vn_tables_s);
3803   allocate_vn_table (optimistic_info);
3804 }
3805 
3806 void
3807 free_scc_vn (void)
3808 {
3809   size_t i;
3810 
3811   htab_delete (constant_to_value_id);
3812   BITMAP_FREE (constant_value_ids);
3813   VEC_free (tree, heap, shared_lookup_phiargs);
3814   VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3815   XDELETEVEC (rpo_numbers);
3816 
3817   for (i = 0; i < num_ssa_names; i++)
3818     {
3819       tree name = ssa_name (i);
3820       if (name
3821 	  && VN_INFO (name)->needs_insertion)
3822 	release_ssa_name (name);
3823     }
3824   obstack_free (&vn_ssa_aux_obstack, NULL);
3825   VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3826 
3827   VEC_free (tree, heap, sccstack);
3828   free_vn_table (valid_info);
3829   XDELETE (valid_info);
3830   free_vn_table (optimistic_info);
3831   XDELETE (optimistic_info);
3832 }
3833 
3834 /* Set *ID if we computed something useful in RESULT.  */
3835 
3836 static void
3837 set_value_id_for_result (tree result, unsigned int *id)
3838 {
3839   if (result)
3840     {
3841       if (TREE_CODE (result) == SSA_NAME)
3842 	*id = VN_INFO (result)->value_id;
3843       else if (is_gimple_min_invariant (result))
3844 	*id = get_or_alloc_constant_value_id (result);
3845     }
3846 }
3847 
3848 /* Set the value ids in the valid hash tables.  */
3849 
3850 static void
3851 set_hashtable_value_ids (void)
3852 {
3853   htab_iterator hi;
3854   vn_nary_op_t vno;
3855   vn_reference_t vr;
3856   vn_phi_t vp;
3857 
3858   /* Now set the value ids of the things we had put in the hash
3859      table.  */
3860 
3861   FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3862 			 vno, vn_nary_op_t, hi)
3863     set_value_id_for_result (vno->result, &vno->value_id);
3864 
3865   FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3866 			 vp, vn_phi_t, hi)
3867     set_value_id_for_result (vp->result, &vp->value_id);
3868 
3869   FOR_EACH_HTAB_ELEMENT (valid_info->references,
3870 			 vr, vn_reference_t, hi)
3871     set_value_id_for_result (vr->result, &vr->value_id);
3872 }
3873 
3874 /* Do SCCVN.  Returns true if it finished, false if we bailed out
3875    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
3876    how we use the alias oracle walking during the VN process.  */
3877 
3878 bool
3879 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3880 {
3881   size_t i;
3882   tree param;
3883   bool changed = true;
3884 
3885   default_vn_walk_kind = default_vn_walk_kind_;
3886 
3887   init_scc_vn ();
3888   current_info = valid_info;
3889 
3890   for (param = DECL_ARGUMENTS (current_function_decl);
3891        param;
3892        param = DECL_CHAIN (param))
3893     {
3894       if (gimple_default_def (cfun, param) != NULL)
3895 	{
3896 	  tree def = gimple_default_def (cfun, param);
3897 	  VN_INFO (def)->valnum = def;
3898 	}
3899     }
3900 
3901   for (i = 1; i < num_ssa_names; ++i)
3902     {
3903       tree name = ssa_name (i);
3904       if (name
3905 	  && VN_INFO (name)->visited == false
3906 	  && !has_zero_uses (name))
3907 	if (!DFS (name))
3908 	  {
3909 	    free_scc_vn ();
3910 	    return false;
3911 	  }
3912     }
3913 
3914   /* Initialize the value ids.  */
3915 
3916   for (i = 1; i < num_ssa_names; ++i)
3917     {
3918       tree name = ssa_name (i);
3919       vn_ssa_aux_t info;
3920       if (!name)
3921 	continue;
3922       info = VN_INFO (name);
3923       if (info->valnum == name
3924 	  || info->valnum == VN_TOP)
3925 	info->value_id = get_next_value_id ();
3926       else if (is_gimple_min_invariant (info->valnum))
3927 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
3928     }
3929 
3930   /* Propagate until they stop changing.  */
3931   while (changed)
3932     {
3933       changed = false;
3934       for (i = 1; i < num_ssa_names; ++i)
3935 	{
3936 	  tree name = ssa_name (i);
3937 	  vn_ssa_aux_t info;
3938 	  if (!name)
3939 	    continue;
3940 	  info = VN_INFO (name);
3941 	  if (TREE_CODE (info->valnum) == SSA_NAME
3942 	      && info->valnum != name
3943 	      && info->value_id != VN_INFO (info->valnum)->value_id)
3944 	    {
3945 	      changed = true;
3946 	      info->value_id = VN_INFO (info->valnum)->value_id;
3947 	    }
3948 	}
3949     }
3950 
3951   set_hashtable_value_ids ();
3952 
3953   if (dump_file && (dump_flags & TDF_DETAILS))
3954     {
3955       fprintf (dump_file, "Value numbers:\n");
3956       for (i = 0; i < num_ssa_names; i++)
3957 	{
3958 	  tree name = ssa_name (i);
3959 	  if (name
3960 	      && VN_INFO (name)->visited
3961 	      && SSA_VAL (name) != name)
3962 	    {
3963 	      print_generic_expr (dump_file, name, 0);
3964 	      fprintf (dump_file, " = ");
3965 	      print_generic_expr (dump_file, SSA_VAL (name), 0);
3966 	      fprintf (dump_file, "\n");
3967 	    }
3968 	}
3969     }
3970 
3971   return true;
3972 }
3973 
3974 /* Return the maximum value id we have ever seen.  */
3975 
3976 unsigned int
3977 get_max_value_id (void)
3978 {
3979   return next_value_id;
3980 }
3981 
3982 /* Return the next unique value id.  */
3983 
3984 unsigned int
3985 get_next_value_id (void)
3986 {
3987   return next_value_id++;
3988 }
3989 
3990 
3991 /* Compare two expressions E1 and E2 and return true if they are equal.  */
3992 
3993 bool
3994 expressions_equal_p (tree e1, tree e2)
3995 {
3996   /* The obvious case.  */
3997   if (e1 == e2)
3998     return true;
3999 
4000   /* If only one of them is null, they cannot be equal.  */
4001   if (!e1 || !e2)
4002     return false;
4003 
4004   /* Now perform the actual comparison.  */
4005   if (TREE_CODE (e1) == TREE_CODE (e2)
4006       && operand_equal_p (e1, e2, OEP_PURE_SAME))
4007     return true;
4008 
4009   return false;
4010 }
4011 
4012 
4013 /* Return true if the nary operation NARY may trap.  This is a copy
4014    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
4015 
4016 bool
4017 vn_nary_may_trap (vn_nary_op_t nary)
4018 {
4019   tree type;
4020   tree rhs2 = NULL_TREE;
4021   bool honor_nans = false;
4022   bool honor_snans = false;
4023   bool fp_operation = false;
4024   bool honor_trapv = false;
4025   bool handled, ret;
4026   unsigned i;
4027 
4028   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4029       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4030       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4031     {
4032       type = nary->type;
4033       fp_operation = FLOAT_TYPE_P (type);
4034       if (fp_operation)
4035 	{
4036 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
4037 	  honor_snans = flag_signaling_nans != 0;
4038 	}
4039       else if (INTEGRAL_TYPE_P (type)
4040 	       && TYPE_OVERFLOW_TRAPS (type))
4041 	honor_trapv = true;
4042     }
4043   if (nary->length >= 2)
4044     rhs2 = nary->op[1];
4045   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4046 				       honor_trapv,
4047 				       honor_nans, honor_snans, rhs2,
4048 				       &handled);
4049   if (handled
4050       && ret)
4051     return true;
4052 
4053   for (i = 0; i < nary->length; ++i)
4054     if (tree_could_trap_p (nary->op[i]))
4055       return true;
4056 
4057   return false;
4058 }
4059