1 /* SCC value numbering for trees
2    Copyright (C) 2006-2017 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-ssa-sccvn.h"
59 #include "tree-cfg.h"
60 #include "domwalk.h"
61 #include "gimple-iterator.h"
62 #include "gimple-match.h"
63 
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code, it
   is equivalent to a regular hash-based value numbering performed in
   reverse postorder.
69 
70    For code with cycles, there are two alternatives, both of which
71    require keeping the hashtables separate from the actual list of
72    value numbers for SSA names.
73 
74    1. Iterate value numbering in an RPO walk of the blocks, removing
75    all the entries from the hashtable after each iteration (but
76    keeping the SSA name->value number mapping between iterations).
77    Iterate until it does not change.
78 
79    2. Perform value numbering as part of an SCC walk on the SSA graph,
80    iterating only the cycles in the SSA graph until they do not change
81    (using a separate, optimistic hashtable for value numbering the SCC
82    operands).
83 
84    The second is not just faster in practice (because most SSA graph
85    cycles do not involve all the variables in the graph), it also has
86    some nice properties.
87 
88    One of these nice properties is that when we pop an SCC off the
89    stack, we are guaranteed to have processed all the operands coming from
90    *outside of that SCC*, so we do not need to do anything special to
91    ensure they have value numbers.
92 
93    Another nice property is that the SCC walk is done as part of a DFS
94    of the SSA graph, which makes it easy to perform combining and
95    simplifying operations at the same time.
96 
97    The code below is deliberately written in a way that makes it easy
98    to separate the SCC walk from the other work it does.
99 
100    In order to propagate constants through the code, we track which
101    expressions contain constants, and use those while folding.  In
102    theory, we could also track expressions whose value numbers are
103    replaced, in case we end up folding based on expression
104    identities.
105 
106    In order to value number memory, we assign value numbers to vuses.
107    This enables us to note that, for example, stores to the same
108    address of the same value from the same starting memory states are
109    equivalent.
110    TODO:
111 
112    1. We can iterate only the changing portions of the SCC's, but
113    I have not seen an SCC big enough for this to be a win.
114    2. If you differentiate between phi nodes for loops and phi nodes
115    for if-then-else, you can properly consider phi nodes in different
116    blocks for equivalence.
117    3. We could value number vuses in more cases, particularly, whole
118    structure copies.
119 */
120 
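/* As a small, purely illustrative example of the hash-based scheme
   described above (hypothetical SSA names), in the straight-line code

     a_1 = b_2 + 1;
     c_3 = b_2 + 1;

   the expression b_2 + 1 hashes to the same table entry both times, so
   c_3 receives the same value number as a_1 and later uses of c_3 can
   be replaced by a_1.  For a cycle in the SSA graph (e.g. through a
   loop PHI) the optimistic table records provisional value numbers and
   the SCC is iterated until they no longer change.  */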
121 
122 static tree *last_vuse_ptr;
123 static vn_lookup_kind vn_walk_kind;
124 static vn_lookup_kind default_vn_walk_kind;
125 bitmap const_parms;
126 
127 /* vn_nary_op hashtable helpers.  */
128 
129 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
130 {
131   typedef vn_nary_op_s *compare_type;
132   static inline hashval_t hash (const vn_nary_op_s *);
133   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
134 };
135 
/* Return the computed hashcode for nary operation VNO1.  */
137 
138 inline hashval_t
139 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
140 {
141   return vno1->hashcode;
142 }
143 
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */
146 
147 inline bool
148 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
149 {
150   return vn_nary_op_eq (vno1, vno2);
151 }
152 
153 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
154 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
155 
156 
157 /* vn_phi hashtable helpers.  */
158 
159 static int
160 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
161 
162 struct vn_phi_hasher : pointer_hash <vn_phi_s>
163 {
164   static inline hashval_t hash (const vn_phi_s *);
165   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
166   static inline void remove (vn_phi_s *);
167 };
168 
/* Return the computed hashcode for phi operation VP1.  */
170 
171 inline hashval_t
172 vn_phi_hasher::hash (const vn_phi_s *vp1)
173 {
174   return vp1->hashcode;
175 }
176 
177 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
178 
179 inline bool
180 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
181 {
182   return vn_phi_eq (vp1, vp2);
183 }
184 
/* Free a phi operation structure PHI.  */
186 
187 inline void
188 vn_phi_hasher::remove (vn_phi_s *phi)
189 {
190   phi->phiargs.release ();
191 }
192 
193 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
194 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
195 
196 
197 /* Compare two reference operands P1 and P2 for equality.  Return true if
198    they are equal, and false otherwise.  */
199 
200 static int
201 vn_reference_op_eq (const void *p1, const void *p2)
202 {
203   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
204   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
205 
206   return (vro1->opcode == vro2->opcode
207 	  /* We do not care for differences in type qualification.  */
208 	  && (vro1->type == vro2->type
209 	      || (vro1->type && vro2->type
210 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
211 					 TYPE_MAIN_VARIANT (vro2->type))))
212 	  && expressions_equal_p (vro1->op0, vro2->op0)
213 	  && expressions_equal_p (vro1->op1, vro2->op1)
214 	  && expressions_equal_p (vro1->op2, vro2->op2));
215 }
216 
/* Free a reference operation structure VR.  */
218 
219 static inline void
220 free_reference (vn_reference_s *vr)
221 {
222   vr->operands.release ();
223 }
224 
225 
226 /* vn_reference hashtable helpers.  */
227 
228 struct vn_reference_hasher : pointer_hash <vn_reference_s>
229 {
230   static inline hashval_t hash (const vn_reference_s *);
231   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
232   static inline void remove (vn_reference_s *);
233 };
234 
235 /* Return the hashcode for a given reference operation P1.  */
236 
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 {
240   return vr1->hashcode;
241 }
242 
/* Return true if reference operations V and C are equivalent.  */

inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 {
246   return vn_reference_eq (v, c);
247 }
248 
/* Free the reference operation structure V.  */

inline void
250 vn_reference_hasher::remove (vn_reference_s *v)
251 {
252   free_reference (v);
253 }
254 
255 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
256 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
257 
258 
259 /* The set of hashtables and alloc_pool's for their items.  */
260 
261 typedef struct vn_tables_s
262 {
263   vn_nary_op_table_type *nary;
264   vn_phi_table_type *phis;
265   vn_reference_table_type *references;
266   struct obstack nary_obstack;
267   object_allocator<vn_phi_s> *phis_pool;
268   object_allocator<vn_reference_s> *references_pool;
269 } *vn_tables_t;
270 
271 
272 /* vn_constant hashtable helpers.  */
273 
274 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
275 {
276   static inline hashval_t hash (const vn_constant_s *);
277   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
278 };
279 
280 /* Hash table hash function for vn_constant_t.  */
281 
282 inline hashval_t
283 vn_constant_hasher::hash (const vn_constant_s *vc1)
284 {
285   return vc1->hashcode;
286 }
287 
288 /* Hash table equality function for vn_constant_t.  */
289 
290 inline bool
291 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
292 {
293   if (vc1->hashcode != vc2->hashcode)
294     return false;
295 
296   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
297 }
298 
299 static hash_table<vn_constant_hasher> *constant_to_value_id;
300 static bitmap constant_value_ids;
301 
302 
303 /* Valid hashtables storing information we have proven to be
304    correct.  */
305 
306 static vn_tables_t valid_info;
307 
308 /* Optimistic hashtables storing information we are making assumptions about
309    during iterations.  */
310 
311 static vn_tables_t optimistic_info;
312 
/* Pointer to the set of hashtables that is currently being used.
   Should always point to either optimistic_info or valid_info.  */
316 
317 static vn_tables_t current_info;
318 
319 
320 /* Reverse post order index for each basic block.  */
321 
322 static int *rpo_numbers;
323 
324 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
325 
326 /* Return the SSA value of the VUSE x, supporting released VDEFs
327    during elimination which will value-number the VDEF to the
328    associated VUSE (but not substitute in the whole lattice).  */
329 
330 static inline tree
331 vuse_ssa_val (tree x)
332 {
333   if (!x)
334     return NULL_TREE;
335 
336   do
337     {
338       x = SSA_VAL (x);
339     }
340   while (SSA_NAME_IN_FREE_LIST (x));
341 
342   return x;
343 }
344 
345 /* This represents the top of the VN lattice, which is the universal
346    value.  */
347 
348 tree VN_TOP;
349 
350 /* Unique counter for our value ids.  */
351 
352 static unsigned int next_value_id;
353 
354 /* Next DFS number and the stack for strongly connected component
355    detection. */
356 
357 static unsigned int next_dfs_num;
358 static vec<tree> sccstack;
359 
360 
361 
362 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
363    are allocated on an obstack for locality reasons, and to free them
364    without looping over the vec.  */
365 
366 static vec<vn_ssa_aux_t> vn_ssa_aux_table;
367 static struct obstack vn_ssa_aux_obstack;
368 
369 /* Return whether there is value numbering information for a given SSA name.  */
370 
371 bool
372 has_VN_INFO (tree name)
373 {
374   if (SSA_NAME_VERSION (name) < vn_ssa_aux_table.length ())
375     return vn_ssa_aux_table[SSA_NAME_VERSION (name)] != NULL;
376   return false;
377 }
378 
379 /* Return the value numbering information for a given SSA name.  */
380 
381 vn_ssa_aux_t
382 VN_INFO (tree name)
383 {
384   vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
385   gcc_checking_assert (res);
386   return res;
387 }
388 
389 /* Set the value numbering info for a given SSA name to a given
390    value.  */
391 
392 static inline void
393 VN_INFO_SET (tree name, vn_ssa_aux_t value)
394 {
395   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
396 }
397 
398 /* Initialize the value numbering info for a given SSA name.
399    This should be called just once for every SSA name.  */
400 
401 vn_ssa_aux_t
402 VN_INFO_GET (tree name)
403 {
404   vn_ssa_aux_t newinfo;
405 
406   gcc_assert (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ()
407 	      || vn_ssa_aux_table[SSA_NAME_VERSION (name)] == NULL);
408   newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
409   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
410   if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
411     vn_ssa_aux_table.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
412   vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
413   return newinfo;
414 }
415 
416 
417 /* Return the vn_kind the expression computed by the stmt should be
418    associated with.  */
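
/* For example (purely illustrative): a call x_1 = foo (y_2) is
   classified as VN_REFERENCE, a PHI node x_1 = PHI <x_2, x_3> as
   VN_PHI, an assignment x_1 = y_2 + z_3 as VN_NARY and a load
   x_1 = *p_2 as VN_REFERENCE.  */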
419 
420 enum vn_kind
421 vn_get_stmt_kind (gimple *stmt)
422 {
423   switch (gimple_code (stmt))
424     {
425     case GIMPLE_CALL:
426       return VN_REFERENCE;
427     case GIMPLE_PHI:
428       return VN_PHI;
429     case GIMPLE_ASSIGN:
430       {
431 	enum tree_code code = gimple_assign_rhs_code (stmt);
432 	tree rhs1 = gimple_assign_rhs1 (stmt);
433 	switch (get_gimple_rhs_class (code))
434 	  {
435 	  case GIMPLE_UNARY_RHS:
436 	  case GIMPLE_BINARY_RHS:
437 	  case GIMPLE_TERNARY_RHS:
438 	    return VN_NARY;
439 	  case GIMPLE_SINGLE_RHS:
440 	    switch (TREE_CODE_CLASS (code))
441 	      {
442 	      case tcc_reference:
		/* VOP-less references can go through the unary case.  */
444 		if ((code == REALPART_EXPR
445 		     || code == IMAGPART_EXPR
446 		     || code == VIEW_CONVERT_EXPR
447 		     || code == BIT_FIELD_REF)
448 		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
449 		  return VN_NARY;
450 
451 		/* Fallthrough.  */
452 	      case tcc_declaration:
453 		return VN_REFERENCE;
454 
455 	      case tcc_constant:
456 		return VN_CONSTANT;
457 
458 	      default:
459 		if (code == ADDR_EXPR)
460 		  return (is_gimple_min_invariant (rhs1)
461 			  ? VN_CONSTANT : VN_REFERENCE);
462 		else if (code == CONSTRUCTOR)
463 		  return VN_NARY;
464 		return VN_NONE;
465 	      }
466 	  default:
467 	    return VN_NONE;
468 	  }
469       }
470     default:
471       return VN_NONE;
472     }
473 }
474 
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */
477 
478 unsigned int
479 get_constant_value_id (tree constant)
480 {
481   vn_constant_s **slot;
482   struct vn_constant_s vc;
483 
484   vc.hashcode = vn_hash_constant_with_type (constant);
485   vc.constant = constant;
486   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
487   if (slot)
488     return (*slot)->value_id;
489   return 0;
490 }
491 
/* Lookup a value id for CONSTANT; if one does not exist yet, create a
   new one.  Return the value id in either case.  */
494 
495 unsigned int
496 get_or_alloc_constant_value_id (tree constant)
497 {
498   vn_constant_s **slot;
499   struct vn_constant_s vc;
500   vn_constant_t vcp;
501 
502   vc.hashcode = vn_hash_constant_with_type (constant);
503   vc.constant = constant;
504   slot = constant_to_value_id->find_slot (&vc, INSERT);
505   if (*slot)
506     return (*slot)->value_id;
507 
508   vcp = XNEW (struct vn_constant_s);
509   vcp->hashcode = vc.hashcode;
510   vcp->constant = constant;
511   vcp->value_id = get_next_value_id ();
512   *slot = vcp;
513   bitmap_set_bit (constant_value_ids, vcp->value_id);
514   return vcp->value_id;
515 }
516 
517 /* Return true if V is a value id for a constant.  */
518 
519 bool
520 value_id_constant_p (unsigned int v)
521 {
522   return bitmap_bit_p (constant_value_ids, v);
523 }
524 
525 /* Compute the hash for a reference operand VRO1.  */
526 
527 static void
528 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
529 {
530   hstate.add_int (vro1->opcode);
531   if (vro1->op0)
532     inchash::add_expr (vro1->op0, hstate);
533   if (vro1->op1)
534     inchash::add_expr (vro1->op1, hstate);
535   if (vro1->op2)
536     inchash::add_expr (vro1->op2, hstate);
537 }
538 
539 /* Compute a hash for the reference operation VR1 and return it.  */
540 
541 static hashval_t
542 vn_reference_compute_hash (const vn_reference_t vr1)
543 {
544   inchash::hash hstate;
545   hashval_t result;
546   int i;
547   vn_reference_op_t vro;
548   HOST_WIDE_INT off = -1;
549   bool deref = false;
550 
551   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
552     {
553       if (vro->opcode == MEM_REF)
554 	deref = true;
555       else if (vro->opcode != ADDR_EXPR)
556 	deref = false;
557       if (vro->off != -1)
558 	{
559 	  if (off == -1)
560 	    off = 0;
561 	  off += vro->off;
562 	}
563       else
564 	{
565 	  if (off != -1
566 	      && off != 0)
567 	    hstate.add_int (off);
568 	  off = -1;
569 	  if (deref
570 	      && vro->opcode == ADDR_EXPR)
571 	    {
572 	      if (vro->op0)
573 		{
574 		  tree op = TREE_OPERAND (vro->op0, 0);
575 		  hstate.add_int (TREE_CODE (op));
576 		  inchash::add_expr (op, hstate);
577 		}
578 	    }
579 	  else
580 	    vn_reference_op_compute_hash (vro, hstate);
581 	}
582     }
583   result = hstate.end ();
584   /* ??? We would ICE later if we hash instead of adding that in. */
585   if (vr1->vuse)
586     result += SSA_NAME_VERSION (vr1->vuse);
587 
588   return result;
589 }
590 
591 /* Return true if reference operations VR1 and VR2 are equivalent.  This
592    means they have the same set of operands and vuses.  */
593 
594 bool
595 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
596 {
597   unsigned i, j;
598 
599   /* Early out if this is not a hash collision.  */
600   if (vr1->hashcode != vr2->hashcode)
601     return false;
602 
603   /* The VOP needs to be the same.  */
604   if (vr1->vuse != vr2->vuse)
605     return false;
606 
607   /* If the operands are the same we are done.  */
608   if (vr1->operands == vr2->operands)
609     return true;
610 
611   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
612     return false;
613 
614   if (INTEGRAL_TYPE_P (vr1->type)
615       && INTEGRAL_TYPE_P (vr2->type))
616     {
617       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
618 	return false;
619     }
620   else if (INTEGRAL_TYPE_P (vr1->type)
621 	   && (TYPE_PRECISION (vr1->type)
622 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
623     return false;
624   else if (INTEGRAL_TYPE_P (vr2->type)
625 	   && (TYPE_PRECISION (vr2->type)
626 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
627     return false;
628 
629   i = 0;
630   j = 0;
631   do
632     {
633       HOST_WIDE_INT off1 = 0, off2 = 0;
634       vn_reference_op_t vro1, vro2;
635       vn_reference_op_s tem1, tem2;
636       bool deref1 = false, deref2 = false;
637       for (; vr1->operands.iterate (i, &vro1); i++)
638 	{
639 	  if (vro1->opcode == MEM_REF)
640 	    deref1 = true;
641 	  /* Do not look through a storage order barrier.  */
642 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
643 	    return false;
644 	  if (vro1->off == -1)
645 	    break;
646 	  off1 += vro1->off;
647 	}
648       for (; vr2->operands.iterate (j, &vro2); j++)
649 	{
650 	  if (vro2->opcode == MEM_REF)
651 	    deref2 = true;
652 	  /* Do not look through a storage order barrier.  */
653 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
654 	    return false;
655 	  if (vro2->off == -1)
656 	    break;
657 	  off2 += vro2->off;
658 	}
659       if (off1 != off2)
660 	return false;
661       if (deref1 && vro1->opcode == ADDR_EXPR)
662 	{
663 	  memset (&tem1, 0, sizeof (tem1));
664 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
665 	  tem1.type = TREE_TYPE (tem1.op0);
666 	  tem1.opcode = TREE_CODE (tem1.op0);
667 	  vro1 = &tem1;
668 	  deref1 = false;
669 	}
670       if (deref2 && vro2->opcode == ADDR_EXPR)
671 	{
672 	  memset (&tem2, 0, sizeof (tem2));
673 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
674 	  tem2.type = TREE_TYPE (tem2.op0);
675 	  tem2.opcode = TREE_CODE (tem2.op0);
676 	  vro2 = &tem2;
677 	  deref2 = false;
678 	}
679       if (deref1 != deref2)
680 	return false;
681       if (!vn_reference_op_eq (vro1, vro2))
682 	return false;
683       ++j;
684       ++i;
685     }
686   while (vr1->operands.length () != i
687 	 || vr2->operands.length () != j);
688 
689   return true;
690 }
691 
692 /* Copy the operations present in load/store REF into RESULT, a vector of
693    vn_reference_op_s's.  */
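
/* For instance (a hypothetical example), the reference a.b[i_1] is
   decomposed outermost-first into an ARRAY_REF operation (op0 = i_1,
   op1 = the low bound, op2 = the element size in units of the element
   alignment), a COMPONENT_REF operation (op0 = the FIELD_DECL for b)
   and, because the base is a decl, a canonicalizing MEM_REF operation
   with offset zero followed by an ADDR_EXPR operation for &a.  */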
694 
695 static void
696 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
697 {
698   if (TREE_CODE (ref) == TARGET_MEM_REF)
699     {
700       vn_reference_op_s temp;
701 
702       result->reserve (3);
703 
704       memset (&temp, 0, sizeof (temp));
705       temp.type = TREE_TYPE (ref);
706       temp.opcode = TREE_CODE (ref);
707       temp.op0 = TMR_INDEX (ref);
708       temp.op1 = TMR_STEP (ref);
709       temp.op2 = TMR_OFFSET (ref);
710       temp.off = -1;
711       temp.clique = MR_DEPENDENCE_CLIQUE (ref);
712       temp.base = MR_DEPENDENCE_BASE (ref);
713       result->quick_push (temp);
714 
715       memset (&temp, 0, sizeof (temp));
716       temp.type = NULL_TREE;
717       temp.opcode = ERROR_MARK;
718       temp.op0 = TMR_INDEX2 (ref);
719       temp.off = -1;
720       result->quick_push (temp);
721 
722       memset (&temp, 0, sizeof (temp));
723       temp.type = NULL_TREE;
724       temp.opcode = TREE_CODE (TMR_BASE (ref));
725       temp.op0 = TMR_BASE (ref);
726       temp.off = -1;
727       result->quick_push (temp);
728       return;
729     }
730 
731   /* For non-calls, store the information that makes up the address.  */
732   tree orig = ref;
733   while (ref)
734     {
735       vn_reference_op_s temp;
736 
737       memset (&temp, 0, sizeof (temp));
738       temp.type = TREE_TYPE (ref);
739       temp.opcode = TREE_CODE (ref);
740       temp.off = -1;
741 
742       switch (temp.opcode)
743 	{
744 	case MODIFY_EXPR:
745 	  temp.op0 = TREE_OPERAND (ref, 1);
746 	  break;
747 	case WITH_SIZE_EXPR:
748 	  temp.op0 = TREE_OPERAND (ref, 1);
749 	  temp.off = 0;
750 	  break;
751 	case MEM_REF:
752 	  /* The base address gets its own vn_reference_op_s structure.  */
753 	  temp.op0 = TREE_OPERAND (ref, 1);
754 	    {
755 	      offset_int off = mem_ref_offset (ref);
756 	      if (wi::fits_shwi_p (off))
757 		temp.off = off.to_shwi ();
758 	    }
759 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
760 	  temp.base = MR_DEPENDENCE_BASE (ref);
761 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
762 	  break;
763 	case BIT_FIELD_REF:
764 	  /* Record bits, position and storage order.  */
765 	  temp.op0 = TREE_OPERAND (ref, 1);
766 	  temp.op1 = TREE_OPERAND (ref, 2);
767 	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 2)))
768 	    {
769 	      HOST_WIDE_INT off = tree_to_shwi (TREE_OPERAND (ref, 2));
770 	      if (off % BITS_PER_UNIT == 0)
771 		temp.off = off / BITS_PER_UNIT;
772 	    }
773 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
774 	  break;
775 	case COMPONENT_REF:
776 	  /* The field decl is enough to unambiguously specify the field,
777 	     a matching type is not necessary and a mismatching type
778 	     is always a spurious difference.  */
779 	  temp.type = NULL_TREE;
780 	  temp.op0 = TREE_OPERAND (ref, 1);
781 	  temp.op1 = TREE_OPERAND (ref, 2);
782 	  {
783 	    tree this_offset = component_ref_field_offset (ref);
784 	    if (this_offset
785 		&& TREE_CODE (this_offset) == INTEGER_CST)
786 	      {
787 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
788 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
789 		  {
790 		    offset_int off
791 		      = (wi::to_offset (this_offset)
792 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
793 		    if (wi::fits_shwi_p (off)
			/* Prohibit value-numbering zero offset components
795 			   of addresses the same before the pass folding
796 			   __builtin_object_size had a chance to run
797 			   (checking cfun->after_inlining does the
798 			   trick here).  */
799 			&& (TREE_CODE (orig) != ADDR_EXPR
800 			    || off != 0
801 			    || cfun->after_inlining))
802 		      temp.off = off.to_shwi ();
803 		  }
804 	      }
805 	  }
806 	  break;
807 	case ARRAY_RANGE_REF:
808 	case ARRAY_REF:
809 	  {
810 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
811 	    /* Record index as operand.  */
812 	    temp.op0 = TREE_OPERAND (ref, 1);
813 	    /* Always record lower bounds and element size.  */
814 	    temp.op1 = array_ref_low_bound (ref);
815 	    /* But record element size in units of the type alignment.  */
816 	    temp.op2 = TREE_OPERAND (ref, 3);
817 	    temp.align = eltype->type_common.align;
818 	    if (! temp.op2)
819 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
820 				     size_int (TYPE_ALIGN_UNIT (eltype)));
821 	    if (TREE_CODE (temp.op0) == INTEGER_CST
822 		&& TREE_CODE (temp.op1) == INTEGER_CST
823 		&& TREE_CODE (temp.op2) == INTEGER_CST)
824 	      {
825 		offset_int off = ((wi::to_offset (temp.op0)
826 				   - wi::to_offset (temp.op1))
827 				  * wi::to_offset (temp.op2)
828 				  * vn_ref_op_align_unit (&temp));
829 		if (wi::fits_shwi_p (off))
830 		  temp.off = off.to_shwi();
831 	      }
832 	  }
833 	  break;
834 	case VAR_DECL:
835 	  if (DECL_HARD_REGISTER (ref))
836 	    {
837 	      temp.op0 = ref;
838 	      break;
839 	    }
840 	  /* Fallthru.  */
841 	case PARM_DECL:
842 	case CONST_DECL:
843 	case RESULT_DECL:
844 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
845 	     when valueizing MEM[ptr] with ptr = &decl.  */
846 	  temp.opcode = MEM_REF;
847 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
848 	  temp.off = 0;
849 	  result->safe_push (temp);
850 	  temp.opcode = ADDR_EXPR;
851 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
852 	  temp.type = TREE_TYPE (temp.op0);
853 	  temp.off = -1;
854 	  break;
855 	case STRING_CST:
856 	case INTEGER_CST:
857 	case COMPLEX_CST:
858 	case VECTOR_CST:
859 	case REAL_CST:
860 	case FIXED_CST:
861 	case CONSTRUCTOR:
862 	case SSA_NAME:
863 	  temp.op0 = ref;
864 	  break;
865 	case ADDR_EXPR:
866 	  if (is_gimple_min_invariant (ref))
867 	    {
868 	      temp.op0 = ref;
869 	      break;
870 	    }
871 	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything for op* as it
	     will be handled by the iteration.  */
877 	case REALPART_EXPR:
878 	  temp.off = 0;
879 	  break;
880 	case VIEW_CONVERT_EXPR:
881 	  temp.off = 0;
882 	  temp.reverse = storage_order_barrier_p (ref);
883 	  break;
884 	case IMAGPART_EXPR:
885 	  /* This is only interesting for its constant offset.  */
886 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
887 	  break;
888 	default:
889 	  gcc_unreachable ();
890 	}
891       result->safe_push (temp);
892 
893       if (REFERENCE_CLASS_P (ref)
894 	  || TREE_CODE (ref) == MODIFY_EXPR
895 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
896 	  || (TREE_CODE (ref) == ADDR_EXPR
897 	      && !is_gimple_min_invariant (ref)))
898 	ref = TREE_OPERAND (ref, 0);
899       else
900 	ref = NULL_TREE;
901     }
902 }
903 
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */
907 
908 bool
909 ao_ref_init_from_vn_reference (ao_ref *ref,
910 			       alias_set_type set, tree type,
911 			       vec<vn_reference_op_s> ops)
912 {
913   vn_reference_op_t op;
914   unsigned i;
915   tree base = NULL_TREE;
916   tree *op0_p = &base;
917   offset_int offset = 0;
918   offset_int max_size;
919   offset_int size = -1;
920   tree size_tree = NULL_TREE;
921   alias_set_type base_alias_set = -1;
922 
923   /* First get the final access size from just the outermost expression.  */
924   op = &ops[0];
925   if (op->opcode == COMPONENT_REF)
926     size_tree = DECL_SIZE (op->op0);
927   else if (op->opcode == BIT_FIELD_REF)
928     size_tree = op->op0;
929   else
930     {
931       machine_mode mode = TYPE_MODE (type);
932       if (mode == BLKmode)
933 	size_tree = TYPE_SIZE (type);
934       else
935 	size = int (GET_MODE_BITSIZE (mode));
936     }
937   if (size_tree != NULL_TREE
938       && TREE_CODE (size_tree) == INTEGER_CST)
939     size = wi::to_offset (size_tree);
940 
941   /* Initially, maxsize is the same as the accessed element size.
942      In the following it will only grow (or become -1).  */
943   max_size = size;
944 
945   /* Compute cumulative bit-offset for nested component-refs and array-refs,
946      and find the ultimate containing object.  */
947   FOR_EACH_VEC_ELT (ops, i, op)
948     {
949       switch (op->opcode)
950 	{
951 	/* These may be in the reference ops, but we cannot do anything
952 	   sensible with them here.  */
953 	case ADDR_EXPR:
954 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
955 	  if (base != NULL_TREE
956 	      && TREE_CODE (base) == MEM_REF
957 	      && op->op0
958 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
959 	    {
960 	      vn_reference_op_t pop = &ops[i-1];
961 	      base = TREE_OPERAND (op->op0, 0);
962 	      if (pop->off == -1)
963 		{
964 		  max_size = -1;
965 		  offset = 0;
966 		}
967 	      else
968 		offset += pop->off * BITS_PER_UNIT;
969 	      op0_p = NULL;
970 	      break;
971 	    }
972 	  /* Fallthru.  */
973 	case CALL_EXPR:
974 	  return false;
975 
976 	/* Record the base objects.  */
977 	case MEM_REF:
978 	  base_alias_set = get_deref_alias_set (op->op0);
979 	  *op0_p = build2 (MEM_REF, op->type,
980 			   NULL_TREE, op->op0);
981 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
982 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
983 	  op0_p = &TREE_OPERAND (*op0_p, 0);
984 	  break;
985 
986 	case VAR_DECL:
987 	case PARM_DECL:
988 	case RESULT_DECL:
989 	case SSA_NAME:
990 	  *op0_p = op->op0;
991 	  op0_p = NULL;
992 	  break;
993 
994 	/* And now the usual component-reference style ops.  */
995 	case BIT_FIELD_REF:
996 	  offset += wi::to_offset (op->op1);
997 	  break;
998 
999 	case COMPONENT_REF:
1000 	  {
1001 	    tree field = op->op0;
1002 	    /* We do not have a complete COMPONENT_REF tree here so we
1003 	       cannot use component_ref_field_offset.  Do the interesting
1004 	       parts manually.  */
1005 	    tree this_offset = DECL_FIELD_OFFSET (field);
1006 
1007 	    if (op->op1 || TREE_CODE (this_offset) != INTEGER_CST)
1008 	      max_size = -1;
1009 	    else
1010 	      {
1011 		offset_int woffset = (wi::to_offset (this_offset)
1012 				      << LOG2_BITS_PER_UNIT);
1013 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1014 		offset += woffset;
1015 	      }
1016 	    break;
1017 	  }
1018 
1019 	case ARRAY_RANGE_REF:
1020 	case ARRAY_REF:
1021 	  /* We recorded the lower bound and the element size.  */
1022 	  if (TREE_CODE (op->op0) != INTEGER_CST
1023 	      || TREE_CODE (op->op1) != INTEGER_CST
1024 	      || TREE_CODE (op->op2) != INTEGER_CST)
1025 	    max_size = -1;
1026 	  else
1027 	    {
1028 	      offset_int woffset
1029 		= wi::sext (wi::to_offset (op->op0) - wi::to_offset (op->op1),
1030 			    TYPE_PRECISION (TREE_TYPE (op->op0)));
1031 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1032 	      woffset <<= LOG2_BITS_PER_UNIT;
1033 	      offset += woffset;
1034 	    }
1035 	  break;
1036 
1037 	case REALPART_EXPR:
1038 	  break;
1039 
1040 	case IMAGPART_EXPR:
1041 	  offset += size;
1042 	  break;
1043 
1044 	case VIEW_CONVERT_EXPR:
1045 	  break;
1046 
1047 	case STRING_CST:
1048 	case INTEGER_CST:
1049 	case COMPLEX_CST:
1050 	case VECTOR_CST:
1051 	case REAL_CST:
1052 	case CONSTRUCTOR:
1053 	case CONST_DECL:
1054 	  return false;
1055 
1056 	default:
1057 	  return false;
1058 	}
1059     }
1060 
1061   if (base == NULL_TREE)
1062     return false;
1063 
1064   ref->ref = NULL_TREE;
1065   ref->base = base;
1066   ref->ref_alias_set = set;
1067   if (base_alias_set != -1)
1068     ref->base_alias_set = base_alias_set;
1069   else
1070     ref->base_alias_set = get_alias_set (base);
1071   /* We discount volatiles from value-numbering elsewhere.  */
1072   ref->volatile_p = false;
1073 
1074   if (!wi::fits_shwi_p (size) || wi::neg_p (size))
1075     {
1076       ref->offset = 0;
1077       ref->size = -1;
1078       ref->max_size = -1;
1079       return true;
1080     }
1081 
1082   ref->size = size.to_shwi ();
1083 
1084   if (!wi::fits_shwi_p (offset))
1085     {
1086       ref->offset = 0;
1087       ref->max_size = -1;
1088       return true;
1089     }
1090 
1091   ref->offset = offset.to_shwi ();
1092 
1093   if (!wi::fits_shwi_p (max_size) || wi::neg_p (max_size))
1094     ref->max_size = -1;
1095   else
1096     ref->max_size = max_size.to_shwi ();
1097 
1098   return true;
1099 }
1100 
/* Copy the operations present in the call statement CALL into RESULT,
   a vector of vn_reference_op_s's.  */
1103 
1104 static void
1105 copy_reference_ops_from_call (gcall *call,
1106 			      vec<vn_reference_op_s> *result)
1107 {
1108   vn_reference_op_s temp;
1109   unsigned i;
1110   tree lhs = gimple_call_lhs (call);
1111   int lr;
1112 
  /* If two calls have a different non-SSA LHS, their vdef value numbers
     should be different.  By adding the LHS to the vector here, we ensure
     that the hashcode is different, guaranteeing a different value
     number.  */
1116   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1117     {
1118       memset (&temp, 0, sizeof (temp));
1119       temp.opcode = MODIFY_EXPR;
1120       temp.type = TREE_TYPE (lhs);
1121       temp.op0 = lhs;
1122       temp.off = -1;
1123       result->safe_push (temp);
1124     }
1125 
1126   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1127   memset (&temp, 0, sizeof (temp));
1128   temp.type = gimple_call_return_type (call);
1129   temp.opcode = CALL_EXPR;
1130   temp.op0 = gimple_call_fn (call);
1131   temp.op1 = gimple_call_chain (call);
1132   if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1133     temp.op2 = size_int (lr);
1134   temp.off = -1;
1135   if (gimple_call_with_bounds_p (call))
1136     temp.with_bounds = 1;
1137   result->safe_push (temp);
1138 
1139   /* Copy the call arguments.  As they can be references as well,
1140      just chain them together.  */
1141   for (i = 0; i < gimple_call_num_args (call); ++i)
1142     {
1143       tree callarg = gimple_call_arg (call, i);
1144       copy_reference_ops_from_ref (callarg, result);
1145     }
1146 }
1147 
1148 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1149    *I_P to point to the last element of the replacement.  */
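
/* For example (illustrative), if field b sits at byte offset 4 in its
   containing object, the operation pair { MEM_REF off 0, ADDR_EXPR &a.b }
   is rewritten to { MEM_REF off 4, ADDR_EXPR &a }.  */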
1150 static bool
1151 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1152 			    unsigned int *i_p)
1153 {
1154   unsigned int i = *i_p;
1155   vn_reference_op_t op = &(*ops)[i];
1156   vn_reference_op_t mem_op = &(*ops)[i - 1];
1157   tree addr_base;
1158   HOST_WIDE_INT addr_offset = 0;
1159 
  /* The only thing we have to do is, for an address &OBJ.foo.bar, add the
     offset of .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
1163   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1164 					     &addr_offset);
1165   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1166   if (addr_base != TREE_OPERAND (op->op0, 0))
1167     {
1168       offset_int off = offset_int::from (mem_op->op0, SIGNED);
1169       off += addr_offset;
1170       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1171       op->op0 = build_fold_addr_expr (addr_base);
1172       if (tree_fits_shwi_p (mem_op->op0))
1173 	mem_op->off = tree_to_shwi (mem_op->op0);
1174       else
1175 	mem_op->off = -1;
1176       return true;
1177     }
1178   return false;
1179 }
1180 
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS by looking
   through the definition of the SSA name address at that position (an
   ADDR_EXPR or POINTER_PLUS_EXPR).  Updates *I_P to point to the last
   element of the replacement.  */
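
/* For example (illustrative), given the definition p_1 = q_2 + 8 the
   operand pair for MEM[p_1 + 4] is rewritten to that of MEM[q_2 + 12];
   likewise a definition p_1 = &MEM[q_2 + 8] leads to the same rewrite.  */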
1183 static bool
1184 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1185 				     unsigned int *i_p)
1186 {
1187   unsigned int i = *i_p;
1188   vn_reference_op_t op = &(*ops)[i];
1189   vn_reference_op_t mem_op = &(*ops)[i - 1];
1190   gimple *def_stmt;
1191   enum tree_code code;
1192   offset_int off;
1193 
1194   def_stmt = SSA_NAME_DEF_STMT (op->op0);
1195   if (!is_gimple_assign (def_stmt))
1196     return false;
1197 
1198   code = gimple_assign_rhs_code (def_stmt);
1199   if (code != ADDR_EXPR
1200       && code != POINTER_PLUS_EXPR)
1201     return false;
1202 
1203   off = offset_int::from (mem_op->op0, SIGNED);
1204 
  /* For an address &OBJ.foo.bar, add the offset of .foo.bar to the
     preceding MEM_REF offset and replace the address with &OBJ.  */
1208   if (code == ADDR_EXPR)
1209     {
1210       tree addr, addr_base;
1211       HOST_WIDE_INT addr_offset;
1212 
1213       addr = gimple_assign_rhs1 (def_stmt);
1214       addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1215 						 &addr_offset);
      /* If that didn't work because the address isn't invariant, propagate
	 the reference tree from the address operation in case the current
	 dereference has a zero offset.  */
1219       if (!addr_base
1220 	  && *i_p == ops->length () - 1
1221 	  && off == 0
	  /* This makes us disable this transform for PRE, where the
	     reference ops might also be used for code insertion, which
	     is invalid.  */
1225 	  && default_vn_walk_kind == VN_WALKREWRITE)
1226 	{
1227 	  auto_vec<vn_reference_op_s, 32> tem;
1228 	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1229 	  /* Make sure to preserve TBAA info.  The only objects not
1230 	     wrapped in MEM_REFs that can have their address taken are
1231 	     STRING_CSTs.  */
1232 	  if (tem.length () >= 2
1233 	      && tem[tem.length () - 2].opcode == MEM_REF)
1234 	    {
1235 	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1236 	      new_mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1237 						  new_mem_op->op0);
1238 	    }
1239 	  else
1240 	    gcc_assert (tem.last ().opcode == STRING_CST);
1241 	  ops->pop ();
1242 	  ops->pop ();
1243 	  ops->safe_splice (tem);
1244 	  --*i_p;
1245 	  return true;
1246 	}
1247       if (!addr_base
1248 	  || TREE_CODE (addr_base) != MEM_REF
1249 	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1250 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1251 	return false;
1252 
1253       off += addr_offset;
1254       off += mem_ref_offset (addr_base);
1255       op->op0 = TREE_OPERAND (addr_base, 0);
1256     }
1257   else
1258     {
1259       tree ptr, ptroff;
1260       ptr = gimple_assign_rhs1 (def_stmt);
1261       ptroff = gimple_assign_rhs2 (def_stmt);
1262       if (TREE_CODE (ptr) != SSA_NAME
1263 	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1264 	  || TREE_CODE (ptroff) != INTEGER_CST)
1265 	return false;
1266 
1267       off += wi::to_offset (ptroff);
1268       op->op0 = ptr;
1269     }
1270 
1271   mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1272   if (tree_fits_shwi_p (mem_op->op0))
1273     mem_op->off = tree_to_shwi (mem_op->op0);
1274   else
1275     mem_op->off = -1;
1276   if (TREE_CODE (op->op0) == SSA_NAME)
1277     op->op0 = SSA_VAL (op->op0);
1278   if (TREE_CODE (op->op0) != SSA_NAME)
1279     op->opcode = TREE_CODE (op->op0);
1280 
1281   /* And recurse.  */
1282   if (TREE_CODE (op->op0) == SSA_NAME)
1283     vn_reference_maybe_forwprop_address (ops, i_p);
1284   else if (TREE_CODE (op->op0) == ADDR_EXPR)
1285     vn_reference_fold_indirect (ops, i_p);
1286   return true;
1287 }
1288 
1289 /* Optimize the reference REF to a constant if possible or return
1290    NULL_TREE if not.  */
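
/* For instance (a hypothetical example), a load of type int represented
   as MEM[&tab + 8] from

     static const int tab[4] = { 1, 2, 3, 4 };

   is folded to the constant 3 (assuming a 4-byte int) via
   fold_ctor_reference below.  */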
1291 
1292 tree
1293 fully_constant_vn_reference_p (vn_reference_t ref)
1294 {
1295   vec<vn_reference_op_s> operands = ref->operands;
1296   vn_reference_op_t op;
1297 
1298   /* Try to simplify the translated expression if it is
1299      a call to a builtin function with at most two arguments.  */
1300   op = &operands[0];
1301   if (op->opcode == CALL_EXPR
1302       && TREE_CODE (op->op0) == ADDR_EXPR
1303       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1304       && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1305       && operands.length () >= 2
1306       && operands.length () <= 3)
1307     {
1308       vn_reference_op_t arg0, arg1 = NULL;
1309       bool anyconst = false;
1310       arg0 = &operands[1];
1311       if (operands.length () > 2)
1312 	arg1 = &operands[2];
1313       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1314 	  || (arg0->opcode == ADDR_EXPR
1315 	      && is_gimple_min_invariant (arg0->op0)))
1316 	anyconst = true;
1317       if (arg1
1318 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1319 	      || (arg1->opcode == ADDR_EXPR
1320 		  && is_gimple_min_invariant (arg1->op0))))
1321 	anyconst = true;
1322       if (anyconst)
1323 	{
1324 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1325 					 arg1 ? 2 : 1,
1326 					 arg0->op0,
1327 					 arg1 ? arg1->op0 : NULL);
1328 	  if (folded
1329 	      && TREE_CODE (folded) == NOP_EXPR)
1330 	    folded = TREE_OPERAND (folded, 0);
1331 	  if (folded
1332 	      && is_gimple_min_invariant (folded))
1333 	    return folded;
1334 	}
1335     }
1336 
1337   /* Simplify reads from constants or constant initializers.  */
1338   else if (BITS_PER_UNIT == 8
1339 	   && is_gimple_reg_type (ref->type)
1340 	   && (!INTEGRAL_TYPE_P (ref->type)
1341 	       || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1342     {
1343       HOST_WIDE_INT off = 0;
1344       HOST_WIDE_INT size;
1345       if (INTEGRAL_TYPE_P (ref->type))
1346 	size = TYPE_PRECISION (ref->type);
1347       else
1348 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1349       if (size % BITS_PER_UNIT != 0
1350 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1351 	return NULL_TREE;
1352       size /= BITS_PER_UNIT;
1353       unsigned i;
1354       for (i = 0; i < operands.length (); ++i)
1355 	{
1356 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1357 	    {
1358 	      ++i;
1359 	      break;
1360 	    }
1361 	  if (operands[i].off == -1)
1362 	    return NULL_TREE;
1363 	  off += operands[i].off;
1364 	  if (operands[i].opcode == MEM_REF)
1365 	    {
1366 	      ++i;
1367 	      break;
1368 	    }
1369 	}
1370       vn_reference_op_t base = &operands[--i];
1371       tree ctor = error_mark_node;
1372       tree decl = NULL_TREE;
1373       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1374 	ctor = base->op0;
1375       else if (base->opcode == MEM_REF
1376 	       && base[1].opcode == ADDR_EXPR
1377 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1378 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL))
1379 	{
1380 	  decl = TREE_OPERAND (base[1].op0, 0);
1381 	  ctor = ctor_for_folding (decl);
1382 	}
1383       if (ctor == NULL_TREE)
1384 	return build_zero_cst (ref->type);
1385       else if (ctor != error_mark_node)
1386 	{
1387 	  if (decl)
1388 	    {
1389 	      tree res = fold_ctor_reference (ref->type, ctor,
1390 					      off * BITS_PER_UNIT,
1391 					      size * BITS_PER_UNIT, decl);
1392 	      if (res)
1393 		{
1394 		  STRIP_USELESS_TYPE_CONVERSION (res);
1395 		  if (is_gimple_min_invariant (res))
1396 		    return res;
1397 		}
1398 	    }
1399 	  else
1400 	    {
1401 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1402 	      int len = native_encode_expr (ctor, buf, size, off);
1403 	      if (len > 0)
1404 		return native_interpret_expr (ref->type, buf, len);
1405 	    }
1406 	}
1407     }
1408 
1409   return NULL_TREE;
1410 }
1411 
1412 /* Return true if OPS contain a storage order barrier.  */
1413 
1414 static bool
1415 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1416 {
1417   vn_reference_op_t op;
1418   unsigned i;
1419 
1420   FOR_EACH_VEC_ELT (ops, i, op)
1421     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1422       return true;
1423 
1424   return false;
1425 }
1426 
1427 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1428    structures into their value numbers.  This is done in-place, and
1429    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1430    whether any operands were valueized.  */
1431 
1432 static vec<vn_reference_op_s>
1433 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
1434 {
1435   vn_reference_op_t vro;
1436   unsigned int i;
1437 
1438   *valueized_anything = false;
1439 
1440   FOR_EACH_VEC_ELT (orig, i, vro)
1441     {
1442       if (vro->opcode == SSA_NAME
1443 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1444 	{
1445 	  tree tem = SSA_VAL (vro->op0);
1446 	  if (tem != vro->op0)
1447 	    {
1448 	      *valueized_anything = true;
1449 	      vro->op0 = tem;
1450 	    }
1451 	  /* If it transforms from an SSA_NAME to a constant, update
1452 	     the opcode.  */
1453 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1454 	    vro->opcode = TREE_CODE (vro->op0);
1455 	}
1456       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1457 	{
1458 	  tree tem = SSA_VAL (vro->op1);
1459 	  if (tem != vro->op1)
1460 	    {
1461 	      *valueized_anything = true;
1462 	      vro->op1 = tem;
1463 	    }
1464 	}
1465       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1466 	{
1467 	  tree tem = SSA_VAL (vro->op2);
1468 	  if (tem != vro->op2)
1469 	    {
1470 	      *valueized_anything = true;
1471 	      vro->op2 = tem;
1472 	    }
1473 	}
1474       /* If it transforms from an SSA_NAME to an address, fold with
1475 	 a preceding indirect reference.  */
1476       if (i > 0
1477 	  && vro->op0
1478 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1479 	  && orig[i - 1].opcode == MEM_REF)
1480 	{
1481 	  if (vn_reference_fold_indirect (&orig, &i))
1482 	    *valueized_anything = true;
1483 	}
1484       else if (i > 0
1485 	       && vro->opcode == SSA_NAME
1486 	       && orig[i - 1].opcode == MEM_REF)
1487 	{
1488 	  if (vn_reference_maybe_forwprop_address (&orig, &i))
1489 	    *valueized_anything = true;
1490 	}
1491       /* If it transforms a non-constant ARRAY_REF into a constant
1492 	 one, adjust the constant offset.  */
1493       else if (vro->opcode == ARRAY_REF
1494 	       && vro->off == -1
1495 	       && TREE_CODE (vro->op0) == INTEGER_CST
1496 	       && TREE_CODE (vro->op1) == INTEGER_CST
1497 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1498 	{
1499 	  offset_int off = ((wi::to_offset (vro->op0)
1500 			     - wi::to_offset (vro->op1))
1501 			    * wi::to_offset (vro->op2)
1502 			    * vn_ref_op_align_unit (vro));
1503 	  if (wi::fits_shwi_p (off))
1504 	    vro->off = off.to_shwi ();
1505 	}
1506     }
1507 
1508   return orig;
1509 }
1510 
1511 static vec<vn_reference_op_s>
1512 valueize_refs (vec<vn_reference_op_s> orig)
1513 {
1514   bool tem;
1515   return valueize_refs_1 (orig, &tem);
1516 }
1517 
1518 static vec<vn_reference_op_s> shared_lookup_references;
1519 
1520 /* Create a vector of vn_reference_op_s structures from REF, a
1521    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1522    this function.  *VALUEIZED_ANYTHING will specify whether any
1523    operands were valueized.  */
1524 
1525 static vec<vn_reference_op_s>
1526 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1527 {
1528   if (!ref)
1529     return vNULL;
1530   shared_lookup_references.truncate (0);
1531   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1532   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1533 					      valueized_anything);
1534   return shared_lookup_references;
1535 }
1536 
1537 /* Create a vector of vn_reference_op_s structures from CALL, a
1538    call statement.  The vector is shared among all callers of
1539    this function.  */
1540 
1541 static vec<vn_reference_op_s>
1542 valueize_shared_reference_ops_from_call (gcall *call)
1543 {
1544   if (!call)
1545     return vNULL;
1546   shared_lookup_references.truncate (0);
1547   copy_reference_ops_from_call (call, &shared_lookup_references);
1548   shared_lookup_references = valueize_refs (shared_lookup_references);
1549   return shared_lookup_references;
1550 }
1551 
1552 /* Lookup a SCCVN reference operation VR in the current hash table.
1553    Returns the resulting value number if it exists in the hash table,
1554    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1555    vn_reference_t stored in the hashtable if something is found.  */
1556 
1557 static tree
1558 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1559 {
1560   vn_reference_s **slot;
1561   hashval_t hash;
1562 
1563   hash = vr->hashcode;
1564   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1565   if (!slot && current_info == optimistic_info)
1566     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1567   if (slot)
1568     {
1569       if (vnresult)
1570 	*vnresult = (vn_reference_t)*slot;
1571       return ((vn_reference_t)*slot)->result;
1572     }
1573 
1574   return NULL_TREE;
1575 }
1576 
1577 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
1578    with the current VUSE and performs the expression lookup.  */
1579 
1580 static void *
1581 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1582 		       unsigned int cnt, void *vr_)
1583 {
1584   vn_reference_t vr = (vn_reference_t)vr_;
1585   vn_reference_s **slot;
1586   hashval_t hash;
1587 
1588   /* This bounds the stmt walks we perform on reference lookups
1589      to O(1) instead of O(N) where N is the number of dominating
1590      stores.  */
1591   if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1592     return (void *)-1;
1593 
1594   if (last_vuse_ptr)
1595     *last_vuse_ptr = vuse;
1596 
1597   /* Fixup vuse and hash.  */
1598   if (vr->vuse)
1599     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1600   vr->vuse = vuse_ssa_val (vuse);
1601   if (vr->vuse)
1602     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1603 
1604   hash = vr->hashcode;
1605   slot = current_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1606   if (!slot && current_info == optimistic_info)
1607     slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1608   if (slot)
1609     return *slot;
1610 
1611   return NULL;
1612 }
1613 
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference with
   value VALUE, which is either a constant or an SSA name.  */
1617 
1618 static vn_reference_t
1619 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1620 					  alias_set_type set,
1621 					  tree type,
1622 					  vec<vn_reference_op_s,
1623 					        va_heap> operands,
1624 					  tree value)
1625 {
1626   vn_reference_s vr1;
1627   vn_reference_t result;
1628   unsigned value_id;
1629   vr1.vuse = vuse;
1630   vr1.operands = operands;
1631   vr1.type = type;
1632   vr1.set = set;
1633   vr1.hashcode = vn_reference_compute_hash (&vr1);
1634   if (vn_reference_lookup_1 (&vr1, &result))
1635     return result;
1636   if (TREE_CODE (value) == SSA_NAME)
1637     value_id = VN_INFO (value)->value_id;
1638   else
1639     value_id = get_or_alloc_constant_value_id (value);
1640   return vn_reference_insert_pieces (vuse, set, type,
1641 				     operands.copy (), value, value_id);
1642 }
1643 
1644 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *stmt, tree result);
1645 
1646 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
1647 
1648 static tree
1649 vn_lookup_simplify_result (code_helper rcode, tree type, tree *ops_)
1650 {
1651   if (!rcode.is_tree_code ())
1652     return NULL_TREE;
1653   tree *ops = ops_;
1654   unsigned int length = TREE_CODE_LENGTH ((tree_code) rcode);
1655   if (rcode == CONSTRUCTOR
      /* ???  We can arrive here both with SCCVN's view (a decomposed
	 CONSTRUCTOR) and with GIMPLE's / match-and-simplify's view
	 (the CONSTRUCTOR as a GENERIC tree).  */
1658       && TREE_CODE (ops_[0]) == CONSTRUCTOR)
1659     {
1660       length = CONSTRUCTOR_NELTS (ops_[0]);
1661       ops = XALLOCAVEC (tree, length);
1662       for (unsigned i = 0; i < length; ++i)
1663 	ops[i] = CONSTRUCTOR_ELT (ops_[0], i)->value;
1664     }
1665   vn_nary_op_t vnresult = NULL;
1666   return vn_nary_op_lookup_pieces (length, (tree_code) rcode,
1667 				   type, ops, &vnresult);
1668 }
1669 
1670 /* Return a value-number for RCODE OPS... either by looking up an existing
1671    value-number for the simplified result or by inserting the operation if
1672    INSERT is true.  */
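
/* For example (illustrative): asked for PLUS_EXPR <x_1, 0> the
   simplification yields x_1 and its value number is returned without
   inserting anything; for an expression that does not simplify and has
   no entry yet, a new SSA name is created (when INSERT), value-numbered
   to itself and marked as needing insertion.  */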
1673 
1674 static tree
1675 vn_nary_build_or_lookup_1 (code_helper rcode, tree type, tree *ops,
1676 			   bool insert)
1677 {
1678   tree result = NULL_TREE;
1679   /* We will be creating a value number for
1680        RCODE (OPS...).
1681      So first simplify and lookup this expression to see if it
1682      is already available.  */
1683   mprts_hook = vn_lookup_simplify_result;
1684   bool res = false;
1685   switch (TREE_CODE_LENGTH ((tree_code) rcode))
1686     {
1687     case 1:
1688       res = gimple_resimplify1 (NULL, &rcode, type, ops, vn_valueize);
1689       break;
1690     case 2:
1691       res = gimple_resimplify2 (NULL, &rcode, type, ops, vn_valueize);
1692       break;
1693     case 3:
1694       res = gimple_resimplify3 (NULL, &rcode, type, ops, vn_valueize);
1695       break;
1696     }
1697   mprts_hook = NULL;
1698   gimple *new_stmt = NULL;
1699   if (res
1700       && gimple_simplified_result_is_gimple_val (rcode, ops))
1701     /* The expression is already available.  */
1702     result = ops[0];
1703   else
1704     {
1705       tree val = vn_lookup_simplify_result (rcode, type, ops);
1706       if (!val && insert)
1707 	{
1708 	  gimple_seq stmts = NULL;
1709 	  result = maybe_push_res_to_seq (rcode, type, ops, &stmts);
1710 	  if (result)
1711 	    {
1712 	      gcc_assert (gimple_seq_singleton_p (stmts));
1713 	      new_stmt = gimple_seq_first_stmt (stmts);
1714 	    }
1715 	}
1716       else
1717 	/* The expression is already available.  */
1718 	result = val;
1719     }
1720   if (new_stmt)
1721     {
1722       /* The expression is not yet available, value-number lhs to
1723 	 the new SSA_NAME we created.  */
1724       /* Initialize value-number information properly.  */
1725       VN_INFO_GET (result)->valnum = result;
1726       VN_INFO (result)->value_id = get_next_value_id ();
1727       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1728 					  new_stmt);
1729       VN_INFO (result)->needs_insertion = true;
1730       /* ???  PRE phi-translation inserts NARYs without corresponding
1731          SSA name result.  Re-use those but set their result according
1732 	 to the stmt we just built.  */
1733       vn_nary_op_t nary = NULL;
1734       vn_nary_op_lookup_stmt (new_stmt, &nary);
1735       if (nary)
1736 	{
1737 	  gcc_assert (nary->result == NULL_TREE);
1738 	  nary->result = gimple_assign_lhs (new_stmt);
1739 	}
1740       /* As all "inserted" statements are singleton SCCs, insert
1741 	 to the valid table.  This is strictly needed to
1742 	 avoid re-generating new value SSA_NAMEs for the same
1743 	 expression during SCC iteration over and over (the
1744 	 optimistic table gets cleared after each iteration).
1745 	 We do not need to insert into the optimistic table, as
1746 	 lookups there will fall back to the valid table.  */
1747       else if (current_info == optimistic_info)
1748 	{
1749 	  current_info = valid_info;
1750 	  vn_nary_op_insert_stmt (new_stmt, result);
1751 	  current_info = optimistic_info;
1752 	}
1753       else
1754 	vn_nary_op_insert_stmt (new_stmt, result);
1755       if (dump_file && (dump_flags & TDF_DETAILS))
1756 	{
1757 	  fprintf (dump_file, "Inserting name ");
1758 	  print_generic_expr (dump_file, result, 0);
1759 	  fprintf (dump_file, " for expression ");
1760 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1761 	  fprintf (dump_file, "\n");
1762 	}
1763     }
1764   return result;
1765 }
1766 
1767 /* Return a value-number for RCODE OPS... either by looking up an existing
1768    value-number for the simplified result or by inserting the operation.  */
1769 
1770 static tree
1771 vn_nary_build_or_lookup (code_helper rcode, tree type, tree *ops)
1772 {
1773   return vn_nary_build_or_lookup_1 (rcode, type, ops, true);
1774 }
1775 
/* Try to simplify the expression represented by NARY (its opcode, type
   and operands) and return its value if present.  */
1778 
1779 tree
1780 vn_nary_simplify (vn_nary_op_t nary)
1781 {
1782   if (nary->length > 3)
1783     return NULL_TREE;
1784   tree ops[3];
1785   memcpy (ops, nary->op, sizeof (tree) * nary->length);
1786   return vn_nary_build_or_lookup_1 (nary->opcode, nary->type, ops, false);
1787 }
1788 
1789 
1790 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
1791    from the statement defining VUSE and if not successful tries to
   translate *REF and VR_ through an aggregate copy at the definition
1793    of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
1794    of *REF and *VR.  If only disambiguation was performed then
1795    *DISAMBIGUATE_ONLY is set to true.  */
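/* The return value is NULL to continue the walk with the (possibly
   translated) *REF and VR_, a vn_reference_t (cast to void *) holding
   the found or inserted value to stop the walk successfully, or
   (void *)-1 to give up and stop the walk.  */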
1796 
1797 static void *
1798 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1799 		       bool *disambiguate_only)
1800 {
1801   vn_reference_t vr = (vn_reference_t)vr_;
1802   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1803   tree base = ao_ref_base (ref);
1804   HOST_WIDE_INT offset, maxsize;
1805   static vec<vn_reference_op_s> lhs_ops;
1806   ao_ref lhs_ref;
1807   bool lhs_ref_ok = false;
1808 
  /* If the reference is based on a parameter that was determined to be
     pointing to readonly memory it doesn't change.  */
1811   if (TREE_CODE (base) == MEM_REF
1812       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1813       && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1814       && bitmap_bit_p (const_parms,
1815 		       SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1816     {
1817       *disambiguate_only = true;
1818       return NULL;
1819     }
1820 
  /* First try to disambiguate after value-replacing in the definition's LHS.  */
1822   if (is_gimple_assign (def_stmt))
1823     {
1824       tree lhs = gimple_assign_lhs (def_stmt);
1825       bool valueized_anything = false;
1826       /* Avoid re-allocation overhead.  */
1827       lhs_ops.truncate (0);
1828       copy_reference_ops_from_ref (lhs, &lhs_ops);
1829       lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
1830       if (valueized_anything)
1831 	{
1832 	  lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1833 						      get_alias_set (lhs),
1834 						      TREE_TYPE (lhs), lhs_ops);
1835 	  if (lhs_ref_ok
1836 	      && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1837 	    {
1838 	      *disambiguate_only = true;
1839 	      return NULL;
1840 	    }
1841 	}
1842       else
1843 	{
1844 	  ao_ref_init (&lhs_ref, lhs);
1845 	  lhs_ref_ok = true;
1846 	}
1847     }
1848   else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1849 	   && gimple_call_num_args (def_stmt) <= 4)
1850     {
      /* For builtin calls valueize their arguments and call the
1852          alias oracle again.  Valueization may improve points-to
1853 	 info of pointers and constify size and position arguments.
1854 	 Originally this was motivated by PR61034 which has
1855 	 conditional calls to free falsely clobbering ref because
1856 	 of imprecise points-to info of the argument.  */
1857       tree oldargs[4];
1858       bool valueized_anything = false;
1859       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1860 	{
1861 	  oldargs[i] = gimple_call_arg (def_stmt, i);
1862 	  if (TREE_CODE (oldargs[i]) == SSA_NAME
1863 	      && VN_INFO (oldargs[i])->valnum != oldargs[i])
1864 	    {
1865 	      gimple_call_set_arg (def_stmt, i, VN_INFO (oldargs[i])->valnum);
1866 	      valueized_anything = true;
1867 	    }
1868 	}
1869       if (valueized_anything)
1870 	{
1871 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1872 					       ref);
1873 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1874 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
1875 	  if (!res)
1876 	    {
1877 	      *disambiguate_only = true;
1878 	      return NULL;
1879 	    }
1880 	}
1881     }
1882 
1883   if (*disambiguate_only)
1884     return (void *)-1;
1885 
1886   offset = ref->offset;
1887   maxsize = ref->max_size;
1888 
1889   /* If we cannot constrain the size of the reference we cannot
1890      test if anything kills it.  */
1891   if (maxsize == -1)
1892     return (void *)-1;
1893 
1894   /* We can't deduce anything useful from clobbers.  */
1895   if (gimple_clobber_p (def_stmt))
1896     return (void *)-1;
1897 
1898   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
1899      from that definition.
1900      1) Memset.  */
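  /* An illustrative example for case 1: after
       memset (&a, 0, sizeof (a));
     a load from a (or from a piece of a completely covered by the
     memset) can be value-numbered to zero.  */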
1901   if (is_gimple_reg_type (vr->type)
1902       && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1903       && integer_zerop (gimple_call_arg (def_stmt, 1))
1904       && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2))
1905       && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
1906     {
1907       tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
1908       tree base2;
1909       HOST_WIDE_INT offset2, size2, maxsize2;
1910       bool reverse;
1911       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
1912 				       &reverse);
1913       size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
1914       if ((unsigned HOST_WIDE_INT)size2 / 8
1915 	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
1916 	  && maxsize2 != -1
1917 	  && operand_equal_p (base, base2, 0)
1918 	  && offset2 <= offset
1919 	  && offset2 + size2 >= offset + maxsize)
1920 	{
1921 	  tree val = build_zero_cst (vr->type);
1922 	  return vn_reference_lookup_or_insert_for_pieces
1923 	           (vuse, vr->set, vr->type, vr->operands, val);
1924 	}
1925     }
1926 
1927   /* 2) Assignment from an empty CONSTRUCTOR.  */
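  /* An illustrative example for case 2: after
       a = {};
     a register-type load from a piece of a covered by the store can be
     value-numbered to zero.  */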
1928   else if (is_gimple_reg_type (vr->type)
1929 	   && gimple_assign_single_p (def_stmt)
1930 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
1931 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
1932     {
1933       tree base2;
1934       HOST_WIDE_INT offset2, size2, maxsize2;
1935       bool reverse;
1936       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1937 				       &offset2, &size2, &maxsize2, &reverse);
1938       if (maxsize2 != -1
1939 	  && maxsize2 == size2
1940 	  && operand_equal_p (base, base2, 0)
1941 	  && offset2 <= offset
1942 	  && offset2 + size2 >= offset + maxsize)
1943 	{
1944 	  tree val = build_zero_cst (vr->type);
1945 	  return vn_reference_lookup_or_insert_for_pieces
1946 	           (vuse, vr->set, vr->type, vr->operands, val);
1947 	}
1948     }
1949 
  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
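  /* An illustrative example for case 3: after
       a.i = 0x12345678;
     a smaller load from within a.i can be value-numbered by
     native-encoding the stored constant and re-interpreting the
     relevant bytes in the type of the load.  */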
1952   else if (ref->size == maxsize
1953 	   && is_gimple_reg_type (vr->type)
1954 	   && !contains_storage_order_barrier_p (vr->operands)
1955 	   && gimple_assign_single_p (def_stmt)
1956 	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
1957 	   && maxsize % BITS_PER_UNIT == 0
1958 	   && offset % BITS_PER_UNIT == 0
1959 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
1960 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
1961 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
1962     {
1963       tree base2;
1964       HOST_WIDE_INT offset2, size2, maxsize2;
1965       bool reverse;
1966       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
1967 				       &offset2, &size2, &maxsize2, &reverse);
1968       if (!reverse
1969 	  && maxsize2 != -1
1970 	  && maxsize2 == size2
1971 	  && size2 % BITS_PER_UNIT == 0
1972 	  && offset2 % BITS_PER_UNIT == 0
1973 	  && operand_equal_p (base, base2, 0)
1974 	  && offset2 <= offset
1975 	  && offset2 + size2 >= offset + maxsize)
1976 	{
1977 	  /* We support up to 512-bit values (for V8DFmode).  */
1978 	  unsigned char buffer[64];
1979 	  int len;
1980 
1981 	  tree rhs = gimple_assign_rhs1 (def_stmt);
1982 	  if (TREE_CODE (rhs) == SSA_NAME)
1983 	    rhs = SSA_VAL (rhs);
1984 	  unsigned pad = 0;
1985 	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (rhs));
1986 	  if (BYTES_BIG_ENDIAN
1987 	      && (SCALAR_INT_MODE_P (mode)
1988 		  || ALL_SCALAR_FIXED_POINT_MODE_P (mode)
1989 		  || SCALAR_FLOAT_MODE_P (mode)))
1990 	    {
1991 	      /* On big-endian the padding is at the 'front' so
1992 		 just skip the initial bytes.  */
1993 	      pad = GET_MODE_SIZE (mode) - size2 / BITS_PER_UNIT;
1994 	    }
1995 	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
1996 				    buffer, sizeof (buffer),
1997 				    (offset - offset2) / BITS_PER_UNIT + pad);
1998 	  if (len > 0 && len * BITS_PER_UNIT >= ref->size)
1999 	    {
2000 	      tree type = vr->type;
2001 	      /* Make sure to interpret in a type that has a range
2002 	         covering the whole access size.  */
2003 	      if (INTEGRAL_TYPE_P (vr->type)
2004 		  && ref->size != TYPE_PRECISION (vr->type))
2005 		type = build_nonstandard_integer_type (ref->size,
2006 						       TYPE_UNSIGNED (type));
2007 	      tree val = native_interpret_expr (type, buffer,
2008 						ref->size / BITS_PER_UNIT);
	      /* If we chop off bits because the type's precision doesn't
		 match the memory access size this is ok when optimizing
		 reads but not when called from the DSE code during
		 elimination.  */
2013 	      if (val
2014 		  && type != vr->type)
2015 		{
2016 		  if (! int_fits_type_p (val, vr->type))
2017 		    val = NULL_TREE;
2018 		  else
2019 		    val = fold_convert (vr->type, val);
2020 		}
2021 
2022 	      if (val)
2023 		return vn_reference_lookup_or_insert_for_pieces
2024 			 (vuse, vr->set, vr->type, vr->operands, val);
2025 	    }
2026 	}
2027     }
2028 
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
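  /* An illustrative example for case 4: after
       a = x_1;
     (a register-type store of SSA name x_1) a load of a piece of a can
     be expressed as BIT_FIELD_REF <x_1, size, offset> and value-numbered
     via vn_nary_build_or_lookup.  */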
2031   else if (ref->size == maxsize
2032 	   && is_gimple_reg_type (vr->type)
2033 	   && !contains_storage_order_barrier_p (vr->operands)
2034 	   && gimple_assign_single_p (def_stmt)
2035 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2036     {
2037       tree base2;
2038       HOST_WIDE_INT offset2, size2, maxsize2;
2039       bool reverse;
2040       base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2041 				       &offset2, &size2, &maxsize2,
2042 				       &reverse);
2043       tree def_rhs = gimple_assign_rhs1 (def_stmt);
2044       if (!reverse
2045 	  && maxsize2 != -1
2046 	  && maxsize2 == size2
2047 	  && operand_equal_p (base, base2, 0)
2048 	  && offset2 <= offset
2049 	  && offset2 + size2 >= offset + maxsize
2050 	  /* ???  We can't handle bitfield precision extracts without
2051 	     either using an alternate type for the BIT_FIELD_REF and
2052 	     then doing a conversion or possibly adjusting the offset
2053 	     according to endianness.  */
2054 	  && (! INTEGRAL_TYPE_P (vr->type)
2055 	      || ref->size == TYPE_PRECISION (vr->type))
2056 	  && ref->size % BITS_PER_UNIT == 0
2057 	  && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2058 	      || (TYPE_PRECISION (TREE_TYPE (def_rhs))
2059 		  == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (def_rhs))))))
2060 	{
2061 	  code_helper rcode = BIT_FIELD_REF;
2062 	  tree ops[3];
2063 	  ops[0] = SSA_VAL (def_rhs);
2064 	  ops[1] = bitsize_int (ref->size);
2065 	  ops[2] = bitsize_int (offset - offset2);
2066 	  tree val = vn_nary_build_or_lookup (rcode, vr->type, ops);
2067 	  if (val
2068 	      && (TREE_CODE (val) != SSA_NAME
2069 		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2070 	    {
2071 	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2072 		  (vuse, vr->set, vr->type, vr->operands, val);
2073 	      return res;
2074 	    }
2075 	}
2076     }
2077 
2078   /* 5) For aggregate copies translate the reference through them if
2079      the copy kills ref.  */
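  /* An illustrative example for case 5: after the aggregate copy
       a = b;
     a load from a piece of a is rewritten into the corresponding load
     from b and the VUSE walk continues with the translated reference.  */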
2080   else if (vn_walk_kind == VN_WALKREWRITE
2081 	   && gimple_assign_single_p (def_stmt)
2082 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
2083 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2084 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2085     {
2086       tree base2;
2087       HOST_WIDE_INT maxsize2;
2088       int i, j, k;
2089       auto_vec<vn_reference_op_s> rhs;
2090       vn_reference_op_t vro;
2091       ao_ref r;
2092 
2093       if (!lhs_ref_ok)
2094 	return (void *)-1;
2095 
2096       /* See if the assignment kills REF.  */
2097       base2 = ao_ref_base (&lhs_ref);
2098       maxsize2 = lhs_ref.max_size;
2099       if (maxsize2 == -1
2100 	  || (base != base2
2101 	      && (TREE_CODE (base) != MEM_REF
2102 		  || TREE_CODE (base2) != MEM_REF
2103 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2104 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2105 					  TREE_OPERAND (base2, 1))))
2106 	  || !stmt_kills_ref_p (def_stmt, ref))
2107 	return (void *)-1;
2108 
2109       /* Find the common base of ref and the lhs.  lhs_ops already
2110          contains valueized operands for the lhs.  */
2111       i = vr->operands.length () - 1;
2112       j = lhs_ops.length () - 1;
2113       while (j >= 0 && i >= 0
2114 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2115 	{
2116 	  i--;
2117 	  j--;
2118 	}
2119 
2120       /* ???  The innermost op should always be a MEM_REF and we already
2121          checked that the assignment to the lhs kills vr.  Thus for
2122 	 aggregate copies using char[] types the vn_reference_op_eq
2123 	 may fail when comparing types for compatibility.  But we really
2124 	 don't care here - further lookups with the rewritten operands
2125 	 will simply fail if we messed up types too badly.  */
2126       HOST_WIDE_INT extra_off = 0;
2127       if (j == 0 && i >= 0
2128 	  && lhs_ops[0].opcode == MEM_REF
2129 	  && lhs_ops[0].off != -1)
2130 	{
2131 	  if (lhs_ops[0].off == vr->operands[i].off)
2132 	    i--, j--;
2133 	  else if (vr->operands[i].opcode == MEM_REF
2134 		   && vr->operands[i].off != -1)
2135 	    {
2136 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
2137 	      i--, j--;
2138 	    }
2139 	}
2140 
2141       /* i now points to the first additional op.
2142 	 ???  LHS may not be completely contained in VR, one or more
2143 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
2144 	 try handling outermost VIEW_CONVERT_EXPRs.  */
2145       if (j != -1)
2146 	return (void *)-1;
2147 
2148       /* Punt if the additional ops contain a storage order barrier.  */
2149       for (k = i; k >= 0; k--)
2150 	{
2151 	  vro = &vr->operands[k];
2152 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2153 	    return (void *)-1;
2154 	}
2155 
2156       /* Now re-write REF to be based on the rhs of the assignment.  */
2157       copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2158 
2159       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
2160       if (extra_off != 0)
2161 	{
2162 	  if (rhs.length () < 2
2163 	      || rhs[0].opcode != MEM_REF
2164 	      || rhs[0].off == -1)
2165 	    return (void *)-1;
2166 	  rhs[0].off += extra_off;
2167 	  rhs[0].op0 = int_const_binop (PLUS_EXPR, rhs[0].op0,
2168 					build_int_cst (TREE_TYPE (rhs[0].op0),
2169 						       extra_off));
2170 	}
2171 
      /* We need to prepend vr->operands[0..i] to rhs.  */
2173       vec<vn_reference_op_s> old = vr->operands;
2174       if (i + 1 + rhs.length () > vr->operands.length ())
2175 	vr->operands.safe_grow (i + 1 + rhs.length ());
2176       else
2177 	vr->operands.truncate (i + 1 + rhs.length ());
2178       FOR_EACH_VEC_ELT (rhs, j, vro)
2179 	vr->operands[i + 1 + j] = *vro;
2180       vr->operands = valueize_refs (vr->operands);
2181       if (old == shared_lookup_references)
2182 	shared_lookup_references = vr->operands;
2183       vr->hashcode = vn_reference_compute_hash (vr);
2184 
2185       /* Try folding the new reference to a constant.  */
2186       tree val = fully_constant_vn_reference_p (vr);
2187       if (val)
2188 	return vn_reference_lookup_or_insert_for_pieces
2189 		 (vuse, vr->set, vr->type, vr->operands, val);
2190 
2191       /* Adjust *ref from the new operands.  */
2192       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2193 	return (void *)-1;
2194       /* This can happen with bitfields.  */
2195       if (ref->size != r.size)
2196 	return (void *)-1;
2197       *ref = r;
2198 
2199       /* Do not update last seen VUSE after translating.  */
2200       last_vuse_ptr = NULL;
2201 
2202       /* Keep looking for the adjusted *REF / VR pair.  */
2203       return NULL;
2204     }
2205 
2206   /* 6) For memcpy copies translate the reference through them if
2207      the copy kills ref.  */
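  /* An illustrative example for case 6: after
       memcpy (&a, &b, sizeof (a));
     a load from within a is rewritten into a MEM_REF based on &b at the
     corresponding offset and the VUSE walk continues with the
     translated reference.  */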
2208   else if (vn_walk_kind == VN_WALKREWRITE
2209 	   && is_gimple_reg_type (vr->type)
2210 	   /* ???  Handle BCOPY as well.  */
2211 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2212 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2213 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2214 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2215 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2216 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2217 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2218 	   && tree_fits_uhwi_p (gimple_call_arg (def_stmt, 2)))
2219     {
2220       tree lhs, rhs;
2221       ao_ref r;
2222       HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
2223       vn_reference_op_s op;
2224       HOST_WIDE_INT at;
2225 
2226       /* Only handle non-variable, addressable refs.  */
2227       if (ref->size != maxsize
2228 	  || offset % BITS_PER_UNIT != 0
2229 	  || ref->size % BITS_PER_UNIT != 0)
2230 	return (void *)-1;
2231 
2232       /* Extract a pointer base and an offset for the destination.  */
2233       lhs = gimple_call_arg (def_stmt, 0);
2234       lhs_offset = 0;
2235       if (TREE_CODE (lhs) == SSA_NAME)
2236 	{
2237 	  lhs = SSA_VAL (lhs);
2238 	  if (TREE_CODE (lhs) == SSA_NAME)
2239 	    {
2240 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2241 	      if (gimple_assign_single_p (def_stmt)
2242 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2243 		lhs = gimple_assign_rhs1 (def_stmt);
2244 	    }
2245 	}
2246       if (TREE_CODE (lhs) == ADDR_EXPR)
2247 	{
2248 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2249 						    &lhs_offset);
2250 	  if (!tem)
2251 	    return (void *)-1;
2252 	  if (TREE_CODE (tem) == MEM_REF
2253 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2254 	    {
2255 	      lhs = TREE_OPERAND (tem, 0);
2256 	      if (TREE_CODE (lhs) == SSA_NAME)
2257 		lhs = SSA_VAL (lhs);
2258 	      lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2259 	    }
2260 	  else if (DECL_P (tem))
2261 	    lhs = build_fold_addr_expr (tem);
2262 	  else
2263 	    return (void *)-1;
2264 	}
2265       if (TREE_CODE (lhs) != SSA_NAME
2266 	  && TREE_CODE (lhs) != ADDR_EXPR)
2267 	return (void *)-1;
2268 
2269       /* Extract a pointer base and an offset for the source.  */
2270       rhs = gimple_call_arg (def_stmt, 1);
2271       rhs_offset = 0;
2272       if (TREE_CODE (rhs) == SSA_NAME)
2273 	rhs = SSA_VAL (rhs);
2274       if (TREE_CODE (rhs) == ADDR_EXPR)
2275 	{
2276 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2277 						    &rhs_offset);
2278 	  if (!tem)
2279 	    return (void *)-1;
2280 	  if (TREE_CODE (tem) == MEM_REF
2281 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
2282 	    {
2283 	      rhs = TREE_OPERAND (tem, 0);
2284 	      rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
2285 	    }
2286 	  else if (DECL_P (tem))
2287 	    rhs = build_fold_addr_expr (tem);
2288 	  else
2289 	    return (void *)-1;
2290 	}
2291       if (TREE_CODE (rhs) != SSA_NAME
2292 	  && TREE_CODE (rhs) != ADDR_EXPR)
2293 	return (void *)-1;
2294 
2295       copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
2296 
      /* The bases of the destination and the reference have to agree.  */
2298       if ((TREE_CODE (base) != MEM_REF
2299 	   && !DECL_P (base))
2300 	  || (TREE_CODE (base) == MEM_REF
2301 	      && (TREE_OPERAND (base, 0) != lhs
2302 		  || !tree_fits_uhwi_p (TREE_OPERAND (base, 1))))
2303 	  || (DECL_P (base)
2304 	      && (TREE_CODE (lhs) != ADDR_EXPR
2305 		  || TREE_OPERAND (lhs, 0) != base)))
2306 	return (void *)-1;
2307 
2308       at = offset / BITS_PER_UNIT;
2309       if (TREE_CODE (base) == MEM_REF)
2310 	at += tree_to_uhwi (TREE_OPERAND (base, 1));
2311       /* If the access is completely outside of the memcpy destination
2312 	 area there is no aliasing.  */
2313       if (lhs_offset >= at + maxsize / BITS_PER_UNIT
2314 	  || lhs_offset + copy_size <= at)
2315 	return NULL;
2316       /* And the access has to be contained within the memcpy destination.  */
2317       if (lhs_offset > at
2318 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
2319 	return (void *)-1;
2320 
2321       /* Make room for 2 operands in the new reference.  */
2322       if (vr->operands.length () < 2)
2323 	{
2324 	  vec<vn_reference_op_s> old = vr->operands;
2325 	  vr->operands.safe_grow_cleared (2);
2326 	  if (old == shared_lookup_references)
2327 	    shared_lookup_references = vr->operands;
2328 	}
2329       else
2330 	vr->operands.truncate (2);
2331 
2332       /* The looked-through reference is a simple MEM_REF.  */
2333       memset (&op, 0, sizeof (op));
2334       op.type = vr->type;
2335       op.opcode = MEM_REF;
2336       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2337       op.off = at - lhs_offset + rhs_offset;
2338       vr->operands[0] = op;
2339       op.type = TREE_TYPE (rhs);
2340       op.opcode = TREE_CODE (rhs);
2341       op.op0 = rhs;
2342       op.off = -1;
2343       vr->operands[1] = op;
2344       vr->hashcode = vn_reference_compute_hash (vr);
2345 
2346       /* Try folding the new reference to a constant.  */
2347       tree val = fully_constant_vn_reference_p (vr);
2348       if (val)
2349 	return vn_reference_lookup_or_insert_for_pieces
2350 		 (vuse, vr->set, vr->type, vr->operands, val);
2351 
2352       /* Adjust *ref from the new operands.  */
2353       if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2354 	return (void *)-1;
2355       /* This can happen with bitfields.  */
2356       if (ref->size != r.size)
2357 	return (void *)-1;
2358       *ref = r;
2359 
2360       /* Do not update last seen VUSE after translating.  */
2361       last_vuse_ptr = NULL;
2362 
2363       /* Keep looking for the adjusted *REF / VR pair.  */
2364       return NULL;
2365     }
2366 
2367   /* Bail out and stop walking.  */
2368   return (void *)-1;
2369 }
2370 
2371 /* Return a reference op vector from OP that can be used for
2372    vn_reference_lookup_pieces.  The caller is responsible for releasing
2373    the vector.  */
2374 
2375 vec<vn_reference_op_s>
2376 vn_reference_operands_for_lookup (tree op)
2377 {
2378   bool valueized;
2379   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2380 }
2381 
/* Lookup a reference operation by its parts, in the current hash table.
2383    Returns the resulting value number if it exists in the hash table,
2384    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
2385    vn_reference_t stored in the hashtable if something is found.  */
2386 
2387 tree
2388 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2389 			    vec<vn_reference_op_s> operands,
2390 			    vn_reference_t *vnresult, vn_lookup_kind kind)
2391 {
2392   struct vn_reference_s vr1;
2393   vn_reference_t tmp;
2394   tree cst;
2395 
2396   if (!vnresult)
2397     vnresult = &tmp;
2398   *vnresult = NULL;
2399 
2400   vr1.vuse = vuse_ssa_val (vuse);
2401   shared_lookup_references.truncate (0);
2402   shared_lookup_references.safe_grow (operands.length ());
2403   memcpy (shared_lookup_references.address (),
2404 	  operands.address (),
2405 	  sizeof (vn_reference_op_s)
2406 	  * operands.length ());
2407   vr1.operands = operands = shared_lookup_references
2408     = valueize_refs (shared_lookup_references);
2409   vr1.type = type;
2410   vr1.set = set;
2411   vr1.hashcode = vn_reference_compute_hash (&vr1);
2412   if ((cst = fully_constant_vn_reference_p (&vr1)))
2413     return cst;
2414 
2415   vn_reference_lookup_1 (&vr1, vnresult);
2416   if (!*vnresult
2417       && kind != VN_NOWALK
2418       && vr1.vuse)
2419     {
2420       ao_ref r;
2421       vn_walk_kind = kind;
2422       if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2423 	*vnresult =
2424 	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2425 						  vn_reference_lookup_2,
2426 						  vn_reference_lookup_3,
2427 						  vuse_ssa_val, &vr1);
2428       gcc_checking_assert (vr1.operands == shared_lookup_references);
2429     }
2430 
2431   if (*vnresult)
2432      return (*vnresult)->result;
2433 
2434   return NULL_TREE;
2435 }
2436 
2437 /* Lookup OP in the current hash table, and return the resulting value
2438    number if it exists in the hash table.  Return NULL_TREE if it does
2439    not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
2441    stored in the hashtable if one exists.  When TBAA_P is false assume
2442    we are looking up a store and treat it as having alias-set zero.  */
2443 
2444 tree
2445 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2446 		     vn_reference_t *vnresult, bool tbaa_p)
2447 {
2448   vec<vn_reference_op_s> operands;
2449   struct vn_reference_s vr1;
2450   tree cst;
  bool valueized_anything;
2452 
2453   if (vnresult)
2454     *vnresult = NULL;
2455 
2456   vr1.vuse = vuse_ssa_val (vuse);
2457   vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2459   vr1.type = TREE_TYPE (op);
2460   vr1.set = tbaa_p ? get_alias_set (op) : 0;
2461   vr1.hashcode = vn_reference_compute_hash (&vr1);
2462   if ((cst = fully_constant_vn_reference_p (&vr1)))
2463     return cst;
2464 
2465   if (kind != VN_NOWALK
2466       && vr1.vuse)
2467     {
2468       vn_reference_t wvnresult;
2469       ao_ref r;
2470       /* Make sure to use a valueized reference if we valueized anything.
2471          Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
2473 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2474 					     vr1.operands))
2475 	ao_ref_init (&r, op);
2476       if (! tbaa_p)
2477 	r.ref_alias_set = r.base_alias_set = 0;
2478       vn_walk_kind = kind;
2479       wvnresult =
2480 	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2481 						vn_reference_lookup_2,
2482 						vn_reference_lookup_3,
2483 						vuse_ssa_val, &vr1);
2484       gcc_checking_assert (vr1.operands == shared_lookup_references);
2485       if (wvnresult)
2486 	{
2487 	  if (vnresult)
2488 	    *vnresult = wvnresult;
2489 	  return wvnresult->result;
2490 	}
2491 
2492       return NULL_TREE;
2493     }
2494 
2495   return vn_reference_lookup_1 (&vr1, vnresult);
2496 }
2497 
2498 /* Lookup CALL in the current hash table and return the entry in
2499    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
2500 
2501 void
2502 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2503 			  vn_reference_t vr)
2504 {
2505   if (vnresult)
2506     *vnresult = NULL;
2507 
2508   tree vuse = gimple_vuse (call);
2509 
2510   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2511   vr->operands = valueize_shared_reference_ops_from_call (call);
2512   vr->type = gimple_expr_type (call);
2513   vr->set = 0;
2514   vr->hashcode = vn_reference_compute_hash (vr);
2515   vn_reference_lookup_1 (vr, vnresult);
2516 }
2517 
2518 /* Insert OP into the current hash table with a value number of
2519    RESULT, and return the resulting reference structure we created.  */
2520 
2521 static vn_reference_t
2522 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2523 {
2524   vn_reference_s **slot;
2525   vn_reference_t vr1;
2526   bool tem;
2527 
2528   vr1 = current_info->references_pool->allocate ();
2529   if (TREE_CODE (result) == SSA_NAME)
2530     vr1->value_id = VN_INFO (result)->value_id;
2531   else
2532     vr1->value_id = get_or_alloc_constant_value_id (result);
2533   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2534   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2535   vr1->type = TREE_TYPE (op);
2536   vr1->set = get_alias_set (op);
2537   vr1->hashcode = vn_reference_compute_hash (vr1);
2538   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2539   vr1->result_vdef = vdef;
2540 
2541   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2542 							INSERT);
2543 
  /* Because we look up stores using vuses, and value number failures
2545      using the vdefs (see visit_reference_op_store for how and why),
2546      it's possible that on failure we may try to insert an already
2547      inserted store.  This is not wrong, there is no ssa name for a
2548      store that we could use as a differentiator anyway.  Thus, unlike
2549      the other lookup functions, you cannot gcc_assert (!*slot)
2550      here.  */
2551 
2552   /* But free the old slot in case of a collision.  */
2553   if (*slot)
2554     free_reference (*slot);
2555 
2556   *slot = vr1;
2557   return vr1;
2558 }
2559 
/* Insert a reference by its pieces into the current hash table with
2561    a value number of RESULT.  Return the resulting reference
2562    structure we created.  */
2563 
2564 vn_reference_t
2565 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2566 			    vec<vn_reference_op_s> operands,
2567 			    tree result, unsigned int value_id)
2568 
2569 {
2570   vn_reference_s **slot;
2571   vn_reference_t vr1;
2572 
2573   vr1 = current_info->references_pool->allocate ();
2574   vr1->value_id = value_id;
2575   vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2576   vr1->operands = valueize_refs (operands);
2577   vr1->type = type;
2578   vr1->set = set;
2579   vr1->hashcode = vn_reference_compute_hash (vr1);
2580   if (result && TREE_CODE (result) == SSA_NAME)
2581     result = SSA_VAL (result);
2582   vr1->result = result;
2583 
2584   slot = current_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2585 							INSERT);
2586 
2587   /* At this point we should have all the things inserted that we have
2588      seen before, and we should never try inserting something that
2589      already exists.  */
2590   gcc_assert (!*slot);
2591   if (*slot)
2592     free_reference (*slot);
2593 
2594   *slot = vr1;
2595   return vr1;
2596 }
2597 
/* Compute and return the hash value for nary operation VNO1.  */
2599 
2600 static hashval_t
2601 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2602 {
2603   inchash::hash hstate;
2604   unsigned i;
2605 
2606   for (i = 0; i < vno1->length; ++i)
2607     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2608       vno1->op[i] = SSA_VAL (vno1->op[i]);
2609 
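  /* Canonicalize the operand order of commutative operations and of
     comparisons so that e.g. a + b and b + a (or a < b and b > a)
     receive the same hash and compare equal in vn_nary_op_eq.  */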
2610   if (((vno1->length == 2
2611 	&& commutative_tree_code (vno1->opcode))
2612        || (vno1->length == 3
2613 	   && commutative_ternary_tree_code (vno1->opcode)))
2614       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2615     std::swap (vno1->op[0], vno1->op[1]);
2616   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2617 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2618     {
2619       std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
2621     }
2622 
2623   hstate.add_int (vno1->opcode);
2624   for (i = 0; i < vno1->length; ++i)
2625     inchash::add_expr (vno1->op[i], hstate);
2626 
2627   return hstate.end ();
2628 }
2629 
2630 /* Compare nary operations VNO1 and VNO2 and return true if they are
2631    equivalent.  */
2632 
2633 bool
2634 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2635 {
2636   unsigned i;
2637 
2638   if (vno1->hashcode != vno2->hashcode)
2639     return false;
2640 
2641   if (vno1->length != vno2->length)
2642     return false;
2643 
2644   if (vno1->opcode != vno2->opcode
2645       || !types_compatible_p (vno1->type, vno2->type))
2646     return false;
2647 
2648   for (i = 0; i < vno1->length; ++i)
2649     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2650       return false;
2651 
2652   return true;
2653 }
2654 
2655 /* Initialize VNO from the pieces provided.  */
2656 
2657 static void
2658 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2659 			     enum tree_code code, tree type, tree *ops)
2660 {
2661   vno->opcode = code;
2662   vno->length = length;
2663   vno->type = type;
2664   memcpy (&vno->op[0], ops, sizeof (tree) * length);
2665 }
2666 
2667 /* Initialize VNO from OP.  */
2668 
2669 static void
2670 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2671 {
2672   unsigned i;
2673 
2674   vno->opcode = TREE_CODE (op);
2675   vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2676   vno->type = TREE_TYPE (op);
2677   for (i = 0; i < vno->length; ++i)
2678     vno->op[i] = TREE_OPERAND (op, i);
2679 }
2680 
2681 /* Return the number of operands for a vn_nary ops structure from STMT.  */
2682 
2683 static unsigned int
2684 vn_nary_length_from_stmt (gimple *stmt)
2685 {
2686   switch (gimple_assign_rhs_code (stmt))
2687     {
2688     case REALPART_EXPR:
2689     case IMAGPART_EXPR:
2690     case VIEW_CONVERT_EXPR:
2691       return 1;
2692 
2693     case BIT_FIELD_REF:
2694       return 3;
2695 
2696     case CONSTRUCTOR:
2697       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2698 
2699     default:
2700       return gimple_num_ops (stmt) - 1;
2701     }
2702 }
2703 
2704 /* Initialize VNO from STMT.  */
2705 
2706 static void
2707 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2708 {
2709   unsigned i;
2710 
2711   vno->opcode = gimple_assign_rhs_code (stmt);
2712   vno->type = gimple_expr_type (stmt);
2713   switch (vno->opcode)
2714     {
2715     case REALPART_EXPR:
2716     case IMAGPART_EXPR:
2717     case VIEW_CONVERT_EXPR:
2718       vno->length = 1;
2719       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2720       break;
2721 
2722     case BIT_FIELD_REF:
2723       vno->length = 3;
2724       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2725       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2726       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2727       break;
2728 
2729     case CONSTRUCTOR:
2730       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2731       for (i = 0; i < vno->length; ++i)
2732 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2733       break;
2734 
2735     default:
2736       gcc_checking_assert (!gimple_assign_single_p (stmt));
2737       vno->length = gimple_num_ops (stmt) - 1;
2738       for (i = 0; i < vno->length; ++i)
2739 	vno->op[i] = gimple_op (stmt, i + 1);
2740     }
2741 }
2742 
2743 /* Compute the hashcode for VNO and look for it in the hash table;
2744    return the resulting value number if it exists in the hash table.
2745    Return NULL_TREE if it does not exist in the hash table or if the
2746    result field of the operation is NULL.  VNRESULT will contain the
2747    vn_nary_op_t from the hashtable if it exists.  */
2748 
2749 static tree
2750 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2751 {
2752   vn_nary_op_s **slot;
2753 
2754   if (vnresult)
2755     *vnresult = NULL;
2756 
2757   vno->hashcode = vn_nary_op_compute_hash (vno);
2758   slot = current_info->nary->find_slot_with_hash (vno, vno->hashcode,
2759 						  NO_INSERT);
2760   if (!slot && current_info == optimistic_info)
2761     slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode,
2762 						  NO_INSERT);
2763   if (!slot)
2764     return NULL_TREE;
2765   if (vnresult)
2766     *vnresult = *slot;
2767   return (*slot)->result;
2768 }
2769 
2770 /* Lookup a n-ary operation by its pieces and return the resulting value
2771    number if it exists in the hash table.  Return NULL_TREE if it does
2772    not exist in the hash table or if the result field of the operation
2773    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2774    if it exists.  */
2775 
2776 tree
2777 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2778 			  tree type, tree *ops, vn_nary_op_t *vnresult)
2779 {
2780   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2781 				  sizeof_vn_nary_op (length));
2782   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2783   return vn_nary_op_lookup_1 (vno1, vnresult);
2784 }
2785 
2786 /* Lookup OP in the current hash table, and return the resulting value
2787    number if it exists in the hash table.  Return NULL_TREE if it does
2788    not exist in the hash table or if the result field of the operation
2789    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2790    if it exists.  */
2791 
2792 tree
2793 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2794 {
2795   vn_nary_op_t vno1
2796     = XALLOCAVAR (struct vn_nary_op_s,
2797 		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2798   init_vn_nary_op_from_op (vno1, op);
2799   return vn_nary_op_lookup_1 (vno1, vnresult);
2800 }
2801 
2802 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2803    value number if it exists in the hash table.  Return NULL_TREE if
2804    it does not exist in the hash table.  VNRESULT will contain the
2805    vn_nary_op_t from the hashtable if it exists.  */
2806 
2807 tree
2808 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2809 {
2810   vn_nary_op_t vno1
2811     = XALLOCAVAR (struct vn_nary_op_s,
2812 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2813   init_vn_nary_op_from_stmt (vno1, stmt);
2814   return vn_nary_op_lookup_1 (vno1, vnresult);
2815 }
2816 
2817 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
2818 
2819 static vn_nary_op_t
2820 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2821 {
2822   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2823 }
2824 
2825 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2826    obstack.  */
2827 
2828 static vn_nary_op_t
2829 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2830 {
2831   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2832 					       &current_info->nary_obstack);
2833 
2834   vno1->value_id = value_id;
2835   vno1->length = length;
2836   vno1->result = result;
2837 
2838   return vno1;
2839 }
2840 
2841 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
2842    VNO->HASHCODE first.  */
2843 
2844 static vn_nary_op_t
2845 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
2846 			bool compute_hash)
2847 {
2848   vn_nary_op_s **slot;
2849 
2850   if (compute_hash)
2851     vno->hashcode = vn_nary_op_compute_hash (vno);
2852 
2853   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
2854   /* While we do not want to insert things twice it's awkward to
2855      avoid it in the case where visit_nary_op pattern-matches stuff
2856      and ends up simplifying the replacement to itself.  We then
2857      get two inserts, one from visit_nary_op and one from
2858      vn_nary_build_or_lookup.
2859      So allow inserts with the same value number.  */
2860   if (*slot && (*slot)->result == vno->result)
2861     return *slot;
2862 
2863   gcc_assert (!*slot);
2864 
2865   *slot = vno;
2866   return vno;
2867 }
2868 
/* Insert an n-ary operation into the current hash table using its
2870    pieces.  Return the vn_nary_op_t structure we created and put in
2871    the hashtable.  */
2872 
2873 vn_nary_op_t
2874 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2875 			  tree type, tree *ops,
2876 			  tree result, unsigned int value_id)
2877 {
2878   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2879   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2880   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2881 }
2882 
2883 /* Insert OP into the current hash table with a value number of
2884    RESULT.  Return the vn_nary_op_t structure we created and put in
2885    the hashtable.  */
2886 
2887 vn_nary_op_t
2888 vn_nary_op_insert (tree op, tree result)
2889 {
2890   unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2891   vn_nary_op_t vno1;
2892 
2893   vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2894   init_vn_nary_op_from_op (vno1, op);
2895   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2896 }
2897 
2898 /* Insert the rhs of STMT into the current hash table with a value number of
2899    RESULT.  */
2900 
2901 static vn_nary_op_t
2902 vn_nary_op_insert_stmt (gimple *stmt, tree result)
2903 {
2904   vn_nary_op_t vno1
2905     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2906 			result, VN_INFO (result)->value_id);
2907   init_vn_nary_op_from_stmt (vno1, stmt);
2908   return vn_nary_op_insert_into (vno1, current_info->nary, true);
2909 }
2910 
2911 /* Compute a hashcode for PHI operation VP1 and return it.  */
2912 
2913 static inline hashval_t
2914 vn_phi_compute_hash (vn_phi_t vp1)
2915 {
2916   inchash::hash hstate (vp1->phiargs.length () > 2
2917 			? vp1->block->index : vp1->phiargs.length ());
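  /* PHIs with more than two arguments are only ever considered equal
     within the same block (see vn_phi_eq), so hash them with the block
     index; one- and two-argument PHIs may match across blocks and are
     hashed with the argument count instead.  */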
2918   tree phi1op;
2919   tree type;
2920   edge e;
2921   edge_iterator ei;
2922 
2923   /* If all PHI arguments are constants we need to distinguish
2924      the PHI node via its type.  */
2925   type = vp1->type;
2926   hstate.merge_hash (vn_hash_type (type));
2927 
2928   FOR_EACH_EDGE (e, ei, vp1->block->preds)
2929     {
      /* Don't hash backedge values; they need to be handled as VN_TOP
         for optimistic value-numbering.  */
2932       if (e->flags & EDGE_DFS_BACK)
2933 	continue;
2934 
2935       phi1op = vp1->phiargs[e->dest_idx];
2936       if (phi1op == VN_TOP)
2937 	continue;
2938       inchash::add_expr (phi1op, hstate);
2939     }
2940 
2941   return hstate.end ();
2942 }
2943 
2944 
2945 /* Return true if COND1 and COND2 represent the same condition, set
2946    *INVERTED_P if one needs to be inverted to make it the same as
2947    the other.  */
2948 
2949 static bool
2950 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
2951 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
2952 {
2953   enum tree_code code1 = gimple_cond_code (cond1);
2954   enum tree_code code2 = gimple_cond_code (cond2);
2955 
2956   *inverted_p = false;
2957   if (code1 == code2)
2958     ;
2959   else if (code1 == swap_tree_comparison (code2))
2960     std::swap (lhs2, rhs2);
2961   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
2962     *inverted_p = true;
2963   else if (code1 == invert_tree_comparison
2964 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
2965     {
2966       std::swap (lhs2, rhs2);
2967       *inverted_p = true;
2968     }
2969   else
2970     return false;
2971 
2972   return ((expressions_equal_p (lhs1, lhs2)
2973 	   && expressions_equal_p (rhs1, rhs2))
2974 	  || (commutative_tree_code (code1)
2975 	      && expressions_equal_p (lhs1, rhs2)
2976 	      && expressions_equal_p (rhs1, lhs2)));
2977 }
2978 
2979 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
2980 
2981 static int
2982 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
2983 {
2984   if (vp1->hashcode != vp2->hashcode)
2985     return false;
2986 
2987   if (vp1->block != vp2->block)
2988     {
2989       if (vp1->phiargs.length () != vp2->phiargs.length ())
2990 	return false;
2991 
2992       switch (vp1->phiargs.length ())
2993 	{
2994 	case 1:
2995 	  /* Single-arg PHIs are just copies.  */
2996 	  break;
2997 
2998 	case 2:
2999 	  {
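	    /* Two-argument PHIs in different blocks can still be
	       equivalent when both blocks are immediately dominated by
	       equivalent (possibly inverted) conditions and the
	       arguments on the corresponding true/false edges match,
	       which is what the code below verifies.  */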
3000 	    /* Rule out backedges into the PHI.  */
3001 	    if (vp1->block->loop_father->header == vp1->block
3002 		|| vp2->block->loop_father->header == vp2->block)
3003 	      return false;
3004 
3005 	    /* If the PHI nodes do not have compatible types
3006 	       they are not the same.  */
3007 	    if (!types_compatible_p (vp1->type, vp2->type))
3008 	      return false;
3009 
3010 	    basic_block idom1
3011 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3012 	    basic_block idom2
3013 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
3017 	    if (EDGE_COUNT (idom1->succs) != 2
3018 		|| EDGE_COUNT (idom2->succs) != 2)
3019 	      return false;
3020 
3021 	    /* Verify the controlling stmt is the same.  */
3022 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3023 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3024 	    if (! last1 || ! last2)
3025 	      return false;
3026 	    bool inverted_p;
3027 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3028 				      last2, vp2->cclhs, vp2->ccrhs,
3029 				      &inverted_p))
3030 	      return false;
3031 
3032 	    /* Get at true/false controlled edges into the PHI.  */
3033 	    edge te1, te2, fe1, fe2;
3034 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
3035 						       &te1, &fe1)
3036 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
3037 							  &te2, &fe2))
3038 	      return false;
3039 
3040 	    /* Swap edges if the second condition is the inverted of the
3041 	       first.  */
3042 	    if (inverted_p)
3043 	      std::swap (te2, fe2);
3044 
3045 	    /* ???  Handle VN_TOP specially.  */
3046 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3047 				       vp2->phiargs[te2->dest_idx])
3048 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3049 					  vp2->phiargs[fe2->dest_idx]))
3050 	      return false;
3051 
3052 	    return true;
3053 	  }
3054 
3055 	default:
3056 	  return false;
3057 	}
3058     }
3059 
3060   /* If the PHI nodes do not have compatible types
3061      they are not the same.  */
3062   if (!types_compatible_p (vp1->type, vp2->type))
3063     return false;
3064 
  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
3067   int i;
3068   tree phi1op;
3069   FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
3070     {
3071       tree phi2op = vp2->phiargs[i];
3072       if (phi1op == VN_TOP || phi2op == VN_TOP)
3073 	continue;
3074       if (!expressions_equal_p (phi1op, phi2op))
3075 	return false;
3076     }
3077 
3078   return true;
3079 }
3080 
3081 static vec<tree> shared_lookup_phiargs;
3082 
3083 /* Lookup PHI in the current hash table, and return the resulting
3084    value number if it exists in the hash table.  Return NULL_TREE if
3085    it does not exist in the hash table. */
3086 
3087 static tree
3088 vn_phi_lookup (gimple *phi)
3089 {
3090   vn_phi_s **slot;
3091   struct vn_phi_s vp1;
3092   edge e;
3093   edge_iterator ei;
3094 
3095   shared_lookup_phiargs.truncate (0);
3096   shared_lookup_phiargs.safe_grow (gimple_phi_num_args (phi));
3097 
3098   /* Canonicalize the SSA_NAME's to their value number.  */
3099   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3100     {
3101       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3102       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3103       shared_lookup_phiargs[e->dest_idx] = def;
3104     }
3105   vp1.type = TREE_TYPE (gimple_phi_result (phi));
3106   vp1.phiargs = shared_lookup_phiargs;
3107   vp1.block = gimple_bb (phi);
3108   /* Extract values of the controlling condition.  */
3109   vp1.cclhs = NULL_TREE;
3110   vp1.ccrhs = NULL_TREE;
3111   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1.block);
3112   if (EDGE_COUNT (idom1->succs) == 2)
3113     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3114       {
3115 	vp1.cclhs = vn_valueize (gimple_cond_lhs (last1));
3116 	vp1.ccrhs = vn_valueize (gimple_cond_rhs (last1));
3117       }
3118   vp1.hashcode = vn_phi_compute_hash (&vp1);
3119   slot = current_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3120 						  NO_INSERT);
3121   if (!slot && current_info == optimistic_info)
3122     slot = valid_info->phis->find_slot_with_hash (&vp1, vp1.hashcode,
3123 						  NO_INSERT);
3124   if (!slot)
3125     return NULL_TREE;
3126   return (*slot)->result;
3127 }
3128 
3129 /* Insert PHI into the current hash table with a value number of
3130    RESULT.  */
3131 
3132 static vn_phi_t
3133 vn_phi_insert (gimple *phi, tree result)
3134 {
3135   vn_phi_s **slot;
3136   vn_phi_t vp1 = current_info->phis_pool->allocate ();
3137   vec<tree> args = vNULL;
3138   edge e;
3139   edge_iterator ei;
3140 
3141   args.safe_grow (gimple_phi_num_args (phi));
3142 
3143   /* Canonicalize the SSA_NAME's to their value number.  */
3144   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3145     {
3146       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3147       def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
3148       args[e->dest_idx] = def;
3149     }
3150   vp1->value_id = VN_INFO (result)->value_id;
3151   vp1->type = TREE_TYPE (gimple_phi_result (phi));
3152   vp1->phiargs = args;
3153   vp1->block = gimple_bb (phi);
3154   /* Extract values of the controlling condition.  */
3155   vp1->cclhs = NULL_TREE;
3156   vp1->ccrhs = NULL_TREE;
3157   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3158   if (EDGE_COUNT (idom1->succs) == 2)
3159     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3160       {
3161 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3162 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3163       }
3164   vp1->result = result;
3165   vp1->hashcode = vn_phi_compute_hash (vp1);
3166 
3167   slot = current_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3168 
3169   /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
3171   *slot = vp1;
3172   return vp1;
3173 }
3174 
3175 
3176 /* Print set of components in strongly connected component SCC to OUT. */
3177 
3178 static void
3179 print_scc (FILE *out, vec<tree> scc)
3180 {
3181   tree var;
3182   unsigned int i;
3183 
3184   fprintf (out, "SCC consists of:");
3185   FOR_EACH_VEC_ELT (scc, i, var)
3186     {
3187       fprintf (out, " ");
3188       print_generic_expr (out, var, 0);
3189     }
3190   fprintf (out, "\n");
3191 }
3192 
3193 /* Return true if BB1 is dominated by BB2 taking into account edges
3194    that are not executable.  */
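/* If the plain dominance query fails we step BB1 to its single
   executable predecessor and BB2 to its single executable successor
   (when they are unique) and re-do the query, since edges that are not
   executable do not provide alternative paths.  */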
3195 
3196 static bool
3197 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3198 {
3199   edge_iterator ei;
3200   edge e;
3201 
3202   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3203     return true;
3204 
  /* Before iterating we'd like to know if there exists an
     (executable) path from bb2 to bb1 at all; if not we can
     directly return false.  For now simply iterate once.  */
3208 
3209   /* Iterate to the single executable bb1 predecessor.  */
3210   if (EDGE_COUNT (bb1->preds) > 1)
3211     {
3212       edge prede = NULL;
3213       FOR_EACH_EDGE (e, ei, bb1->preds)
3214 	if (e->flags & EDGE_EXECUTABLE)
3215 	  {
3216 	    if (prede)
3217 	      {
3218 		prede = NULL;
3219 		break;
3220 	      }
3221 	    prede = e;
3222 	  }
3223       if (prede)
3224 	{
3225 	  bb1 = prede->src;
3226 
3227 	  /* Re-do the dominance check with changed bb1.  */
3228 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3229 	    return true;
3230 	}
3231     }
3232 
3233   /* Iterate to the single executable bb2 successor.  */
3234   edge succe = NULL;
3235   FOR_EACH_EDGE (e, ei, bb2->succs)
3236     if (e->flags & EDGE_EXECUTABLE)
3237       {
3238 	if (succe)
3239 	  {
3240 	    succe = NULL;
3241 	    break;
3242 	  }
3243 	succe = e;
3244       }
3245   if (succe)
3246     {
      /* Verify the reached block is only reached through succe.
	 If there is only one edge we can spare ourselves the dominator
	 check and iterate directly.  */
3250       if (EDGE_COUNT (succe->dest->preds) > 1)
3251 	{
3252 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
3253 	    if (e != succe
3254 		&& (e->flags & EDGE_EXECUTABLE))
3255 	      {
3256 		succe = NULL;
3257 		break;
3258 	      }
3259 	}
3260       if (succe)
3261 	{
3262 	  bb2 = succe->dest;
3263 
3264 	  /* Re-do the dominance check with changed bb2.  */
3265 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3266 	    return true;
3267 	}
3268     }
3269 
3270   /* We could now iterate updating bb1 / bb2.  */
3271   return false;
3272 }
3273 
/* Set the value number of FROM to TO; return true if it has changed
   as a result.  */
3276 
3277 static inline bool
3278 set_ssa_val_to (tree from, tree to)
3279 {
3280   tree currval = SSA_VAL (from);
3281   HOST_WIDE_INT toff, coff;
3282 
  /* The only things we allow as value numbers are ssa_names
3284      and invariants.  So assert that here.  We don't allow VN_TOP
3285      as visiting a stmt should produce a value-number other than
3286      that.
3287      ???  Still VN_TOP can happen for unreachable code, so force
3288      it to varying in that case.  Not all code is prepared to
3289      get VN_TOP on valueization.  */
3290   if (to == VN_TOP)
3291     {
3292       if (dump_file && (dump_flags & TDF_DETAILS))
3293 	fprintf (dump_file, "Forcing value number to varying on "
3294 		 "receiving VN_TOP\n");
3295       to = from;
3296     }
3297 
3298   gcc_assert (to != NULL_TREE
3299 	      && ((TREE_CODE (to) == SSA_NAME
3300 		   && (to == from || SSA_VAL (to) == to))
3301 		  || is_gimple_min_invariant (to)));
3302 
3303   if (from != to)
3304     {
3305       if (currval == from)
3306 	{
3307 	  if (dump_file && (dump_flags & TDF_DETAILS))
3308 	    {
3309 	      fprintf (dump_file, "Not changing value number of ");
3310 	      print_generic_expr (dump_file, from, 0);
3311 	      fprintf (dump_file, " from VARYING to ");
3312 	      print_generic_expr (dump_file, to, 0);
3313 	      fprintf (dump_file, "\n");
3314 	    }
3315 	  return false;
3316 	}
3317       else if (currval != VN_TOP
3318 	       && ! is_gimple_min_invariant (currval)
3319 	       && is_gimple_min_invariant (to))
3320 	{
3321 	  if (dump_file && (dump_flags & TDF_DETAILS))
3322 	    {
3323 	      fprintf (dump_file, "Forcing VARYING instead of changing "
3324 		       "value number of ");
3325 	      print_generic_expr (dump_file, from, 0);
3326 	      fprintf (dump_file, " from ");
3327 	      print_generic_expr (dump_file, currval, 0);
3328 	      fprintf (dump_file, " (non-constant) to ");
3329 	      print_generic_expr (dump_file, to, 0);
3330 	      fprintf (dump_file, " (constant)\n");
3331 	    }
3332 	  to = from;
3333 	}
3334       else if (TREE_CODE (to) == SSA_NAME
3335 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3336 	to = from;
3337     }
3338 
3339   if (dump_file && (dump_flags & TDF_DETAILS))
3340     {
3341       fprintf (dump_file, "Setting value number of ");
3342       print_generic_expr (dump_file, from, 0);
3343       fprintf (dump_file, " to ");
3344       print_generic_expr (dump_file, to, 0);
3345     }
3346 
3347   if (currval != to
3348       && !operand_equal_p (currval, to, 0)
3349       /* ???  For addresses involving volatile objects or types operand_equal_p
3350          does not reliably detect ADDR_EXPRs as equal.  We know we are only
3351 	 getting invariant gimple addresses here, so can use
3352 	 get_addr_base_and_unit_offset to do this comparison.  */
3353       && !(TREE_CODE (currval) == ADDR_EXPR
3354 	   && TREE_CODE (to) == ADDR_EXPR
3355 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3356 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3357 	   && coff == toff))
3358     {
3359       /* If we equate two SSA names we have to make the side-band info
3360          of the leader conservative (and remember whatever original value
3361 	 was present).  */
3362       if (TREE_CODE (to) == SSA_NAME)
3363 	{
3364 	  if (INTEGRAL_TYPE_P (TREE_TYPE (to))
3365 	      && SSA_NAME_RANGE_INFO (to))
3366 	    {
3367 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3368 		  || dominated_by_p_w_unex
3369 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3370 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3371 		/* Keep the info from the dominator.  */
3372 		;
3373 	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
3374 		       || dominated_by_p_w_unex
3375 			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
3376 			     gimple_bb (SSA_NAME_DEF_STMT (from))))
3377 		{
3378 		  /* Save old info.  */
3379 		  if (! VN_INFO (to)->info.range_info)
3380 		    {
3381 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3382 		      VN_INFO (to)->range_info_anti_range_p
3383 			= SSA_NAME_ANTI_RANGE_P (to);
3384 		    }
3385 		  /* Use that from the dominator.  */
3386 		  SSA_NAME_RANGE_INFO (to) = SSA_NAME_RANGE_INFO (from);
3387 		  SSA_NAME_ANTI_RANGE_P (to) = SSA_NAME_ANTI_RANGE_P (from);
3388 		}
3389 	      else
3390 		{
3391 		  /* Save old info.  */
3392 		  if (! VN_INFO (to)->info.range_info)
3393 		    {
3394 		      VN_INFO (to)->info.range_info = SSA_NAME_RANGE_INFO (to);
3395 		      VN_INFO (to)->range_info_anti_range_p
3396 			= SSA_NAME_ANTI_RANGE_P (to);
3397 		    }
3398 		  /* Rather than allocating memory and unioning the info
3399 		     just clear it.  */
3400 		  SSA_NAME_RANGE_INFO (to) = NULL;
3401 		}
3402 	    }
3403 	  else if (POINTER_TYPE_P (TREE_TYPE (to))
3404 		   && SSA_NAME_PTR_INFO (to))
3405 	    {
3406 	      if (SSA_NAME_IS_DEFAULT_DEF (to)
3407 		  || dominated_by_p_w_unex
3408 			(gimple_bb (SSA_NAME_DEF_STMT (from)),
3409 			 gimple_bb (SSA_NAME_DEF_STMT (to))))
3410 		/* Keep the info from the dominator.  */
3411 		;
3412 	      else if (SSA_NAME_IS_DEFAULT_DEF (from)
3413 		       || dominated_by_p_w_unex
3414 			    (gimple_bb (SSA_NAME_DEF_STMT (to)),
3415 			     gimple_bb (SSA_NAME_DEF_STMT (from))))
3416 		{
3417 		  /* Save old info.  */
3418 		  if (! VN_INFO (to)->info.ptr_info)
3419 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3420 		  /* Use that from the dominator.  */
3421 		  SSA_NAME_PTR_INFO (to) = SSA_NAME_PTR_INFO (from);
3422 		}
3423 	      else if (! SSA_NAME_PTR_INFO (from)
3424 		       /* Handle the case of trivially equivalent info.  */
3425 		       || memcmp (SSA_NAME_PTR_INFO (to),
3426 				  SSA_NAME_PTR_INFO (from),
3427 				  sizeof (ptr_info_def)) != 0)
3428 		{
3429 		  /* Save old info.  */
3430 		  if (! VN_INFO (to)->info.ptr_info)
3431 		    VN_INFO (to)->info.ptr_info = SSA_NAME_PTR_INFO (to);
3432 		  /* Rather than allocating memory and unioning the info
3433 		     just clear it.  */
3434 		  SSA_NAME_PTR_INFO (to) = NULL;
3435 		}
3436 	    }
3437 	}
3438 
3439       VN_INFO (from)->valnum = to;
3440       if (dump_file && (dump_flags & TDF_DETAILS))
3441 	fprintf (dump_file, " (changed)\n");
3442       return true;
3443     }
3444   if (dump_file && (dump_flags & TDF_DETAILS))
3445     fprintf (dump_file, "\n");
3446   return false;
3447 }
3448 
3449 /* Mark as processed all the definitions in the defining stmt of USE, or
3450    the USE itself.  */
3451 
3452 static void
3453 mark_use_processed (tree use)
3454 {
3455   ssa_op_iter iter;
3456   def_operand_p defp;
3457   gimple *stmt = SSA_NAME_DEF_STMT (use);
3458 
3459   if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
3460     {
3461       VN_INFO (use)->use_processed = true;
3462       return;
3463     }
3464 
3465   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3466     {
3467       tree def = DEF_FROM_PTR (defp);
3468 
3469       VN_INFO (def)->use_processed = true;
3470     }
3471 }
3472 
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */
3475 
3476 static bool
3477 defs_to_varying (gimple *stmt)
3478 {
3479   bool changed = false;
3480   ssa_op_iter iter;
3481   def_operand_p defp;
3482 
3483   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3484     {
3485       tree def = DEF_FROM_PTR (defp);
3486       changed |= set_ssa_val_to (def, def);
3487     }
3488   return changed;
3489 }
3490 
3491 /* Visit a copy between LHS and RHS, return true if the value number
3492    changed.  */
3493 
3494 static bool
3495 visit_copy (tree lhs, tree rhs)
3496 {
3497   /* Valueize.  */
3498   rhs = SSA_VAL (rhs);
3499 
3500   return set_ssa_val_to (lhs, rhs);
3501 }
3502 
/* Lookup a value for OP in type WIDE_TYPE, i.e. a value of type
   WIDE_TYPE whose value in the type of OP is the same as OP.  */
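/* As a hypothetical sketch: for OP of type unsigned short and WIDE_TYPE
   int, an already value-numbered  _5 = (int) op_1;  yields _5, an
   existing  op_1 = (unsigned short) _7;  with _7 of type int yields the
   value of _7, and an INTEGER_CST OP is simply extended to WIDE_TYPE.
   The SSA names here are made up for illustration only.  */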
3505 
3506 static tree
3507 valueized_wider_op (tree wide_type, tree op)
3508 {
3509   if (TREE_CODE (op) == SSA_NAME)
3510     op = SSA_VAL (op);
3511 
3512   /* Either we have the op widened available.  */
3513   tree ops[3] = {};
3514   ops[0] = op;
3515   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3516 				       wide_type, ops, NULL);
3517   if (tem)
3518     return tem;
3519 
3520   /* Or the op is truncated from some existing value.  */
3521   if (TREE_CODE (op) == SSA_NAME)
3522     {
3523       gimple *def = SSA_NAME_DEF_STMT (op);
3524       if (is_gimple_assign (def)
3525 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3526 	{
3527 	  tem = gimple_assign_rhs1 (def);
3528 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3529 	    {
3530 	      if (TREE_CODE (tem) == SSA_NAME)
3531 		tem = SSA_VAL (tem);
3532 	      return tem;
3533 	    }
3534 	}
3535     }
3536 
3537   /* For constants simply extend it.  */
3538   if (TREE_CODE (op) == INTEGER_CST)
3539     return wide_int_to_tree (wide_type, op);
3540 
3541   return NULL_TREE;
3542 }
3543 
3544 /* Visit a nary operator RHS, value number it, and return true if the
3545    value number of LHS has changed as a result.  */
3546 
3547 static bool
3548 visit_nary_op (tree lhs, gassign *stmt)
3549 {
3550   tree result = vn_nary_op_lookup_stmt (stmt, NULL);
3551   if (result)
3552     return set_ssa_val_to (lhs, result);
3553 
3554   /* Do some special pattern matching for redundancies of operations
3555      in different types.  */
3556   enum tree_code code = gimple_assign_rhs_code (stmt);
3557   tree type = TREE_TYPE (lhs);
3558   tree rhs1 = gimple_assign_rhs1 (stmt);
3559   switch (code)
3560     {
3561     CASE_CONVERT:
3562       /* Match arithmetic done in a different type where we can easily
3563          substitute the result from some earlier sign-changed or widened
3564 	 operation.  */
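      /* A hypothetical sketch of the redundancy matched here
	 (made-up SSA names):
	   _1 = a_2 + b_3;       <- done in unsigned short
	   _4 = (int) _1;
	 where int copies of a_2 and b_3 and their int addition are
	 already value numbered; _4 can then reuse that int result,
	 masked with 0xffff in the zero-extension case.  */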
3565       if (INTEGRAL_TYPE_P (type)
3566 	  && TREE_CODE (rhs1) == SSA_NAME
3567 	  /* We only handle sign-changes or zero-extension -> & mask.  */
3568 	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3569 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3570 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3571 	{
3572 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3573 	  if (def
3574 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
3575 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
3576 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
3577 	    {
3578 	      tree ops[3] = {};
3579 	      /* Either we have the op widened available.  */
3580 	      ops[0] = valueized_wider_op (type,
3581 					   gimple_assign_rhs1 (def));
3582 	      if (ops[0])
3583 		ops[1] = valueized_wider_op (type,
3584 					     gimple_assign_rhs2 (def));
3585 	      if (ops[0] && ops[1])
3586 		{
3587 		  ops[0] = vn_nary_op_lookup_pieces
3588 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have the wider operation available.  */
3590 		  if (ops[0]
3591 		      /* If the leader is a wrapping operation we can
3592 			 insert it for code hoisting w/o introducing
3593 			 undefined overflow.  If it is not it has to
3594 			 be available.  See PR86554.  */
3595 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
3596 			  || TREE_CODE (ops[0]) != SSA_NAME
3597 			  || SSA_NAME_IS_DEFAULT_DEF (ops[0])
3598 			  || dominated_by_p_w_unex
3599 			       (gimple_bb (stmt),
3600 				gimple_bb (SSA_NAME_DEF_STMT (ops[0])))))
3601 		    {
3602 		      unsigned lhs_prec = TYPE_PRECISION (type);
3603 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3604 		      if (lhs_prec == rhs_prec)
3605 			{
3606 			  ops[1] = NULL_TREE;
3607 			  result = vn_nary_build_or_lookup (NOP_EXPR,
3608 							    type, ops);
3609 			  if (result)
3610 			    {
3611 			      bool changed = set_ssa_val_to (lhs, result);
3612 			      vn_nary_op_insert_stmt (stmt, result);
3613 			      return changed;
3614 			    }
3615 			}
3616 		      else
3617 			{
3618 			  ops[1] = wide_int_to_tree (type,
3619 						     wi::mask (rhs_prec, false,
3620 							       lhs_prec));
3621 			  result = vn_nary_build_or_lookup (BIT_AND_EXPR,
3622 							    TREE_TYPE (lhs),
3623 							    ops);
3624 			  if (result)
3625 			    {
3626 			      bool changed = set_ssa_val_to (lhs, result);
3627 			      vn_nary_op_insert_stmt (stmt, result);
3628 			      return changed;
3629 			    }
3630 			}
3631 		    }
3632 		}
3633 	    }
3634 	}
3635     default:;
3636     }
3637 
3638   bool changed = set_ssa_val_to (lhs, lhs);
3639   vn_nary_op_insert_stmt (stmt, lhs);
3640   return changed;
3641 }
3642 
3643 /* Visit a call STMT storing into LHS.  Return true if the value number
3644    of the LHS has changed as a result.  */
3645 
3646 static bool
3647 visit_reference_op_call (tree lhs, gcall *stmt)
3648 {
3649   bool changed = false;
3650   struct vn_reference_s vr1;
3651   vn_reference_t vnresult = NULL;
3652   tree vdef = gimple_vdef (stmt);
3653 
3654   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
3655   if (lhs && TREE_CODE (lhs) != SSA_NAME)
3656     lhs = NULL_TREE;
3657 
3658   vn_reference_lookup_call (stmt, &vnresult, &vr1);
3659   if (vnresult)
3660     {
3661       if (vnresult->result_vdef && vdef)
3662 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3663       else if (vdef)
3664 	/* If the call was discovered to be pure or const reflect
3665 	   that as far as possible.  */
3666 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3667 
3668       if (!vnresult->result && lhs)
3669 	vnresult->result = lhs;
3670 
3671       if (vnresult->result && lhs)
3672 	changed |= set_ssa_val_to (lhs, vnresult->result);
3673     }
3674   else
3675     {
3676       vn_reference_t vr2;
3677       vn_reference_s **slot;
3678       tree vdef_val = vdef;
3679       if (vdef)
3680 	{
	  /* If we value numbered an indirect call's function to the
	     address of a function not clobbering memory, value number
	     its VDEF to its VUSE.  */
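	  /* E.g. (hypothetical): after  fn_1 = &foo;  with foo pure or
	     const, a call through fn_1 does not clobber memory, so its
	     VDEF gets the value of the incoming VUSE.  */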
3684 	  tree fn = gimple_call_fn (stmt);
3685 	  if (fn && TREE_CODE (fn) == SSA_NAME)
3686 	    {
3687 	      fn = SSA_VAL (fn);
3688 	      if (TREE_CODE (fn) == ADDR_EXPR
3689 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3690 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3691 		      & (ECF_CONST | ECF_PURE)))
3692 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3693 	    }
3694 	  changed |= set_ssa_val_to (vdef, vdef_val);
3695 	}
3696       if (lhs)
3697 	changed |= set_ssa_val_to (lhs, lhs);
3698       vr2 = current_info->references_pool->allocate ();
3699       vr2->vuse = vr1.vuse;
3700       /* As we are not walking the virtual operand chain we know the
3701 	 shared_lookup_references are still original so we can re-use
3702 	 them here.  */
3703       vr2->operands = vr1.operands.copy ();
3704       vr2->type = vr1.type;
3705       vr2->set = vr1.set;
3706       vr2->hashcode = vr1.hashcode;
3707       vr2->result = lhs;
3708       vr2->result_vdef = vdef_val;
3709       slot = current_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3710 							    INSERT);
3711       gcc_assert (!*slot);
3712       *slot = vr2;
3713     }
3714 
3715   return changed;
3716 }
3717 
3718 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3719    and return true if the value number of the LHS has changed as a result.  */
3720 
3721 static bool
3722 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3723 {
3724   bool changed = false;
3725   tree last_vuse;
3726   tree result;
3727 
3728   last_vuse = gimple_vuse (stmt);
3729   last_vuse_ptr = &last_vuse;
3730   result = vn_reference_lookup (op, gimple_vuse (stmt),
3731 				default_vn_walk_kind, NULL, true);
3732   last_vuse_ptr = NULL;
3733 
3734   /* We handle type-punning through unions by value-numbering based
3735      on offset and size of the access.  Be prepared to handle a
3736      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
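  /* E.g. (hypothetical) for  union { int i; float f; } u;  a load of
     u.f after a store to u.i finds the stored int value, and the lhs
     gets the value of  VIEW_CONVERT_EXPR <float> (that value).  */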
3737   if (result
3738       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3739     {
3740       /* We will be setting the value number of lhs to the value number
3741 	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3742 	 So first simplify and lookup this expression to see if it
3743 	 is already available.  */
3744       code_helper rcode = VIEW_CONVERT_EXPR;
3745       tree ops[3] = { result };
3746       result = vn_nary_build_or_lookup (rcode, TREE_TYPE (op), ops);
3747     }
3748 
3749   if (result)
3750     changed = set_ssa_val_to (lhs, result);
3751   else
3752     {
3753       changed = set_ssa_val_to (lhs, lhs);
3754       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3755     }
3756 
3757   return changed;
3758 }
3759 
3760 
3761 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3762    and return true if the value number of the LHS has changed as a result.  */
3763 
3764 static bool
3765 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3766 {
3767   bool changed = false;
3768   vn_reference_t vnresult = NULL;
3769   tree assign;
3770   bool resultsame = false;
3771   tree vuse = gimple_vuse (stmt);
3772   tree vdef = gimple_vdef (stmt);
3773 
3774   if (TREE_CODE (op) == SSA_NAME)
3775     op = SSA_VAL (op);
3776 
  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.
3780 
3781      The vuses represent the memory state before the store.  If the
3782      memory state, address, and value of the store is the same as the
3783      last store to this location, then this store will produce the
3784      same memory state as that store.
3785 
3786      In this case the vdef versions for this store are value numbered to those
3787      vuse versions, since they represent the same memory state after
3788      this store.
3789 
3790      Otherwise, the vdefs for the store are used when inserting into
3791      the table, since the store generates a new memory state.  */
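  /* As a hypothetical sketch (made-up virtual operands):
       # .MEM_3 = VDEF <.MEM_2>
       a = x_1;
       ...
       # .MEM_5 = VDEF <.MEM_3>
       a = x_1;
     the second store stores the value the location already has, so
     .MEM_5 is value numbered to .MEM_3 below.  */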
3792 
3793   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
3794   if (vnresult
3795       && vnresult->result)
3796     {
3797       tree result = vnresult->result;
3798       if (TREE_CODE (result) == SSA_NAME)
3799 	result = SSA_VAL (result);
3800       resultsame = expressions_equal_p (result, op);
3801       if (resultsame)
3802 	{
3803 	  /* If the TBAA state isn't compatible for downstream reads
3804 	     we cannot value-number the VDEFs the same.  */
3805 	  alias_set_type set = get_alias_set (lhs);
3806 	  if (vnresult->set != set
3807 	      && ! alias_set_subset_of (set, vnresult->set))
3808 	    resultsame = false;
3809 	}
3810     }
3811 
3812   if (!resultsame)
3813     {
3814       /* Only perform the following when being called from PRE
3815 	 which embeds tail merging.  */
3816       if (default_vn_walk_kind == VN_WALK)
3817 	{
3818 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3819 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
3820 	  if (vnresult)
3821 	    {
3822 	      VN_INFO (vdef)->use_processed = true;
3823 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
3824 	    }
3825 	}
3826 
3827       if (dump_file && (dump_flags & TDF_DETAILS))
3828 	{
3829 	  fprintf (dump_file, "No store match\n");
3830 	  fprintf (dump_file, "Value numbering store ");
3831 	  print_generic_expr (dump_file, lhs, 0);
3832 	  fprintf (dump_file, " to ");
3833 	  print_generic_expr (dump_file, op, 0);
3834 	  fprintf (dump_file, "\n");
3835 	}
3836       /* Have to set value numbers before insert, since insert is
3837 	 going to valueize the references in-place.  */
3838       if (vdef)
3839 	changed |= set_ssa_val_to (vdef, vdef);
3840 
3841       /* Do not insert structure copies into the tables.  */
3842       if (is_gimple_min_invariant (op)
3843 	  || is_gimple_reg (op))
3844         vn_reference_insert (lhs, op, vdef, NULL);
3845 
3846       /* Only perform the following when being called from PRE
3847 	 which embeds tail merging.  */
3848       if (default_vn_walk_kind == VN_WALK)
3849 	{
3850 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
3851 	  vn_reference_insert (assign, lhs, vuse, vdef);
3852 	}
3853     }
3854   else
3855     {
3856       /* We had a match, so value number the vdef to have the value
3857 	 number of the vuse it came from.  */
3858 
3859       if (dump_file && (dump_flags & TDF_DETAILS))
3860 	fprintf (dump_file, "Store matched earlier value, "
3861 		 "value numbering store vdefs to matching vuses.\n");
3862 
3863       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
3864     }
3865 
3866   return changed;
3867 }
3868 
3869 /* Visit and value number PHI, return true if the value number
3870    changed.  */
3871 
3872 static bool
3873 visit_phi (gimple *phi)
3874 {
3875   bool changed = false;
3876   tree result;
3877   tree sameval = VN_TOP;
3878   bool allsame = true;
3879   unsigned n_executable = 0;
3880 
  /* TODO: We could check for this in init_scc_vn and replace this
     check with a gcc_assert.  */
3883   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
3884     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3885 
3886   /* See if all non-TOP arguments have the same value.  TOP is
3887      equivalent to everything, so we can ignore it.  */
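  /* E.g. (hypothetical) for  x_4 = PHI <x_1(2), x_1(3), x_2(5)>  with
     the edge from block 5 not executable, all executable arguments
     agree on x_1 and x_4 is value numbered to x_1.  */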
3888   edge_iterator ei;
3889   edge e;
3890   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3891     if (e->flags & EDGE_EXECUTABLE)
3892       {
3893 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3894 
3895 	++n_executable;
3896 	if (TREE_CODE (def) == SSA_NAME)
3897 	  def = SSA_VAL (def);
3898 	if (def == VN_TOP)
3899 	  continue;
3900 	if (sameval == VN_TOP)
3901 	  sameval = def;
3902 	else if (!expressions_equal_p (def, sameval))
3903 	  {
3904 	    allsame = false;
3905 	    break;
3906 	  }
3907       }
3908 
  /* If none of the edges was executable or all incoming values are
     undefined, keep the value-number at VN_TOP.  If only a single edge
     is executable use its value.  */
3912   if (sameval == VN_TOP
3913       || n_executable == 1)
3914     return set_ssa_val_to (PHI_RESULT (phi), sameval);
3915 
3916   /* First see if it is equivalent to a phi node in this block.  We prefer
3917      this as it allows IV elimination - see PRs 66502 and 67167.  */
3918   result = vn_phi_lookup (phi);
3919   if (result)
3920     changed = set_ssa_val_to (PHI_RESULT (phi), result);
3921   /* Otherwise all value numbered to the same value, the phi node has that
3922      value.  */
3923   else if (allsame)
3924     changed = set_ssa_val_to (PHI_RESULT (phi), sameval);
3925   else
3926     {
3927       vn_phi_insert (phi, PHI_RESULT (phi));
3928       changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3929     }
3930 
3931   return changed;
3932 }
3933 
3934 /* Try to simplify RHS using equivalences and constant folding.  */
3935 
3936 static tree
3937 try_to_simplify (gassign *stmt)
3938 {
3939   enum tree_code code = gimple_assign_rhs_code (stmt);
3940   tree tem;
3941 
  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     NULL_TREE in this case; there is no point in doing extra work.  */
3944   if (code == SSA_NAME)
3945     return NULL_TREE;
3946 
3947   /* First try constant folding based on our current lattice.  */
3948   mprts_hook = vn_lookup_simplify_result;
3949   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
3950   mprts_hook = NULL;
3951   if (tem
3952       && (TREE_CODE (tem) == SSA_NAME
3953 	  || is_gimple_min_invariant (tem)))
3954     return tem;
3955 
3956   return NULL_TREE;
3957 }
3958 
3959 /* Visit and value number USE, return true if the value number
3960    changed. */
3961 
3962 static bool
3963 visit_use (tree use)
3964 {
3965   bool changed = false;
3966   gimple *stmt = SSA_NAME_DEF_STMT (use);
3967 
3968   mark_use_processed (use);
3969 
3970   gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3971   if (dump_file && (dump_flags & TDF_DETAILS)
3972       && !SSA_NAME_IS_DEFAULT_DEF (use))
3973     {
3974       fprintf (dump_file, "Value numbering ");
3975       print_generic_expr (dump_file, use, 0);
3976       fprintf (dump_file, " stmt = ");
3977       print_gimple_stmt (dump_file, stmt, 0, 0);
3978     }
3979 
3980   /* Handle uninitialized uses.  */
3981   if (SSA_NAME_IS_DEFAULT_DEF (use))
3982     changed = set_ssa_val_to (use, use);
3983   else if (gimple_code (stmt) == GIMPLE_PHI)
3984     changed = visit_phi (stmt);
3985   else if (gimple_has_volatile_ops (stmt))
3986     changed = defs_to_varying (stmt);
3987   else if (gassign *ass = dyn_cast <gassign *> (stmt))
3988     {
3989       enum tree_code code = gimple_assign_rhs_code (ass);
3990       tree lhs = gimple_assign_lhs (ass);
3991       tree rhs1 = gimple_assign_rhs1 (ass);
3992       tree simplified;
3993 
3994       /* Shortcut for copies. Simplifying copies is pointless,
3995 	 since we copy the expression and value they represent.  */
3996       if (code == SSA_NAME
3997 	  && TREE_CODE (lhs) == SSA_NAME)
3998 	{
3999 	  changed = visit_copy (lhs, rhs1);
4000 	  goto done;
4001 	}
4002       simplified = try_to_simplify (ass);
4003       if (simplified)
4004 	{
4005 	  if (dump_file && (dump_flags & TDF_DETAILS))
4006 	    {
4007 	      fprintf (dump_file, "RHS ");
4008 	      print_gimple_expr (dump_file, ass, 0, 0);
4009 	      fprintf (dump_file, " simplified to ");
4010 	      print_generic_expr (dump_file, simplified, 0);
4011 	      fprintf (dump_file, "\n");
4012 	    }
4013 	}
4014       /* Setting value numbers to constants will occasionally
4015 	 screw up phi congruence because constants are not
4016 	 uniquely associated with a single ssa name that can be
4017 	 looked up.  */
4018       if (simplified
4019 	  && is_gimple_min_invariant (simplified)
4020 	  && TREE_CODE (lhs) == SSA_NAME)
4021 	{
4022 	  changed = set_ssa_val_to (lhs, simplified);
4023 	  goto done;
4024 	}
4025       else if (simplified
4026 	       && TREE_CODE (simplified) == SSA_NAME
4027 	       && TREE_CODE (lhs) == SSA_NAME)
4028 	{
4029 	  changed = visit_copy (lhs, simplified);
4030 	  goto done;
4031 	}
4032 
4033       if ((TREE_CODE (lhs) == SSA_NAME
4034 	   /* We can substitute SSA_NAMEs that are live over
4035 	      abnormal edges with their constant value.  */
4036 	   && !(gimple_assign_copy_p (ass)
4037 		&& is_gimple_min_invariant (rhs1))
4038 	   && !(simplified
4039 		&& is_gimple_min_invariant (simplified))
4040 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4041 	  /* Stores or copies from SSA_NAMEs that are live over
4042 	     abnormal edges are a problem.  */
4043 	  || (code == SSA_NAME
4044 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4045 	changed = defs_to_varying (ass);
4046       else if (REFERENCE_CLASS_P (lhs)
4047 	       || DECL_P (lhs))
4048 	changed = visit_reference_op_store (lhs, rhs1, ass);
4049       else if (TREE_CODE (lhs) == SSA_NAME)
4050 	{
4051 	  if ((gimple_assign_copy_p (ass)
4052 	       && is_gimple_min_invariant (rhs1))
4053 	      || (simplified
4054 		  && is_gimple_min_invariant (simplified)))
4055 	    {
4056 	      if (simplified)
4057 		changed = set_ssa_val_to (lhs, simplified);
4058 	      else
4059 		changed = set_ssa_val_to (lhs, rhs1);
4060 	    }
4061 	  else
4062 	    {
4063 	      /* Visit the original statement.  */
4064 	      switch (vn_get_stmt_kind (ass))
4065 		{
4066 		case VN_NARY:
4067 		changed = visit_nary_op (lhs, ass);
4068 		break;
4069 		case VN_REFERENCE:
4070 		changed = visit_reference_op_load (lhs, rhs1, ass);
4071 		break;
4072 		default:
4073 		changed = defs_to_varying (ass);
4074 		break;
4075 		}
4076 	    }
4077 	}
4078       else
4079 	changed = defs_to_varying (ass);
4080     }
4081   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4082     {
4083       tree lhs = gimple_call_lhs (call_stmt);
4084       if (lhs && TREE_CODE (lhs) == SSA_NAME)
4085 	{
4086 	  /* Try constant folding based on our current lattice.  */
4087 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4088 							    vn_valueize);
4089 	  if (simplified)
4090 	    {
4091 	      if (dump_file && (dump_flags & TDF_DETAILS))
4092 		{
4093 		  fprintf (dump_file, "call ");
4094 		  print_gimple_expr (dump_file, call_stmt, 0, 0);
4095 		  fprintf (dump_file, " simplified to ");
4096 		  print_generic_expr (dump_file, simplified, 0);
4097 		  fprintf (dump_file, "\n");
4098 		}
4099 	    }
4100 	  /* Setting value numbers to constants will occasionally
4101 	     screw up phi congruence because constants are not
4102 	     uniquely associated with a single ssa name that can be
4103 	     looked up.  */
4104 	  if (simplified
4105 	      && is_gimple_min_invariant (simplified))
4106 	    {
4107 	      changed = set_ssa_val_to (lhs, simplified);
4108 	      if (gimple_vdef (call_stmt))
4109 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4110 					   SSA_VAL (gimple_vuse (call_stmt)));
4111 	      goto done;
4112 	    }
4113 	  else if (simplified
4114 		   && TREE_CODE (simplified) == SSA_NAME)
4115 	    {
4116 	      changed = visit_copy (lhs, simplified);
4117 	      if (gimple_vdef (call_stmt))
4118 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4119 					   SSA_VAL (gimple_vuse (call_stmt)));
4120 	      goto done;
4121 	    }
4122 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4123 	    {
4124 	      changed = defs_to_varying (call_stmt);
4125 	      goto done;
4126 	    }
4127 	}
4128 
4129       /* Pick up flags from a devirtualization target.  */
4130       tree fn = gimple_call_fn (stmt);
4131       int extra_fnflags = 0;
4132       if (fn && TREE_CODE (fn) == SSA_NAME)
4133 	{
4134 	  fn = SSA_VAL (fn);
4135 	  if (TREE_CODE (fn) == ADDR_EXPR
4136 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4137 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4138 	}
4139       if (!gimple_call_internal_p (call_stmt)
4140 	  && (/* Calls to the same function with the same vuse
4141 		 and the same operands do not necessarily return the same
4142 		 value, unless they're pure or const.  */
4143 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
4144 	       & (ECF_PURE | ECF_CONST))
	      /* If calls have a vdef, subsequent calls won't have
		 the same incoming vuse.  So, if two calls with vdef have
		 the same vuse, we know they're not subsequent.
		 We can give the same value number to two non-subsequent
		 calls to the same function with the same vuse and the
		 same operands, because there is no code in the program
		 that could compare the two values...  */
4152 	      || (gimple_vdef (call_stmt)
		  /* ... unless the call returns a pointer which does
		     not alias with anything else.  In that case the
		     information that the values are distinct is encoded
		     in the IL.  */
4157 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4158 		  /* Only perform the following when being called from PRE
4159 		     which embeds tail merging.  */
4160 		  && default_vn_walk_kind == VN_WALK)))
4161 	changed = visit_reference_op_call (lhs, call_stmt);
4162       else
4163 	changed = defs_to_varying (call_stmt);
4164     }
4165   else
4166     changed = defs_to_varying (stmt);
4167  done:
4168   return changed;
4169 }
4170 
/* Compare two operands by reverse postorder index.  */
4172 
4173 static int
4174 compare_ops (const void *pa, const void *pb)
4175 {
4176   const tree opa = *((const tree *)pa);
4177   const tree opb = *((const tree *)pb);
4178   gimple *opstmta = SSA_NAME_DEF_STMT (opa);
4179   gimple *opstmtb = SSA_NAME_DEF_STMT (opb);
4180   basic_block bba;
4181   basic_block bbb;
4182 
4183   if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
4184     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4185   else if (gimple_nop_p (opstmta))
4186     return -1;
4187   else if (gimple_nop_p (opstmtb))
4188     return 1;
4189 
4190   bba = gimple_bb (opstmta);
4191   bbb = gimple_bb (opstmtb);
4192 
4193   if (!bba && !bbb)
4194     return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4195   else if (!bba)
4196     return -1;
4197   else if (!bbb)
4198     return 1;
4199 
4200   if (bba == bbb)
4201     {
4202       if (gimple_code (opstmta) == GIMPLE_PHI
4203 	  && gimple_code (opstmtb) == GIMPLE_PHI)
4204 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4205       else if (gimple_code (opstmta) == GIMPLE_PHI)
4206 	return -1;
4207       else if (gimple_code (opstmtb) == GIMPLE_PHI)
4208 	return 1;
4209       else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
4210         return gimple_uid (opstmta) - gimple_uid (opstmtb);
4211       else
4212 	return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
4213     }
4214   return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
4215 }
4216 
4217 /* Sort an array containing members of a strongly connected component
4218    SCC so that the members are ordered by RPO number.
4219    This means that when the sort is complete, iterating through the
4220    array will give you the members in RPO order.  */
4221 
4222 static void
4223 sort_scc (vec<tree> scc)
4224 {
4225   scc.qsort (compare_ops);
4226 }
4227 
/* Insert the no longer used nary ONARY into the hash INFO.  */
4229 
4230 static void
4231 copy_nary (vn_nary_op_t onary, vn_tables_t info)
4232 {
4233   size_t size = sizeof_vn_nary_op (onary->length);
4234   vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
4235 					       &info->nary_obstack);
4236   memcpy (nary, onary, size);
4237   vn_nary_op_insert_into (nary, info->nary, false);
4238 }
4239 
/* Insert the no longer used phi OPHI into the hash INFO.  */
4241 
4242 static void
4243 copy_phi (vn_phi_t ophi, vn_tables_t info)
4244 {
4245   vn_phi_t phi = info->phis_pool->allocate ();
4246   vn_phi_s **slot;
4247   memcpy (phi, ophi, sizeof (*phi));
4248   ophi->phiargs.create (0);
4249   slot = info->phis->find_slot_with_hash (phi, phi->hashcode, INSERT);
4250   gcc_assert (!*slot);
4251   *slot = phi;
4252 }
4253 
/* Insert the no longer used reference OREF into the hash INFO.  */
4255 
4256 static void
4257 copy_reference (vn_reference_t oref, vn_tables_t info)
4258 {
4259   vn_reference_t ref;
4260   vn_reference_s **slot;
4261   ref = info->references_pool->allocate ();
4262   memcpy (ref, oref, sizeof (*ref));
4263   oref->operands.create (0);
4264   slot = info->references->find_slot_with_hash (ref, ref->hashcode, INSERT);
4265   if (*slot)
4266     free_reference (*slot);
4267   *slot = ref;
4268 }
4269 
4270 /* Process a strongly connected component in the SSA graph.  */
4271 
4272 static void
4273 process_scc (vec<tree> scc)
4274 {
4275   tree var;
4276   unsigned int i;
4277   unsigned int iterations = 0;
4278   bool changed = true;
4279   vn_nary_op_iterator_type hin;
4280   vn_phi_iterator_type hip;
4281   vn_reference_iterator_type hir;
4282   vn_nary_op_t nary;
4283   vn_phi_t phi;
4284   vn_reference_t ref;
4285 
4286   /* If the SCC has a single member, just visit it.  */
4287   if (scc.length () == 1)
4288     {
4289       tree use = scc[0];
4290       if (VN_INFO (use)->use_processed)
4291 	return;
4292       /* We need to make sure it doesn't form a cycle itself, which can
4293 	 happen for self-referential PHI nodes.  In that case we would
4294 	 end up inserting an expression with VN_TOP operands into the
4295 	 valid table which makes us derive bogus equivalences later.
4296 	 The cheapest way to check this is to assume it for all PHI nodes.  */
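      /* E.g. (hypothetical)  i_1 = PHI <i_1(3), 0(2)>  references its
	 own result and thus needs the optimistic iteration below even
	 though its SCC has a single member.  */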
4297       if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
4298 	/* Fallthru to iteration.  */ ;
4299       else
4300 	{
4301 	  visit_use (use);
4302 	  return;
4303 	}
4304     }
4305 
4306   if (dump_file && (dump_flags & TDF_DETAILS))
4307     print_scc (dump_file, scc);
4308 
4309   /* Iterate over the SCC with the optimistic table until it stops
4310      changing.  */
4311   current_info = optimistic_info;
4312   while (changed)
4313     {
4314       changed = false;
4315       iterations++;
4316       if (dump_file && (dump_flags & TDF_DETAILS))
4317 	fprintf (dump_file, "Starting iteration %d\n", iterations);
4318       /* As we are value-numbering optimistically we have to
4319 	 clear the expression tables and the simplified expressions
4320 	 in each iteration until we converge.  */
4321       optimistic_info->nary->empty ();
4322       optimistic_info->phis->empty ();
4323       optimistic_info->references->empty ();
4324       obstack_free (&optimistic_info->nary_obstack, NULL);
4325       gcc_obstack_init (&optimistic_info->nary_obstack);
4326       optimistic_info->phis_pool->release ();
4327       optimistic_info->references_pool->release ();
4328       FOR_EACH_VEC_ELT (scc, i, var)
4329 	gcc_assert (!VN_INFO (var)->needs_insertion
4330 		    && VN_INFO (var)->expr == NULL);
4331       FOR_EACH_VEC_ELT (scc, i, var)
4332 	changed |= visit_use (var);
4333     }
4334 
4335   if (dump_file && (dump_flags & TDF_DETAILS))
4336     fprintf (dump_file, "Processing SCC needed %d iterations\n", iterations);
4337   statistics_histogram_event (cfun, "SCC iterations", iterations);
4338 
4339   /* Finally, copy the contents of the no longer used optimistic
4340      table to the valid table.  */
4341   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->nary, nary, vn_nary_op_t, hin)
4342     copy_nary (nary, valid_info);
4343   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->phis, phi, vn_phi_t, hip)
4344     copy_phi (phi, valid_info);
4345   FOR_EACH_HASH_TABLE_ELEMENT (*optimistic_info->references,
4346 			       ref, vn_reference_t, hir)
4347     copy_reference (ref, valid_info);
4348 
4349   current_info = valid_info;
4350 }
4351 
4352 
4353 /* Pop the components of the found SCC for NAME off the SCC stack
4354    and process them.  Returns true if all went well, false if
4355    we run into resource limits.  */
4356 
4357 static bool
4358 extract_and_process_scc_for_name (tree name)
4359 {
4360   auto_vec<tree> scc;
4361   tree x;
4362 
4363   /* Found an SCC, pop the components off the SCC stack and
4364      process them.  */
4365   do
4366     {
4367       x = sccstack.pop ();
4368 
4369       VN_INFO (x)->on_sccstack = false;
4370       scc.safe_push (x);
4371     } while (x != name);
4372 
  /* Bail out of SCCVN in case an SCC turns out to be incredibly large.  */
4374   if (scc.length ()
4375       > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
4376     {
4377       if (dump_file)
4378 	fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
4379 		 "SCC size %u exceeding %u\n", scc.length (),
4380 		 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
4381 
4382       return false;
4383     }
4384 
4385   if (scc.length () > 1)
4386     sort_scc (scc);
4387 
4388   process_scc (scc);
4389 
4390   return true;
4391 }
4392 
4393 /* Depth first search on NAME to discover and process SCC's in the SSA
4394    graph.
4395    Execution of this algorithm relies on the fact that the SCC's are
4396    popped off the stack in topological order.
4397    Returns true if successful, false if we stopped processing SCC's due
4398    to resource constraints.  */
4399 
4400 static bool
4401 DFS (tree name)
4402 {
4403   auto_vec<ssa_op_iter> itervec;
4404   auto_vec<tree> namevec;
4405   use_operand_p usep = NULL;
4406   gimple *defstmt;
4407   tree use;
4408   ssa_op_iter iter;
4409 
4410 start_over:
4411   /* SCC info */
4412   VN_INFO (name)->dfsnum = next_dfs_num++;
4413   VN_INFO (name)->visited = true;
4414   VN_INFO (name)->low = VN_INFO (name)->dfsnum;
4415 
4416   sccstack.safe_push (name);
4417   VN_INFO (name)->on_sccstack = true;
4418   defstmt = SSA_NAME_DEF_STMT (name);
4419 
4420   /* Recursively DFS on our operands, looking for SCC's.  */
4421   if (!gimple_nop_p (defstmt))
4422     {
4423       /* Push a new iterator.  */
4424       if (gphi *phi = dyn_cast <gphi *> (defstmt))
4425 	usep = op_iter_init_phiuse (&iter, phi, SSA_OP_ALL_USES);
4426       else
4427 	usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
4428     }
4429   else
4430     clear_and_done_ssa_iter (&iter);
4431 
4432   while (1)
4433     {
4434       /* If we are done processing uses of a name, go up the stack
4435 	 of iterators and process SCCs as we found them.  */
4436       if (op_iter_done (&iter))
4437 	{
4438 	  /* See if we found an SCC.  */
4439 	  if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
4440 	    if (!extract_and_process_scc_for_name (name))
4441 	      return false;
4442 
4443 	  /* Check if we are done.  */
4444 	  if (namevec.is_empty ())
4445 	    return true;
4446 
4447 	  /* Restore the last use walker and continue walking there.  */
4448 	  use = name;
4449 	  name = namevec.pop ();
4450 	  memcpy (&iter, &itervec.last (),
4451 		  sizeof (ssa_op_iter));
4452 	  itervec.pop ();
4453 	  goto continue_walking;
4454 	}
4455 
4456       use = USE_FROM_PTR (usep);
4457 
4458       /* Since we handle phi nodes, we will sometimes get
4459 	 invariants in the use expression.  */
4460       if (TREE_CODE (use) == SSA_NAME)
4461 	{
4462 	  if (! (VN_INFO (use)->visited))
4463 	    {
4464 	      /* Recurse by pushing the current use walking state on
4465 		 the stack and starting over.  */
4466 	      itervec.safe_push (iter);
4467 	      namevec.safe_push (name);
4468 	      name = use;
4469 	      goto start_over;
4470 
4471 continue_walking:
4472 	      VN_INFO (name)->low = MIN (VN_INFO (name)->low,
4473 					 VN_INFO (use)->low);
4474 	    }
4475 	  if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
4476 	      && VN_INFO (use)->on_sccstack)
4477 	    {
4478 	      VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
4479 					 VN_INFO (name)->low);
4480 	    }
4481 	}
4482 
4483       usep = op_iter_next_use (&iter);
4484     }
4485 }
4486 
4487 /* Allocate a value number table.  */
4488 
4489 static void
4490 allocate_vn_table (vn_tables_t table)
4491 {
4492   table->phis = new vn_phi_table_type (23);
4493   table->nary = new vn_nary_op_table_type (23);
4494   table->references = new vn_reference_table_type (23);
4495 
4496   gcc_obstack_init (&table->nary_obstack);
4497   table->phis_pool = new object_allocator<vn_phi_s> ("VN phis");
4498   table->references_pool = new object_allocator<vn_reference_s>
4499     ("VN references");
4500 }
4501 
4502 /* Free a value number table.  */
4503 
4504 static void
4505 free_vn_table (vn_tables_t table)
4506 {
4507   delete table->phis;
4508   table->phis = NULL;
4509   delete table->nary;
4510   table->nary = NULL;
4511   delete table->references;
4512   table->references = NULL;
4513   obstack_free (&table->nary_obstack, NULL);
4514   delete table->phis_pool;
4515   delete table->references_pool;
4516 }
4517 
4518 static void
4519 init_scc_vn (void)
4520 {
4521   int j;
4522   int *rpo_numbers_temp;
4523 
4524   calculate_dominance_info (CDI_DOMINATORS);
4525   mark_dfs_back_edges ();
4526 
4527   sccstack.create (0);
4528   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
4529 
4530   constant_value_ids = BITMAP_ALLOC (NULL);
4531 
4532   next_dfs_num = 1;
4533   next_value_id = 1;
4534 
4535   vn_ssa_aux_table.create (num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size; it just
     preallocates the space to do so.  */
4538   vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
4539   gcc_obstack_init (&vn_ssa_aux_obstack);
4540 
4541   shared_lookup_phiargs.create (0);
4542   shared_lookup_references.create (0);
4543   rpo_numbers = XNEWVEC (int, last_basic_block_for_fn (cfun));
4544   rpo_numbers_temp =
4545     XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
4546   pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
4547 
  /* rpo_numbers_temp holds the RPO ordering: rpo_numbers_temp[i] = bb
     means that bb is the i'th block in RPO order.  We want to map bb's
     to RPO numbers, so we need to invert this array.  */
4551   for (j = 0; j < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; j++)
4552     rpo_numbers[rpo_numbers_temp[j]] = j;
4553 
4554   XDELETE (rpo_numbers_temp);
4555 
4556   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
4557 
4558   renumber_gimple_stmt_uids ();
4559 
4560   /* Create the valid and optimistic value numbering tables.  */
4561   valid_info = XCNEW (struct vn_tables_s);
4562   allocate_vn_table (valid_info);
4563   optimistic_info = XCNEW (struct vn_tables_s);
4564   allocate_vn_table (optimistic_info);
4565   current_info = valid_info;
4566 
4567   /* Create the VN_INFO structures, and initialize value numbers to
4568      TOP or VARYING for parameters.  */
4569   size_t i;
4570   tree name;
4571 
4572   FOR_EACH_SSA_NAME (i, name, cfun)
4573     {
4574       VN_INFO_GET (name)->valnum = VN_TOP;
4575       VN_INFO (name)->needs_insertion = false;
4576       VN_INFO (name)->expr = NULL;
4577       VN_INFO (name)->value_id = 0;
4578 
4579       if (!SSA_NAME_IS_DEFAULT_DEF (name))
4580 	continue;
4581 
4582       switch (TREE_CODE (SSA_NAME_VAR (name)))
4583 	{
4584 	case VAR_DECL:
4585 	  /* Undefined vars keep TOP.  */
4586 	  break;
4587 
4588 	case PARM_DECL:
4589 	  /* Parameters are VARYING but we can record a condition
4590 	     if we know it is a non-NULL pointer.  */
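	  /* E.g. (hypothetical) for a pointer parameter p declared
	     nonnull this records  p_1(D) != 0  as being true.  */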
4591 	  VN_INFO (name)->visited = true;
4592 	  VN_INFO (name)->valnum = name;
4593 	  if (POINTER_TYPE_P (TREE_TYPE (name))
4594 	      && nonnull_arg_p (SSA_NAME_VAR (name)))
4595 	    {
4596 	      tree ops[2];
4597 	      ops[0] = name;
4598 	      ops[1] = build_int_cst (TREE_TYPE (name), 0);
4599 	      vn_nary_op_insert_pieces (2, NE_EXPR, boolean_type_node, ops,
4600 					boolean_true_node, 0);
4601 	      if (dump_file && (dump_flags & TDF_DETAILS))
4602 		{
4603 		  fprintf (dump_file, "Recording ");
4604 		  print_generic_expr (dump_file, name, TDF_SLIM);
4605 		  fprintf (dump_file, " != 0\n");
4606 		}
4607 	    }
4608 	  break;
4609 
4610 	case RESULT_DECL:
4611 	  /* If the result is passed by invisible reference the default
4612 	     def is initialized, otherwise it's uninitialized.  */
4613 	  if (DECL_BY_REFERENCE (SSA_NAME_VAR (name)))
4614 	    {
4615 	      VN_INFO (name)->visited = true;
4616 	      VN_INFO (name)->valnum = name;
4617 	    }
4618 	  break;
4619 
4620 	default:
4621 	  gcc_unreachable ();
4622 	}
4623     }
4624 }
4625 
4626 /* Restore SSA info that has been reset on value leaders.  */
4627 
4628 void
4629 scc_vn_restore_ssa_info (void)
4630 {
4631   unsigned i;
4632   tree name;
4633 
4634   FOR_EACH_SSA_NAME (i, name, cfun)
4635     {
4636       if (has_VN_INFO (name))
4637 	{
4638 	  if (VN_INFO (name)->needs_insertion)
4639 	    ;
4640 	  else if (POINTER_TYPE_P (TREE_TYPE (name))
4641 		   && VN_INFO (name)->info.ptr_info)
4642 	    SSA_NAME_PTR_INFO (name) = VN_INFO (name)->info.ptr_info;
4643 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name))
4644 		   && VN_INFO (name)->info.range_info)
4645 	    {
4646 	      SSA_NAME_RANGE_INFO (name) = VN_INFO (name)->info.range_info;
4647 	      SSA_NAME_ANTI_RANGE_P (name)
4648 		= VN_INFO (name)->range_info_anti_range_p;
4649 	    }
4650 	}
4651     }
4652 }
4653 
4654 void
4655 free_scc_vn (void)
4656 {
4657   size_t i;
4658   tree name;
4659 
4660   delete constant_to_value_id;
4661   constant_to_value_id = NULL;
4662   BITMAP_FREE (constant_value_ids);
4663   shared_lookup_phiargs.release ();
4664   shared_lookup_references.release ();
4665   XDELETEVEC (rpo_numbers);
4666 
4667   FOR_EACH_SSA_NAME (i, name, cfun)
4668     {
4669       if (has_VN_INFO (name)
4670 	  && VN_INFO (name)->needs_insertion)
4671 	release_ssa_name (name);
4672     }
4673   obstack_free (&vn_ssa_aux_obstack, NULL);
4674   vn_ssa_aux_table.release ();
4675 
4676   sccstack.release ();
4677   free_vn_table (valid_info);
4678   XDELETE (valid_info);
4679   free_vn_table (optimistic_info);
4680   XDELETE (optimistic_info);
4681 
4682   BITMAP_FREE (const_parms);
4683 }
4684 
4685 /* Set *ID according to RESULT.  */
4686 
4687 static void
4688 set_value_id_for_result (tree result, unsigned int *id)
4689 {
4690   if (result && TREE_CODE (result) == SSA_NAME)
4691     *id = VN_INFO (result)->value_id;
4692   else if (result && is_gimple_min_invariant (result))
4693     *id = get_or_alloc_constant_value_id (result);
4694   else
4695     *id = get_next_value_id ();
4696 }
4697 
4698 /* Set the value ids in the valid hash tables.  */
4699 
4700 static void
4701 set_hashtable_value_ids (void)
4702 {
4703   vn_nary_op_iterator_type hin;
4704   vn_phi_iterator_type hip;
4705   vn_reference_iterator_type hir;
4706   vn_nary_op_t vno;
4707   vn_reference_t vr;
4708   vn_phi_t vp;
4709 
4710   /* Now set the value ids of the things we had put in the hash
4711      table.  */
4712 
4713   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4714     set_value_id_for_result (vno->result, &vno->value_id);
4715 
4716   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4717     set_value_id_for_result (vp->result, &vp->value_id);
4718 
4719   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4720 			       hir)
4721     set_value_id_for_result (vr->result, &vr->value_id);
4722 }
4723 
4724 class sccvn_dom_walker : public dom_walker
4725 {
4726 public:
4727   sccvn_dom_walker ()
4728     : dom_walker (CDI_DOMINATORS, true), fail (false), cond_stack (0) {}
4729 
4730   virtual edge before_dom_children (basic_block);
4731   virtual void after_dom_children (basic_block);
4732 
4733   void record_cond (basic_block,
4734 		    enum tree_code code, tree lhs, tree rhs, bool value);
4735   void record_conds (basic_block,
4736 		     enum tree_code code, tree lhs, tree rhs, bool value);
4737 
4738   bool fail;
4739   auto_vec<std::pair <basic_block, std::pair <vn_nary_op_t, vn_nary_op_t> > >
4740     cond_stack;
4741 };
4742 
4743 /* Record a temporary condition for the BB and its dominated blocks.  */
4744 
4745 void
4746 sccvn_dom_walker::record_cond (basic_block bb,
4747 			       enum tree_code code, tree lhs, tree rhs,
4748 			       bool value)
4749 {
4750   tree ops[2] = { lhs, rhs };
4751   vn_nary_op_t old = NULL;
4752   if (vn_nary_op_lookup_pieces (2, code, boolean_type_node, ops, &old))
4753     current_info->nary->remove_elt_with_hash (old, old->hashcode);
4754   vn_nary_op_t cond
4755     = vn_nary_op_insert_pieces (2, code, boolean_type_node, ops,
4756 				value
4757 				? boolean_true_node
4758 				: boolean_false_node, 0);
4759   if (dump_file && (dump_flags & TDF_DETAILS))
4760     {
4761       fprintf (dump_file, "Recording temporarily ");
4762       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4763       fprintf (dump_file, " %s ", get_tree_code_name (code));
4764       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4765       fprintf (dump_file, " == %s%s\n",
4766 	       value ? "true" : "false",
4767 	       old ? " (old entry saved)" : "");
4768     }
4769   cond_stack.safe_push (std::make_pair (bb, std::make_pair (cond, old)));
4770 }
4771 
4772 /* Record temporary conditions for the BB and its dominated blocks
4773    according to LHS CODE RHS == VALUE and its dominated conditions.  */
4774 
4775 void
4776 sccvn_dom_walker::record_conds (basic_block bb,
4777 				enum tree_code code, tree lhs, tree rhs,
4778 				bool value)
4779 {
4780   /* Record the original condition.  */
4781   record_cond (bb, code, lhs, rhs, value);
4782 
4783   if (!value)
4784     return;
4785 
4786   /* Record dominated conditions if the condition is true.  Note that
4787      the inversion is already recorded.  */
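  /* E.g. recording  a_1 < b_2  as true below also records  a_1 <= b_2
     and  a_1 != b_2  as true and  a_1 == b_2  as false (SSA names are
     for illustration only).  */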
4788   switch (code)
4789     {
4790     case LT_EXPR:
4791     case GT_EXPR:
4792       record_cond (bb, code == LT_EXPR ? LE_EXPR : GE_EXPR, lhs, rhs, true);
4793       record_cond (bb, NE_EXPR, lhs, rhs, true);
4794       record_cond (bb, EQ_EXPR, lhs, rhs, false);
4795       break;
4796 
4797     case EQ_EXPR:
4798       record_cond (bb, LE_EXPR, lhs, rhs, true);
4799       record_cond (bb, GE_EXPR, lhs, rhs, true);
4800       record_cond (bb, LT_EXPR, lhs, rhs, false);
4801       record_cond (bb, GT_EXPR, lhs, rhs, false);
4802       break;
4803 
4804     default:
4805       break;
4806     }
4807 }
4808 
4809 /* Restore expressions and values derived from conditionals.  */
4810 
4811 void
4812 sccvn_dom_walker::after_dom_children (basic_block bb)
4813 {
4814   while (!cond_stack.is_empty ()
4815 	 && cond_stack.last ().first == bb)
4816     {
4817       vn_nary_op_t cond = cond_stack.last ().second.first;
4818       vn_nary_op_t old = cond_stack.last ().second.second;
4819       current_info->nary->remove_elt_with_hash (cond, cond->hashcode);
4820       if (old)
4821 	vn_nary_op_insert_into (old, current_info->nary, false);
4822       cond_stack.pop ();
4823     }
4824 }
4825 
4826 /* Value number all statements in BB.  */
4827 
4828 edge
4829 sccvn_dom_walker::before_dom_children (basic_block bb)
4830 {
4831   edge e;
4832   edge_iterator ei;
4833 
4834   if (fail)
4835     return NULL;
4836 
4837   if (dump_file && (dump_flags & TDF_DETAILS))
4838     fprintf (dump_file, "Visiting BB %d\n", bb->index);
4839 
4840   /* If we have a single predecessor record the equivalence from a
4841      possible condition on the predecessor edge.  */
4842   edge pred_e = NULL;
4843   FOR_EACH_EDGE (e, ei, bb->preds)
4844     {
      /* Ignore simple backedges into this block so that we can still
         record conditions in loop headers.  */
4847       if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4848 	continue;
4849       if (! pred_e)
4850 	pred_e = e;
4851       else
4852 	{
4853 	  pred_e = NULL;
4854 	  break;
4855 	}
4856     }
4857   if (pred_e)
4858     {
4859       /* Check if there are multiple executable successor edges in
4860 	 the source block.  Otherwise there is no additional info
4861 	 to be recorded.  */
4862       edge e2;
4863       FOR_EACH_EDGE (e2, ei, pred_e->src->succs)
4864 	if (e2 != pred_e
4865 	    && e2->flags & EDGE_EXECUTABLE)
4866 	  break;
4867       if (e2 && (e2->flags & EDGE_EXECUTABLE))
4868 	{
4869 	  gimple *stmt = last_stmt (pred_e->src);
4870 	  if (stmt
4871 	      && gimple_code (stmt) == GIMPLE_COND)
4872 	    {
4873 	      enum tree_code code = gimple_cond_code (stmt);
4874 	      tree lhs = gimple_cond_lhs (stmt);
4875 	      tree rhs = gimple_cond_rhs (stmt);
4876 	      record_conds (bb, code, lhs, rhs,
4877 			    (pred_e->flags & EDGE_TRUE_VALUE) != 0);
4878 	      code = invert_tree_comparison (code, HONOR_NANS (lhs));
4879 	      if (code != ERROR_MARK)
4880 		record_conds (bb, code, lhs, rhs,
4881 			      (pred_e->flags & EDGE_TRUE_VALUE) == 0);
4882 	    }
4883 	}
4884     }
4885 
4886   /* Value-number all defs in the basic-block.  */
4887   for (gphi_iterator gsi = gsi_start_phis (bb);
4888        !gsi_end_p (gsi); gsi_next (&gsi))
4889     {
4890       gphi *phi = gsi.phi ();
4891       tree res = PHI_RESULT (phi);
4892       if (!VN_INFO (res)->visited
4893 	  && !DFS (res))
4894 	{
4895 	  fail = true;
4896 	  return NULL;
4897 	}
4898     }
4899   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
4900        !gsi_end_p (gsi); gsi_next (&gsi))
4901     {
4902       ssa_op_iter i;
4903       tree op;
4904       FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
4905 	if (!VN_INFO (op)->visited
4906 	    && !DFS (op))
4907 	  {
4908 	    fail = true;
4909 	    return NULL;
4910 	  }
4911     }
4912 
4913   /* Finally look at the last stmt.  */
4914   gimple *stmt = last_stmt (bb);
4915   if (!stmt)
4916     return NULL;
4917 
4918   enum gimple_code code = gimple_code (stmt);
4919   if (code != GIMPLE_COND
4920       && code != GIMPLE_SWITCH
4921       && code != GIMPLE_GOTO)
4922     return NULL;
4923 
4924   if (dump_file && (dump_flags & TDF_DETAILS))
4925     {
4926       fprintf (dump_file, "Visiting control stmt ending BB %d: ", bb->index);
4927       print_gimple_stmt (dump_file, stmt, 0, 0);
4928     }
4929 
4930   /* ???  We can even handle stmts with outgoing EH or ABNORMAL edges
4931      if value-numbering can prove they are not reachable.  Handling
4932      computed gotos is also possible.  */
4933   tree val;
4934   switch (code)
4935     {
4936     case GIMPLE_COND:
4937       {
4938 	tree lhs = vn_valueize (gimple_cond_lhs (stmt));
4939 	tree rhs = vn_valueize (gimple_cond_rhs (stmt));
4940 	val = gimple_simplify (gimple_cond_code (stmt),
4941 			       boolean_type_node, lhs, rhs,
4942 			       NULL, vn_valueize);
4943 	/* If that didn't simplify to a constant see if we have recorded
4944 	   temporary expressions from taken edges.  */
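	/* E.g. (hypothetical) if a dominating block was entered with
	   a_1 < b_2  known true, record_conds made that lookup succeed,
	   so a repeated  if (a_1 < b_2)  here folds to true.  */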
4945 	if (!val || TREE_CODE (val) != INTEGER_CST)
4946 	  {
4947 	    tree ops[2];
4948 	    ops[0] = lhs;
4949 	    ops[1] = rhs;
4950 	    val = vn_nary_op_lookup_pieces (2, gimple_cond_code (stmt),
4951 					    boolean_type_node, ops, NULL);
4952 	  }
4953 	break;
4954       }
4955     case GIMPLE_SWITCH:
4956       val = gimple_switch_index (as_a <gswitch *> (stmt));
4957       break;
4958     case GIMPLE_GOTO:
4959       val = gimple_goto_dest (stmt);
4960       break;
4961     default:
4962       gcc_unreachable ();
4963     }
4964   if (!val)
4965     return NULL;
4966 
4967   edge taken = find_taken_edge (bb, vn_valueize (val));
4968   if (!taken)
4969     return NULL;
4970 
4971   if (dump_file && (dump_flags & TDF_DETAILS))
4972     fprintf (dump_file, "Marking all edges out of BB %d but (%d -> %d) as "
4973 	     "not executable\n", bb->index, bb->index, taken->dest->index);
4974 
4975   return taken;
4976 }
4977 
4978 /* Do SCCVN.  Returns true if it finished, false if we bailed out
4979    due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies
4980    how we use the alias oracle walking during the VN process.  */
4981 
4982 bool
4983 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
4984 {
4985   size_t i;
4986 
4987   default_vn_walk_kind = default_vn_walk_kind_;
4988 
4989   init_scc_vn ();
4990 
4991   /* Collect pointers we know point to readonly memory.  */
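  /* A hypothetical sketch: a "fn spec" attribute string whose character
     for an argument is 'R' or 'r' is taken here to mean the pointed-to
     memory is only read, so that argument's default def is recorded in
     const_parms below.  */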
4992   const_parms = BITMAP_ALLOC (NULL);
4993   tree fnspec = lookup_attribute ("fn spec",
4994 				  TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4995   if (fnspec)
4996     {
4997       fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4998       i = 1;
4999       for (tree arg = DECL_ARGUMENTS (cfun->decl);
5000 	   arg; arg = DECL_CHAIN (arg), ++i)
5001 	{
5002 	  if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
5003 	    break;
	  if (TREE_STRING_POINTER (fnspec)[i] == 'R'
	      || TREE_STRING_POINTER (fnspec)[i] == 'r')
5006 	    {
5007 	      tree name = ssa_default_def (cfun, arg);
5008 	      if (name)
5009 		bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
5010 	    }
5011 	}
5012     }
5013 
5014   /* Walk all blocks in dominator order, value-numbering stmts
5015      SSA defs and decide whether outgoing edges are not executable.  */
  sccvn_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  if (walker.fail)
    {
      scc_vn_restore_ssa_info ();
      free_scc_vn ();
      return false;
    }

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited)
	info->valnum = name;
      if (info->valnum == name
	  || info->valnum == VN_TOP)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name, 0);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return true;
}
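
/* A minimal caller sketch (hypothetical pass code, assuming SSA form is
   up to date; real callers such as FRE/PRE differ in details):

     if (!run_scc_vn (VN_WALK))
       return 0;
     ...query SSA_VAL () and the value-id machinery...
     free_scc_vn ();

   On failure run_scc_vn has already restored the SSA info and released
   its tables (see above), so the caller simply gives up.  */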

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}


/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
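
/* Purely as an illustration of the checks above: two distinct ADDR_EXPR
   trees taking the address of the same declaration compare equal via
   operand_equal_p, and VN_TOP compares equal to anything because it
   stands for a not-yet-visited value.  */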


/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv,
				       honor_nans, honor_snans, rhs2,
				       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
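
/* As an illustration (not exhaustive): a TRUNC_DIV_EXPR whose divisor is
   not a known nonzero constant is reported as possibly trapping by
   operation_could_trap_helper_p, and with -ftrapv the same holds for
   signed additive and multiplicative operations via the honor_trapv flag
   above.  */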

/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	/* The following are the address bases.  */
	case SSA_NAME:
	  return true;
	case ADDR_EXPR:
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}
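
/* By way of example only: a reference such as a[i_2] with a variable
   index is considered possibly trapping because the ARRAY_REF operand is
   an SSA name, while an access like s.f to a local aggregate at a
   constant offset falls through to its ADDR_EXPR base and is not
   trapping when tree_could_trap_p says the base cannot trap.  */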