1 /* SCC value numbering for trees
2    Copyright (C) 2006-2020 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "tree-ssa-sccvn.h"
74 
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight-line code,
   it is equivalent to a regular hash-based value numbering that is
   performed in reverse postorder.
80 
81    For code with cycles, there are two alternatives, both of which
82    require keeping the hashtables separate from the actual list of
83    value numbers for SSA names.
84 
   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until the value numbers do not change.
89 
90    2. Perform value numbering as part of an SCC walk on the SSA graph,
91    iterating only the cycles in the SSA graph until they do not change
92    (using a separate, optimistic hashtable for value numbering the SCC
93    operands).
94 
95    The second is not just faster in practice (because most SSA graph
96    cycles do not involve all the variables in the graph), it also has
97    some nice properties.
98 
99    One of these nice properties is that when we pop an SCC off the
100    stack, we are guaranteed to have processed all the operands coming from
101    *outside of that SCC*, so we do not need to do anything special to
102    ensure they have value numbers.
103 
104    Another nice property is that the SCC walk is done as part of a DFS
105    of the SSA graph, which makes it easy to perform combining and
106    simplifying operations at the same time.
107 
108    The code below is deliberately written in a way that makes it easy
109    to separate the SCC walk from the other work it does.
110 
111    In order to propagate constants through the code, we track which
112    expressions contain constants, and use those while folding.  In
113    theory, we could also track expressions whose value numbers are
114    replaced, in case we end up folding based on expression
115    identities.
116 
117    In order to value number memory, we assign value numbers to vuses.
118    This enables us to note that, for example, stores to the same
119    address of the same value from the same starting memory states are
120    equivalent.
121    TODO:
122 
   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
125    2. If you differentiate between phi nodes for loops and phi nodes
126    for if-then-else, you can properly consider phi nodes in different
127    blocks for equivalence.
   3. We could value number vuses in more cases, particularly whole
   structure copies.
130 */
131 
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
133 #define BB_EXECUTABLE BB_VISITED
134 
135 static vn_lookup_kind default_vn_walk_kind;
136 
137 /* vn_nary_op hashtable helpers.  */
138 
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 {
141   typedef vn_nary_op_s *compare_type;
142   static inline hashval_t hash (const vn_nary_op_s *);
143   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144 };
145 
/* Return the computed hashcode for nary operation VNO1.  */
147 
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 {
151   return vno1->hashcode;
152 }
153 
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */
156 
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 {
160   return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
161 }
162 
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165 
166 
167 /* vn_phi hashtable helpers.  */
168 
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173 {
174   static inline hashval_t hash (const vn_phi_s *);
175   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 };
177 
/* Return the computed hashcode for phi operation VP1.  */
179 
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
182 {
183   return vp1->hashcode;
184 }
185 
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
187 
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190 {
191   return vp1 == vp2 || vn_phi_eq (vp1, vp2);
192 }
193 
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 
197 
198 /* Compare two reference operands P1 and P2 for equality.  Return true if
199    they are equal, and false otherwise.  */
200 
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
203 {
204   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 
207   return (vro1->opcode == vro2->opcode
208 	  /* We do not care for differences in type qualification.  */
209 	  && (vro1->type == vro2->type
210 	      || (vro1->type && vro2->type
211 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 					 TYPE_MAIN_VARIANT (vro2->type))))
213 	  && expressions_equal_p (vro1->op0, vro2->op0)
214 	  && expressions_equal_p (vro1->op1, vro2->op1)
215 	  && expressions_equal_p (vro1->op2, vro2->op2));
216 }
217 
/* Free a reference operation structure VR.  */
219 
220 static inline void
221 free_reference (vn_reference_s *vr)
222 {
223   vr->operands.release ();
224 }
225 
226 
227 /* vn_reference hashtable helpers.  */
228 
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230 {
231   static inline hashval_t hash (const vn_reference_s *);
232   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
233 };
234 
/* Return the hashcode for a given reference operation VR1.  */
236 
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 {
240   return vr1->hashcode;
241 }
242 
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 {
246   return v == c || vn_reference_eq (v, c);
247 }
248 
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
251 
252 
253 /* The set of VN hashtables.  */
254 
255 typedef struct vn_tables_s
256 {
257   vn_nary_op_table_type *nary;
258   vn_phi_table_type *phis;
259   vn_reference_table_type *references;
260 } *vn_tables_t;
261 
262 
263 /* vn_constant hashtable helpers.  */
264 
265 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
266 {
267   static inline hashval_t hash (const vn_constant_s *);
268   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
269 };
270 
271 /* Hash table hash function for vn_constant_t.  */
272 
273 inline hashval_t
274 vn_constant_hasher::hash (const vn_constant_s *vc1)
275 {
276   return vc1->hashcode;
277 }
278 
279 /* Hash table equality function for vn_constant_t.  */
280 
281 inline bool
282 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
283 {
284   if (vc1->hashcode != vc2->hashcode)
285     return false;
286 
287   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
288 }
289 
290 static hash_table<vn_constant_hasher> *constant_to_value_id;
291 static bitmap constant_value_ids;
292 
293 
294 /* Obstack we allocate the vn-tables elements from.  */
295 static obstack vn_tables_obstack;
296 /* Special obstack we never unwind.  */
297 static obstack vn_tables_insert_obstack;
298 
299 static vn_reference_t last_inserted_ref;
300 static vn_phi_t last_inserted_phi;
301 static vn_nary_op_t last_inserted_nary;
302 
303 /* Valid hashtables storing information we have proven to be
304    correct.  */
305 static vn_tables_t valid_info;
306 
307 
308 /* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
309    just return it.  */
310 tree (*vn_valueize) (tree);
311 tree vn_valueize_wrapper (tree t, void* context ATTRIBUTE_UNUSED)
312 {
313   return vn_valueize (t);
314 }
315 
316 
317 /* This represents the top of the VN lattice, which is the universal
318    value.  */
319 
320 tree VN_TOP;
321 
322 /* Unique counter for our value ids.  */
323 
324 static unsigned int next_value_id;
325 
326 
327 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
328    are allocated on an obstack for locality reasons, and to free them
329    without looping over the vec.  */
330 
331 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
332 {
333   typedef vn_ssa_aux_t value_type;
334   typedef tree compare_type;
335   static inline hashval_t hash (const value_type &);
336   static inline bool equal (const value_type &, const compare_type &);
337   static inline void mark_deleted (value_type &) {}
338   static const bool empty_zero_p = true;
339   static inline void mark_empty (value_type &e) { e = NULL; }
340   static inline bool is_deleted (value_type &) { return false; }
341   static inline bool is_empty (value_type &e) { return e == NULL; }
342 };
343 
344 hashval_t
345 vn_ssa_aux_hasher::hash (const value_type &entry)
346 {
347   return SSA_NAME_VERSION (entry->name);
348 }
349 
350 bool
351 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
352 {
353   return name == entry->name;
354 }
355 
356 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
357 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
358 static struct obstack vn_ssa_aux_obstack;
359 
360 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
361 static unsigned int vn_nary_length_from_stmt (gimple *);
362 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
363 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
364 					    vn_nary_op_table_type *, bool);
365 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
366 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
367 					 enum tree_code, tree, tree *);
368 static tree vn_lookup_simplify_result (gimple_match_op *);
369 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
370 	  (tree, alias_set_type, alias_set_type, tree,
371 	   vec<vn_reference_op_s, va_heap>, tree);
372 
373 /* Return whether there is value numbering information for a given SSA name.  */
374 
375 bool
376 has_VN_INFO (tree name)
377 {
378   return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
379 }
380 
381 vn_ssa_aux_t
382 VN_INFO (tree name)
383 {
384   vn_ssa_aux_t *res
385     = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
386 					    INSERT);
387   if (*res != NULL)
388     return *res;
389 
390   vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
391   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
392   newinfo->name = name;
393   newinfo->valnum = VN_TOP;
394   /* We are using the visited flag to handle uses with defs not within the
395      region being value-numbered.  */
396   newinfo->visited = false;
397 
  /* Since we create the VN_INFOs on-demand, default definitions have to
     be initialized here to something other than VN_TOP.  */
400   if (SSA_NAME_IS_DEFAULT_DEF (name))
401     switch (TREE_CODE (SSA_NAME_VAR (name)))
402       {
403       case VAR_DECL:
404         /* All undefined vars are VARYING.  */
405         newinfo->valnum = name;
406 	newinfo->visited = true;
407 	break;
408 
409       case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know the parameter is a non-NULL pointer.  */
412 	newinfo->visited = true;
413 	newinfo->valnum = name;
414 	if (POINTER_TYPE_P (TREE_TYPE (name))
415 	    && nonnull_arg_p (SSA_NAME_VAR (name)))
416 	  {
417 	    tree ops[2];
418 	    ops[0] = name;
419 	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
420 	    vn_nary_op_t nary;
421 	    /* Allocate from non-unwinding stack.  */
422 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
423 	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
424 					 boolean_type_node, ops);
425 	    nary->predicated_values = 0;
426 	    nary->u.result = boolean_true_node;
427 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
428 	    gcc_assert (nary->unwind_to == NULL);
429 	    /* Also do not link it into the undo chain.  */
430 	    last_inserted_nary = nary->next;
431 	    nary->next = (vn_nary_op_t)(void *)-1;
432 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
433 	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
434 					 boolean_type_node, ops);
435 	    nary->predicated_values = 0;
436 	    nary->u.result = boolean_false_node;
437 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
438 	    gcc_assert (nary->unwind_to == NULL);
439 	    last_inserted_nary = nary->next;
440 	    nary->next = (vn_nary_op_t)(void *)-1;
441 	    if (dump_file && (dump_flags & TDF_DETAILS))
442 	      {
443 		fprintf (dump_file, "Recording ");
444 		print_generic_expr (dump_file, name, TDF_SLIM);
445 		fprintf (dump_file, " != 0\n");
446 	      }
447 	  }
448 	break;
449 
450       case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  In both
	   cases it is VARYING.  */
454 	newinfo->visited = true;
455 	newinfo->valnum = name;
456 	break;
457 
458       default:
459 	gcc_unreachable ();
460       }
461   return newinfo;
462 }
463 
464 /* Return the SSA value of X.  */
465 
466 inline tree
467 SSA_VAL (tree x, bool *visited = NULL)
468 {
469   vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
470   if (visited)
471     *visited = tem && tem->visited;
472   return tem && tem->visited ? tem->valnum : x;
473 }
474 
/* Return the SSA value of the VUSE X, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */
478 
479 static inline tree
480 vuse_ssa_val (tree x)
481 {
482   if (!x)
483     return NULL_TREE;
484 
485   do
486     {
487       x = SSA_VAL (x);
488       gcc_assert (x != VN_TOP);
489     }
490   while (SSA_NAME_IN_FREE_LIST (x));
491 
492   return x;
493 }
494 
/* Similar to the above but used as a callback for walk_non_aliased_vuses
   and thus should stop at an unvisited VUSE so as not to walk across
   region boundaries.  */
498 
499 static tree
500 vuse_valueize (tree vuse)
501 {
502   do
503     {
504       bool visited;
505       vuse = SSA_VAL (vuse, &visited);
506       if (!visited)
507 	return NULL_TREE;
508       gcc_assert (vuse != VN_TOP);
509     }
510   while (SSA_NAME_IN_FREE_LIST (vuse));
511   return vuse;
512 }
513 
514 
515 /* Return the vn_kind the expression computed by the stmt should be
516    associated with.  */
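
/* For example (purely illustrative): an assignment _1 = a_2 + b_3 is
   VN_NARY, a load _1 = *p_2 and a GIMPLE_CALL are VN_REFERENCE, and a
   GIMPLE_PHI is VN_PHI.  */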
517 
518 enum vn_kind
519 vn_get_stmt_kind (gimple *stmt)
520 {
521   switch (gimple_code (stmt))
522     {
523     case GIMPLE_CALL:
524       return VN_REFERENCE;
525     case GIMPLE_PHI:
526       return VN_PHI;
527     case GIMPLE_ASSIGN:
528       {
529 	enum tree_code code = gimple_assign_rhs_code (stmt);
530 	tree rhs1 = gimple_assign_rhs1 (stmt);
531 	switch (get_gimple_rhs_class (code))
532 	  {
533 	  case GIMPLE_UNARY_RHS:
534 	  case GIMPLE_BINARY_RHS:
535 	  case GIMPLE_TERNARY_RHS:
536 	    return VN_NARY;
537 	  case GIMPLE_SINGLE_RHS:
538 	    switch (TREE_CODE_CLASS (code))
539 	      {
540 	      case tcc_reference:
541 		/* VOP-less references can go through unary case.  */
542 		if ((code == REALPART_EXPR
543 		     || code == IMAGPART_EXPR
544 		     || code == VIEW_CONVERT_EXPR
545 		     || code == BIT_FIELD_REF)
546 		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
547 			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
548 		  return VN_NARY;
549 
550 		/* Fallthrough.  */
551 	      case tcc_declaration:
552 		return VN_REFERENCE;
553 
554 	      case tcc_constant:
555 		return VN_CONSTANT;
556 
557 	      default:
558 		if (code == ADDR_EXPR)
559 		  return (is_gimple_min_invariant (rhs1)
560 			  ? VN_CONSTANT : VN_REFERENCE);
561 		else if (code == CONSTRUCTOR)
562 		  return VN_NARY;
563 		return VN_NONE;
564 	      }
565 	  default:
566 	    return VN_NONE;
567 	  }
568       }
569     default:
570       return VN_NONE;
571     }
572 }
573 
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */
576 
577 unsigned int
578 get_constant_value_id (tree constant)
579 {
580   vn_constant_s **slot;
581   struct vn_constant_s vc;
582 
583   vc.hashcode = vn_hash_constant_with_type (constant);
584   vc.constant = constant;
585   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
586   if (slot)
587     return (*slot)->value_id;
588   return 0;
589 }
590 
/* Lookup a value id for CONSTANT and return it, creating a new one
   if it does not exist yet.  */
593 
594 unsigned int
595 get_or_alloc_constant_value_id (tree constant)
596 {
597   vn_constant_s **slot;
598   struct vn_constant_s vc;
599   vn_constant_t vcp;
600 
601   /* If the hashtable isn't initialized we're not running from PRE and thus
602      do not need value-ids.  */
603   if (!constant_to_value_id)
604     return 0;
605 
606   vc.hashcode = vn_hash_constant_with_type (constant);
607   vc.constant = constant;
608   slot = constant_to_value_id->find_slot (&vc, INSERT);
609   if (*slot)
610     return (*slot)->value_id;
611 
612   vcp = XNEW (struct vn_constant_s);
613   vcp->hashcode = vc.hashcode;
614   vcp->constant = constant;
615   vcp->value_id = get_next_value_id ();
616   *slot = vcp;
617   bitmap_set_bit (constant_value_ids, vcp->value_id);
618   return vcp->value_id;
619 }
620 
621 /* Return true if V is a value id for a constant.  */
622 
623 bool
624 value_id_constant_p (unsigned int v)
625 {
626   return bitmap_bit_p (constant_value_ids, v);
627 }
628 
629 /* Compute the hash for a reference operand VRO1.  */
630 
631 static void
632 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
633 {
634   hstate.add_int (vro1->opcode);
635   if (vro1->op0)
636     inchash::add_expr (vro1->op0, hstate);
637   if (vro1->op1)
638     inchash::add_expr (vro1->op1, hstate);
639   if (vro1->op2)
640     inchash::add_expr (vro1->op2, hstate);
641 }
642 
643 /* Compute a hash for the reference operation VR1 and return it.  */
644 
645 static hashval_t
646 vn_reference_compute_hash (const vn_reference_t vr1)
647 {
648   inchash::hash hstate;
649   hashval_t result;
650   int i;
651   vn_reference_op_t vro;
652   poly_int64 off = -1;
653   bool deref = false;
654 
655   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
656     {
657       if (vro->opcode == MEM_REF)
658 	deref = true;
659       else if (vro->opcode != ADDR_EXPR)
660 	deref = false;
661       if (maybe_ne (vro->off, -1))
662 	{
663 	  if (known_eq (off, -1))
664 	    off = 0;
665 	  off += vro->off;
666 	}
667       else
668 	{
669 	  if (maybe_ne (off, -1)
670 	      && maybe_ne (off, 0))
671 	    hstate.add_poly_int (off);
672 	  off = -1;
673 	  if (deref
674 	      && vro->opcode == ADDR_EXPR)
675 	    {
676 	      if (vro->op0)
677 		{
678 		  tree op = TREE_OPERAND (vro->op0, 0);
679 		  hstate.add_int (TREE_CODE (op));
680 		  inchash::add_expr (op, hstate);
681 		}
682 	    }
683 	  else
684 	    vn_reference_op_compute_hash (vro, hstate);
685 	}
686     }
687   result = hstate.end ();
688   /* ??? We would ICE later if we hash instead of adding that in. */
689   if (vr1->vuse)
690     result += SSA_NAME_VERSION (vr1->vuse);
691 
692   return result;
693 }
694 
695 /* Return true if reference operations VR1 and VR2 are equivalent.  This
696    means they have the same set of operands and vuses.  */
697 
698 bool
699 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
700 {
701   unsigned i, j;
702 
703   /* Early out if this is not a hash collision.  */
704   if (vr1->hashcode != vr2->hashcode)
705     return false;
706 
707   /* The VOP needs to be the same.  */
708   if (vr1->vuse != vr2->vuse)
709     return false;
710 
711   /* If the operands are the same we are done.  */
712   if (vr1->operands == vr2->operands)
713     return true;
714 
715   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
716     return false;
717 
718   if (INTEGRAL_TYPE_P (vr1->type)
719       && INTEGRAL_TYPE_P (vr2->type))
720     {
721       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
722 	return false;
723     }
724   else if (INTEGRAL_TYPE_P (vr1->type)
725 	   && (TYPE_PRECISION (vr1->type)
726 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
727     return false;
728   else if (INTEGRAL_TYPE_P (vr2->type)
729 	   && (TYPE_PRECISION (vr2->type)
730 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
731     return false;
732 
733   i = 0;
734   j = 0;
735   do
736     {
737       poly_int64 off1 = 0, off2 = 0;
738       vn_reference_op_t vro1, vro2;
739       vn_reference_op_s tem1, tem2;
740       bool deref1 = false, deref2 = false;
741       bool reverse1 = false, reverse2 = false;
742       for (; vr1->operands.iterate (i, &vro1); i++)
743 	{
744 	  if (vro1->opcode == MEM_REF)
745 	    deref1 = true;
746 	  /* Do not look through a storage order barrier.  */
747 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
748 	    return false;
749 	  reverse1 |= vro1->reverse;
750 	  if (known_eq (vro1->off, -1))
751 	    break;
752 	  off1 += vro1->off;
753 	}
754       for (; vr2->operands.iterate (j, &vro2); j++)
755 	{
756 	  if (vro2->opcode == MEM_REF)
757 	    deref2 = true;
758 	  /* Do not look through a storage order barrier.  */
759 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
760 	    return false;
761 	  reverse2 |= vro2->reverse;
762 	  if (known_eq (vro2->off, -1))
763 	    break;
764 	  off2 += vro2->off;
765 	}
766       if (maybe_ne (off1, off2) || reverse1 != reverse2)
767 	return false;
768       if (deref1 && vro1->opcode == ADDR_EXPR)
769 	{
770 	  memset (&tem1, 0, sizeof (tem1));
771 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
772 	  tem1.type = TREE_TYPE (tem1.op0);
773 	  tem1.opcode = TREE_CODE (tem1.op0);
774 	  vro1 = &tem1;
775 	  deref1 = false;
776 	}
777       if (deref2 && vro2->opcode == ADDR_EXPR)
778 	{
779 	  memset (&tem2, 0, sizeof (tem2));
780 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
781 	  tem2.type = TREE_TYPE (tem2.op0);
782 	  tem2.opcode = TREE_CODE (tem2.op0);
783 	  vro2 = &tem2;
784 	  deref2 = false;
785 	}
786       if (deref1 != deref2)
787 	return false;
788       if (!vn_reference_op_eq (vro1, vro2))
789 	return false;
790       ++j;
791       ++i;
792     }
793   while (vr1->operands.length () != i
794 	 || vr2->operands.length () != j);
795 
796   return true;
797 }
798 
799 /* Copy the operations present in load/store REF into RESULT, a vector of
800    vn_reference_op_s's.  */
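
/* For example (illustrative only, most fields elided), a reference
   a.f[i_1] to a VAR_DECL a is decomposed outermost-first into

     { ARRAY_REF (index i_1), COMPONENT_REF (field f),
       MEM_REF (offset 0), ADDR_EXPR (&a) }

   where the decl itself is canonicalized into the trailing
   MEM_REF/ADDR_EXPR pair, see below.  */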
801 
802 static void
803 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
804 {
805   /* For non-calls, store the information that makes up the address.  */
806   tree orig = ref;
807   while (ref)
808     {
809       vn_reference_op_s temp;
810 
811       memset (&temp, 0, sizeof (temp));
812       temp.type = TREE_TYPE (ref);
813       temp.opcode = TREE_CODE (ref);
814       temp.off = -1;
815 
816       switch (temp.opcode)
817 	{
818 	case MODIFY_EXPR:
819 	  temp.op0 = TREE_OPERAND (ref, 1);
820 	  break;
821 	case WITH_SIZE_EXPR:
822 	  temp.op0 = TREE_OPERAND (ref, 1);
823 	  temp.off = 0;
824 	  break;
825 	case MEM_REF:
826 	  /* The base address gets its own vn_reference_op_s structure.  */
827 	  temp.op0 = TREE_OPERAND (ref, 1);
828 	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
829 	    temp.off = -1;
830 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
831 	  temp.base = MR_DEPENDENCE_BASE (ref);
832 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
833 	  break;
834 	case TARGET_MEM_REF:
835 	  /* The base address gets its own vn_reference_op_s structure.  */
836 	  temp.op0 = TMR_INDEX (ref);
837 	  temp.op1 = TMR_STEP (ref);
838 	  temp.op2 = TMR_OFFSET (ref);
839 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
840 	  temp.base = MR_DEPENDENCE_BASE (ref);
841 	  result->safe_push (temp);
842 	  memset (&temp, 0, sizeof (temp));
843 	  temp.type = NULL_TREE;
844 	  temp.opcode = ERROR_MARK;
845 	  temp.op0 = TMR_INDEX2 (ref);
846 	  temp.off = -1;
847 	  break;
848 	case BIT_FIELD_REF:
849 	  /* Record bits, position and storage order.  */
850 	  temp.op0 = TREE_OPERAND (ref, 1);
851 	  temp.op1 = TREE_OPERAND (ref, 2);
852 	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
853 	    temp.off = -1;
854 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
855 	  break;
856 	case COMPONENT_REF:
857 	  /* The field decl is enough to unambiguously specify the field,
858 	     a matching type is not necessary and a mismatching type
859 	     is always a spurious difference.  */
860 	  temp.type = NULL_TREE;
861 	  temp.op0 = TREE_OPERAND (ref, 1);
862 	  temp.op1 = TREE_OPERAND (ref, 2);
863 	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
864 			  && TYPE_REVERSE_STORAGE_ORDER
865 			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
866 	  {
867 	    tree this_offset = component_ref_field_offset (ref);
868 	    if (this_offset
869 		&& poly_int_tree_p (this_offset))
870 	      {
871 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
872 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
873 		  {
874 		    poly_offset_int off
875 		      = (wi::to_poly_offset (this_offset)
876 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
882 		    if (TREE_CODE (orig) != ADDR_EXPR
883 			|| maybe_ne (off, 0)
884 			|| cfun->after_inlining)
885 		      off.to_shwi (&temp.off);
886 		  }
887 	      }
888 	  }
889 	  break;
890 	case ARRAY_RANGE_REF:
891 	case ARRAY_REF:
892 	  {
893 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
894 	    /* Record index as operand.  */
895 	    temp.op0 = TREE_OPERAND (ref, 1);
896 	    /* Always record lower bounds and element size.  */
897 	    temp.op1 = array_ref_low_bound (ref);
898 	    /* But record element size in units of the type alignment.  */
899 	    temp.op2 = TREE_OPERAND (ref, 3);
900 	    temp.align = eltype->type_common.align;
901 	    if (! temp.op2)
902 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
903 				     size_int (TYPE_ALIGN_UNIT (eltype)));
904 	    if (poly_int_tree_p (temp.op0)
905 		&& poly_int_tree_p (temp.op1)
906 		&& TREE_CODE (temp.op2) == INTEGER_CST)
907 	      {
908 		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
909 					- wi::to_poly_offset (temp.op1))
910 				       * wi::to_offset (temp.op2)
911 				       * vn_ref_op_align_unit (&temp));
912 		off.to_shwi (&temp.off);
913 	      }
914 	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
915 			    && TYPE_REVERSE_STORAGE_ORDER
916 				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
917 	  }
918 	  break;
919 	case VAR_DECL:
920 	  if (DECL_HARD_REGISTER (ref))
921 	    {
922 	      temp.op0 = ref;
923 	      break;
924 	    }
925 	  /* Fallthru.  */
926 	case PARM_DECL:
927 	case CONST_DECL:
928 	case RESULT_DECL:
929 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
930 	     when valueizing MEM[ptr] with ptr = &decl.  */
931 	  temp.opcode = MEM_REF;
932 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
933 	  temp.off = 0;
934 	  result->safe_push (temp);
935 	  temp.opcode = ADDR_EXPR;
936 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
937 	  temp.type = TREE_TYPE (temp.op0);
938 	  temp.off = -1;
939 	  break;
940 	case STRING_CST:
941 	case INTEGER_CST:
942 	case POLY_INT_CST:
943 	case COMPLEX_CST:
944 	case VECTOR_CST:
945 	case REAL_CST:
946 	case FIXED_CST:
947 	case CONSTRUCTOR:
948 	case SSA_NAME:
949 	  temp.op0 = ref;
950 	  break;
951 	case ADDR_EXPR:
952 	  if (is_gimple_min_invariant (ref))
953 	    {
954 	      temp.op0 = ref;
955 	      break;
956 	    }
957 	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything for op* as it
	     will be handled by the iteration.  */
963 	case REALPART_EXPR:
964 	  temp.off = 0;
965 	  break;
966 	case VIEW_CONVERT_EXPR:
967 	  temp.off = 0;
968 	  temp.reverse = storage_order_barrier_p (ref);
969 	  break;
970 	case IMAGPART_EXPR:
971 	  /* This is only interesting for its constant offset.  */
972 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
973 	  break;
974 	default:
975 	  gcc_unreachable ();
976 	}
977       result->safe_push (temp);
978 
979       if (REFERENCE_CLASS_P (ref)
980 	  || TREE_CODE (ref) == MODIFY_EXPR
981 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
982 	  || (TREE_CODE (ref) == ADDR_EXPR
983 	      && !is_gimple_min_invariant (ref)))
984 	ref = TREE_OPERAND (ref, 0);
985       else
986 	ref = NULL_TREE;
987     }
988 }
989 
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in OPS, the reference alias set SET, the base alias set BASE_SET
   and the reference type TYPE.  Return true if something useful was
   produced.  */
993 
994 bool
995 ao_ref_init_from_vn_reference (ao_ref *ref,
996 			       alias_set_type set, alias_set_type base_set,
997 			       tree type, vec<vn_reference_op_s> ops)
998 {
999   vn_reference_op_t op;
1000   unsigned i;
1001   tree base = NULL_TREE;
1002   tree *op0_p = &base;
1003   poly_offset_int offset = 0;
1004   poly_offset_int max_size;
1005   poly_offset_int size = -1;
1006   tree size_tree = NULL_TREE;
1007 
1008   machine_mode mode = TYPE_MODE (type);
1009   if (mode == BLKmode)
1010     size_tree = TYPE_SIZE (type);
1011   else
1012     size = GET_MODE_BITSIZE (mode);
1013   if (size_tree != NULL_TREE
1014       && poly_int_tree_p (size_tree))
1015     size = wi::to_poly_offset (size_tree);
1016 
1017   /* Lower the final access size from the outermost expression.  */
1018   op = &ops[0];
1019   size_tree = NULL_TREE;
1020   if (op->opcode == COMPONENT_REF)
1021     size_tree = DECL_SIZE (op->op0);
1022   else if (op->opcode == BIT_FIELD_REF)
1023     size_tree = op->op0;
1024   if (size_tree != NULL_TREE
1025       && poly_int_tree_p (size_tree)
1026       && (!known_size_p (size)
1027 	  || known_lt (wi::to_poly_offset (size_tree), size)))
1028     size = wi::to_poly_offset (size_tree);
1029 
1030   /* Initially, maxsize is the same as the accessed element size.
1031      In the following it will only grow (or become -1).  */
1032   max_size = size;
1033 
1034   /* Compute cumulative bit-offset for nested component-refs and array-refs,
1035      and find the ultimate containing object.  */
1036   FOR_EACH_VEC_ELT (ops, i, op)
1037     {
1038       switch (op->opcode)
1039 	{
1040 	/* These may be in the reference ops, but we cannot do anything
1041 	   sensible with them here.  */
1042 	case ADDR_EXPR:
1043 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
1044 	  if (base != NULL_TREE
1045 	      && TREE_CODE (base) == MEM_REF
1046 	      && op->op0
1047 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
1048 	    {
1049 	      vn_reference_op_t pop = &ops[i-1];
1050 	      base = TREE_OPERAND (op->op0, 0);
1051 	      if (known_eq (pop->off, -1))
1052 		{
1053 		  max_size = -1;
1054 		  offset = 0;
1055 		}
1056 	      else
1057 		offset += pop->off * BITS_PER_UNIT;
1058 	      op0_p = NULL;
1059 	      break;
1060 	    }
1061 	  /* Fallthru.  */
1062 	case CALL_EXPR:
1063 	  return false;
1064 
1065 	/* Record the base objects.  */
1066 	case MEM_REF:
1067 	  *op0_p = build2 (MEM_REF, op->type,
1068 			   NULL_TREE, op->op0);
1069 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1070 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
1071 	  op0_p = &TREE_OPERAND (*op0_p, 0);
1072 	  break;
1073 
1074 	case VAR_DECL:
1075 	case PARM_DECL:
1076 	case RESULT_DECL:
1077 	case SSA_NAME:
1078 	  *op0_p = op->op0;
1079 	  op0_p = NULL;
1080 	  break;
1081 
1082 	/* And now the usual component-reference style ops.  */
1083 	case BIT_FIELD_REF:
1084 	  offset += wi::to_poly_offset (op->op1);
1085 	  break;
1086 
1087 	case COMPONENT_REF:
1088 	  {
1089 	    tree field = op->op0;
1090 	    /* We do not have a complete COMPONENT_REF tree here so we
1091 	       cannot use component_ref_field_offset.  Do the interesting
1092 	       parts manually.  */
1093 	    tree this_offset = DECL_FIELD_OFFSET (field);
1094 
1095 	    if (op->op1 || !poly_int_tree_p (this_offset))
1096 	      max_size = -1;
1097 	    else
1098 	      {
1099 		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1100 					   << LOG2_BITS_PER_UNIT);
1101 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1102 		offset += woffset;
1103 	      }
1104 	    break;
1105 	  }
1106 
1107 	case ARRAY_RANGE_REF:
1108 	case ARRAY_REF:
1109 	  /* We recorded the lower bound and the element size.  */
1110 	  if (!poly_int_tree_p (op->op0)
1111 	      || !poly_int_tree_p (op->op1)
1112 	      || TREE_CODE (op->op2) != INTEGER_CST)
1113 	    max_size = -1;
1114 	  else
1115 	    {
1116 	      poly_offset_int woffset
1117 		= wi::sext (wi::to_poly_offset (op->op0)
1118 			    - wi::to_poly_offset (op->op1),
1119 			    TYPE_PRECISION (sizetype));
1120 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1121 	      woffset <<= LOG2_BITS_PER_UNIT;
1122 	      offset += woffset;
1123 	    }
1124 	  break;
1125 
1126 	case REALPART_EXPR:
1127 	  break;
1128 
1129 	case IMAGPART_EXPR:
1130 	  offset += size;
1131 	  break;
1132 
1133 	case VIEW_CONVERT_EXPR:
1134 	  break;
1135 
1136 	case STRING_CST:
1137 	case INTEGER_CST:
1138 	case COMPLEX_CST:
1139 	case VECTOR_CST:
1140 	case REAL_CST:
1141 	case CONSTRUCTOR:
1142 	case CONST_DECL:
1143 	  return false;
1144 
1145 	default:
1146 	  return false;
1147 	}
1148     }
1149 
1150   if (base == NULL_TREE)
1151     return false;
1152 
1153   ref->ref = NULL_TREE;
1154   ref->base = base;
1155   ref->ref_alias_set = set;
1156   ref->base_alias_set = base_set;
1157   /* We discount volatiles from value-numbering elsewhere.  */
1158   ref->volatile_p = false;
1159 
1160   if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1161     {
1162       ref->offset = 0;
1163       ref->size = -1;
1164       ref->max_size = -1;
1165       return true;
1166     }
1167 
1168   if (!offset.to_shwi (&ref->offset))
1169     {
1170       ref->offset = 0;
1171       ref->max_size = -1;
1172       return true;
1173     }
1174 
1175   if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1176     ref->max_size = -1;
1177 
1178   return true;
1179 }
1180 
1181 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1182    vn_reference_op_s's.  */
1183 
1184 static void
1185 copy_reference_ops_from_call (gcall *call,
1186 			      vec<vn_reference_op_s> *result)
1187 {
1188   vn_reference_op_s temp;
1189   unsigned i;
1190   tree lhs = gimple_call_lhs (call);
1191   int lr;
1192 
  /* If two calls have a different non-SSA LHS, their vdef value numbers
     should be different.  By adding the LHS here to the vector, we ensure
     that the hashcode is different, guaranteeing a different value
     number.  */
1196   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1197     {
1198       memset (&temp, 0, sizeof (temp));
1199       temp.opcode = MODIFY_EXPR;
1200       temp.type = TREE_TYPE (lhs);
1201       temp.op0 = lhs;
1202       temp.off = -1;
1203       result->safe_push (temp);
1204     }
1205 
1206   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1207   memset (&temp, 0, sizeof (temp));
1208   temp.type = gimple_call_fntype (call);
1209   temp.opcode = CALL_EXPR;
1210   temp.op0 = gimple_call_fn (call);
1211   temp.op1 = gimple_call_chain (call);
1212   if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1213     temp.op2 = size_int (lr);
1214   temp.off = -1;
1215   result->safe_push (temp);
1216 
1217   /* Copy the call arguments.  As they can be references as well,
1218      just chain them together.  */
1219   for (i = 0; i < gimple_call_num_args (call); ++i)
1220     {
1221       tree callarg = gimple_call_arg (call, i);
1222       copy_reference_ops_from_ref (callarg, result);
1223     }
1224 }
1225 
1226 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1227    *I_P to point to the last element of the replacement.  */
1228 static bool
1229 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1230 			    unsigned int *i_p)
1231 {
1232   unsigned int i = *i_p;
1233   vn_reference_op_t op = &(*ops)[i];
1234   vn_reference_op_t mem_op = &(*ops)[i - 1];
1235   tree addr_base;
1236   poly_int64 addr_offset = 0;
1237 
1238   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1239      from .foo.bar to the preceding MEM_REF offset and replace the
1240      address with &OBJ.  */
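  /* For instance (with made-up numbers), the adjacent operands
     { MEM_REF (offset 8), ADDR_EXPR (&s.f) } where field f is at byte
     offset 4 become { MEM_REF (offset 12), ADDR_EXPR (&s) }.  */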
1241   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1242 					     &addr_offset);
1243   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1244   if (addr_base != TREE_OPERAND (op->op0, 0))
1245     {
1246       poly_offset_int off
1247 	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1248 				  SIGNED)
1249 	   + addr_offset);
1250       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1251       op->op0 = build_fold_addr_expr (addr_base);
1252       if (tree_fits_shwi_p (mem_op->op0))
1253 	mem_op->off = tree_to_shwi (mem_op->op0);
1254       else
1255 	mem_op->off = -1;
1256       return true;
1257     }
1258   return false;
1259 }
1260 
1261 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1262    *I_P to point to the last element of the replacement.  */
1263 static bool
1264 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1265 				     unsigned int *i_p)
1266 {
1267   bool changed = false;
1268   vn_reference_op_t op;
1269 
1270   do
1271     {
1272       unsigned int i = *i_p;
1273       op = &(*ops)[i];
1274       vn_reference_op_t mem_op = &(*ops)[i - 1];
1275       gimple *def_stmt;
1276       enum tree_code code;
1277       poly_offset_int off;
1278 
1279       def_stmt = SSA_NAME_DEF_STMT (op->op0);
1280       if (!is_gimple_assign (def_stmt))
1281 	return changed;
1282 
1283       code = gimple_assign_rhs_code (def_stmt);
1284       if (code != ADDR_EXPR
1285 	  && code != POINTER_PLUS_EXPR)
1286 	return changed;
1287 
1288       off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1289 
1290       /* The only thing we have to do is from &OBJ.foo.bar add the offset
1291 	 from .foo.bar to the preceding MEM_REF offset and replace the
1292 	 address with &OBJ.  */
1293       if (code == ADDR_EXPR)
1294 	{
1295 	  tree addr, addr_base;
1296 	  poly_int64 addr_offset;
1297 
1298 	  addr = gimple_assign_rhs1 (def_stmt);
1299 	  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1300 						     &addr_offset);
	  /* If that didn't work because the address isn't invariant,
	     propagate the reference tree from the address operation in
	     case the current dereference isn't offsetted.  */
1304 	  if (!addr_base
1305 	      && *i_p == ops->length () - 1
1306 	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might also be used for code insertion,
		 which is invalid.  */
1310 	      && default_vn_walk_kind == VN_WALKREWRITE)
1311 	    {
1312 	      auto_vec<vn_reference_op_s, 32> tem;
1313 	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1314 	      /* Make sure to preserve TBAA info.  The only objects not
1315 		 wrapped in MEM_REFs that can have their address taken are
1316 		 STRING_CSTs.  */
1317 	      if (tem.length () >= 2
1318 		  && tem[tem.length () - 2].opcode == MEM_REF)
1319 		{
1320 		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1321 		  new_mem_op->op0
1322 		      = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1323 					  wi::to_poly_wide (new_mem_op->op0));
1324 		}
1325 	      else
1326 		gcc_assert (tem.last ().opcode == STRING_CST);
1327 	      ops->pop ();
1328 	      ops->pop ();
1329 	      ops->safe_splice (tem);
1330 	      --*i_p;
1331 	      return true;
1332 	    }
1333 	  if (!addr_base
1334 	      || TREE_CODE (addr_base) != MEM_REF
1335 	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1336 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1337 								    0))))
1338 	    return changed;
1339 
1340 	  off += addr_offset;
1341 	  off += mem_ref_offset (addr_base);
1342 	  op->op0 = TREE_OPERAND (addr_base, 0);
1343 	}
1344       else
1345 	{
1346 	  tree ptr, ptroff;
1347 	  ptr = gimple_assign_rhs1 (def_stmt);
1348 	  ptroff = gimple_assign_rhs2 (def_stmt);
1349 	  if (TREE_CODE (ptr) != SSA_NAME
1350 	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure not to recurse endlessly.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  This can
		 easily happen when we value-number a PHI to its backedge
		 value.  */
1354 	      || SSA_VAL (ptr) == op->op0
1355 	      || !poly_int_tree_p (ptroff))
1356 	    return changed;
1357 
1358 	  off += wi::to_poly_offset (ptroff);
1359 	  op->op0 = ptr;
1360 	}
1361 
1362       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1363       if (tree_fits_shwi_p (mem_op->op0))
1364 	mem_op->off = tree_to_shwi (mem_op->op0);
1365       else
1366 	mem_op->off = -1;
1367       /* ???  Can end up with endless recursion here!?
1368 	 gcc.c-torture/execute/strcmp-1.c  */
1369       if (TREE_CODE (op->op0) == SSA_NAME)
1370 	op->op0 = SSA_VAL (op->op0);
1371       if (TREE_CODE (op->op0) != SSA_NAME)
1372 	op->opcode = TREE_CODE (op->op0);
1373 
1374       changed = true;
1375     }
1376   /* Tail-recurse.  */
1377   while (TREE_CODE (op->op0) == SSA_NAME);
1378 
1379   /* Fold a remaining *&.  */
1380   if (TREE_CODE (op->op0) == ADDR_EXPR)
1381     vn_reference_fold_indirect (ops, i_p);
1382 
1383   return changed;
1384 }
1385 
1386 /* Optimize the reference REF to a constant if possible or return
1387    NULL_TREE if not.  */
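
/* E.g. (illustrative) a load of a[1] from

     static const int a[2] = { 1, 2 };

   folds to the constant 2, and a builtin call with constant arguments
   may fold to a constant as well.  */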
1388 
1389 tree
1390 fully_constant_vn_reference_p (vn_reference_t ref)
1391 {
1392   vec<vn_reference_op_s> operands = ref->operands;
1393   vn_reference_op_t op;
1394 
1395   /* Try to simplify the translated expression if it is
1396      a call to a builtin function with at most two arguments.  */
1397   op = &operands[0];
1398   if (op->opcode == CALL_EXPR
1399       && TREE_CODE (op->op0) == ADDR_EXPR
1400       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1401       && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1402       && operands.length () >= 2
1403       && operands.length () <= 3)
1404     {
1405       vn_reference_op_t arg0, arg1 = NULL;
1406       bool anyconst = false;
1407       arg0 = &operands[1];
1408       if (operands.length () > 2)
1409 	arg1 = &operands[2];
1410       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1411 	  || (arg0->opcode == ADDR_EXPR
1412 	      && is_gimple_min_invariant (arg0->op0)))
1413 	anyconst = true;
1414       if (arg1
1415 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1416 	      || (arg1->opcode == ADDR_EXPR
1417 		  && is_gimple_min_invariant (arg1->op0))))
1418 	anyconst = true;
1419       if (anyconst)
1420 	{
1421 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1422 					 arg1 ? 2 : 1,
1423 					 arg0->op0,
1424 					 arg1 ? arg1->op0 : NULL);
1425 	  if (folded
1426 	      && TREE_CODE (folded) == NOP_EXPR)
1427 	    folded = TREE_OPERAND (folded, 0);
1428 	  if (folded
1429 	      && is_gimple_min_invariant (folded))
1430 	    return folded;
1431 	}
1432     }
1433 
1434   /* Simplify reads from constants or constant initializers.  */
1435   else if (BITS_PER_UNIT == 8
1436 	   && COMPLETE_TYPE_P (ref->type)
1437 	   && is_gimple_reg_type (ref->type))
1438     {
1439       poly_int64 off = 0;
1440       HOST_WIDE_INT size;
1441       if (INTEGRAL_TYPE_P (ref->type))
1442 	size = TYPE_PRECISION (ref->type);
1443       else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1444 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1445       else
1446 	return NULL_TREE;
1447       if (size % BITS_PER_UNIT != 0
1448 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1449 	return NULL_TREE;
1450       size /= BITS_PER_UNIT;
1451       unsigned i;
1452       for (i = 0; i < operands.length (); ++i)
1453 	{
1454 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1455 	    {
1456 	      ++i;
1457 	      break;
1458 	    }
1459 	  if (known_eq (operands[i].off, -1))
1460 	    return NULL_TREE;
1461 	  off += operands[i].off;
1462 	  if (operands[i].opcode == MEM_REF)
1463 	    {
1464 	      ++i;
1465 	      break;
1466 	    }
1467 	}
1468       vn_reference_op_t base = &operands[--i];
1469       tree ctor = error_mark_node;
1470       tree decl = NULL_TREE;
1471       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1472 	ctor = base->op0;
1473       else if (base->opcode == MEM_REF
1474 	       && base[1].opcode == ADDR_EXPR
1475 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1476 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1477 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1478 	{
1479 	  decl = TREE_OPERAND (base[1].op0, 0);
1480 	  if (TREE_CODE (decl) == STRING_CST)
1481 	    ctor = decl;
1482 	  else
1483 	    ctor = ctor_for_folding (decl);
1484 	}
1485       if (ctor == NULL_TREE)
1486 	return build_zero_cst (ref->type);
1487       else if (ctor != error_mark_node)
1488 	{
1489 	  HOST_WIDE_INT const_off;
1490 	  if (decl)
1491 	    {
1492 	      tree res = fold_ctor_reference (ref->type, ctor,
1493 					      off * BITS_PER_UNIT,
1494 					      size * BITS_PER_UNIT, decl);
1495 	      if (res)
1496 		{
1497 		  STRIP_USELESS_TYPE_CONVERSION (res);
1498 		  if (is_gimple_min_invariant (res))
1499 		    return res;
1500 		}
1501 	    }
1502 	  else if (off.is_constant (&const_off))
1503 	    {
1504 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1505 	      int len = native_encode_expr (ctor, buf, size, const_off);
1506 	      if (len > 0)
1507 		return native_interpret_expr (ref->type, buf, len);
1508 	    }
1509 	}
1510     }
1511 
1512   return NULL_TREE;
1513 }
1514 
1515 /* Return true if OPS contain a storage order barrier.  */
1516 
1517 static bool
1518 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1519 {
1520   vn_reference_op_t op;
1521   unsigned i;
1522 
1523   FOR_EACH_VEC_ELT (ops, i, op)
1524     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1525       return true;
1526 
1527   return false;
1528 }
1529 
1530 /* Return true if OPS represent an access with reverse storage order.  */
1531 
1532 static bool
1533 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1534 {
1535   unsigned i = 0;
1536   if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1537     ++i;
1538   switch (ops[i].opcode)
1539     {
1540     case ARRAY_REF:
1541     case COMPONENT_REF:
1542     case BIT_FIELD_REF:
1543     case MEM_REF:
1544       return ops[i].reverse;
1545     default:
1546       return false;
1547     }
1548 }
1549 
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place on
   *ORIG.  *VALUEIZED_ANYTHING will specify whether any operands were
   valueized.  */
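
/* For instance (illustrative), an ARRAY_REF operand whose index i_1
   has value number 4 gets the index replaced by 4 and, the lower bound
   and element size being constant, its constant byte offset
   recomputed.  */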
1554 
1555 static void
1556 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1557 		 bool with_avail = false)
1558 {
1559   vn_reference_op_t vro;
1560   unsigned int i;
1561 
1562   *valueized_anything = false;
1563 
1564   FOR_EACH_VEC_ELT (*orig, i, vro)
1565     {
1566       if (vro->opcode == SSA_NAME
1567 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1568 	{
1569 	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1570 	  if (tem != vro->op0)
1571 	    {
1572 	      *valueized_anything = true;
1573 	      vro->op0 = tem;
1574 	    }
1575 	  /* If it transforms from an SSA_NAME to a constant, update
1576 	     the opcode.  */
1577 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1578 	    vro->opcode = TREE_CODE (vro->op0);
1579 	}
1580       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1581 	{
1582 	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1583 	  if (tem != vro->op1)
1584 	    {
1585 	      *valueized_anything = true;
1586 	      vro->op1 = tem;
1587 	    }
1588 	}
1589       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1590 	{
1591 	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1592 	  if (tem != vro->op2)
1593 	    {
1594 	      *valueized_anything = true;
1595 	      vro->op2 = tem;
1596 	    }
1597 	}
1598       /* If it transforms from an SSA_NAME to an address, fold with
1599 	 a preceding indirect reference.  */
1600       if (i > 0
1601 	  && vro->op0
1602 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1603 	  && (*orig)[i - 1].opcode == MEM_REF)
1604 	{
1605 	  if (vn_reference_fold_indirect (orig, &i))
1606 	    *valueized_anything = true;
1607 	}
1608       else if (i > 0
1609 	       && vro->opcode == SSA_NAME
1610 	       && (*orig)[i - 1].opcode == MEM_REF)
1611 	{
1612 	  if (vn_reference_maybe_forwprop_address (orig, &i))
1613 	    *valueized_anything = true;
1614 	}
1615       /* If it transforms a non-constant ARRAY_REF into a constant
1616 	 one, adjust the constant offset.  */
1617       else if (vro->opcode == ARRAY_REF
1618 	       && known_eq (vro->off, -1)
1619 	       && poly_int_tree_p (vro->op0)
1620 	       && poly_int_tree_p (vro->op1)
1621 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1622 	{
1623 	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1624 				  - wi::to_poly_offset (vro->op1))
1625 				 * wi::to_offset (vro->op2)
1626 				 * vn_ref_op_align_unit (vro));
1627 	  off.to_shwi (&vro->off);
1628 	}
1629     }
1630 }
1631 
1632 static void
1633 valueize_refs (vec<vn_reference_op_s> *orig)
1634 {
1635   bool tem;
1636   valueize_refs_1 (orig, &tem);
1637 }
1638 
1639 static vec<vn_reference_op_s> shared_lookup_references;
1640 
1641 /* Create a vector of vn_reference_op_s structures from REF, a
1642    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1643    this function.  *VALUEIZED_ANYTHING will specify whether any
1644    operands were valueized.  */
1645 
1646 static vec<vn_reference_op_s>
1647 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1648 {
1649   if (!ref)
1650     return vNULL;
1651   shared_lookup_references.truncate (0);
1652   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1653   valueize_refs_1 (&shared_lookup_references, valueized_anything);
1654   return shared_lookup_references;
1655 }
1656 
1657 /* Create a vector of vn_reference_op_s structures from CALL, a
1658    call statement.  The vector is shared among all callers of
1659    this function.  */
1660 
1661 static vec<vn_reference_op_s>
1662 valueize_shared_reference_ops_from_call (gcall *call)
1663 {
1664   if (!call)
1665     return vNULL;
1666   shared_lookup_references.truncate (0);
1667   copy_reference_ops_from_call (call, &shared_lookup_references);
1668   valueize_refs (&shared_lookup_references);
1669   return shared_lookup_references;
1670 }
1671 
1672 /* Lookup a SCCVN reference operation VR in the current hash table.
1673    Returns the resulting value number if it exists in the hash table,
1674    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1675    vn_reference_t stored in the hashtable if something is found.  */
1676 
1677 static tree
1678 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1679 {
1680   vn_reference_s **slot;
1681   hashval_t hash;
1682 
1683   hash = vr->hashcode;
1684   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1685   if (slot)
1686     {
1687       if (vnresult)
1688 	*vnresult = (vn_reference_t)*slot;
1689       return ((vn_reference_t)*slot)->result;
1690     }
1691 
1692   return NULL_TREE;
1693 }
1694 
1695 
1696 /* Partial definition tracking support.  */
1697 
1698 struct pd_range
1699 {
1700   HOST_WIDE_INT offset;
1701   HOST_WIDE_INT size;
1702 };
1703 
1704 struct pd_data
1705 {
1706   tree rhs;
1707   HOST_WIDE_INT offset;
1708   HOST_WIDE_INT size;
1709 };
1710 
1711 /* Context for alias walking.  */
1712 
1713 struct vn_walk_cb_data
1714 {
1715   vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1716 		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1717     : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1718       mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1719       tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1720       first_base_set (-2), known_ranges (NULL)
1721   {
1722     if (!last_vuse_ptr)
1723       last_vuse_ptr = &last_vuse;
1724     ao_ref_init (&orig_ref, orig_ref_);
1725     if (mask)
1726       {
1727 	wide_int w = wi::to_wide (mask);
1728 	unsigned int pos = 0, prec = w.get_precision ();
1729 	pd_data pd;
1730 	pd.rhs = build_constructor (NULL_TREE, NULL);
	/* When a bitwise AND with a constant is done on a memory load,
	   we don't really need all the bits to be defined or defined
	   to constants; we don't care what is in the positions
	   corresponding to zero bits in the mask.
	   So, push the ranges of those zero bits in the mask as
	   artificial zero stores and let the partial def handling code
	   do the rest.  */
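	/* For example (illustrative), for a load masked with 0xff on a
	   little-endian target only the low byte needs to be covered by
	   real definitions; the ranges corresponding to the zero mask
	   bits are pre-filled with artificial zero partial defs here.  */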
1738 	while (pos < prec)
1739 	  {
1740 	    int tz = wi::ctz (w);
1741 	    if (pos + tz > prec)
1742 	      tz = prec - pos;
1743 	    if (tz)
1744 	      {
1745 		if (BYTES_BIG_ENDIAN)
1746 		  pd.offset = prec - pos - tz;
1747 		else
1748 		  pd.offset = pos;
1749 		pd.size = tz;
1750 		void *r = push_partial_def (pd, 0, 0, 0, prec);
1751 		gcc_assert (r == NULL_TREE);
1752 	      }
1753 	    pos += tz;
1754 	    if (pos == prec)
1755 	      break;
1756 	    w = wi::lrshift (w, tz);
1757 	    tz = wi::ctz (wi::bit_not (w));
1758 	    if (pos + tz > prec)
1759 	      tz = prec - pos;
1760 	    pos += tz;
1761 	    w = wi::lrshift (w, tz);
1762 	  }
1763       }
1764   }
1765   ~vn_walk_cb_data ();
1766   void *finish (alias_set_type, alias_set_type, tree);
1767   void *push_partial_def (pd_data pd,
1768 			  alias_set_type, alias_set_type, HOST_WIDE_INT,
1769 			  HOST_WIDE_INT);
1770 
1771   vn_reference_t vr;
1772   ao_ref orig_ref;
1773   tree *last_vuse_ptr;
1774   tree last_vuse;
1775   tree mask;
1776   tree masked_result;
1777   vn_lookup_kind vn_walk_kind;
1778   bool tbaa_p;
1779   vec<vn_reference_op_s> saved_operands;
1780 
  /* The partial defs we have encountered along the walk.  */
1782   auto_vec<pd_data, 2> partial_defs;
  /* The first def's range, to avoid splay tree setup in most cases.  */
1784   pd_range first_range;
1785   alias_set_type first_set;
1786   alias_set_type first_base_set;
1787   splay_tree known_ranges;
1788   obstack ranges_obstack;
1789 };
1790 
1791 vn_walk_cb_data::~vn_walk_cb_data ()
1792 {
1793   if (known_ranges)
1794     {
1795       splay_tree_delete (known_ranges);
1796       obstack_free (&ranges_obstack, NULL);
1797     }
1798   saved_operands.release ();
1799 }
1800 
1801 void *
1802 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1803 {
1804   if (first_set != -2)
1805     {
1806       set = first_set;
1807       base_set = first_base_set;
1808     }
1809   if (mask)
1810     {
1811       masked_result = val;
1812       return (void *) -1;
1813     }
1814   vec<vn_reference_op_s> &operands
1815     = saved_operands.exists () ? saved_operands : vr->operands;
1816   return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1817 						   vr->type, operands, val);
1818 }
1819 
1820 /* pd_range splay-tree helpers.  */
1821 
1822 static int
1823 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1824 {
1825   HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1826   HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1827   if (offset1 < offset2)
1828     return -1;
1829   else if (offset1 > offset2)
1830     return 1;
1831   return 0;
1832 }
1833 
1834 static void *
1835 pd_tree_alloc (int size, void *data_)
1836 {
1837   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1838   return obstack_alloc (&data->ranges_obstack, size);
1839 }
1840 
1841 static void
1842 pd_tree_dealloc (void *, void *)
1843 {
1844 }
1845 
/* Push PD to the vector of partial definitions, returning a
   value when we are ready to combine things with SET, BASE_SET and
   MAXSIZEI, NULL when we want to continue looking for partial defs
   or (void *)-1 on failure.  */
1850 
1851 void *
1852 vn_walk_cb_data::push_partial_def (pd_data pd,
1853 				   alias_set_type set, alias_set_type base_set,
1854 				   HOST_WIDE_INT offseti,
1855 				   HOST_WIDE_INT maxsizei)
1856 {
1857   const HOST_WIDE_INT bufsize = 64;
1858   /* We're using a fixed buffer for encoding so fail early if the object
1859      we want to interpret is bigger.  */
1860   if (maxsizei > bufsize * BITS_PER_UNIT
1861       || CHAR_BIT != 8
1862       || BITS_PER_UNIT != 8
1863       /* Not prepared to handle PDP endian.  */
1864       || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1865     return (void *)-1;
1866 
1867   /* Turn too large constant stores into non-constant stores.  */
1868   if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1869     pd.rhs = error_mark_node;
1870 
1871   /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1872      most a partial byte before and/or after the region.  */
1873   if (!CONSTANT_CLASS_P (pd.rhs))
1874     {
1875       if (pd.offset < offseti)
1876 	{
1877 	  HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1878 	  gcc_assert (pd.size > o);
1879 	  pd.size -= o;
1880 	  pd.offset += o;
1881 	}
1882       if (pd.size > maxsizei)
1883 	pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1884     }
1885 
1886   pd.offset -= offseti;
1887 
1888   bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1889 			|| CONSTANT_CLASS_P (pd.rhs));
1890   if (partial_defs.is_empty ())
1891     {
1892       /* If we get a clobber upfront, fail.  */
1893       if (TREE_CLOBBER_P (pd.rhs))
1894 	return (void *)-1;
1895       if (!pd_constant_p)
1896 	return (void *)-1;
1897       partial_defs.safe_push (pd);
1898       first_range.offset = pd.offset;
1899       first_range.size = pd.size;
1900       first_set = set;
1901       first_base_set = base_set;
1902       last_vuse_ptr = NULL;
1903       /* Continue looking for partial defs.  */
1904       return NULL;
1905     }
1906 
1907   if (!known_ranges)
1908     {
1909       /* ???  Optimize the case where the 2nd partial def completes things.  */
1910       gcc_obstack_init (&ranges_obstack);
1911       known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1912 						    pd_tree_alloc,
1913 						    pd_tree_dealloc, this);
1914       splay_tree_insert (known_ranges,
1915 			 (splay_tree_key)&first_range.offset,
1916 			 (splay_tree_value)&first_range);
1917     }
1918 
1919   pd_range newr = { pd.offset, pd.size };
1920   splay_tree_node n;
1921   pd_range *r;
1922   /* Lookup the predecessor of offset + 1 and see if we need to merge.  */
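  /* As an illustration, with an existing range [0, 8) and a new range
     [8, 16) the predecessor lookup at offset 9 finds [0, 8); because of
     the r->size + 1 below, adjacency already counts as overlap and the
     two ranges are merged into [0, 16).  */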
1923   HOST_WIDE_INT loffset = newr.offset + 1;
1924   if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1925       && ((r = (pd_range *)n->value), true)
1926       && ranges_known_overlap_p (r->offset, r->size + 1,
1927 				 newr.offset, newr.size))
1928     {
      /* Ignore partial defs already covered.  Here we also drop shadowed
	 clobbers arriving here on the floor.  */
1931       if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1932 	return NULL;
1933       r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1934     }
1935   else
1936     {
1937       /* newr.offset wasn't covered yet, insert the range.  */
1938       r = XOBNEW (&ranges_obstack, pd_range);
1939       *r = newr;
1940       splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1941 			 (splay_tree_value)r);
1942     }
1943   /* Merge r which now contains newr and is a member of the splay tree with
1944      adjacent overlapping ranges.  */
1945   pd_range *rafter;
1946   while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1947 	 && ((rafter = (pd_range *)n->value), true)
1948 	 && ranges_known_overlap_p (r->offset, r->size + 1,
1949 				    rafter->offset, rafter->size))
1950     {
1951       r->size = MAX (r->offset + r->size,
1952 		     rafter->offset + rafter->size) - r->offset;
1953       splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1954     }
1955   /* If we get a clobber, fail.  */
1956   if (TREE_CLOBBER_P (pd.rhs))
1957     return (void *)-1;
1958   /* Non-constants are OK as long as they are shadowed by a constant.  */
1959   if (!pd_constant_p)
1960     return (void *)-1;
1961   partial_defs.safe_push (pd);
1962 
  /* Now we have merged newr into the range tree.  When we have covered
     [0, maxsizei] then the tree will contain exactly one node which has
     the desired properties and it will be 'r'.  */
1966   if (!known_subrange_p (0, maxsizei, r->offset, r->size))
1967     /* Continue looking for partial defs.  */
1968     return NULL;
1969 
1970   /* Now simply native encode all partial defs in reverse order.  */
1971   unsigned ndefs = partial_defs.length ();
1972   /* We support up to 512-bit values (for V8DFmode).  */
1973   unsigned char buffer[bufsize + 1];
1974   unsigned char this_buffer[bufsize + 1];
1975   int len;
1976 
1977   memset (buffer, 0, bufsize + 1);
1978   unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
1979   while (!partial_defs.is_empty ())
1980     {
1981       pd_data pd = partial_defs.pop ();
1982       unsigned int amnt;
1983       if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1984 	{
1985 	  /* Empty CONSTRUCTOR.  */
1986 	  if (pd.size >= needed_len * BITS_PER_UNIT)
1987 	    len = needed_len;
1988 	  else
1989 	    len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
1990 	  memset (this_buffer, 0, len);
1991 	}
1992       else
1993 	{
1994 	  len = native_encode_expr (pd.rhs, this_buffer, bufsize,
1995 				    MAX (0, -pd.offset) / BITS_PER_UNIT);
1996 	  if (len <= 0
1997 	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
1998 			- MAX (0, -pd.offset) / BITS_PER_UNIT))
1999 	    {
2000 	      if (dump_file && (dump_flags & TDF_DETAILS))
2001 		fprintf (dump_file, "Failed to encode %u "
2002 			 "partial definitions\n", ndefs);
2003 	      return (void *)-1;
2004 	    }
2005 	}
2006 
2007       unsigned char *p = buffer;
2008       HOST_WIDE_INT size = pd.size;
2009       if (pd.offset < 0)
2010 	size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2011       this_buffer[len] = 0;
2012       if (BYTES_BIG_ENDIAN)
2013 	{
2014 	  /* LSB of this_buffer[len - 1] byte should be at
2015 	     pd.offset + pd.size - 1 bits in buffer.  */
2016 	  amnt = ((unsigned HOST_WIDE_INT) pd.offset
2017 		  + pd.size) % BITS_PER_UNIT;
2018 	  if (amnt)
2019 	    shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2020 	  unsigned char *q = this_buffer;
2021 	  unsigned int off = 0;
2022 	  if (pd.offset >= 0)
2023 	    {
2024 	      unsigned int msk;
2025 	      off = pd.offset / BITS_PER_UNIT;
2026 	      gcc_assert (off < needed_len);
2027 	      p = buffer + off;
2028 	      if (size <= amnt)
2029 		{
2030 		  msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2031 		  *p = (*p & ~msk) | (this_buffer[len] & msk);
2032 		  size = 0;
2033 		}
2034 	      else
2035 		{
2036 		  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2037 		    q = (this_buffer + len
2038 			 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2039 			    / BITS_PER_UNIT));
2040 		  if (pd.offset % BITS_PER_UNIT)
2041 		    {
2042 		      msk = -1U << (BITS_PER_UNIT
2043 				    - (pd.offset % BITS_PER_UNIT));
2044 		      *p = (*p & msk) | (*q & ~msk);
2045 		      p++;
2046 		      q++;
2047 		      off++;
2048 		      size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2049 		      gcc_assert (size >= 0);
2050 		    }
2051 		}
2052 	    }
2053 	  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2054 	    {
2055 	      q = (this_buffer + len
2056 		   - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2057 		      / BITS_PER_UNIT));
2058 	      if (pd.offset % BITS_PER_UNIT)
2059 		{
2060 		  q++;
2061 		  size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2062 					   % BITS_PER_UNIT);
2063 		  gcc_assert (size >= 0);
2064 		}
2065 	    }
2066 	  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2067 	      > needed_len)
2068 	    size = (needed_len - off) * BITS_PER_UNIT;
2069 	  memcpy (p, q, size / BITS_PER_UNIT);
2070 	  if (size % BITS_PER_UNIT)
2071 	    {
2072 	      unsigned int msk
2073 		= -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2074 	      p += size / BITS_PER_UNIT;
2075 	      q += size / BITS_PER_UNIT;
2076 	      *p = (*q & msk) | (*p & ~msk);
2077 	    }
2078 	}
2079       else
2080 	{
2081 	  if (pd.offset >= 0)
2082 	    {
2083 	      /* LSB of this_buffer[0] byte should be at pd.offset bits
2084 		 in buffer.  */
2085 	      unsigned int msk;
2086 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2087 	      amnt = pd.offset % BITS_PER_UNIT;
2088 	      if (amnt)
2089 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2090 	      unsigned int off = pd.offset / BITS_PER_UNIT;
2091 	      gcc_assert (off < needed_len);
2092 	      size = MIN (size,
2093 			  (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2094 	      p = buffer + off;
2095 	      if (amnt + size < BITS_PER_UNIT)
2096 		{
2097 		  /* Low amnt bits come from *p, then size bits
2098 		     from this_buffer[0] and the remaining again from
2099 		     *p.  */
2100 		  msk = ((1 << size) - 1) << amnt;
2101 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2102 		  size = 0;
2103 		}
2104 	      else if (amnt)
2105 		{
2106 		  msk = -1U << amnt;
2107 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2108 		  p++;
2109 		  size -= (BITS_PER_UNIT - amnt);
2110 		}
2111 	    }
2112 	  else
2113 	    {
2114 	      amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2115 	      if (amnt)
2116 		size -= BITS_PER_UNIT - amnt;
2117 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2118 	      if (amnt)
2119 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2120 	    }
2121 	  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2122 	  p += size / BITS_PER_UNIT;
2123 	  if (size % BITS_PER_UNIT)
2124 	    {
2125 	      unsigned int msk = -1U << (size % BITS_PER_UNIT);
2126 	      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2127 		    & ~msk) | (*p & msk);
2128 	    }
2129 	}
2130     }
2131 
2132   tree type = vr->type;
2133   /* Make sure to interpret in a type that has a range covering the whole
2134      access size.  */
2135   if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2136     type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2137   tree val;
2138   if (BYTES_BIG_ENDIAN)
2139     {
2140       unsigned sz = needed_len;
2141       if (maxsizei % BITS_PER_UNIT)
2142 	shift_bytes_in_array_right (buffer, needed_len,
2143 				    BITS_PER_UNIT
2144 				    - (maxsizei % BITS_PER_UNIT));
2145       if (INTEGRAL_TYPE_P (type))
2146 	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2147       if (sz > needed_len)
2148 	{
2149 	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2150 	  val = native_interpret_expr (type, this_buffer, sz);
2151 	}
2152       else
2153 	val = native_interpret_expr (type, buffer, needed_len);
2154     }
2155   else
2156     val = native_interpret_expr (type, buffer, bufsize);
  /* If we chop off bits because the type's precision doesn't match the
     memory access size, this is OK when optimizing reads but not when
     called from the DSE code during elimination.  */
2160   if (val && type != vr->type)
2161     {
2162       if (! int_fits_type_p (val, vr->type))
2163 	val = NULL_TREE;
2164       else
2165 	val = fold_convert (vr->type, val);
2166     }
2167 
2168   if (val)
2169     {
2170       if (dump_file && (dump_flags & TDF_DETAILS))
2171 	fprintf (dump_file,
2172 		 "Successfully combined %u partial definitions\n", ndefs);
2173       /* We are using the alias-set of the first store we encounter which
2174 	 should be appropriate here.  */
2175       return finish (first_set, first_base_set, val);
2176     }
2177   else
2178     {
2179       if (dump_file && (dump_flags & TDF_DETAILS))
2180 	fprintf (dump_file,
2181 		 "Failed to interpret %u encoded partial definitions\n", ndefs);
2182       return (void *)-1;
2183     }
2184 }
2185 
2186 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
2187    with the current VUSE and performs the expression lookup.  */
2188 
2189 static void *
2190 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2191 {
2192   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2193   vn_reference_t vr = data->vr;
2194   vn_reference_s **slot;
2195   hashval_t hash;
2196 
2197   /* If we have partial definitions recorded we have to go through
2198      vn_reference_lookup_3.  */
2199   if (!data->partial_defs.is_empty ())
2200     return NULL;
2201 
2202   if (data->last_vuse_ptr)
2203     {
2204       *data->last_vuse_ptr = vuse;
2205       data->last_vuse = vuse;
2206     }
2207 
2208   /* Fixup vuse and hash.  */
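  /* The VUSE contributes SSA_NAME_VERSION (vuse) additively to the
     reference hash (see vn_reference_compute_hash), which is what makes
     this cheap delta adjustment possible instead of a full re-hash.  */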
2209   if (vr->vuse)
2210     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2211   vr->vuse = vuse_ssa_val (vuse);
2212   if (vr->vuse)
2213     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2214 
2215   hash = vr->hashcode;
2216   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2217   if (slot)
2218     {
2219       if ((*slot)->result && data->saved_operands.exists ())
2220 	return data->finish (vr->set, vr->base_set, (*slot)->result);
2221       return *slot;
2222     }
2223 
2224   return NULL;
2225 }
2226 
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, BASE_SET, TYPE, OPERANDS reference
   that has the value VALUE, which is either a constant or an SSA
   name.  */
2230 
2231 static vn_reference_t
2232 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2233 					  alias_set_type set,
2234 					  alias_set_type base_set,
2235 					  tree type,
2236 					  vec<vn_reference_op_s,
2237 					        va_heap> operands,
2238 					  tree value)
2239 {
2240   vn_reference_s vr1;
2241   vn_reference_t result;
2242   unsigned value_id;
2243   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2244   vr1.operands = operands;
2245   vr1.type = type;
2246   vr1.set = set;
2247   vr1.base_set = base_set;
2248   vr1.hashcode = vn_reference_compute_hash (&vr1);
2249   if (vn_reference_lookup_1 (&vr1, &result))
2250     return result;
2251   if (TREE_CODE (value) == SSA_NAME)
2252     value_id = VN_INFO (value)->value_id;
2253   else
2254     value_id = get_or_alloc_constant_value_id (value);
2255   return vn_reference_insert_pieces (vuse, set, base_set, type,
2256 				     operands.copy (), value, value_id);
2257 }
2258 
/* Return a value-number for the expression encoded in RES_OP either by
   looking up an existing value-number for the simplified result or by
   inserting the operation if INSERT is true.  */
2262 
2263 static tree
2264 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2265 {
2266   tree result = NULL_TREE;
2267   /* We will be creating a value number for
2268        RCODE (OPS...).
2269      So first simplify and lookup this expression to see if it
2270      is already available.  */
2271   /* For simplification valueize.  */
2272   unsigned i;
2273   for (i = 0; i < res_op->num_ops; ++i)
2274     if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2275       {
2276 	tree tem = vn_valueize (res_op->ops[i]);
2277 	if (!tem)
2278 	  break;
2279 	res_op->ops[i] = tem;
2280       }
2281   /* If valueization of an operand fails (it is not available), skip
2282      simplification.  */
2283   bool res = false;
2284   if (i == res_op->num_ops)
2285     {
2286       mprts_hook = vn_lookup_simplify_result;
2287       res = res_op->resimplify (NULL, vn_valueize);
2288       mprts_hook = NULL;
2289     }
2290   gimple *new_stmt = NULL;
2291   if (res
2292       && gimple_simplified_result_is_gimple_val (res_op))
2293     {
2294       /* The expression is already available.  */
2295       result = res_op->ops[0];
      /* Valueize it, simplification returns something in AVAIL only.  */
2297       if (TREE_CODE (result) == SSA_NAME)
2298 	result = SSA_VAL (result);
2299     }
2300   else
2301     {
2302       tree val = vn_lookup_simplify_result (res_op);
2303       if (!val && insert)
2304 	{
2305 	  gimple_seq stmts = NULL;
2306 	  result = maybe_push_res_to_seq (res_op, &stmts);
2307 	  if (result)
2308 	    {
2309 	      gcc_assert (gimple_seq_singleton_p (stmts));
2310 	      new_stmt = gimple_seq_first_stmt (stmts);
2311 	    }
2312 	}
2313       else
2314 	/* The expression is already available.  */
2315 	result = val;
2316     }
2317   if (new_stmt)
2318     {
2319       /* The expression is not yet available, value-number lhs to
2320 	 the new SSA_NAME we created.  */
2321       /* Initialize value-number information properly.  */
2322       vn_ssa_aux_t result_info = VN_INFO (result);
2323       result_info->valnum = result;
2324       result_info->value_id = get_next_value_id ();
2325       result_info->visited = 1;
2326       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2327 					  new_stmt);
2328       result_info->needs_insertion = true;
2329       /* ???  PRE phi-translation inserts NARYs without corresponding
2330          SSA name result.  Re-use those but set their result according
2331 	 to the stmt we just built.  */
2332       vn_nary_op_t nary = NULL;
2333       vn_nary_op_lookup_stmt (new_stmt, &nary);
2334       if (nary)
2335 	{
2336 	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2337 	  nary->u.result = gimple_assign_lhs (new_stmt);
2338 	}
2339       /* As all "inserted" statements are singleton SCCs, insert
2340 	 to the valid table.  This is strictly needed to
2341 	 avoid re-generating new value SSA_NAMEs for the same
2342 	 expression during SCC iteration over and over (the
2343 	 optimistic table gets cleared after each iteration).
2344 	 We do not need to insert into the optimistic table, as
2345 	 lookups there will fall back to the valid table.  */
2346       else
2347 	{
2348 	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
2349 	  vn_nary_op_t vno1
2350 	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2351 	  vno1->value_id = result_info->value_id;
2352 	  vno1->length = length;
2353 	  vno1->predicated_values = 0;
2354 	  vno1->u.result = result;
2355 	  init_vn_nary_op_from_stmt (vno1, new_stmt);
2356 	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
2357 	  /* Also do not link it into the undo chain.  */
2358 	  last_inserted_nary = vno1->next;
2359 	  vno1->next = (vn_nary_op_t)(void *)-1;
2360 	}
2361       if (dump_file && (dump_flags & TDF_DETAILS))
2362 	{
2363 	  fprintf (dump_file, "Inserting name ");
2364 	  print_generic_expr (dump_file, result);
2365 	  fprintf (dump_file, " for expression ");
2366 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2367 	  fprintf (dump_file, "\n");
2368 	}
2369     }
2370   return result;
2371 }
2372 
/* Return a value-number for the expression encoded in RES_OP either by
   looking up an existing value-number for the simplified result or by
   inserting the operation.  */
2375 
2376 static tree
2377 vn_nary_build_or_lookup (gimple_match_op *res_op)
2378 {
2379   return vn_nary_build_or_lookup_1 (res_op, true);
2380 }
2381 
/* Try to simplify the expression encoded in the n-ary operation NARY
   and return its value if present.  */
2384 
2385 tree
2386 vn_nary_simplify (vn_nary_op_t nary)
2387 {
2388   if (nary->length > gimple_match_op::MAX_NUM_OPS)
2389     return NULL_TREE;
2390   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2391 		      nary->type, nary->length);
2392   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2393   return vn_nary_build_or_lookup_1 (&op, false);
2394 }
2395 
2396 /* Elimination engine.  */
2397 
2398 class eliminate_dom_walker : public dom_walker
2399 {
2400 public:
2401   eliminate_dom_walker (cdi_direction, bitmap);
2402   ~eliminate_dom_walker ();
2403 
2404   virtual edge before_dom_children (basic_block);
2405   virtual void after_dom_children (basic_block);
2406 
2407   virtual tree eliminate_avail (basic_block, tree op);
2408   virtual void eliminate_push_avail (basic_block, tree op);
2409   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2410 
2411   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2412 
2413   unsigned eliminate_cleanup (bool region_p = false);
2414 
2415   bool do_pre;
2416   unsigned int el_todo;
2417   unsigned int eliminations;
2418   unsigned int insertions;
2419 
2420   /* SSA names that had their defs inserted by PRE if do_pre.  */
2421   bitmap inserted_exprs;
2422 
2423   /* Blocks with statements that have had their EH properties changed.  */
2424   bitmap need_eh_cleanup;
2425 
2426   /* Blocks with statements that have had their AB properties changed.  */
2427   bitmap need_ab_cleanup;
2428 
2429   /* Local state for the eliminate domwalk.  */
2430   auto_vec<gimple *> to_remove;
2431   auto_vec<gimple *> to_fixup;
2432   auto_vec<tree> avail;
2433   auto_vec<tree> avail_stack;
2434 };
2435 
2436 /* Adaptor to the elimination engine using RPO availability.  */
2437 
2438 class rpo_elim : public eliminate_dom_walker
2439 {
2440 public:
2441   rpo_elim(basic_block entry_)
2442     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2443       m_avail_freelist (NULL) {}
2444 
2445   virtual tree eliminate_avail (basic_block, tree op);
2446 
2447   virtual void eliminate_push_avail (basic_block, tree);
2448 
2449   basic_block entry;
2450   /* Freelist of avail entries which are allocated from the vn_ssa_aux
2451      obstack.  */
2452   vn_avail *m_avail_freelist;
2453 };
2454 
2455 /* Global RPO state for access from hooks.  */
2456 static rpo_elim *rpo_avail;
2457 basic_block vn_context_bb;
2458 
2459 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2460    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2461    Otherwise return false.  */
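
/* For example (illustrative), MEM[p + 4B] at bit offset 0 and
   MEM[p + 0B] at bit offset 32 both adjust to base P with bit offset
   32 and therefore compare equal.  */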
2462 
2463 static bool
2464 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2465 				       tree base2, poly_int64 *offset2)
2466 {
2467   poly_int64 soff;
2468   if (TREE_CODE (base1) == MEM_REF
2469       && TREE_CODE (base2) == MEM_REF)
2470     {
2471       if (mem_ref_offset (base1).to_shwi (&soff))
2472 	{
2473 	  base1 = TREE_OPERAND (base1, 0);
2474 	  *offset1 += soff * BITS_PER_UNIT;
2475 	}
2476       if (mem_ref_offset (base2).to_shwi (&soff))
2477 	{
2478 	  base2 = TREE_OPERAND (base2, 0);
2479 	  *offset2 += soff * BITS_PER_UNIT;
2480 	}
2481       return operand_equal_p (base1, base2, 0);
2482     }
2483   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2484 }
2485 
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REF and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is larger than TR_TRANSLATE then
   only disambiguation is performed, no translation of *REF and *VR.
   If only disambiguation was performed then *DISAMBIGUATE_ONLY is
   updated accordingly.  */
2492 
2493 static void *
2494 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2495 		       translate_flags *disambiguate_only)
2496 {
2497   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2498   vn_reference_t vr = data->vr;
2499   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2500   tree base = ao_ref_base (ref);
2501   HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2502   static vec<vn_reference_op_s> lhs_ops;
2503   ao_ref lhs_ref;
2504   bool lhs_ref_ok = false;
2505   poly_int64 copy_size;
2506 
  /* First try to disambiguate after value-replacing in the definition's LHS.  */
2508   if (is_gimple_assign (def_stmt))
2509     {
2510       tree lhs = gimple_assign_lhs (def_stmt);
2511       bool valueized_anything = false;
2512       /* Avoid re-allocation overhead.  */
2513       lhs_ops.truncate (0);
2514       basic_block saved_rpo_bb = vn_context_bb;
2515       vn_context_bb = gimple_bb (def_stmt);
2516       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2517 	{
2518 	  copy_reference_ops_from_ref (lhs, &lhs_ops);
2519 	  valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2520 	}
2521       vn_context_bb = saved_rpo_bb;
2522       ao_ref_init (&lhs_ref, lhs);
2523       lhs_ref_ok = true;
2524       if (valueized_anything
2525 	  && ao_ref_init_from_vn_reference
2526 	       (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2527 		ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2528 	  && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2529 	{
2530 	  *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2531 	  return NULL;
2532 	}
2533 
2534       /* Besides valueizing the LHS we can also use access-path based
2535          disambiguation on the original non-valueized ref.  */
2536       if (!ref->ref
2537 	  && lhs_ref_ok
2538 	  && data->orig_ref.ref)
2539 	{
2540 	  /* We want to use the non-valueized LHS for this, but avoid redundant
2541 	     work.  */
2542 	  ao_ref *lref = &lhs_ref;
2543 	  ao_ref lref_alt;
2544 	  if (valueized_anything)
2545 	    {
2546 	      ao_ref_init (&lref_alt, lhs);
2547 	      lref = &lref_alt;
2548 	    }
2549 	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2550 	    {
2551 	      *disambiguate_only = (valueized_anything
2552 				    ? TR_VALUEIZE_AND_DISAMBIGUATE
2553 				    : TR_DISAMBIGUATE);
2554 	      return NULL;
2555 	    }
2556 	}
2557 
2558       /* If we reach a clobbering statement try to skip it and see if
2559          we find a VN result with exactly the same value as the
2560 	 possible clobber.  In this case we can ignore the clobber
2561 	 and return the found value.  */
2562       if (is_gimple_reg_type (TREE_TYPE (lhs))
2563 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
2564 	  && ref->ref)
2565 	{
2566 	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2567 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
2568 	  data->last_vuse_ptr = NULL;
2569 	  tree saved_vuse = vr->vuse;
2570 	  hashval_t saved_hashcode = vr->hashcode;
2571 	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2572 	  /* Need to restore vr->vuse and vr->hashcode.  */
2573 	  vr->vuse = saved_vuse;
2574 	  vr->hashcode = saved_hashcode;
2575 	  data->last_vuse_ptr = saved_last_vuse_ptr;
2576 	  if (res && res != (void *)-1)
2577 	    {
2578 	      vn_reference_t vnresult = (vn_reference_t) res;
2579 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2580 	      if (TREE_CODE (rhs) == SSA_NAME)
2581 		rhs = SSA_VAL (rhs);
2582 	      if (vnresult->result
2583 		  && operand_equal_p (vnresult->result, rhs, 0)
2584 		  /* We have to honor our promise about union type punning
2585 		     and also support arbitrary overlaps with
2586 		     -fno-strict-aliasing.  So simply resort to alignment to
2587 		     rule out overlaps.  Do this check last because it is
2588 		     quite expensive compared to the hash-lookup above.  */
2589 		  && multiple_p (get_object_alignment (ref->ref), ref->size)
2590 		  && multiple_p (get_object_alignment (lhs), ref->size))
2591 		return res;
2592 	    }
2593 	}
2594     }
2595   else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2596 	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2597 	   && gimple_call_num_args (def_stmt) <= 4)
2598     {
      /* For builtin calls valueize their arguments and call the
         alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
2605       tree oldargs[4];
2606       bool valueized_anything = false;
2607       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2608 	{
2609 	  oldargs[i] = gimple_call_arg (def_stmt, i);
2610 	  tree val = vn_valueize (oldargs[i]);
2611 	  if (val != oldargs[i])
2612 	    {
2613 	      gimple_call_set_arg (def_stmt, i, val);
2614 	      valueized_anything = true;
2615 	    }
2616 	}
2617       if (valueized_anything)
2618 	{
2619 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2620 					       ref);
2621 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2622 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
2623 	  if (!res)
2624 	    {
2625 	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2626 	      return NULL;
2627 	    }
2628 	}
2629     }
2630 
2631   if (*disambiguate_only > TR_TRANSLATE)
2632     return (void *)-1;
2633 
2634   /* If we cannot constrain the size of the reference we cannot
2635      test if anything kills it.  */
2636   if (!ref->max_size_known_p ())
2637     return (void *)-1;
2638 
2639   poly_int64 offset = ref->offset;
2640   poly_int64 maxsize = ref->max_size;
2641 
2642   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
2643      from that definition.
2644      1) Memset.  */
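  /* For example (illustrative), after
       memset (p, 0, 16);
     a fully covered 4-byte read from p + 4 valueizes to zero.  */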
2645   if (is_gimple_reg_type (vr->type)
2646       && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2647 	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2648       && (integer_zerop (gimple_call_arg (def_stmt, 1))
2649 	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2650 	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2651 	      && CHAR_BIT == 8
2652 	      && BITS_PER_UNIT == 8
2653 	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2654 	      && offset.is_constant (&offseti)
2655 	      && ref->size.is_constant (&sizei)
2656 	      && (offseti % BITS_PER_UNIT == 0
2657 		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2658       && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2659 	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2660 	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2661       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2662 	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2663     {
2664       tree base2;
2665       poly_int64 offset2, size2, maxsize2;
2666       bool reverse;
2667       tree ref2 = gimple_call_arg (def_stmt, 0);
2668       if (TREE_CODE (ref2) == SSA_NAME)
2669 	{
2670 	  ref2 = SSA_VAL (ref2);
2671 	  if (TREE_CODE (ref2) == SSA_NAME
2672 	      && (TREE_CODE (base) != MEM_REF
2673 		  || TREE_OPERAND (base, 0) != ref2))
2674 	    {
2675 	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2676 	      if (gimple_assign_single_p (def_stmt)
2677 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2678 		ref2 = gimple_assign_rhs1 (def_stmt);
2679 	    }
2680 	}
2681       if (TREE_CODE (ref2) == ADDR_EXPR)
2682 	{
2683 	  ref2 = TREE_OPERAND (ref2, 0);
2684 	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2685 					   &reverse);
2686 	  if (!known_size_p (maxsize2)
2687 	      || !known_eq (maxsize2, size2)
2688 	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2689 	    return (void *)-1;
2690 	}
2691       else if (TREE_CODE (ref2) == SSA_NAME)
2692 	{
2693 	  poly_int64 soff;
2694 	  if (TREE_CODE (base) != MEM_REF
2695 	      || !(mem_ref_offset (base)
2696 		   << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2697 	    return (void *)-1;
2698 	  offset += soff;
2699 	  offset2 = 0;
2700 	  if (TREE_OPERAND (base, 0) != ref2)
2701 	    {
2702 	      gimple *def = SSA_NAME_DEF_STMT (ref2);
2703 	      if (is_gimple_assign (def)
2704 		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2705 		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2706 		  && poly_int_tree_p (gimple_assign_rhs2 (def)))
2707 		{
2708 		  tree rhs2 = gimple_assign_rhs2 (def);
2709 		  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2710 					       SIGNED)
2711 			<< LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2712 		    return (void *)-1;
2713 		  ref2 = gimple_assign_rhs1 (def);
2714 		  if (TREE_CODE (ref2) == SSA_NAME)
2715 		    ref2 = SSA_VAL (ref2);
2716 		}
2717 	      else
2718 		return (void *)-1;
2719 	    }
2720 	}
2721       else
2722 	return (void *)-1;
2723       tree len = gimple_call_arg (def_stmt, 2);
2724       HOST_WIDE_INT leni, offset2i;
2725       if (TREE_CODE (len) == SSA_NAME)
2726 	len = SSA_VAL (len);
2727       /* Sometimes the above trickery is smarter than alias analysis.  Take
2728          advantage of that.  */
2729       if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2730 				   (wi::to_poly_offset (len)
2731 				    << LOG2_BITS_PER_UNIT)))
2732 	return NULL;
2733       if (data->partial_defs.is_empty ()
2734 	  && known_subrange_p (offset, maxsize, offset2,
2735 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2736 	{
2737 	  tree val;
2738 	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2739 	    val = build_zero_cst (vr->type);
2740 	  else if (INTEGRAL_TYPE_P (vr->type)
2741 		   && known_eq (ref->size, 8)
2742 		   && offseti % BITS_PER_UNIT == 0)
2743 	    {
2744 	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2745 				      vr->type, gimple_call_arg (def_stmt, 1));
2746 	      val = vn_nary_build_or_lookup (&res_op);
2747 	      if (!val
2748 		  || (TREE_CODE (val) == SSA_NAME
2749 		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2750 		return (void *)-1;
2751 	    }
2752 	  else
2753 	    {
2754 	      unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2755 	      if (INTEGRAL_TYPE_P (vr->type))
2756 		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2757 	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2758 	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2759 		      buflen);
2760 	      if (BYTES_BIG_ENDIAN)
2761 		{
2762 		  unsigned int amnt
2763 		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
2764 		       % BITS_PER_UNIT);
2765 		  if (amnt)
2766 		    {
2767 		      shift_bytes_in_array_right (buf, buflen,
2768 						  BITS_PER_UNIT - amnt);
2769 		      buf++;
2770 		      buflen--;
2771 		    }
2772 		}
2773 	      else if (offseti % BITS_PER_UNIT != 0)
2774 		{
2775 		  unsigned int amnt
2776 		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2777 				       % BITS_PER_UNIT);
2778 		  shift_bytes_in_array_left (buf, buflen, amnt);
2779 		  buf++;
2780 		  buflen--;
2781 		}
2782 	      val = native_interpret_expr (vr->type, buf, buflen);
2783 	      if (!val)
2784 		return (void *)-1;
2785 	    }
2786 	  return data->finish (0, 0, val);
2787 	}
2788       /* For now handle clearing memory with partial defs.  */
2789       else if (known_eq (ref->size, maxsize)
2790 	       && integer_zerop (gimple_call_arg (def_stmt, 1))
2791 	       && tree_fits_poly_int64_p (len)
2792 	       && tree_to_poly_int64 (len).is_constant (&leni)
2793 	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2794 	       && offset.is_constant (&offseti)
2795 	       && offset2.is_constant (&offset2i)
2796 	       && maxsize.is_constant (&maxsizei)
2797 	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2798 					  leni << LOG2_BITS_PER_UNIT))
2799 	{
2800 	  pd_data pd;
2801 	  pd.rhs = build_constructor (NULL_TREE, NULL);
2802 	  pd.offset = offset2i;
2803 	  pd.size = leni << LOG2_BITS_PER_UNIT;
2804 	  return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2805 	}
2806     }
2807 
2808   /* 2) Assignment from an empty CONSTRUCTOR.  */
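  /* E.g. (illustrative) after
       s = {};
     a covered read from s valueizes to the zero constant of the
     read's type.  */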
2809   else if (is_gimple_reg_type (vr->type)
2810 	   && gimple_assign_single_p (def_stmt)
2811 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2812 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2813     {
2814       tree base2;
2815       poly_int64 offset2, size2, maxsize2;
2816       HOST_WIDE_INT offset2i, size2i;
2817       gcc_assert (lhs_ref_ok);
2818       base2 = ao_ref_base (&lhs_ref);
2819       offset2 = lhs_ref.offset;
2820       size2 = lhs_ref.size;
2821       maxsize2 = lhs_ref.max_size;
2822       if (known_size_p (maxsize2)
2823 	  && known_eq (maxsize2, size2)
2824 	  && adjust_offsets_for_equal_base_address (base, &offset,
2825 						    base2, &offset2))
2826 	{
2827 	  if (data->partial_defs.is_empty ()
2828 	      && known_subrange_p (offset, maxsize, offset2, size2))
2829 	    {
	      /* While technically undefined behavior, do not optimize
	         a full read from a clobber.  */
2832 	      if (gimple_clobber_p (def_stmt))
2833 		return (void *)-1;
2834 	      tree val = build_zero_cst (vr->type);
2835 	      return data->finish (ao_ref_alias_set (&lhs_ref),
2836 				   ao_ref_base_alias_set (&lhs_ref), val);
2837 	    }
2838 	  else if (known_eq (ref->size, maxsize)
2839 		   && maxsize.is_constant (&maxsizei)
2840 		   && offset.is_constant (&offseti)
2841 		   && offset2.is_constant (&offset2i)
2842 		   && size2.is_constant (&size2i)
2843 		   && ranges_known_overlap_p (offseti, maxsizei,
2844 					      offset2i, size2i))
2845 	    {
2846 	      /* Let clobbers be consumed by the partial-def tracker
2847 	         which can choose to ignore them if they are shadowed
2848 		 by a later def.  */
2849 	      pd_data pd;
2850 	      pd.rhs = gimple_assign_rhs1 (def_stmt);
2851 	      pd.offset = offset2i;
2852 	      pd.size = size2i;
2853 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2854 					     ao_ref_base_alias_set (&lhs_ref),
2855 					     offseti, maxsizei);
2856 	    }
2857 	}
2858     }
2859 
  /* 3) Assignment from a constant.  We can use fold's native encode/interpret
     routines to extract the assigned bits.  */
2862   else if (known_eq (ref->size, maxsize)
2863 	   && is_gimple_reg_type (vr->type)
2864 	   && !reverse_storage_order_for_component_p (vr->operands)
2865 	   && !contains_storage_order_barrier_p (vr->operands)
2866 	   && gimple_assign_single_p (def_stmt)
2867 	   && CHAR_BIT == 8
2868 	   && BITS_PER_UNIT == 8
2869 	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	   /* native_encode_expr and native_interpret_expr operate on arrays
	      of bytes and so fundamentally need a compile-time size and
	      offset.  */
2872 	   && maxsize.is_constant (&maxsizei)
2873 	   && offset.is_constant (&offseti)
2874 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2875 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2876 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2877     {
2878       tree lhs = gimple_assign_lhs (def_stmt);
2879       tree base2;
2880       poly_int64 offset2, size2, maxsize2;
2881       HOST_WIDE_INT offset2i, size2i;
2882       bool reverse;
2883       gcc_assert (lhs_ref_ok);
2884       base2 = ao_ref_base (&lhs_ref);
2885       offset2 = lhs_ref.offset;
2886       size2 = lhs_ref.size;
2887       maxsize2 = lhs_ref.max_size;
2888       reverse = reverse_storage_order_for_component_p (lhs);
2889       if (base2
2890 	  && !reverse
2891 	  && !storage_order_barrier_p (lhs)
2892 	  && known_eq (maxsize2, size2)
2893 	  && adjust_offsets_for_equal_base_address (base, &offset,
2894 						    base2, &offset2)
2895 	  && offset.is_constant (&offseti)
2896 	  && offset2.is_constant (&offset2i)
2897 	  && size2.is_constant (&size2i))
2898 	{
2899 	  if (data->partial_defs.is_empty ()
2900 	      && known_subrange_p (offseti, maxsizei, offset2, size2))
2901 	    {
2902 	      /* We support up to 512-bit values (for V8DFmode).  */
2903 	      unsigned char buffer[65];
2904 	      int len;
2905 
2906 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2907 	      if (TREE_CODE (rhs) == SSA_NAME)
2908 		rhs = SSA_VAL (rhs);
2909 	      len = native_encode_expr (rhs,
2910 					buffer, sizeof (buffer) - 1,
2911 					(offseti - offset2i) / BITS_PER_UNIT);
2912 	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2913 		{
2914 		  tree type = vr->type;
2915 		  unsigned char *buf = buffer;
2916 		  unsigned int amnt = 0;
2917 		  /* Make sure to interpret in a type that has a range
2918 		     covering the whole access size.  */
2919 		  if (INTEGRAL_TYPE_P (vr->type)
2920 		      && maxsizei != TYPE_PRECISION (vr->type))
2921 		    type = build_nonstandard_integer_type (maxsizei,
2922 							   TYPE_UNSIGNED (type));
2923 		  if (BYTES_BIG_ENDIAN)
2924 		    {
2925 		      /* For big-endian native_encode_expr stored the rhs
2926 			 such that the LSB of it is the LSB of buffer[len - 1].
2927 			 That bit is stored into memory at position
2928 			 offset2 + size2 - 1, i.e. in byte
2929 			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2930 			 E.g. for offset2 1 and size2 14, rhs -1 and memory
2931 			 previously cleared that is:
2932 			 0        1
2933 			 01111111|11111110
2934 			 Now, if we want to extract offset 2 and size 12 from
2935 			 it using native_interpret_expr (which actually works
2936 			 for integral bitfield types in terms of byte size of
2937 			 the mode), the native_encode_expr stored the value
2938 			 into buffer as
2939 			 XX111111|11111111
2940 			 and returned len 2 (the X bits are outside of
2941 			 precision).
2942 			 Let sz be maxsize / BITS_PER_UNIT if not extracting
2943 			 a bitfield, and GET_MODE_SIZE otherwise.
2944 			 We need to align the LSB of the value we want to
2945 			 extract as the LSB of buf[sz - 1].
2946 			 The LSB from memory we need to read is at position
2947 			 offset + maxsize - 1.  */
2948 		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
2949 		      if (INTEGRAL_TYPE_P (type))
2950 			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2951 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2952 			      - offseti - maxsizei) % BITS_PER_UNIT;
2953 		      if (amnt)
2954 			shift_bytes_in_array_right (buffer, len, amnt);
2955 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2956 			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
2957 		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
2958 			len = 0;
2959 		      else
2960 			{
2961 			  buf = buffer + len - sz - amnt;
2962 			  len -= (buf - buffer);
2963 			}
2964 		    }
2965 		  else
2966 		    {
2967 		      amnt = ((unsigned HOST_WIDE_INT) offset2i
2968 			      - offseti) % BITS_PER_UNIT;
2969 		      if (amnt)
2970 			{
2971 			  buffer[len] = 0;
2972 			  shift_bytes_in_array_left (buffer, len + 1, amnt);
2973 			  buf = buffer + 1;
2974 			}
2975 		    }
2976 		  tree val = native_interpret_expr (type, buf, len);
		  /* If we chop off bits because the type's precision doesn't
		     match the memory access size this is OK when optimizing
		     reads but not when called from the DSE code during
		     elimination.  */
2981 		  if (val
2982 		      && type != vr->type)
2983 		    {
2984 		      if (! int_fits_type_p (val, vr->type))
2985 			val = NULL_TREE;
2986 		      else
2987 			val = fold_convert (vr->type, val);
2988 		    }
2989 
2990 		  if (val)
2991 		    return data->finish (ao_ref_alias_set (&lhs_ref),
2992 					 ao_ref_base_alias_set (&lhs_ref), val);
2993 		}
2994 	    }
2995 	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
2996 					   size2i))
2997 	    {
2998 	      pd_data pd;
2999 	      tree rhs = gimple_assign_rhs1 (def_stmt);
3000 	      if (TREE_CODE (rhs) == SSA_NAME)
3001 		rhs = SSA_VAL (rhs);
3002 	      pd.rhs = rhs;
3003 	      pd.offset = offset2i;
3004 	      pd.size = size2i;
3005 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3006 					     ao_ref_base_alias_set (&lhs_ref),
3007 					     offseti, maxsizei);
3008 	    }
3009 	}
3010     }
3011 
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from, or which we can combine to a larger entity.  */
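  /* E.g. (illustrative) when the definition stores a 64-bit SSA name
     and we read 32 bits from within the stored location, the value can
     be expressed as BIT_FIELD_REF <stored_val, 32, 0> (or as a
     VIEW_CONVERT_EXPR for a same-size read) and looked up or inserted
     via vn_nary_build_or_lookup.  */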
3014   else if (known_eq (ref->size, maxsize)
3015 	   && is_gimple_reg_type (vr->type)
3016 	   && !reverse_storage_order_for_component_p (vr->operands)
3017 	   && !contains_storage_order_barrier_p (vr->operands)
3018 	   && gimple_assign_single_p (def_stmt)
3019 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3020     {
3021       tree lhs = gimple_assign_lhs (def_stmt);
3022       tree base2;
3023       poly_int64 offset2, size2, maxsize2;
3024       HOST_WIDE_INT offset2i, size2i, offseti;
3025       bool reverse;
3026       gcc_assert (lhs_ref_ok);
3027       base2 = ao_ref_base (&lhs_ref);
3028       offset2 = lhs_ref.offset;
3029       size2 = lhs_ref.size;
3030       maxsize2 = lhs_ref.max_size;
3031       reverse = reverse_storage_order_for_component_p (lhs);
3032       tree def_rhs = gimple_assign_rhs1 (def_stmt);
3033       if (!reverse
3034 	  && !storage_order_barrier_p (lhs)
3035 	  && known_size_p (maxsize2)
3036 	  && known_eq (maxsize2, size2)
3037 	  && adjust_offsets_for_equal_base_address (base, &offset,
3038 						    base2, &offset2))
3039 	{
3040 	  if (data->partial_defs.is_empty ()
3041 	      && known_subrange_p (offset, maxsize, offset2, size2)
3042 	      /* ???  We can't handle bitfield precision extracts without
3043 		 either using an alternate type for the BIT_FIELD_REF and
3044 		 then doing a conversion or possibly adjusting the offset
3045 		 according to endianness.  */
3046 	      && (! INTEGRAL_TYPE_P (vr->type)
3047 		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3048 	      && multiple_p (ref->size, BITS_PER_UNIT))
3049 	    {
3050 	      tree val = NULL_TREE;
3051 	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3052 		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3053 		{
3054 		  gimple_match_op op (gimple_match_cond::UNCOND,
3055 				      BIT_FIELD_REF, vr->type,
3056 				      SSA_VAL (def_rhs),
3057 				      bitsize_int (ref->size),
3058 				      bitsize_int (offset - offset2));
3059 		  val = vn_nary_build_or_lookup (&op);
3060 		}
3061 	      else if (known_eq (ref->size, size2))
3062 		{
3063 		  gimple_match_op op (gimple_match_cond::UNCOND,
3064 				      VIEW_CONVERT_EXPR, vr->type,
3065 				      SSA_VAL (def_rhs));
3066 		  val = vn_nary_build_or_lookup (&op);
3067 		}
3068 	      if (val
3069 		  && (TREE_CODE (val) != SSA_NAME
3070 		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3071 		return data->finish (ao_ref_alias_set (&lhs_ref),
3072 				     ao_ref_base_alias_set (&lhs_ref), val);
3073 	    }
3074 	  else if (maxsize.is_constant (&maxsizei)
3075 		   && offset.is_constant (&offseti)
3076 		   && offset2.is_constant (&offset2i)
3077 		   && size2.is_constant (&size2i)
3078 		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3079 	    {
3080 	      pd_data pd;
3081 	      pd.rhs = SSA_VAL (def_rhs);
3082 	      pd.offset = offset2i;
3083 	      pd.size = size2i;
3084 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3085 					     ao_ref_base_alias_set (&lhs_ref),
3086 					     offseti, maxsizei);
3087 	    }
3088 	}
3089     }
3090 
3091   /* 5) For aggregate copies translate the reference through them if
3092      the copy kills ref.  */
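  /* E.g. (illustrative) for the aggregate copy
       a = b;
     a lookup of a.f is rewritten into a lookup of b.f and the walk
     continues with the translated reference.  */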
3093   else if (data->vn_walk_kind == VN_WALKREWRITE
3094 	   && gimple_assign_single_p (def_stmt)
3095 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
3096 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3097 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3098     {
3099       tree base2;
3100       int i, j, k;
3101       auto_vec<vn_reference_op_s> rhs;
3102       vn_reference_op_t vro;
3103       ao_ref r;
3104 
3105       gcc_assert (lhs_ref_ok);
3106 
3107       /* See if the assignment kills REF.  */
3108       base2 = ao_ref_base (&lhs_ref);
3109       if (!lhs_ref.max_size_known_p ()
3110 	  || (base != base2
3111 	      && (TREE_CODE (base) != MEM_REF
3112 		  || TREE_CODE (base2) != MEM_REF
3113 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3114 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3115 					  TREE_OPERAND (base2, 1))))
3116 	  || !stmt_kills_ref_p (def_stmt, ref))
3117 	return (void *)-1;
3118 
3119       /* Find the common base of ref and the lhs.  lhs_ops already
3120          contains valueized operands for the lhs.  */
3121       i = vr->operands.length () - 1;
3122       j = lhs_ops.length () - 1;
3123       while (j >= 0 && i >= 0
3124 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3125 	{
3126 	  i--;
3127 	  j--;
3128 	}
3129 
3130       /* ???  The innermost op should always be a MEM_REF and we already
3131          checked that the assignment to the lhs kills vr.  Thus for
3132 	 aggregate copies using char[] types the vn_reference_op_eq
3133 	 may fail when comparing types for compatibility.  But we really
3134 	 don't care here - further lookups with the rewritten operands
3135 	 will simply fail if we messed up types too badly.  */
3136       poly_int64 extra_off = 0;
3137       if (j == 0 && i >= 0
3138 	  && lhs_ops[0].opcode == MEM_REF
3139 	  && maybe_ne (lhs_ops[0].off, -1))
3140 	{
3141 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3142 	    i--, j--;
3143 	  else if (vr->operands[i].opcode == MEM_REF
3144 		   && maybe_ne (vr->operands[i].off, -1))
3145 	    {
3146 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
3147 	      i--, j--;
3148 	    }
3149 	}
3150 
3151       /* i now points to the first additional op.
3152 	 ???  LHS may not be completely contained in VR, one or more
3153 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
3154 	 try handling outermost VIEW_CONVERT_EXPRs.  */
3155       if (j != -1)
3156 	return (void *)-1;
3157 
3158       /* Punt if the additional ops contain a storage order barrier.  */
3159       for (k = i; k >= 0; k--)
3160 	{
3161 	  vro = &vr->operands[k];
3162 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3163 	    return (void *)-1;
3164 	}
3165 
3166       /* Now re-write REF to be based on the rhs of the assignment.  */
3167       tree rhs1 = gimple_assign_rhs1 (def_stmt);
3168       copy_reference_ops_from_ref (rhs1, &rhs);
3169 
3170       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
3171       if (maybe_ne (extra_off, 0))
3172 	{
3173 	  if (rhs.length () < 2)
3174 	    return (void *)-1;
3175 	  int ix = rhs.length () - 2;
3176 	  if (rhs[ix].opcode != MEM_REF
3177 	      || known_eq (rhs[ix].off, -1))
3178 	    return (void *)-1;
3179 	  rhs[ix].off += extra_off;
3180 	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3181 					 build_int_cst (TREE_TYPE (rhs[ix].op0),
3182 							extra_off));
3183 	}
3184 
3185       /* Save the operands since we need to use the original ones for
3186 	 the hash entry we use.  */
3187       if (!data->saved_operands.exists ())
3188 	data->saved_operands = vr->operands.copy ();
3189 
3190       /* We need to pre-pend vr->operands[0..i] to rhs.  */
3191       vec<vn_reference_op_s> old = vr->operands;
3192       if (i + 1 + rhs.length () > vr->operands.length ())
3193 	vr->operands.safe_grow (i + 1 + rhs.length ());
3194       else
3195 	vr->operands.truncate (i + 1 + rhs.length ());
3196       FOR_EACH_VEC_ELT (rhs, j, vro)
3197 	vr->operands[i + 1 + j] = *vro;
3198       valueize_refs (&vr->operands);
3199       if (old == shared_lookup_references)
3200 	shared_lookup_references = vr->operands;
3201       vr->hashcode = vn_reference_compute_hash (vr);
3202 
3203       /* Try folding the new reference to a constant.  */
3204       tree val = fully_constant_vn_reference_p (vr);
3205       if (val)
3206 	{
3207 	  if (data->partial_defs.is_empty ())
3208 	    return data->finish (ao_ref_alias_set (&lhs_ref),
3209 				 ao_ref_base_alias_set (&lhs_ref), val);
3210 	  /* This is the only interesting case for partial-def handling
3211 	     coming from targets that like to gimplify init-ctors as
3212 	     aggregate copies from constant data like aarch64 for
3213 	     PR83518.  */
3214 	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3215 	    {
3216 	      pd_data pd;
3217 	      pd.rhs = val;
3218 	      pd.offset = 0;
3219 	      pd.size = maxsizei;
3220 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3221 					     ao_ref_base_alias_set (&lhs_ref),
3222 					     0, maxsizei);
3223 	    }
3224 	}
3225 
      /* Continuing with partial defs isn't easily possible here, we
         would have to find a full def from further lookups from here.
	 Probably not worth special-casing this everywhere.  */
3229       if (!data->partial_defs.is_empty ())
3230 	return (void *)-1;
3231 
3232       /* Adjust *ref from the new operands.  */
3233       ao_ref rhs1_ref;
3234       ao_ref_init (&rhs1_ref, rhs1);
3235       if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3236 					  ao_ref_base_alias_set (&rhs1_ref),
3237 					  vr->type, vr->operands))
3238 	return (void *)-1;
3239       /* This can happen with bitfields.  */
3240       if (maybe_ne (ref->size, r.size))
3241 	{
	  /* If the access lacks some subsetting, simply apply that by
	     shortening it.  That in the end can only be successful
	     if we can pun the lookup result, which in turn requires
	     exact offsets.  */
3246 	  if (known_eq (r.size, r.max_size)
3247 	      && known_lt (ref->size, r.size))
3248 	    r.size = r.max_size = ref->size;
3249 	  else
3250 	    return (void *)-1;
3251 	}
3252       *ref = r;
3253 
3254       /* Do not update last seen VUSE after translating.  */
3255       data->last_vuse_ptr = NULL;
3256       /* Invalidate the original access path since it now contains
3257          the wrong base.  */
3258       data->orig_ref.ref = NULL_TREE;
3259       /* Use the alias-set of this LHS for recording an eventual result.  */
3260       if (data->first_set == -2)
3261 	{
3262 	  data->first_set = ao_ref_alias_set (&lhs_ref);
3263 	  data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3264 	}
3265 
3266       /* Keep looking for the adjusted *REF / VR pair.  */
3267       return NULL;
3268     }
3269 
3270   /* 6) For memcpy copies translate the reference through them if
3271      the copy kills ref.  */
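  /* For example (an illustrative sketch, not code from this TU): given
       memcpy (&a, &b, sizeof (a));
       ... = a.f;
     the load from a.f can be translated into a load from the
     corresponding bytes of b when the copy covers the whole access,
     and the walk then continues at the definition of b.  */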
3272   else if (data->vn_walk_kind == VN_WALKREWRITE
3273 	   && is_gimple_reg_type (vr->type)
3274 	   /* ???  Handle BCOPY as well.  */
3275 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3276 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3277 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3278 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3279 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3280 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3281 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3282 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3283 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3284 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3285 	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3286 	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3287 		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3288 				       &copy_size)))
3289 	   /* Handling this is more complicated, give up for now.  */
3290 	   && data->partial_defs.is_empty ())
3291     {
3292       tree lhs, rhs;
3293       ao_ref r;
3294       poly_int64 rhs_offset, lhs_offset;
3295       vn_reference_op_s op;
3296       poly_uint64 mem_offset;
3297       poly_int64 at, byte_maxsize;
3298 
3299       /* Only handle non-variable, addressable refs.  */
3300       if (maybe_ne (ref->size, maxsize)
3301 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
3302 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3303 	return (void *)-1;
3304 
3305       /* Extract a pointer base and an offset for the destination.  */
3306       lhs = gimple_call_arg (def_stmt, 0);
3307       lhs_offset = 0;
3308       if (TREE_CODE (lhs) == SSA_NAME)
3309 	{
3310 	  lhs = vn_valueize (lhs);
3311 	  if (TREE_CODE (lhs) == SSA_NAME)
3312 	    {
3313 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3314 	      if (gimple_assign_single_p (def_stmt)
3315 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3316 		lhs = gimple_assign_rhs1 (def_stmt);
3317 	    }
3318 	}
3319       if (TREE_CODE (lhs) == ADDR_EXPR)
3320 	{
3321 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3322 						    &lhs_offset);
3323 	  if (!tem)
3324 	    return (void *)-1;
3325 	  if (TREE_CODE (tem) == MEM_REF
3326 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3327 	    {
3328 	      lhs = TREE_OPERAND (tem, 0);
3329 	      if (TREE_CODE (lhs) == SSA_NAME)
3330 		lhs = vn_valueize (lhs);
3331 	      lhs_offset += mem_offset;
3332 	    }
3333 	  else if (DECL_P (tem))
3334 	    lhs = build_fold_addr_expr (tem);
3335 	  else
3336 	    return (void *)-1;
3337 	}
3338       if (TREE_CODE (lhs) != SSA_NAME
3339 	  && TREE_CODE (lhs) != ADDR_EXPR)
3340 	return (void *)-1;
3341 
3342       /* Extract a pointer base and an offset for the source.  */
3343       rhs = gimple_call_arg (def_stmt, 1);
3344       rhs_offset = 0;
3345       if (TREE_CODE (rhs) == SSA_NAME)
3346 	rhs = vn_valueize (rhs);
3347       if (TREE_CODE (rhs) == ADDR_EXPR)
3348 	{
3349 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3350 						    &rhs_offset);
3351 	  if (!tem)
3352 	    return (void *)-1;
3353 	  if (TREE_CODE (tem) == MEM_REF
3354 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3355 	    {
3356 	      rhs = TREE_OPERAND (tem, 0);
3357 	      rhs_offset += mem_offset;
3358 	    }
3359 	  else if (DECL_P (tem)
3360 		   || TREE_CODE (tem) == STRING_CST)
3361 	    rhs = build_fold_addr_expr (tem);
3362 	  else
3363 	    return (void *)-1;
3364 	}
3365       if (TREE_CODE (rhs) == SSA_NAME)
3366 	rhs = SSA_VAL (rhs);
3367       else if (TREE_CODE (rhs) != ADDR_EXPR)
3368 	return (void *)-1;
3369 
      /* The bases of the destination and the reference have to agree.  */
3371       if (TREE_CODE (base) == MEM_REF)
3372 	{
3373 	  if (TREE_OPERAND (base, 0) != lhs
3374 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3375 	    return (void *) -1;
3376 	  at += mem_offset;
3377 	}
3378       else if (!DECL_P (base)
3379 	       || TREE_CODE (lhs) != ADDR_EXPR
3380 	       || TREE_OPERAND (lhs, 0) != base)
3381 	return (void *)-1;
3382 
3383       /* If the access is completely outside of the memcpy destination
3384 	 area there is no aliasing.  */
3385       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3386 	return NULL;
3387       /* And the access has to be contained within the memcpy destination.  */
3388       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3389 	return (void *)-1;
3390 
3391       /* Save the operands since we need to use the original ones for
3392 	 the hash entry we use.  */
3393       if (!data->saved_operands.exists ())
3394 	data->saved_operands = vr->operands.copy ();
3395 
3396       /* Make room for 2 operands in the new reference.  */
3397       if (vr->operands.length () < 2)
3398 	{
3399 	  vec<vn_reference_op_s> old = vr->operands;
3400 	  vr->operands.safe_grow_cleared (2);
3401 	  if (old == shared_lookup_references)
3402 	    shared_lookup_references = vr->operands;
3403 	}
3404       else
3405 	vr->operands.truncate (2);
3406 
3407       /* The looked-through reference is a simple MEM_REF.  */
3408       memset (&op, 0, sizeof (op));
3409       op.type = vr->type;
3410       op.opcode = MEM_REF;
3411       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3412       op.off = at - lhs_offset + rhs_offset;
3413       vr->operands[0] = op;
3414       op.type = TREE_TYPE (rhs);
3415       op.opcode = TREE_CODE (rhs);
3416       op.op0 = rhs;
3417       op.off = -1;
3418       vr->operands[1] = op;
3419       vr->hashcode = vn_reference_compute_hash (vr);
3420 
3421       /* Try folding the new reference to a constant.  */
3422       tree val = fully_constant_vn_reference_p (vr);
3423       if (val)
3424 	return data->finish (0, 0, val);
3425 
3426       /* Adjust *ref from the new operands.  */
3427       if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3428 	return (void *)-1;
3429       /* This can happen with bitfields.  */
3430       if (maybe_ne (ref->size, r.size))
3431 	return (void *)-1;
3432       *ref = r;
3433 
3434       /* Do not update last seen VUSE after translating.  */
3435       data->last_vuse_ptr = NULL;
3436       /* Invalidate the original access path since it now contains
3437          the wrong base.  */
3438       data->orig_ref.ref = NULL_TREE;
3439       /* Use the alias-set of this stmt for recording an eventual result.  */
3440       if (data->first_set == -2)
3441 	{
3442 	  data->first_set = 0;
3443 	  data->first_base_set = 0;
3444 	}
3445 
3446       /* Keep looking for the adjusted *REF / VR pair.  */
3447       return NULL;
3448     }
3449 
3450   /* Bail out and stop walking.  */
3451   return (void *)-1;
3452 }
3453 
3454 /* Return a reference op vector from OP that can be used for
3455    vn_reference_lookup_pieces.  The caller is responsible for releasing
3456    the vector.  */
3457 
3458 vec<vn_reference_op_s>
3459 vn_reference_operands_for_lookup (tree op)
3460 {
3461   bool valueized;
3462   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3463 }
3464 
/* Lookup a reference operation by its parts in the current hash table.
3466    Returns the resulting value number if it exists in the hash table,
3467    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
3468    vn_reference_t stored in the hashtable if something is found.  */
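/* A minimal usage sketch (variable names are illustrative):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (ref);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, base_set,
					    TREE_TYPE (ref), ops,
					    &res, VN_WALK);
     ops.release ();

   VAL is NULL_TREE if no equivalent reference has been recorded.  */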
3469 
3470 tree
3471 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3472 			    alias_set_type base_set, tree type,
3473 			    vec<vn_reference_op_s> operands,
3474 			    vn_reference_t *vnresult, vn_lookup_kind kind)
3475 {
3476   struct vn_reference_s vr1;
3477   vn_reference_t tmp;
3478   tree cst;
3479 
3480   if (!vnresult)
3481     vnresult = &tmp;
3482   *vnresult = NULL;
3483 
3484   vr1.vuse = vuse_ssa_val (vuse);
3485   shared_lookup_references.truncate (0);
3486   shared_lookup_references.safe_grow (operands.length ());
3487   memcpy (shared_lookup_references.address (),
3488 	  operands.address (),
3489 	  sizeof (vn_reference_op_s)
3490 	  * operands.length ());
3491   bool valueized_p;
3492   valueize_refs_1 (&shared_lookup_references, &valueized_p);
3493   vr1.operands = shared_lookup_references;
3494   vr1.type = type;
3495   vr1.set = set;
3496   vr1.base_set = base_set;
3497   vr1.hashcode = vn_reference_compute_hash (&vr1);
3498   if ((cst = fully_constant_vn_reference_p (&vr1)))
3499     return cst;
3500 
3501   vn_reference_lookup_1 (&vr1, vnresult);
3502   if (!*vnresult
3503       && kind != VN_NOWALK
3504       && vr1.vuse)
3505     {
3506       ao_ref r;
3507       unsigned limit = param_sccvn_max_alias_queries_per_access;
3508       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3509       vec<vn_reference_op_s> ops_for_ref;
3510       if (!valueized_p)
3511 	ops_for_ref = vr1.operands;
3512       else
3513 	{
3514 	  /* For ao_ref_from_mem we have to ensure only available SSA names
3515 	     end up in base and the only convenient way to make this work
3516 	     for PRE is to re-valueize with that in mind.  */
3517 	  ops_for_ref.create (operands.length ());
3518 	  ops_for_ref.quick_grow (operands.length ());
3519 	  memcpy (ops_for_ref.address (),
3520 		  operands.address (),
3521 		  sizeof (vn_reference_op_s)
3522 		  * operands.length ());
3523 	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3524 	}
3525       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3526 					 ops_for_ref))
3527 	*vnresult
3528 	  = ((vn_reference_t)
3529 	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3530 				     vn_reference_lookup_3, vuse_valueize,
3531 				     limit, &data));
3532       if (ops_for_ref != shared_lookup_references)
3533 	ops_for_ref.release ();
3534       gcc_checking_assert (vr1.operands == shared_lookup_references);
3535     }
3536 
3537   if (*vnresult)
3538      return (*vnresult)->result;
3539 
3540   return NULL_TREE;
3541 }
3542 
3543 /* Lookup OP in the current hash table, and return the resulting value
3544    number if it exists in the hash table.  Return NULL_TREE if it does
3545    not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE at which the value
   lookup succeeded.  MASK is either NULL_TREE, or an INTEGER_CST if
   the result of the load is bitwise ANDed with MASK; then we are only
   interested in that subset of the bits and can ignore whether the
   other bits are uninitialized or not initialized with constants.  */
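/* For example, the BIT_AND_EXPR handling in visit_nary_op passes the
   constant of x = y & CST as MASK when y is a load, so that only the
   bits covered by CST need to be provided by the reaching stores.  */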
3554 
3555 tree
3556 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3557 		     vn_reference_t *vnresult, bool tbaa_p,
3558 		     tree *last_vuse_ptr, tree mask)
3559 {
3560   vec<vn_reference_op_s> operands;
3561   struct vn_reference_s vr1;
3562   bool valueized_anything;
3563 
3564   if (vnresult)
3565     *vnresult = NULL;
3566 
3567   vr1.vuse = vuse_ssa_val (vuse);
3568   vr1.operands = operands
3569     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3570   vr1.type = TREE_TYPE (op);
3571   ao_ref op_ref;
3572   ao_ref_init (&op_ref, op);
3573   vr1.set = ao_ref_alias_set (&op_ref);
3574   vr1.base_set = ao_ref_base_alias_set (&op_ref);
3575   vr1.hashcode = vn_reference_compute_hash (&vr1);
3576   if (mask == NULL_TREE)
3577     if (tree cst = fully_constant_vn_reference_p (&vr1))
3578       return cst;
3579 
3580   if (kind != VN_NOWALK && vr1.vuse)
3581     {
3582       vn_reference_t wvnresult;
3583       ao_ref r;
3584       unsigned limit = param_sccvn_max_alias_queries_per_access;
3585       auto_vec<vn_reference_op_s> ops_for_ref;
3586       if (valueized_anything)
3587 	{
3588 	  copy_reference_ops_from_ref (op, &ops_for_ref);
3589 	  bool tem;
3590 	  valueize_refs_1 (&ops_for_ref, &tem, true);
3591 	}
3592       /* Make sure to use a valueized reference if we valueized anything.
3593          Otherwise preserve the full reference for advanced TBAA.  */
3594       if (!valueized_anything
3595 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3596 					     vr1.type, ops_for_ref))
3597 	ao_ref_init (&r, op);
3598       vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3599 			    last_vuse_ptr, kind, tbaa_p, mask);
3600 
3601       wvnresult
3602 	= ((vn_reference_t)
3603 	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3604 				   vn_reference_lookup_3, vuse_valueize, limit,
3605 				   &data));
3606       gcc_checking_assert (vr1.operands == shared_lookup_references);
3607       if (wvnresult)
3608 	{
3609 	  gcc_assert (mask == NULL_TREE);
3610 	  if (vnresult)
3611 	    *vnresult = wvnresult;
3612 	  return wvnresult->result;
3613 	}
3614       else if (mask)
3615 	return data.masked_result;
3616 
3617       return NULL_TREE;
3618     }
3619 
3620   if (last_vuse_ptr)
3621     *last_vuse_ptr = vr1.vuse;
3622   if (mask)
3623     return NULL_TREE;
3624   return vn_reference_lookup_1 (&vr1, vnresult);
3625 }
3626 
3627 /* Lookup CALL in the current hash table and return the entry in
3628    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
3629 
3630 void
3631 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3632 			  vn_reference_t vr)
3633 {
3634   if (vnresult)
3635     *vnresult = NULL;
3636 
3637   tree vuse = gimple_vuse (call);
3638 
3639   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3640   vr->operands = valueize_shared_reference_ops_from_call (call);
3641   vr->type = gimple_expr_type (call);
3642   vr->punned = false;
3643   vr->set = 0;
3644   vr->base_set = 0;
3645   vr->hashcode = vn_reference_compute_hash (vr);
3646   vn_reference_lookup_1 (vr, vnresult);
3647 }
3648 
3649 /* Insert OP into the current hash table with a value number of RESULT.  */
3650 
3651 static void
3652 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3653 {
3654   vn_reference_s **slot;
3655   vn_reference_t vr1;
3656   bool tem;
3657 
3658   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3659   if (TREE_CODE (result) == SSA_NAME)
3660     vr1->value_id = VN_INFO (result)->value_id;
3661   else
3662     vr1->value_id = get_or_alloc_constant_value_id (result);
3663   vr1->vuse = vuse_ssa_val (vuse);
3664   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3665   vr1->type = TREE_TYPE (op);
3666   vr1->punned = false;
3667   ao_ref op_ref;
3668   ao_ref_init (&op_ref, op);
3669   vr1->set = ao_ref_alias_set (&op_ref);
3670   vr1->base_set = ao_ref_base_alias_set (&op_ref);
3671   vr1->hashcode = vn_reference_compute_hash (vr1);
3672   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3673   vr1->result_vdef = vdef;
3674 
3675   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3676 						      INSERT);
3677 
  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible,
     the def can effectively be processed and its ref already be
     inserted by vn_reference_lookup_3.  So we cannot assert (!*slot)
     but we save a lookup if we deal with already inserted refs here.  */
3684   if (*slot)
3685     {
3686       /* We cannot assert that we have the same value either because
3687          when disentangling an irreducible region we may end up visiting
3688 	 a use before the corresponding def.  That's a missed optimization
3689 	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
3690       if (dump_file && (dump_flags & TDF_DETAILS)
3691 	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
3692 	{
3693 	  fprintf (dump_file, "Keeping old value ");
3694 	  print_generic_expr (dump_file, (*slot)->result);
3695 	  fprintf (dump_file, " because of collision\n");
3696 	}
3697       free_reference (vr1);
3698       obstack_free (&vn_tables_obstack, vr1);
3699       return;
3700     }
3701 
3702   *slot = vr1;
3703   vr1->next = last_inserted_ref;
3704   last_inserted_ref = vr1;
3705 }
3706 
/* Insert a reference by its pieces into the current hash table with
3708    a value number of RESULT.  Return the resulting reference
3709    structure we created.  */
3710 
3711 vn_reference_t
3712 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3713 			    alias_set_type base_set, tree type,
3714 			    vec<vn_reference_op_s> operands,
3715 			    tree result, unsigned int value_id)
3716 
3717 {
3718   vn_reference_s **slot;
3719   vn_reference_t vr1;
3720 
3721   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3722   vr1->value_id = value_id;
3723   vr1->vuse = vuse_ssa_val (vuse);
3724   vr1->operands = operands;
3725   valueize_refs (&vr1->operands);
3726   vr1->type = type;
3727   vr1->punned = false;
3728   vr1->set = set;
3729   vr1->base_set = base_set;
3730   vr1->hashcode = vn_reference_compute_hash (vr1);
3731   if (result && TREE_CODE (result) == SSA_NAME)
3732     result = SSA_VAL (result);
3733   vr1->result = result;
3734 
3735   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3736 						      INSERT);
3737 
3738   /* At this point we should have all the things inserted that we have
3739      seen before, and we should never try inserting something that
3740      already exists.  */
3741   gcc_assert (!*slot);
3742 
3743   *slot = vr1;
3744   vr1->next = last_inserted_ref;
3745   last_inserted_ref = vr1;
3746   return vr1;
3747 }
3748 
/* Compute and return the hash value for nary operation VNO1.  */
3750 
3751 static hashval_t
3752 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3753 {
3754   inchash::hash hstate;
3755   unsigned i;
3756 
3757   for (i = 0; i < vno1->length; ++i)
3758     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3759       vno1->op[i] = SSA_VAL (vno1->op[i]);
3760 
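  /* Canonicalize operand order for commutative operations and
     comparisons so that, for example, a + b and b + a, or a < b and
     b > a, hash identically and compare equal.  */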
3761   if (((vno1->length == 2
3762 	&& commutative_tree_code (vno1->opcode))
3763        || (vno1->length == 3
3764 	   && commutative_ternary_tree_code (vno1->opcode)))
3765       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3766     std::swap (vno1->op[0], vno1->op[1]);
3767   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3768 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3769     {
3770       std::swap (vno1->op[0], vno1->op[1]);
3771       vno1->opcode = swap_tree_comparison  (vno1->opcode);
3772     }
3773 
3774   hstate.add_int (vno1->opcode);
3775   for (i = 0; i < vno1->length; ++i)
3776     inchash::add_expr (vno1->op[i], hstate);
3777 
3778   return hstate.end ();
3779 }
3780 
3781 /* Compare nary operations VNO1 and VNO2 and return true if they are
3782    equivalent.  */
3783 
3784 bool
3785 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3786 {
3787   unsigned i;
3788 
3789   if (vno1->hashcode != vno2->hashcode)
3790     return false;
3791 
3792   if (vno1->length != vno2->length)
3793     return false;
3794 
3795   if (vno1->opcode != vno2->opcode
3796       || !types_compatible_p (vno1->type, vno2->type))
3797     return false;
3798 
3799   for (i = 0; i < vno1->length; ++i)
3800     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3801       return false;
3802 
  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
3805   if (vno1->opcode == BIT_INSERT_EXPR
3806       && TREE_CODE (vno1->op[1]) == INTEGER_CST
3807       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3808 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3809     return false;
3810 
3811   return true;
3812 }
3813 
3814 /* Initialize VNO from the pieces provided.  */
3815 
3816 static void
3817 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3818 			     enum tree_code code, tree type, tree *ops)
3819 {
3820   vno->opcode = code;
3821   vno->length = length;
3822   vno->type = type;
3823   memcpy (&vno->op[0], ops, sizeof (tree) * length);
3824 }
3825 
3826 /* Return the number of operands for a vn_nary ops structure from STMT.  */
3827 
3828 static unsigned int
3829 vn_nary_length_from_stmt (gimple *stmt)
3830 {
3831   switch (gimple_assign_rhs_code (stmt))
3832     {
3833     case REALPART_EXPR:
3834     case IMAGPART_EXPR:
3835     case VIEW_CONVERT_EXPR:
3836       return 1;
3837 
3838     case BIT_FIELD_REF:
3839       return 3;
3840 
3841     case CONSTRUCTOR:
3842       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3843 
3844     default:
3845       return gimple_num_ops (stmt) - 1;
3846     }
3847 }
3848 
3849 /* Initialize VNO from STMT.  */
3850 
3851 static void
3852 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3853 {
3854   unsigned i;
3855 
3856   vno->opcode = gimple_assign_rhs_code (stmt);
3857   vno->type = gimple_expr_type (stmt);
3858   switch (vno->opcode)
3859     {
3860     case REALPART_EXPR:
3861     case IMAGPART_EXPR:
3862     case VIEW_CONVERT_EXPR:
3863       vno->length = 1;
3864       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3865       break;
3866 
3867     case BIT_FIELD_REF:
3868       vno->length = 3;
3869       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3870       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3871       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3872       break;
3873 
3874     case CONSTRUCTOR:
3875       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3876       for (i = 0; i < vno->length; ++i)
3877 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3878       break;
3879 
3880     default:
3881       gcc_checking_assert (!gimple_assign_single_p (stmt));
3882       vno->length = gimple_num_ops (stmt) - 1;
3883       for (i = 0; i < vno->length; ++i)
3884 	vno->op[i] = gimple_op (stmt, i + 1);
3885     }
3886 }
3887 
3888 /* Compute the hashcode for VNO and look for it in the hash table;
3889    return the resulting value number if it exists in the hash table.
3890    Return NULL_TREE if it does not exist in the hash table or if the
3891    result field of the operation is NULL.  VNRESULT will contain the
3892    vn_nary_op_t from the hashtable if it exists.  */
3893 
3894 static tree
3895 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3896 {
3897   vn_nary_op_s **slot;
3898 
3899   if (vnresult)
3900     *vnresult = NULL;
3901 
3902   vno->hashcode = vn_nary_op_compute_hash (vno);
3903   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3904   if (!slot)
3905     return NULL_TREE;
3906   if (vnresult)
3907     *vnresult = *slot;
3908   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3909 }
3910 
3911 /* Lookup a n-ary operation by its pieces and return the resulting value
3912    number if it exists in the hash table.  Return NULL_TREE if it does
3913    not exist in the hash table or if the result field of the operation
3914    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3915    if it exists.  */
3916 
3917 tree
3918 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3919 			  tree type, tree *ops, vn_nary_op_t *vnresult)
3920 {
3921   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3922 				  sizeof_vn_nary_op (length));
3923   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3924   return vn_nary_op_lookup_1 (vno1, vnresult);
3925 }
3926 
3927 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3928    value number if it exists in the hash table.  Return NULL_TREE if
3929    it does not exist in the hash table.  VNRESULT will contain the
3930    vn_nary_op_t from the hashtable if it exists.  */
3931 
3932 tree
3933 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3934 {
3935   vn_nary_op_t vno1
3936     = XALLOCAVAR (struct vn_nary_op_s,
3937 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3938   init_vn_nary_op_from_stmt (vno1, stmt);
3939   return vn_nary_op_lookup_1 (vno1, vnresult);
3940 }
3941 
3942 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
3943 
3944 static vn_nary_op_t
3945 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3946 {
3947   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3948 }
3949 
3950 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3951    obstack.  */
3952 
3953 static vn_nary_op_t
3954 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3955 {
3956   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3957 
3958   vno1->value_id = value_id;
3959   vno1->length = length;
3960   vno1->predicated_values = 0;
3961   vno1->u.result = result;
3962 
3963   return vno1;
3964 }
3965 
3966 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
3967    VNO->HASHCODE first.  */
3968 
3969 static vn_nary_op_t
3970 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3971 			bool compute_hash)
3972 {
3973   vn_nary_op_s **slot;
3974 
3975   if (compute_hash)
3976     {
3977       vno->hashcode = vn_nary_op_compute_hash (vno);
3978       gcc_assert (! vno->predicated_values
3979 		  || (! vno->u.values->next
3980 		      && vno->u.values->n == 1));
3981     }
3982 
3983   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3984   vno->unwind_to = *slot;
3985   if (*slot)
3986     {
3987       /* Prefer non-predicated values.
3988          ???  Only if those are constant, otherwise, with constant predicated
3989 	 value, turn them into predicated values with entry-block validity
3990 	 (???  but we always find the first valid result currently).  */
3991       if ((*slot)->predicated_values
3992 	  && ! vno->predicated_values)
3993 	{
3994 	  /* ???  We cannot remove *slot from the unwind stack list.
3995 	     For the moment we deal with this by skipping not found
3996 	     entries but this isn't ideal ...  */
3997 	  *slot = vno;
3998 	  /* ???  Maintain a stack of states we can unwind in
3999 	     vn_nary_op_s?  But how far do we unwind?  In reality
4000 	     we need to push change records somewhere...  Or not
4001 	     unwind vn_nary_op_s and linking them but instead
4002 	     unwind the results "list", linking that, which also
4003 	     doesn't move on hashtable resize.  */
4004 	  /* We can also have a ->unwind_to recording *slot there.
4005 	     That way we can make u.values a fixed size array with
4006 	     recording the number of entries but of course we then
4007 	     have always N copies for each unwind_to-state.  Or we
4008              make sure to only ever append and each unwinding will
4009 	     pop off one entry (but how to deal with predicated
4010 	     replaced with non-predicated here?)  */
4011 	  vno->next = last_inserted_nary;
4012 	  last_inserted_nary = vno;
4013 	  return vno;
4014 	}
4015       else if (vno->predicated_values
4016 	       && ! (*slot)->predicated_values)
4017 	return *slot;
4018       else if (vno->predicated_values
4019 	       && (*slot)->predicated_values)
4020 	{
4021 	  /* ???  Factor this all into a insert_single_predicated_value
4022 	     routine.  */
4023 	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4024 	  basic_block vno_bb
4025 	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4026 	  vn_pval *nval = vno->u.values;
4027 	  vn_pval **next = &vno->u.values;
4028 	  bool found = false;
4029 	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4030 	    {
4031 	      if (expressions_equal_p (val->result, vno->u.values->result))
4032 		{
4033 		  found = true;
4034 		  for (unsigned i = 0; i < val->n; ++i)
4035 		    {
4036 		      basic_block val_bb
4037 			= BASIC_BLOCK_FOR_FN (cfun,
4038 					      val->valid_dominated_by_p[i]);
4039 		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4040 			/* Value registered with more generic predicate.  */
4041 			return *slot;
4042 		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4043 			/* Shouldn't happen, we insert in RPO order.  */
4044 			gcc_unreachable ();
4045 		    }
4046 		  /* Append value.  */
4047 		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4048 						     sizeof (vn_pval)
4049 						     + val->n * sizeof (int));
4050 		  (*next)->next = NULL;
4051 		  (*next)->result = val->result;
4052 		  (*next)->n = val->n + 1;
4053 		  memcpy ((*next)->valid_dominated_by_p,
4054 			  val->valid_dominated_by_p,
4055 			  val->n * sizeof (int));
4056 		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4057 		  next = &(*next)->next;
4058 		  if (dump_file && (dump_flags & TDF_DETAILS))
4059 		    fprintf (dump_file, "Appending predicate to value.\n");
4060 		  continue;
4061 		}
4062 	      /* Copy other predicated values.  */
4063 	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4064 						 sizeof (vn_pval)
4065 						 + (val->n-1) * sizeof (int));
4066 	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4067 	      (*next)->next = NULL;
4068 	      next = &(*next)->next;
4069 	    }
4070 	  if (!found)
4071 	    *next = nval;
4072 
4073 	  *slot = vno;
4074 	  vno->next = last_inserted_nary;
4075 	  last_inserted_nary = vno;
4076 	  return vno;
4077 	}
4078 
4079       /* While we do not want to insert things twice it's awkward to
4080 	 avoid it in the case where visit_nary_op pattern-matches stuff
4081 	 and ends up simplifying the replacement to itself.  We then
4082 	 get two inserts, one from visit_nary_op and one from
4083 	 vn_nary_build_or_lookup.
4084 	 So allow inserts with the same value number.  */
4085       if ((*slot)->u.result == vno->u.result)
4086 	return *slot;
4087     }
4088 
  /* ???  There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */
4091 
4092   /* ???  We should return NULL if we do not use 'vno' and have the
4093      caller release it.  */
4094   gcc_assert (!*slot);
4095 
4096   *slot = vno;
4097   vno->next = last_inserted_nary;
4098   last_inserted_nary = vno;
4099   return vno;
4100 }
4101 
/* Insert an n-ary operation into the current hash table using its
4103    pieces.  Return the vn_nary_op_t structure we created and put in
4104    the hashtable.  */
4105 
4106 vn_nary_op_t
4107 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4108 			  tree type, tree *ops,
4109 			  tree result, unsigned int value_id)
4110 {
4111   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4112   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4113   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4114 }
4115 
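/* Insert the n-ary operation given by LENGTH, CODE, TYPE and OPS into
   the current hash table with value id VALUE_ID and a predicated value
   RESULT that is only valid when the destination block of edge PRED_E
   is reached.  This is used to record, for example, the outcome of a
   comparison on the branches of a condition.  Return NULL if the
   predicate cannot be represented.  */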
4116 static vn_nary_op_t
4117 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4118 				     tree type, tree *ops,
4119 				     tree result, unsigned int value_id,
4120 				     edge pred_e)
4121 {
4122   /* ???  Currently tracking BBs.  */
4123   if (! single_pred_p (pred_e->dest))
4124     {
4125       /* Never record for backedges.  */
4126       if (pred_e->flags & EDGE_DFS_BACK)
4127 	return NULL;
4128       edge_iterator ei;
4129       edge e;
4130       int cnt = 0;
4131       /* Ignore backedges.  */
4132       FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4133 	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4134 	  cnt++;
4135       if (cnt != 1)
4136 	return NULL;
4137     }
4138   if (dump_file && (dump_flags & TDF_DETAILS)
4139       /* ???  Fix dumping, but currently we only get comparisons.  */
4140       && TREE_CODE_CLASS (code) == tcc_comparison)
4141     {
4142       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4143 	       pred_e->dest->index);
4144       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4145       fprintf (dump_file, " %s ", get_tree_code_name (code));
4146       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4147       fprintf (dump_file, " == %s\n",
4148 	       integer_zerop (result) ? "false" : "true");
4149     }
4150   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4151   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4152   vno1->predicated_values = 1;
4153   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4154 					      sizeof (vn_pval));
4155   vno1->u.values->next = NULL;
4156   vno1->u.values->result = result;
4157   vno1->u.values->n = 1;
4158   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4159   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4160 }
4161 
4162 static bool
4163 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4164 
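/* Return the value of VNO that is valid in basic-block BB: the plain
   result if VNO is not predicated, the result of a predicated value
   whose recorded block dominates BB, or NULL_TREE if there is none.  */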
4165 static tree
4166 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4167 {
4168   if (! vno->predicated_values)
4169     return vno->u.result;
4170   for (vn_pval *val = vno->u.values; val; val = val->next)
4171     for (unsigned i = 0; i < val->n; ++i)
4172       /* Do not handle backedge executability optimistically since
4173 	 when figuring out whether to iterate we do not consider
4174 	 changed predication.  */
4175       if (dominated_by_p_w_unex
4176 	    (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4177 	     false))
4178 	return val->result;
4179   return NULL_TREE;
4180 }
4181 
4182 /* Insert the rhs of STMT into the current hash table with a value number of
4183    RESULT.  */
4184 
4185 static vn_nary_op_t
4186 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4187 {
4188   vn_nary_op_t vno1
4189     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4190 			result, VN_INFO (result)->value_id);
4191   init_vn_nary_op_from_stmt (vno1, stmt);
4192   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4193 }
4194 
4195 /* Compute a hashcode for PHI operation VP1 and return it.  */
4196 
4197 static inline hashval_t
4198 vn_phi_compute_hash (vn_phi_t vp1)
4199 {
4200   inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
4201 			? vp1->block->index : EDGE_COUNT (vp1->block->preds));
4202   tree phi1op;
4203   tree type;
4204   edge e;
4205   edge_iterator ei;
4206 
4207   /* If all PHI arguments are constants we need to distinguish
4208      the PHI node via its type.  */
4209   type = vp1->type;
4210   hstate.merge_hash (vn_hash_type (type));
4211 
4212   FOR_EACH_EDGE (e, ei, vp1->block->preds)
4213     {
      /* Don't hash backedge values; they need to be handled as VN_TOP
         for optimistic value-numbering.  */
4216       if (e->flags & EDGE_DFS_BACK)
4217 	continue;
4218 
4219       phi1op = vp1->phiargs[e->dest_idx];
4220       if (phi1op == VN_TOP)
4221 	continue;
4222       inchash::add_expr (phi1op, hstate);
4223     }
4224 
4225   return hstate.end ();
4226 }
4227 
4228 
4229 /* Return true if COND1 and COND2 represent the same condition, set
4230    *INVERTED_P if one needs to be inverted to make it the same as
4231    the other.  */
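/* For example, with integer operands a < b matches b > a with
   *INVERTED_P false and matches a >= b or b <= a with *INVERTED_P
   set.  */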
4232 
4233 static bool
4234 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4235 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4236 {
4237   enum tree_code code1 = gimple_cond_code (cond1);
4238   enum tree_code code2 = gimple_cond_code (cond2);
4239 
4240   *inverted_p = false;
4241   if (code1 == code2)
4242     ;
4243   else if (code1 == swap_tree_comparison (code2))
4244     std::swap (lhs2, rhs2);
4245   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4246     *inverted_p = true;
4247   else if (code1 == invert_tree_comparison
4248 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4249     {
4250       std::swap (lhs2, rhs2);
4251       *inverted_p = true;
4252     }
4253   else
4254     return false;
4255 
4256   return ((expressions_equal_p (lhs1, lhs2)
4257 	   && expressions_equal_p (rhs1, rhs2))
4258 	  || (commutative_tree_code (code1)
4259 	      && expressions_equal_p (lhs1, rhs2)
4260 	      && expressions_equal_p (rhs1, lhs2)));
4261 }
4262 
4263 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
4264 
4265 static int
4266 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4267 {
4268   if (vp1->hashcode != vp2->hashcode)
4269     return false;
4270 
4271   if (vp1->block != vp2->block)
4272     {
4273       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4274 	return false;
4275 
4276       switch (EDGE_COUNT (vp1->block->preds))
4277 	{
4278 	case 1:
4279 	  /* Single-arg PHIs are just copies.  */
4280 	  break;
4281 
4282 	case 2:
4283 	  {
4284 	    /* Rule out backedges into the PHI.  */
4285 	    if (vp1->block->loop_father->header == vp1->block
4286 		|| vp2->block->loop_father->header == vp2->block)
4287 	      return false;
4288 
4289 	    /* If the PHI nodes do not have compatible types
4290 	       they are not the same.  */
4291 	    if (!types_compatible_p (vp1->type, vp2->type))
4292 	      return false;
4293 
4294 	    basic_block idom1
4295 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4296 	    basic_block idom2
4297 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts, multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
4301 	    if (EDGE_COUNT (idom1->succs) != 2
4302 		|| EDGE_COUNT (idom2->succs) != 2)
4303 	      return false;
4304 
4305 	    /* Verify the controlling stmt is the same.  */
4306 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4307 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4308 	    if (! last1 || ! last2)
4309 	      return false;
4310 	    bool inverted_p;
4311 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4312 				      last2, vp2->cclhs, vp2->ccrhs,
4313 				      &inverted_p))
4314 	      return false;
4315 
4316 	    /* Get at true/false controlled edges into the PHI.  */
4317 	    edge te1, te2, fe1, fe2;
4318 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
4319 						       &te1, &fe1)
4320 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
4321 							  &te2, &fe2))
4322 	      return false;
4323 
4324 	    /* Swap edges if the second condition is the inverted of the
4325 	       first.  */
4326 	    if (inverted_p)
4327 	      std::swap (te2, fe2);
4328 
4329 	    /* ???  Handle VN_TOP specially.  */
4330 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4331 				       vp2->phiargs[te2->dest_idx])
4332 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4333 					  vp2->phiargs[fe2->dest_idx]))
4334 	      return false;
4335 
4336 	    return true;
4337 	  }
4338 
4339 	default:
4340 	  return false;
4341 	}
4342     }
4343 
4344   /* If the PHI nodes do not have compatible types
4345      they are not the same.  */
4346   if (!types_compatible_p (vp1->type, vp2->type))
4347     return false;
4348 
  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
4351   for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
4352     {
4353       tree phi1op = vp1->phiargs[i];
4354       tree phi2op = vp2->phiargs[i];
4355       if (phi1op == VN_TOP || phi2op == VN_TOP)
4356 	continue;
4357       if (!expressions_equal_p (phi1op, phi2op))
4358 	return false;
4359     }
4360 
4361   return true;
4362 }
4363 
4364 /* Lookup PHI in the current hash table, and return the resulting
4365    value number if it exists in the hash table.  Return NULL_TREE if
4366    it does not exist in the hash table. */
4367 
4368 static tree
4369 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4370 {
4371   vn_phi_s **slot;
4372   struct vn_phi_s *vp1;
4373   edge e;
4374   edge_iterator ei;
4375 
4376   vp1 = XALLOCAVAR (struct vn_phi_s,
4377 		    sizeof (struct vn_phi_s)
4378 		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4379 
4380   /* Canonicalize the SSA_NAME's to their value number.  */
4381   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4382     {
4383       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4384       if (TREE_CODE (def) == SSA_NAME
4385 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4386 	def = SSA_VAL (def);
4387       vp1->phiargs[e->dest_idx] = def;
4388     }
4389   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4390   vp1->block = gimple_bb (phi);
4391   /* Extract values of the controlling condition.  */
4392   vp1->cclhs = NULL_TREE;
4393   vp1->ccrhs = NULL_TREE;
4394   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4395   if (EDGE_COUNT (idom1->succs) == 2)
4396     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4397       {
4398 	/* ???  We want to use SSA_VAL here.  But possibly not
4399 	   allow VN_TOP.  */
4400 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4401 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4402       }
4403   vp1->hashcode = vn_phi_compute_hash (vp1);
4404   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4405   if (!slot)
4406     return NULL_TREE;
4407   return (*slot)->result;
4408 }
4409 
4410 /* Insert PHI into the current hash table with a value number of
4411    RESULT.  */
4412 
4413 static vn_phi_t
4414 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4415 {
4416   vn_phi_s **slot;
4417   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4418 					   sizeof (vn_phi_s)
4419 					   + ((gimple_phi_num_args (phi) - 1)
4420 					      * sizeof (tree)));
4421   edge e;
4422   edge_iterator ei;
4423 
4424   /* Canonicalize the SSA_NAME's to their value number.  */
4425   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4426     {
4427       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4428       if (TREE_CODE (def) == SSA_NAME
4429 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4430 	def = SSA_VAL (def);
4431       vp1->phiargs[e->dest_idx] = def;
4432     }
4433   vp1->value_id = VN_INFO (result)->value_id;
4434   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4435   vp1->block = gimple_bb (phi);
4436   /* Extract values of the controlling condition.  */
4437   vp1->cclhs = NULL_TREE;
4438   vp1->ccrhs = NULL_TREE;
4439   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4440   if (EDGE_COUNT (idom1->succs) == 2)
4441     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4442       {
4443 	/* ???  We want to use SSA_VAL here.  But possibly not
4444 	   allow VN_TOP.  */
4445 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4446 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4447       }
4448   vp1->result = result;
4449   vp1->hashcode = vn_phi_compute_hash (vp1);
4450 
4451   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4452   gcc_assert (!*slot);
4453 
4454   *slot = vp1;
4455   vp1->next = last_inserted_phi;
4456   last_inserted_phi = vp1;
4457   return vp1;
4458 }
4459 
4460 
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  When ALLOW_BACK is false, treat
   non-executable backedges as executable.  */
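/* For example, when the only executable edge into BB1 comes from a
   block dominated by BB2, BB1 is treated as dominated by BB2 even
   though the dominator tree, which ignores edge executability, says
   otherwise.  */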
4464 
4465 static bool
4466 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4467 {
4468   edge_iterator ei;
4469   edge e;
4470 
4471   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4472     return true;
4473 
  /* Before iterating we'd like to know if there exists an
     (executable) path from bb2 to bb1 at all; if not we can
     directly return false.  For now simply iterate once.  */
4477 
4478   /* Iterate to the single executable bb1 predecessor.  */
4479   if (EDGE_COUNT (bb1->preds) > 1)
4480     {
4481       edge prede = NULL;
4482       FOR_EACH_EDGE (e, ei, bb1->preds)
4483 	if ((e->flags & EDGE_EXECUTABLE)
4484 	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4485 	  {
4486 	    if (prede)
4487 	      {
4488 		prede = NULL;
4489 		break;
4490 	      }
4491 	    prede = e;
4492 	  }
4493       if (prede)
4494 	{
4495 	  bb1 = prede->src;
4496 
4497 	  /* Re-do the dominance check with changed bb1.  */
4498 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4499 	    return true;
4500 	}
4501     }
4502 
4503   /* Iterate to the single executable bb2 successor.  */
4504   edge succe = NULL;
4505   FOR_EACH_EDGE (e, ei, bb2->succs)
4506     if ((e->flags & EDGE_EXECUTABLE)
4507 	|| (!allow_back && (e->flags & EDGE_DFS_BACK)))
4508       {
4509 	if (succe)
4510 	  {
4511 	    succe = NULL;
4512 	    break;
4513 	  }
4514 	succe = e;
4515       }
4516   if (succe)
4517     {
4518       /* Verify the reached block is only reached through succe.
4519 	 If there is only one edge we can spare us the dominator
4520 	 check and iterate directly.  */
4521       if (EDGE_COUNT (succe->dest->preds) > 1)
4522 	{
4523 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
4524 	    if (e != succe
4525 		&& ((e->flags & EDGE_EXECUTABLE)
4526 		    || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4527 	      {
4528 		succe = NULL;
4529 		break;
4530 	      }
4531 	}
4532       if (succe)
4533 	{
4534 	  bb2 = succe->dest;
4535 
4536 	  /* Re-do the dominance check with changed bb2.  */
4537 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4538 	    return true;
4539 	}
4540     }
4541 
4542   /* We could now iterate updating bb1 / bb2.  */
4543   return false;
4544 }
4545 
4546 /* Set the value number of FROM to TO, return true if it has changed
4547    as a result.  */
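/* The lattice effectively goes VN_TOP -> constant/copy -> VARYING,
   where VARYING is represented by the name being its own value;
   transitions that could oscillate between two values are forced to
   VARYING below so that iteration is guaranteed to terminate.  */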
4548 
4549 static inline bool
4550 set_ssa_val_to (tree from, tree to)
4551 {
4552   vn_ssa_aux_t from_info = VN_INFO (from);
4553   tree currval = from_info->valnum; // SSA_VAL (from)
4554   poly_int64 toff, coff;
4555   bool curr_undefined = false;
4556   bool curr_invariant = false;
4557 
4558   /* The only thing we allow as value numbers are ssa_names
4559      and invariants.  So assert that here.  We don't allow VN_TOP
4560      as visiting a stmt should produce a value-number other than
4561      that.
4562      ???  Still VN_TOP can happen for unreachable code, so force
4563      it to varying in that case.  Not all code is prepared to
4564      get VN_TOP on valueization.  */
4565   if (to == VN_TOP)
4566     {
4567       /* ???  When iterating and visiting PHI <undef, backedge-value>
4568          for the first time we rightfully get VN_TOP and we need to
4569 	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4570 	 With SCCVN we were simply lucky we iterated the other PHI
4571 	 cycles first and thus visited the backedge-value DEF.  */
4572       if (currval == VN_TOP)
4573 	goto set_and_exit;
4574       if (dump_file && (dump_flags & TDF_DETAILS))
4575 	fprintf (dump_file, "Forcing value number to varying on "
4576 		 "receiving VN_TOP\n");
4577       to = from;
4578     }
4579 
4580   gcc_checking_assert (to != NULL_TREE
4581 		       && ((TREE_CODE (to) == SSA_NAME
4582 			    && (to == from || SSA_VAL (to) == to))
4583 			   || is_gimple_min_invariant (to)));
4584 
4585   if (from != to)
4586     {
4587       if (currval == from)
4588 	{
4589 	  if (dump_file && (dump_flags & TDF_DETAILS))
4590 	    {
4591 	      fprintf (dump_file, "Not changing value number of ");
4592 	      print_generic_expr (dump_file, from);
4593 	      fprintf (dump_file, " from VARYING to ");
4594 	      print_generic_expr (dump_file, to);
4595 	      fprintf (dump_file, "\n");
4596 	    }
4597 	  return false;
4598 	}
4599       curr_invariant = is_gimple_min_invariant (currval);
4600       curr_undefined = (TREE_CODE (currval) == SSA_NAME
4601 			&& ssa_undefined_value_p (currval, false));
4602       if (currval != VN_TOP
4603 	  && !curr_invariant
4604 	  && !curr_undefined
4605 	  && is_gimple_min_invariant (to))
4606 	{
4607 	  if (dump_file && (dump_flags & TDF_DETAILS))
4608 	    {
4609 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4610 		       "value number of ");
4611 	      print_generic_expr (dump_file, from);
4612 	      fprintf (dump_file, " from ");
4613 	      print_generic_expr (dump_file, currval);
4614 	      fprintf (dump_file, " (non-constant) to ");
4615 	      print_generic_expr (dump_file, to);
4616 	      fprintf (dump_file, " (constant)\n");
4617 	    }
4618 	  to = from;
4619 	}
4620       else if (currval != VN_TOP
4621 	       && !curr_undefined
4622 	       && TREE_CODE (to) == SSA_NAME
4623 	       && ssa_undefined_value_p (to, false))
4624 	{
4625 	  if (dump_file && (dump_flags & TDF_DETAILS))
4626 	    {
4627 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4628 		       "value number of ");
4629 	      print_generic_expr (dump_file, from);
4630 	      fprintf (dump_file, " from ");
4631 	      print_generic_expr (dump_file, currval);
4632 	      fprintf (dump_file, " (non-undefined) to ");
4633 	      print_generic_expr (dump_file, to);
4634 	      fprintf (dump_file, " (undefined)\n");
4635 	    }
4636 	  to = from;
4637 	}
4638       else if (TREE_CODE (to) == SSA_NAME
4639 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4640 	to = from;
4641     }
4642 
4643 set_and_exit:
4644   if (dump_file && (dump_flags & TDF_DETAILS))
4645     {
4646       fprintf (dump_file, "Setting value number of ");
4647       print_generic_expr (dump_file, from);
4648       fprintf (dump_file, " to ");
4649       print_generic_expr (dump_file, to);
4650     }
4651 
4652   if (currval != to
4653       && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate iteration.  */
4656       && !(curr_undefined
4657 	   && TREE_CODE (to) == SSA_NAME
4658 	   && ssa_undefined_value_p (to, false))
4659       /* ???  For addresses involving volatile objects or types operand_equal_p
4660          does not reliably detect ADDR_EXPRs as equal.  We know we are only
4661 	 getting invariant gimple addresses here, so can use
4662 	 get_addr_base_and_unit_offset to do this comparison.  */
4663       && !(TREE_CODE (currval) == ADDR_EXPR
4664 	   && TREE_CODE (to) == ADDR_EXPR
4665 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4666 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4667 	   && known_eq (coff, toff)))
4668     {
4669       if (to != from
4670 	  && currval != VN_TOP
4671 	  && !curr_undefined
4672 	  /* We do not want to allow lattice transitions from one value
4673 	     to another since that may lead to not terminating iteration
4674 	     (see PR95049).  Since there's no convenient way to check
4675 	     for the allowed transition of VAL -> PHI (loop entry value,
4676 	     same on two PHIs, to same PHI result) we restrict the check
4677 	     to invariants.  */
4678 	  && curr_invariant
4679 	  && is_gimple_min_invariant (to))
4680 	{
4681 	  if (dump_file && (dump_flags & TDF_DETAILS))
4682 	    fprintf (dump_file, " forced VARYING");
4683 	  to = from;
4684 	}
4685       if (dump_file && (dump_flags & TDF_DETAILS))
4686 	fprintf (dump_file, " (changed)\n");
4687       from_info->valnum = to;
4688       return true;
4689     }
4690   if (dump_file && (dump_flags & TDF_DETAILS))
4691     fprintf (dump_file, "\n");
4692   return false;
4693 }
4694 
4695 /* Set all definitions in STMT to value number to themselves.
4696    Return true if a value number changed. */
4697 
4698 static bool
4699 defs_to_varying (gimple *stmt)
4700 {
4701   bool changed = false;
4702   ssa_op_iter iter;
4703   def_operand_p defp;
4704 
4705   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4706     {
4707       tree def = DEF_FROM_PTR (defp);
4708       changed |= set_ssa_val_to (def, def);
4709     }
4710   return changed;
4711 }
4712 
4713 /* Visit a copy between LHS and RHS, return true if the value number
4714    changed.  */
4715 
4716 static bool
4717 visit_copy (tree lhs, tree rhs)
4718 {
4719   /* Valueize.  */
4720   rhs = SSA_VAL (rhs);
4721 
4722   return set_ssa_val_to (lhs, rhs);
4723 }
4724 
/* Lookup a value for OP in the wider type WIDE_TYPE such that its
   value in the type of OP equals OP.  ALLOW_TRUNCATE permits looking
   through a truncation from an existing wider value.  Return
   NULL_TREE if no such value is available.  */
4727 
4728 static tree
4729 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4730 {
4731   if (TREE_CODE (op) == SSA_NAME)
4732     op = vn_valueize (op);
4733 
4734   /* Either we have the op widened available.  */
4735   tree ops[3] = {};
4736   ops[0] = op;
4737   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4738 				       wide_type, ops, NULL);
4739   if (tem)
4740     return tem;
4741 
4742   /* Or the op is truncated from some existing value.  */
4743   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4744     {
4745       gimple *def = SSA_NAME_DEF_STMT (op);
4746       if (is_gimple_assign (def)
4747 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4748 	{
4749 	  tem = gimple_assign_rhs1 (def);
4750 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4751 	    {
4752 	      if (TREE_CODE (tem) == SSA_NAME)
4753 		tem = vn_valueize (tem);
4754 	      return tem;
4755 	    }
4756 	}
4757     }
4758 
4759   /* For constants simply extend it.  */
4760   if (TREE_CODE (op) == INTEGER_CST)
4761     return wide_int_to_tree (wide_type, wi::to_wide (op));
4762 
4763   return NULL_TREE;
4764 }
4765 
4766 /* Visit a nary operator RHS, value number it, and return true if the
4767    value number of LHS has changed as a result.  */
4768 
4769 static bool
4770 visit_nary_op (tree lhs, gassign *stmt)
4771 {
4772   vn_nary_op_t vnresult;
4773   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4774   if (! result && vnresult)
4775     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4776   if (result)
4777     return set_ssa_val_to (lhs, result);
4778 
4779   /* Do some special pattern matching for redundancies of operations
4780      in different types.  */
4781   enum tree_code code = gimple_assign_rhs_code (stmt);
4782   tree type = TREE_TYPE (lhs);
4783   tree rhs1 = gimple_assign_rhs1 (stmt);
4784   switch (code)
4785     {
4786     CASE_CONVERT:
4787       /* Match arithmetic done in a different type where we can easily
4788          substitute the result from some earlier sign-changed or widened
4789 	 operation.  */
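      /* For example (an illustrative sketch): for a narrow
	   _1 = a_2 + b_3;
	   _4 = (int) _1;
	 look up an already available addition of the widened operands
	 and reuse that, possibly masked when the conversion is a
	 zero-extension.  */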
4790       if (INTEGRAL_TYPE_P (type)
4791 	  && TREE_CODE (rhs1) == SSA_NAME
4792 	  /* We only handle sign-changes, zero-extension -> & mask or
4793 	     sign-extension if we know the inner operation doesn't
4794 	     overflow.  */
4795 	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4796 		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4797 		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4798 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4799 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4800 	{
4801 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4802 	  if (def
4803 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
4804 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
4805 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
4806 	    {
4807 	      tree ops[3] = {};
4808 	      /* When requiring a sign-extension we cannot model a
4809 		 previous truncation with a single op so don't bother.  */
4810 	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4811 	      /* Either we have the op widened available.  */
4812 	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4813 					   allow_truncate);
4814 	      if (ops[0])
4815 		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4816 					     allow_truncate);
4817 	      if (ops[0] && ops[1])
4818 		{
4819 		  ops[0] = vn_nary_op_lookup_pieces
4820 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
4821 		  /* We have wider operation available.  */
4822 		  if (ops[0]
4823 		      /* If the leader is a wrapping operation we can
4824 		         insert it for code hoisting w/o introducing
4825 			 undefined overflow.  If it is not it has to
4826 			 be available.  See PR86554.  */
4827 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4828 			  || (rpo_avail && vn_context_bb
4829 			      && rpo_avail->eliminate_avail (vn_context_bb,
4830 							     ops[0]))))
4831 		    {
4832 		      unsigned lhs_prec = TYPE_PRECISION (type);
4833 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4834 		      if (lhs_prec == rhs_prec
4835 			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4836 			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4837 			{
4838 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4839 						    NOP_EXPR, type, ops[0]);
4840 			  result = vn_nary_build_or_lookup (&match_op);
4841 			  if (result)
4842 			    {
4843 			      bool changed = set_ssa_val_to (lhs, result);
4844 			      vn_nary_op_insert_stmt (stmt, result);
4845 			      return changed;
4846 			    }
4847 			}
4848 		      else
4849 			{
4850 			  tree mask = wide_int_to_tree
4851 			    (type, wi::mask (rhs_prec, false, lhs_prec));
4852 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4853 						    BIT_AND_EXPR,
4854 						    TREE_TYPE (lhs),
4855 						    ops[0], mask);
4856 			  result = vn_nary_build_or_lookup (&match_op);
4857 			  if (result)
4858 			    {
4859 			      bool changed = set_ssa_val_to (lhs, result);
4860 			      vn_nary_op_insert_stmt (stmt, result);
4861 			      return changed;
4862 			    }
4863 			}
4864 		    }
4865 		}
4866 	    }
4867 	}
4868       break;
4869     case BIT_AND_EXPR:
4870       if (INTEGRAL_TYPE_P (type)
4871 	  && TREE_CODE (rhs1) == SSA_NAME
4872 	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4873 	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4874 	  && default_vn_walk_kind != VN_NOWALK
4875 	  && CHAR_BIT == 8
4876 	  && BITS_PER_UNIT == 8
4877 	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4878 	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4879 	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
4880 	{
4881 	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4882 	  if (ass
4883 	      && !gimple_has_volatile_ops (ass)
4884 	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
4885 	    {
4886 	      tree last_vuse = gimple_vuse (ass);
4887 	      tree op = gimple_assign_rhs1 (ass);
4888 	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
4889 						 default_vn_walk_kind,
4890 						 NULL, true, &last_vuse,
4891 						 gimple_assign_rhs2 (stmt));
4892 	      if (result
4893 		  && useless_type_conversion_p (TREE_TYPE (result),
4894 						TREE_TYPE (op)))
4895 		return set_ssa_val_to (lhs, result);
4896 	    }
4897 	}
4898       break;
4899     default:
4900       break;
4901     }
4902 
4903   bool changed = set_ssa_val_to (lhs, lhs);
4904   vn_nary_op_insert_stmt (stmt, lhs);
4905   return changed;
4906 }
4907 
4908 /* Visit a call STMT storing into LHS.  Return true if the value number
4909    of the LHS has changed as a result.  */
4910 
4911 static bool
4912 visit_reference_op_call (tree lhs, gcall *stmt)
4913 {
4914   bool changed = false;
4915   struct vn_reference_s vr1;
4916   vn_reference_t vnresult = NULL;
4917   tree vdef = gimple_vdef (stmt);
4918 
4919   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
4920   if (lhs && TREE_CODE (lhs) != SSA_NAME)
4921     lhs = NULL_TREE;
4922 
4923   vn_reference_lookup_call (stmt, &vnresult, &vr1);
4924   if (vnresult)
4925     {
4926       if (vnresult->result_vdef && vdef)
4927 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4928       else if (vdef)
4929 	/* If the call was discovered to be pure or const reflect
4930 	   that as far as possible.  */
4931 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4932 
4933       if (!vnresult->result && lhs)
4934 	vnresult->result = lhs;
4935 
4936       if (vnresult->result && lhs)
4937 	changed |= set_ssa_val_to (lhs, vnresult->result);
4938     }
4939   else
4940     {
4941       vn_reference_t vr2;
4942       vn_reference_s **slot;
4943       tree vdef_val = vdef;
4944       if (vdef)
4945 	{
	  /* If we value numbered an indirect function to one not
	     clobbering memory, value number its VDEF to its VUSE.  */
4949 	  tree fn = gimple_call_fn (stmt);
4950 	  if (fn && TREE_CODE (fn) == SSA_NAME)
4951 	    {
4952 	      fn = SSA_VAL (fn);
4953 	      if (TREE_CODE (fn) == ADDR_EXPR
4954 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4955 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4956 		      & (ECF_CONST | ECF_PURE)))
4957 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4958 	    }
4959 	  changed |= set_ssa_val_to (vdef, vdef_val);
4960 	}
4961       if (lhs)
4962 	changed |= set_ssa_val_to (lhs, lhs);
4963       vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4964       vr2->vuse = vr1.vuse;
4965       /* As we are not walking the virtual operand chain we know the
4966 	 shared_lookup_references are still original so we can re-use
4967 	 them here.  */
4968       vr2->operands = vr1.operands.copy ();
4969       vr2->type = vr1.type;
4970       vr2->punned = vr1.punned;
4971       vr2->set = vr1.set;
4972       vr2->base_set = vr1.base_set;
4973       vr2->hashcode = vr1.hashcode;
4974       vr2->result = lhs;
4975       vr2->result_vdef = vdef_val;
4976       vr2->value_id = 0;
4977       slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4978 							  INSERT);
4979       gcc_assert (!*slot);
4980       *slot = vr2;
4981       vr2->next = last_inserted_ref;
4982       last_inserted_ref = vr2;
4983     }
4984 
4985   return changed;
4986 }
4987 
4988 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4989    and return true if the value number of the LHS has changed as a result.  */
4990 
4991 static bool
4992 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4993 {
4994   bool changed = false;
4995   tree last_vuse;
4996   tree result;
4997   vn_reference_t res;
4998 
4999   last_vuse = gimple_vuse (stmt);
5000   result = vn_reference_lookup (op, gimple_vuse (stmt),
5001 				default_vn_walk_kind, &res, true, &last_vuse);
5002 
5003   /* We handle type-punning through unions by value-numbering based
5004      on offset and size of the access.  Be prepared to handle a
5005      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
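  /* This happens e.g. for a union { int i; float f; } where a read of
     .f after a store to .i finds the stored value in the wrong type.  */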
5006   if (result
5007       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5008     {
5009       /* Avoid the type punning in case the result mode has padding where
	 the op we look up has not.  */
5011       if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5012 		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5013 	result = NULL_TREE;
5014       else
5015 	{
5016 	  /* We will be setting the value number of lhs to the value number
5017 	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5018 	     So first simplify and lookup this expression to see if it
5019 	     is already available.  */
5020 	  gimple_match_op res_op (gimple_match_cond::UNCOND,
5021 				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5022 	  result = vn_nary_build_or_lookup (&res_op);
5023 	  if (result
5024 	      && TREE_CODE (result) == SSA_NAME
5025 	      && VN_INFO (result)->needs_insertion)
5026 	    /* Track whether this is the canonical expression for different
5027 	       typed loads.  We use that as a stopgap measure for code
5028 	       hoisting when dealing with floating point loads.  */
5029 	    res->punned = true;
5030 	}
5031 
      /* When building the conversion fails, avoid inserting the reference
         again.  */
5034       if (!result)
5035 	return set_ssa_val_to (lhs, lhs);
5036     }
5037 
5038   if (result)
5039     changed = set_ssa_val_to (lhs, result);
5040   else
5041     {
5042       changed = set_ssa_val_to (lhs, lhs);
5043       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5044     }
5045 
5046   return changed;
5047 }
5048 
5049 
5050 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5051    and return true if the value number of the LHS has changed as a result.  */
5052 
5053 static bool
5054 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5055 {
5056   bool changed = false;
5057   vn_reference_t vnresult = NULL;
5058   tree assign;
5059   bool resultsame = false;
5060   tree vuse = gimple_vuse (stmt);
5061   tree vdef = gimple_vdef (stmt);
5062 
5063   if (TREE_CODE (op) == SSA_NAME)
5064     op = SSA_VAL (op);
5065 
  /* First we want to look up using the *vuses* from the store and see
     whether the last store to this location with the same address
     had the same value.
5069 
5070      The vuses represent the memory state before the store.  If the
5071      memory state, address, and value of the store is the same as the
5072      last store to this location, then this store will produce the
5073      same memory state as that store.
5074 
5075      In this case the vdef versions for this store are value numbered to those
5076      vuse versions, since they represent the same memory state after
5077      this store.
5078 
5079      Otherwise, the vdefs for the store are used when inserting into
5080      the table, since the store generates a new memory state.  */
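  /* For example a second *p_1 = x_2 directly following an earlier
     *p_1 = x_2 with no intervening memory modification produces the
     same memory state, so its VDEF gets the value of its VUSE.  */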
5081 
5082   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5083   if (vnresult
5084       && vnresult->result)
5085     {
5086       tree result = vnresult->result;
5087       gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5088 			   || result == SSA_VAL (result));
5089       resultsame = expressions_equal_p (result, op);
5090       if (resultsame)
5091 	{
5092 	  /* If the TBAA state isn't compatible for downstream reads
5093 	     we cannot value-number the VDEFs the same.  */
5094 	  ao_ref lhs_ref;
5095 	  ao_ref_init (&lhs_ref, lhs);
5096 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
5097 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5098 	  if ((vnresult->set != set
5099 	       && ! alias_set_subset_of (set, vnresult->set))
5100 	      || (vnresult->base_set != base_set
5101 		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
5102 	    resultsame = false;
5103 	}
5104     }
5105 
5106   if (!resultsame)
5107     {
5108       /* Only perform the following when being called from PRE
5109 	 which embeds tail merging.  */
5110       if (default_vn_walk_kind == VN_WALK)
5111 	{
5112 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5113 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5114 	  if (vnresult)
5115 	    {
5116 	      VN_INFO (vdef)->visited = true;
5117 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
5118 	    }
5119 	}
5120 
5121       if (dump_file && (dump_flags & TDF_DETAILS))
5122 	{
5123 	  fprintf (dump_file, "No store match\n");
5124 	  fprintf (dump_file, "Value numbering store ");
5125 	  print_generic_expr (dump_file, lhs);
5126 	  fprintf (dump_file, " to ");
5127 	  print_generic_expr (dump_file, op);
5128 	  fprintf (dump_file, "\n");
5129 	}
5130       /* Have to set value numbers before insert, since insert is
5131 	 going to valueize the references in-place.  */
5132       if (vdef)
5133 	changed |= set_ssa_val_to (vdef, vdef);
5134 
5135       /* Do not insert structure copies into the tables.  */
5136       if (is_gimple_min_invariant (op)
5137 	  || is_gimple_reg (op))
5138         vn_reference_insert (lhs, op, vdef, NULL);
5139 
5140       /* Only perform the following when being called from PRE
5141 	 which embeds tail merging.  */
5142       if (default_vn_walk_kind == VN_WALK)
5143 	{
5144 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5145 	  vn_reference_insert (assign, lhs, vuse, vdef);
5146 	}
5147     }
5148   else
5149     {
5150       /* We had a match, so value number the vdef to have the value
5151 	 number of the vuse it came from.  */
5152 
5153       if (dump_file && (dump_flags & TDF_DETAILS))
5154 	fprintf (dump_file, "Store matched earlier value, "
5155 		 "value numbering store vdefs to matching vuses.\n");
5156 
5157       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5158     }
5159 
5160   return changed;
5161 }
5162 
5163 /* Visit and value number PHI, return true if the value number
5164    changed.  When BACKEDGES_VARYING_P is true then assume all
5165    backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
5167    to true if we'd insert into the hashtable.  */
5168 
5169 static bool
5170 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5171 {
5172   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5173   tree backedge_val = NULL_TREE;
5174   bool seen_non_backedge = false;
5175   tree sameval_base = NULL_TREE;
5176   poly_int64 soff, doff;
5177   unsigned n_executable = 0;
5178   edge_iterator ei;
5179   edge e;
5180 
5181   /* TODO: We could check for this in initialization, and replace this
5182      with a gcc_assert.  */
5183   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5184     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5185 
  /* We track whether a PHI was CSEd to, to avoid excessive iterations
5187      that would be necessary only because the PHI changed arguments
5188      but not value.  */
5189   if (!inserted)
5190     gimple_set_plf (phi, GF_PLF_1, false);
5191 
5192   /* See if all non-TOP arguments have the same value.  TOP is
5193      equivalent to everything, so we can ignore it.  */
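  /* E.g. when every executable argument has the value of x_1 the PHI
     result is value numbered to x_1 (or to an equivalent PHI found in
     the hash table below).  */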
5194   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5195     if (e->flags & EDGE_EXECUTABLE)
5196       {
5197 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5198 
5199 	if (def == PHI_RESULT (phi))
5200 	  continue;
5201 	++n_executable;
5202 	if (TREE_CODE (def) == SSA_NAME)
5203 	  {
5204 	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5205 	      def = SSA_VAL (def);
5206 	    if (e->flags & EDGE_DFS_BACK)
5207 	      backedge_val = def;
5208 	  }
5209 	if (!(e->flags & EDGE_DFS_BACK))
5210 	  seen_non_backedge = true;
5211 	if (def == VN_TOP)
5212 	  ;
5213 	/* Ignore undefined defs for sameval but record one.  */
5214 	else if (TREE_CODE (def) == SSA_NAME
5215 		 && ! virtual_operand_p (def)
5216 		 && ssa_undefined_value_p (def, false))
5217 	  seen_undef = def;
5218 	else if (sameval == VN_TOP)
5219 	  sameval = def;
5220 	else if (!expressions_equal_p (def, sameval))
5221 	  {
5222 	    /* We know we're arriving only with invariant addresses here,
5223 	       try harder comparing them.  We can do some caching here
5224 	       which we cannot do in expressions_equal_p.  */
5225 	    if (TREE_CODE (def) == ADDR_EXPR
5226 		&& TREE_CODE (sameval) == ADDR_EXPR
5227 		&& sameval_base != (void *)-1)
5228 	      {
5229 		if (!sameval_base)
5230 		  sameval_base = get_addr_base_and_unit_offset
5231 				   (TREE_OPERAND (sameval, 0), &soff);
5232 		if (!sameval_base)
5233 		  sameval_base = (tree)(void *)-1;
5234 		else if ((get_addr_base_and_unit_offset
5235 			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
5236 			 && known_eq (soff, doff))
5237 		  continue;
5238 	      }
5239 	    sameval = NULL_TREE;
5240 	    break;
5241 	  }
5242       }
5243 
  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value, drop to
     VARYING.
5247      If we value-number a virtual operand never value-number to the
5248      value from the backedge as that confuses the alias-walking code.
5249      See gcc.dg/torture/pr87176.c.  If the value is the same on a
5250      non-backedge everything is OK though.  */
5251   bool visited_p;
5252   if ((backedge_val
5253        && !seen_non_backedge
5254        && TREE_CODE (backedge_val) == SSA_NAME
5255        && sameval == backedge_val
5256        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5257 	   || SSA_VAL (backedge_val) != backedge_val))
      /* Do not value-number a virtual operand to something not visited
	 though, since that would allow us to escape a region in alias
	 walking.  */
5260       || (sameval
5261 	  && TREE_CODE (sameval) == SSA_NAME
5262 	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5263 	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5264 	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
5265     /* Note this just drops to VARYING without inserting the PHI into
5266        the hashes.  */
5267     result = PHI_RESULT (phi);
5268   /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
5270   else if (n_executable <= 1)
5271     result = seen_undef ? seen_undef : sameval;
5272   /* If we saw only undefined values and VN_TOP use one of the
5273      undefined values.  */
5274   else if (sameval == VN_TOP)
5275     result = seen_undef ? seen_undef : sameval;
5276   /* First see if it is equivalent to a phi node in this block.  We prefer
5277      this as it allows IV elimination - see PRs 66502 and 67167.  */
5278   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5279     {
5280       if (!inserted
5281 	  && TREE_CODE (result) == SSA_NAME
5282 	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5283 	{
5284 	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5285 	  if (dump_file && (dump_flags & TDF_DETAILS))
5286 	    {
5287 	      fprintf (dump_file, "Marking CSEd to PHI node ");
5288 	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5289 				 0, TDF_SLIM);
5290 	      fprintf (dump_file, "\n");
5291 	    }
5292 	}
5293     }
5294   /* If all values are the same use that, unless we've seen undefined
5295      values as well and the value isn't constant.
5296      CCP/copyprop have the same restriction to not remove uninit warnings.  */
5297   else if (sameval
5298 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
5299     result = sameval;
5300   else
5301     {
5302       result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying; for constant value numbers
         we would mess up equivalences otherwise, as we are only comparing
	 the immediate controlling predicates.  */
5306       vn_phi_insert (phi, result, backedges_varying_p);
5307       if (inserted)
5308 	*inserted = true;
5309     }
5310 
5311   return set_ssa_val_to (PHI_RESULT (phi), result);
5312 }
5313 
5314 /* Try to simplify RHS using equivalences and constant folding.  */
5315 
5316 static tree
5317 try_to_simplify (gassign *stmt)
5318 {
5319   enum tree_code code = gimple_assign_rhs_code (stmt);
5320   tree tem;
5321 
  /* For stores we can end up simplifying an SSA_NAME rhs.  Just return
     in this case; there is no point in doing extra work.  */
5324   if (code == SSA_NAME)
5325     return NULL_TREE;
5326 
5327   /* First try constant folding based on our current lattice.  */
5328   mprts_hook = vn_lookup_simplify_result;
5329   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5330   mprts_hook = NULL;
5331   if (tem
5332       && (TREE_CODE (tem) == SSA_NAME
5333 	  || is_gimple_min_invariant (tem)))
5334     return tem;
5335 
5336   return NULL_TREE;
5337 }
5338 
5339 /* Visit and value number STMT, return true if the value number
5340    changed.  */
5341 
5342 static bool
5343 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5344 {
5345   bool changed = false;
5346 
5347   if (dump_file && (dump_flags & TDF_DETAILS))
5348     {
5349       fprintf (dump_file, "Value numbering stmt = ");
5350       print_gimple_stmt (dump_file, stmt, 0);
5351     }
5352 
5353   if (gimple_code (stmt) == GIMPLE_PHI)
5354     changed = visit_phi (stmt, NULL, backedges_varying_p);
5355   else if (gimple_has_volatile_ops (stmt))
5356     changed = defs_to_varying (stmt);
5357   else if (gassign *ass = dyn_cast <gassign *> (stmt))
5358     {
5359       enum tree_code code = gimple_assign_rhs_code (ass);
5360       tree lhs = gimple_assign_lhs (ass);
5361       tree rhs1 = gimple_assign_rhs1 (ass);
5362       tree simplified;
5363 
5364       /* Shortcut for copies. Simplifying copies is pointless,
5365 	 since we copy the expression and value they represent.  */
5366       if (code == SSA_NAME
5367 	  && TREE_CODE (lhs) == SSA_NAME)
5368 	{
5369 	  changed = visit_copy (lhs, rhs1);
5370 	  goto done;
5371 	}
5372       simplified = try_to_simplify (ass);
5373       if (simplified)
5374 	{
5375 	  if (dump_file && (dump_flags & TDF_DETAILS))
5376 	    {
5377 	      fprintf (dump_file, "RHS ");
5378 	      print_gimple_expr (dump_file, ass, 0);
5379 	      fprintf (dump_file, " simplified to ");
5380 	      print_generic_expr (dump_file, simplified);
5381 	      fprintf (dump_file, "\n");
5382 	    }
5383 	}
5384       /* Setting value numbers to constants will occasionally
5385 	 screw up phi congruence because constants are not
5386 	 uniquely associated with a single ssa name that can be
5387 	 looked up.  */
5388       if (simplified
5389 	  && is_gimple_min_invariant (simplified)
5390 	  && TREE_CODE (lhs) == SSA_NAME)
5391 	{
5392 	  changed = set_ssa_val_to (lhs, simplified);
5393 	  goto done;
5394 	}
5395       else if (simplified
5396 	       && TREE_CODE (simplified) == SSA_NAME
5397 	       && TREE_CODE (lhs) == SSA_NAME)
5398 	{
5399 	  changed = visit_copy (lhs, simplified);
5400 	  goto done;
5401 	}
5402 
5403       if ((TREE_CODE (lhs) == SSA_NAME
5404 	   /* We can substitute SSA_NAMEs that are live over
5405 	      abnormal edges with their constant value.  */
5406 	   && !(gimple_assign_copy_p (ass)
5407 		&& is_gimple_min_invariant (rhs1))
5408 	   && !(simplified
5409 		&& is_gimple_min_invariant (simplified))
5410 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5411 	  /* Stores or copies from SSA_NAMEs that are live over
5412 	     abnormal edges are a problem.  */
5413 	  || (code == SSA_NAME
5414 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5415 	changed = defs_to_varying (ass);
5416       else if (REFERENCE_CLASS_P (lhs)
5417 	       || DECL_P (lhs))
5418 	changed = visit_reference_op_store (lhs, rhs1, ass);
5419       else if (TREE_CODE (lhs) == SSA_NAME)
5420 	{
5421 	  if ((gimple_assign_copy_p (ass)
5422 	       && is_gimple_min_invariant (rhs1))
5423 	      || (simplified
5424 		  && is_gimple_min_invariant (simplified)))
5425 	    {
5426 	      if (simplified)
5427 		changed = set_ssa_val_to (lhs, simplified);
5428 	      else
5429 		changed = set_ssa_val_to (lhs, rhs1);
5430 	    }
5431 	  else
5432 	    {
5433 	      /* Visit the original statement.  */
5434 	      switch (vn_get_stmt_kind (ass))
5435 		{
5436 		case VN_NARY:
5437 		  changed = visit_nary_op (lhs, ass);
5438 		  break;
5439 		case VN_REFERENCE:
5440 		  changed = visit_reference_op_load (lhs, rhs1, ass);
5441 		  break;
5442 		default:
5443 		  changed = defs_to_varying (ass);
5444 		  break;
5445 		}
5446 	    }
5447 	}
5448       else
5449 	changed = defs_to_varying (ass);
5450     }
5451   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5452     {
5453       tree lhs = gimple_call_lhs (call_stmt);
5454       if (lhs && TREE_CODE (lhs) == SSA_NAME)
5455 	{
5456 	  /* Try constant folding based on our current lattice.  */
5457 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5458 							    vn_valueize);
5459 	  if (simplified)
5460 	    {
5461 	      if (dump_file && (dump_flags & TDF_DETAILS))
5462 		{
5463 		  fprintf (dump_file, "call ");
5464 		  print_gimple_expr (dump_file, call_stmt, 0);
5465 		  fprintf (dump_file, " simplified to ");
5466 		  print_generic_expr (dump_file, simplified);
5467 		  fprintf (dump_file, "\n");
5468 		}
5469 	    }
5470 	  /* Setting value numbers to constants will occasionally
5471 	     screw up phi congruence because constants are not
5472 	     uniquely associated with a single ssa name that can be
5473 	     looked up.  */
5474 	  if (simplified
5475 	      && is_gimple_min_invariant (simplified))
5476 	    {
5477 	      changed = set_ssa_val_to (lhs, simplified);
5478 	      if (gimple_vdef (call_stmt))
5479 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5480 					   SSA_VAL (gimple_vuse (call_stmt)));
5481 	      goto done;
5482 	    }
5483 	  else if (simplified
5484 		   && TREE_CODE (simplified) == SSA_NAME)
5485 	    {
5486 	      changed = visit_copy (lhs, simplified);
5487 	      if (gimple_vdef (call_stmt))
5488 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5489 					   SSA_VAL (gimple_vuse (call_stmt)));
5490 	      goto done;
5491 	    }
5492 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5493 	    {
5494 	      changed = defs_to_varying (call_stmt);
5495 	      goto done;
5496 	    }
5497 	}
5498 
5499       /* Pick up flags from a devirtualization target.  */
5500       tree fn = gimple_call_fn (stmt);
5501       int extra_fnflags = 0;
5502       if (fn && TREE_CODE (fn) == SSA_NAME)
5503 	{
5504 	  fn = SSA_VAL (fn);
5505 	  if (TREE_CODE (fn) == ADDR_EXPR
5506 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5507 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5508 	}
5509       if (!gimple_call_internal_p (call_stmt)
5510 	  && (/* Calls to the same function with the same vuse
5511 		 and the same operands do not necessarily return the same
5512 		 value, unless they're pure or const.  */
5513 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
5514 	       & (ECF_PURE | ECF_CONST))
5515 	      /* If calls have a vdef, subsequent calls won't have
5516 		 the same incoming vuse.  So, if 2 calls with vdef have the
5517 		 same vuse, we know they're not subsequent.
5518 		 We can value number 2 calls to the same function with the
5519 		 same vuse and the same operands which are not subsequent
5520 		 the same, because there is no code in the program that can
5521 		 compare the 2 values...  */
5522 	      || (gimple_vdef (call_stmt)
5523 		  /* ... unless the call returns a pointer which does
5524 		     not alias with anything else.  In which case the
		     information that the values are distinct is encoded
5526 		     in the IL.  */
5527 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5528 		  /* Only perform the following when being called from PRE
5529 		     which embeds tail merging.  */
5530 		  && default_vn_walk_kind == VN_WALK)))
5531 	changed = visit_reference_op_call (lhs, call_stmt);
5532       else
5533 	changed = defs_to_varying (call_stmt);
5534     }
5535   else
5536     changed = defs_to_varying (stmt);
5537  done:
5538   return changed;
5539 }
5540 
5541 
5542 /* Allocate a value number table.  */
5543 
5544 static void
5545 allocate_vn_table (vn_tables_t table, unsigned size)
5546 {
5547   table->phis = new vn_phi_table_type (size);
5548   table->nary = new vn_nary_op_table_type (size);
5549   table->references = new vn_reference_table_type (size);
5550 }
5551 
5552 /* Free a value number table.  */
5553 
5554 static void
5555 free_vn_table (vn_tables_t table)
5556 {
5557   /* Walk over elements and release vectors.  */
5558   vn_reference_iterator_type hir;
5559   vn_reference_t vr;
5560   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5561     vr->operands.release ();
5562   delete table->phis;
5563   table->phis = NULL;
5564   delete table->nary;
5565   table->nary = NULL;
5566   delete table->references;
5567   table->references = NULL;
5568 }
5569 
5570 /* Set *ID according to RESULT.  */
5571 
5572 static void
5573 set_value_id_for_result (tree result, unsigned int *id)
5574 {
5575   if (result && TREE_CODE (result) == SSA_NAME)
5576     *id = VN_INFO (result)->value_id;
5577   else if (result && is_gimple_min_invariant (result))
5578     *id = get_or_alloc_constant_value_id (result);
5579   else
5580     *id = get_next_value_id ();
5581 }
5582 
5583 /* Set the value ids in the valid hash tables.  */
5584 
5585 static void
5586 set_hashtable_value_ids (void)
5587 {
5588   vn_nary_op_iterator_type hin;
5589   vn_phi_iterator_type hip;
5590   vn_reference_iterator_type hir;
5591   vn_nary_op_t vno;
5592   vn_reference_t vr;
5593   vn_phi_t vp;
5594 
5595   /* Now set the value ids of the things we had put in the hash
5596      table.  */
5597 
5598   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5599     if (! vno->predicated_values)
5600       set_value_id_for_result (vno->u.result, &vno->value_id);
5601 
5602   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5603     set_value_id_for_result (vp->result, &vp->value_id);
5604 
5605   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5606 			       hir)
5607     set_value_id_for_result (vr->result, &vr->value_id);
5608 }
5609 
5610 /* Return the maximum value id we have ever seen.  */
5611 
5612 unsigned int
5613 get_max_value_id (void)
5614 {
5615   return next_value_id;
5616 }
5617 
5618 /* Return the next unique value id.  */
5619 
5620 unsigned int
5621 get_next_value_id (void)
5622 {
5623   return next_value_id++;
5624 }
5625 
5626 
5627 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5628 
5629 bool
5630 expressions_equal_p (tree e1, tree e2)
5631 {
5632   /* The obvious case.  */
5633   if (e1 == e2)
5634     return true;
5635 
5636   /* If either one is VN_TOP consider them equal.  */
5637   if (e1 == VN_TOP || e2 == VN_TOP)
5638     return true;
5639 
5640   /* If only one of them is null, they cannot be equal.  */
5641   if (!e1 || !e2)
5642     return false;
5643 
5644   /* Now perform the actual comparison.  */
5645   if (TREE_CODE (e1) == TREE_CODE (e2)
5646       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5647     return true;
5648 
5649   return false;
5650 }
5651 
5652 
5653 /* Return true if the nary operation NARY may trap.  This is a copy
5654    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5655 
5656 bool
5657 vn_nary_may_trap (vn_nary_op_t nary)
5658 {
5659   tree type;
5660   tree rhs2 = NULL_TREE;
5661   bool honor_nans = false;
5662   bool honor_snans = false;
5663   bool fp_operation = false;
5664   bool honor_trapv = false;
5665   bool handled, ret;
5666   unsigned i;
5667 
5668   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5669       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5670       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5671     {
5672       type = nary->type;
5673       fp_operation = FLOAT_TYPE_P (type);
5674       if (fp_operation)
5675 	{
5676 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5677 	  honor_snans = flag_signaling_nans != 0;
5678 	}
5679       else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5680 	honor_trapv = true;
5681     }
5682   if (nary->length >= 2)
5683     rhs2 = nary->op[1];
5684   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5685 				       honor_trapv, honor_nans, honor_snans,
5686 				       rhs2, &handled);
5687   if (handled && ret)
5688     return true;
5689 
5690   for (i = 0; i < nary->length; ++i)
5691     if (tree_could_trap_p (nary->op[i]))
5692       return true;
5693 
5694   return false;
5695 }
5696 
5697 /* Return true if the reference operation REF may trap.  */
5698 
5699 bool
5700 vn_reference_may_trap (vn_reference_t ref)
5701 {
5702   switch (ref->operands[0].opcode)
5703     {
5704     case MODIFY_EXPR:
5705     case CALL_EXPR:
5706       /* We do not handle calls.  */
5707     case ADDR_EXPR:
5708       /* And toplevel address computations never trap.  */
5709       return false;
5710     default:;
5711     }
5712 
5713   vn_reference_op_t op;
5714   unsigned i;
5715   FOR_EACH_VEC_ELT (ref->operands, i, op)
5716     {
5717       switch (op->opcode)
5718 	{
5719 	case WITH_SIZE_EXPR:
5720 	case TARGET_MEM_REF:
5721 	  /* Always variable.  */
5722 	  return true;
5723 	case COMPONENT_REF:
5724 	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5725 	    return true;
5726 	  break;
5727 	case ARRAY_RANGE_REF:
5728 	case ARRAY_REF:
5729 	  if (TREE_CODE (op->op0) == SSA_NAME)
5730 	    return true;
5731 	  break;
5732 	case MEM_REF:
5733 	  /* Nothing interesting in itself, the base is separate.  */
5734 	  break;
5735 	/* The following are the address bases.  */
5736 	case SSA_NAME:
5737 	  return true;
5738 	case ADDR_EXPR:
5739 	  if (op->op0)
5740 	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5741 	  return false;
5742 	default:;
5743 	}
5744     }
5745   return false;
5746 }
5747 
5748 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5749 					    bitmap inserted_exprs_)
5750   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5751     el_todo (0), eliminations (0), insertions (0),
5752     inserted_exprs (inserted_exprs_)
5753 {
5754   need_eh_cleanup = BITMAP_ALLOC (NULL);
5755   need_ab_cleanup = BITMAP_ALLOC (NULL);
5756 }
5757 
5758 eliminate_dom_walker::~eliminate_dom_walker ()
5759 {
5760   BITMAP_FREE (need_eh_cleanup);
5761   BITMAP_FREE (need_ab_cleanup);
5762 }
5763 
5764 /* Return a leader for OP that is available at the current point of the
5765    eliminate domwalk.  */
5766 
5767 tree
5768 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5769 {
5770   tree valnum = VN_INFO (op)->valnum;
5771   if (TREE_CODE (valnum) == SSA_NAME)
5772     {
5773       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5774 	return valnum;
5775       if (avail.length () > SSA_NAME_VERSION (valnum))
5776 	return avail[SSA_NAME_VERSION (valnum)];
5777     }
5778   else if (is_gimple_min_invariant (valnum))
5779     return valnum;
5780   return NULL_TREE;
5781 }
5782 
5783 /* At the current point of the eliminate domwalk make OP available.  */
5784 
5785 void
5786 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5787 {
5788   tree valnum = VN_INFO (op)->valnum;
5789   if (TREE_CODE (valnum) == SSA_NAME)
5790     {
5791       if (avail.length () <= SSA_NAME_VERSION (valnum))
5792 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
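      /* Push the previous leader for VALNUM (or OP itself if there was
	 none) so the availability can be restored when the domwalk
	 unwinds past this block.  */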
5793       tree pushop = op;
5794       if (avail[SSA_NAME_VERSION (valnum)])
5795 	pushop = avail[SSA_NAME_VERSION (valnum)];
5796       avail_stack.safe_push (pushop);
5797       avail[SSA_NAME_VERSION (valnum)] = op;
5798     }
5799 }
5800 
5801 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
5802    the leader for the expression if insertion was successful.  */
5803 
5804 tree
5805 eliminate_dom_walker::eliminate_insert (basic_block bb,
5806 					gimple_stmt_iterator *gsi, tree val)
5807 {
5808   /* We can insert a sequence with a single assignment only.  */
5809   gimple_seq stmts = VN_INFO (val)->expr;
5810   if (!gimple_seq_singleton_p (stmts))
5811     return NULL_TREE;
5812   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5813   if (!stmt
5814       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5815 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5816 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5817 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5818 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5819     return NULL_TREE;
5820 
5821   tree op = gimple_assign_rhs1 (stmt);
5822   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5823       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5824     op = TREE_OPERAND (op, 0);
5825   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5826   if (!leader)
5827     return NULL_TREE;
5828 
5829   tree res;
5830   stmts = NULL;
5831   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5832     res = gimple_build (&stmts, BIT_FIELD_REF,
5833 			TREE_TYPE (val), leader,
5834 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5835 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5836   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5837     res = gimple_build (&stmts, BIT_AND_EXPR,
5838 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5839   else
5840     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5841 			TREE_TYPE (val), leader);
5842   if (TREE_CODE (res) != SSA_NAME
5843       || SSA_NAME_IS_DEFAULT_DEF (res)
5844       || gimple_bb (SSA_NAME_DEF_STMT (res)))
5845     {
5846       gimple_seq_discard (stmts);
5847 
5848       /* During propagation we have to treat SSA info conservatively
5849          and thus we can end up simplifying the inserted expression
	 at elimination time to something not defined in stmts.  */
5851       /* But then this is a redundancy we failed to detect.  Which means
5852          res now has two values.  That doesn't play well with how
5853 	 we track availability here, so give up.  */
5854       if (dump_file && (dump_flags & TDF_DETAILS))
5855 	{
5856 	  if (TREE_CODE (res) == SSA_NAME)
5857 	    res = eliminate_avail (bb, res);
5858 	  if (res)
5859 	    {
5860 	      fprintf (dump_file, "Failed to insert expression for value ");
5861 	      print_generic_expr (dump_file, val);
5862 	      fprintf (dump_file, " which is really fully redundant to ");
5863 	      print_generic_expr (dump_file, res);
5864 	      fprintf (dump_file, "\n");
5865 	    }
5866 	}
5867 
5868       return NULL_TREE;
5869     }
5870   else
5871     {
5872       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5873       VN_INFO (res)->valnum = val;
5874       VN_INFO (res)->visited = true;
5875     }
5876 
5877   insertions++;
5878   if (dump_file && (dump_flags & TDF_DETAILS))
5879     {
5880       fprintf (dump_file, "Inserted ");
5881       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5882     }
5883 
5884   return res;
5885 }
5886 
5887 void
5888 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5889 {
5890   tree sprime = NULL_TREE;
5891   gimple *stmt = gsi_stmt (*gsi);
5892   tree lhs = gimple_get_lhs (stmt);
5893   if (lhs && TREE_CODE (lhs) == SSA_NAME
5894       && !gimple_has_volatile_ops (stmt)
5895       /* See PR43491.  Do not replace a global register variable when
	 it is the RHS of an assignment.  Do replace local register
	 variables since gcc does not guarantee a local variable will
	 be allocated in a register.
5899 	 ???  The fix isn't effective here.  This should instead
5900 	 be ensured by not value-numbering them the same but treating
5901 	 them like volatiles?  */
5902       && !(gimple_assign_single_p (stmt)
5903 	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5904 	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5905 	       && is_global_var (gimple_assign_rhs1 (stmt)))))
5906     {
5907       sprime = eliminate_avail (b, lhs);
5908       if (!sprime)
5909 	{
5910 	  /* If there is no existing usable leader but SCCVN thinks
5911 	     it has an expression it wants to use as replacement,
5912 	     insert that.  */
5913 	  tree val = VN_INFO (lhs)->valnum;
5914 	  if (val != VN_TOP
5915 	      && TREE_CODE (val) == SSA_NAME
5916 	      && VN_INFO (val)->needs_insertion
5917 	      && VN_INFO (val)->expr != NULL
5918 	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5919 	    eliminate_push_avail (b, sprime);
5920 	}
5921 
      /* If this now constitutes a copy, duplicate points-to
5923 	 and range info appropriately.  This is especially
5924 	 important for inserted code.  See tree-ssa-copy.c
5925 	 for similar code.  */
5926       if (sprime
5927 	  && TREE_CODE (sprime) == SSA_NAME)
5928 	{
5929 	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5930 	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
5931 	      && SSA_NAME_PTR_INFO (lhs)
5932 	      && ! SSA_NAME_PTR_INFO (sprime))
5933 	    {
5934 	      duplicate_ssa_name_ptr_info (sprime,
5935 					   SSA_NAME_PTR_INFO (lhs));
5936 	      if (b != sprime_b)
5937 		reset_flow_sensitive_info (sprime);
5938 	    }
5939 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5940 		   && SSA_NAME_RANGE_INFO (lhs)
5941 		   && ! SSA_NAME_RANGE_INFO (sprime)
5942 		   && b == sprime_b)
5943 	    duplicate_ssa_name_range_info (sprime,
5944 					   SSA_NAME_RANGE_TYPE (lhs),
5945 					   SSA_NAME_RANGE_INFO (lhs));
5946 	}
5947 
5948       /* Inhibit the use of an inserted PHI on a loop header when
5949 	 the address of the memory reference is a simple induction
5950 	 variable.  In other cases the vectorizer won't do anything
5951 	 anyway (either it's loop invariant or a complicated
5952 	 expression).  */
5953       if (sprime
5954 	  && TREE_CODE (sprime) == SSA_NAME
5955 	  && do_pre
5956 	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5957 	  && loop_outer (b->loop_father)
5958 	  && has_zero_uses (sprime)
5959 	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5960 	  && gimple_assign_load_p (stmt))
5961 	{
5962 	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5963 	  basic_block def_bb = gimple_bb (def_stmt);
5964 	  if (gimple_code (def_stmt) == GIMPLE_PHI
5965 	      && def_bb->loop_father->header == def_bb)
5966 	    {
5967 	      loop_p loop = def_bb->loop_father;
5968 	      ssa_op_iter iter;
5969 	      tree op;
5970 	      bool found = false;
5971 	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5972 		{
5973 		  affine_iv iv;
5974 		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5975 		  if (def_bb
5976 		      && flow_bb_inside_loop_p (loop, def_bb)
5977 		      && simple_iv (loop, loop, op, &iv, true))
5978 		    {
5979 		      found = true;
5980 		      break;
5981 		    }
5982 		}
5983 	      if (found)
5984 		{
5985 		  if (dump_file && (dump_flags & TDF_DETAILS))
5986 		    {
5987 		      fprintf (dump_file, "Not replacing ");
5988 		      print_gimple_expr (dump_file, stmt, 0);
5989 		      fprintf (dump_file, " with ");
5990 		      print_generic_expr (dump_file, sprime);
5991 		      fprintf (dump_file, " which would add a loop"
5992 			       " carried dependence to loop %d\n",
5993 			       loop->num);
5994 		    }
5995 		  /* Don't keep sprime available.  */
5996 		  sprime = NULL_TREE;
5997 		}
5998 	    }
5999 	}
6000 
6001       if (sprime)
6002 	{
6003 	  /* If we can propagate the value computed for LHS into
6004 	     all uses don't bother doing anything with this stmt.  */
6005 	  if (may_propagate_copy (lhs, sprime))
6006 	    {
6007 	      /* Mark it for removal.  */
6008 	      to_remove.safe_push (stmt);
6009 
6010 	      /* ???  Don't count copy/constant propagations.  */
6011 	      if (gimple_assign_single_p (stmt)
6012 		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6013 		      || gimple_assign_rhs1 (stmt) == sprime))
6014 		return;
6015 
6016 	      if (dump_file && (dump_flags & TDF_DETAILS))
6017 		{
6018 		  fprintf (dump_file, "Replaced ");
6019 		  print_gimple_expr (dump_file, stmt, 0);
6020 		  fprintf (dump_file, " with ");
6021 		  print_generic_expr (dump_file, sprime);
6022 		  fprintf (dump_file, " in all uses of ");
6023 		  print_gimple_stmt (dump_file, stmt, 0);
6024 		}
6025 
6026 	      eliminations++;
6027 	      return;
6028 	    }
6029 
6030 	  /* If this is an assignment from our leader (which
6031 	     happens in the case the value-number is a constant)
6032 	     then there is nothing to do.  Likewise if we run into
6033 	     inserted code that needed a conversion because of
6034 	     our type-agnostic value-numbering of loads.  */
6035 	  if ((gimple_assign_single_p (stmt)
6036 	       || (is_gimple_assign (stmt)
6037 		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6038 		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6039 	      && sprime == gimple_assign_rhs1 (stmt))
6040 	    return;
6041 
6042 	  /* Else replace its RHS.  */
6043 	  if (dump_file && (dump_flags & TDF_DETAILS))
6044 	    {
6045 	      fprintf (dump_file, "Replaced ");
6046 	      print_gimple_expr (dump_file, stmt, 0);
6047 	      fprintf (dump_file, " with ");
6048 	      print_generic_expr (dump_file, sprime);
6049 	      fprintf (dump_file, " in ");
6050 	      print_gimple_stmt (dump_file, stmt, 0);
6051 	    }
6052 	  eliminations++;
6053 
6054 	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
6055 					 && stmt_can_make_abnormal_goto (stmt));
6056 	  gimple *orig_stmt = stmt;
6057 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
6058 					  TREE_TYPE (sprime)))
6059 	    {
6060 	      /* We preserve conversions to but not from function or method
6061 		 types.  This asymmetry makes it necessary to re-instantiate
6062 		 conversions here.  */
6063 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
6064 		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6065 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
6066 	      else
6067 		gcc_unreachable ();
6068 	    }
6069 	  tree vdef = gimple_vdef (stmt);
6070 	  tree vuse = gimple_vuse (stmt);
6071 	  propagate_tree_value_into_stmt (gsi, sprime);
6072 	  stmt = gsi_stmt (*gsi);
6073 	  update_stmt (stmt);
6074 	  /* In case the VDEF on the original stmt was released, value-number
6075 	     it to the VUSE.  This is to make vuse_ssa_val able to skip
6076 	     released virtual operands.  */
6077 	  if (vdef != gimple_vdef (stmt))
6078 	    {
6079 	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6080 	      VN_INFO (vdef)->valnum = vuse;
6081 	    }
6082 
6083 	  /* If we removed EH side-effects from the statement, clean
6084 	     its EH information.  */
6085 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6086 	    {
6087 	      bitmap_set_bit (need_eh_cleanup,
6088 			      gimple_bb (stmt)->index);
6089 	      if (dump_file && (dump_flags & TDF_DETAILS))
6090 		fprintf (dump_file, "  Removed EH side-effects.\n");
6091 	    }
6092 
6093 	  /* Likewise for AB side-effects.  */
6094 	  if (can_make_abnormal_goto
6095 	      && !stmt_can_make_abnormal_goto (stmt))
6096 	    {
6097 	      bitmap_set_bit (need_ab_cleanup,
6098 			      gimple_bb (stmt)->index);
6099 	      if (dump_file && (dump_flags & TDF_DETAILS))
6100 		fprintf (dump_file, "  Removed AB side-effects.\n");
6101 	    }
6102 
6103 	  return;
6104 	}
6105     }
6106 
6107   /* If the statement is a scalar store, see if the expression
6108      has the same value number as its rhs.  If so, the store is
6109      dead.  */
6110   if (gimple_assign_single_p (stmt)
6111       && !gimple_has_volatile_ops (stmt)
6112       && !is_gimple_reg (gimple_assign_lhs (stmt))
6113       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6114 	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6115     {
6116       tree rhs = gimple_assign_rhs1 (stmt);
6117       vn_reference_t vnresult;
6118       /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
6119          typed load of a byte known to be 0x11 as 1 so a store of
6120 	 a boolean 1 is detected as redundant.  Because of this we
	 have to make sure to look up with a ref whose size
	 matches the precision.  */
6123       tree lookup_lhs = lhs;
6124       if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6125 	  && (TREE_CODE (lhs) != COMPONENT_REF
6126 	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6127 	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6128 	{
6129 	  if (TREE_CODE (lhs) == COMPONENT_REF
6130 	      || TREE_CODE (lhs) == MEM_REF)
6131 	    {
6132 	      tree ltype = build_nonstandard_integer_type
6133 				(TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6134 				 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6135 	      if (TREE_CODE (lhs) == COMPONENT_REF)
6136 		{
6137 		  tree foff = component_ref_field_offset (lhs);
6138 		  tree f = TREE_OPERAND (lhs, 1);
6139 		  if (!poly_int_tree_p (foff))
6140 		    lookup_lhs = NULL_TREE;
6141 		  else
6142 		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6143 					 TREE_OPERAND (lhs, 0),
6144 					 TYPE_SIZE (TREE_TYPE (lhs)),
6145 					 bit_from_pos
6146 					   (foff, DECL_FIELD_BIT_OFFSET (f)));
6147 		}
6148 	      else
6149 		lookup_lhs = build2 (MEM_REF, ltype,
6150 				     TREE_OPERAND (lhs, 0),
6151 				     TREE_OPERAND (lhs, 1));
6152 	    }
6153 	  else
6154 	    lookup_lhs = NULL_TREE;
6155 	}
6156       tree val = NULL_TREE;
6157       if (lookup_lhs)
6158 	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6159 				   VN_WALKREWRITE, &vnresult, false);
6160       if (TREE_CODE (rhs) == SSA_NAME)
6161 	rhs = VN_INFO (rhs)->valnum;
6162       if (val
6163 	  && (operand_equal_p (val, rhs, 0)
6164 	      /* Due to the bitfield lookups above we can get bit
6165 		 interpretations of the same RHS as values here.  Those
6166 		 are redundant as well.  */
6167 	      || (TREE_CODE (val) == SSA_NAME
6168 		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6169 		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6170 		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
6171 		  && TREE_OPERAND (val, 0) == rhs)))
6172 	{
6173 	  /* We can only remove the later store if the former aliases
6174 	     at least all accesses the later one does or if the store
6175 	     was to readonly memory storing the same value.  */
6176 	  ao_ref lhs_ref;
6177 	  ao_ref_init (&lhs_ref, lhs);
6178 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
6179 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6180 	  if (! vnresult
6181 	      || ((vnresult->set == set
6182 		   || alias_set_subset_of (set, vnresult->set))
6183 		  && (vnresult->base_set == base_set
6184 		      || alias_set_subset_of (base_set, vnresult->base_set))))
6185 	    {
6186 	      if (dump_file && (dump_flags & TDF_DETAILS))
6187 		{
6188 		  fprintf (dump_file, "Deleted redundant store ");
6189 		  print_gimple_stmt (dump_file, stmt, 0);
6190 		}
6191 
6192 	      /* Queue stmt for removal.  */
6193 	      to_remove.safe_push (stmt);
6194 	      return;
6195 	    }
6196 	}
6197     }
6198 
  /* If this is a control statement for which value numbering left
     edges unexecuted, force the condition in a way consistent with
     that.  */
6202   if (gcond *cond = dyn_cast <gcond *> (stmt))
6203     {
6204       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6205 	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6206 	{
6207 	  if (dump_file && (dump_flags & TDF_DETAILS))
6208 	    {
6209 	      fprintf (dump_file, "Removing unexecutable edge from ");
6210 	      print_gimple_stmt (dump_file, stmt, 0);
6211 	    }
6212 	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6213 	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6214 	    gimple_cond_make_true (cond);
6215 	  else
6216 	    gimple_cond_make_false (cond);
6217 	  update_stmt (cond);
6218 	  el_todo |= TODO_cleanup_cfg;
6219 	  return;
6220 	}
6221     }
6222 
6223   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6224   bool was_noreturn = (is_gimple_call (stmt)
6225 		       && gimple_call_noreturn_p (stmt));
6226   tree vdef = gimple_vdef (stmt);
6227   tree vuse = gimple_vuse (stmt);
6228 
6229   /* If we didn't replace the whole stmt (or propagate the result
6230      into all uses), replace all uses on this stmt with their
6231      leaders.  */
6232   bool modified = false;
6233   use_operand_p use_p;
6234   ssa_op_iter iter;
6235   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6236     {
6237       tree use = USE_FROM_PTR (use_p);
6238       /* ???  The call code above leaves stmt operands un-updated.  */
6239       if (TREE_CODE (use) != SSA_NAME)
6240 	continue;
6241       tree sprime;
6242       if (SSA_NAME_IS_DEFAULT_DEF (use))
6243 	/* ???  For default defs BB shouldn't matter, but we have to
6244 	   solve the inconsistency between rpo eliminate and
6245 	   dom eliminate avail valueization first.  */
6246 	sprime = eliminate_avail (b, use);
6247       else
6248 	/* Look for sth available at the definition block of the argument.
6249 	   This avoids inconsistencies between availability there which
6250 	   decides if the stmt can be removed and availability at the
6251 	   use site.  The SSA property ensures that things available
6252 	   at the definition are also available at uses.  */
6253 	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6254       if (sprime && sprime != use
6255 	  && may_propagate_copy (use, sprime, true)
6256 	  /* We substitute into debug stmts to avoid excessive
6257 	     debug temporaries created by removed stmts, but we need
6258 	     to avoid doing so for inserted sprimes as we never want
6259 	     to create debug temporaries for them.  */
6260 	  && (!inserted_exprs
6261 	      || TREE_CODE (sprime) != SSA_NAME
6262 	      || !is_gimple_debug (stmt)
6263 	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6264 	{
6265 	  propagate_value (use_p, sprime);
6266 	  modified = true;
6267 	}
6268     }
6269 
  /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
     into, which is a requirement for the IPA devirt machinery.  */
6272   gimple *old_stmt = stmt;
6273   if (modified)
6274     {
6275       /* If a formerly non-invariant ADDR_EXPR is turned into an
6276 	 invariant one it was on a separate stmt.  */
6277       if (gimple_assign_single_p (stmt)
6278 	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6279 	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6280       gimple_stmt_iterator prev = *gsi;
6281       gsi_prev (&prev);
6282       if (fold_stmt (gsi))
6283 	{
	  /* fold_stmt may have created new stmts in between
6285 	     the previous stmt and the folded stmt.  Mark
6286 	     all defs created there as varying to not confuse
6287 	     the SCCVN machinery as we're using that even during
6288 	     elimination.  */
6289 	  if (gsi_end_p (prev))
6290 	    prev = gsi_start_bb (b);
6291 	  else
6292 	    gsi_next (&prev);
6293 	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
6294 	    do
6295 	      {
6296 		tree def;
6297 		ssa_op_iter dit;
6298 		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6299 					   dit, SSA_OP_ALL_DEFS)
6300 		    /* As existing DEFs may move between stmts
6301 		       only process new ones.  */
6302 		    if (! has_VN_INFO (def))
6303 		      {
6304 			VN_INFO (def)->valnum = def;
6305 			VN_INFO (def)->visited = true;
6306 		      }
6307 		if (gsi_stmt (prev) == gsi_stmt (*gsi))
6308 		  break;
6309 		gsi_next (&prev);
6310 	      }
6311 	    while (1);
6312 	}
6313       stmt = gsi_stmt (*gsi);
6314       /* In case we folded the stmt away schedule the NOP for removal.  */
6315       if (gimple_nop_p (stmt))
6316 	to_remove.safe_push (stmt);
6317     }
6318 
6319   /* Visit indirect calls and turn them into direct calls if
6320      possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
6322      may expose more of those.  */
6323   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6324     {
6325       tree fn = gimple_call_fn (call_stmt);
6326       if (fn
6327 	  && flag_devirtualize
6328 	  && virtual_method_call_p (fn))
6329 	{
6330 	  tree otr_type = obj_type_ref_class (fn);
6331 	  unsigned HOST_WIDE_INT otr_tok
6332 	      = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6333 	  tree instance;
6334 	  ipa_polymorphic_call_context context (current_function_decl,
6335 						fn, stmt, &instance);
6336 	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6337 				    otr_type, stmt, NULL);
6338 	  bool final;
6339 	  vec <cgraph_node *> targets
6340 	      = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6341 						   otr_tok, context, &final);
6342 	  if (dump_file)
6343 	    dump_possible_polymorphic_call_targets (dump_file,
6344 						    obj_type_ref_class (fn),
6345 						    otr_tok, context);
6346 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
6347 	    {
6348 	      tree fn;
6349 	      if (targets.length () == 1)
6350 		fn = targets[0]->decl;
6351 	      else
6352 		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6353 	      if (dump_enabled_p ())
6354 		{
6355 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6356 				   "converting indirect call to "
6357 				   "function %s\n",
6358 				   lang_hooks.decl_printable_name (fn, 2));
6359 		}
6360 	      gimple_call_set_fndecl (call_stmt, fn);
6361 	      /* If changing the call to __builtin_unreachable
6362 		 or similar noreturn function, adjust gimple_call_fntype
6363 		 too.  */
6364 	      if (gimple_call_noreturn_p (call_stmt)
6365 		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6366 		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
6367 		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6368 		      == void_type_node))
6369 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6370 	      maybe_remove_unused_call_args (cfun, call_stmt);
6371 	      modified = true;
6372 	    }
6373 	}
6374     }
6375 
6376   if (modified)
6377     {
6378       /* When changing a call into a noreturn call, cfg cleanup
6379 	 is needed to fix up the noreturn call.  */
6380       if (!was_noreturn
6381 	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	to_fixup.safe_push (stmt);
6383       /* When changing a condition or switch into one we know what
6384 	 edge will be executed, schedule a cfg cleanup.  */
6385       if ((gimple_code (stmt) == GIMPLE_COND
6386 	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
6387 	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
6388 	  || (gimple_code (stmt) == GIMPLE_SWITCH
6389 	      && TREE_CODE (gimple_switch_index
6390 			    (as_a <gswitch *> (stmt))) == INTEGER_CST))
6391 	el_todo |= TODO_cleanup_cfg;
6392       /* If we removed EH side-effects from the statement, clean
6393 	 its EH information.  */
6394       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6395 	{
6396 	  bitmap_set_bit (need_eh_cleanup,
6397 			  gimple_bb (stmt)->index);
6398 	  if (dump_file && (dump_flags & TDF_DETAILS))
6399 	    fprintf (dump_file, "  Removed EH side-effects.\n");
6400 	}
6401       /* Likewise for AB side-effects.  */
6402       if (can_make_abnormal_goto
6403 	  && !stmt_can_make_abnormal_goto (stmt))
6404 	{
6405 	  bitmap_set_bit (need_ab_cleanup,
6406 			  gimple_bb (stmt)->index);
6407 	  if (dump_file && (dump_flags & TDF_DETAILS))
6408 	    fprintf (dump_file, "  Removed AB side-effects.\n");
6409 	}
6410       update_stmt (stmt);
6411       /* In case the VDEF on the original stmt was released, value-number
6412          it to the VUSE.  This is to make vuse_ssa_val able to skip
6413 	 released virtual operands.  */
6414       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6415 	VN_INFO (vdef)->valnum = vuse;
6416     }
6417 
6418   /* Make new values available - for fully redundant LHS we
6419      continue with the next stmt above and skip this.  */
6420   def_operand_p defp;
6421   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6422     eliminate_push_avail (b, DEF_FROM_PTR (defp));
6423 }
6424 
6425 /* Perform elimination for the basic-block B during the domwalk.  */
6426 
6427 edge
6428 eliminate_dom_walker::before_dom_children (basic_block b)
6429 {
6430   /* Mark new bb.  */
6431   avail_stack.safe_push (NULL_TREE);
6432 
6433   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
6434   if (!(b->flags & BB_EXECUTABLE))
6435     return NULL;
6436 
6437   vn_context_bb = b;
6438 
6439   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6440     {
6441       gphi *phi = gsi.phi ();
6442       tree res = PHI_RESULT (phi);
6443 
6444       if (virtual_operand_p (res))
6445 	{
6446 	  gsi_next (&gsi);
6447 	  continue;
6448 	}
6449 
6450       tree sprime = eliminate_avail (b, res);
6451       if (sprime
6452 	  && sprime != res)
6453 	{
6454 	  if (dump_file && (dump_flags & TDF_DETAILS))
6455 	    {
6456 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
6457 	      print_generic_expr (dump_file, res);
6458 	      fprintf (dump_file, " with ");
6459 	      print_generic_expr (dump_file, sprime);
6460 	      fprintf (dump_file, "\n");
6461 	    }
6462 
	  /* If we inserted this PHI node ourselves, it's not an
	     elimination.  */
6464 	  if (! inserted_exprs
6465 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6466 	    eliminations++;
6467 
6468 	  /* If we will propagate into all uses don't bother to do
6469 	     anything.  */
6470 	  if (may_propagate_copy (res, sprime))
6471 	    {
6472 	      /* Mark the PHI for removal.  */
6473 	      to_remove.safe_push (phi);
6474 	      gsi_next (&gsi);
6475 	      continue;
6476 	    }
6477 
6478 	  remove_phi_node (&gsi, false);
6479 
6480 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6481 	    sprime = fold_convert (TREE_TYPE (res), sprime);
6482 	  gimple *stmt = gimple_build_assign (res, sprime);
6483 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6484 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6485 	  continue;
6486 	}
6487 
6488       eliminate_push_avail (b, res);
6489       gsi_next (&gsi);
6490     }
6491 
6492   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6493        !gsi_end_p (gsi);
6494        gsi_next (&gsi))
6495     eliminate_stmt (b, &gsi);
6496 
6497   /* Replace destination PHI arguments.  */
6498   edge_iterator ei;
6499   edge e;
6500   FOR_EACH_EDGE (e, ei, b->succs)
6501     if (e->flags & EDGE_EXECUTABLE)
6502       for (gphi_iterator gsi = gsi_start_phis (e->dest);
6503 	   !gsi_end_p (gsi);
6504 	   gsi_next (&gsi))
6505 	{
6506 	  gphi *phi = gsi.phi ();
6507 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6508 	  tree arg = USE_FROM_PTR (use_p);
6509 	  if (TREE_CODE (arg) != SSA_NAME
6510 	      || virtual_operand_p (arg))
6511 	    continue;
6512 	  tree sprime = eliminate_avail (b, arg);
6513 	  if (sprime && may_propagate_copy (arg, sprime))
6514 	    propagate_value (use_p, sprime);
6515 	}
6516 
6517   vn_context_bb = NULL;
6518 
6519   return NULL;
6520 }
6521 
/* Unwind the leader stack, making leaders that are no longer available
   after leaving this block no longer available.  */
6523 
6524 void
6525 eliminate_dom_walker::after_dom_children (basic_block)
6526 {
6527   tree entry;
6528   while ((entry = avail_stack.pop ()) != NULL_TREE)
6529     {
6530       tree valnum = VN_INFO (entry)->valnum;
6531       tree old = avail[SSA_NAME_VERSION (valnum)];
6532       if (old == entry)
6533 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6534       else
6535 	avail[SSA_NAME_VERSION (valnum)] = entry;
6536     }
6537 }
6538 
6539 /* Remove queued stmts and perform delayed cleanups.  */
6540 
6541 unsigned
6542 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6543 {
6544   statistics_counter_event (cfun, "Eliminated", eliminations);
6545   statistics_counter_event (cfun, "Insertions", insertions);
6546 
  /* We cannot remove stmts during the BB walk, especially not release
     SSA names there, as this confuses the VN machinery.  The stmts ending
6549      up in to_remove are either stores or simple copies.
6550      Remove stmts in reverse order to make debug stmt creation possible.  */
6551   while (!to_remove.is_empty ())
6552     {
6553       bool do_release_defs = true;
6554       gimple *stmt = to_remove.pop ();
6555 
6556       /* When we are value-numbering a region we do not require exit PHIs to
6557 	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought were eliminated.
6559 	 ??? Note we may be confused by uses in dead regions we didn't run
6560 	 elimination on.  Rather than checking individual uses we accept
6561 	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such an example).  */
6563       if (region_p)
6564 	{
6565 	  if (gphi *phi = dyn_cast <gphi *> (stmt))
6566 	    {
6567 	      tree lhs = gimple_phi_result (phi);
6568 	      if (!has_zero_uses (lhs))
6569 		{
6570 		  if (dump_file && (dump_flags & TDF_DETAILS))
6571 		    fprintf (dump_file, "Keeping eliminated stmt live "
6572 			     "as copy because of out-of-region uses\n");
6573 		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6574 		  gimple *copy = gimple_build_assign (lhs, sprime);
6575 		  gimple_stmt_iterator gsi
6576 		    = gsi_after_labels (gimple_bb (stmt));
6577 		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6578 		  do_release_defs = false;
6579 		}
6580 	    }
6581 	  else if (tree lhs = gimple_get_lhs (stmt))
6582 	    if (TREE_CODE (lhs) == SSA_NAME
6583 		&& !has_zero_uses (lhs))
6584 	      {
6585 		if (dump_file && (dump_flags & TDF_DETAILS))
6586 		  fprintf (dump_file, "Keeping eliminated stmt live "
6587 			   "as copy because of out-of-region uses\n");
6588 		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6589 		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6590 		if (is_gimple_assign (stmt))
6591 		  {
6592 		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
6593 		    stmt = gsi_stmt (gsi);
6594 		    update_stmt (stmt);
6595 		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6596 		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6597 		    continue;
6598 		  }
6599 		else
6600 		  {
6601 		    gimple *copy = gimple_build_assign (lhs, sprime);
6602 		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6603 		    do_release_defs = false;
6604 		  }
6605 	      }
6606 	}
6607 
6608       if (dump_file && (dump_flags & TDF_DETAILS))
6609 	{
6610 	  fprintf (dump_file, "Removing dead stmt ");
6611 	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6612 	}
6613 
6614       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6615       if (gimple_code (stmt) == GIMPLE_PHI)
6616 	remove_phi_node (&gsi, do_release_defs);
6617       else
6618 	{
6619 	  basic_block bb = gimple_bb (stmt);
6620 	  unlink_stmt_vdef (stmt);
6621 	  if (gsi_remove (&gsi, true))
6622 	    bitmap_set_bit (need_eh_cleanup, bb->index);
6623 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6624 	    bitmap_set_bit (need_ab_cleanup, bb->index);
6625 	  if (do_release_defs)
6626 	    release_defs (stmt);
6627 	}
6628 
6629       /* Removing a stmt may expose a forwarder block.  */
6630       el_todo |= TODO_cleanup_cfg;
6631     }
6632 
  /* Fix up stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fix up by visiting a dominating, now-noreturn call first.  */
6637   while (!to_fixup.is_empty ())
6638     {
6639       gimple *stmt = to_fixup.pop ();
6640 
6641       if (dump_file && (dump_flags & TDF_DETAILS))
6642 	{
6643 	  fprintf (dump_file, "Fixing up noreturn call ");
6644 	  print_gimple_stmt (dump_file, stmt, 0);
6645 	}
6646 
6647       if (fixup_noreturn_call (stmt))
6648 	el_todo |= TODO_cleanup_cfg;
6649     }
6650 
6651   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6652   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6653 
6654   if (do_eh_cleanup)
6655     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6656 
6657   if (do_ab_cleanup)
6658     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6659 
6660   if (do_eh_cleanup || do_ab_cleanup)
6661     el_todo |= TODO_cleanup_cfg;
6662 
6663   return el_todo;
6664 }
6665 
6666 /* Eliminate fully redundant computations.  */
6667 
6668 unsigned
6669 eliminate_with_rpo_vn (bitmap inserted_exprs)
6670 {
6671   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6672 
6673   walker.walk (cfun->cfg->x_entry_block_ptr);
6674   return walker.eliminate_cleanup ();
6675 }
6676 
6677 static unsigned
6678 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6679 	   bool iterate, bool eliminate);
6680 
6681 void
6682 run_rpo_vn (vn_lookup_kind kind)
6683 {
6684   default_vn_walk_kind = kind;
6685   do_rpo_vn (cfun, NULL, NULL, true, false);
6686 
6687   /* ???  Prune requirement of these.  */
6688   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6689   constant_value_ids = BITMAP_ALLOC (NULL);
6690 
6691   /* Initialize the value ids and prune out remaining VN_TOPs
6692      from dead code.  */
6693   tree name;
6694   unsigned i;
6695   FOR_EACH_SSA_NAME (i, name, cfun)
6696     {
6697       vn_ssa_aux_t info = VN_INFO (name);
6698       if (!info->visited
6699 	  || info->valnum == VN_TOP)
6700 	info->valnum = name;
6701       if (info->valnum == name)
6702 	info->value_id = get_next_value_id ();
6703       else if (is_gimple_min_invariant (info->valnum))
6704 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
6705     }
6706 
6707   /* Propagate.  */
6708   FOR_EACH_SSA_NAME (i, name, cfun)
6709     {
6710       vn_ssa_aux_t info = VN_INFO (name);
6711       if (TREE_CODE (info->valnum) == SSA_NAME
6712 	  && info->valnum != name
6713 	  && info->value_id != VN_INFO (info->valnum)->value_id)
6714 	info->value_id = VN_INFO (info->valnum)->value_id;
6715     }
6716 
6717   set_hashtable_value_ids ();
6718 
6719   if (dump_file && (dump_flags & TDF_DETAILS))
6720     {
6721       fprintf (dump_file, "Value numbers:\n");
6722       FOR_EACH_SSA_NAME (i, name, cfun)
6723 	{
6724 	  if (VN_INFO (name)->visited
6725 	      && SSA_VAL (name) != name)
6726 	    {
6727 	      print_generic_expr (dump_file, name);
6728 	      fprintf (dump_file, " = ");
6729 	      print_generic_expr (dump_file, SSA_VAL (name));
6730 	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6731 	    }
6732 	}
6733     }
6734 }
6735 
6736 /* Free VN associated data structures.  */
6737 
6738 void
6739 free_rpo_vn (void)
6740 {
6741   free_vn_table (valid_info);
6742   XDELETE (valid_info);
6743   obstack_free (&vn_tables_obstack, NULL);
6744   obstack_free (&vn_tables_insert_obstack, NULL);
6745 
6746   vn_ssa_aux_iterator_type it;
6747   vn_ssa_aux_t info;
6748   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6749     if (info->needs_insertion)
6750       release_ssa_name (info->name);
6751   obstack_free (&vn_ssa_aux_obstack, NULL);
6752   delete vn_ssa_aux_hash;
6753 
6754   delete constant_to_value_id;
6755   constant_to_value_id = NULL;
6756   BITMAP_FREE (constant_value_ids);
6757 }
6758 
6759 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
6760 
6761 static tree
6762 vn_lookup_simplify_result (gimple_match_op *res_op)
6763 {
6764   if (!res_op->code.is_tree_code ())
6765     return NULL_TREE;
6766   tree *ops = res_op->ops;
6767   unsigned int length = res_op->num_ops;
6768   if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVN's view of a decomposed
	 CONSTRUCTOR, while GIMPLE and match-and-simplify treat
	 CONSTRUCTOR as a GENERIC tree.  */
6771       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6772     {
6773       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6774       ops = XALLOCAVEC (tree, length);
6775       for (unsigned i = 0; i < length; ++i)
6776 	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6777     }
6778   vn_nary_op_t vnresult = NULL;
6779   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6780 				       res_op->type, ops, &vnresult);
6781   /* If this is used from expression simplification make sure to
6782      return an available expression.  */
6783   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6784     res = rpo_avail->eliminate_avail (vn_context_bb, res);
6785   return res;
6786 }
6787 
6788 /* Return a leader for OPs value that is valid at BB.  */
6789 
6790 tree
6791 rpo_elim::eliminate_avail (basic_block bb, tree op)
6792 {
6793   bool visited;
6794   tree valnum = SSA_VAL (op, &visited);
6795   /* If we didn't visit OP then it must be defined outside of the
6796      region we process and also dominate it.  So it is available.  */
6797   if (!visited)
6798     return op;
6799   if (TREE_CODE (valnum) == SSA_NAME)
6800     {
6801       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6802 	return valnum;
6803       vn_avail *av = VN_INFO (valnum)->avail;
6804       if (!av)
6805 	return NULL_TREE;
6806       if (av->location == bb->index)
6807 	/* On tramp3d 90% of the cases are here.  */
6808 	return ssa_name (av->leader);
6809       do
6810 	{
6811 	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6812 	  /* ???  During elimination we have to use availability at the
6813 	     definition site of a use we try to replace.  This
6814 	     is required to not run into inconsistencies because
6815 	     of dominated_by_p_w_unex behavior and removing a definition
6816 	     while not replacing all uses.
6817 	     ???  We could try to consistently walk dominators
6818 	     ignoring non-executable regions.  The nearest common
6819 	     dominator of bb and abb is where we can stop walking.  We
6820 	     may also be able to "pre-compute" (bits of) the next immediate
6821 	     (non-)dominator during the RPO walk when marking edges as
6822 	     executable.  */
6823 	  if (dominated_by_p_w_unex (bb, abb, true))
6824 	    {
6825 	      tree leader = ssa_name (av->leader);
6826 	      /* Prevent eliminations that break loop-closed SSA.  */
6827 	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6828 		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6829 		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6830 							 (leader))->loop_father,
6831 					      bb))
6832 		return NULL_TREE;
6833 	      if (dump_file && (dump_flags & TDF_DETAILS))
6834 		{
6835 		  print_generic_expr (dump_file, leader);
6836 		  fprintf (dump_file, " is available for ");
6837 		  print_generic_expr (dump_file, valnum);
6838 		  fprintf (dump_file, "\n");
6839 		}
6840 	      /* On tramp3d 99% of the _remaining_ cases succeed at
	         the first entry.  */
6842 	      return leader;
6843 	    }
6844 	  /* ???  Can we somehow skip to the immediate dominator
	     RPO index (bb_to_rpo)?  Again, maybe not worth it; on
6846 	     tramp3d the worst number of elements in the vector is 9.  */
6847 	  av = av->next;
6848 	}
6849       while (av);
6850     }
6851   else if (valnum != VN_TOP)
6852     /* valnum is is_gimple_min_invariant.  */
6853     return valnum;
6854   return NULL_TREE;
6855 }
6856 
6857 /* Make LEADER a leader for its value at BB.  */
6858 
6859 void
6860 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6861 {
6862   tree valnum = VN_INFO (leader)->valnum;
6863   if (valnum == VN_TOP
6864       || is_gimple_min_invariant (valnum))
6865     return;
6866   if (dump_file && (dump_flags & TDF_DETAILS))
6867     {
6868       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6869       print_generic_expr (dump_file, leader);
6870       fprintf (dump_file, " for value ");
6871       print_generic_expr (dump_file, valnum);
6872       fprintf (dump_file, "\n");
6873     }
6874   vn_ssa_aux_t value = VN_INFO (valnum);
6875   vn_avail *av;
6876   if (m_avail_freelist)
6877     {
6878       av = m_avail_freelist;
6879       m_avail_freelist = m_avail_freelist->next;
6880     }
6881   else
6882     av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6883   av->location = bb->index;
6884   av->leader = SSA_NAME_VERSION (leader);
6885   av->next = value->avail;
6886   value->avail = av;
6887 }
6888 
6889 /* Valueization hook for RPO VN plus required state.  */
6890 
6891 tree
6892 rpo_vn_valueize (tree name)
6893 {
6894   if (TREE_CODE (name) == SSA_NAME)
6895     {
6896       vn_ssa_aux_t val = VN_INFO (name);
6897       if (val)
6898 	{
6899 	  tree tem = val->valnum;
6900 	  if (tem != VN_TOP && tem != name)
6901 	    {
6902 	      if (TREE_CODE (tem) != SSA_NAME)
6903 		return tem;
6904 	      /* For all values we only valueize to an available leader
6905 		 which means we can use SSA name info without restriction.  */
6906 	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6907 	      if (tem)
6908 		return tem;
6909 	    }
6910 	}
6911     }
6912   return name;
6913 }
6914 
/* Insert on PRED_E predicates derived from CODE applied to OPS being true,
   besides the inverted condition itself.  */
6917 
6918 static void
6919 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6920 {
6921   switch (code)
6922     {
6923     case LT_EXPR:
6924       /* a < b -> a {!,<}= b */
6925       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6926 					   ops, boolean_true_node, 0, pred_e);
6927       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6928 					   ops, boolean_true_node, 0, pred_e);
6929       /* a < b -> ! a {>,=} b */
6930       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6931 					   ops, boolean_false_node, 0, pred_e);
6932       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6933 					   ops, boolean_false_node, 0, pred_e);
6934       break;
6935     case GT_EXPR:
6936       /* a > b -> a {!,>}= b */
6937       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6938 					   ops, boolean_true_node, 0, pred_e);
6939       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6940 					   ops, boolean_true_node, 0, pred_e);
6941       /* a > b -> ! a {<,=} b */
6942       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6943 					   ops, boolean_false_node, 0, pred_e);
6944       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6945 					   ops, boolean_false_node, 0, pred_e);
6946       break;
6947     case EQ_EXPR:
6948       /* a == b -> ! a {<,>} b */
6949       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6950 					   ops, boolean_false_node, 0, pred_e);
6951       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6952 					   ops, boolean_false_node, 0, pred_e);
6953       break;
6954     case LE_EXPR:
6955     case GE_EXPR:
6956     case NE_EXPR:
6957       /* Nothing besides inverted condition.  */
6958       break;
6959     default:;
6960     }
6961 }
6962 
6963 /* Main stmt worker for RPO VN, process BB.  */
6964 
6965 static unsigned
6966 process_bb (rpo_elim &avail, basic_block bb,
6967 	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6968 	    bool do_region, bitmap exit_bbs, bool skip_phis)
6969 {
6970   unsigned todo = 0;
6971   edge_iterator ei;
6972   edge e;
6973 
6974   vn_context_bb = bb;
6975 
6976   /* If we are in loop-closed SSA preserve this state.  This is
6977      relevant when called on regions from outside of FRE/PRE.  */
6978   bool lc_phi_nodes = false;
6979   if (!skip_phis
6980       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6981     FOR_EACH_EDGE (e, ei, bb->preds)
6982       if (e->src->loop_father != e->dest->loop_father
6983 	  && flow_loop_nested_p (e->dest->loop_father,
6984 				 e->src->loop_father))
6985 	{
6986 	  lc_phi_nodes = true;
6987 	  break;
6988 	}
6989 
6990   /* When we visit a loop header substitute into loop info.  */
6991   if (!iterate && eliminate && bb->loop_father->header == bb)
6992     {
6993       /* Keep fields in sync with substitute_in_loop_info.  */
6994       if (bb->loop_father->nb_iterations)
6995 	bb->loop_father->nb_iterations
6996 	  = simplify_replace_tree (bb->loop_father->nb_iterations,
6997 				   NULL_TREE, NULL_TREE, &vn_valueize_wrapper);
6998     }
6999 
7000   /* Value-number all defs in the basic-block.  */
7001   if (!skip_phis)
7002     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7003 	 gsi_next (&gsi))
7004       {
7005 	gphi *phi = gsi.phi ();
7006 	tree res = PHI_RESULT (phi);
7007 	vn_ssa_aux_t res_info = VN_INFO (res);
7008 	if (!bb_visited)
7009 	  {
7010 	    gcc_assert (!res_info->visited);
7011 	    res_info->valnum = VN_TOP;
7012 	    res_info->visited = true;
7013 	  }
7014 
7015 	/* When not iterating force backedge values to varying.  */
7016 	visit_stmt (phi, !iterate_phis);
7017 	if (virtual_operand_p (res))
7018 	  continue;
7019 
	/* Eliminate.  The interesting case for correctness of how we
	   handle backedges and availability is gcc.dg/tree-ssa/pr22230.c;
	   see gcc.dg/tree-ssa/ssa-sccvn-2.c for the optimization.  */
7024 	tree val = res_info->valnum;
7025 	if (res != val && !iterate && eliminate)
7026 	  {
7027 	    if (tree leader = avail.eliminate_avail (bb, res))
7028 	      {
7029 		if (leader != res
7030 		    /* Preserve loop-closed SSA form.  */
7031 		    && (! lc_phi_nodes
7032 			|| is_gimple_min_invariant (leader)))
7033 		  {
7034 		    if (dump_file && (dump_flags & TDF_DETAILS))
7035 		      {
7036 			fprintf (dump_file, "Replaced redundant PHI node "
7037 				 "defining ");
7038 			print_generic_expr (dump_file, res);
7039 			fprintf (dump_file, " with ");
7040 			print_generic_expr (dump_file, leader);
7041 			fprintf (dump_file, "\n");
7042 		      }
7043 		    avail.eliminations++;
7044 
7045 		    if (may_propagate_copy (res, leader))
7046 		      {
7047 			/* Schedule for removal.  */
7048 			avail.to_remove.safe_push (phi);
7049 			continue;
7050 		      }
7051 		    /* ???  Else generate a copy stmt.  */
7052 		  }
7053 	      }
7054 	  }
	/* Only make defs available that are not already.  But make
7056 	   sure loop-closed SSA PHI node defs are picked up for
7057 	   downstream uses.  */
7058 	if (lc_phi_nodes
7059 	    || res == val
7060 	    || ! avail.eliminate_avail (bb, res))
7061 	  avail.eliminate_push_avail (bb, res);
7062       }
7063 
  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt, as it has to happen
     before elimination, which otherwise rewrites GIMPLE_CONDs to the
     if (1 != 0) style when it sees non-executable edges.  */
7068   if (gsi_end_p (gsi_start_bb (bb)))
7069     {
7070       FOR_EACH_EDGE (e, ei, bb->succs)
7071 	{
7072 	  if (!(e->flags & EDGE_EXECUTABLE))
7073 	    {
7074 	      if (dump_file && (dump_flags & TDF_DETAILS))
7075 		fprintf (dump_file,
7076 			 "marking outgoing edge %d -> %d executable\n",
7077 			 e->src->index, e->dest->index);
7078 	      e->flags |= EDGE_EXECUTABLE;
7079 	      e->dest->flags |= BB_EXECUTABLE;
7080 	    }
7081 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7082 	    {
7083 	      if (dump_file && (dump_flags & TDF_DETAILS))
7084 		fprintf (dump_file,
7085 			 "marking destination block %d reachable\n",
7086 			 e->dest->index);
7087 	      e->dest->flags |= BB_EXECUTABLE;
7088 	    }
7089 	}
7090     }
7091   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7092        !gsi_end_p (gsi); gsi_next (&gsi))
7093     {
7094       ssa_op_iter i;
7095       tree op;
7096       if (!bb_visited)
7097 	{
7098 	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7099 	    {
7100 	      vn_ssa_aux_t op_info = VN_INFO (op);
7101 	      gcc_assert (!op_info->visited);
7102 	      op_info->valnum = VN_TOP;
7103 	      op_info->visited = true;
7104 	    }
7105 
7106 	  /* We somehow have to deal with uses that are not defined
7107 	     in the processed region.  Forcing unvisited uses to
7108 	     varying here doesn't play well with def-use following during
7109 	     expression simplification, so we deal with this by checking
7110 	     the visited flag in SSA_VAL.  */
7111 	}
7112 
7113       visit_stmt (gsi_stmt (gsi));
7114 
7115       gimple *last = gsi_stmt (gsi);
7116       e = NULL;
7117       switch (gimple_code (last))
7118 	{
7119 	case GIMPLE_SWITCH:
7120 	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7121 						(as_a <gswitch *> (last))));
7122 	  break;
7123 	case GIMPLE_COND:
7124 	  {
7125 	    tree lhs = vn_valueize (gimple_cond_lhs (last));
7126 	    tree rhs = vn_valueize (gimple_cond_rhs (last));
7127 	    tree val = gimple_simplify (gimple_cond_code (last),
7128 					boolean_type_node, lhs, rhs,
7129 					NULL, vn_valueize);
	    /* If the condition didn't simplify, see if we have recorded
	       an expression from the edges taken so far.  */
7132 	    if (! val || TREE_CODE (val) != INTEGER_CST)
7133 	      {
7134 		vn_nary_op_t vnresult;
7135 		tree ops[2];
7136 		ops[0] = lhs;
7137 		ops[1] = rhs;
7138 		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7139 						boolean_type_node, ops,
7140 						&vnresult);
7141 		/* Did we get a predicated value?  */
7142 		if (! val && vnresult && vnresult->predicated_values)
7143 		  {
7144 		    val = vn_nary_op_get_predicated_value (vnresult, bb);
7145 		    if (val && dump_file && (dump_flags & TDF_DETAILS))
7146 		      {
7147 			fprintf (dump_file, "Got predicated value ");
7148 			print_generic_expr (dump_file, val, TDF_NONE);
7149 			fprintf (dump_file, " for ");
7150 			print_gimple_stmt (dump_file, last, TDF_SLIM);
7151 		      }
7152 		  }
7153 	      }
7154 	    if (val)
7155 	      e = find_taken_edge (bb, val);
7156 	    if (! e)
7157 	      {
7158 		/* If we didn't manage to compute the taken edge then
7159 		   push predicated expressions for the condition itself
7160 		   and related conditions to the hashtables.  This allows
7161 		   simplification of redundant conditions which is
7162 		   important as early cleanup.  */
7163 		edge true_e, false_e;
7164 		extract_true_false_edges_from_block (bb, &true_e, &false_e);
7165 		enum tree_code code = gimple_cond_code (last);
7166 		enum tree_code icode
7167 		  = invert_tree_comparison (code, HONOR_NANS (lhs));
7168 		tree ops[2];
7169 		ops[0] = lhs;
7170 		ops[1] = rhs;
7171 		if (do_region
7172 		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
7173 		  true_e = NULL;
7174 		if (do_region
7175 		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
7176 		  false_e = NULL;
7177 		if (true_e)
7178 		  vn_nary_op_insert_pieces_predicated
7179 		    (2, code, boolean_type_node, ops,
7180 		     boolean_true_node, 0, true_e);
7181 		if (false_e)
7182 		  vn_nary_op_insert_pieces_predicated
7183 		    (2, code, boolean_type_node, ops,
7184 		     boolean_false_node, 0, false_e);
7185 		if (icode != ERROR_MARK)
7186 		  {
7187 		    if (true_e)
7188 		      vn_nary_op_insert_pieces_predicated
7189 			(2, icode, boolean_type_node, ops,
7190 			 boolean_false_node, 0, true_e);
7191 		    if (false_e)
7192 		      vn_nary_op_insert_pieces_predicated
7193 			(2, icode, boolean_type_node, ops,
7194 			 boolean_true_node, 0, false_e);
7195 		  }
7196 		/* Relax for non-integers, inverted condition handled
7197 		   above.  */
7198 		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7199 		  {
7200 		    if (true_e)
7201 		      insert_related_predicates_on_edge (code, ops, true_e);
7202 		    if (false_e)
7203 		      insert_related_predicates_on_edge (icode, ops, false_e);
7204 		  }
7205 	      }
7206 	    break;
7207 	  }
7208 	case GIMPLE_GOTO:
7209 	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7210 	  break;
7211 	default:
7212 	  e = NULL;
7213 	}
7214       if (e)
7215 	{
7216 	  todo = TODO_cleanup_cfg;
7217 	  if (!(e->flags & EDGE_EXECUTABLE))
7218 	    {
7219 	      if (dump_file && (dump_flags & TDF_DETAILS))
7220 		fprintf (dump_file,
7221 			 "marking known outgoing %sedge %d -> %d executable\n",
7222 			 e->flags & EDGE_DFS_BACK ? "back-" : "",
7223 			 e->src->index, e->dest->index);
7224 	      e->flags |= EDGE_EXECUTABLE;
7225 	      e->dest->flags |= BB_EXECUTABLE;
7226 	    }
7227 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7228 	    {
7229 	      if (dump_file && (dump_flags & TDF_DETAILS))
7230 		fprintf (dump_file,
7231 			 "marking destination block %d reachable\n",
7232 			 e->dest->index);
7233 	      e->dest->flags |= BB_EXECUTABLE;
7234 	    }
7235 	}
7236       else if (gsi_one_before_end_p (gsi))
7237 	{
7238 	  FOR_EACH_EDGE (e, ei, bb->succs)
7239 	    {
7240 	      if (!(e->flags & EDGE_EXECUTABLE))
7241 		{
7242 		  if (dump_file && (dump_flags & TDF_DETAILS))
7243 		    fprintf (dump_file,
7244 			     "marking outgoing edge %d -> %d executable\n",
7245 			     e->src->index, e->dest->index);
7246 		  e->flags |= EDGE_EXECUTABLE;
7247 		  e->dest->flags |= BB_EXECUTABLE;
7248 		}
7249 	      else if (!(e->dest->flags & BB_EXECUTABLE))
7250 		{
7251 		  if (dump_file && (dump_flags & TDF_DETAILS))
7252 		    fprintf (dump_file,
7253 			     "marking destination block %d reachable\n",
7254 			     e->dest->index);
7255 		  e->dest->flags |= BB_EXECUTABLE;
7256 		}
7257 	    }
7258 	}
7259 
7260       /* Eliminate.  That also pushes to avail.  */
7261       if (eliminate && ! iterate)
7262 	avail.eliminate_stmt (bb, &gsi);
7263       else
7264 	/* If not eliminating, make all not already available defs
7265 	   available.  */
7266 	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7267 	  if (! avail.eliminate_avail (bb, op))
7268 	    avail.eliminate_push_avail (bb, op);
7269     }
7270 
7271   /* Eliminate in destination PHI arguments.  Always substitute in dest
7272      PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
7274   if (!iterate && eliminate)
7275     FOR_EACH_EDGE (e, ei, bb->succs)
7276       for (gphi_iterator gsi = gsi_start_phis (e->dest);
7277 	   !gsi_end_p (gsi); gsi_next (&gsi))
7278 	{
7279 	  gphi *phi = gsi.phi ();
7280 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7281 	  tree arg = USE_FROM_PTR (use_p);
7282 	  if (TREE_CODE (arg) != SSA_NAME
7283 	      || virtual_operand_p (arg))
7284 	    continue;
7285 	  tree sprime;
7286 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
7287 	    {
7288 	      sprime = SSA_VAL (arg);
7289 	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
7290 			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
7291 	    }
7292 	  else
7293 	    /* Look for sth available at the definition block of the argument.
7294 	       This avoids inconsistencies between availability there which
7295 	       decides if the stmt can be removed and availability at the
7296 	       use site.  The SSA property ensures that things available
7297 	       at the definition are also available at uses.  */
7298 	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7299 					    arg);
7300 	  if (sprime
7301 	      && sprime != arg
7302 	      && may_propagate_copy (arg, sprime))
7303 	    propagate_value (use_p, sprime);
7304 	}
7305 
7306   vn_context_bb = NULL;
7307   return todo;
7308 }
7309 
7310 /* Unwind state per basic-block.  */
7311 
7312 struct unwind_state
7313 {
7314   /* Times this block has been visited.  */
7315   unsigned visited;
  /* Whether to handle this as an iteration point or whether to treat
7317      incoming backedge PHI values as varying.  */
7318   bool iterate;
7319   /* Maximum RPO index this block is reachable from.  */
7320   int max_rpo;
7321   /* Unwind state.  */
7322   void *ob_top;
7323   vn_reference_t ref_top;
7324   vn_phi_t phi_top;
7325   vn_nary_op_t nary_top;
7326 };
7327 
7328 /* Unwind the RPO VN state for iteration.  */
7329 
7330 static void
7331 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
7332 {
7333   gcc_assert (to->iterate);
7334   for (; last_inserted_nary != to->nary_top;
7335        last_inserted_nary = last_inserted_nary->next)
7336     {
7337       vn_nary_op_t *slot;
7338       slot = valid_info->nary->find_slot_with_hash
7339 	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7340       /* Predication causes the need to restore previous state.  */
7341       if ((*slot)->unwind_to)
7342 	*slot = (*slot)->unwind_to;
7343       else
7344 	valid_info->nary->clear_slot (slot);
7345     }
7346   for (; last_inserted_phi != to->phi_top;
7347        last_inserted_phi = last_inserted_phi->next)
7348     {
7349       vn_phi_t *slot;
7350       slot = valid_info->phis->find_slot_with_hash
7351 	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7352       valid_info->phis->clear_slot (slot);
7353     }
7354   for (; last_inserted_ref != to->ref_top;
7355        last_inserted_ref = last_inserted_ref->next)
7356     {
7357       vn_reference_t *slot;
7358       slot = valid_info->references->find_slot_with_hash
7359 	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7360       (*slot)->operands.release ();
7361       valid_info->references->clear_slot (slot);
7362     }
7363   obstack_free (&vn_tables_obstack, to->ob_top);
7364 
  /* Prune entries recorded in blocks at RPO index rpo_idx and later
     from avail.  */
7366   /* ???  This is O(number-of-values-in-region) which is
7367      O(region-size) rather than O(iteration-piece).  */
7368   for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7369        i != vn_ssa_aux_hash->end (); ++i)
7370     {
7371       while ((*i)->avail)
7372 	{
7373 	  if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
7374 	    break;
7375 	  vn_avail *av = (*i)->avail;
7376 	  (*i)->avail = (*i)->avail->next;
7377 	  av->next = avail.m_avail_freelist;
7378 	  avail.m_avail_freelist = av;
7379 	}
7380     }
7381 }
7382 
7383 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7384    If ITERATE is true then treat backedges optimistically as not
7385    executed and iterate.  If ELIMINATE is true then perform
7386    elimination, otherwise leave that to the caller.  */
7387 
7388 static unsigned
7389 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7390 	   bool iterate, bool eliminate)
7391 {
7392   unsigned todo = 0;
7393 
7394   /* We currently do not support region-based iteration when
7395      elimination is requested.  */
7396   gcc_assert (!entry || !iterate || !eliminate);
7397   /* When iterating we need loop info up-to-date.  */
7398   gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7399 
7400   bool do_region = entry != NULL;
7401   if (!do_region)
7402     {
7403       entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7404       exit_bbs = BITMAP_ALLOC (NULL);
7405       bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7406     }
7407 
7408   /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7409      re-mark those that are contained in the region.  */
7410   edge_iterator ei;
7411   edge e;
7412   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7413     e->flags &= ~EDGE_DFS_BACK;
7414 
7415   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7416   auto_vec<std::pair<int, int> > toplevel_scc_extents;
7417   int n = rev_post_order_and_mark_dfs_back_seme
7418     (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7419 
7420   if (!do_region)
7421     BITMAP_FREE (exit_bbs);
7422 
7423   /* If there are any non-DFS_BACK edges into entry->dest skip
7424      processing PHI nodes for that block.  This supports
7425      value-numbering loop bodies w/o the actual loop.  */
7426   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7427     if (e != entry
7428 	&& !(e->flags & EDGE_DFS_BACK))
7429       break;
7430   bool skip_entry_phis = e != NULL;
7431   if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7432     fprintf (dump_file, "Region does not contain all edges into "
7433 	     "the entry block, skipping its PHIs.\n");
7434 
7435   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7436   for (int i = 0; i < n; ++i)
7437     bb_to_rpo[rpo[i]] = i;
7438 
7439   unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7440 
7441   rpo_elim avail (entry->dest);
7442   rpo_avail = &avail;
7443 
7444   /* Verify we have no extra entries into the region.  */
7445   if (flag_checking && do_region)
7446     {
7447       auto_bb_flag bb_in_region (fn);
7448       for (int i = 0; i < n; ++i)
7449 	{
7450 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7451 	  bb->flags |= bb_in_region;
7452 	}
7453       /* We can't merge the first two loops because we cannot rely
7454          on EDGE_DFS_BACK for edges not within the region.  But if
7455 	 we decide to always have the bb_in_region flag we can
7456 	 do the checking during the RPO walk itself (but then it's
7457 	 also easy to handle MEME conservatively).  */
7458       for (int i = 0; i < n; ++i)
7459 	{
7460 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7461 	  edge e;
7462 	  edge_iterator ei;
7463 	  FOR_EACH_EDGE (e, ei, bb->preds)
7464 	    gcc_assert (e == entry
7465 			|| (skip_entry_phis && bb == entry->dest)
7466 			|| (e->src->flags & bb_in_region));
7467 	}
7468       for (int i = 0; i < n; ++i)
7469 	{
7470 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7471 	  bb->flags &= ~bb_in_region;
7472 	}
7473     }
7474 
7475   /* Create the VN state.  For the initial size of the various hashtables
7476      use a heuristic based on region size and number of SSA names.  */
7477   unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7478 			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7479   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7480   next_value_id = 1;
7481 
7482   vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7483   gcc_obstack_init (&vn_ssa_aux_obstack);
7484 
7485   gcc_obstack_init (&vn_tables_obstack);
7486   gcc_obstack_init (&vn_tables_insert_obstack);
7487   valid_info = XCNEW (struct vn_tables_s);
7488   allocate_vn_table (valid_info, region_size);
7489   last_inserted_ref = NULL;
7490   last_inserted_phi = NULL;
7491   last_inserted_nary = NULL;
7492 
7493   vn_valueize = rpo_vn_valueize;
7494 
7495   /* Initialize the unwind state and edge/BB executable state.  */
7496   unsigned curr_scc = 0;
7497   for (int i = 0; i < n; ++i)
7498     {
7499       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7500       rpo_state[i].visited = 0;
7501       rpo_state[i].max_rpo = i;
7502       if (!iterate && curr_scc < toplevel_scc_extents.length ())
7503 	{
7504 	  if (i >= toplevel_scc_extents[curr_scc].first
7505 	      && i <= toplevel_scc_extents[curr_scc].second)
7506 	    rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7507 	  if (i == toplevel_scc_extents[curr_scc].second)
7508 	    curr_scc++;
7509 	}
7510       bb->flags &= ~BB_EXECUTABLE;
7511       bool has_backedges = false;
7512       edge e;
7513       edge_iterator ei;
7514       FOR_EACH_EDGE (e, ei, bb->preds)
7515 	{
7516 	  if (e->flags & EDGE_DFS_BACK)
7517 	    has_backedges = true;
7518 	  e->flags &= ~EDGE_EXECUTABLE;
7519 	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7520 	    continue;
7521 	}
7522       rpo_state[i].iterate = iterate && has_backedges;
7523     }
7524   entry->flags |= EDGE_EXECUTABLE;
7525   entry->dest->flags |= BB_EXECUTABLE;
7526 
  /* As a heuristic to improve compile time we handle only the N innermost
7528      loops and the outermost one optimistically.  */
7529   if (iterate)
7530     {
7531       loop_p loop;
7532       unsigned max_depth = param_rpo_vn_max_loop_depth;
7533       FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
7534 	if (loop_depth (loop) > max_depth)
7535 	  for (unsigned i = 2;
7536 	       i < loop_depth (loop) - max_depth; ++i)
7537 	    {
7538 	      basic_block header = superloop_at_depth (loop, i)->header;
7539 	      bool non_latch_backedge = false;
7540 	      edge e;
7541 	      edge_iterator ei;
7542 	      FOR_EACH_EDGE (e, ei, header->preds)
7543 		if (e->flags & EDGE_DFS_BACK)
7544 		  {
7545 		    /* There can be a non-latch backedge into the header
7546 		       which is part of an outer irreducible region.  We
7547 		       cannot avoid iterating this block then.  */
7548 		    if (!dominated_by_p (CDI_DOMINATORS,
7549 					 e->src, e->dest))
7550 		      {
7551 			if (dump_file && (dump_flags & TDF_DETAILS))
7552 			  fprintf (dump_file, "non-latch backedge %d -> %d "
7553 				   "forces iteration of loop %d\n",
7554 				   e->src->index, e->dest->index, loop->num);
7555 			non_latch_backedge = true;
7556 		      }
7557 		    else
7558 		      e->flags |= EDGE_EXECUTABLE;
7559 		  }
7560 	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7561 	    }
7562     }
7563 
7564   uint64_t nblk = 0;
7565   int idx = 0;
7566   if (iterate)
7567     /* Go and process all blocks, iterating as necessary.  */
7568     do
7569       {
7570 	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7571 
7572 	/* If the block has incoming backedges remember unwind state.  This
7573 	   is required even for non-executable blocks since in irreducible
7574 	   regions we might reach them via the backedge and re-start iterating
7575 	   from there.
	   Note we can individually mark blocks with incoming backedges to
	   not iterate, in which case we handle their PHIs conservatively.
	   We do that heuristically to reduce compile-time for degenerate
	   cases.  */
7579 	if (rpo_state[idx].iterate)
7580 	  {
7581 	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7582 	    rpo_state[idx].ref_top = last_inserted_ref;
7583 	    rpo_state[idx].phi_top = last_inserted_phi;
7584 	    rpo_state[idx].nary_top = last_inserted_nary;
7585 	  }
7586 
7587 	if (!(bb->flags & BB_EXECUTABLE))
7588 	  {
7589 	    if (dump_file && (dump_flags & TDF_DETAILS))
7590 	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
7591 		       idx, bb->index);
7592 	    idx++;
7593 	    continue;
7594 	  }
7595 
7596 	if (dump_file && (dump_flags & TDF_DETAILS))
7597 	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7598 	nblk++;
7599 	todo |= process_bb (avail, bb,
7600 			    rpo_state[idx].visited != 0,
7601 			    rpo_state[idx].iterate,
7602 			    iterate, eliminate, do_region, exit_bbs, false);
7603 	rpo_state[idx].visited++;
7604 
	/* Check whether changed values flow over executable outgoing
	   backedges and whether those change destination PHI values
	   (that is what we can easily verify).  Reduce over all such
	   edges to the farthest-away PHI.  */
7609 	int iterate_to = -1;
7610 	edge_iterator ei;
7611 	edge e;
7612 	FOR_EACH_EDGE (e, ei, bb->succs)
7613 	  if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7614 	      == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7615 	      && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7616 	    {
7617 	      int destidx = bb_to_rpo[e->dest->index];
7618 	      if (!rpo_state[destidx].visited)
7619 		{
7620 		  if (dump_file && (dump_flags & TDF_DETAILS))
7621 		    fprintf (dump_file, "Unvisited destination %d\n",
7622 			     e->dest->index);
7623 		  if (iterate_to == -1 || destidx < iterate_to)
7624 		    iterate_to = destidx;
7625 		  continue;
7626 		}
7627 	      if (dump_file && (dump_flags & TDF_DETAILS))
7628 		fprintf (dump_file, "Looking for changed values of backedge"
7629 			 " %d->%d destination PHIs\n",
7630 			 e->src->index, e->dest->index);
7631 	      vn_context_bb = e->dest;
7632 	      gphi_iterator gsi;
7633 	      for (gsi = gsi_start_phis (e->dest);
7634 		   !gsi_end_p (gsi); gsi_next (&gsi))
7635 		{
7636 		  bool inserted = false;
7637 		  /* While we'd ideally just iterate on value changes
7638 		     we CSE PHIs and do that even across basic-block
7639 		     boundaries.  So even hashtable state changes can
7640 		     be important (which is roughly equivalent to
		     PHI argument value changes).  To avoid iterating
		     excessively because of that, we use GF_PLF_1 to
		     track whether a PHI was CSEd to.  */
7644 		  bool phival_changed;
7645 		  if ((phival_changed = visit_phi (gsi.phi (),
7646 						   &inserted, false))
7647 		      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7648 		    {
7649 		      if (!phival_changed
7650 			  && dump_file && (dump_flags & TDF_DETAILS))
7651 			fprintf (dump_file, "PHI was CSEd and hashtable "
7652 				 "state (changed)\n");
7653 		      if (iterate_to == -1 || destidx < iterate_to)
7654 			iterate_to = destidx;
7655 		      break;
7656 		    }
7657 		}
7658 	      vn_context_bb = NULL;
7659 	    }
7660 	if (iterate_to != -1)
7661 	  {
7662 	    do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7663 	    idx = iterate_to;
7664 	    if (dump_file && (dump_flags & TDF_DETAILS))
7665 	      fprintf (dump_file, "Iterating to %d BB%d\n",
7666 		       iterate_to, rpo[iterate_to]);
7667 	    continue;
7668 	  }
7669 
7670 	idx++;
7671       }
7672     while (idx < n);
7673 
7674   else /* !iterate */
7675     {
7676       /* Process all blocks greedily with a worklist that enforces RPO
7677          processing of reachable blocks.  */
7678       auto_bitmap worklist;
7679       bitmap_set_bit (worklist, 0);
7680       while (!bitmap_empty_p (worklist))
7681 	{
7682 	  int idx = bitmap_first_set_bit (worklist);
7683 	  bitmap_clear_bit (worklist, idx);
7684 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7685 	  gcc_assert ((bb->flags & BB_EXECUTABLE)
7686 		      && !rpo_state[idx].visited);
7687 
7688 	  if (dump_file && (dump_flags & TDF_DETAILS))
7689 	    fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7690 
	  /* When we run into predecessor edges whose executable state we
	     cannot trust, mark them executable so PHI processing will
	     be conservative.
7694 	     ???  Do we need to force arguments flowing over that edge
7695 	     to be varying or will they even always be?  */
7696 	  edge_iterator ei;
7697 	  edge e;
7698 	  FOR_EACH_EDGE (e, ei, bb->preds)
7699 	    if (!(e->flags & EDGE_EXECUTABLE)
7700 		&& (bb == entry->dest
7701 		    || (!rpo_state[bb_to_rpo[e->src->index]].visited
7702 			&& (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7703 			    >= (int)idx))))
7704 	      {
7705 		if (dump_file && (dump_flags & TDF_DETAILS))
7706 		  fprintf (dump_file, "Cannot trust state of predecessor "
7707 			   "edge %d -> %d, marking executable\n",
7708 			   e->src->index, e->dest->index);
7709 		e->flags |= EDGE_EXECUTABLE;
7710 	      }
7711 
7712 	  nblk++;
7713 	  todo |= process_bb (avail, bb, false, false, false, eliminate,
7714 			      do_region, exit_bbs,
7715 			      skip_entry_phis && bb == entry->dest);
7716 	  rpo_state[idx].visited++;
7717 
7718 	  FOR_EACH_EDGE (e, ei, bb->succs)
7719 	    if ((e->flags & EDGE_EXECUTABLE)
7720 		&& e->dest->index != EXIT_BLOCK
7721 		&& (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7722 		&& !rpo_state[bb_to_rpo[e->dest->index]].visited)
7723 	      bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7724 	}
7725     }
7726 
  /* Collect statistics for the statistics machinery and the dump file.  */
7728   int nex = 0;
7729   unsigned max_visited = 1;
7730   for (int i = 0; i < n; ++i)
7731     {
7732       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7733       if (bb->flags & BB_EXECUTABLE)
7734 	nex++;
7735       statistics_histogram_event (cfun, "RPO block visited times",
7736 				  rpo_state[i].visited);
7737       if (rpo_state[i].visited > max_visited)
7738 	max_visited = rpo_state[i].visited;
7739     }
7740   unsigned nvalues = 0, navail = 0;
7741   for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7742        i != vn_ssa_aux_hash->end (); ++i)
7743     {
7744       nvalues++;
7745       vn_avail *av = (*i)->avail;
7746       while (av)
7747 	{
7748 	  navail++;
7749 	  av = av->next;
7750 	}
7751     }
7752   statistics_counter_event (cfun, "RPO blocks", n);
7753   statistics_counter_event (cfun, "RPO blocks visited", nblk);
7754   statistics_counter_event (cfun, "RPO blocks executable", nex);
7755   statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7756   statistics_histogram_event (cfun, "RPO num values", nvalues);
7757   statistics_histogram_event (cfun, "RPO num avail", navail);
7758   statistics_histogram_event (cfun, "RPO num lattice",
7759 			      vn_ssa_aux_hash->elements ());
7760   if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7761     {
7762       fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7763 	       " blocks in total discovering %d executable blocks iterating "
7764 	       "%d.%d times, a block was visited max. %u times\n",
7765 	       n, nblk, nex,
7766 	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7767 	       max_visited);
7768       fprintf (dump_file, "RPO tracked %d values available at %d locations "
7769 	       "and %" PRIu64 " lattice elements\n",
7770 	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7771     }
7772 
7773   if (eliminate)
7774     {
7775       /* When !iterate we already performed elimination during the RPO
7776          walk.  */
7777       if (iterate)
7778 	{
7779 	  /* Elimination for region-based VN needs to be done within the
7780 	     RPO walk.  */
7781 	  gcc_assert (! do_region);
7782 	  /* Note we can't use avail.walk here because that gets confused
7783 	     by the existing availability and it will be less efficient
7784 	     as well.  */
7785 	  todo |= eliminate_with_rpo_vn (NULL);
7786 	}
7787       else
7788 	todo |= avail.eliminate_cleanup (do_region);
7789     }
7790 
7791   vn_valueize = NULL;
7792   rpo_avail = NULL;
7793 
7794   XDELETEVEC (bb_to_rpo);
7795   XDELETEVEC (rpo);
7796   XDELETEVEC (rpo_state);
7797 
7798   return todo;
7799 }
7800 
7801 /* Region-based entry for RPO VN.  Performs value-numbering and elimination
7802    on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
7803    the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
7804    are not considered.  */
7805 
7806 unsigned
7807 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7808 {
7809   default_vn_walk_kind = VN_WALKREWRITE;
7810   unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7811   free_rpo_vn ();
7812   return todo;
7813 }
7814 
7815 
7816 namespace {
7817 
7818 const pass_data pass_data_fre =
7819 {
7820   GIMPLE_PASS, /* type */
7821   "fre", /* name */
7822   OPTGROUP_NONE, /* optinfo_flags */
7823   TV_TREE_FRE, /* tv_id */
7824   ( PROP_cfg | PROP_ssa ), /* properties_required */
7825   0, /* properties_provided */
7826   0, /* properties_destroyed */
7827   0, /* todo_flags_start */
7828   0, /* todo_flags_finish */
7829 };
7830 
7831 class pass_fre : public gimple_opt_pass
7832 {
7833 public:
7834   pass_fre (gcc::context *ctxt)
7835     : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7836   {}
7837 
7838   /* opt_pass methods: */
7839   opt_pass * clone () { return new pass_fre (m_ctxt); }
7840   void set_pass_param (unsigned int n, bool param)
7841     {
7842       gcc_assert (n == 0);
7843       may_iterate = param;
7844     }
7845   virtual bool gate (function *)
7846     {
7847       return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7848     }
7849   virtual unsigned int execute (function *);
7850 
7851 private:
7852   bool may_iterate;
7853 }; // class pass_fre
7854 
7855 unsigned int
7856 pass_fre::execute (function *fun)
7857 {
7858   unsigned todo = 0;
7859 
7860   /* At -O[1g] use the cheap non-iterating mode.  */
7861   bool iterate_p = may_iterate && (optimize > 1);
7862   calculate_dominance_info (CDI_DOMINATORS);
7863   if (iterate_p)
7864     loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
7865 
7866   default_vn_walk_kind = VN_WALKREWRITE;
7867   todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
7868   free_rpo_vn ();
7869 
7870   if (iterate_p)
7871     loop_optimizer_finalize ();
7872 
  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE flags and rewrite more variables
     into SSA form.  */
7875   if (!may_iterate)
7876     todo |= TODO_update_address_taken;
7877 
7878   return todo;
7879 }
7880 
7881 } // anon namespace
7882 
7883 gimple_opt_pass *
7884 make_pass_fre (gcc::context *ctxt)
7885 {
7886   return new pass_fre (ctxt);
7887 }
7888 
7889 #undef BB_EXECUTABLE
7890