1 /* SCC value numbering for trees
2    Copyright (C) 2006-2020 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "tree-ssa-sccvn.h"
74 
75 /* This algorithm is based on the SCC algorithm presented by Keith
76    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
77    (http://citeseer.ist.psu.edu/41805.html).  In
78    straight-line code, it is equivalent to a regular hash-based value
79    numbering performed in reverse postorder.
80 
81    For code with cycles, there are two alternatives, both of which
82    require keeping the hashtables separate from the actual list of
83    value numbers for SSA names.
84 
85    1. Iterate value numbering in an RPO walk of the blocks, removing
86    all the entries from the hashtable after each iteration (but
87    keeping the SSA name->value number mapping between iterations).
88    Iterate until it does not change.
89 
90    2. Perform value numbering as part of an SCC walk on the SSA graph,
91    iterating only the cycles in the SSA graph until they do not change
92    (using a separate, optimistic hashtable for value numbering the SCC
93    operands).
94 
95    The second is not just faster in practice (because most SSA graph
96    cycles do not involve all the variables in the graph), it also has
97    some nice properties.
98 
99    One of these nice properties is that when we pop an SCC off the
100    stack, we are guaranteed to have processed all the operands coming from
101    *outside of that SCC*, so we do not need to do anything special to
102    ensure they have value numbers.
103 
104    Another nice property is that the SCC walk is done as part of a DFS
105    of the SSA graph, which makes it easy to perform combining and
106    simplifying operations at the same time.
107 
108    The code below is deliberately written in a way that makes it easy
109    to separate the SCC walk from the other work it does.
110 
111    In order to propagate constants through the code, we track which
112    expressions contain constants, and use those while folding.  In
113    theory, we could also track expressions whose value numbers are
114    replaced, in case we end up folding based on expression
115    identities.
116 
117    In order to value number memory, we assign value numbers to vuses.
118    This enables us to note that, for example, stores to the same
119    address of the same value from the same starting memory states are
120    equivalent.
121    TODO:
122 
123    1. We can iterate only the changing portions of the SCCs, but
124    I have not seen an SCC big enough for this to be a win.
125    2. If you differentiate between phi nodes for loops and phi nodes
126    for if-then-else, you can properly consider phi nodes in different
127    blocks for equivalence.
128    3. We could value number vuses in more cases, particularly, whole
129    structure copies.
130 */
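
/* As a rough illustration of alternative 1 above (not part of the
   implementation; the helpers clear_tables, lookup, insert and canon are
   made up for this sketch), a hash-based value numbering iterated over an
   RPO walk until it reaches a fixed point would look like:

     bool changed = true;
     while (changed)
       {
	 changed = false;
	 clear_tables ();
	 for each basic block BB in reverse postorder
	   for each assignment LHS = RHS in BB
	     {
	       tree val = lookup (canon (RHS));
	       if (!val)
		 val = insert (canon (RHS), LHS);
	       if (val != value_number (LHS))
		 {
		   set_value_number (LHS, val);
		   changed = true;
		 }
	     }
       }

   This is why both alternatives keep the SSA name -> value number mapping
   separate from the hash tables that get cleared between iterations.  */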
131 
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
133 #define BB_EXECUTABLE BB_VISITED
134 
135 static vn_lookup_kind default_vn_walk_kind;
136 
137 /* vn_nary_op hashtable helpers.  */
138 
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 {
141   typedef vn_nary_op_s *compare_type;
142   static inline hashval_t hash (const vn_nary_op_s *);
143   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144 };
145 
146 /* Return the computed hashcode for nary operation P1.  */
147 
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 {
151   return vno1->hashcode;
152 }
153 
154 /* Compare nary operations P1 and P2 and return true if they are
155    equivalent.  */
156 
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 {
160   return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
161 }
162 
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165 
166 
167 /* vn_phi hashtable helpers.  */
168 
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173 {
174   static inline hashval_t hash (const vn_phi_s *);
175   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 };
177 
178 /* Return the computed hashcode for phi operation P1.  */
179 
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
182 {
183   return vp1->hashcode;
184 }
185 
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
187 
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190 {
191   return vp1 == vp2 || vn_phi_eq (vp1, vp2);
192 }
193 
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 
197 
198 /* Compare two reference operands P1 and P2 for equality.  Return true if
199    they are equal, and false otherwise.  */
200 
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
203 {
204   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 
207   return (vro1->opcode == vro2->opcode
208 	  /* We do not care for differences in type qualification.  */
209 	  && (vro1->type == vro2->type
210 	      || (vro1->type && vro2->type
211 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 					 TYPE_MAIN_VARIANT (vro2->type))))
213 	  && expressions_equal_p (vro1->op0, vro2->op0)
214 	  && expressions_equal_p (vro1->op1, vro2->op1)
215 	  && expressions_equal_p (vro1->op2, vro2->op2));
216 }
217 
218 /* Free a reference operation structure VP.  */
219 
220 static inline void
221 free_reference (vn_reference_s *vr)
222 {
223   vr->operands.release ();
224 }
225 
226 
227 /* vn_reference hashtable helpers.  */
228 
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230 {
231   static inline hashval_t hash (const vn_reference_s *);
232   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
233 };
234 
235 /* Return the hashcode for a given reference operation P1.  */
236 
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 {
240   return vr1->hashcode;
241 }
242 
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 {
246   return v == c || vn_reference_eq (v, c);
247 }
248 
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
251 
252 
253 /* The set of VN hashtables.  */
254 
255 typedef struct vn_tables_s
256 {
257   vn_nary_op_table_type *nary;
258   vn_phi_table_type *phis;
259   vn_reference_table_type *references;
260 } *vn_tables_t;
261 
262 
263 /* vn_constant hashtable helpers.  */
264 
265 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
266 {
267   static inline hashval_t hash (const vn_constant_s *);
268   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
269 };
270 
271 /* Hash table hash function for vn_constant_t.  */
272 
273 inline hashval_t
274 vn_constant_hasher::hash (const vn_constant_s *vc1)
275 {
276   return vc1->hashcode;
277 }
278 
279 /* Hash table equality function for vn_constant_t.  */
280 
281 inline bool
282 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
283 {
284   if (vc1->hashcode != vc2->hashcode)
285     return false;
286 
287   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
288 }
289 
290 static hash_table<vn_constant_hasher> *constant_to_value_id;
291 static bitmap constant_value_ids;
292 
293 
294 /* Obstack we allocate the vn-tables elements from.  */
295 static obstack vn_tables_obstack;
296 /* Special obstack we never unwind.  */
297 static obstack vn_tables_insert_obstack;
298 
299 static vn_reference_t last_inserted_ref;
300 static vn_phi_t last_inserted_phi;
301 static vn_nary_op_t last_inserted_nary;
302 
303 /* Valid hashtables storing information we have proven to be
304    correct.  */
305 static vn_tables_t valid_info;
306 
307 
308 /* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
309    just return it.  */
310 tree (*vn_valueize) (tree);
311 tree vn_valueize_wrapper (tree t, void* context ATTRIBUTE_UNUSED)
312 {
313   return vn_valueize (t);
314 }
315 
316 
317 /* This represents the top of the VN lattice, which is the universal
318    value.  */
319 
320 tree VN_TOP;
321 
322 /* Unique counter for our value ids.  */
323 
324 static unsigned int next_value_id;
325 
326 
327 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
328    are allocated on an obstack for locality reasons, and to free them
329    without looping over the vec.  */
330 
331 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
332 {
333   typedef vn_ssa_aux_t value_type;
334   typedef tree compare_type;
335   static inline hashval_t hash (const value_type &);
336   static inline bool equal (const value_type &, const compare_type &);
337   static inline void mark_deleted (value_type &) {}
338   static const bool empty_zero_p = true;
339   static inline void mark_empty (value_type &e) { e = NULL; }
340   static inline bool is_deleted (value_type &) { return false; }
341   static inline bool is_empty (value_type &e) { return e == NULL; }
342 };
343 
344 hashval_t
345 vn_ssa_aux_hasher::hash (const value_type &entry)
346 {
347   return SSA_NAME_VERSION (entry->name);
348 }
349 
350 bool
351 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
352 {
353   return name == entry->name;
354 }
355 
356 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
357 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
358 static struct obstack vn_ssa_aux_obstack;
359 
360 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
361 static unsigned int vn_nary_length_from_stmt (gimple *);
362 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
363 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
364 					    vn_nary_op_table_type *, bool);
365 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
366 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
367 					 enum tree_code, tree, tree *);
368 static tree vn_lookup_simplify_result (gimple_match_op *);
369 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
370 	  (tree, alias_set_type, alias_set_type, tree,
371 	   vec<vn_reference_op_s, va_heap>, tree);
372 
373 /* Return whether there is value numbering information for a given SSA name.  */
374 
375 bool
376 has_VN_INFO (tree name)
377 {
378   return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
379 }
380 
381 vn_ssa_aux_t
382 VN_INFO (tree name)
383 {
384   vn_ssa_aux_t *res
385     = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
386 					    INSERT);
387   if (*res != NULL)
388     return *res;
389 
390   vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
391   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
392   newinfo->name = name;
393   newinfo->valnum = VN_TOP;
394   /* We are using the visited flag to handle uses with defs not within the
395      region being value-numbered.  */
396   newinfo->visited = false;
397 
398   /* Given we now create the VN_INFOs on demand, we have to do
399      initialization different from plain VN_TOP here.  */
400   if (SSA_NAME_IS_DEFAULT_DEF (name))
401     switch (TREE_CODE (SSA_NAME_VAR (name)))
402       {
403       case VAR_DECL:
404         /* All undefined vars are VARYING.  */
405         newinfo->valnum = name;
406 	newinfo->visited = true;
407 	break;
408 
409       case PARM_DECL:
410 	/* Parameters are VARYING but we can record a condition
411 	   if we know it is a non-NULL pointer.  */
412 	newinfo->visited = true;
413 	newinfo->valnum = name;
414 	if (POINTER_TYPE_P (TREE_TYPE (name))
415 	    && nonnull_arg_p (SSA_NAME_VAR (name)))
416 	  {
417 	    tree ops[2];
418 	    ops[0] = name;
419 	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
420 	    vn_nary_op_t nary;
421 	    /* Allocate from non-unwinding stack.  */
422 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
423 	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
424 					 boolean_type_node, ops);
425 	    nary->predicated_values = 0;
426 	    nary->u.result = boolean_true_node;
427 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
428 	    gcc_assert (nary->unwind_to == NULL);
429 	    /* Also do not link it into the undo chain.  */
430 	    last_inserted_nary = nary->next;
431 	    nary->next = (vn_nary_op_t)(void *)-1;
432 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
433 	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
434 					 boolean_type_node, ops);
435 	    nary->predicated_values = 0;
436 	    nary->u.result = boolean_false_node;
437 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
438 	    gcc_assert (nary->unwind_to == NULL);
439 	    last_inserted_nary = nary->next;
440 	    nary->next = (vn_nary_op_t)(void *)-1;
441 	    if (dump_file && (dump_flags & TDF_DETAILS))
442 	      {
443 		fprintf (dump_file, "Recording ");
444 		print_generic_expr (dump_file, name, TDF_SLIM);
445 		fprintf (dump_file, " != 0\n");
446 	      }
447 	  }
448 	break;
449 
450       case RESULT_DECL:
451 	/* If the result is passed by invisible reference the default
452 	   def is initialized, otherwise it's uninitialized.  Still,
453 	   undefined is varying.  */
454 	newinfo->visited = true;
455 	newinfo->valnum = name;
456 	break;
457 
458       default:
459 	gcc_unreachable ();
460       }
461   return newinfo;
462 }
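
/* For example, for a parameter declared as

     void foo (int *p) __attribute__((nonnull));

   the PARM_DECL case above pre-seeds the nary table with the facts
   p_1(D) != 0 == true and p_1(D) == 0 == false, so a later test like
   if (p_1(D) == 0) can be value-numbered to false without ever looking
   at a definition of p_1(D).  */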
463 
464 /* Return the SSA value of X.  */
465 
466 inline tree
467 SSA_VAL (tree x, bool *visited = NULL)
468 {
469   vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
470   if (visited)
471     *visited = tem && tem->visited;
472   return tem && tem->visited ? tem->valnum : x;
473 }
474 
475 /* Return the SSA value of the VUSE x, supporting released VDEFs
476    during elimination which will value-number the VDEF to the
477    associated VUSE (but not substitute in the whole lattice).  */
478 
479 static inline tree
480 vuse_ssa_val (tree x)
481 {
482   if (!x)
483     return NULL_TREE;
484 
485   do
486     {
487       x = SSA_VAL (x);
488       gcc_assert (x != VN_TOP);
489     }
490   while (SSA_NAME_IN_FREE_LIST (x));
491 
492   return x;
493 }
494 
495 /* Similar to the above but used as callback for walk_non_aliases_vuses
496    and thus should stop at unvisited VUSE to not walk across region
497    boundaries.  */
498 
499 static tree
500 vuse_valueize (tree vuse)
501 {
502   do
503     {
504       bool visited;
505       vuse = SSA_VAL (vuse, &visited);
506       if (!visited)
507 	return NULL_TREE;
508       gcc_assert (vuse != VN_TOP);
509     }
510   while (SSA_NAME_IN_FREE_LIST (vuse));
511   return vuse;
512 }
513 
514 
515 /* Return the vn_kind the expression computed by the stmt should be
516    associated with.  */
517 
518 enum vn_kind
519 vn_get_stmt_kind (gimple *stmt)
520 {
521   switch (gimple_code (stmt))
522     {
523     case GIMPLE_CALL:
524       return VN_REFERENCE;
525     case GIMPLE_PHI:
526       return VN_PHI;
527     case GIMPLE_ASSIGN:
528       {
529 	enum tree_code code = gimple_assign_rhs_code (stmt);
530 	tree rhs1 = gimple_assign_rhs1 (stmt);
531 	switch (get_gimple_rhs_class (code))
532 	  {
533 	  case GIMPLE_UNARY_RHS:
534 	  case GIMPLE_BINARY_RHS:
535 	  case GIMPLE_TERNARY_RHS:
536 	    return VN_NARY;
537 	  case GIMPLE_SINGLE_RHS:
538 	    switch (TREE_CODE_CLASS (code))
539 	      {
540 	      case tcc_reference:
541 		/* VOP-less references can go through unary case.  */
542 		if ((code == REALPART_EXPR
543 		     || code == IMAGPART_EXPR
544 		     || code == VIEW_CONVERT_EXPR
545 		     || code == BIT_FIELD_REF)
546 		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
547 			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
548 		  return VN_NARY;
549 
550 		/* Fallthrough.  */
551 	      case tcc_declaration:
552 		return VN_REFERENCE;
553 
554 	      case tcc_constant:
555 		return VN_CONSTANT;
556 
557 	      default:
558 		if (code == ADDR_EXPR)
559 		  return (is_gimple_min_invariant (rhs1)
560 			  ? VN_CONSTANT : VN_REFERENCE);
561 		else if (code == CONSTRUCTOR)
562 		  return VN_NARY;
563 		return VN_NONE;
564 	      }
565 	  default:
566 	    return VN_NONE;
567 	  }
568       }
569     default:
570       return VN_NONE;
571     }
572 }
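
/* Some examples of the classification done above:

     x_1 = a_2 + b_3;      -> VN_NARY
     x_1 = (int) a_2;      -> VN_NARY
     x_1 = MEM[p_2];       -> VN_REFERENCE
     x_1 = foo (a_2);      -> VN_REFERENCE (a GIMPLE_CALL)
     x_1 = PHI <a_2, b_3>  -> VN_PHI
     x_1 = &a;             -> VN_CONSTANT (invariant address)
     x_1 = &p_2->f;        -> VN_REFERENCE (non-invariant address)  */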
573 
574 /* Lookup a value id for CONSTANT and return it.  If it does not
575    exist returns 0.  */
576 
577 unsigned int
578 get_constant_value_id (tree constant)
579 {
580   vn_constant_s **slot;
581   struct vn_constant_s vc;
582 
583   vc.hashcode = vn_hash_constant_with_type (constant);
584   vc.constant = constant;
585   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
586   if (slot)
587     return (*slot)->value_id;
588   return 0;
589 }
590 
591 /* Lookup a value id for CONSTANT, and if it does not exist, create a
592    new one and return it.  If it does exist, return it.  */
593 
594 unsigned int
595 get_or_alloc_constant_value_id (tree constant)
596 {
597   vn_constant_s **slot;
598   struct vn_constant_s vc;
599   vn_constant_t vcp;
600 
601   /* If the hashtable isn't initialized we're not running from PRE and thus
602      do not need value-ids.  */
603   if (!constant_to_value_id)
604     return 0;
605 
606   vc.hashcode = vn_hash_constant_with_type (constant);
607   vc.constant = constant;
608   slot = constant_to_value_id->find_slot (&vc, INSERT);
609   if (*slot)
610     return (*slot)->value_id;
611 
612   vcp = XNEW (struct vn_constant_s);
613   vcp->hashcode = vc.hashcode;
614   vcp->constant = constant;
615   vcp->value_id = get_next_value_id ();
616   *slot = vcp;
617   bitmap_set_bit (constant_value_ids, vcp->value_id);
618   return vcp->value_id;
619 }
620 
621 /* Return true if V is a value id for a constant.  */
622 
623 bool
624 value_id_constant_p (unsigned int v)
625 {
626   return bitmap_bit_p (constant_value_ids, v);
627 }
628 
629 /* Compute the hash for a reference operand VRO1.  */
630 
631 static void
632 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
633 {
634   hstate.add_int (vro1->opcode);
635   if (vro1->op0)
636     inchash::add_expr (vro1->op0, hstate);
637   if (vro1->op1)
638     inchash::add_expr (vro1->op1, hstate);
639   if (vro1->op2)
640     inchash::add_expr (vro1->op2, hstate);
641 }
642 
643 /* Compute a hash for the reference operation VR1 and return it.  */
644 
645 static hashval_t
646 vn_reference_compute_hash (const vn_reference_t vr1)
647 {
648   inchash::hash hstate;
649   hashval_t result;
650   int i;
651   vn_reference_op_t vro;
652   poly_int64 off = -1;
653   bool deref = false;
654 
655   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
656     {
657       if (vro->opcode == MEM_REF)
658 	deref = true;
659       else if (vro->opcode != ADDR_EXPR)
660 	deref = false;
661       if (maybe_ne (vro->off, -1))
662 	{
663 	  if (known_eq (off, -1))
664 	    off = 0;
665 	  off += vro->off;
666 	}
667       else
668 	{
669 	  if (maybe_ne (off, -1)
670 	      && maybe_ne (off, 0))
671 	    hstate.add_poly_int (off);
672 	  off = -1;
673 	  if (deref
674 	      && vro->opcode == ADDR_EXPR)
675 	    {
676 	      if (vro->op0)
677 		{
678 		  tree op = TREE_OPERAND (vro->op0, 0);
679 		  hstate.add_int (TREE_CODE (op));
680 		  inchash::add_expr (op, hstate);
681 		}
682 	    }
683 	  else
684 	    vn_reference_op_compute_hash (vro, hstate);
685 	}
686     }
687   result = hstate.end ();
688   /* ??? We would ICE later if we hash instead of adding that in. */
689   if (vr1->vuse)
690     result += SSA_NAME_VERSION (vr1->vuse);
691 
692   return result;
693 }
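
/* Note that runs of operands with known constant offsets are only hashed
   via their accumulated byte offset and that for a dereference of an
   ADDR_EXPR the underlying base object is hashed instead of the address
   expression itself.  For instance the operand vectors for a.b.c and for
   MEM[&a + 8] (with 8 being the combined offset of .b.c) get the same
   hash value, matching the offset-based comparison done by
   vn_reference_eq below.  */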
694 
695 /* Return true if reference operations VR1 and VR2 are equivalent.  This
696    means they have the same set of operands and vuses.  */
697 
698 bool
699 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
700 {
701   unsigned i, j;
702 
703   /* Early out if this is not a hash collision.  */
704   if (vr1->hashcode != vr2->hashcode)
705     return false;
706 
707   /* The VOP needs to be the same.  */
708   if (vr1->vuse != vr2->vuse)
709     return false;
710 
711   /* If the operands are the same we are done.  */
712   if (vr1->operands == vr2->operands)
713     return true;
714 
715   if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
716     return false;
717 
718   if (INTEGRAL_TYPE_P (vr1->type)
719       && INTEGRAL_TYPE_P (vr2->type))
720     {
721       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
722 	return false;
723     }
724   else if (INTEGRAL_TYPE_P (vr1->type)
725 	   && (TYPE_PRECISION (vr1->type)
726 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
727     return false;
728   else if (INTEGRAL_TYPE_P (vr2->type)
729 	   && (TYPE_PRECISION (vr2->type)
730 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
731     return false;
732 
733   i = 0;
734   j = 0;
735   do
736     {
737       poly_int64 off1 = 0, off2 = 0;
738       vn_reference_op_t vro1, vro2;
739       vn_reference_op_s tem1, tem2;
740       bool deref1 = false, deref2 = false;
741       for (; vr1->operands.iterate (i, &vro1); i++)
742 	{
743 	  if (vro1->opcode == MEM_REF)
744 	    deref1 = true;
745 	  /* Do not look through a storage order barrier.  */
746 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
747 	    return false;
748 	  if (known_eq (vro1->off, -1))
749 	    break;
750 	  off1 += vro1->off;
751 	}
752       for (; vr2->operands.iterate (j, &vro2); j++)
753 	{
754 	  if (vro2->opcode == MEM_REF)
755 	    deref2 = true;
756 	  /* Do not look through a storage order barrier.  */
757 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
758 	    return false;
759 	  if (known_eq (vro2->off, -1))
760 	    break;
761 	  off2 += vro2->off;
762 	}
763       if (maybe_ne (off1, off2))
764 	return false;
765       if (deref1 && vro1->opcode == ADDR_EXPR)
766 	{
767 	  memset (&tem1, 0, sizeof (tem1));
768 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
769 	  tem1.type = TREE_TYPE (tem1.op0);
770 	  tem1.opcode = TREE_CODE (tem1.op0);
771 	  vro1 = &tem1;
772 	  deref1 = false;
773 	}
774       if (deref2 && vro2->opcode == ADDR_EXPR)
775 	{
776 	  memset (&tem2, 0, sizeof (tem2));
777 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
778 	  tem2.type = TREE_TYPE (tem2.op0);
779 	  tem2.opcode = TREE_CODE (tem2.op0);
780 	  vro2 = &tem2;
781 	  deref2 = false;
782 	}
783       if (deref1 != deref2)
784 	return false;
785       if (!vn_reference_op_eq (vro1, vro2))
786 	return false;
787       ++j;
788       ++i;
789     }
790   while (vr1->operands.length () != i
791 	 || vr2->operands.length () != j);
792 
793   return true;
794 }
795 
796 /* Copy the operations present in load/store REF into RESULT, a vector of
797    vn_reference_op_s's.  */
798 
799 static void
800 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
801 {
802   /* For non-calls, store the information that makes up the address.  */
803   tree orig = ref;
804   while (ref)
805     {
806       vn_reference_op_s temp;
807 
808       memset (&temp, 0, sizeof (temp));
809       temp.type = TREE_TYPE (ref);
810       temp.opcode = TREE_CODE (ref);
811       temp.off = -1;
812 
813       switch (temp.opcode)
814 	{
815 	case MODIFY_EXPR:
816 	  temp.op0 = TREE_OPERAND (ref, 1);
817 	  break;
818 	case WITH_SIZE_EXPR:
819 	  temp.op0 = TREE_OPERAND (ref, 1);
820 	  temp.off = 0;
821 	  break;
822 	case MEM_REF:
823 	  /* The base address gets its own vn_reference_op_s structure.  */
824 	  temp.op0 = TREE_OPERAND (ref, 1);
825 	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
826 	    temp.off = -1;
827 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
828 	  temp.base = MR_DEPENDENCE_BASE (ref);
829 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
830 	  break;
831 	case TARGET_MEM_REF:
832 	  /* The base address gets its own vn_reference_op_s structure.  */
833 	  temp.op0 = TMR_INDEX (ref);
834 	  temp.op1 = TMR_STEP (ref);
835 	  temp.op2 = TMR_OFFSET (ref);
836 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
837 	  temp.base = MR_DEPENDENCE_BASE (ref);
838 	  result->safe_push (temp);
839 	  memset (&temp, 0, sizeof (temp));
840 	  temp.type = NULL_TREE;
841 	  temp.opcode = ERROR_MARK;
842 	  temp.op0 = TMR_INDEX2 (ref);
843 	  temp.off = -1;
844 	  break;
845 	case BIT_FIELD_REF:
846 	  /* Record bits, position and storage order.  */
847 	  temp.op0 = TREE_OPERAND (ref, 1);
848 	  temp.op1 = TREE_OPERAND (ref, 2);
849 	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
850 	    temp.off = -1;
851 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
852 	  break;
853 	case COMPONENT_REF:
854 	  /* The field decl is enough to unambiguously specify the field,
855 	     a matching type is not necessary and a mismatching type
856 	     is always a spurious difference.  */
857 	  temp.type = NULL_TREE;
858 	  temp.op0 = TREE_OPERAND (ref, 1);
859 	  temp.op1 = TREE_OPERAND (ref, 2);
860 	  {
861 	    tree this_offset = component_ref_field_offset (ref);
862 	    if (this_offset
863 		&& poly_int_tree_p (this_offset))
864 	      {
865 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
866 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
867 		  {
868 		    poly_offset_int off
869 		      = (wi::to_poly_offset (this_offset)
870 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
871 		    /* Prohibit value-numbering zero offset components
872 		       of addresses the same before the pass folding
873 		       __builtin_object_size had a chance to run
874 		       (checking cfun->after_inlining does the
875 		       trick here).  */
876 		    if (TREE_CODE (orig) != ADDR_EXPR
877 			|| maybe_ne (off, 0)
878 			|| cfun->after_inlining)
879 		      off.to_shwi (&temp.off);
880 		  }
881 	      }
882 	  }
883 	  break;
884 	case ARRAY_RANGE_REF:
885 	case ARRAY_REF:
886 	  {
887 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
888 	    /* Record index as operand.  */
889 	    temp.op0 = TREE_OPERAND (ref, 1);
890 	    /* Always record lower bounds and element size.  */
891 	    temp.op1 = array_ref_low_bound (ref);
892 	    /* But record element size in units of the type alignment.  */
893 	    temp.op2 = TREE_OPERAND (ref, 3);
894 	    temp.align = eltype->type_common.align;
895 	    if (! temp.op2)
896 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
897 				     size_int (TYPE_ALIGN_UNIT (eltype)));
898 	    if (poly_int_tree_p (temp.op0)
899 		&& poly_int_tree_p (temp.op1)
900 		&& TREE_CODE (temp.op2) == INTEGER_CST)
901 	      {
902 		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
903 					- wi::to_poly_offset (temp.op1))
904 				       * wi::to_offset (temp.op2)
905 				       * vn_ref_op_align_unit (&temp));
906 		off.to_shwi (&temp.off);
907 	      }
908 	  }
909 	  break;
910 	case VAR_DECL:
911 	  if (DECL_HARD_REGISTER (ref))
912 	    {
913 	      temp.op0 = ref;
914 	      break;
915 	    }
916 	  /* Fallthru.  */
917 	case PARM_DECL:
918 	case CONST_DECL:
919 	case RESULT_DECL:
920 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
921 	     when valueizing MEM[ptr] with ptr = &decl.  */
922 	  temp.opcode = MEM_REF;
923 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
924 	  temp.off = 0;
925 	  result->safe_push (temp);
926 	  temp.opcode = ADDR_EXPR;
927 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
928 	  temp.type = TREE_TYPE (temp.op0);
929 	  temp.off = -1;
930 	  break;
931 	case STRING_CST:
932 	case INTEGER_CST:
933 	case POLY_INT_CST:
934 	case COMPLEX_CST:
935 	case VECTOR_CST:
936 	case REAL_CST:
937 	case FIXED_CST:
938 	case CONSTRUCTOR:
939 	case SSA_NAME:
940 	  temp.op0 = ref;
941 	  break;
942 	case ADDR_EXPR:
943 	  if (is_gimple_min_invariant (ref))
944 	    {
945 	      temp.op0 = ref;
946 	      break;
947 	    }
948 	  break;
949 	  /* These are only interesting for their operands, their
950 	     existence, and their type.  They will never be the last
951 	     ref in the chain of references (i.e. they require an
952 	     operand), so we don't have to put anything
953 	     for op* as it will be handled by the iteration.  */
954 	case REALPART_EXPR:
955 	  temp.off = 0;
956 	  break;
957 	case VIEW_CONVERT_EXPR:
958 	  temp.off = 0;
959 	  temp.reverse = storage_order_barrier_p (ref);
960 	  break;
961 	case IMAGPART_EXPR:
962 	  /* This is only interesting for its constant offset.  */
963 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
964 	  break;
965 	default:
966 	  gcc_unreachable ();
967 	}
968       result->safe_push (temp);
969 
970       if (REFERENCE_CLASS_P (ref)
971 	  || TREE_CODE (ref) == MODIFY_EXPR
972 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
973 	  || (TREE_CODE (ref) == ADDR_EXPR
974 	      && !is_gimple_min_invariant (ref)))
975 	ref = TREE_OPERAND (ref, 0);
976       else
977 	ref = NULL_TREE;
978     }
979 }
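
/* For example, for the reference a.f, where a is a VAR_DECL, the vector
   built above contains, from outermost to innermost:

     { COMPONENT_REF, op0 = f,  off = byte offset of f (when constant) }
     { MEM_REF,       op0 = 0,  off = 0 }
     { ADDR_EXPR,     op0 = &a, off = -1 }

   i.e. bare decls are canonicalized to MEM[&decl] so the access compares
   equal to the same one done through a pointer known to have value &a.  */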
980 
981 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
982    operands in OPS, the reference alias set SET, the base alias set BASE_SET
983    and the reference type TYPE.  Return true if something useful was produced.  */
984 
985 bool
986 ao_ref_init_from_vn_reference (ao_ref *ref,
987 			       alias_set_type set, alias_set_type base_set,
988 			       tree type, vec<vn_reference_op_s> ops)
989 {
990   vn_reference_op_t op;
991   unsigned i;
992   tree base = NULL_TREE;
993   tree *op0_p = &base;
994   poly_offset_int offset = 0;
995   poly_offset_int max_size;
996   poly_offset_int size = -1;
997   tree size_tree = NULL_TREE;
998 
999   machine_mode mode = TYPE_MODE (type);
1000   if (mode == BLKmode)
1001     size_tree = TYPE_SIZE (type);
1002   else
1003     size = GET_MODE_BITSIZE (mode);
1004   if (size_tree != NULL_TREE
1005       && poly_int_tree_p (size_tree))
1006     size = wi::to_poly_offset (size_tree);
1007 
1008   /* Lower the final access size from the outermost expression.  */
1009   op = &ops[0];
1010   size_tree = NULL_TREE;
1011   if (op->opcode == COMPONENT_REF)
1012     size_tree = DECL_SIZE (op->op0);
1013   else if (op->opcode == BIT_FIELD_REF)
1014     size_tree = op->op0;
1015   if (size_tree != NULL_TREE
1016       && poly_int_tree_p (size_tree)
1017       && (!known_size_p (size)
1018 	  || known_lt (wi::to_poly_offset (size_tree), size)))
1019     size = wi::to_poly_offset (size_tree);
1020 
1021   /* Initially, maxsize is the same as the accessed element size.
1022      In the following it will only grow (or become -1).  */
1023   max_size = size;
1024 
1025   /* Compute cumulative bit-offset for nested component-refs and array-refs,
1026      and find the ultimate containing object.  */
1027   FOR_EACH_VEC_ELT (ops, i, op)
1028     {
1029       switch (op->opcode)
1030 	{
1031 	/* These may be in the reference ops, but we cannot do anything
1032 	   sensible with them here.  */
1033 	case ADDR_EXPR:
1034 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
1035 	  if (base != NULL_TREE
1036 	      && TREE_CODE (base) == MEM_REF
1037 	      && op->op0
1038 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
1039 	    {
1040 	      vn_reference_op_t pop = &ops[i-1];
1041 	      base = TREE_OPERAND (op->op0, 0);
1042 	      if (known_eq (pop->off, -1))
1043 		{
1044 		  max_size = -1;
1045 		  offset = 0;
1046 		}
1047 	      else
1048 		offset += pop->off * BITS_PER_UNIT;
1049 	      op0_p = NULL;
1050 	      break;
1051 	    }
1052 	  /* Fallthru.  */
1053 	case CALL_EXPR:
1054 	  return false;
1055 
1056 	/* Record the base objects.  */
1057 	case MEM_REF:
1058 	  *op0_p = build2 (MEM_REF, op->type,
1059 			   NULL_TREE, op->op0);
1060 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1061 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
1062 	  op0_p = &TREE_OPERAND (*op0_p, 0);
1063 	  break;
1064 
1065 	case VAR_DECL:
1066 	case PARM_DECL:
1067 	case RESULT_DECL:
1068 	case SSA_NAME:
1069 	  *op0_p = op->op0;
1070 	  op0_p = NULL;
1071 	  break;
1072 
1073 	/* And now the usual component-reference style ops.  */
1074 	case BIT_FIELD_REF:
1075 	  offset += wi::to_poly_offset (op->op1);
1076 	  break;
1077 
1078 	case COMPONENT_REF:
1079 	  {
1080 	    tree field = op->op0;
1081 	    /* We do not have a complete COMPONENT_REF tree here so we
1082 	       cannot use component_ref_field_offset.  Do the interesting
1083 	       parts manually.  */
1084 	    tree this_offset = DECL_FIELD_OFFSET (field);
1085 
1086 	    if (op->op1 || !poly_int_tree_p (this_offset))
1087 	      max_size = -1;
1088 	    else
1089 	      {
1090 		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1091 					   << LOG2_BITS_PER_UNIT);
1092 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1093 		offset += woffset;
1094 	      }
1095 	    break;
1096 	  }
1097 
1098 	case ARRAY_RANGE_REF:
1099 	case ARRAY_REF:
1100 	  /* We recorded the lower bound and the element size.  */
1101 	  if (!poly_int_tree_p (op->op0)
1102 	      || !poly_int_tree_p (op->op1)
1103 	      || TREE_CODE (op->op2) != INTEGER_CST)
1104 	    max_size = -1;
1105 	  else
1106 	    {
1107 	      poly_offset_int woffset
1108 		= wi::sext (wi::to_poly_offset (op->op0)
1109 			    - wi::to_poly_offset (op->op1),
1110 			    TYPE_PRECISION (sizetype));
1111 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1112 	      woffset <<= LOG2_BITS_PER_UNIT;
1113 	      offset += woffset;
1114 	    }
1115 	  break;
1116 
1117 	case REALPART_EXPR:
1118 	  break;
1119 
1120 	case IMAGPART_EXPR:
1121 	  offset += size;
1122 	  break;
1123 
1124 	case VIEW_CONVERT_EXPR:
1125 	  break;
1126 
1127 	case STRING_CST:
1128 	case INTEGER_CST:
1129 	case COMPLEX_CST:
1130 	case VECTOR_CST:
1131 	case REAL_CST:
1132 	case CONSTRUCTOR:
1133 	case CONST_DECL:
1134 	  return false;
1135 
1136 	default:
1137 	  return false;
1138 	}
1139     }
1140 
1141   if (base == NULL_TREE)
1142     return false;
1143 
1144   ref->ref = NULL_TREE;
1145   ref->base = base;
1146   ref->ref_alias_set = set;
1147   ref->base_alias_set = base_set;
1148   /* We discount volatiles from value-numbering elsewhere.  */
1149   ref->volatile_p = false;
1150 
1151   if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1152     {
1153       ref->offset = 0;
1154       ref->size = -1;
1155       ref->max_size = -1;
1156       return true;
1157     }
1158 
1159   if (!offset.to_shwi (&ref->offset))
1160     {
1161       ref->offset = 0;
1162       ref->max_size = -1;
1163       return true;
1164     }
1165 
1166   if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1167     ref->max_size = -1;
1168 
1169   return true;
1170 }
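
/* For instance, for the operand vector of MEM[&a + 8].f, with f a 32-bit
   field at byte offset 4, this computes an ao_ref with base a, offset
   96 bits, and size and max_size of 32 bits, matching what ao_ref_init
   would compute for the original reference tree.  */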
1171 
1172 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1173    vn_reference_op_s's.  */
1174 
1175 static void
1176 copy_reference_ops_from_call (gcall *call,
1177 			      vec<vn_reference_op_s> *result)
1178 {
1179   vn_reference_op_s temp;
1180   unsigned i;
1181   tree lhs = gimple_call_lhs (call);
1182   int lr;
1183 
1184   /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1185      different.  By adding the lhs here in the vector, we ensure that the
1186      hashcode is different, guaranteeing a different value number.  */
1187   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1188     {
1189       memset (&temp, 0, sizeof (temp));
1190       temp.opcode = MODIFY_EXPR;
1191       temp.type = TREE_TYPE (lhs);
1192       temp.op0 = lhs;
1193       temp.off = -1;
1194       result->safe_push (temp);
1195     }
1196 
1197   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1198   memset (&temp, 0, sizeof (temp));
1199   temp.type = gimple_call_fntype (call);
1200   temp.opcode = CALL_EXPR;
1201   temp.op0 = gimple_call_fn (call);
1202   temp.op1 = gimple_call_chain (call);
1203   if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1204     temp.op2 = size_int (lr);
1205   temp.off = -1;
1206   result->safe_push (temp);
1207 
1208   /* Copy the call arguments.  As they can be references as well,
1209      just chain them together.  */
1210   for (i = 0; i < gimple_call_num_args (call); ++i)
1211     {
1212       tree callarg = gimple_call_arg (call, i);
1213       copy_reference_ops_from_ref (callarg, result);
1214     }
1215 }
1216 
1217 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1218    *I_P to point to the last element of the replacement.  */
1219 static bool
1220 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1221 			    unsigned int *i_p)
1222 {
1223   unsigned int i = *i_p;
1224   vn_reference_op_t op = &(*ops)[i];
1225   vn_reference_op_t mem_op = &(*ops)[i - 1];
1226   tree addr_base;
1227   poly_int64 addr_offset = 0;
1228 
1229   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1230      from .foo.bar to the preceding MEM_REF offset and replace the
1231      address with &OBJ.  */
1232   addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1233 					     &addr_offset);
1234   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1235   if (addr_base != TREE_OPERAND (op->op0, 0))
1236     {
1237       poly_offset_int off
1238 	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1239 				  SIGNED)
1240 	   + addr_offset);
1241       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1242       op->op0 = build_fold_addr_expr (addr_base);
1243       if (tree_fits_shwi_p (mem_op->op0))
1244 	mem_op->off = tree_to_shwi (mem_op->op0);
1245       else
1246 	mem_op->off = -1;
1247       return true;
1248     }
1249   return false;
1250 }
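
/* For instance, with s.f at byte offset 4, the operand pair

     { MEM_REF, op0 = 8 }  { ADDR_EXPR, op0 = &s.f }

   is rewritten by the function above into

     { MEM_REF, op0 = 12 }  { ADDR_EXPR, op0 = &s }

   which denotes the same address but exposes the base object s.  */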
1251 
1252 /* Forward-propagate the address computation feeding the SSA name at position
1253    *I_P in vector *OPS into the preceding MEM_REF offset, updating *I_P.  */
1254 static bool
1255 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1256 				     unsigned int *i_p)
1257 {
1258   bool changed = false;
1259   vn_reference_op_t op;
1260 
1261   do
1262     {
1263       unsigned int i = *i_p;
1264       op = &(*ops)[i];
1265       vn_reference_op_t mem_op = &(*ops)[i - 1];
1266       gimple *def_stmt;
1267       enum tree_code code;
1268       poly_offset_int off;
1269 
1270       def_stmt = SSA_NAME_DEF_STMT (op->op0);
1271       if (!is_gimple_assign (def_stmt))
1272 	return changed;
1273 
1274       code = gimple_assign_rhs_code (def_stmt);
1275       if (code != ADDR_EXPR
1276 	  && code != POINTER_PLUS_EXPR)
1277 	return changed;
1278 
1279       off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1280 
1281       /* The only thing we have to do is from &OBJ.foo.bar add the offset
1282 	 from .foo.bar to the preceding MEM_REF offset and replace the
1283 	 address with &OBJ.  */
1284       if (code == ADDR_EXPR)
1285 	{
1286 	  tree addr, addr_base;
1287 	  poly_int64 addr_offset;
1288 
1289 	  addr = gimple_assign_rhs1 (def_stmt);
1290 	  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1291 						     &addr_offset);
1292 	  /* If that didn't work because the address isn't invariant propagate
1293 	     the reference tree from the address operation in case the current
1294 	     dereference isn't offsetted.  */
1295 	  if (!addr_base
1296 	      && *i_p == ops->length () - 1
1297 	      && known_eq (off, 0)
1298 	      /* This makes us disable this transform for PRE where the
1299 		 reference ops might be also used for code insertion which
1300 		 is invalid.  */
1301 	      && default_vn_walk_kind == VN_WALKREWRITE)
1302 	    {
1303 	      auto_vec<vn_reference_op_s, 32> tem;
1304 	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1305 	      /* Make sure to preserve TBAA info.  The only objects not
1306 		 wrapped in MEM_REFs that can have their address taken are
1307 		 STRING_CSTs.  */
1308 	      if (tem.length () >= 2
1309 		  && tem[tem.length () - 2].opcode == MEM_REF)
1310 		{
1311 		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1312 		  new_mem_op->op0
1313 		      = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1314 					  wi::to_poly_wide (new_mem_op->op0));
1315 		}
1316 	      else
1317 		gcc_assert (tem.last ().opcode == STRING_CST);
1318 	      ops->pop ();
1319 	      ops->pop ();
1320 	      ops->safe_splice (tem);
1321 	      --*i_p;
1322 	      return true;
1323 	    }
1324 	  if (!addr_base
1325 	      || TREE_CODE (addr_base) != MEM_REF
1326 	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1327 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1328 								    0))))
1329 	    return changed;
1330 
1331 	  off += addr_offset;
1332 	  off += mem_ref_offset (addr_base);
1333 	  op->op0 = TREE_OPERAND (addr_base, 0);
1334 	}
1335       else
1336 	{
1337 	  tree ptr, ptroff;
1338 	  ptr = gimple_assign_rhs1 (def_stmt);
1339 	  ptroff = gimple_assign_rhs2 (def_stmt);
1340 	  if (TREE_CODE (ptr) != SSA_NAME
1341 	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1342 	      /* Make sure to not endlessly recurse.
1343 		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
1344 		 happen when we value-number a PHI to its backedge value.  */
1345 	      || SSA_VAL (ptr) == op->op0
1346 	      || !poly_int_tree_p (ptroff))
1347 	    return changed;
1348 
1349 	  off += wi::to_poly_offset (ptroff);
1350 	  op->op0 = ptr;
1351 	}
1352 
1353       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1354       if (tree_fits_shwi_p (mem_op->op0))
1355 	mem_op->off = tree_to_shwi (mem_op->op0);
1356       else
1357 	mem_op->off = -1;
1358       /* ???  Can end up with endless recursion here!?
1359 	 gcc.c-torture/execute/strcmp-1.c  */
1360       if (TREE_CODE (op->op0) == SSA_NAME)
1361 	op->op0 = SSA_VAL (op->op0);
1362       if (TREE_CODE (op->op0) != SSA_NAME)
1363 	op->opcode = TREE_CODE (op->op0);
1364 
1365       changed = true;
1366     }
1367   /* Tail-recurse.  */
1368   while (TREE_CODE (op->op0) == SSA_NAME);
1369 
1370   /* Fold a remaining *&.  */
1371   if (TREE_CODE (op->op0) == ADDR_EXPR)
1372     vn_reference_fold_indirect (ops, i_p);
1373 
1374   return changed;
1375 }
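
/* For instance, if q_2 is known to have the value &s, then for

     p_1 = q_2 + 16;
     ... MEM[p_1 + 4] ...

   the POINTER_PLUS_EXPR case above folds the constant 16 into the
   MEM_REF offset and substitutes the value of q_2, leaving the operands
   of MEM[&s + 20].  */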
1376 
1377 /* Optimize the reference REF to a constant if possible or return
1378    NULL_TREE if not.  */
1379 
1380 tree
1381 fully_constant_vn_reference_p (vn_reference_t ref)
1382 {
1383   vec<vn_reference_op_s> operands = ref->operands;
1384   vn_reference_op_t op;
1385 
1386   /* Try to simplify the translated expression if it is
1387      a call to a builtin function with at most two arguments.  */
1388   op = &operands[0];
1389   if (op->opcode == CALL_EXPR
1390       && TREE_CODE (op->op0) == ADDR_EXPR
1391       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1392       && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1393       && operands.length () >= 2
1394       && operands.length () <= 3)
1395     {
1396       vn_reference_op_t arg0, arg1 = NULL;
1397       bool anyconst = false;
1398       arg0 = &operands[1];
1399       if (operands.length () > 2)
1400 	arg1 = &operands[2];
1401       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1402 	  || (arg0->opcode == ADDR_EXPR
1403 	      && is_gimple_min_invariant (arg0->op0)))
1404 	anyconst = true;
1405       if (arg1
1406 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1407 	      || (arg1->opcode == ADDR_EXPR
1408 		  && is_gimple_min_invariant (arg1->op0))))
1409 	anyconst = true;
1410       if (anyconst)
1411 	{
1412 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1413 					 arg1 ? 2 : 1,
1414 					 arg0->op0,
1415 					 arg1 ? arg1->op0 : NULL);
1416 	  if (folded
1417 	      && TREE_CODE (folded) == NOP_EXPR)
1418 	    folded = TREE_OPERAND (folded, 0);
1419 	  if (folded
1420 	      && is_gimple_min_invariant (folded))
1421 	    return folded;
1422 	}
1423     }
1424 
1425   /* Simplify reads from constants or constant initializers.  */
1426   else if (BITS_PER_UNIT == 8
1427 	   && COMPLETE_TYPE_P (ref->type)
1428 	   && is_gimple_reg_type (ref->type))
1429     {
1430       poly_int64 off = 0;
1431       HOST_WIDE_INT size;
1432       if (INTEGRAL_TYPE_P (ref->type))
1433 	size = TYPE_PRECISION (ref->type);
1434       else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1435 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1436       else
1437 	return NULL_TREE;
1438       if (size % BITS_PER_UNIT != 0
1439 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1440 	return NULL_TREE;
1441       size /= BITS_PER_UNIT;
1442       unsigned i;
1443       for (i = 0; i < operands.length (); ++i)
1444 	{
1445 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1446 	    {
1447 	      ++i;
1448 	      break;
1449 	    }
1450 	  if (known_eq (operands[i].off, -1))
1451 	    return NULL_TREE;
1452 	  off += operands[i].off;
1453 	  if (operands[i].opcode == MEM_REF)
1454 	    {
1455 	      ++i;
1456 	      break;
1457 	    }
1458 	}
1459       vn_reference_op_t base = &operands[--i];
1460       tree ctor = error_mark_node;
1461       tree decl = NULL_TREE;
1462       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1463 	ctor = base->op0;
1464       else if (base->opcode == MEM_REF
1465 	       && base[1].opcode == ADDR_EXPR
1466 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1467 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1468 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1469 	{
1470 	  decl = TREE_OPERAND (base[1].op0, 0);
1471 	  if (TREE_CODE (decl) == STRING_CST)
1472 	    ctor = decl;
1473 	  else
1474 	    ctor = ctor_for_folding (decl);
1475 	}
1476       if (ctor == NULL_TREE)
1477 	return build_zero_cst (ref->type);
1478       else if (ctor != error_mark_node)
1479 	{
1480 	  HOST_WIDE_INT const_off;
1481 	  if (decl)
1482 	    {
1483 	      tree res = fold_ctor_reference (ref->type, ctor,
1484 					      off * BITS_PER_UNIT,
1485 					      size * BITS_PER_UNIT, decl);
1486 	      if (res)
1487 		{
1488 		  STRIP_USELESS_TYPE_CONVERSION (res);
1489 		  if (is_gimple_min_invariant (res))
1490 		    return res;
1491 		}
1492 	    }
1493 	  else if (off.is_constant (&const_off))
1494 	    {
1495 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1496 	      int len = native_encode_expr (ctor, buf, size, const_off);
1497 	      if (len > 0)
1498 		return native_interpret_expr (ref->type, buf, len);
1499 	    }
1500 	}
1501     }
1502 
1503   return NULL_TREE;
1504 }
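
/* For example, a 4-byte read at constant offset 8 from

     static const int tab[4] = { 1, 2, 3, 4 };

   i.e. tab[2], reaches a base decl with a usable constant initializer,
   so the code above folds it to 3 via fold_ctor_reference (or, for reads
   from bare constants like STRING_CSTs, via native_encode_expr and
   native_interpret_expr).  */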
1505 
1506 /* Return true if OPS contain a storage order barrier.  */
1507 
1508 static bool
1509 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1510 {
1511   vn_reference_op_t op;
1512   unsigned i;
1513 
1514   FOR_EACH_VEC_ELT (ops, i, op)
1515     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1516       return true;
1517 
1518   return false;
1519 }
1520 
1521 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1522    structures into their value numbers.  This is done in-place, and
1523    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1524    whether any operands were valueized.  */
1525 
1526 static vec<vn_reference_op_s>
1527 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1528 		 bool with_avail = false)
1529 {
1530   vn_reference_op_t vro;
1531   unsigned int i;
1532 
1533   *valueized_anything = false;
1534 
1535   FOR_EACH_VEC_ELT (orig, i, vro)
1536     {
1537       if (vro->opcode == SSA_NAME
1538 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1539 	{
1540 	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1541 	  if (tem != vro->op0)
1542 	    {
1543 	      *valueized_anything = true;
1544 	      vro->op0 = tem;
1545 	    }
1546 	  /* If it transforms from an SSA_NAME to a constant, update
1547 	     the opcode.  */
1548 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1549 	    vro->opcode = TREE_CODE (vro->op0);
1550 	}
1551       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1552 	{
1553 	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1554 	  if (tem != vro->op1)
1555 	    {
1556 	      *valueized_anything = true;
1557 	      vro->op1 = tem;
1558 	    }
1559 	}
1560       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1561 	{
1562 	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1563 	  if (tem != vro->op2)
1564 	    {
1565 	      *valueized_anything = true;
1566 	      vro->op2 = tem;
1567 	    }
1568 	}
1569       /* If it transforms from an SSA_NAME to an address, fold with
1570 	 a preceding indirect reference.  */
1571       if (i > 0
1572 	  && vro->op0
1573 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1574 	  && orig[i - 1].opcode == MEM_REF)
1575 	{
1576 	  if (vn_reference_fold_indirect (&orig, &i))
1577 	    *valueized_anything = true;
1578 	}
1579       else if (i > 0
1580 	       && vro->opcode == SSA_NAME
1581 	       && orig[i - 1].opcode == MEM_REF)
1582 	{
1583 	  if (vn_reference_maybe_forwprop_address (&orig, &i))
1584 	    *valueized_anything = true;
1585 	}
1586       /* If it transforms a non-constant ARRAY_REF into a constant
1587 	 one, adjust the constant offset.  */
1588       else if (vro->opcode == ARRAY_REF
1589 	       && known_eq (vro->off, -1)
1590 	       && poly_int_tree_p (vro->op0)
1591 	       && poly_int_tree_p (vro->op1)
1592 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1593 	{
1594 	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1595 				  - wi::to_poly_offset (vro->op1))
1596 				 * wi::to_offset (vro->op2)
1597 				 * vn_ref_op_align_unit (vro));
1598 	  off.to_shwi (&vro->off);
1599 	}
1600     }
1601 
1602   return orig;
1603 }
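
/* For example, an ARRAY_REF whose index i_1 value-numbers to the
   constant 3 gets its op0 replaced by 3 above; the last case then
   computes the now-constant byte offset of the element so that later
   offset-based matching and ao_ref construction can use it.  */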
1604 
1605 static vec<vn_reference_op_s>
1606 valueize_refs (vec<vn_reference_op_s> orig)
1607 {
1608   bool tem;
1609   return valueize_refs_1 (orig, &tem);
1610 }
1611 
1612 static vec<vn_reference_op_s> shared_lookup_references;
1613 
1614 /* Create a vector of vn_reference_op_s structures from REF, a
1615    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1616    this function.  *VALUEIZED_ANYTHING will specify whether any
1617    operands were valueized.  */
1618 
1619 static vec<vn_reference_op_s>
1620 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1621 {
1622   if (!ref)
1623     return vNULL;
1624   shared_lookup_references.truncate (0);
1625   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1626   shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1627 					      valueized_anything);
1628   return shared_lookup_references;
1629 }
1630 
1631 /* Create a vector of vn_reference_op_s structures from CALL, a
1632    call statement.  The vector is shared among all callers of
1633    this function.  */
1634 
1635 static vec<vn_reference_op_s>
1636 valueize_shared_reference_ops_from_call (gcall *call)
1637 {
1638   if (!call)
1639     return vNULL;
1640   shared_lookup_references.truncate (0);
1641   copy_reference_ops_from_call (call, &shared_lookup_references);
1642   shared_lookup_references = valueize_refs (shared_lookup_references);
1643   return shared_lookup_references;
1644 }
1645 
1646 /* Lookup a SCCVN reference operation VR in the current hash table.
1647    Returns the resulting value number if it exists in the hash table,
1648    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1649    vn_reference_t stored in the hashtable if something is found.  */
1650 
1651 static tree
1652 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1653 {
1654   vn_reference_s **slot;
1655   hashval_t hash;
1656 
1657   hash = vr->hashcode;
1658   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1659   if (slot)
1660     {
1661       if (vnresult)
1662 	*vnresult = (vn_reference_t)*slot;
1663       return ((vn_reference_t)*slot)->result;
1664     }
1665 
1666   return NULL_TREE;
1667 }
1668 
1669 
1670 /* Partial definition tracking support.  */
1671 
1672 struct pd_range
1673 {
1674   HOST_WIDE_INT offset;
1675   HOST_WIDE_INT size;
1676 };
1677 
1678 struct pd_data
1679 {
1680   tree rhs;
1681   HOST_WIDE_INT offset;
1682   HOST_WIDE_INT size;
1683 };
1684 
1685 /* Context for alias walking.  */
1686 
1687 struct vn_walk_cb_data
1688 {
1689   vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1690 		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1691     : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1692       mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1693       tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1694       first_base_set (-2), known_ranges (NULL)
1695   {
1696     if (!last_vuse_ptr)
1697       last_vuse_ptr = &last_vuse;
1698     ao_ref_init (&orig_ref, orig_ref_);
1699     if (mask)
1700       {
1701 	wide_int w = wi::to_wide (mask);
1702 	unsigned int pos = 0, prec = w.get_precision ();
1703 	pd_data pd;
1704 	pd.rhs = build_constructor (NULL_TREE, NULL);
1705 	/* When a bitwise AND with a constant is done on a memory load,
1706 	   we do not need all the bits to be defined or defined to
1707 	   constants; we do not care what is in the positions that
1708 	   correspond to zero bits in the mask.
1709 	   So push the ranges of those zero bits in the mask as artificial
1710 	   zero stores and let the partial-def handling code do the
1711 	   rest.  */
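	/* E.g. (illustration only):  for a little-endian 16-bit load
	   whose result is masked with 0xff00,
	     _1 = MEM <unsigned short> [p_2];
	     _3 = _1 & 0xff00;
	   the loop below pushes a single artificial zero store covering
	   bits [0, 8), so only the upper byte has to be provided by real
	   partial definitions.  */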
1712 	while (pos < prec)
1713 	  {
1714 	    int tz = wi::ctz (w);
1715 	    if (pos + tz > prec)
1716 	      tz = prec - pos;
1717 	    if (tz)
1718 	      {
1719 		if (BYTES_BIG_ENDIAN)
1720 		  pd.offset = prec - pos - tz;
1721 		else
1722 		  pd.offset = pos;
1723 		pd.size = tz;
1724 		void *r = push_partial_def (pd, 0, 0, 0, prec);
1725 		gcc_assert (r == NULL_TREE);
1726 	      }
1727 	    pos += tz;
1728 	    if (pos == prec)
1729 	      break;
1730 	    w = wi::lrshift (w, tz);
1731 	    tz = wi::ctz (wi::bit_not (w));
1732 	    if (pos + tz > prec)
1733 	      tz = prec - pos;
1734 	    pos += tz;
1735 	    w = wi::lrshift (w, tz);
1736 	  }
1737       }
1738   }
1739   ~vn_walk_cb_data ();
1740   void *finish (alias_set_type, alias_set_type, tree);
1741   void *push_partial_def (pd_data pd,
1742 			  alias_set_type, alias_set_type, HOST_WIDE_INT,
1743 			  HOST_WIDE_INT);
1744 
1745   vn_reference_t vr;
1746   ao_ref orig_ref;
1747   tree *last_vuse_ptr;
1748   tree last_vuse;
1749   tree mask;
1750   tree masked_result;
1751   vn_lookup_kind vn_walk_kind;
1752   bool tbaa_p;
1753   vec<vn_reference_op_s> saved_operands;
1754 
1755   /* The VDEFs of partial defs we come along.  */
1756   auto_vec<pd_data, 2> partial_defs;
1757   /* The first defs range to avoid splay tree setup in most cases.  */
1758   pd_range first_range;
1759   alias_set_type first_set;
1760   alias_set_type first_base_set;
1761   splay_tree known_ranges;
1762   obstack ranges_obstack;
1763 };
1764 
1765 vn_walk_cb_data::~vn_walk_cb_data ()
1766 {
1767   if (known_ranges)
1768     {
1769       splay_tree_delete (known_ranges);
1770       obstack_free (&ranges_obstack, NULL);
1771     }
1772   saved_operands.release ();
1773 }
1774 
1775 void *
1776 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1777 {
1778   if (first_set != -2)
1779     {
1780       set = first_set;
1781       base_set = first_base_set;
1782     }
1783   if (mask)
1784     {
1785       masked_result = val;
1786       return (void *) -1;
1787     }
1788   vec<vn_reference_op_s> &operands
1789     = saved_operands.exists () ? saved_operands : vr->operands;
1790   return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1791 						   vr->type, operands, val);
1792 }
1793 
1794 /* pd_range splay-tree helpers.  */
1795 
1796 static int
1797 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1798 {
1799   HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1800   HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1801   if (offset1 < offset2)
1802     return -1;
1803   else if (offset1 > offset2)
1804     return 1;
1805   return 0;
1806 }
1807 
1808 static void *
1809 pd_tree_alloc (int size, void *data_)
1810 {
1811   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1812   return obstack_alloc (&data->ranges_obstack, size);
1813 }
1814 
1815 static void
1816 pd_tree_dealloc (void *, void *)
1817 {
1818 }
1819 
1820 /* Push PD to the vector of partial definitions, returning a
1821    value once we are ready to combine things with SET, BASE_SET,
1822    OFFSETI and MAXSIZEI, NULL when we want to continue looking for
1823    partial defs, or (void *)-1 on failure.  */
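/* An illustrative (made-up) example:  for

     s.a = 1;     partial def covering bits [0, 32)
     s.b = 2;     partial def covering bits [32, 64)
     x = s;       64-bit read

   the alias walk pushes the two stores here in reverse program order;
   once their ranges cover the whole read they are native-encoded into a
   single buffer and interpreted as one constant value for x.  */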
1824 
1825 void *
1826 vn_walk_cb_data::push_partial_def (pd_data pd,
1827 				   alias_set_type set, alias_set_type base_set,
1828 				   HOST_WIDE_INT offseti,
1829 				   HOST_WIDE_INT maxsizei)
1830 {
1831   const HOST_WIDE_INT bufsize = 64;
1832   /* We're using a fixed buffer for encoding so fail early if the object
1833      we want to interpret is bigger.  */
1834   if (maxsizei > bufsize * BITS_PER_UNIT
1835       || CHAR_BIT != 8
1836       || BITS_PER_UNIT != 8
1837       /* Not prepared to handle PDP endian.  */
1838       || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1839     return (void *)-1;
1840 
1841   /* Turn too large constant stores into non-constant stores.  */
1842   if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1843     pd.rhs = error_mark_node;
1844 
1845   /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1846      most a partial byte before and/or after the region.  */
1847   if (!CONSTANT_CLASS_P (pd.rhs))
1848     {
1849       if (pd.offset < offseti)
1850 	{
1851 	  HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1852 	  gcc_assert (pd.size > o);
1853 	  pd.size -= o;
1854 	  pd.offset += o;
1855 	}
1856       if (pd.size > maxsizei)
1857 	pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1858     }
1859 
1860   pd.offset -= offseti;
1861 
1862   bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1863 			|| CONSTANT_CLASS_P (pd.rhs));
1864   if (partial_defs.is_empty ())
1865     {
1866       /* If we get a clobber upfront, fail.  */
1867       if (TREE_CLOBBER_P (pd.rhs))
1868 	return (void *)-1;
1869       if (!pd_constant_p)
1870 	return (void *)-1;
1871       partial_defs.safe_push (pd);
1872       first_range.offset = pd.offset;
1873       first_range.size = pd.size;
1874       first_set = set;
1875       first_base_set = base_set;
1876       last_vuse_ptr = NULL;
1877       /* Continue looking for partial defs.  */
1878       return NULL;
1879     }
1880 
1881   if (!known_ranges)
1882     {
1883       /* ???  Optimize the case where the 2nd partial def completes things.  */
1884       gcc_obstack_init (&ranges_obstack);
1885       known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1886 						    pd_tree_alloc,
1887 						    pd_tree_dealloc, this);
1888       splay_tree_insert (known_ranges,
1889 			 (splay_tree_key)&first_range.offset,
1890 			 (splay_tree_value)&first_range);
1891     }
1892 
1893   pd_range newr = { pd.offset, pd.size };
1894   splay_tree_node n;
1895   pd_range *r;
1896   /* Lookup the predecessor of offset + 1 and see if we need to merge.  */
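  /* E.g. (illustration only):  with an existing range [0, 32) and a new
     partial def covering [32, 64), the predecessor lookup for offset 33
     finds [0, 32); since the ranges touch (hence the size + 1 below) the
     existing range is widened to [0, 64).  */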
1897   HOST_WIDE_INT loffset = newr.offset + 1;
1898   if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1899       && ((r = (pd_range *)n->value), true)
1900       && ranges_known_overlap_p (r->offset, r->size + 1,
1901 				 newr.offset, newr.size))
1902     {
1903       /* Ignore partial defs already covered.  This also drops clobbers
1904          that are shadowed by already recorded defs on the floor.  */
1905       if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1906 	return NULL;
1907       r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1908     }
1909   else
1910     {
1911       /* newr.offset wasn't covered yet, insert the range.  */
1912       r = XOBNEW (&ranges_obstack, pd_range);
1913       *r = newr;
1914       splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1915 			 (splay_tree_value)r);
1916     }
1917   /* Merge r which now contains newr and is a member of the splay tree with
1918      adjacent overlapping ranges.  */
1919   pd_range *rafter;
1920   while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1921 	 && ((rafter = (pd_range *)n->value), true)
1922 	 && ranges_known_overlap_p (r->offset, r->size + 1,
1923 				    rafter->offset, rafter->size))
1924     {
1925       r->size = MAX (r->offset + r->size,
1926 		     rafter->offset + rafter->size) - r->offset;
1927       splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1928     }
1929   /* If we get a clobber, fail.  */
1930   if (TREE_CLOBBER_P (pd.rhs))
1931     return (void *)-1;
1932   /* Non-constants are OK as long as they are shadowed by a constant.  */
1933   if (!pd_constant_p)
1934     return (void *)-1;
1935   partial_defs.safe_push (pd);
1936 
1937   /* Now we have merged newr into the range tree.  Once the whole read,
1938      [0, maxsizei] in the offsets rebased by OFFSETI, is covered the tree
1939      contains exactly one node with the desired properties and it is 'r'.  */
1940   if (!known_subrange_p (0, maxsizei, r->offset, r->size))
1941     /* Continue looking for partial defs.  */
1942     return NULL;
1943 
1944   /* Now simply native encode all partial defs in reverse order.  */
1945   unsigned ndefs = partial_defs.length ();
1946   /* We support up to 512-bit values (for V8DFmode).  */
1947   unsigned char buffer[bufsize + 1];
1948   unsigned char this_buffer[bufsize + 1];
1949   int len;
1950 
1951   memset (buffer, 0, bufsize + 1);
1952   unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
1953   while (!partial_defs.is_empty ())
1954     {
1955       pd_data pd = partial_defs.pop ();
1956       unsigned int amnt;
1957       if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1958 	{
1959 	  /* Empty CONSTRUCTOR.  */
1960 	  if (pd.size >= needed_len * BITS_PER_UNIT)
1961 	    len = needed_len;
1962 	  else
1963 	    len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
1964 	  memset (this_buffer, 0, len);
1965 	}
1966       else
1967 	{
1968 	  len = native_encode_expr (pd.rhs, this_buffer, bufsize,
1969 				    MAX (0, -pd.offset) / BITS_PER_UNIT);
1970 	  if (len <= 0
1971 	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
1972 			- MAX (0, -pd.offset) / BITS_PER_UNIT))
1973 	    {
1974 	      if (dump_file && (dump_flags & TDF_DETAILS))
1975 		fprintf (dump_file, "Failed to encode %u "
1976 			 "partial definitions\n", ndefs);
1977 	      return (void *)-1;
1978 	    }
1979 	}
1980 
1981       unsigned char *p = buffer;
1982       HOST_WIDE_INT size = pd.size;
1983       if (pd.offset < 0)
1984 	size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
1985       this_buffer[len] = 0;
1986       if (BYTES_BIG_ENDIAN)
1987 	{
1988 	  /* LSB of this_buffer[len - 1] byte should be at
1989 	     pd.offset + pd.size - 1 bits in buffer.  */
1990 	  amnt = ((unsigned HOST_WIDE_INT) pd.offset
1991 		  + pd.size) % BITS_PER_UNIT;
1992 	  if (amnt)
1993 	    shift_bytes_in_array_right (this_buffer, len + 1, amnt);
1994 	  unsigned char *q = this_buffer;
1995 	  unsigned int off = 0;
1996 	  if (pd.offset >= 0)
1997 	    {
1998 	      unsigned int msk;
1999 	      off = pd.offset / BITS_PER_UNIT;
2000 	      gcc_assert (off < needed_len);
2001 	      p = buffer + off;
2002 	      if (size <= amnt)
2003 		{
2004 		  msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2005 		  *p = (*p & ~msk) | (this_buffer[len] & msk);
2006 		  size = 0;
2007 		}
2008 	      else
2009 		{
2010 		  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2011 		    q = (this_buffer + len
2012 			 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2013 			    / BITS_PER_UNIT));
2014 		  if (pd.offset % BITS_PER_UNIT)
2015 		    {
2016 		      msk = -1U << (BITS_PER_UNIT
2017 				    - (pd.offset % BITS_PER_UNIT));
2018 		      *p = (*p & msk) | (*q & ~msk);
2019 		      p++;
2020 		      q++;
2021 		      off++;
2022 		      size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2023 		      gcc_assert (size >= 0);
2024 		    }
2025 		}
2026 	    }
2027 	  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2028 	    {
2029 	      q = (this_buffer + len
2030 		   - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2031 		      / BITS_PER_UNIT));
2032 	      if (pd.offset % BITS_PER_UNIT)
2033 		{
2034 		  q++;
2035 		  size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2036 					   % BITS_PER_UNIT);
2037 		  gcc_assert (size >= 0);
2038 		}
2039 	    }
2040 	  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2041 	      > needed_len)
2042 	    size = (needed_len - off) * BITS_PER_UNIT;
2043 	  memcpy (p, q, size / BITS_PER_UNIT);
2044 	  if (size % BITS_PER_UNIT)
2045 	    {
2046 	      unsigned int msk
2047 		= -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2048 	      p += size / BITS_PER_UNIT;
2049 	      q += size / BITS_PER_UNIT;
2050 	      *p = (*q & msk) | (*p & ~msk);
2051 	    }
2052 	}
2053       else
2054 	{
2055 	  if (pd.offset >= 0)
2056 	    {
2057 	      /* LSB of this_buffer[0] byte should be at pd.offset bits
2058 		 in buffer.  */
2059 	      unsigned int msk;
2060 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2061 	      amnt = pd.offset % BITS_PER_UNIT;
2062 	      if (amnt)
2063 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2064 	      unsigned int off = pd.offset / BITS_PER_UNIT;
2065 	      gcc_assert (off < needed_len);
2066 	      size = MIN (size,
2067 			  (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2068 	      p = buffer + off;
2069 	      if (amnt + size < BITS_PER_UNIT)
2070 		{
2071 		  /* Low amnt bits come from *p, then size bits
2072 		     from this_buffer[0] and the remaining again from
2073 		     *p.  */
2074 		  msk = ((1 << size) - 1) << amnt;
2075 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2076 		  size = 0;
2077 		}
2078 	      else if (amnt)
2079 		{
2080 		  msk = -1U << amnt;
2081 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2082 		  p++;
2083 		  size -= (BITS_PER_UNIT - amnt);
2084 		}
2085 	    }
2086 	  else
2087 	    {
2088 	      amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2089 	      if (amnt)
2090 		size -= BITS_PER_UNIT - amnt;
2091 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2092 	      if (amnt)
2093 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2094 	    }
2095 	  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2096 	  p += size / BITS_PER_UNIT;
2097 	  if (size % BITS_PER_UNIT)
2098 	    {
2099 	      unsigned int msk = -1U << (size % BITS_PER_UNIT);
2100 	      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2101 		    & ~msk) | (*p & msk);
2102 	    }
2103 	}
2104     }
2105 
2106   tree type = vr->type;
2107   /* Make sure to interpret in a type that has a range covering the whole
2108      access size.  */
2109   if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2110     type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2111   tree val;
2112   if (BYTES_BIG_ENDIAN)
2113     {
2114       unsigned sz = needed_len;
2115       if (maxsizei % BITS_PER_UNIT)
2116 	shift_bytes_in_array_right (buffer, needed_len,
2117 				    BITS_PER_UNIT
2118 				    - (maxsizei % BITS_PER_UNIT));
2119       if (INTEGRAL_TYPE_P (type))
2120 	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2121       if (sz > needed_len)
2122 	{
2123 	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2124 	  val = native_interpret_expr (type, this_buffer, sz);
2125 	}
2126       else
2127 	val = native_interpret_expr (type, buffer, needed_len);
2128     }
2129   else
2130     val = native_interpret_expr (type, buffer, bufsize);
2131   /* If we chop off bits because the type's precision doesn't match the
2132      memory access size, this is OK when optimizing reads but not when
2133      called from the DSE code during elimination.  */
2134   if (val && type != vr->type)
2135     {
2136       if (! int_fits_type_p (val, vr->type))
2137 	val = NULL_TREE;
2138       else
2139 	val = fold_convert (vr->type, val);
2140     }
2141 
2142   if (val)
2143     {
2144       if (dump_file && (dump_flags & TDF_DETAILS))
2145 	fprintf (dump_file,
2146 		 "Successfully combined %u partial definitions\n", ndefs);
2147       /* We are using the alias-set of the first store we encounter which
2148 	 should be appropriate here.  */
2149       return finish (first_set, first_base_set, val);
2150     }
2151   else
2152     {
2153       if (dump_file && (dump_flags & TDF_DETAILS))
2154 	fprintf (dump_file,
2155 		 "Failed to interpret %u encoded partial definitions\n", ndefs);
2156       return (void *)-1;
2157     }
2158 }
2159 
2160 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
2161    with the current VUSE and performs the expression lookup.  */
2162 
2163 static void *
2164 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2165 {
2166   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2167   vn_reference_t vr = data->vr;
2168   vn_reference_s **slot;
2169   hashval_t hash;
2170 
2171   /* If we have partial definitions recorded we have to go through
2172      vn_reference_lookup_3.  */
2173   if (!data->partial_defs.is_empty ())
2174     return NULL;
2175 
2176   if (data->last_vuse_ptr)
2177     {
2178       *data->last_vuse_ptr = vuse;
2179       data->last_vuse = vuse;
2180     }
2181 
2182   /* Fixup vuse and hash.  */
2183   if (vr->vuse)
2184     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2185   vr->vuse = vuse_ssa_val (vuse);
2186   if (vr->vuse)
2187     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2188 
2189   hash = vr->hashcode;
2190   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2191   if (slot)
2192     {
2193       if ((*slot)->result && data->saved_operands.exists ())
2194 	return data->finish (vr->set, vr->base_set, (*slot)->result);
2195       return *slot;
2196     }
2197 
2198   return NULL;
2199 }
2200 
2201 /* Lookup an existing or insert a new vn_reference entry into the
2202    value table for the VUSE, SET, BASE_SET, TYPE, OPERANDS reference
2203    which has the value VALUE, either a constant or an SSA name.  */
2204 
2205 static vn_reference_t
2206 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2207 					  alias_set_type set,
2208 					  alias_set_type base_set,
2209 					  tree type,
2210 					  vec<vn_reference_op_s,
2211 					        va_heap> operands,
2212 					  tree value)
2213 {
2214   vn_reference_s vr1;
2215   vn_reference_t result;
2216   unsigned value_id;
2217   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2218   vr1.operands = operands;
2219   vr1.type = type;
2220   vr1.set = set;
2221   vr1.base_set = base_set;
2222   vr1.hashcode = vn_reference_compute_hash (&vr1);
2223   if (vn_reference_lookup_1 (&vr1, &result))
2224     return result;
2225   if (TREE_CODE (value) == SSA_NAME)
2226     value_id = VN_INFO (value)->value_id;
2227   else
2228     value_id = get_or_alloc_constant_value_id (value);
2229   return vn_reference_insert_pieces (vuse, set, base_set, type,
2230 				     operands.copy (), value, value_id);
2231 }
2232 
2233 /* Return a value-number for RCODE OPS... either by looking up an existing
2234    value-number for the simplified result or by inserting the operation if
2235    INSERT is true.  */
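/* For example (illustration only), a request for PLUS_EXPR <x_1, 0> is
   valueized and simplified to x_1 itself, so its value number is returned
   without inserting anything; a combination like PLUS_EXPR <x_1, y_2>
   that is not yet available gets a new SSA name and is entered into the
   valid table when INSERT is true.  */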
2236 
2237 static tree
2238 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2239 {
2240   tree result = NULL_TREE;
2241   /* We will be creating a value number for
2242        RCODE (OPS...).
2243      So first simplify and lookup this expression to see if it
2244      is already available.  */
2245   /* For simplification valueize.  */
2246   unsigned i;
2247   for (i = 0; i < res_op->num_ops; ++i)
2248     if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2249       {
2250 	tree tem = vn_valueize (res_op->ops[i]);
2251 	if (!tem)
2252 	  break;
2253 	res_op->ops[i] = tem;
2254       }
2255   /* If valueization of an operand fails (it is not available), skip
2256      simplification.  */
2257   bool res = false;
2258   if (i == res_op->num_ops)
2259     {
2260       mprts_hook = vn_lookup_simplify_result;
2261       res = res_op->resimplify (NULL, vn_valueize);
2262       mprts_hook = NULL;
2263     }
2264   gimple *new_stmt = NULL;
2265   if (res
2266       && gimple_simplified_result_is_gimple_val (res_op))
2267     {
2268       /* The expression is already available.  */
2269       result = res_op->ops[0];
2270       /* Valueize it, simplification returns something in AVAIL only.  */
2271       if (TREE_CODE (result) == SSA_NAME)
2272 	result = SSA_VAL (result);
2273     }
2274   else
2275     {
2276       tree val = vn_lookup_simplify_result (res_op);
2277       if (!val && insert)
2278 	{
2279 	  gimple_seq stmts = NULL;
2280 	  result = maybe_push_res_to_seq (res_op, &stmts);
2281 	  if (result)
2282 	    {
2283 	      gcc_assert (gimple_seq_singleton_p (stmts));
2284 	      new_stmt = gimple_seq_first_stmt (stmts);
2285 	    }
2286 	}
2287       else
2288 	/* The expression is already available.  */
2289 	result = val;
2290     }
2291   if (new_stmt)
2292     {
2293       /* The expression is not yet available, value-number lhs to
2294 	 the new SSA_NAME we created.  */
2295       /* Initialize value-number information properly.  */
2296       vn_ssa_aux_t result_info = VN_INFO (result);
2297       result_info->valnum = result;
2298       result_info->value_id = get_next_value_id ();
2299       result_info->visited = 1;
2300       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2301 					  new_stmt);
2302       result_info->needs_insertion = true;
2303       /* ???  PRE phi-translation inserts NARYs without corresponding
2304          SSA name result.  Re-use those but set their result according
2305 	 to the stmt we just built.  */
2306       vn_nary_op_t nary = NULL;
2307       vn_nary_op_lookup_stmt (new_stmt, &nary);
2308       if (nary)
2309 	{
2310 	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2311 	  nary->u.result = gimple_assign_lhs (new_stmt);
2312 	}
2313       /* As all "inserted" statements are singleton SCCs, insert
2314 	 to the valid table.  This is strictly needed to
2315 	 avoid re-generating new value SSA_NAMEs for the same
2316 	 expression during SCC iteration over and over (the
2317 	 optimistic table gets cleared after each iteration).
2318 	 We do not need to insert into the optimistic table, as
2319 	 lookups there will fall back to the valid table.  */
2320       else
2321 	{
2322 	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
2323 	  vn_nary_op_t vno1
2324 	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2325 	  vno1->value_id = result_info->value_id;
2326 	  vno1->length = length;
2327 	  vno1->predicated_values = 0;
2328 	  vno1->u.result = result;
2329 	  init_vn_nary_op_from_stmt (vno1, new_stmt);
2330 	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
2331 	  /* Also do not link it into the undo chain.  */
2332 	  last_inserted_nary = vno1->next;
2333 	  vno1->next = (vn_nary_op_t)(void *)-1;
2334 	}
2335       if (dump_file && (dump_flags & TDF_DETAILS))
2336 	{
2337 	  fprintf (dump_file, "Inserting name ");
2338 	  print_generic_expr (dump_file, result);
2339 	  fprintf (dump_file, " for expression ");
2340 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2341 	  fprintf (dump_file, "\n");
2342 	}
2343     }
2344   return result;
2345 }
2346 
2347 /* Return a value-number for RCODE OPS... either by looking up an existing
2348    value-number for the simplified result or by inserting the operation.  */
2349 
2350 static tree
2351 vn_nary_build_or_lookup (gimple_match_op *res_op)
2352 {
2353   return vn_nary_build_or_lookup_1 (res_op, true);
2354 }
2355 
2356 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2357    its value if present.  */
2358 
2359 tree
2360 vn_nary_simplify (vn_nary_op_t nary)
2361 {
2362   if (nary->length > gimple_match_op::MAX_NUM_OPS)
2363     return NULL_TREE;
2364   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2365 		      nary->type, nary->length);
2366   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2367   return vn_nary_build_or_lookup_1 (&op, false);
2368 }
2369 
2370 /* Elimination engine.  */
2371 
2372 class eliminate_dom_walker : public dom_walker
2373 {
2374 public:
2375   eliminate_dom_walker (cdi_direction, bitmap);
2376   ~eliminate_dom_walker ();
2377 
2378   virtual edge before_dom_children (basic_block);
2379   virtual void after_dom_children (basic_block);
2380 
2381   virtual tree eliminate_avail (basic_block, tree op);
2382   virtual void eliminate_push_avail (basic_block, tree op);
2383   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2384 
2385   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2386 
2387   unsigned eliminate_cleanup (bool region_p = false);
2388 
2389   bool do_pre;
2390   unsigned int el_todo;
2391   unsigned int eliminations;
2392   unsigned int insertions;
2393 
2394   /* SSA names that had their defs inserted by PRE if do_pre.  */
2395   bitmap inserted_exprs;
2396 
2397   /* Blocks with statements that have had their EH properties changed.  */
2398   bitmap need_eh_cleanup;
2399 
2400   /* Blocks with statements that have had their AB properties changed.  */
2401   bitmap need_ab_cleanup;
2402 
2403   /* Local state for the eliminate domwalk.  */
2404   auto_vec<gimple *> to_remove;
2405   auto_vec<gimple *> to_fixup;
2406   auto_vec<tree> avail;
2407   auto_vec<tree> avail_stack;
2408 };
2409 
2410 /* Adaptor to the elimination engine using RPO availability.  */
2411 
2412 class rpo_elim : public eliminate_dom_walker
2413 {
2414 public:
2415   rpo_elim (basic_block entry_)
2416     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2417       m_avail_freelist (NULL) {}
2418 
2419   virtual tree eliminate_avail (basic_block, tree op);
2420 
2421   virtual void eliminate_push_avail (basic_block, tree);
2422 
2423   basic_block entry;
2424   /* Freelist of avail entries which are allocated from the vn_ssa_aux
2425      obstack.  */
2426   vn_avail *m_avail_freelist;
2427 };
2428 
2429 /* Global RPO state for access from hooks.  */
2430 static rpo_elim *rpo_avail;
2431 basic_block vn_context_bb;
2432 
2433 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2434    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2435    Otherwise return false.  */
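/* E.g. (illustration only):  for bases MEM[p_1 + 4] and MEM[p_1 + 8] the
   constant offsets are folded into *OFFSET1 (+32 bits) and *OFFSET2
   (+64 bits) and the remaining bases compare equal as p_1.  */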
2436 
2437 static bool
2438 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2439 				       tree base2, poly_int64 *offset2)
2440 {
2441   poly_int64 soff;
2442   if (TREE_CODE (base1) == MEM_REF
2443       && TREE_CODE (base2) == MEM_REF)
2444     {
2445       if (mem_ref_offset (base1).to_shwi (&soff))
2446 	{
2447 	  base1 = TREE_OPERAND (base1, 0);
2448 	  *offset1 += soff * BITS_PER_UNIT;
2449 	}
2450       if (mem_ref_offset (base2).to_shwi (&soff))
2451 	{
2452 	  base2 = TREE_OPERAND (base2, 0);
2453 	  *offset2 += soff * BITS_PER_UNIT;
2454 	}
2455       return operand_equal_p (base1, base2, 0);
2456     }
2457   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2458 }
2459 
2460 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
2461    from the statement defining VUSE and if not successful tries to
2462    translate *REF and the reference in DATA_ through an aggregate
2463    copy at the definition of VUSE.  If *DISAMBIGUATE_ONLY disallows
2464    translation then only disambiguation is performed.  If only
2465    disambiguation was performed then *DISAMBIGUATE_ONLY is updated.  */
2466 
2467 static void *
2468 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2469 		       translate_flags *disambiguate_only)
2470 {
2471   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2472   vn_reference_t vr = data->vr;
2473   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2474   tree base = ao_ref_base (ref);
2475   HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2476   static vec<vn_reference_op_s> lhs_ops;
2477   ao_ref lhs_ref;
2478   bool lhs_ref_ok = false;
2479   poly_int64 copy_size;
2480 
2481   /* First try to disambiguate after value-replacing in the definitions LHS.  */
2482   if (is_gimple_assign (def_stmt))
2483     {
2484       tree lhs = gimple_assign_lhs (def_stmt);
2485       bool valueized_anything = false;
2486       /* Avoid re-allocation overhead.  */
2487       lhs_ops.truncate (0);
2488       basic_block saved_rpo_bb = vn_context_bb;
2489       vn_context_bb = gimple_bb (def_stmt);
2490       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2491 	{
2492 	  copy_reference_ops_from_ref (lhs, &lhs_ops);
2493 	  lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2494 	}
2495       vn_context_bb = saved_rpo_bb;
2496       ao_ref_init (&lhs_ref, lhs);
2497       lhs_ref_ok = true;
2498       if (valueized_anything
2499 	  && ao_ref_init_from_vn_reference
2500 	       (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2501 		ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2502 	  && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2503 	{
2504 	  *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2505 	  return NULL;
2506 	}
2507 
2508       /* Besides valueizing the LHS we can also use access-path based
2509          disambiguation on the original non-valueized ref.  */
2510       if (!ref->ref
2511 	  && lhs_ref_ok
2512 	  && data->orig_ref.ref)
2513 	{
2514 	  /* We want to use the non-valueized LHS for this, but avoid redundant
2515 	     work.  */
2516 	  ao_ref *lref = &lhs_ref;
2517 	  ao_ref lref_alt;
2518 	  if (valueized_anything)
2519 	    {
2520 	      ao_ref_init (&lref_alt, lhs);
2521 	      lref = &lref_alt;
2522 	    }
2523 	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2524 	    {
2525 	      *disambiguate_only = (valueized_anything
2526 				    ? TR_VALUEIZE_AND_DISAMBIGUATE
2527 				    : TR_DISAMBIGUATE);
2528 	      return NULL;
2529 	    }
2530 	}
2531 
2532       /* If we reach a clobbering statement try to skip it and see if
2533          we find a VN result with exactly the same value as the
2534 	 possible clobber.  In this case we can ignore the clobber
2535 	 and return the found value.  */
2536       if (is_gimple_reg_type (TREE_TYPE (lhs))
2537 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
2538 	  && ref->ref)
2539 	{
2540 	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2541 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
2542 	  data->last_vuse_ptr = NULL;
2543 	  tree saved_vuse = vr->vuse;
2544 	  hashval_t saved_hashcode = vr->hashcode;
2545 	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2546 	  /* Need to restore vr->vuse and vr->hashcode.  */
2547 	  vr->vuse = saved_vuse;
2548 	  vr->hashcode = saved_hashcode;
2549 	  data->last_vuse_ptr = saved_last_vuse_ptr;
2550 	  if (res && res != (void *)-1)
2551 	    {
2552 	      vn_reference_t vnresult = (vn_reference_t) res;
2553 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2554 	      if (TREE_CODE (rhs) == SSA_NAME)
2555 		rhs = SSA_VAL (rhs);
2556 	      if (vnresult->result
2557 		  && operand_equal_p (vnresult->result, rhs, 0)
2558 		  /* We have to honor our promise about union type punning
2559 		     and also support arbitrary overlaps with
2560 		     -fno-strict-aliasing.  So simply resort to alignment to
2561 		     rule out overlaps.  Do this check last because it is
2562 		     quite expensive compared to the hash-lookup above.  */
2563 		  && multiple_p (get_object_alignment (ref->ref), ref->size)
2564 		  && multiple_p (get_object_alignment (lhs), ref->size))
2565 		return res;
2566 	    }
2567 	}
2568     }
2569   else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2570 	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2571 	   && gimple_call_num_args (def_stmt) <= 4)
2572     {
2573       /* For builtin calls valueize their arguments and query the
2574          alias oracle again.  Valueization may improve the points-to
2575 	 info of pointers and constify size and position arguments.
2576 	 Originally this was motivated by PR61034, which has
2577 	 conditional calls to free falsely clobbering REF because
2578 	 of imprecise points-to info of the argument.  */
2579       tree oldargs[4];
2580       bool valueized_anything = false;
2581       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2582 	{
2583 	  oldargs[i] = gimple_call_arg (def_stmt, i);
2584 	  tree val = vn_valueize (oldargs[i]);
2585 	  if (val != oldargs[i])
2586 	    {
2587 	      gimple_call_set_arg (def_stmt, i, val);
2588 	      valueized_anything = true;
2589 	    }
2590 	}
2591       if (valueized_anything)
2592 	{
2593 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2594 					       ref);
2595 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2596 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
2597 	  if (!res)
2598 	    {
2599 	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2600 	      return NULL;
2601 	    }
2602 	}
2603     }
2604 
2605   if (*disambiguate_only > TR_TRANSLATE)
2606     return (void *)-1;
2607 
2608   /* If we cannot constrain the size of the reference we cannot
2609      test if anything kills it.  */
2610   if (!ref->max_size_known_p ())
2611     return (void *)-1;
2612 
2613   poly_int64 offset = ref->offset;
2614   poly_int64 maxsize = ref->max_size;
2615 
2616   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
2617      from that definition.
2618      1) Memset.  */
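  /* E.g. (illustration only):  after
       memset (&a, 0, sizeof (a));
     a read of a.f that is fully covered by the memset is resolved to zero
     below; a constant non-zero byte value can similarly be expanded into
     the bytes of an integral read.  */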
2619   if (is_gimple_reg_type (vr->type)
2620       && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2621 	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2622       && (integer_zerop (gimple_call_arg (def_stmt, 1))
2623 	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2624 	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2625 	      && CHAR_BIT == 8
2626 	      && BITS_PER_UNIT == 8
2627 	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2628 	      && offset.is_constant (&offseti)
2629 	      && ref->size.is_constant (&sizei)
2630 	      && (offseti % BITS_PER_UNIT == 0
2631 		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2632       && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2633 	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2634 	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2635       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2636 	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2637     {
2638       tree base2;
2639       poly_int64 offset2, size2, maxsize2;
2640       bool reverse;
2641       tree ref2 = gimple_call_arg (def_stmt, 0);
2642       if (TREE_CODE (ref2) == SSA_NAME)
2643 	{
2644 	  ref2 = SSA_VAL (ref2);
2645 	  if (TREE_CODE (ref2) == SSA_NAME
2646 	      && (TREE_CODE (base) != MEM_REF
2647 		  || TREE_OPERAND (base, 0) != ref2))
2648 	    {
2649 	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2650 	      if (gimple_assign_single_p (def_stmt)
2651 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2652 		ref2 = gimple_assign_rhs1 (def_stmt);
2653 	    }
2654 	}
2655       if (TREE_CODE (ref2) == ADDR_EXPR)
2656 	{
2657 	  ref2 = TREE_OPERAND (ref2, 0);
2658 	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2659 					   &reverse);
2660 	  if (!known_size_p (maxsize2)
2661 	      || !known_eq (maxsize2, size2)
2662 	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2663 	    return (void *)-1;
2664 	}
2665       else if (TREE_CODE (ref2) == SSA_NAME)
2666 	{
2667 	  poly_int64 soff;
2668 	  if (TREE_CODE (base) != MEM_REF
2669 	      || !(mem_ref_offset (base)
2670 		   << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2671 	    return (void *)-1;
2672 	  offset += soff;
2673 	  offset2 = 0;
2674 	  if (TREE_OPERAND (base, 0) != ref2)
2675 	    {
2676 	      gimple *def = SSA_NAME_DEF_STMT (ref2);
2677 	      if (is_gimple_assign (def)
2678 		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2679 		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2680 		  && poly_int_tree_p (gimple_assign_rhs2 (def)))
2681 		{
2682 		  tree rhs2 = gimple_assign_rhs2 (def);
2683 		  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2684 					       SIGNED)
2685 			<< LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2686 		    return (void *)-1;
2687 		  ref2 = gimple_assign_rhs1 (def);
2688 		  if (TREE_CODE (ref2) == SSA_NAME)
2689 		    ref2 = SSA_VAL (ref2);
2690 		}
2691 	      else
2692 		return (void *)-1;
2693 	    }
2694 	}
2695       else
2696 	return (void *)-1;
2697       tree len = gimple_call_arg (def_stmt, 2);
2698       HOST_WIDE_INT leni, offset2i;
2699       if (TREE_CODE (len) == SSA_NAME)
2700 	len = SSA_VAL (len);
2701       /* Sometimes the above trickery is smarter than alias analysis.  Take
2702          advantage of that.  */
2703       if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2704 				   (wi::to_poly_offset (len)
2705 				    << LOG2_BITS_PER_UNIT)))
2706 	return NULL;
2707       if (data->partial_defs.is_empty ()
2708 	  && known_subrange_p (offset, maxsize, offset2,
2709 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2710 	{
2711 	  tree val;
2712 	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2713 	    val = build_zero_cst (vr->type);
2714 	  else if (INTEGRAL_TYPE_P (vr->type)
2715 		   && known_eq (ref->size, 8)
2716 		   && offseti % BITS_PER_UNIT == 0)
2717 	    {
2718 	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2719 				      vr->type, gimple_call_arg (def_stmt, 1));
2720 	      val = vn_nary_build_or_lookup (&res_op);
2721 	      if (!val
2722 		  || (TREE_CODE (val) == SSA_NAME
2723 		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2724 		return (void *)-1;
2725 	    }
2726 	  else
2727 	    {
2728 	      unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2729 	      if (INTEGRAL_TYPE_P (vr->type))
2730 		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2731 	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2732 	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2733 		      buflen);
2734 	      if (BYTES_BIG_ENDIAN)
2735 		{
2736 		  unsigned int amnt
2737 		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
2738 		       % BITS_PER_UNIT);
2739 		  if (amnt)
2740 		    {
2741 		      shift_bytes_in_array_right (buf, buflen,
2742 						  BITS_PER_UNIT - amnt);
2743 		      buf++;
2744 		      buflen--;
2745 		    }
2746 		}
2747 	      else if (offseti % BITS_PER_UNIT != 0)
2748 		{
2749 		  unsigned int amnt
2750 		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2751 				       % BITS_PER_UNIT);
2752 		  shift_bytes_in_array_left (buf, buflen, amnt);
2753 		  buf++;
2754 		  buflen--;
2755 		}
2756 	      val = native_interpret_expr (vr->type, buf, buflen);
2757 	      if (!val)
2758 		return (void *)-1;
2759 	    }
2760 	  return data->finish (0, 0, val);
2761 	}
2762       /* For now handle clearing memory with partial defs.  */
2763       else if (known_eq (ref->size, maxsize)
2764 	       && integer_zerop (gimple_call_arg (def_stmt, 1))
2765 	       && tree_fits_poly_int64_p (len)
2766 	       && tree_to_poly_int64 (len).is_constant (&leni)
2767 	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2768 	       && offset.is_constant (&offseti)
2769 	       && offset2.is_constant (&offset2i)
2770 	       && maxsize.is_constant (&maxsizei)
2771 	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2772 					  leni << LOG2_BITS_PER_UNIT))
2773 	{
2774 	  pd_data pd;
2775 	  pd.rhs = build_constructor (NULL_TREE, NULL);
2776 	  pd.offset = offset2i;
2777 	  pd.size = leni << LOG2_BITS_PER_UNIT;
2778 	  return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2779 	}
2780     }
2781 
2782   /* 2) Assignment from an empty CONSTRUCTOR.  */
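  /* E.g. (illustration only):  for
       a = {};
       x = a.f;
     a read fully covered by the clearing yields zero; a partial overlap
     is handed to the partial-def tracker as a zeroing store.  */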
2783   else if (is_gimple_reg_type (vr->type)
2784 	   && gimple_assign_single_p (def_stmt)
2785 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2786 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2787     {
2788       tree base2;
2789       poly_int64 offset2, size2, maxsize2;
2790       HOST_WIDE_INT offset2i, size2i;
2791       gcc_assert (lhs_ref_ok);
2792       base2 = ao_ref_base (&lhs_ref);
2793       offset2 = lhs_ref.offset;
2794       size2 = lhs_ref.size;
2795       maxsize2 = lhs_ref.max_size;
2796       if (known_size_p (maxsize2)
2797 	  && known_eq (maxsize2, size2)
2798 	  && adjust_offsets_for_equal_base_address (base, &offset,
2799 						    base2, &offset2))
2800 	{
2801 	  if (data->partial_defs.is_empty ()
2802 	      && known_subrange_p (offset, maxsize, offset2, size2))
2803 	    {
2804 	      /* While a full read from a clobber is technically undefined
2805 	         behavior, do not optimize it.  */
2806 	      if (gimple_clobber_p (def_stmt))
2807 		return (void *)-1;
2808 	      tree val = build_zero_cst (vr->type);
2809 	      return data->finish (ao_ref_alias_set (&lhs_ref),
2810 				   ao_ref_base_alias_set (&lhs_ref), val);
2811 	    }
2812 	  else if (known_eq (ref->size, maxsize)
2813 		   && maxsize.is_constant (&maxsizei)
2814 		   && offset.is_constant (&offseti)
2815 		   && offset2.is_constant (&offset2i)
2816 		   && size2.is_constant (&size2i)
2817 		   && ranges_known_overlap_p (offseti, maxsizei,
2818 					      offset2i, size2i))
2819 	    {
2820 	      /* Let clobbers be consumed by the partial-def tracker
2821 	         which can choose to ignore them if they are shadowed
2822 		 by a later def.  */
2823 	      pd_data pd;
2824 	      pd.rhs = gimple_assign_rhs1 (def_stmt);
2825 	      pd.offset = offset2i;
2826 	      pd.size = size2i;
2827 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2828 					     ao_ref_base_alias_set (&lhs_ref),
2829 					     offseti, maxsizei);
2830 	    }
2831 	}
2832     }
2833 
2834   /* 3) Assignment from a constant.  We can use folds native encode/interpret
2835      routines to extract the assigned bits.  */
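  /* E.g. (illustration only):  for
       u.f = 1.0f;
       x = u.i;
     native_encode_expr writes the bytes of 1.0f and native_interpret_expr
     re-reads them as the 32-bit integer 0x3f800000, provided sizes and
     offsets are compile-time constants.  */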
2836   else if (known_eq (ref->size, maxsize)
2837 	   && is_gimple_reg_type (vr->type)
2838 	   && !contains_storage_order_barrier_p (vr->operands)
2839 	   && gimple_assign_single_p (def_stmt)
2840 	   && CHAR_BIT == 8
2841 	   && BITS_PER_UNIT == 8
2842 	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2843 	   /* native_encode_expr and native_interpret_expr operate on arrays
2844 	      of bytes and so fundamentally need a compile-time size and offset.  */
2845 	   && maxsize.is_constant (&maxsizei)
2846 	   && offset.is_constant (&offseti)
2847 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2848 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2849 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2850     {
2851       tree lhs = gimple_assign_lhs (def_stmt);
2852       tree base2;
2853       poly_int64 offset2, size2, maxsize2;
2854       HOST_WIDE_INT offset2i, size2i;
2855       bool reverse;
2856       gcc_assert (lhs_ref_ok);
2857       base2 = ao_ref_base (&lhs_ref);
2858       offset2 = lhs_ref.offset;
2859       size2 = lhs_ref.size;
2860       maxsize2 = lhs_ref.max_size;
2861       reverse = reverse_storage_order_for_component_p (lhs);
2862       if (base2
2863 	  && !reverse
2864 	  && !storage_order_barrier_p (lhs)
2865 	  && known_eq (maxsize2, size2)
2866 	  && adjust_offsets_for_equal_base_address (base, &offset,
2867 						    base2, &offset2)
2868 	  && offset.is_constant (&offseti)
2869 	  && offset2.is_constant (&offset2i)
2870 	  && size2.is_constant (&size2i))
2871 	{
2872 	  if (data->partial_defs.is_empty ()
2873 	      && known_subrange_p (offseti, maxsizei, offset2, size2))
2874 	    {
2875 	      /* We support up to 512-bit values (for V8DFmode).  */
2876 	      unsigned char buffer[65];
2877 	      int len;
2878 
2879 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2880 	      if (TREE_CODE (rhs) == SSA_NAME)
2881 		rhs = SSA_VAL (rhs);
2882 	      len = native_encode_expr (rhs,
2883 					buffer, sizeof (buffer) - 1,
2884 					(offseti - offset2i) / BITS_PER_UNIT);
2885 	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2886 		{
2887 		  tree type = vr->type;
2888 		  unsigned char *buf = buffer;
2889 		  unsigned int amnt = 0;
2890 		  /* Make sure to interpret in a type that has a range
2891 		     covering the whole access size.  */
2892 		  if (INTEGRAL_TYPE_P (vr->type)
2893 		      && maxsizei != TYPE_PRECISION (vr->type))
2894 		    type = build_nonstandard_integer_type (maxsizei,
2895 							   TYPE_UNSIGNED (type));
2896 		  if (BYTES_BIG_ENDIAN)
2897 		    {
2898 		      /* For big-endian native_encode_expr stored the rhs
2899 			 such that the LSB of it is the LSB of buffer[len - 1].
2900 			 That bit is stored into memory at position
2901 			 offset2 + size2 - 1, i.e. in byte
2902 			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2903 			 E.g. for offset2 1 and size2 14, rhs -1 and memory
2904 			 previously cleared that is:
2905 			 0        1
2906 			 01111111|11111110
2907 			 Now, if we want to extract offset 2 and size 12 from
2908 			 it using native_interpret_expr (which actually works
2909 			 for integral bitfield types in terms of byte size of
2910 			 the mode), the native_encode_expr stored the value
2911 			 into buffer as
2912 			 XX111111|11111111
2913 			 and returned len 2 (the X bits are outside of
2914 			 precision).
2915 			 Let sz be maxsize / BITS_PER_UNIT if not extracting
2916 			 a bitfield, and GET_MODE_SIZE otherwise.
2917 			 We need to align the LSB of the value we want to
2918 			 extract as the LSB of buf[sz - 1].
2919 			 The LSB from memory we need to read is at position
2920 			 offset + maxsize - 1.  */
2921 		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
2922 		      if (INTEGRAL_TYPE_P (type))
2923 			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2924 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2925 			      - offseti - maxsizei) % BITS_PER_UNIT;
2926 		      if (amnt)
2927 			shift_bytes_in_array_right (buffer, len, amnt);
2928 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2929 			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
2930 		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
2931 			len = 0;
2932 		      else
2933 			{
2934 			  buf = buffer + len - sz - amnt;
2935 			  len -= (buf - buffer);
2936 			}
2937 		    }
2938 		  else
2939 		    {
2940 		      amnt = ((unsigned HOST_WIDE_INT) offset2i
2941 			      - offseti) % BITS_PER_UNIT;
2942 		      if (amnt)
2943 			{
2944 			  buffer[len] = 0;
2945 			  shift_bytes_in_array_left (buffer, len + 1, amnt);
2946 			  buf = buffer + 1;
2947 			}
2948 		    }
2949 		  tree val = native_interpret_expr (type, buf, len);
2950 		  /* If we chop off bits because the type's precision doesn't
2951 		     match the memory access size, this is OK when optimizing
2952 		     reads but not when called from the DSE code during
2953 		     elimination.  */
2954 		  if (val
2955 		      && type != vr->type)
2956 		    {
2957 		      if (! int_fits_type_p (val, vr->type))
2958 			val = NULL_TREE;
2959 		      else
2960 			val = fold_convert (vr->type, val);
2961 		    }
2962 
2963 		  if (val)
2964 		    return data->finish (ao_ref_alias_set (&lhs_ref),
2965 					 ao_ref_base_alias_set (&lhs_ref), val);
2966 		}
2967 	    }
2968 	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
2969 					   size2i))
2970 	    {
2971 	      pd_data pd;
2972 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2973 	      if (TREE_CODE (rhs) == SSA_NAME)
2974 		rhs = SSA_VAL (rhs);
2975 	      pd.rhs = rhs;
2976 	      pd.offset = offset2i;
2977 	      pd.size = size2i;
2978 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2979 					     ao_ref_base_alias_set (&lhs_ref),
2980 					     offseti, maxsizei);
2981 	    }
2982 	}
2983     }
2984 
2985   /* 4) Assignment from an SSA name which definition we may be able
2986      to access pieces from or we can combine to a larger entity.  */
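  /* E.g. (illustration only):  after storing a 64-bit SSA name into a,
       a = x_1;
       y = MEM <int> [&a];
     the 32-bit read at offset 0 is rewritten to BIT_FIELD_REF <x_1, 32, 0>,
     or to a VIEW_CONVERT_EXPR when all of x_1 is re-read in a different
     type.  */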
2987   else if (known_eq (ref->size, maxsize)
2988 	   && is_gimple_reg_type (vr->type)
2989 	   && !contains_storage_order_barrier_p (vr->operands)
2990 	   && gimple_assign_single_p (def_stmt)
2991 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2992     {
2993       tree lhs = gimple_assign_lhs (def_stmt);
2994       tree base2;
2995       poly_int64 offset2, size2, maxsize2;
2996       HOST_WIDE_INT offset2i, size2i, offseti;
2997       bool reverse;
2998       gcc_assert (lhs_ref_ok);
2999       base2 = ao_ref_base (&lhs_ref);
3000       offset2 = lhs_ref.offset;
3001       size2 = lhs_ref.size;
3002       maxsize2 = lhs_ref.max_size;
3003       reverse = reverse_storage_order_for_component_p (lhs);
3004       tree def_rhs = gimple_assign_rhs1 (def_stmt);
3005       if (!reverse
3006 	  && !storage_order_barrier_p (lhs)
3007 	  && known_size_p (maxsize2)
3008 	  && known_eq (maxsize2, size2)
3009 	  && adjust_offsets_for_equal_base_address (base, &offset,
3010 						    base2, &offset2))
3011 	{
3012 	  if (data->partial_defs.is_empty ()
3013 	      && known_subrange_p (offset, maxsize, offset2, size2)
3014 	      /* ???  We can't handle bitfield precision extracts without
3015 		 either using an alternate type for the BIT_FIELD_REF and
3016 		 then doing a conversion or possibly adjusting the offset
3017 		 according to endianness.  */
3018 	      && (! INTEGRAL_TYPE_P (vr->type)
3019 		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3020 	      && multiple_p (ref->size, BITS_PER_UNIT))
3021 	    {
3022 	      tree val = NULL_TREE;
3023 	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3024 		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3025 		{
3026 		  gimple_match_op op (gimple_match_cond::UNCOND,
3027 				      BIT_FIELD_REF, vr->type,
3028 				      SSA_VAL (def_rhs),
3029 				      bitsize_int (ref->size),
3030 				      bitsize_int (offset - offset2));
3031 		  val = vn_nary_build_or_lookup (&op);
3032 		}
3033 	      else if (known_eq (ref->size, size2))
3034 		{
3035 		  gimple_match_op op (gimple_match_cond::UNCOND,
3036 				      VIEW_CONVERT_EXPR, vr->type,
3037 				      SSA_VAL (def_rhs));
3038 		  val = vn_nary_build_or_lookup (&op);
3039 		}
3040 	      if (val
3041 		  && (TREE_CODE (val) != SSA_NAME
3042 		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3043 		return data->finish (ao_ref_alias_set (&lhs_ref),
3044 				     ao_ref_base_alias_set (&lhs_ref), val);
3045 	    }
3046 	  else if (maxsize.is_constant (&maxsizei)
3047 		   && offset.is_constant (&offseti)
3048 		   && offset2.is_constant (&offset2i)
3049 		   && size2.is_constant (&size2i)
3050 		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3051 	    {
3052 	      pd_data pd;
3053 	      pd.rhs = SSA_VAL (def_rhs);
3054 	      pd.offset = offset2i;
3055 	      pd.size = size2i;
3056 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3057 					     ao_ref_base_alias_set (&lhs_ref),
3058 					     offseti, maxsizei);
3059 	    }
3060 	}
3061     }
3062 
3063   /* 5) For aggregate copies translate the reference through them if
3064      the copy kills ref.  */
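  /* E.g. (illustration only):  for
       *p = *q;
       x = p->f;
     when the aggregate copy kills the read, p->f is rewritten in terms of
     q (to q->f) and the walk continues looking for a definition of the
     translated reference.  */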
3065   else if (data->vn_walk_kind == VN_WALKREWRITE
3066 	   && gimple_assign_single_p (def_stmt)
3067 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
3068 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3069 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3070     {
3071       tree base2;
3072       int i, j, k;
3073       auto_vec<vn_reference_op_s> rhs;
3074       vn_reference_op_t vro;
3075       ao_ref r;
3076 
3077       gcc_assert (lhs_ref_ok);
3078 
3079       /* See if the assignment kills REF.  */
3080       base2 = ao_ref_base (&lhs_ref);
3081       if (!lhs_ref.max_size_known_p ()
3082 	  || (base != base2
3083 	      && (TREE_CODE (base) != MEM_REF
3084 		  || TREE_CODE (base2) != MEM_REF
3085 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3086 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3087 					  TREE_OPERAND (base2, 1))))
3088 	  || !stmt_kills_ref_p (def_stmt, ref))
3089 	return (void *)-1;
3090 
3091       /* Find the common base of ref and the lhs.  lhs_ops already
3092          contains valueized operands for the lhs.  */
3093       i = vr->operands.length () - 1;
3094       j = lhs_ops.length () - 1;
3095       while (j >= 0 && i >= 0
3096 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3097 	{
3098 	  i--;
3099 	  j--;
3100 	}
3101 
3102       /* ???  The innermost op should always be a MEM_REF and we already
3103          checked that the assignment to the lhs kills vr.  Thus for
3104 	 aggregate copies using char[] types the vn_reference_op_eq
3105 	 may fail when comparing types for compatibility.  But we really
3106 	 don't care here - further lookups with the rewritten operands
3107 	 will simply fail if we messed up types too badly.  */
3108       poly_int64 extra_off = 0;
3109       if (j == 0 && i >= 0
3110 	  && lhs_ops[0].opcode == MEM_REF
3111 	  && maybe_ne (lhs_ops[0].off, -1))
3112 	{
3113 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3114 	    i--, j--;
3115 	  else if (vr->operands[i].opcode == MEM_REF
3116 		   && maybe_ne (vr->operands[i].off, -1))
3117 	    {
3118 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
3119 	      i--, j--;
3120 	    }
3121 	}
3122 
3123       /* i now points to the first additional op.
3124 	 ???  LHS may not be completely contained in VR, one or more
3125 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
3126 	 try handling outermost VIEW_CONVERT_EXPRs.  */
3127       if (j != -1)
3128 	return (void *)-1;
3129 
3130       /* Punt if the additional ops contain a storage order barrier.  */
3131       for (k = i; k >= 0; k--)
3132 	{
3133 	  vro = &vr->operands[k];
3134 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3135 	    return (void *)-1;
3136 	}
3137 
3138       /* Now re-write REF to be based on the rhs of the assignment.  */
3139       tree rhs1 = gimple_assign_rhs1 (def_stmt);
3140       copy_reference_ops_from_ref (rhs1, &rhs);
3141 
3142       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
3143       if (maybe_ne (extra_off, 0))
3144 	{
3145 	  if (rhs.length () < 2)
3146 	    return (void *)-1;
3147 	  int ix = rhs.length () - 2;
3148 	  if (rhs[ix].opcode != MEM_REF
3149 	      || known_eq (rhs[ix].off, -1))
3150 	    return (void *)-1;
3151 	  rhs[ix].off += extra_off;
3152 	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3153 					 build_int_cst (TREE_TYPE (rhs[ix].op0),
3154 							extra_off));
3155 	}
3156 
3157       /* Save the operands since we need to use the original ones for
3158 	 the hash entry we use.  */
3159       if (!data->saved_operands.exists ())
3160 	data->saved_operands = vr->operands.copy ();
3161 
3162       /* We need to pre-pend vr->operands[0..i] to rhs.  */
3163       vec<vn_reference_op_s> old = vr->operands;
3164       if (i + 1 + rhs.length () > vr->operands.length ())
3165 	vr->operands.safe_grow (i + 1 + rhs.length ());
3166       else
3167 	vr->operands.truncate (i + 1 + rhs.length ());
3168       FOR_EACH_VEC_ELT (rhs, j, vro)
3169 	vr->operands[i + 1 + j] = *vro;
3170       vr->operands = valueize_refs (vr->operands);
3171       if (old == shared_lookup_references)
3172 	shared_lookup_references = vr->operands;
3173       vr->hashcode = vn_reference_compute_hash (vr);
3174 
3175       /* Try folding the new reference to a constant.  */
3176       tree val = fully_constant_vn_reference_p (vr);
3177       if (val)
3178 	{
3179 	  if (data->partial_defs.is_empty ())
3180 	    return data->finish (ao_ref_alias_set (&lhs_ref),
3181 				 ao_ref_base_alias_set (&lhs_ref), val);
3182 	  /* This is the only interesting case for partial-def handling
3183 	     coming from targets that like to gimplify init-ctors as
3184 	     aggregate copies from constant data like aarch64 for
3185 	     PR83518.  */
3186 	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3187 	    {
3188 	      pd_data pd;
3189 	      pd.rhs = val;
3190 	      pd.offset = 0;
3191 	      pd.size = maxsizei;
3192 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3193 					     ao_ref_base_alias_set (&lhs_ref),
3194 					     0, maxsizei);
3195 	    }
3196 	}
3197 
3198       /* Continuing with partial defs isn't easily possible here; we
3199          have to find a full def from further lookups from here.  Probably
3200 	 not worth special-casing everywhere.  */
3201       if (!data->partial_defs.is_empty ())
3202 	return (void *)-1;
3203 
3204       /* Adjust *ref from the new operands.  */
3205       ao_ref rhs1_ref;
3206       ao_ref_init (&rhs1_ref, rhs1);
3207       if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3208 					  ao_ref_base_alias_set (&rhs1_ref),
3209 					  vr->type, vr->operands))
3210 	return (void *)-1;
3211       /* This can happen with bitfields.  */
3212       if (maybe_ne (ref->size, r.size))
3213 	{
3214 	  /* If the access lacks some subsetting, simply apply that by
3215 	     shortening it.  That in the end can only be successful
3216 	     if we can pun the lookup result which in turn requires
3217 	     exact offsets.  */
3218 	  if (known_eq (r.size, r.max_size)
3219 	      && known_lt (ref->size, r.size))
3220 	    r.size = r.max_size = ref->size;
3221 	  else
3222 	    return (void *)-1;
3223 	}
3224       *ref = r;
3225 
3226       /* Do not update last seen VUSE after translating.  */
3227       data->last_vuse_ptr = NULL;
3228       /* Invalidate the original access path since it now contains
3229          the wrong base.  */
3230       data->orig_ref.ref = NULL_TREE;
3231       /* Use the alias-set of this LHS for recording an eventual result.  */
3232       if (data->first_set == -2)
3233 	{
3234 	  data->first_set = ao_ref_alias_set (&lhs_ref);
3235 	  data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3236 	}
3237 
3238       /* Keep looking for the adjusted *REF / VR pair.  */
3239       return NULL;
3240     }
3241 
3242   /* 6) For memcpy copies translate the reference through them if
3243      the copy kills ref.  */
3244   else if (data->vn_walk_kind == VN_WALKREWRITE
3245 	   && is_gimple_reg_type (vr->type)
3246 	   /* ???  Handle BCOPY as well.  */
3247 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3248 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3249 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3250 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3251 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3252 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3253 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3254 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3255 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3256 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3257 	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3258 	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3259 		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3260 				       &copy_size)))
3261 	   /* Handling this is more complicated, give up for now.  */
3262 	   && data->partial_defs.is_empty ())
3263     {
3264       tree lhs, rhs;
3265       ao_ref r;
3266       poly_int64 rhs_offset, lhs_offset;
3267       vn_reference_op_s op;
3268       poly_uint64 mem_offset;
3269       poly_int64 at, byte_maxsize;
3270 
3271       /* Only handle non-variable, addressable refs.  */
3272       if (maybe_ne (ref->size, maxsize)
3273 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
3274 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3275 	return (void *)-1;
3276 
3277       /* Extract a pointer base and an offset for the destination.  */
3278       lhs = gimple_call_arg (def_stmt, 0);
3279       lhs_offset = 0;
3280       if (TREE_CODE (lhs) == SSA_NAME)
3281 	{
3282 	  lhs = vn_valueize (lhs);
3283 	  if (TREE_CODE (lhs) == SSA_NAME)
3284 	    {
3285 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3286 	      if (gimple_assign_single_p (def_stmt)
3287 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3288 		lhs = gimple_assign_rhs1 (def_stmt);
3289 	    }
3290 	}
3291       if (TREE_CODE (lhs) == ADDR_EXPR)
3292 	{
3293 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3294 						    &lhs_offset);
3295 	  if (!tem)
3296 	    return (void *)-1;
3297 	  if (TREE_CODE (tem) == MEM_REF
3298 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3299 	    {
3300 	      lhs = TREE_OPERAND (tem, 0);
3301 	      if (TREE_CODE (lhs) == SSA_NAME)
3302 		lhs = vn_valueize (lhs);
3303 	      lhs_offset += mem_offset;
3304 	    }
3305 	  else if (DECL_P (tem))
3306 	    lhs = build_fold_addr_expr (tem);
3307 	  else
3308 	    return (void *)-1;
3309 	}
3310       if (TREE_CODE (lhs) != SSA_NAME
3311 	  && TREE_CODE (lhs) != ADDR_EXPR)
3312 	return (void *)-1;
3313 
3314       /* Extract a pointer base and an offset for the source.  */
3315       rhs = gimple_call_arg (def_stmt, 1);
3316       rhs_offset = 0;
3317       if (TREE_CODE (rhs) == SSA_NAME)
3318 	rhs = vn_valueize (rhs);
3319       if (TREE_CODE (rhs) == ADDR_EXPR)
3320 	{
3321 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3322 						    &rhs_offset);
3323 	  if (!tem)
3324 	    return (void *)-1;
3325 	  if (TREE_CODE (tem) == MEM_REF
3326 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3327 	    {
3328 	      rhs = TREE_OPERAND (tem, 0);
3329 	      rhs_offset += mem_offset;
3330 	    }
3331 	  else if (DECL_P (tem)
3332 		   || TREE_CODE (tem) == STRING_CST)
3333 	    rhs = build_fold_addr_expr (tem);
3334 	  else
3335 	    return (void *)-1;
3336 	}
3337       if (TREE_CODE (rhs) == SSA_NAME)
3338 	rhs = SSA_VAL (rhs);
3339       else if (TREE_CODE (rhs) != ADDR_EXPR)
3340 	return (void *)-1;
3341 
3342       /* The bases of the destination and the references have to agree.  */
3343       if (TREE_CODE (base) == MEM_REF)
3344 	{
3345 	  if (TREE_OPERAND (base, 0) != lhs
3346 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3347 	    return (void *) -1;
3348 	  at += mem_offset;
3349 	}
3350       else if (!DECL_P (base)
3351 	       || TREE_CODE (lhs) != ADDR_EXPR
3352 	       || TREE_OPERAND (lhs, 0) != base)
3353 	return (void *)-1;
3354 
3355       /* If the access is completely outside of the memcpy destination
3356 	 area there is no aliasing.  */
3357       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3358 	return NULL;
3359       /* And the access has to be contained within the memcpy destination.  */
3360       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3361 	return (void *)-1;
3362 
3363       /* Save the operands since we need to use the original ones for
3364 	 the hash entry we use.  */
3365       if (!data->saved_operands.exists ())
3366 	data->saved_operands = vr->operands.copy ();
3367 
3368       /* Make room for 2 operands in the new reference.  */
3369       if (vr->operands.length () < 2)
3370 	{
3371 	  vec<vn_reference_op_s> old = vr->operands;
3372 	  vr->operands.safe_grow_cleared (2);
3373 	  if (old == shared_lookup_references)
3374 	    shared_lookup_references = vr->operands;
3375 	}
3376       else
3377 	vr->operands.truncate (2);
3378 
3379       /* The looked-through reference is a simple MEM_REF.  */
3380       memset (&op, 0, sizeof (op));
3381       op.type = vr->type;
3382       op.opcode = MEM_REF;
3383       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3384       op.off = at - lhs_offset + rhs_offset;
3385       vr->operands[0] = op;
3386       op.type = TREE_TYPE (rhs);
3387       op.opcode = TREE_CODE (rhs);
3388       op.op0 = rhs;
3389       op.off = -1;
3390       vr->operands[1] = op;
3391       vr->hashcode = vn_reference_compute_hash (vr);
3392 
3393       /* Try folding the new reference to a constant.  */
3394       tree val = fully_constant_vn_reference_p (vr);
3395       if (val)
3396 	return data->finish (0, 0, val);
3397 
3398       /* Adjust *ref from the new operands.  */
3399       if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3400 	return (void *)-1;
3401       /* This can happen with bitfields.  */
3402       if (maybe_ne (ref->size, r.size))
3403 	return (void *)-1;
3404       *ref = r;
3405 
3406       /* Do not update last seen VUSE after translating.  */
3407       data->last_vuse_ptr = NULL;
3408       /* Invalidate the original access path since it now contains
3409          the wrong base.  */
3410       data->orig_ref.ref = NULL_TREE;
3411       /* Use the alias-set of this stmt for recording an eventual result.  */
3412       if (data->first_set == -2)
3413 	{
3414 	  data->first_set = 0;
3415 	  data->first_base_set = 0;
3416 	}
3417 
3418       /* Keep looking for the adjusted *REF / VR pair.  */
3419       return NULL;
3420     }
3421 
3422   /* Bail out and stop walking.  */
3423   return (void *)-1;
3424 }
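
/* Illustrative sketch of case 6 above (hypothetical source, not part of
   the pass): for

     memcpy (&a, &b, sizeof (a));
     ... = a.f;

   the load from A is contained in the memcpy destination, so the lookup
   is rewritten into a load from B at the corresponding offset and
   continues from there, possibly finding an earlier store to b.f or a
   constant initializer of B.  */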
3425 
3426 /* Return a reference op vector from OP that can be used for
3427    vn_reference_lookup_pieces.  The caller is responsible for releasing
3428    the vector.  */
3429 
3430 vec<vn_reference_op_s>
3431 vn_reference_operands_for_lookup (tree op)
3432 {
3433   bool valueized;
3434   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3435 }
3436 
3437 /* Lookup a reference operation by its parts, in the current hash table.
3438    Returns the resulting value number if it exists in the hash table,
3439    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
3440    vn_reference_t stored in the hashtable if something is found.  */
3441 
3442 tree
3443 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3444 			    alias_set_type base_set, tree type,
3445 			    vec<vn_reference_op_s> operands,
3446 			    vn_reference_t *vnresult, vn_lookup_kind kind)
3447 {
3448   struct vn_reference_s vr1;
3449   vn_reference_t tmp;
3450   tree cst;
3451 
3452   if (!vnresult)
3453     vnresult = &tmp;
3454   *vnresult = NULL;
3455 
3456   vr1.vuse = vuse_ssa_val (vuse);
3457   shared_lookup_references.truncate (0);
3458   shared_lookup_references.safe_grow (operands.length ());
3459   memcpy (shared_lookup_references.address (),
3460 	  operands.address (),
3461 	  sizeof (vn_reference_op_s)
3462 	  * operands.length ());
3463   vr1.operands = operands = shared_lookup_references
3464     = valueize_refs (shared_lookup_references);
3465   vr1.type = type;
3466   vr1.set = set;
3467   vr1.base_set = base_set;
3468   vr1.hashcode = vn_reference_compute_hash (&vr1);
3469   if ((cst = fully_constant_vn_reference_p (&vr1)))
3470     return cst;
3471 
3472   vn_reference_lookup_1 (&vr1, vnresult);
3473   if (!*vnresult
3474       && kind != VN_NOWALK
3475       && vr1.vuse)
3476     {
3477       ao_ref r;
3478       unsigned limit = param_sccvn_max_alias_queries_per_access;
3479       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3480       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3481 					 vr1.operands))
3482 	*vnresult
3483 	  = ((vn_reference_t)
3484 	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3485 				     vn_reference_lookup_3, vuse_valueize,
3486 				     limit, &data));
3487       gcc_checking_assert (vr1.operands == shared_lookup_references);
3488     }
3489 
3490   if (*vnresult)
3491      return (*vnresult)->result;
3492 
3493   return NULL_TREE;
3494 }
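
/* A usage sketch (illustrative only; EXPR, STMT, SET and BASE_SET are
   placeholders for the caller's reference, statement and alias sets):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (expr);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt), set,
                                            base_set, TREE_TYPE (expr),
                                            ops, &res, VN_WALKREWRITE);
     ops.release ();

   VAL is the recorded value number or NULL_TREE.  */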
3495 
3496 /* Lookup OP in the current hash table, and return the resulting value
3497    number if it exists in the hash table.  Return NULL_TREE if it does
3498    not exist in the hash table or if the result field of the structure
3499    was NULL.  VNRESULT will be filled in with the vn_reference_t
3500    stored in the hashtable if one exists.  When TBAA_P is false assume
3501    we are looking up a store and treat it as having alias-set zero.
3502    *LAST_VUSE_PTR will be updated with the VUSE where the lookup succeeded.
3503    MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3504    load is bitwise anded with MASK and so we are only interested in a subset
3505    of the bits and can ignore if the other bits are uninitialized or
3506    not initialized with constants.  */
3507 
3508 tree
3509 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3510 		     vn_reference_t *vnresult, bool tbaa_p,
3511 		     tree *last_vuse_ptr, tree mask)
3512 {
3513   vec<vn_reference_op_s> operands;
3514   struct vn_reference_s vr1;
3515   bool valueized_anything;
3516 
3517   if (vnresult)
3518     *vnresult = NULL;
3519 
3520   vr1.vuse = vuse_ssa_val (vuse);
3521   vr1.operands = operands
3522     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3523   vr1.type = TREE_TYPE (op);
3524   ao_ref op_ref;
3525   ao_ref_init (&op_ref, op);
3526   vr1.set = ao_ref_alias_set (&op_ref);
3527   vr1.base_set = ao_ref_base_alias_set (&op_ref);
3528   vr1.hashcode = vn_reference_compute_hash (&vr1);
3529   if (mask == NULL_TREE)
3530     if (tree cst = fully_constant_vn_reference_p (&vr1))
3531       return cst;
3532 
3533   if (kind != VN_NOWALK && vr1.vuse)
3534     {
3535       vn_reference_t wvnresult;
3536       ao_ref r;
3537       unsigned limit = param_sccvn_max_alias_queries_per_access;
3538       /* Make sure to use a valueized reference if we valueized anything.
3539          Otherwise preserve the full reference for advanced TBAA.  */
3540       if (!valueized_anything
3541 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3542 					     vr1.type, vr1.operands))
3543 	ao_ref_init (&r, op);
3544       vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3545 			    last_vuse_ptr, kind, tbaa_p, mask);
3546 
3547       wvnresult
3548 	= ((vn_reference_t)
3549 	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3550 				   vn_reference_lookup_3, vuse_valueize, limit,
3551 				   &data));
3552       gcc_checking_assert (vr1.operands == shared_lookup_references);
3553       if (wvnresult)
3554 	{
3555 	  gcc_assert (mask == NULL_TREE);
3556 	  if (vnresult)
3557 	    *vnresult = wvnresult;
3558 	  return wvnresult->result;
3559 	}
3560       else if (mask)
3561 	return data.masked_result;
3562 
3563       return NULL_TREE;
3564     }
3565 
3566   if (last_vuse_ptr)
3567     *last_vuse_ptr = vr1.vuse;
3568   if (mask)
3569     return NULL_TREE;
3570   return vn_reference_lookup_1 (&vr1, vnresult);
3571 }
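
/* A usage sketch (illustrative only; RHS and STMT are placeholders):
   value numbering a load RHS at the VUSE of its statement looks like

     vn_reference_t vnresult = NULL;
     tree val = vn_reference_lookup (rhs, gimple_vuse (stmt),
                                     VN_WALKREWRITE, &vnresult,
                                     true, NULL, NULL_TREE);

   VAL is the value number of the load or NULL_TREE; passing a non-NULL
   MASK instead would ask only for the masked bits.  */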
3572 
3573 /* Lookup CALL in the current hash table and return the entry in
3574    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
3575 
3576 void
3577 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3578 			  vn_reference_t vr)
3579 {
3580   if (vnresult)
3581     *vnresult = NULL;
3582 
3583   tree vuse = gimple_vuse (call);
3584 
3585   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3586   vr->operands = valueize_shared_reference_ops_from_call (call);
3587   vr->type = gimple_expr_type (call);
3588   vr->punned = false;
3589   vr->set = 0;
3590   vr->base_set = 0;
3591   vr->hashcode = vn_reference_compute_hash (vr);
3592   vn_reference_lookup_1 (vr, vnresult);
3593 }
3594 
3595 /* Insert OP into the current hash table with a value number of RESULT.  */
3596 
3597 static void
3598 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3599 {
3600   vn_reference_s **slot;
3601   vn_reference_t vr1;
3602   bool tem;
3603 
3604   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3605   if (TREE_CODE (result) == SSA_NAME)
3606     vr1->value_id = VN_INFO (result)->value_id;
3607   else
3608     vr1->value_id = get_or_alloc_constant_value_id (result);
3609   vr1->vuse = vuse_ssa_val (vuse);
3610   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3611   vr1->type = TREE_TYPE (op);
3612   vr1->punned = false;
3613   ao_ref op_ref;
3614   ao_ref_init (&op_ref, op);
3615   vr1->set = ao_ref_alias_set (&op_ref);
3616   vr1->base_set = ao_ref_base_alias_set (&op_ref);
3617   vr1->hashcode = vn_reference_compute_hash (vr1);
3618   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3619   vr1->result_vdef = vdef;
3620 
3621   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3622 						      INSERT);
3623 
3624   /* Because IL walking on reference lookup can end up visiting
3625      a def that is only to be visited later in iteration order
3626      when we are about to make an irreducible region reducible,
3627      the def can effectively be processed and its ref inserted
3628      by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
3629      but save a lookup if we deal with already inserted refs here.  */
3630   if (*slot)
3631     {
3632       /* We cannot assert that we have the same value either because
3633          when disentangling an irreducible region we may end up visiting
3634 	 a use before the corresponding def.  That's a missed optimization
3635 	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
3636       if (dump_file && (dump_flags & TDF_DETAILS)
3637 	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
3638 	{
3639 	  fprintf (dump_file, "Keeping old value ");
3640 	  print_generic_expr (dump_file, (*slot)->result);
3641 	  fprintf (dump_file, " because of collision\n");
3642 	}
3643       free_reference (vr1);
3644       obstack_free (&vn_tables_obstack, vr1);
3645       return;
3646     }
3647 
3648   *slot = vr1;
3649   vr1->next = last_inserted_ref;
3650   last_inserted_ref = vr1;
3651 }
3652 
3653 /* Insert a reference by its pieces into the current hash table with
3654    a value number of RESULT.  Return the resulting reference
3655    structure we created.  */
3656 
3657 vn_reference_t
3658 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3659 			    alias_set_type base_set, tree type,
3660 			    vec<vn_reference_op_s> operands,
3661 			    tree result, unsigned int value_id)
3662 
3663 {
3664   vn_reference_s **slot;
3665   vn_reference_t vr1;
3666 
3667   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3668   vr1->value_id = value_id;
3669   vr1->vuse = vuse_ssa_val (vuse);
3670   vr1->operands = valueize_refs (operands);
3671   vr1->type = type;
3672   vr1->punned = false;
3673   vr1->set = set;
3674   vr1->base_set = base_set;
3675   vr1->hashcode = vn_reference_compute_hash (vr1);
3676   if (result && TREE_CODE (result) == SSA_NAME)
3677     result = SSA_VAL (result);
3678   vr1->result = result;
3679 
3680   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3681 						      INSERT);
3682 
3683   /* At this point we should have all the things inserted that we have
3684      seen before, and we should never try inserting something that
3685      already exists.  */
3686   gcc_assert (!*slot);
3687 
3688   *slot = vr1;
3689   vr1->next = last_inserted_ref;
3690   last_inserted_ref = vr1;
3691   return vr1;
3692 }
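
/* A usage sketch (illustrative only; all names are placeholders): a
   client that is about to materialize an expression records it as

     vn_reference_insert_pieces (vuse, set, base_set, type, operands,
                                 new_name, value_id);

   so that later lookups of the same operands at VUSE return NEW_NAME.  */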
3693 
3694 /* Compute and return the hash value for nary operation VNO1.  */
3695 
3696 static hashval_t
3697 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3698 {
3699   inchash::hash hstate;
3700   unsigned i;
3701 
3702   for (i = 0; i < vno1->length; ++i)
3703     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3704       vno1->op[i] = SSA_VAL (vno1->op[i]);
3705 
3706   if (((vno1->length == 2
3707 	&& commutative_tree_code (vno1->opcode))
3708        || (vno1->length == 3
3709 	   && commutative_ternary_tree_code (vno1->opcode)))
3710       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3711     std::swap (vno1->op[0], vno1->op[1]);
3712   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3713 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3714     {
3715       std::swap (vno1->op[0], vno1->op[1]);
3716       vno1->opcode = swap_tree_comparison  (vno1->opcode);
3717     }
3718 
3719   hstate.add_int (vno1->opcode);
3720   for (i = 0; i < vno1->length; ++i)
3721     inchash::add_expr (vno1->op[i], hstate);
3722 
3723   return hstate.end ();
3724 }
3725 
3726 /* Compare nary operations VNO1 and VNO2 and return true if they are
3727    equivalent.  */
3728 
3729 bool
3730 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3731 {
3732   unsigned i;
3733 
3734   if (vno1->hashcode != vno2->hashcode)
3735     return false;
3736 
3737   if (vno1->length != vno2->length)
3738     return false;
3739 
3740   if (vno1->opcode != vno2->opcode
3741       || !types_compatible_p (vno1->type, vno2->type))
3742     return false;
3743 
3744   for (i = 0; i < vno1->length; ++i)
3745     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3746       return false;
3747 
3748   /* BIT_INSERT_EXPR has an implicit operand as the type precision
3749      of op1.  Need to check to make sure they are the same.  */
3750   if (vno1->opcode == BIT_INSERT_EXPR
3751       && TREE_CODE (vno1->op[1]) == INTEGER_CST
3752       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3753 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3754     return false;
3755 
3756   return true;
3757 }
3758 
3759 /* Initialize VNO from the pieces provided.  */
3760 
3761 static void
3762 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3763 			     enum tree_code code, tree type, tree *ops)
3764 {
3765   vno->opcode = code;
3766   vno->length = length;
3767   vno->type = type;
3768   memcpy (&vno->op[0], ops, sizeof (tree) * length);
3769 }
3770 
3771 /* Return the number of operands for a vn_nary ops structure from STMT.  */
3772 
3773 static unsigned int
3774 vn_nary_length_from_stmt (gimple *stmt)
3775 {
3776   switch (gimple_assign_rhs_code (stmt))
3777     {
3778     case REALPART_EXPR:
3779     case IMAGPART_EXPR:
3780     case VIEW_CONVERT_EXPR:
3781       return 1;
3782 
3783     case BIT_FIELD_REF:
3784       return 3;
3785 
3786     case CONSTRUCTOR:
3787       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3788 
3789     default:
3790       return gimple_num_ops (stmt) - 1;
3791     }
3792 }
3793 
3794 /* Initialize VNO from STMT.  */
3795 
3796 static void
3797 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3798 {
3799   unsigned i;
3800 
3801   vno->opcode = gimple_assign_rhs_code (stmt);
3802   vno->type = gimple_expr_type (stmt);
3803   switch (vno->opcode)
3804     {
3805     case REALPART_EXPR:
3806     case IMAGPART_EXPR:
3807     case VIEW_CONVERT_EXPR:
3808       vno->length = 1;
3809       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3810       break;
3811 
3812     case BIT_FIELD_REF:
3813       vno->length = 3;
3814       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3815       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3816       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3817       break;
3818 
3819     case CONSTRUCTOR:
3820       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3821       for (i = 0; i < vno->length; ++i)
3822 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3823       break;
3824 
3825     default:
3826       gcc_checking_assert (!gimple_assign_single_p (stmt));
3827       vno->length = gimple_num_ops (stmt) - 1;
3828       for (i = 0; i < vno->length; ++i)
3829 	vno->op[i] = gimple_op (stmt, i + 1);
3830     }
3831 }
3832 
3833 /* Compute the hashcode for VNO and look for it in the hash table;
3834    return the resulting value number if it exists in the hash table.
3835    Return NULL_TREE if it does not exist in the hash table or if the
3836    result field of the operation is NULL.  VNRESULT will contain the
3837    vn_nary_op_t from the hashtable if it exists.  */
3838 
3839 static tree
3840 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3841 {
3842   vn_nary_op_s **slot;
3843 
3844   if (vnresult)
3845     *vnresult = NULL;
3846 
3847   vno->hashcode = vn_nary_op_compute_hash (vno);
3848   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3849   if (!slot)
3850     return NULL_TREE;
3851   if (vnresult)
3852     *vnresult = *slot;
3853   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3854 }
3855 
3856 /* Lookup an n-ary operation by its pieces and return the resulting value
3857    number if it exists in the hash table.  Return NULL_TREE if it does
3858    not exist in the hash table or if the result field of the operation
3859    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3860    if it exists.  */
3861 
3862 tree
3863 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3864 			  tree type, tree *ops, vn_nary_op_t *vnresult)
3865 {
3866   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3867 				  sizeof_vn_nary_op (length));
3868   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3869   return vn_nary_op_lookup_1 (vno1, vnresult);
3870 }
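
/* A usage sketch (illustrative only; OP0, OP1 and TYPE are placeholders):
   looking up a binary operation with already valueized operands:

     tree ops[2] = { op0, op1 };
     vn_nary_op_t nary;
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, ops, &nary);

   VAL is the recorded value number or NULL_TREE, NARY the full
   hashtable entry.  */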
3871 
3872 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3873    value number if it exists in the hash table.  Return NULL_TREE if
3874    it does not exist in the hash table.  VNRESULT will contain the
3875    vn_nary_op_t from the hashtable if it exists.  */
3876 
3877 tree
3878 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3879 {
3880   vn_nary_op_t vno1
3881     = XALLOCAVAR (struct vn_nary_op_s,
3882 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3883   init_vn_nary_op_from_stmt (vno1, stmt);
3884   return vn_nary_op_lookup_1 (vno1, vnresult);
3885 }
3886 
3887 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
3888 
3889 static vn_nary_op_t
3890 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3891 {
3892   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3893 }
3894 
3895 /* Allocate and initialize a vn_nary_op_t on the vn_tables_obstack.  */
3897 
3898 static vn_nary_op_t
3899 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3900 {
3901   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3902 
3903   vno1->value_id = value_id;
3904   vno1->length = length;
3905   vno1->predicated_values = 0;
3906   vno1->u.result = result;
3907 
3908   return vno1;
3909 }
3910 
3911 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
3912    VNO->HASHCODE first.  */
3913 
3914 static vn_nary_op_t
3915 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3916 			bool compute_hash)
3917 {
3918   vn_nary_op_s **slot;
3919 
3920   if (compute_hash)
3921     {
3922       vno->hashcode = vn_nary_op_compute_hash (vno);
3923       gcc_assert (! vno->predicated_values
3924 		  || (! vno->u.values->next
3925 		      && vno->u.values->n == 1));
3926     }
3927 
3928   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3929   vno->unwind_to = *slot;
3930   if (*slot)
3931     {
3932       /* Prefer non-predicated values.
3933          ???  Only if those are constant, otherwise, with constant predicated
3934 	 value, turn them into predicated values with entry-block validity
3935 	 (???  but we always find the first valid result currently).  */
3936       if ((*slot)->predicated_values
3937 	  && ! vno->predicated_values)
3938 	{
3939 	  /* ???  We cannot remove *slot from the unwind stack list.
3940 	     For the moment we deal with this by skipping not found
3941 	     entries but this isn't ideal ...  */
3942 	  *slot = vno;
3943 	  /* ???  Maintain a stack of states we can unwind in
3944 	     vn_nary_op_s?  But how far do we unwind?  In reality
3945 	     we need to push change records somewhere...  Or not
3946 	     unwind vn_nary_op_s and linking them but instead
3947 	     unwind the results "list", linking that, which also
3948 	     doesn't move on hashtable resize.  */
3949 	  /* We can also have a ->unwind_to recording *slot there.
3950 	     That way we can make u.values a fixed size array with
3951 	     recording the number of entries but of course we then
3952 	     have always N copies for each unwind_to-state.  Or we
3953              make sure to only ever append and each unwinding will
3954 	     pop off one entry (but how to deal with predicated
3955 	     replaced with non-predicated here?)  */
3956 	  vno->next = last_inserted_nary;
3957 	  last_inserted_nary = vno;
3958 	  return vno;
3959 	}
3960       else if (vno->predicated_values
3961 	       && ! (*slot)->predicated_values)
3962 	return *slot;
3963       else if (vno->predicated_values
3964 	       && (*slot)->predicated_values)
3965 	{
3966 	  /* ???  Factor this all into a insert_single_predicated_value
3967 	     routine.  */
3968 	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3969 	  basic_block vno_bb
3970 	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3971 	  vn_pval *nval = vno->u.values;
3972 	  vn_pval **next = &vno->u.values;
3973 	  bool found = false;
3974 	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3975 	    {
3976 	      if (expressions_equal_p (val->result, vno->u.values->result))
3977 		{
3978 		  found = true;
3979 		  for (unsigned i = 0; i < val->n; ++i)
3980 		    {
3981 		      basic_block val_bb
3982 			= BASIC_BLOCK_FOR_FN (cfun,
3983 					      val->valid_dominated_by_p[i]);
3984 		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3985 			/* Value registered with more generic predicate.  */
3986 			return *slot;
3987 		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3988 			/* Shouldn't happen, we insert in RPO order.  */
3989 			gcc_unreachable ();
3990 		    }
3991 		  /* Append value.  */
3992 		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3993 						     sizeof (vn_pval)
3994 						     + val->n * sizeof (int));
3995 		  (*next)->next = NULL;
3996 		  (*next)->result = val->result;
3997 		  (*next)->n = val->n + 1;
3998 		  memcpy ((*next)->valid_dominated_by_p,
3999 			  val->valid_dominated_by_p,
4000 			  val->n * sizeof (int));
4001 		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4002 		  next = &(*next)->next;
4003 		  if (dump_file && (dump_flags & TDF_DETAILS))
4004 		    fprintf (dump_file, "Appending predicate to value.\n");
4005 		  continue;
4006 		}
4007 	      /* Copy other predicated values.  */
4008 	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4009 						 sizeof (vn_pval)
4010 						 + (val->n-1) * sizeof (int));
4011 	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4012 	      (*next)->next = NULL;
4013 	      next = &(*next)->next;
4014 	    }
4015 	  if (!found)
4016 	    *next = nval;
4017 
4018 	  *slot = vno;
4019 	  vno->next = last_inserted_nary;
4020 	  last_inserted_nary = vno;
4021 	  return vno;
4022 	}
4023 
4024       /* While we do not want to insert things twice it's awkward to
4025 	 avoid it in the case where visit_nary_op pattern-matches stuff
4026 	 and ends up simplifying the replacement to itself.  We then
4027 	 get two inserts, one from visit_nary_op and one from
4028 	 vn_nary_build_or_lookup.
4029 	 So allow inserts with the same value number.  */
4030       if ((*slot)->u.result == vno->u.result)
4031 	return *slot;
4032     }
4033 
4034   /* ???  There's also optimistic vs. previous committed state merging
4035      that is problematic for the case of unwinding.  */
4036 
4037   /* ???  We should return NULL if we do not use 'vno' and have the
4038      caller release it.  */
4039   gcc_assert (!*slot);
4040 
4041   *slot = vno;
4042   vno->next = last_inserted_nary;
4043   last_inserted_nary = vno;
4044   return vno;
4045 }
4046 
4047 /* Insert an n-ary operation into the current hash table using its
4048    pieces.  Return the vn_nary_op_t structure we created and put in
4049    the hashtable.  */
4050 
4051 vn_nary_op_t
4052 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4053 			  tree type, tree *ops,
4054 			  tree result, unsigned int value_id)
4055 {
4056   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4057   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4058   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4059 }
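
/* A sketch continuing the lookup example above (LHS is a placeholder
   for the SSA name defined by the statement): a failed lookup is
   typically followed by recording the computed result so that later
   computations of op0 + op1 get the same value number:

     if (!val)
       vn_nary_op_insert_pieces (2, PLUS_EXPR, type, ops, lhs,
                                 VN_INFO (lhs)->value_id);  */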
4060 
4061 static vn_nary_op_t
4062 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4063 				     tree type, tree *ops,
4064 				     tree result, unsigned int value_id,
4065 				     edge pred_e)
4066 {
4067   /* ???  Currently tracking BBs.  */
4068   if (! single_pred_p (pred_e->dest))
4069     {
4070       /* Never record for backedges.  */
4071       if (pred_e->flags & EDGE_DFS_BACK)
4072 	return NULL;
4073       edge_iterator ei;
4074       edge e;
4075       int cnt = 0;
4076       /* Ignore backedges.  */
4077       FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4078 	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4079 	  cnt++;
4080       if (cnt != 1)
4081 	return NULL;
4082     }
4083   if (dump_file && (dump_flags & TDF_DETAILS)
4084       /* ???  Fix dumping, but currently we only get comparisons.  */
4085       && TREE_CODE_CLASS (code) == tcc_comparison)
4086     {
4087       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4088 	       pred_e->dest->index);
4089       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4090       fprintf (dump_file, " %s ", get_tree_code_name (code));
4091       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4092       fprintf (dump_file, " == %s\n",
4093 	       integer_zerop (result) ? "false" : "true");
4094     }
4095   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4096   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4097   vno1->predicated_values = 1;
4098   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4099 					      sizeof (vn_pval));
4100   vno1->u.values->next = NULL;
4101   vno1->u.values->result = result;
4102   vno1->u.values->n = 1;
4103   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4104   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4105 }
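
/* An illustrative sketch (hypothetical caller): for a condition

     if (x_1 == 0)

   with OPS = { x_1, integer_zero_node } one can record on the true edge

     vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                          ops, boolean_true_node,
                                          value_id, true_e);

   and the inverse on the false edge; vn_nary_op_get_predicated_value
   then returns the recorded result in blocks dominated by the
   respective edge destination.  */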
4106 
4107 static bool
4108 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
4109 
4110 static tree
4111 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4112 {
4113   if (! vno->predicated_values)
4114     return vno->u.result;
4115   for (vn_pval *val = vno->u.values; val; val = val->next)
4116     for (unsigned i = 0; i < val->n; ++i)
4117       if (dominated_by_p_w_unex (bb,
4118 			  BASIC_BLOCK_FOR_FN
4119 			    (cfun, val->valid_dominated_by_p[i])))
4120 	return val->result;
4121   return NULL_TREE;
4122 }
4123 
4124 /* Insert the rhs of STMT into the current hash table with a value number of
4125    RESULT.  */
4126 
4127 static vn_nary_op_t
4128 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4129 {
4130   vn_nary_op_t vno1
4131     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4132 			result, VN_INFO (result)->value_id);
4133   init_vn_nary_op_from_stmt (vno1, stmt);
4134   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4135 }
4136 
4137 /* Compute a hashcode for PHI operation VP1 and return it.  */
4138 
4139 static inline hashval_t
4140 vn_phi_compute_hash (vn_phi_t vp1)
4141 {
4142   inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
4143 			? vp1->block->index : EDGE_COUNT (vp1->block->preds));
4144   tree phi1op;
4145   tree type;
4146   edge e;
4147   edge_iterator ei;
4148 
4149   /* If all PHI arguments are constants we need to distinguish
4150      the PHI node via its type.  */
4151   type = vp1->type;
4152   hstate.merge_hash (vn_hash_type (type));
4153 
4154   FOR_EACH_EDGE (e, ei, vp1->block->preds)
4155     {
4156       /* Don't hash backedge values; they need to be handled as VN_TOP
4157          for optimistic value-numbering.  */
4158       if (e->flags & EDGE_DFS_BACK)
4159 	continue;
4160 
4161       phi1op = vp1->phiargs[e->dest_idx];
4162       if (phi1op == VN_TOP)
4163 	continue;
4164       inchash::add_expr (phi1op, hstate);
4165     }
4166 
4167   return hstate.end ();
4168 }
4169 
4170 
4171 /* Return true if COND1 and COND2 represent the same condition, set
4172    *INVERTED_P if one needs to be inverted to make it the same as
4173    the other.  */
4174 
4175 static bool
4176 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4177 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4178 {
4179   enum tree_code code1 = gimple_cond_code (cond1);
4180   enum tree_code code2 = gimple_cond_code (cond2);
4181 
4182   *inverted_p = false;
4183   if (code1 == code2)
4184     ;
4185   else if (code1 == swap_tree_comparison (code2))
4186     std::swap (lhs2, rhs2);
4187   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4188     *inverted_p = true;
4189   else if (code1 == invert_tree_comparison
4190 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4191     {
4192       std::swap (lhs2, rhs2);
4193       *inverted_p = true;
4194     }
4195   else
4196     return false;
4197 
4198   return ((expressions_equal_p (lhs1, lhs2)
4199 	   && expressions_equal_p (rhs1, rhs2))
4200 	  || (commutative_tree_code (code1)
4201 	      && expressions_equal_p (lhs1, rhs2)
4202 	      && expressions_equal_p (rhs1, lhs2)));
4203 }
4204 
4205 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
4206 
4207 static int
4208 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4209 {
4210   if (vp1->hashcode != vp2->hashcode)
4211     return false;
4212 
4213   if (vp1->block != vp2->block)
4214     {
4215       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4216 	return false;
4217 
4218       switch (EDGE_COUNT (vp1->block->preds))
4219 	{
4220 	case 1:
4221 	  /* Single-arg PHIs are just copies.  */
4222 	  break;
4223 
4224 	case 2:
4225 	  {
4226 	    /* Rule out backedges into the PHI.  */
4227 	    if (vp1->block->loop_father->header == vp1->block
4228 		|| vp2->block->loop_father->header == vp2->block)
4229 	      return false;
4230 
4231 	    /* If the PHI nodes do not have compatible types
4232 	       they are not the same.  */
4233 	    if (!types_compatible_p (vp1->type, vp2->type))
4234 	      return false;
4235 
4236 	    basic_block idom1
4237 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4238 	    basic_block idom2
4239 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4240 	    /* If the immediate dominators end in switch stmts, multiple
4241 	       values may end up in the same PHI arg via intermediate
4242 	       CFG merges.  */
4243 	    if (EDGE_COUNT (idom1->succs) != 2
4244 		|| EDGE_COUNT (idom2->succs) != 2)
4245 	      return false;
4246 
4247 	    /* Verify the controlling stmt is the same.  */
4248 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4249 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4250 	    if (! last1 || ! last2)
4251 	      return false;
4252 	    bool inverted_p;
4253 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4254 				      last2, vp2->cclhs, vp2->ccrhs,
4255 				      &inverted_p))
4256 	      return false;
4257 
4258 	    /* Get at true/false controlled edges into the PHI.  */
4259 	    edge te1, te2, fe1, fe2;
4260 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
4261 						       &te1, &fe1)
4262 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
4263 							  &te2, &fe2))
4264 	      return false;
4265 
4266 	    /* Swap edges if the second condition is the inverted of the
4267 	       first.  */
4268 	    if (inverted_p)
4269 	      std::swap (te2, fe2);
4270 
4271 	    /* ???  Handle VN_TOP specially.  */
4272 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4273 				       vp2->phiargs[te2->dest_idx])
4274 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4275 					  vp2->phiargs[fe2->dest_idx]))
4276 	      return false;
4277 
4278 	    return true;
4279 	  }
4280 
4281 	default:
4282 	  return false;
4283 	}
4284     }
4285 
4286   /* If the PHI nodes do not have compatible types
4287      they are not the same.  */
4288   if (!types_compatible_p (vp1->type, vp2->type))
4289     return false;
4290 
4291   /* Any phi in the same block will have its arguments in the
4292      same edge order, because of how we store phi nodes.  */
4293   for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
4294     {
4295       tree phi1op = vp1->phiargs[i];
4296       tree phi2op = vp2->phiargs[i];
4297       if (phi1op == VN_TOP || phi2op == VN_TOP)
4298 	continue;
4299       if (!expressions_equal_p (phi1op, phi2op))
4300 	return false;
4301     }
4302 
4303   return true;
4304 }
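
/* An illustrative example of the two-predecessor case above
   (hypothetical GIMPLE):

     if (a_1 < b_2)           if (a_1 < b_2)
       x_3 = c_4;               y_7 = c_4;
     else                     else
       x_5 = d_6;               y_8 = d_6;
     # x_9 = PHI <x_3, x_5>   # y_10 = PHI <y_7, y_8>

   the two PHIs in different blocks compare equal because their
   controlling conditions match (possibly inverted, with the true/false
   edges swapped) and the arguments on corresponding edges have equal
   value numbers.  */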
4305 
4306 /* Lookup PHI in the current hash table, and return the resulting
4307    value number if it exists in the hash table.  Return NULL_TREE if
4308    it does not exist in the hash table. */
4309 
4310 static tree
4311 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4312 {
4313   vn_phi_s **slot;
4314   struct vn_phi_s *vp1;
4315   edge e;
4316   edge_iterator ei;
4317 
4318   vp1 = XALLOCAVAR (struct vn_phi_s,
4319 		    sizeof (struct vn_phi_s)
4320 		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4321 
4322   /* Canonicalize the SSA_NAME's to their value number.  */
4323   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4324     {
4325       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4326       if (TREE_CODE (def) == SSA_NAME
4327 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4328 	def = SSA_VAL (def);
4329       vp1->phiargs[e->dest_idx] = def;
4330     }
4331   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4332   vp1->block = gimple_bb (phi);
4333   /* Extract values of the controlling condition.  */
4334   vp1->cclhs = NULL_TREE;
4335   vp1->ccrhs = NULL_TREE;
4336   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4337   if (EDGE_COUNT (idom1->succs) == 2)
4338     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4339       {
4340 	/* ???  We want to use SSA_VAL here.  But possibly not
4341 	   allow VN_TOP.  */
4342 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4343 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4344       }
4345   vp1->hashcode = vn_phi_compute_hash (vp1);
4346   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4347   if (!slot)
4348     return NULL_TREE;
4349   return (*slot)->result;
4350 }
4351 
4352 /* Insert PHI into the current hash table with a value number of
4353    RESULT.  */
4354 
4355 static vn_phi_t
4356 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4357 {
4358   vn_phi_s **slot;
4359   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4360 					   sizeof (vn_phi_s)
4361 					   + ((gimple_phi_num_args (phi) - 1)
4362 					      * sizeof (tree)));
4363   edge e;
4364   edge_iterator ei;
4365 
4366   /* Canonicalize the SSA_NAME's to their value number.  */
4367   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4368     {
4369       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4370       if (TREE_CODE (def) == SSA_NAME
4371 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4372 	def = SSA_VAL (def);
4373       vp1->phiargs[e->dest_idx] = def;
4374     }
4375   vp1->value_id = VN_INFO (result)->value_id;
4376   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4377   vp1->block = gimple_bb (phi);
4378   /* Extract values of the controlling condition.  */
4379   vp1->cclhs = NULL_TREE;
4380   vp1->ccrhs = NULL_TREE;
4381   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4382   if (EDGE_COUNT (idom1->succs) == 2)
4383     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4384       {
4385 	/* ???  We want to use SSA_VAL here.  But possibly not
4386 	   allow VN_TOP.  */
4387 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4388 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4389       }
4390   vp1->result = result;
4391   vp1->hashcode = vn_phi_compute_hash (vp1);
4392 
4393   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4394   gcc_assert (!*slot);
4395 
4396   *slot = vp1;
4397   vp1->next = last_inserted_phi;
4398   last_inserted_phi = vp1;
4399   return vp1;
4400 }
4401 
4402 
4403 /* Return true if BB1 is dominated by BB2 taking into account edges
4404    that are not executable.  */
4405 
4406 static bool
4407 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
4408 {
4409   edge_iterator ei;
4410   edge e;
4411 
4412   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4413     return true;
4414 
4415   /* Before iterating we'd like to know if there exists an
4416      (executable) path from bb2 to bb1 at all; if not we can
4417      directly return false.  For now simply iterate once.  */
4418 
4419   /* Iterate to the single executable bb1 predecessor.  */
4420   if (EDGE_COUNT (bb1->preds) > 1)
4421     {
4422       edge prede = NULL;
4423       FOR_EACH_EDGE (e, ei, bb1->preds)
4424 	if (e->flags & EDGE_EXECUTABLE)
4425 	  {
4426 	    if (prede)
4427 	      {
4428 		prede = NULL;
4429 		break;
4430 	      }
4431 	    prede = e;
4432 	  }
4433       if (prede)
4434 	{
4435 	  bb1 = prede->src;
4436 
4437 	  /* Re-do the dominance check with changed bb1.  */
4438 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4439 	    return true;
4440 	}
4441     }
4442 
4443   /* Iterate to the single executable bb2 successor.  */
4444   edge succe = NULL;
4445   FOR_EACH_EDGE (e, ei, bb2->succs)
4446     if (e->flags & EDGE_EXECUTABLE)
4447       {
4448 	if (succe)
4449 	  {
4450 	    succe = NULL;
4451 	    break;
4452 	  }
4453 	succe = e;
4454       }
4455   if (succe)
4456     {
4457       /* Verify the reached block is only reached through succe.
4458 	 If there is only one edge we can spare us the dominator
4459 	 check and iterate directly.  */
4460       if (EDGE_COUNT (succe->dest->preds) > 1)
4461 	{
4462 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
4463 	    if (e != succe
4464 		&& (e->flags & EDGE_EXECUTABLE))
4465 	      {
4466 		succe = NULL;
4467 		break;
4468 	      }
4469 	}
4470       if (succe)
4471 	{
4472 	  bb2 = succe->dest;
4473 
4474 	  /* Re-do the dominance check with changed bb2.  */
4475 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4476 	    return true;
4477 	}
4478     }
4479 
4480   /* We could now iterate updating bb1 / bb2.  */
4481   return false;
4482 }
4483 
4484 /* Set the value number of FROM to TO, return true if it has changed
4485    as a result.  */
4486 
4487 static inline bool
4488 set_ssa_val_to (tree from, tree to)
4489 {
4490   vn_ssa_aux_t from_info = VN_INFO (from);
4491   tree currval = from_info->valnum; // SSA_VAL (from)
4492   poly_int64 toff, coff;
4493   bool curr_undefined = false;
4494   bool curr_invariant = false;
4495 
4496   /* The only thing we allow as value numbers are ssa_names
4497      and invariants.  So assert that here.  We don't allow VN_TOP
4498      as visiting a stmt should produce a value-number other than
4499      that.
4500      ???  Still VN_TOP can happen for unreachable code, so force
4501      it to varying in that case.  Not all code is prepared to
4502      get VN_TOP on valueization.  */
4503   if (to == VN_TOP)
4504     {
4505       /* ???  When iterating and visiting PHI <undef, backedge-value>
4506          for the first time we rightfully get VN_TOP and we need to
4507 	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4508 	 With SCCVN we were simply lucky we iterated the other PHI
4509 	 cycles first and thus visited the backedge-value DEF.  */
4510       if (currval == VN_TOP)
4511 	goto set_and_exit;
4512       if (dump_file && (dump_flags & TDF_DETAILS))
4513 	fprintf (dump_file, "Forcing value number to varying on "
4514 		 "receiving VN_TOP\n");
4515       to = from;
4516     }
4517 
4518   gcc_checking_assert (to != NULL_TREE
4519 		       && ((TREE_CODE (to) == SSA_NAME
4520 			    && (to == from || SSA_VAL (to) == to))
4521 			   || is_gimple_min_invariant (to)));
4522 
4523   if (from != to)
4524     {
4525       if (currval == from)
4526 	{
4527 	  if (dump_file && (dump_flags & TDF_DETAILS))
4528 	    {
4529 	      fprintf (dump_file, "Not changing value number of ");
4530 	      print_generic_expr (dump_file, from);
4531 	      fprintf (dump_file, " from VARYING to ");
4532 	      print_generic_expr (dump_file, to);
4533 	      fprintf (dump_file, "\n");
4534 	    }
4535 	  return false;
4536 	}
4537       curr_invariant = is_gimple_min_invariant (currval);
4538       curr_undefined = (TREE_CODE (currval) == SSA_NAME
4539 			&& ssa_undefined_value_p (currval, false));
4540       if (currval != VN_TOP
4541 	  && !curr_invariant
4542 	  && !curr_undefined
4543 	  && is_gimple_min_invariant (to))
4544 	{
4545 	  if (dump_file && (dump_flags & TDF_DETAILS))
4546 	    {
4547 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4548 		       "value number of ");
4549 	      print_generic_expr (dump_file, from);
4550 	      fprintf (dump_file, " from ");
4551 	      print_generic_expr (dump_file, currval);
4552 	      fprintf (dump_file, " (non-constant) to ");
4553 	      print_generic_expr (dump_file, to);
4554 	      fprintf (dump_file, " (constant)\n");
4555 	    }
4556 	  to = from;
4557 	}
4558       else if (currval != VN_TOP
4559 	       && !curr_undefined
4560 	       && TREE_CODE (to) == SSA_NAME
4561 	       && ssa_undefined_value_p (to, false))
4562 	{
4563 	  if (dump_file && (dump_flags & TDF_DETAILS))
4564 	    {
4565 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4566 		       "value number of ");
4567 	      print_generic_expr (dump_file, from);
4568 	      fprintf (dump_file, " from ");
4569 	      print_generic_expr (dump_file, currval);
4570 	      fprintf (dump_file, " (non-undefined) to ");
4571 	      print_generic_expr (dump_file, to);
4572 	      fprintf (dump_file, " (undefined)\n");
4573 	    }
4574 	  to = from;
4575 	}
4576       else if (TREE_CODE (to) == SSA_NAME
4577 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4578 	to = from;
4579     }
4580 
4581 set_and_exit:
4582   if (dump_file && (dump_flags & TDF_DETAILS))
4583     {
4584       fprintf (dump_file, "Setting value number of ");
4585       print_generic_expr (dump_file, from);
4586       fprintf (dump_file, " to ");
4587       print_generic_expr (dump_file, to);
4588     }
4589 
4590   if (currval != to
4591       && !operand_equal_p (currval, to, 0)
4592       /* Different undefined SSA names are not actually different.  See
4593          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
4594       && !(curr_undefined
4595 	   && TREE_CODE (to) == SSA_NAME
4596 	   && ssa_undefined_value_p (to, false))
4597       /* ???  For addresses involving volatile objects or types operand_equal_p
4598          does not reliably detect ADDR_EXPRs as equal.  We know we are only
4599 	 getting invariant gimple addresses here, so can use
4600 	 get_addr_base_and_unit_offset to do this comparison.  */
4601       && !(TREE_CODE (currval) == ADDR_EXPR
4602 	   && TREE_CODE (to) == ADDR_EXPR
4603 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4604 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4605 	   && known_eq (coff, toff)))
4606     {
4607       if (to != from
4608 	  && currval != VN_TOP
4609 	  && !curr_undefined
4610 	  /* We do not want to allow lattice transitions from one value
4611 	     to another since that may lead to not terminating iteration
4612 	     (see PR95049).  Since there's no convenient way to check
4613 	     for the allowed transition of VAL -> PHI (loop entry value,
4614 	     same on two PHIs, to same PHI result) we restrict the check
4615 	     to invariants.  */
4616 	  && curr_invariant
4617 	  && is_gimple_min_invariant (to))
4618 	{
4619 	  if (dump_file && (dump_flags & TDF_DETAILS))
4620 	    fprintf (dump_file, " forced VARYING");
4621 	  to = from;
4622 	}
4623       if (dump_file && (dump_flags & TDF_DETAILS))
4624 	fprintf (dump_file, " (changed)\n");
4625       from_info->valnum = to;
4626       return true;
4627     }
4628   if (dump_file && (dump_flags & TDF_DETAILS))
4629     fprintf (dump_file, "\n");
4630   return false;
4631 }
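
/* A worked example of the lattice rule above: if x_1 is currently
   value numbered to the constant 1 and a re-visit would compute the
   constant 2, the constant-to-different-constant transition is refused
   and x_1 is forced to VARYING (its own value) instead; this bounds the
   number of lattice changes per name and keeps the iteration
   terminating (see the PR95049 note above).  */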
4632 
4633 /* Set all definitions in STMT to value number to themselves.
4634    Return true if a value number changed. */
4635 
4636 static bool
4637 defs_to_varying (gimple *stmt)
4638 {
4639   bool changed = false;
4640   ssa_op_iter iter;
4641   def_operand_p defp;
4642 
4643   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4644     {
4645       tree def = DEF_FROM_PTR (defp);
4646       changed |= set_ssa_val_to (def, def);
4647     }
4648   return changed;
4649 }
4650 
4651 /* Visit a copy between LHS and RHS, return true if the value number
4652    changed.  */
4653 
4654 static bool
4655 visit_copy (tree lhs, tree rhs)
4656 {
4657   /* Valueize.  */
4658   rhs = SSA_VAL (rhs);
4659 
4660   return set_ssa_val_to (lhs, rhs);
4661 }
4662 
4663 /* Lookup a value for OP in type WIDE_TYPE where the value in the type
4664    of OP is the same.  */
4665 
4666 static tree
4667 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4668 {
4669   if (TREE_CODE (op) == SSA_NAME)
4670     op = vn_valueize (op);
4671 
4672   /* Either we have the op widened available.  */
4673   tree ops[3] = {};
4674   ops[0] = op;
4675   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4676 				       wide_type, ops, NULL);
4677   if (tem)
4678     return tem;
4679 
4680   /* Or the op is truncated from some existing value.  */
4681   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4682     {
4683       gimple *def = SSA_NAME_DEF_STMT (op);
4684       if (is_gimple_assign (def)
4685 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4686 	{
4687 	  tem = gimple_assign_rhs1 (def);
4688 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4689 	    {
4690 	      if (TREE_CODE (tem) == SSA_NAME)
4691 		tem = vn_valueize (tem);
4692 	      return tem;
4693 	    }
4694 	}
4695     }
4696 
4697   /* For constants simply extend it.  */
4698   if (TREE_CODE (op) == INTEGER_CST)
4699     return wide_int_to_tree (wide_type, wi::to_wide (op));
4700 
4701   return NULL_TREE;
4702 }
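
/* For example (sketch): given
     short s_1;
     i_2 = (int) s_1;
   and assuming the conversion statement has already been value numbered
   and inserted into the nary table, valueized_wider_op (int, s_1, ...)
   returns i_2 via the NOP_EXPR lookup; for an INTEGER_CST operand it
   simply returns the constant extended to WIDE_TYPE.  */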
4703 
4704 /* Visit a nary operator RHS, value number it, and return true if the
4705    value number of LHS has changed as a result.  */
4706 
4707 static bool
4708 visit_nary_op (tree lhs, gassign *stmt)
4709 {
4710   vn_nary_op_t vnresult;
4711   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4712   if (! result && vnresult)
4713     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4714   if (result)
4715     return set_ssa_val_to (lhs, result);
4716 
4717   /* Do some special pattern matching for redundancies of operations
4718      in different types.  */
4719   enum tree_code code = gimple_assign_rhs_code (stmt);
4720   tree type = TREE_TYPE (lhs);
4721   tree rhs1 = gimple_assign_rhs1 (stmt);
4722   switch (code)
4723     {
4724     CASE_CONVERT:
4725       /* Match arithmetic done in a different type where we can easily
4726          substitute the result from some earlier sign-changed or widened
4727 	 operation.  */
4728       if (INTEGRAL_TYPE_P (type)
4729 	  && TREE_CODE (rhs1) == SSA_NAME
4730 	  /* We only handle sign-changes, zero-extension -> & mask or
4731 	     sign-extension if we know the inner operation doesn't
4732 	     overflow.  */
4733 	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4734 		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4735 		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4736 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4737 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4738 	{
4739 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4740 	  if (def
4741 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
4742 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
4743 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
4744 	    {
4745 	      tree ops[3] = {};
4746 	      /* When requiring a sign-extension we cannot model a
4747 		 previous truncation with a single op so don't bother.  */
4748 	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4749 	      /* Either we have the op widened available.  */
4750 	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4751 					   allow_truncate);
4752 	      if (ops[0])
4753 		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4754 					     allow_truncate);
4755 	      if (ops[0] && ops[1])
4756 		{
4757 		  ops[0] = vn_nary_op_lookup_pieces
4758 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
4759 		  /* We have wider operation available.  */
4760 		  if (ops[0]
4761 		      /* If the leader is a wrapping operation we can
4762 		         insert it for code hoisting w/o introducing
4763 			 undefined overflow.  If it is not it has to
4764 			 be available.  See PR86554.  */
4765 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4766 			  || (rpo_avail && vn_context_bb
4767 			      && rpo_avail->eliminate_avail (vn_context_bb,
4768 							     ops[0]))))
4769 		    {
4770 		      unsigned lhs_prec = TYPE_PRECISION (type);
4771 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4772 		      if (lhs_prec == rhs_prec
4773 			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4774 			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4775 			{
4776 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4777 						    NOP_EXPR, type, ops[0]);
4778 			  result = vn_nary_build_or_lookup (&match_op);
4779 			  if (result)
4780 			    {
4781 			      bool changed = set_ssa_val_to (lhs, result);
4782 			      vn_nary_op_insert_stmt (stmt, result);
4783 			      return changed;
4784 			    }
4785 			}
4786 		      else
4787 			{
4788 			  tree mask = wide_int_to_tree
4789 			    (type, wi::mask (rhs_prec, false, lhs_prec));
4790 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4791 						    BIT_AND_EXPR,
4792 						    TREE_TYPE (lhs),
4793 						    ops[0], mask);
4794 			  result = vn_nary_build_or_lookup (&match_op);
4795 			  if (result)
4796 			    {
4797 			      bool changed = set_ssa_val_to (lhs, result);
4798 			      vn_nary_op_insert_stmt (stmt, result);
4799 			      return changed;
4800 			    }
4801 			}
4802 		    }
4803 		}
4804 	    }
4805 	}
4806       break;
4807     case BIT_AND_EXPR:
4808       if (INTEGRAL_TYPE_P (type)
4809 	  && TREE_CODE (rhs1) == SSA_NAME
4810 	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4811 	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4812 	  && default_vn_walk_kind != VN_NOWALK
4813 	  && CHAR_BIT == 8
4814 	  && BITS_PER_UNIT == 8
4815 	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4816 	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4817 	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
4818 	{
4819 	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4820 	  if (ass
4821 	      && !gimple_has_volatile_ops (ass)
4822 	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
4823 	    {
4824 	      tree last_vuse = gimple_vuse (ass);
4825 	      tree op = gimple_assign_rhs1 (ass);
4826 	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
4827 						 default_vn_walk_kind,
4828 						 NULL, true, &last_vuse,
4829 						 gimple_assign_rhs2 (stmt));
4830 	      if (result
4831 		  && useless_type_conversion_p (TREE_TYPE (result),
4832 						TREE_TYPE (op)))
4833 		return set_ssa_val_to (lhs, result);
4834 	    }
4835 	}
4836       break;
4837     default:
4838       break;
4839     }
4840 
4841   bool changed = set_ssa_val_to (lhs, lhs);
4842   vn_nary_op_insert_stmt (stmt, lhs);
4843   return changed;
4844 }
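
/* Example of the CASE_CONVERT pattern above (sketch): for
     unsigned short a_1, b_2;
     _3 = a_1 + b_2;
     _4 = (unsigned int) _3;
   if the widened operands and the widened addition are already value
   numbered, say as _5 = (unsigned int) a_1 + (unsigned int) b_2, then _4
   can be value numbered to _5 & 0xffff (the BIT_AND_EXPR branch above),
   avoiding the redundant narrow computation.  */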
4845 
4846 /* Visit a call STMT storing into LHS.  Return true if the value number
4847    of the LHS has changed as a result.  */
4848 
4849 static bool
4850 visit_reference_op_call (tree lhs, gcall *stmt)
4851 {
4852   bool changed = false;
4853   struct vn_reference_s vr1;
4854   vn_reference_t vnresult = NULL;
4855   tree vdef = gimple_vdef (stmt);
4856 
4857   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
4858   if (lhs && TREE_CODE (lhs) != SSA_NAME)
4859     lhs = NULL_TREE;
4860 
4861   vn_reference_lookup_call (stmt, &vnresult, &vr1);
4862   if (vnresult)
4863     {
4864       if (vnresult->result_vdef && vdef)
4865 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4866       else if (vdef)
4867 	/* If the call was discovered to be pure or const reflect
4868 	   that as far as possible.  */
4869 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4870 
4871       if (!vnresult->result && lhs)
4872 	vnresult->result = lhs;
4873 
4874       if (vnresult->result && lhs)
4875 	changed |= set_ssa_val_to (lhs, vnresult->result);
4876     }
4877   else
4878     {
4879       vn_reference_t vr2;
4880       vn_reference_s **slot;
4881       tree vdef_val = vdef;
4882       if (vdef)
4883 	{
4884 	  /* If we value numbered an indirect function's callee to
4885 	     one not clobbering memory, value number its VDEF to its
4886 	     VUSE.  */
4887 	  tree fn = gimple_call_fn (stmt);
4888 	  if (fn && TREE_CODE (fn) == SSA_NAME)
4889 	    {
4890 	      fn = SSA_VAL (fn);
4891 	      if (TREE_CODE (fn) == ADDR_EXPR
4892 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4893 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4894 		      & (ECF_CONST | ECF_PURE)))
4895 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4896 	    }
4897 	  changed |= set_ssa_val_to (vdef, vdef_val);
4898 	}
4899       if (lhs)
4900 	changed |= set_ssa_val_to (lhs, lhs);
4901       vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4902       vr2->vuse = vr1.vuse;
4903       /* As we are not walking the virtual operand chain we know the
4904 	 shared_lookup_references are still original so we can re-use
4905 	 them here.  */
4906       vr2->operands = vr1.operands.copy ();
4907       vr2->type = vr1.type;
4908       vr2->punned = vr1.punned;
4909       vr2->set = vr1.set;
4910       vr2->base_set = vr1.base_set;
4911       vr2->hashcode = vr1.hashcode;
4912       vr2->result = lhs;
4913       vr2->result_vdef = vdef_val;
4914       vr2->value_id = 0;
4915       slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4916 							  INSERT);
4917       gcc_assert (!*slot);
4918       *slot = vr2;
4919       vr2->next = last_inserted_ref;
4920       last_inserted_ref = vr2;
4921     }
4922 
4923   return changed;
4924 }
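
/* For example (sketch, with a hypothetical pure function pure_fn): two calls
     _1 = pure_fn (x_2);   with VUSE .MEM_5
     _3 = pure_fn (x_2);   with VUSE .MEM_5
   with the same valueized arguments and the same VUSE hit the same
   reference hash table entry, so _3 is value numbered to _1; a call whose
   function value numbers to a const or pure FUNCTION_DECL also gets its
   VDEF value numbered to its VUSE.  */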
4925 
4926 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4927    and return true if the value number of the LHS has changed as a result.  */
4928 
4929 static bool
4930 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4931 {
4932   bool changed = false;
4933   tree last_vuse;
4934   tree result;
4935   vn_reference_t res;
4936 
4937   last_vuse = gimple_vuse (stmt);
4938   result = vn_reference_lookup (op, gimple_vuse (stmt),
4939 				default_vn_walk_kind, &res, true, &last_vuse);
4940 
4941   /* We handle type-punning through unions by value-numbering based
4942      on offset and size of the access.  Be prepared to handle a
4943      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
4944   if (result
4945       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4946     {
4947       /* Avoid the type punning in case the result mode has padding where
4948 	 the op we lookup has not.  */
4949       if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
4950 		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
4951 	result = NULL_TREE;
4952       else
4953 	{
4954 	  /* We will be setting the value number of lhs to the value number
4955 	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4956 	     So first simplify and lookup this expression to see if it
4957 	     is already available.  */
4958 	  gimple_match_op res_op (gimple_match_cond::UNCOND,
4959 				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4960 	  result = vn_nary_build_or_lookup (&res_op);
4961 	  if (result
4962 	      && TREE_CODE (result) == SSA_NAME
4963 	      && VN_INFO (result)->needs_insertion)
4964 	    /* Track whether this is the canonical expression for different
4965 	       typed loads.  We use that as a stopgap measure for code
4966 	       hoisting when dealing with floating point loads.  */
4967 	    res->punned = true;
4968 	}
4969 
4970       /* When building the conversion fails avoid inserting the reference
4971          again.  */
4972       if (!result)
4973 	return set_ssa_val_to (lhs, lhs);
4974     }
4975 
4976   if (result)
4977     changed = set_ssa_val_to (lhs, result);
4978   else
4979     {
4980       changed = set_ssa_val_to (lhs, lhs);
4981       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4982     }
4983 
4984   return changed;
4985 }
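
/* For example (sketch of the type-punning case above): for
     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;
   the load of u.f finds the stored value i_1 by offset and size, and f_2
   is value numbered to VIEW_CONVERT_EXPR<float>(i_1), built or looked up
   via vn_nary_build_or_lookup.  */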
4986 
4987 
4988 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4989    and return true if the value number of the LHS has changed as a result.  */
4990 
4991 static bool
4992 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4993 {
4994   bool changed = false;
4995   vn_reference_t vnresult = NULL;
4996   tree assign;
4997   bool resultsame = false;
4998   tree vuse = gimple_vuse (stmt);
4999   tree vdef = gimple_vdef (stmt);
5000 
5001   if (TREE_CODE (op) == SSA_NAME)
5002     op = SSA_VAL (op);
5003 
5004   /* First we want to lookup using the *vuses* from the store and see
5005      if the last store to this location with the same address had
5006      the same value.
5007 
5008      The vuses represent the memory state before the store.  If the
5009      memory state, address, and value of the store is the same as the
5010      last store to this location, then this store will produce the
5011      same memory state as that store.
5012 
5013      In this case the vdef versions for this store are value numbered to those
5014      vuse versions, since they represent the same memory state after
5015      this store.
5016 
5017      Otherwise, the vdefs for the store are used when inserting into
5018      the table, since the store generates a new memory state.  */
5019 
5020   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5021   if (vnresult
5022       && vnresult->result)
5023     {
5024       tree result = vnresult->result;
5025       gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5026 			   || result == SSA_VAL (result));
5027       resultsame = expressions_equal_p (result, op);
5028       if (resultsame)
5029 	{
5030 	  /* If the TBAA state isn't compatible for downstream reads
5031 	     we cannot value-number the VDEFs the same.  */
5032 	  ao_ref lhs_ref;
5033 	  ao_ref_init (&lhs_ref, lhs);
5034 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
5035 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5036 	  if ((vnresult->set != set
5037 	       && ! alias_set_subset_of (set, vnresult->set))
5038 	      || (vnresult->base_set != base_set
5039 		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
5040 	    resultsame = false;
5041 	}
5042     }
5043 
5044   if (!resultsame)
5045     {
5046       /* Only perform the following when being called from PRE
5047 	 which embeds tail merging.  */
5048       if (default_vn_walk_kind == VN_WALK)
5049 	{
5050 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5051 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5052 	  if (vnresult)
5053 	    {
5054 	      VN_INFO (vdef)->visited = true;
5055 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
5056 	    }
5057 	}
5058 
5059       if (dump_file && (dump_flags & TDF_DETAILS))
5060 	{
5061 	  fprintf (dump_file, "No store match\n");
5062 	  fprintf (dump_file, "Value numbering store ");
5063 	  print_generic_expr (dump_file, lhs);
5064 	  fprintf (dump_file, " to ");
5065 	  print_generic_expr (dump_file, op);
5066 	  fprintf (dump_file, "\n");
5067 	}
5068       /* Have to set value numbers before insert, since insert is
5069 	 going to valueize the references in-place.  */
5070       if (vdef)
5071 	changed |= set_ssa_val_to (vdef, vdef);
5072 
5073       /* Do not insert structure copies into the tables.  */
5074       if (is_gimple_min_invariant (op)
5075 	  || is_gimple_reg (op))
5076         vn_reference_insert (lhs, op, vdef, NULL);
5077 
5078       /* Only perform the following when being called from PRE
5079 	 which embeds tail merging.  */
5080       if (default_vn_walk_kind == VN_WALK)
5081 	{
5082 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5083 	  vn_reference_insert (assign, lhs, vuse, vdef);
5084 	}
5085     }
5086   else
5087     {
5088       /* We had a match, so value number the vdef to have the value
5089 	 number of the vuse it came from.  */
5090 
5091       if (dump_file && (dump_flags & TDF_DETAILS))
5092 	fprintf (dump_file, "Store matched earlier value, "
5093 		 "value numbering store vdefs to matching vuses.\n");
5094 
5095       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5096     }
5097 
5098   return changed;
5099 }
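
/* For example (sketch): in
     # .MEM_2 = VDEF <.MEM_1>
     a = x_3;
     # .MEM_4 = VDEF <.MEM_2>
     a = x_3;
   with no intervening store, the second store finds the first one with the
   same address and value (and compatible TBAA state), so .MEM_4 is value
   numbered to the value of .MEM_2, making the second store a candidate for
   later removal as redundant.  */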
5100 
5101 /* Visit and value number PHI, return true if the value number
5102    changed.  When BACKEDGES_VARYING_P is true then assume all
5103    backedge values are varying.  When INSERTED is not NULL then
5104    this is just a look-ahead query for a possible iteration; set INSERTED
5105    to true if we'd insert into the hashtable.  */
5106 
5107 static bool
5108 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5109 {
5110   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5111   tree backedge_val = NULL_TREE;
5112   bool seen_non_backedge = false;
5113   tree sameval_base = NULL_TREE;
5114   poly_int64 soff, doff;
5115   unsigned n_executable = 0;
5116   edge_iterator ei;
5117   edge e;
5118 
5119   /* TODO: We could check for this in initialization, and replace this
5120      with a gcc_assert.  */
5121   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5122     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5123 
5124   /* We track whether a PHI was CSEd to avoid excessive iterations
5125      that would be necessary only because the PHI changed arguments
5126      but not value.  */
5127   if (!inserted)
5128     gimple_set_plf (phi, GF_PLF_1, false);
5129 
5130   /* See if all non-TOP arguments have the same value.  TOP is
5131      equivalent to everything, so we can ignore it.  */
5132   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5133     if (e->flags & EDGE_EXECUTABLE)
5134       {
5135 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5136 
5137 	if (def == PHI_RESULT (phi))
5138 	  continue;
5139 	++n_executable;
5140 	if (TREE_CODE (def) == SSA_NAME)
5141 	  {
5142 	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5143 	      def = SSA_VAL (def);
5144 	    if (e->flags & EDGE_DFS_BACK)
5145 	      backedge_val = def;
5146 	  }
5147 	if (!(e->flags & EDGE_DFS_BACK))
5148 	  seen_non_backedge = true;
5149 	if (def == VN_TOP)
5150 	  ;
5151 	/* Ignore undefined defs for sameval but record one.  */
5152 	else if (TREE_CODE (def) == SSA_NAME
5153 		 && ! virtual_operand_p (def)
5154 		 && ssa_undefined_value_p (def, false))
5155 	  seen_undef = def;
5156 	else if (sameval == VN_TOP)
5157 	  sameval = def;
5158 	else if (!expressions_equal_p (def, sameval))
5159 	  {
5160 	    /* We know we're arriving only with invariant addresses here,
5161 	       try harder comparing them.  We can do some caching here
5162 	       which we cannot do in expressions_equal_p.  */
5163 	    if (TREE_CODE (def) == ADDR_EXPR
5164 		&& TREE_CODE (sameval) == ADDR_EXPR
5165 		&& sameval_base != (void *)-1)
5166 	      {
5167 		if (!sameval_base)
5168 		  sameval_base = get_addr_base_and_unit_offset
5169 				   (TREE_OPERAND (sameval, 0), &soff);
5170 		if (!sameval_base)
5171 		  sameval_base = (tree)(void *)-1;
5172 		else if ((get_addr_base_and_unit_offset
5173 			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
5174 			 && known_eq (soff, doff))
5175 		  continue;
5176 	      }
5177 	    sameval = NULL_TREE;
5178 	    break;
5179 	  }
5180       }
5181 
5182   /* If the value we want to use is flowing over the backedge and we
5183      should take it as VARYING but it has a non-VARYING value drop to
5184      VARYING.
5185      If we value-number a virtual operand never value-number to the
5186      value from the backedge as that confuses the alias-walking code.
5187      See gcc.dg/torture/pr87176.c.  If the value is the same on a
5188      non-backedge everything is OK though.  */
5189   bool visited_p;
5190   if ((backedge_val
5191        && !seen_non_backedge
5192        && TREE_CODE (backedge_val) == SSA_NAME
5193        && sameval == backedge_val
5194        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5195 	   || SSA_VAL (backedge_val) != backedge_val))
5196       /* Do not value-number a virtual operand to sth not visited though
5197 	 given that allows us to escape a region in alias walking.  */
5198       || (sameval
5199 	  && TREE_CODE (sameval) == SSA_NAME
5200 	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5201 	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5202 	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
5203     /* Note this just drops to VARYING without inserting the PHI into
5204        the hashes.  */
5205     result = PHI_RESULT (phi);
5206   /* If none of the edges was executable keep the value-number at VN_TOP,
5207      if only a single edge is executable use its value.  */
5208   else if (n_executable <= 1)
5209     result = seen_undef ? seen_undef : sameval;
5210   /* If we saw only undefined values and VN_TOP use one of the
5211      undefined values.  */
5212   else if (sameval == VN_TOP)
5213     result = seen_undef ? seen_undef : sameval;
5214   /* First see if it is equivalent to a phi node in this block.  We prefer
5215      this as it allows IV elimination - see PRs 66502 and 67167.  */
5216   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5217     {
5218       if (!inserted
5219 	  && TREE_CODE (result) == SSA_NAME
5220 	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5221 	{
5222 	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5223 	  if (dump_file && (dump_flags & TDF_DETAILS))
5224 	    {
5225 	      fprintf (dump_file, "Marking CSEd to PHI node ");
5226 	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5227 				 0, TDF_SLIM);
5228 	      fprintf (dump_file, "\n");
5229 	    }
5230 	}
5231     }
5232   /* If all values are the same use that, unless we've seen undefined
5233      values as well and the value isn't constant.
5234      CCP/copyprop have the same restriction to not remove uninit warnings.  */
5235   else if (sameval
5236 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
5237     result = sameval;
5238   else
5239     {
5240       result = PHI_RESULT (phi);
5241       /* Only insert PHIs that are varying, for constant value numbers
5242          we mess up equivalences otherwise as we are only comparing
5243 	 the immediate controlling predicates.  */
5244       vn_phi_insert (phi, result, backedges_varying_p);
5245       if (inserted)
5246 	*inserted = true;
5247     }
5248 
5249   return set_ssa_val_to (PHI_RESULT (phi), result);
5250 }
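
/* For example (sketch): a PHI whose executable arguments all value number
   to the same thing,
     # x_4 = PHI <y_1(3), y_1(5)>
   gets x_4 value numbered to y_1 (unless undefined arguments were also seen
   and y_1 is not an invariant); a PHI congruent to another PHI in the same
   block is instead CSEd to that PHI's result via vn_phi_lookup.  */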
5251 
5252 /* Try to simplify RHS using equivalences and constant folding.  */
5253 
5254 static tree
5255 try_to_simplify (gassign *stmt)
5256 {
5257   enum tree_code code = gimple_assign_rhs_code (stmt);
5258   tree tem;
5259 
5260   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
5261      in this case, there is no point in doing extra work.  */
5262   if (code == SSA_NAME)
5263     return NULL_TREE;
5264 
5265   /* First try constant folding based on our current lattice.  */
5266   mprts_hook = vn_lookup_simplify_result;
5267   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5268   mprts_hook = NULL;
5269   if (tem
5270       && (TREE_CODE (tem) == SSA_NAME
5271 	  || is_gimple_min_invariant (tem)))
5272     return tem;
5273 
5274   return NULL_TREE;
5275 }
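
/* For example (sketch): with the lattice knowing b_2 == 0, the statement
     a_3 = a_1 + b_2;
   folds via gimple_fold_stmt_to_constant_1 and vn_valueize to a_1, so the
   caller can value number a_3 as a copy of a_1.  */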
5276 
5277 /* Visit and value number STMT, return true if the value number
5278    changed.  */
5279 
5280 static bool
5281 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5282 {
5283   bool changed = false;
5284 
5285   if (dump_file && (dump_flags & TDF_DETAILS))
5286     {
5287       fprintf (dump_file, "Value numbering stmt = ");
5288       print_gimple_stmt (dump_file, stmt, 0);
5289     }
5290 
5291   if (gimple_code (stmt) == GIMPLE_PHI)
5292     changed = visit_phi (stmt, NULL, backedges_varying_p);
5293   else if (gimple_has_volatile_ops (stmt))
5294     changed = defs_to_varying (stmt);
5295   else if (gassign *ass = dyn_cast <gassign *> (stmt))
5296     {
5297       enum tree_code code = gimple_assign_rhs_code (ass);
5298       tree lhs = gimple_assign_lhs (ass);
5299       tree rhs1 = gimple_assign_rhs1 (ass);
5300       tree simplified;
5301 
5302       /* Shortcut for copies. Simplifying copies is pointless,
5303 	 since we copy the expression and value they represent.  */
5304       if (code == SSA_NAME
5305 	  && TREE_CODE (lhs) == SSA_NAME)
5306 	{
5307 	  changed = visit_copy (lhs, rhs1);
5308 	  goto done;
5309 	}
5310       simplified = try_to_simplify (ass);
5311       if (simplified)
5312 	{
5313 	  if (dump_file && (dump_flags & TDF_DETAILS))
5314 	    {
5315 	      fprintf (dump_file, "RHS ");
5316 	      print_gimple_expr (dump_file, ass, 0);
5317 	      fprintf (dump_file, " simplified to ");
5318 	      print_generic_expr (dump_file, simplified);
5319 	      fprintf (dump_file, "\n");
5320 	    }
5321 	}
5322       /* Setting value numbers to constants will occasionally
5323 	 screw up phi congruence because constants are not
5324 	 uniquely associated with a single ssa name that can be
5325 	 looked up.  */
5326       if (simplified
5327 	  && is_gimple_min_invariant (simplified)
5328 	  && TREE_CODE (lhs) == SSA_NAME)
5329 	{
5330 	  changed = set_ssa_val_to (lhs, simplified);
5331 	  goto done;
5332 	}
5333       else if (simplified
5334 	       && TREE_CODE (simplified) == SSA_NAME
5335 	       && TREE_CODE (lhs) == SSA_NAME)
5336 	{
5337 	  changed = visit_copy (lhs, simplified);
5338 	  goto done;
5339 	}
5340 
5341       if ((TREE_CODE (lhs) == SSA_NAME
5342 	   /* We can substitute SSA_NAMEs that are live over
5343 	      abnormal edges with their constant value.  */
5344 	   && !(gimple_assign_copy_p (ass)
5345 		&& is_gimple_min_invariant (rhs1))
5346 	   && !(simplified
5347 		&& is_gimple_min_invariant (simplified))
5348 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5349 	  /* Stores or copies from SSA_NAMEs that are live over
5350 	     abnormal edges are a problem.  */
5351 	  || (code == SSA_NAME
5352 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5353 	changed = defs_to_varying (ass);
5354       else if (REFERENCE_CLASS_P (lhs)
5355 	       || DECL_P (lhs))
5356 	changed = visit_reference_op_store (lhs, rhs1, ass);
5357       else if (TREE_CODE (lhs) == SSA_NAME)
5358 	{
5359 	  if ((gimple_assign_copy_p (ass)
5360 	       && is_gimple_min_invariant (rhs1))
5361 	      || (simplified
5362 		  && is_gimple_min_invariant (simplified)))
5363 	    {
5364 	      if (simplified)
5365 		changed = set_ssa_val_to (lhs, simplified);
5366 	      else
5367 		changed = set_ssa_val_to (lhs, rhs1);
5368 	    }
5369 	  else
5370 	    {
5371 	      /* Visit the original statement.  */
5372 	      switch (vn_get_stmt_kind (ass))
5373 		{
5374 		case VN_NARY:
5375 		  changed = visit_nary_op (lhs, ass);
5376 		  break;
5377 		case VN_REFERENCE:
5378 		  changed = visit_reference_op_load (lhs, rhs1, ass);
5379 		  break;
5380 		default:
5381 		  changed = defs_to_varying (ass);
5382 		  break;
5383 		}
5384 	    }
5385 	}
5386       else
5387 	changed = defs_to_varying (ass);
5388     }
5389   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5390     {
5391       tree lhs = gimple_call_lhs (call_stmt);
5392       if (lhs && TREE_CODE (lhs) == SSA_NAME)
5393 	{
5394 	  /* Try constant folding based on our current lattice.  */
5395 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5396 							    vn_valueize);
5397 	  if (simplified)
5398 	    {
5399 	      if (dump_file && (dump_flags & TDF_DETAILS))
5400 		{
5401 		  fprintf (dump_file, "call ");
5402 		  print_gimple_expr (dump_file, call_stmt, 0);
5403 		  fprintf (dump_file, " simplified to ");
5404 		  print_generic_expr (dump_file, simplified);
5405 		  fprintf (dump_file, "\n");
5406 		}
5407 	    }
5408 	  /* Setting value numbers to constants will occasionally
5409 	     screw up phi congruence because constants are not
5410 	     uniquely associated with a single ssa name that can be
5411 	     looked up.  */
5412 	  if (simplified
5413 	      && is_gimple_min_invariant (simplified))
5414 	    {
5415 	      changed = set_ssa_val_to (lhs, simplified);
5416 	      if (gimple_vdef (call_stmt))
5417 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5418 					   SSA_VAL (gimple_vuse (call_stmt)));
5419 	      goto done;
5420 	    }
5421 	  else if (simplified
5422 		   && TREE_CODE (simplified) == SSA_NAME)
5423 	    {
5424 	      changed = visit_copy (lhs, simplified);
5425 	      if (gimple_vdef (call_stmt))
5426 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5427 					   SSA_VAL (gimple_vuse (call_stmt)));
5428 	      goto done;
5429 	    }
5430 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5431 	    {
5432 	      changed = defs_to_varying (call_stmt);
5433 	      goto done;
5434 	    }
5435 	}
5436 
5437       /* Pick up flags from a devirtualization target.  */
5438       tree fn = gimple_call_fn (stmt);
5439       int extra_fnflags = 0;
5440       if (fn && TREE_CODE (fn) == SSA_NAME)
5441 	{
5442 	  fn = SSA_VAL (fn);
5443 	  if (TREE_CODE (fn) == ADDR_EXPR
5444 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5445 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5446 	}
5447       if (!gimple_call_internal_p (call_stmt)
5448 	  && (/* Calls to the same function with the same vuse
5449 		 and the same operands do not necessarily return the same
5450 		 value, unless they're pure or const.  */
5451 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
5452 	       & (ECF_PURE | ECF_CONST))
5453 	      /* If calls have a vdef, subsequent calls won't have
5454 		 the same incoming vuse.  So, if 2 calls with vdef have the
5455 		 same vuse, we know they're not subsequent.
5456 		 We can value number two calls to the same function with the
5457 		 same vuse and the same operands, which are thus not
5458 		 subsequent, as the same because there is no code in the
5459 		 program that can compare the two values...  */
5460 	      || (gimple_vdef (call_stmt)
5461 		  /* ... unless the call returns a pointer which does
5462 		     not alias with anything else.  In which case the
5463 		     information that the values are distinct are encoded
5464 		     information that the values are distinct is encoded
5465 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5466 		  /* Only perform the following when being called from PRE
5467 		     which embeds tail merging.  */
5468 		  && default_vn_walk_kind == VN_WALK)))
5469 	changed = visit_reference_op_call (lhs, call_stmt);
5470       else
5471 	changed = defs_to_varying (call_stmt);
5472     }
5473   else
5474     changed = defs_to_varying (stmt);
5475  done:
5476   return changed;
5477 }
5478 
5479 
5480 /* Allocate a value number table.  */
5481 
5482 static void
5483 allocate_vn_table (vn_tables_t table, unsigned size)
5484 {
5485   table->phis = new vn_phi_table_type (size);
5486   table->nary = new vn_nary_op_table_type (size);
5487   table->references = new vn_reference_table_type (size);
5488 }
5489 
5490 /* Free a value number table.  */
5491 
5492 static void
5493 free_vn_table (vn_tables_t table)
5494 {
5495   /* Walk over elements and release vectors.  */
5496   vn_reference_iterator_type hir;
5497   vn_reference_t vr;
5498   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5499     vr->operands.release ();
5500   delete table->phis;
5501   table->phis = NULL;
5502   delete table->nary;
5503   table->nary = NULL;
5504   delete table->references;
5505   table->references = NULL;
5506 }
5507 
5508 /* Set *ID according to RESULT.  */
5509 
5510 static void
5511 set_value_id_for_result (tree result, unsigned int *id)
5512 {
5513   if (result && TREE_CODE (result) == SSA_NAME)
5514     *id = VN_INFO (result)->value_id;
5515   else if (result && is_gimple_min_invariant (result))
5516     *id = get_or_alloc_constant_value_id (result);
5517   else
5518     *id = get_next_value_id ();
5519 }
5520 
5521 /* Set the value ids in the valid hash tables.  */
5522 
5523 static void
5524 set_hashtable_value_ids (void)
5525 {
5526   vn_nary_op_iterator_type hin;
5527   vn_phi_iterator_type hip;
5528   vn_reference_iterator_type hir;
5529   vn_nary_op_t vno;
5530   vn_reference_t vr;
5531   vn_phi_t vp;
5532 
5533   /* Now set the value ids of the things we had put in the hash
5534      table.  */
5535 
5536   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5537     if (! vno->predicated_values)
5538       set_value_id_for_result (vno->u.result, &vno->value_id);
5539 
5540   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5541     set_value_id_for_result (vp->result, &vp->value_id);
5542 
5543   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5544 			       hir)
5545     set_value_id_for_result (vr->result, &vr->value_id);
5546 }
5547 
5548 /* Return the maximum value id we have ever seen.  */
5549 
5550 unsigned int
5551 get_max_value_id (void)
5552 {
5553   return next_value_id;
5554 }
5555 
5556 /* Return the next unique value id.  */
5557 
5558 unsigned int
5559 get_next_value_id (void)
5560 {
5561   return next_value_id++;
5562 }
5563 
5564 
5565 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5566 
5567 bool
5568 expressions_equal_p (tree e1, tree e2)
5569 {
5570   /* The obvious case.  */
5571   if (e1 == e2)
5572     return true;
5573 
5574   /* If either one is VN_TOP consider them equal.  */
5575   if (e1 == VN_TOP || e2 == VN_TOP)
5576     return true;
5577 
5578   /* If only one of them is null, they cannot be equal.  */
5579   if (!e1 || !e2)
5580     return false;
5581 
5582   /* Now perform the actual comparison.  */
5583   if (TREE_CODE (e1) == TREE_CODE (e2)
5584       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5585     return true;
5586 
5587   return false;
5588 }
5589 
5590 
5591 /* Return true if the nary operation NARY may trap.  This is a copy
5592    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5593 
5594 bool
5595 vn_nary_may_trap (vn_nary_op_t nary)
5596 {
5597   tree type;
5598   tree rhs2 = NULL_TREE;
5599   bool honor_nans = false;
5600   bool honor_snans = false;
5601   bool fp_operation = false;
5602   bool honor_trapv = false;
5603   bool handled, ret;
5604   unsigned i;
5605 
5606   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5607       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5608       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5609     {
5610       type = nary->type;
5611       fp_operation = FLOAT_TYPE_P (type);
5612       if (fp_operation)
5613 	{
5614 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5615 	  honor_snans = flag_signaling_nans != 0;
5616 	}
5617       else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5618 	honor_trapv = true;
5619     }
5620   if (nary->length >= 2)
5621     rhs2 = nary->op[1];
5622   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5623 				       honor_trapv, honor_nans, honor_snans,
5624 				       rhs2, &handled);
5625   if (handled && ret)
5626     return true;
5627 
5628   for (i = 0; i < nary->length; ++i)
5629     if (tree_could_trap_p (nary->op[i]))
5630       return true;
5631 
5632   return false;
5633 }
5634 
5635 /* Return true if the reference operation REF may trap.  */
5636 
5637 bool
5638 vn_reference_may_trap (vn_reference_t ref)
5639 {
5640   switch (ref->operands[0].opcode)
5641     {
5642     case MODIFY_EXPR:
5643     case CALL_EXPR:
5644       /* We do not handle calls.  */
5645     case ADDR_EXPR:
5646       /* And toplevel address computations never trap.  */
5647       return false;
5648     default:;
5649     }
5650 
5651   vn_reference_op_t op;
5652   unsigned i;
5653   FOR_EACH_VEC_ELT (ref->operands, i, op)
5654     {
5655       switch (op->opcode)
5656 	{
5657 	case WITH_SIZE_EXPR:
5658 	case TARGET_MEM_REF:
5659 	  /* Always variable.  */
5660 	  return true;
5661 	case COMPONENT_REF:
5662 	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5663 	    return true;
5664 	  break;
5665 	case ARRAY_RANGE_REF:
5666 	case ARRAY_REF:
5667 	  if (TREE_CODE (op->op0) == SSA_NAME)
5668 	    return true;
5669 	  break;
5670 	case MEM_REF:
5671 	  /* Nothing interesting in itself, the base is separate.  */
5672 	  break;
5673 	/* The following are the address bases.  */
5674 	case SSA_NAME:
5675 	  return true;
5676 	case ADDR_EXPR:
5677 	  if (op->op0)
5678 	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5679 	  return false;
5680 	default:;
5681 	}
5682     }
5683   return false;
5684 }
5685 
5686 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5687 					    bitmap inserted_exprs_)
5688   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5689     el_todo (0), eliminations (0), insertions (0),
5690     inserted_exprs (inserted_exprs_)
5691 {
5692   need_eh_cleanup = BITMAP_ALLOC (NULL);
5693   need_ab_cleanup = BITMAP_ALLOC (NULL);
5694 }
5695 
5696 eliminate_dom_walker::~eliminate_dom_walker ()
5697 {
5698   BITMAP_FREE (need_eh_cleanup);
5699   BITMAP_FREE (need_ab_cleanup);
5700 }
5701 
5702 /* Return a leader for OP that is available at the current point of the
5703    eliminate domwalk.  */
5704 
5705 tree
5706 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5707 {
5708   tree valnum = VN_INFO (op)->valnum;
5709   if (TREE_CODE (valnum) == SSA_NAME)
5710     {
5711       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5712 	return valnum;
5713       if (avail.length () > SSA_NAME_VERSION (valnum))
5714 	return avail[SSA_NAME_VERSION (valnum)];
5715     }
5716   else if (is_gimple_min_invariant (valnum))
5717     return valnum;
5718   return NULL_TREE;
5719 }
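
/* Sketch of the AVAIL mapping used above: AVAIL is indexed by the SSA
   version of the *value number* and records the SSA name currently acting
   as leader for that value at this point of the domwalk, so looking up OP
   boils down to AVAIL[SSA_NAME_VERSION (VN_INFO (op)->valnum)].  */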
5720 
5721 /* At the current point of the eliminate domwalk make OP available.  */
5722 
5723 void
5724 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5725 {
5726   tree valnum = VN_INFO (op)->valnum;
5727   if (TREE_CODE (valnum) == SSA_NAME)
5728     {
5729       if (avail.length () <= SSA_NAME_VERSION (valnum))
5730 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5731       tree pushop = op;
5732       if (avail[SSA_NAME_VERSION (valnum)])
5733 	pushop = avail[SSA_NAME_VERSION (valnum)];
5734       avail_stack.safe_push (pushop);
5735       avail[SSA_NAME_VERSION (valnum)] = op;
5736     }
5737 }
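
/* Sketch: the previous leader for the value (if any) is pushed onto
   AVAIL_STACK so that it can be restored when the domwalk leaves the
   dominated region, giving scoped, dominator-based availability.  */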
5738 
5739 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
5740    the leader for the expression if insertion was successful.  */
5741 
5742 tree
5743 eliminate_dom_walker::eliminate_insert (basic_block bb,
5744 					gimple_stmt_iterator *gsi, tree val)
5745 {
5746   /* We can insert a sequence with a single assignment only.  */
5747   gimple_seq stmts = VN_INFO (val)->expr;
5748   if (!gimple_seq_singleton_p (stmts))
5749     return NULL_TREE;
5750   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5751   if (!stmt
5752       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5753 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5754 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5755 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5756 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5757     return NULL_TREE;
5758 
5759   tree op = gimple_assign_rhs1 (stmt);
5760   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5761       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5762     op = TREE_OPERAND (op, 0);
5763   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5764   if (!leader)
5765     return NULL_TREE;
5766 
5767   tree res;
5768   stmts = NULL;
5769   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5770     res = gimple_build (&stmts, BIT_FIELD_REF,
5771 			TREE_TYPE (val), leader,
5772 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5773 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5774   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5775     res = gimple_build (&stmts, BIT_AND_EXPR,
5776 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5777   else
5778     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5779 			TREE_TYPE (val), leader);
5780   if (TREE_CODE (res) != SSA_NAME
5781       || SSA_NAME_IS_DEFAULT_DEF (res)
5782       || gimple_bb (SSA_NAME_DEF_STMT (res)))
5783     {
5784       gimple_seq_discard (stmts);
5785 
5786       /* During propagation we have to treat SSA info conservatively
5787          and thus we can end up simplifying the inserted expression
5788 	 at elimination time to sth not defined in stmts.  */
5789       /* But then this is a redundancy we failed to detect.  Which means
5790          res now has two values.  That doesn't play well with how
5791 	 we track availability here, so give up.  */
5792       if (dump_file && (dump_flags & TDF_DETAILS))
5793 	{
5794 	  if (TREE_CODE (res) == SSA_NAME)
5795 	    res = eliminate_avail (bb, res);
5796 	  if (res)
5797 	    {
5798 	      fprintf (dump_file, "Failed to insert expression for value ");
5799 	      print_generic_expr (dump_file, val);
5800 	      fprintf (dump_file, " which is really fully redundant to ");
5801 	      print_generic_expr (dump_file, res);
5802 	      fprintf (dump_file, "\n");
5803 	    }
5804 	}
5805 
5806       return NULL_TREE;
5807     }
5808   else
5809     {
5810       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5811       VN_INFO (res)->valnum = val;
5812       VN_INFO (res)->visited = true;
5813     }
5814 
5815   insertions++;
5816   if (dump_file && (dump_flags & TDF_DETAILS))
5817     {
5818       fprintf (dump_file, "Inserted ");
5819       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5820     }
5821 
5822   return res;
5823 }
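
/* For example (sketch): if SCCVN recorded that value VAL is computed by a
   single conversion like
     _5 = (int) x_1;
   but no leader for it is available here, eliminate_insert rebuilds the
   conversion from the available leader of x_1 and inserts it before GSI,
   so the redundant computation downstream can still be replaced.  */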
5824 
5825 void
5826 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5827 {
5828   tree sprime = NULL_TREE;
5829   gimple *stmt = gsi_stmt (*gsi);
5830   tree lhs = gimple_get_lhs (stmt);
5831   if (lhs && TREE_CODE (lhs) == SSA_NAME
5832       && !gimple_has_volatile_ops (stmt)
5833       /* See PR43491.  Do not replace a global register variable when
5834 	 it is the RHS of an assignment.  Do replace local register
5835 	 variables since gcc does not guarantee a local variable will
5836 	 be allocated in a register.
5837 	 ???  The fix isn't effective here.  This should instead
5838 	 be ensured by not value-numbering them the same but treating
5839 	 them like volatiles?  */
5840       && !(gimple_assign_single_p (stmt)
5841 	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5842 	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5843 	       && is_global_var (gimple_assign_rhs1 (stmt)))))
5844     {
5845       sprime = eliminate_avail (b, lhs);
5846       if (!sprime)
5847 	{
5848 	  /* If there is no existing usable leader but SCCVN thinks
5849 	     it has an expression it wants to use as replacement,
5850 	     insert that.  */
5851 	  tree val = VN_INFO (lhs)->valnum;
5852 	  if (val != VN_TOP
5853 	      && TREE_CODE (val) == SSA_NAME
5854 	      && VN_INFO (val)->needs_insertion
5855 	      && VN_INFO (val)->expr != NULL
5856 	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5857 	    eliminate_push_avail (b, sprime);
5858 	}
5859 
5860       /* If this now constitutes a copy duplicate points-to
5861 	 and range info appropriately.  This is especially
5862 	 important for inserted code.  See tree-ssa-copy.c
5863 	 for similar code.  */
5864       if (sprime
5865 	  && TREE_CODE (sprime) == SSA_NAME)
5866 	{
5867 	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5868 	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
5869 	      && SSA_NAME_PTR_INFO (lhs)
5870 	      && ! SSA_NAME_PTR_INFO (sprime))
5871 	    {
5872 	      duplicate_ssa_name_ptr_info (sprime,
5873 					   SSA_NAME_PTR_INFO (lhs));
5874 	      if (b != sprime_b)
5875 		reset_flow_sensitive_info (sprime);
5876 	    }
5877 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5878 		   && SSA_NAME_RANGE_INFO (lhs)
5879 		   && ! SSA_NAME_RANGE_INFO (sprime)
5880 		   && b == sprime_b)
5881 	    duplicate_ssa_name_range_info (sprime,
5882 					   SSA_NAME_RANGE_TYPE (lhs),
5883 					   SSA_NAME_RANGE_INFO (lhs));
5884 	}
5885 
5886       /* Inhibit the use of an inserted PHI on a loop header when
5887 	 the address of the memory reference is a simple induction
5888 	 variable.  In other cases the vectorizer won't do anything
5889 	 anyway (either it's loop invariant or a complicated
5890 	 expression).  */
5891       if (sprime
5892 	  && TREE_CODE (sprime) == SSA_NAME
5893 	  && do_pre
5894 	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5895 	  && loop_outer (b->loop_father)
5896 	  && has_zero_uses (sprime)
5897 	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5898 	  && gimple_assign_load_p (stmt))
5899 	{
5900 	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5901 	  basic_block def_bb = gimple_bb (def_stmt);
5902 	  if (gimple_code (def_stmt) == GIMPLE_PHI
5903 	      && def_bb->loop_father->header == def_bb)
5904 	    {
5905 	      loop_p loop = def_bb->loop_father;
5906 	      ssa_op_iter iter;
5907 	      tree op;
5908 	      bool found = false;
5909 	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5910 		{
5911 		  affine_iv iv;
5912 		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5913 		  if (def_bb
5914 		      && flow_bb_inside_loop_p (loop, def_bb)
5915 		      && simple_iv (loop, loop, op, &iv, true))
5916 		    {
5917 		      found = true;
5918 		      break;
5919 		    }
5920 		}
5921 	      if (found)
5922 		{
5923 		  if (dump_file && (dump_flags & TDF_DETAILS))
5924 		    {
5925 		      fprintf (dump_file, "Not replacing ");
5926 		      print_gimple_expr (dump_file, stmt, 0);
5927 		      fprintf (dump_file, " with ");
5928 		      print_generic_expr (dump_file, sprime);
5929 		      fprintf (dump_file, " which would add a loop"
5930 			       " carried dependence to loop %d\n",
5931 			       loop->num);
5932 		    }
5933 		  /* Don't keep sprime available.  */
5934 		  sprime = NULL_TREE;
5935 		}
5936 	    }
5937 	}
5938 
5939       if (sprime)
5940 	{
5941 	  /* If we can propagate the value computed for LHS into
5942 	     all uses don't bother doing anything with this stmt.  */
5943 	  if (may_propagate_copy (lhs, sprime))
5944 	    {
5945 	      /* Mark it for removal.  */
5946 	      to_remove.safe_push (stmt);
5947 
5948 	      /* ???  Don't count copy/constant propagations.  */
5949 	      if (gimple_assign_single_p (stmt)
5950 		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5951 		      || gimple_assign_rhs1 (stmt) == sprime))
5952 		return;
5953 
5954 	      if (dump_file && (dump_flags & TDF_DETAILS))
5955 		{
5956 		  fprintf (dump_file, "Replaced ");
5957 		  print_gimple_expr (dump_file, stmt, 0);
5958 		  fprintf (dump_file, " with ");
5959 		  print_generic_expr (dump_file, sprime);
5960 		  fprintf (dump_file, " in all uses of ");
5961 		  print_gimple_stmt (dump_file, stmt, 0);
5962 		}
5963 
5964 	      eliminations++;
5965 	      return;
5966 	    }
5967 
5968 	  /* If this is an assignment from our leader (which
5969 	     happens in the case the value-number is a constant)
5970 	     then there is nothing to do.  Likewise if we run into
5971 	     inserted code that needed a conversion because of
5972 	     our type-agnostic value-numbering of loads.  */
5973 	  if ((gimple_assign_single_p (stmt)
5974 	       || (is_gimple_assign (stmt)
5975 		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5976 		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
5977 	      && sprime == gimple_assign_rhs1 (stmt))
5978 	    return;
5979 
5980 	  /* Else replace its RHS.  */
5981 	  if (dump_file && (dump_flags & TDF_DETAILS))
5982 	    {
5983 	      fprintf (dump_file, "Replaced ");
5984 	      print_gimple_expr (dump_file, stmt, 0);
5985 	      fprintf (dump_file, " with ");
5986 	      print_generic_expr (dump_file, sprime);
5987 	      fprintf (dump_file, " in ");
5988 	      print_gimple_stmt (dump_file, stmt, 0);
5989 	    }
5990 	  eliminations++;
5991 
5992 	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
5993 					 && stmt_can_make_abnormal_goto (stmt));
5994 	  gimple *orig_stmt = stmt;
5995 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
5996 					  TREE_TYPE (sprime)))
5997 	    {
5998 	      /* We preserve conversions to but not from function or method
5999 		 types.  This asymmetry makes it necessary to re-instantiate
6000 		 conversions here.  */
6001 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
6002 		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6003 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
6004 	      else
6005 		gcc_unreachable ();
6006 	    }
6007 	  tree vdef = gimple_vdef (stmt);
6008 	  tree vuse = gimple_vuse (stmt);
6009 	  propagate_tree_value_into_stmt (gsi, sprime);
6010 	  stmt = gsi_stmt (*gsi);
6011 	  update_stmt (stmt);
6012 	  /* In case the VDEF on the original stmt was released, value-number
6013 	     it to the VUSE.  This is to make vuse_ssa_val able to skip
6014 	     released virtual operands.  */
6015 	  if (vdef != gimple_vdef (stmt))
6016 	    {
6017 	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6018 	      VN_INFO (vdef)->valnum = vuse;
6019 	    }
6020 
6021 	  /* If we removed EH side-effects from the statement, clean
6022 	     its EH information.  */
6023 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6024 	    {
6025 	      bitmap_set_bit (need_eh_cleanup,
6026 			      gimple_bb (stmt)->index);
6027 	      if (dump_file && (dump_flags & TDF_DETAILS))
6028 		fprintf (dump_file, "  Removed EH side-effects.\n");
6029 	    }
6030 
6031 	  /* Likewise for AB side-effects.  */
6032 	  if (can_make_abnormal_goto
6033 	      && !stmt_can_make_abnormal_goto (stmt))
6034 	    {
6035 	      bitmap_set_bit (need_ab_cleanup,
6036 			      gimple_bb (stmt)->index);
6037 	      if (dump_file && (dump_flags & TDF_DETAILS))
6038 		fprintf (dump_file, "  Removed AB side-effects.\n");
6039 	    }
6040 
6041 	  return;
6042 	}
6043     }
6044 
6045   /* If the statement is a scalar store, see if the expression
6046      has the same value number as its rhs.  If so, the store is
6047      dead.  */
6048   if (gimple_assign_single_p (stmt)
6049       && !gimple_has_volatile_ops (stmt)
6050       && !is_gimple_reg (gimple_assign_lhs (stmt))
6051       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6052 	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6053     {
6054       tree rhs = gimple_assign_rhs1 (stmt);
6055       vn_reference_t vnresult;
6056       /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
6057          typed load of a byte known to be 0x11 as 1 so a store of
6058 	 a boolean 1 is detected as redundant.  Because of this we
6059 	 have to make sure to look up with a ref whose size
6060 	 matches the precision.  */
6061       tree lookup_lhs = lhs;
6062       if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6063 	  && (TREE_CODE (lhs) != COMPONENT_REF
6064 	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6065 	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6066 	{
6067 	  if (TREE_CODE (lhs) == COMPONENT_REF
6068 	      || TREE_CODE (lhs) == MEM_REF)
6069 	    {
6070 	      tree ltype = build_nonstandard_integer_type
6071 				(TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6072 				 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6073 	      if (TREE_CODE (lhs) == COMPONENT_REF)
6074 		{
6075 		  tree foff = component_ref_field_offset (lhs);
6076 		  tree f = TREE_OPERAND (lhs, 1);
6077 		  if (!poly_int_tree_p (foff))
6078 		    lookup_lhs = NULL_TREE;
6079 		  else
6080 		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6081 					 TREE_OPERAND (lhs, 0),
6082 					 TYPE_SIZE (TREE_TYPE (lhs)),
6083 					 bit_from_pos
6084 					   (foff, DECL_FIELD_BIT_OFFSET (f)));
6085 		}
6086 	      else
6087 		lookup_lhs = build2 (MEM_REF, ltype,
6088 				     TREE_OPERAND (lhs, 0),
6089 				     TREE_OPERAND (lhs, 1));
6090 	    }
6091 	  else
6092 	    lookup_lhs = NULL_TREE;
6093 	}
6094       tree val = NULL_TREE;
6095       if (lookup_lhs)
6096 	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6097 				   VN_WALKREWRITE, &vnresult, false);
6098       if (TREE_CODE (rhs) == SSA_NAME)
6099 	rhs = VN_INFO (rhs)->valnum;
6100       if (val
6101 	  && (operand_equal_p (val, rhs, 0)
6102 	      /* Due to the bitfield lookups above we can get bit
6103 		 interpretations of the same RHS as values here.  Those
6104 		 are redundant as well.  */
6105 	      || (TREE_CODE (val) == SSA_NAME
6106 		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6107 		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6108 		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
6109 		  && TREE_OPERAND (val, 0) == rhs)))
6110 	{
6111 	  /* We can only remove the later store if the former aliases
6112 	     at least all accesses the later one does or if the store
6113 	     was to readonly memory storing the same value.  */
6114 	  ao_ref lhs_ref;
6115 	  ao_ref_init (&lhs_ref, lhs);
6116 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
6117 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6118 	  if (! vnresult
6119 	      || ((vnresult->set == set
6120 		   || alias_set_subset_of (set, vnresult->set))
6121 		  && (vnresult->base_set == base_set
6122 		      || alias_set_subset_of (base_set, vnresult->base_set))))
6123 	    {
6124 	      if (dump_file && (dump_flags & TDF_DETAILS))
6125 		{
6126 		  fprintf (dump_file, "Deleted redundant store ");
6127 		  print_gimple_stmt (dump_file, stmt, 0);
6128 		}
6129 
6130 	      /* Queue stmt for removal.  */
6131 	      to_remove.safe_push (stmt);
6132 	      return;
6133 	    }
6134 	}
6135     }
6136 
6137   /* If this is a control statement and value numbering left one of its
6138      outgoing edges not executable, force the condition in a way
6139      consistent with that.  */
6140   if (gcond *cond = dyn_cast <gcond *> (stmt))
6141     {
6142       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6143 	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6144 	{
6145 	  if (dump_file && (dump_flags & TDF_DETAILS))
6146 	    {
6147 	      fprintf (dump_file, "Removing unexecutable edge from ");
6148 	      print_gimple_stmt (dump_file, stmt, 0);
6149 	    }
6150 	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6151 	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6152 	    gimple_cond_make_true (cond);
6153 	  else
6154 	    gimple_cond_make_false (cond);
6155 	  update_stmt (cond);
6156 	  el_todo |= TODO_cleanup_cfg;
6157 	  return;
6158 	}
6159     }
6160 
6161   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6162   bool was_noreturn = (is_gimple_call (stmt)
6163 		       && gimple_call_noreturn_p (stmt));
6164   tree vdef = gimple_vdef (stmt);
6165   tree vuse = gimple_vuse (stmt);
6166 
6167   /* If we didn't replace the whole stmt (or propagate the result
6168      into all uses), replace all uses on this stmt with their
6169      leaders.  */
6170   bool modified = false;
6171   use_operand_p use_p;
6172   ssa_op_iter iter;
6173   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6174     {
6175       tree use = USE_FROM_PTR (use_p);
6176       /* ???  The call code above leaves stmt operands un-updated.  */
6177       if (TREE_CODE (use) != SSA_NAME)
6178 	continue;
6179       tree sprime;
6180       if (SSA_NAME_IS_DEFAULT_DEF (use))
6181 	/* ???  For default defs BB shouldn't matter, but we have to
6182 	   solve the inconsistency between rpo eliminate and
6183 	   dom eliminate avail valueization first.  */
6184 	sprime = eliminate_avail (b, use);
6185       else
6186 	/* Look for sth available at the definition block of the argument.
6187 	   This avoids inconsistencies between availability there which
6188 	   decides if the stmt can be removed and availability at the
6189 	   use site.  The SSA property ensures that things available
6190 	   at the definition are also available at uses.  */
6191 	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6192       if (sprime && sprime != use
6193 	  && may_propagate_copy (use, sprime)
6194 	  /* We substitute into debug stmts to avoid excessive
6195 	     debug temporaries created by removed stmts, but we need
6196 	     to avoid doing so for inserted sprimes as we never want
6197 	     to create debug temporaries for them.  */
6198 	  && (!inserted_exprs
6199 	      || TREE_CODE (sprime) != SSA_NAME
6200 	      || !is_gimple_debug (stmt)
6201 	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6202 	{
6203 	  propagate_value (use_p, sprime);
6204 	  modified = true;
6205 	}
6206     }
6207 
6208   /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
6209      into which is a requirement for the IPA devirt machinery.  */
6210   gimple *old_stmt = stmt;
6211   if (modified)
6212     {
6213       /* If a formerly non-invariant ADDR_EXPR is turned into an
6214 	 invariant one it was on a separate stmt.  */
6215       if (gimple_assign_single_p (stmt)
6216 	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6217 	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6218       gimple_stmt_iterator prev = *gsi;
6219       gsi_prev (&prev);
6220       if (fold_stmt (gsi))
6221 	{
6222 	  /* fold_stmt may have created new stmts in between
6223 	     the previous stmt and the folded stmt.  Mark
6224 	     all defs created there as varying to not confuse
6225 	     the SCCVN machinery as we're using that even during
6226 	     elimination.  */
6227 	  if (gsi_end_p (prev))
6228 	    prev = gsi_start_bb (b);
6229 	  else
6230 	    gsi_next (&prev);
6231 	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
6232 	    do
6233 	      {
6234 		tree def;
6235 		ssa_op_iter dit;
6236 		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6237 					   dit, SSA_OP_ALL_DEFS)
6238 		    /* As existing DEFs may move between stmts
6239 		       only process new ones.  */
6240 		    if (! has_VN_INFO (def))
6241 		      {
6242 			VN_INFO (def)->valnum = def;
6243 			VN_INFO (def)->visited = true;
6244 		      }
6245 		if (gsi_stmt (prev) == gsi_stmt (*gsi))
6246 		  break;
6247 		gsi_next (&prev);
6248 	      }
6249 	    while (1);
6250 	}
6251       stmt = gsi_stmt (*gsi);
6252       /* In case we folded the stmt away schedule the NOP for removal.  */
6253       if (gimple_nop_p (stmt))
6254 	to_remove.safe_push (stmt);
6255     }
6256 
6257   /* Visit indirect calls and turn them into direct calls if
6258      possible using the devirtualization machinery.  Do this before
6259      checking for required EH/abnormal/noreturn cleanup as devirt
6260      may expose more of those.  */
6261   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6262     {
6263       tree fn = gimple_call_fn (call_stmt);
6264       if (fn
6265 	  && flag_devirtualize
6266 	  && virtual_method_call_p (fn))
6267 	{
6268 	  tree otr_type = obj_type_ref_class (fn);
6269 	  unsigned HOST_WIDE_INT otr_tok
6270 	      = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6271 	  tree instance;
6272 	  ipa_polymorphic_call_context context (current_function_decl,
6273 						fn, stmt, &instance);
6274 	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6275 				    otr_type, stmt, NULL);
6276 	  bool final;
6277 	  vec <cgraph_node *> targets
6278 	      = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6279 						   otr_tok, context, &final);
6280 	  if (dump_file)
6281 	    dump_possible_polymorphic_call_targets (dump_file,
6282 						    obj_type_ref_class (fn),
6283 						    otr_tok, context);
6284 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
6285 	    {
6286 	      tree fn;
6287 	      if (targets.length () == 1)
6288 		fn = targets[0]->decl;
6289 	      else
6290 		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6291 	      if (dump_enabled_p ())
6292 		{
6293 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6294 				   "converting indirect call to "
6295 				   "function %s\n",
6296 				   lang_hooks.decl_printable_name (fn, 2));
6297 		}
6298 	      gimple_call_set_fndecl (call_stmt, fn);
6299 	      /* If changing the call to __builtin_unreachable
6300 		 or similar noreturn function, adjust gimple_call_fntype
6301 		 too.  */
6302 	      if (gimple_call_noreturn_p (call_stmt)
6303 		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6304 		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
6305 		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6306 		      == void_type_node))
6307 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6308 	      maybe_remove_unused_call_args (cfun, call_stmt);
6309 	      modified = true;
6310 	    }
6311 	}
6312     }
6313 
6314   if (modified)
6315     {
6316       /* When changing a call into a noreturn call, cfg cleanup
6317 	 is needed to fix up the noreturn call.  */
6318       if (!was_noreturn
6319 	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6320 	to_fixup.safe_push (stmt);
6321       /* When changing a condition or switch into one we know what
6322 	 edge will be executed, schedule a cfg cleanup.  */
6323       if ((gimple_code (stmt) == GIMPLE_COND
6324 	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
6325 	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
6326 	  || (gimple_code (stmt) == GIMPLE_SWITCH
6327 	      && TREE_CODE (gimple_switch_index
6328 			    (as_a <gswitch *> (stmt))) == INTEGER_CST))
6329 	el_todo |= TODO_cleanup_cfg;
6330       /* If we removed EH side-effects from the statement, clean
6331 	 its EH information.  */
6332       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6333 	{
6334 	  bitmap_set_bit (need_eh_cleanup,
6335 			  gimple_bb (stmt)->index);
6336 	  if (dump_file && (dump_flags & TDF_DETAILS))
6337 	    fprintf (dump_file, "  Removed EH side-effects.\n");
6338 	}
6339       /* Likewise for AB side-effects.  */
6340       if (can_make_abnormal_goto
6341 	  && !stmt_can_make_abnormal_goto (stmt))
6342 	{
6343 	  bitmap_set_bit (need_ab_cleanup,
6344 			  gimple_bb (stmt)->index);
6345 	  if (dump_file && (dump_flags & TDF_DETAILS))
6346 	    fprintf (dump_file, "  Removed AB side-effects.\n");
6347 	}
6348       update_stmt (stmt);
6349       /* In case the VDEF on the original stmt was released, value-number
6350          it to the VUSE.  This is to make vuse_ssa_val able to skip
6351 	 released virtual operands.  */
6352       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6353 	VN_INFO (vdef)->valnum = vuse;
6354     }
6355 
6356   /* Make new values available - for fully redundant LHS we
6357      continue with the next stmt above and skip this.  */
6358   def_operand_p defp;
6359   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6360     eliminate_push_avail (b, DEF_FROM_PTR (defp));
6361 }
6362 
6363 /* Perform elimination for the basic-block B during the domwalk.  */
6364 
6365 edge
6366 eliminate_dom_walker::before_dom_children (basic_block b)
6367 {
6368   /* Mark new bb.  */
6369   avail_stack.safe_push (NULL_TREE);
6370 
6371   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
6372   if (!(b->flags & BB_EXECUTABLE))
6373     return NULL;
6374 
6375   vn_context_bb = b;
6376 
6377   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6378     {
6379       gphi *phi = gsi.phi ();
6380       tree res = PHI_RESULT (phi);
6381 
6382       if (virtual_operand_p (res))
6383 	{
6384 	  gsi_next (&gsi);
6385 	  continue;
6386 	}
6387 
6388       tree sprime = eliminate_avail (b, res);
6389       if (sprime
6390 	  && sprime != res)
6391 	{
6392 	  if (dump_file && (dump_flags & TDF_DETAILS))
6393 	    {
6394 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
6395 	      print_generic_expr (dump_file, res);
6396 	      fprintf (dump_file, " with ");
6397 	      print_generic_expr (dump_file, sprime);
6398 	      fprintf (dump_file, "\n");
6399 	    }
6400 
6401 	  /* If we inserted this PHI node ourselves, it's not an elimination.  */
6402 	  if (! inserted_exprs
6403 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6404 	    eliminations++;
6405 
6406 	  /* If we will propagate into all uses don't bother to do
6407 	     anything.  */
6408 	  if (may_propagate_copy (res, sprime))
6409 	    {
6410 	      /* Mark the PHI for removal.  */
6411 	      to_remove.safe_push (phi);
6412 	      gsi_next (&gsi);
6413 	      continue;
6414 	    }
6415 
6416 	  remove_phi_node (&gsi, false);
6417 
6418 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6419 	    sprime = fold_convert (TREE_TYPE (res), sprime);
6420 	  gimple *stmt = gimple_build_assign (res, sprime);
6421 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6422 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6423 	  continue;
6424 	}
6425 
6426       eliminate_push_avail (b, res);
6427       gsi_next (&gsi);
6428     }
6429 
6430   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6431        !gsi_end_p (gsi);
6432        gsi_next (&gsi))
6433     eliminate_stmt (b, &gsi);
6434 
6435   /* Replace destination PHI arguments.  */
6436   edge_iterator ei;
6437   edge e;
6438   FOR_EACH_EDGE (e, ei, b->succs)
6439     if (e->flags & EDGE_EXECUTABLE)
6440       for (gphi_iterator gsi = gsi_start_phis (e->dest);
6441 	   !gsi_end_p (gsi);
6442 	   gsi_next (&gsi))
6443 	{
6444 	  gphi *phi = gsi.phi ();
6445 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6446 	  tree arg = USE_FROM_PTR (use_p);
6447 	  if (TREE_CODE (arg) != SSA_NAME
6448 	      || virtual_operand_p (arg))
6449 	    continue;
6450 	  tree sprime = eliminate_avail (b, arg);
6451 	  if (sprime && may_propagate_copy (arg, sprime))
6452 	    propagate_value (use_p, sprime);
6453 	}
6454 
6455   vn_context_bb = NULL;
6456 
6457   return NULL;
6458 }
6459 
6460 /* Make no longer available leaders no longer available.  */
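/* The avail_stack is delimited by NULL_TREE markers pushed in
   before_dom_children.  Popping back to the marker undoes the leader
   registrations made while processing the children, either clearing a
   slot or restoring the leader that was shadowed.  */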
6461 
6462 void
6463 eliminate_dom_walker::after_dom_children (basic_block)
6464 {
6465   tree entry;
6466   while ((entry = avail_stack.pop ()) != NULL_TREE)
6467     {
6468       tree valnum = VN_INFO (entry)->valnum;
6469       tree old = avail[SSA_NAME_VERSION (valnum)];
6470       if (old == entry)
6471 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6472       else
6473 	avail[SSA_NAME_VERSION (valnum)] = entry;
6474     }
6475 }
6476 
6477 /* Remove queued stmts and perform delayed cleanups.  */
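/* When operating on a region (REGION_P) eliminated defs may still have
   uses outside of the region, so instead of removing such stmts outright
   they are turned into or kept alive as copies from their available
   leader.  */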
6478 
6479 unsigned
6480 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6481 {
6482   statistics_counter_event (cfun, "Eliminated", eliminations);
6483   statistics_counter_event (cfun, "Insertions", insertions);
6484 
6485   /* We cannot remove stmts during BB walk, especially not release SSA
6486      names there as this confuses the VN machinery.  The stmts ending
6487      up in to_remove are either stores or simple copies.
6488      Remove stmts in reverse order to make debug stmt creation possible.  */
6489   while (!to_remove.is_empty ())
6490     {
6491       bool do_release_defs = true;
6492       gimple *stmt = to_remove.pop ();
6493 
6494       /* When we are value-numbering a region we do not require exit PHIs to
6495 	 be present so we have to make sure to deal with uses outside of the
6496 	 region of stmts that we thought are eliminated.
6497 	 ??? Note we may be confused by uses in dead regions we didn't run
6498 	 elimination on.  Rather than checking individual uses we accept
6499 	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6500 	 contains such an example).  */
6501       if (region_p)
6502 	{
6503 	  if (gphi *phi = dyn_cast <gphi *> (stmt))
6504 	    {
6505 	      tree lhs = gimple_phi_result (phi);
6506 	      if (!has_zero_uses (lhs))
6507 		{
6508 		  if (dump_file && (dump_flags & TDF_DETAILS))
6509 		    fprintf (dump_file, "Keeping eliminated stmt live "
6510 			     "as copy because of out-of-region uses\n");
6511 		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6512 		  gimple *copy = gimple_build_assign (lhs, sprime);
6513 		  gimple_stmt_iterator gsi
6514 		    = gsi_after_labels (gimple_bb (stmt));
6515 		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6516 		  do_release_defs = false;
6517 		}
6518 	    }
6519 	  else if (tree lhs = gimple_get_lhs (stmt))
6520 	    if (TREE_CODE (lhs) == SSA_NAME
6521 		&& !has_zero_uses (lhs))
6522 	      {
6523 		if (dump_file && (dump_flags & TDF_DETAILS))
6524 		  fprintf (dump_file, "Keeping eliminated stmt live "
6525 			   "as copy because of out-of-region uses\n");
6526 		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6527 		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6528 		if (is_gimple_assign (stmt))
6529 		  {
6530 		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
6531 		    stmt = gsi_stmt (gsi);
6532 		    update_stmt (stmt);
6533 		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6534 		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6535 		    continue;
6536 		  }
6537 		else
6538 		  {
6539 		    gimple *copy = gimple_build_assign (lhs, sprime);
6540 		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6541 		    do_release_defs = false;
6542 		  }
6543 	      }
6544 	}
6545 
6546       if (dump_file && (dump_flags & TDF_DETAILS))
6547 	{
6548 	  fprintf (dump_file, "Removing dead stmt ");
6549 	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6550 	}
6551 
6552       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6553       if (gimple_code (stmt) == GIMPLE_PHI)
6554 	remove_phi_node (&gsi, do_release_defs);
6555       else
6556 	{
6557 	  basic_block bb = gimple_bb (stmt);
6558 	  unlink_stmt_vdef (stmt);
6559 	  if (gsi_remove (&gsi, true))
6560 	    bitmap_set_bit (need_eh_cleanup, bb->index);
6561 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6562 	    bitmap_set_bit (need_ab_cleanup, bb->index);
6563 	  if (do_release_defs)
6564 	    release_defs (stmt);
6565 	}
6566 
6567       /* Removing a stmt may expose a forwarder block.  */
6568       el_todo |= TODO_cleanup_cfg;
6569     }
6570 
6571   /* Fixup stmts that became noreturn calls.  This may require splitting
6572      blocks and thus isn't possible during the dominator walk.  Do this
6573      in reverse order so we don't inadvertently remove a stmt we want to
6574      fixup by visiting a dominating now noreturn call first.  */
6575   while (!to_fixup.is_empty ())
6576     {
6577       gimple *stmt = to_fixup.pop ();
6578 
6579       if (dump_file && (dump_flags & TDF_DETAILS))
6580 	{
6581 	  fprintf (dump_file, "Fixing up noreturn call ");
6582 	  print_gimple_stmt (dump_file, stmt, 0);
6583 	}
6584 
6585       if (fixup_noreturn_call (stmt))
6586 	el_todo |= TODO_cleanup_cfg;
6587     }
6588 
6589   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6590   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6591 
6592   if (do_eh_cleanup)
6593     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6594 
6595   if (do_ab_cleanup)
6596     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6597 
6598   if (do_eh_cleanup || do_ab_cleanup)
6599     el_todo |= TODO_cleanup_cfg;
6600 
6601   return el_todo;
6602 }
6603 
6604 /* Eliminate fully redundant computations.  */
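/* This walks the dominator tree from the function entry, replaces uses
   by their available leaders, queues fully redundant stmts for removal
   and then performs the delayed removals and EH/abnormal/CFG cleanups.  */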
6605 
6606 unsigned
6607 eliminate_with_rpo_vn (bitmap inserted_exprs)
6608 {
6609   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6610 
6611   walker.walk (cfun->cfg->x_entry_block_ptr);
6612   return walker.eliminate_cleanup ();
6613 }
6614 
6615 static unsigned
6616 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6617 	   bool iterate, bool eliminate);
6618 
6619 void
6620 run_rpo_vn (vn_lookup_kind kind)
6621 {
6622   default_vn_walk_kind = kind;
6623   do_rpo_vn (cfun, NULL, NULL, true, false);
6624 
6625   /* ???  Prune requirement of these.  */
6626   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6627   constant_value_ids = BITMAP_ALLOC (NULL);
6628 
6629   /* Initialize the value ids and prune out remaining VN_TOPs
6630      from dead code.  */
6631   tree name;
6632   unsigned i;
6633   FOR_EACH_SSA_NAME (i, name, cfun)
6634     {
6635       vn_ssa_aux_t info = VN_INFO (name);
6636       if (!info->visited
6637 	  || info->valnum == VN_TOP)
6638 	info->valnum = name;
6639       if (info->valnum == name)
6640 	info->value_id = get_next_value_id ();
6641       else if (is_gimple_min_invariant (info->valnum))
6642 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
6643     }
6644 
6645   /* Propagate.  */
6646   FOR_EACH_SSA_NAME (i, name, cfun)
6647     {
6648       vn_ssa_aux_t info = VN_INFO (name);
6649       if (TREE_CODE (info->valnum) == SSA_NAME
6650 	  && info->valnum != name
6651 	  && info->value_id != VN_INFO (info->valnum)->value_id)
6652 	info->value_id = VN_INFO (info->valnum)->value_id;
6653     }
6654 
6655   set_hashtable_value_ids ();
6656 
6657   if (dump_file && (dump_flags & TDF_DETAILS))
6658     {
6659       fprintf (dump_file, "Value numbers:\n");
6660       FOR_EACH_SSA_NAME (i, name, cfun)
6661 	{
6662 	  if (VN_INFO (name)->visited
6663 	      && SSA_VAL (name) != name)
6664 	    {
6665 	      print_generic_expr (dump_file, name);
6666 	      fprintf (dump_file, " = ");
6667 	      print_generic_expr (dump_file, SSA_VAL (name));
6668 	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6669 	    }
6670 	}
6671     }
6672 }
6673 
6674 /* Free VN associated data structures.  */
6675 
6676 void
6677 free_rpo_vn (void)
6678 {
6679   free_vn_table (valid_info);
6680   XDELETE (valid_info);
6681   obstack_free (&vn_tables_obstack, NULL);
6682   obstack_free (&vn_tables_insert_obstack, NULL);
6683 
6684   vn_ssa_aux_iterator_type it;
6685   vn_ssa_aux_t info;
6686   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6687     if (info->needs_insertion)
6688       release_ssa_name (info->name);
6689   obstack_free (&vn_ssa_aux_obstack, NULL);
6690   delete vn_ssa_aux_hash;
6691 
6692   delete constant_to_value_id;
6693   constant_to_value_id = NULL;
6694   BITMAP_FREE (constant_value_ids);
6695 }
6696 
6697 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
6698 
6699 static tree
6700 vn_lookup_simplify_result (gimple_match_op *res_op)
6701 {
6702   if (!res_op->code.is_tree_code ())
6703     return NULL_TREE;
6704   tree *ops = res_op->ops;
6705   unsigned int length = res_op->num_ops;
6706   if (res_op->code == CONSTRUCTOR
6707       /* ???  We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
6708          while GIMPLE match-and-simplify uses CONSTRUCTOR as a GENERIC tree.  */
6709       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6710     {
6711       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6712       ops = XALLOCAVEC (tree, length);
6713       for (unsigned i = 0; i < length; ++i)
6714 	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6715     }
6716   vn_nary_op_t vnresult = NULL;
6717   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6718 				       res_op->type, ops, &vnresult);
6719   /* If this is used from expression simplification make sure to
6720      return an available expression.  */
6721   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6722     res = rpo_avail->eliminate_avail (vn_context_bb, res);
6723   return res;
6724 }
6725 
6726 /* Return a leader for OPs value that is valid at BB.  */
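/* Availability is recorded per value as a chain of (block index, leader
   SSA version) entries; the first entry whose block dominates BB
   (checked with dominated_by_p_w_unex) supplies the leader.  */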
6727 
6728 tree
6729 rpo_elim::eliminate_avail (basic_block bb, tree op)
6730 {
6731   bool visited;
6732   tree valnum = SSA_VAL (op, &visited);
6733   /* If we didn't visit OP then it must be defined outside of the
6734      region we process and also dominate it.  So it is available.  */
6735   if (!visited)
6736     return op;
6737   if (TREE_CODE (valnum) == SSA_NAME)
6738     {
6739       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6740 	return valnum;
6741       vn_avail *av = VN_INFO (valnum)->avail;
6742       if (!av)
6743 	return NULL_TREE;
6744       if (av->location == bb->index)
6745 	/* On tramp3d 90% of the cases are here.  */
6746 	return ssa_name (av->leader);
6747       do
6748 	{
6749 	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6750 	  /* ???  During elimination we have to use availability at the
6751 	     definition site of a use we try to replace.  This
6752 	     is required to not run into inconsistencies because
6753 	     of dominated_by_p_w_unex behavior and removing a definition
6754 	     while not replacing all uses.
6755 	     ???  We could try to consistently walk dominators
6756 	     ignoring non-executable regions.  The nearest common
6757 	     dominator of bb and abb is where we can stop walking.  We
6758 	     may also be able to "pre-compute" (bits of) the next immediate
6759 	     (non-)dominator during the RPO walk when marking edges as
6760 	     executable.  */
6761 	  if (dominated_by_p_w_unex (bb, abb))
6762 	    {
6763 	      tree leader = ssa_name (av->leader);
6764 	      /* Prevent eliminations that break loop-closed SSA.  */
6765 	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6766 		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6767 		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6768 							 (leader))->loop_father,
6769 					      bb))
6770 		return NULL_TREE;
6771 	      if (dump_file && (dump_flags & TDF_DETAILS))
6772 		{
6773 		  print_generic_expr (dump_file, leader);
6774 		  fprintf (dump_file, " is available for ");
6775 		  print_generic_expr (dump_file, valnum);
6776 		  fprintf (dump_file, "\n");
6777 		}
6778 	      /* On tramp3d 99% of the _remaining_ cases succeed at
6779 	         the first entry.  */
6780 	      return leader;
6781 	    }
6782 	  /* ???  Can we somehow skip to the immediate dominator
6783 	     RPO index (bb_to_rpo)?  Again, maybe not worth it; on
6784 	     tramp3d the worst number of elements in the vector is 9.  */
6785 	  av = av->next;
6786 	}
6787       while (av);
6788     }
6789   else if (valnum != VN_TOP)
6790     /* valnum is is_gimple_min_invariant.  */
6791     return valnum;
6792   return NULL_TREE;
6793 }
6794 
6795 /* Make LEADER a leader for its value at BB.  */
6796 
6797 void
6798 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6799 {
6800   tree valnum = VN_INFO (leader)->valnum;
6801   if (valnum == VN_TOP
6802       || is_gimple_min_invariant (valnum))
6803     return;
6804   if (dump_file && (dump_flags & TDF_DETAILS))
6805     {
6806       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6807       print_generic_expr (dump_file, leader);
6808       fprintf (dump_file, " for value ");
6809       print_generic_expr (dump_file, valnum);
6810       fprintf (dump_file, "\n");
6811     }
6812   vn_ssa_aux_t value = VN_INFO (valnum);
6813   vn_avail *av;
6814   if (m_avail_freelist)
6815     {
6816       av = m_avail_freelist;
6817       m_avail_freelist = m_avail_freelist->next;
6818     }
6819   else
6820     av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6821   av->location = bb->index;
6822   av->leader = SSA_NAME_VERSION (leader);
6823   av->next = value->avail;
6824   value->avail = av;
6825 }
6826 
6827 /* Valueization hook for RPO VN plus required state.  */
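/* A sketch with hypothetical names: asked for the value of x_3 whose
   value number is x_1, this returns x_1 only if a leader for it is
   available in vn_context_bb; otherwise the original name is returned
   so simplification never substitutes an unavailable SSA name.  */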
6828 
6829 tree
6830 rpo_vn_valueize (tree name)
6831 {
6832   if (TREE_CODE (name) == SSA_NAME)
6833     {
6834       vn_ssa_aux_t val = VN_INFO (name);
6835       if (val)
6836 	{
6837 	  tree tem = val->valnum;
6838 	  if (tem != VN_TOP && tem != name)
6839 	    {
6840 	      if (TREE_CODE (tem) != SSA_NAME)
6841 		return tem;
6842 	      /* For all values we only valueize to an available leader
6843 		 which means we can use SSA name info without restriction.  */
6844 	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6845 	      if (tem)
6846 		return tem;
6847 	    }
6848 	}
6849     }
6850   return name;
6851 }
6852 
6853 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
6854    inverted condition.  */
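/* For example, on the edge where a_1 < b_2 is known to be true this
   additionally records a_1 != b_2 and a_1 <= b_2 as true and a_1 > b_2
   and a_1 == b_2 as false, so dominated redundant tests simplify.  */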
6855 
6856 static void
6857 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6858 {
6859   switch (code)
6860     {
6861     case LT_EXPR:
6862       /* a < b -> a {!,<}= b */
6863       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6864 					   ops, boolean_true_node, 0, pred_e);
6865       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6866 					   ops, boolean_true_node, 0, pred_e);
6867       /* a < b -> ! a {>,=} b */
6868       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6869 					   ops, boolean_false_node, 0, pred_e);
6870       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6871 					   ops, boolean_false_node, 0, pred_e);
6872       break;
6873     case GT_EXPR:
6874       /* a > b -> a {!,>}= b */
6875       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6876 					   ops, boolean_true_node, 0, pred_e);
6877       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6878 					   ops, boolean_true_node, 0, pred_e);
6879       /* a > b -> ! a {<,=} b */
6880       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6881 					   ops, boolean_false_node, 0, pred_e);
6882       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6883 					   ops, boolean_false_node, 0, pred_e);
6884       break;
6885     case EQ_EXPR:
6886       /* a == b -> ! a {<,>} b */
6887       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6888 					   ops, boolean_false_node, 0, pred_e);
6889       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6890 					   ops, boolean_false_node, 0, pred_e);
6891       break;
6892     case LE_EXPR:
6893     case GE_EXPR:
6894     case NE_EXPR:
6895       /* Nothing besides inverted condition.  */
6896       break;
6897     default:;
6898     }
6899 }
6900 
6901 /* Main stmt worker for RPO VN, process BB.  */
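/* This value-numbers the PHIs and stmts of BB, marks outgoing edges and
   destination blocks executable based on (possibly predicated) condition
   values and, when requested, performs elimination and destination PHI
   argument substitution on the fly.  */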
6902 
6903 static unsigned
6904 process_bb (rpo_elim &avail, basic_block bb,
6905 	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6906 	    bool do_region, bitmap exit_bbs, bool skip_phis)
6907 {
6908   unsigned todo = 0;
6909   edge_iterator ei;
6910   edge e;
6911 
6912   vn_context_bb = bb;
6913 
6914   /* If we are in loop-closed SSA preserve this state.  This is
6915      relevant when called on regions from outside of FRE/PRE.  */
6916   bool lc_phi_nodes = false;
6917   if (!skip_phis
6918       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6919     FOR_EACH_EDGE (e, ei, bb->preds)
6920       if (e->src->loop_father != e->dest->loop_father
6921 	  && flow_loop_nested_p (e->dest->loop_father,
6922 				 e->src->loop_father))
6923 	{
6924 	  lc_phi_nodes = true;
6925 	  break;
6926 	}
6927 
6928   /* When we visit a loop header substitute into loop info.  */
6929   if (!iterate && eliminate && bb->loop_father->header == bb)
6930     {
6931       /* Keep fields in sync with substitute_in_loop_info.  */
6932       if (bb->loop_father->nb_iterations)
6933 	bb->loop_father->nb_iterations
6934 	  = simplify_replace_tree (bb->loop_father->nb_iterations,
6935 				   NULL_TREE, NULL_TREE, &vn_valueize_wrapper);
6936     }
6937 
6938   /* Value-number all defs in the basic-block.  */
6939   if (!skip_phis)
6940     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6941 	 gsi_next (&gsi))
6942       {
6943 	gphi *phi = gsi.phi ();
6944 	tree res = PHI_RESULT (phi);
6945 	vn_ssa_aux_t res_info = VN_INFO (res);
6946 	if (!bb_visited)
6947 	  {
6948 	    gcc_assert (!res_info->visited);
6949 	    res_info->valnum = VN_TOP;
6950 	    res_info->visited = true;
6951 	  }
6952 
6953 	/* When not iterating force backedge values to varying.  */
6954 	visit_stmt (phi, !iterate_phis);
6955 	if (virtual_operand_p (res))
6956 	  continue;
6957 
6958 	/* Eliminate */
6959 	/* The interesting correctness case for how we handle backedges and
6960 	   availability is gcc.dg/tree-ssa/pr22230.c.
6961 	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
6962 	tree val = res_info->valnum;
6963 	if (res != val && !iterate && eliminate)
6964 	  {
6965 	    if (tree leader = avail.eliminate_avail (bb, res))
6966 	      {
6967 		if (leader != res
6968 		    /* Preserve loop-closed SSA form.  */
6969 		    && (! lc_phi_nodes
6970 			|| is_gimple_min_invariant (leader)))
6971 		  {
6972 		    if (dump_file && (dump_flags & TDF_DETAILS))
6973 		      {
6974 			fprintf (dump_file, "Replaced redundant PHI node "
6975 				 "defining ");
6976 			print_generic_expr (dump_file, res);
6977 			fprintf (dump_file, " with ");
6978 			print_generic_expr (dump_file, leader);
6979 			fprintf (dump_file, "\n");
6980 		      }
6981 		    avail.eliminations++;
6982 
6983 		    if (may_propagate_copy (res, leader))
6984 		      {
6985 			/* Schedule for removal.  */
6986 			avail.to_remove.safe_push (phi);
6987 			continue;
6988 		      }
6989 		    /* ???  Else generate a copy stmt.  */
6990 		  }
6991 	      }
6992 	  }
6993 	/* Only make defs available that not already are.  But make
6994 	   sure loop-closed SSA PHI node defs are picked up for
6995 	   downstream uses.  */
6996 	if (lc_phi_nodes
6997 	    || res == val
6998 	    || ! avail.eliminate_avail (bb, res))
6999 	  avail.eliminate_push_avail (bb, res);
7000       }
7001 
7002   /* For empty BBs mark outgoing edges executable.  For non-empty BBs
7003      we do this when processing the last stmt, as it has to happen
7004      before elimination, which would otherwise force GIMPLE_CONDs into
7005      if (1 != 0) style when it sees non-executable edges.  */
7006   if (gsi_end_p (gsi_start_bb (bb)))
7007     {
7008       FOR_EACH_EDGE (e, ei, bb->succs)
7009 	{
7010 	  if (!(e->flags & EDGE_EXECUTABLE))
7011 	    {
7012 	      if (dump_file && (dump_flags & TDF_DETAILS))
7013 		fprintf (dump_file,
7014 			 "marking outgoing edge %d -> %d executable\n",
7015 			 e->src->index, e->dest->index);
7016 	      e->flags |= EDGE_EXECUTABLE;
7017 	      e->dest->flags |= BB_EXECUTABLE;
7018 	    }
7019 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7020 	    {
7021 	      if (dump_file && (dump_flags & TDF_DETAILS))
7022 		fprintf (dump_file,
7023 			 "marking destination block %d reachable\n",
7024 			 e->dest->index);
7025 	      e->dest->flags |= BB_EXECUTABLE;
7026 	    }
7027 	}
7028     }
7029   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7030        !gsi_end_p (gsi); gsi_next (&gsi))
7031     {
7032       ssa_op_iter i;
7033       tree op;
7034       if (!bb_visited)
7035 	{
7036 	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7037 	    {
7038 	      vn_ssa_aux_t op_info = VN_INFO (op);
7039 	      gcc_assert (!op_info->visited);
7040 	      op_info->valnum = VN_TOP;
7041 	      op_info->visited = true;
7042 	    }
7043 
7044 	  /* We somehow have to deal with uses that are not defined
7045 	     in the processed region.  Forcing unvisited uses to
7046 	     varying here doesn't play well with def-use following during
7047 	     expression simplification, so we deal with this by checking
7048 	     the visited flag in SSA_VAL.  */
7049 	}
7050 
7051       visit_stmt (gsi_stmt (gsi));
7052 
7053       gimple *last = gsi_stmt (gsi);
7054       e = NULL;
7055       switch (gimple_code (last))
7056 	{
7057 	case GIMPLE_SWITCH:
7058 	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7059 						(as_a <gswitch *> (last))));
7060 	  break;
7061 	case GIMPLE_COND:
7062 	  {
7063 	    tree lhs = vn_valueize (gimple_cond_lhs (last));
7064 	    tree rhs = vn_valueize (gimple_cond_rhs (last));
7065 	    tree val = gimple_simplify (gimple_cond_code (last),
7066 					boolean_type_node, lhs, rhs,
7067 					NULL, vn_valueize);
7068 	    /* If the condition didn't simplify, see if we have recorded
7069 	       an expression from the edges taken so far.  */
7070 	    if (! val || TREE_CODE (val) != INTEGER_CST)
7071 	      {
7072 		vn_nary_op_t vnresult;
7073 		tree ops[2];
7074 		ops[0] = lhs;
7075 		ops[1] = rhs;
7076 		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7077 						boolean_type_node, ops,
7078 						&vnresult);
7079 		/* Did we get a predicated value?  */
7080 		if (! val && vnresult && vnresult->predicated_values)
7081 		  {
7082 		    val = vn_nary_op_get_predicated_value (vnresult, bb);
7083 		    if (val && dump_file && (dump_flags & TDF_DETAILS))
7084 		      {
7085 			fprintf (dump_file, "Got predicated value ");
7086 			print_generic_expr (dump_file, val, TDF_NONE);
7087 			fprintf (dump_file, " for ");
7088 			print_gimple_stmt (dump_file, last, TDF_SLIM);
7089 		      }
7090 		  }
7091 	      }
7092 	    if (val)
7093 	      e = find_taken_edge (bb, val);
7094 	    if (! e)
7095 	      {
7096 		/* If we didn't manage to compute the taken edge then
7097 		   push predicated expressions for the condition itself
7098 		   and related conditions to the hashtables.  This allows
7099 		   simplification of redundant conditions which is
7100 		   important as early cleanup.  */
7101 		edge true_e, false_e;
7102 		extract_true_false_edges_from_block (bb, &true_e, &false_e);
7103 		enum tree_code code = gimple_cond_code (last);
7104 		enum tree_code icode
7105 		  = invert_tree_comparison (code, HONOR_NANS (lhs));
7106 		tree ops[2];
7107 		ops[0] = lhs;
7108 		ops[1] = rhs;
7109 		if (do_region
7110 		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
7111 		  true_e = NULL;
7112 		if (do_region
7113 		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
7114 		  false_e = NULL;
7115 		if (true_e)
7116 		  vn_nary_op_insert_pieces_predicated
7117 		    (2, code, boolean_type_node, ops,
7118 		     boolean_true_node, 0, true_e);
7119 		if (false_e)
7120 		  vn_nary_op_insert_pieces_predicated
7121 		    (2, code, boolean_type_node, ops,
7122 		     boolean_false_node, 0, false_e);
7123 		if (icode != ERROR_MARK)
7124 		  {
7125 		    if (true_e)
7126 		      vn_nary_op_insert_pieces_predicated
7127 			(2, icode, boolean_type_node, ops,
7128 			 boolean_false_node, 0, true_e);
7129 		    if (false_e)
7130 		      vn_nary_op_insert_pieces_predicated
7131 			(2, icode, boolean_type_node, ops,
7132 			 boolean_true_node, 0, false_e);
7133 		  }
7134 		/* Relax for non-integers, inverted condition handled
7135 		   above.  */
7136 		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7137 		  {
7138 		    if (true_e)
7139 		      insert_related_predicates_on_edge (code, ops, true_e);
7140 		    if (false_e)
7141 		      insert_related_predicates_on_edge (icode, ops, false_e);
7142 		  }
7143 	      }
7144 	    break;
7145 	  }
7146 	case GIMPLE_GOTO:
7147 	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7148 	  break;
7149 	default:
7150 	  e = NULL;
7151 	}
7152       if (e)
7153 	{
7154 	  todo = TODO_cleanup_cfg;
7155 	  if (!(e->flags & EDGE_EXECUTABLE))
7156 	    {
7157 	      if (dump_file && (dump_flags & TDF_DETAILS))
7158 		fprintf (dump_file,
7159 			 "marking known outgoing %sedge %d -> %d executable\n",
7160 			 e->flags & EDGE_DFS_BACK ? "back-" : "",
7161 			 e->src->index, e->dest->index);
7162 	      e->flags |= EDGE_EXECUTABLE;
7163 	      e->dest->flags |= BB_EXECUTABLE;
7164 	    }
7165 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7166 	    {
7167 	      if (dump_file && (dump_flags & TDF_DETAILS))
7168 		fprintf (dump_file,
7169 			 "marking destination block %d reachable\n",
7170 			 e->dest->index);
7171 	      e->dest->flags |= BB_EXECUTABLE;
7172 	    }
7173 	}
7174       else if (gsi_one_before_end_p (gsi))
7175 	{
7176 	  FOR_EACH_EDGE (e, ei, bb->succs)
7177 	    {
7178 	      if (!(e->flags & EDGE_EXECUTABLE))
7179 		{
7180 		  if (dump_file && (dump_flags & TDF_DETAILS))
7181 		    fprintf (dump_file,
7182 			     "marking outgoing edge %d -> %d executable\n",
7183 			     e->src->index, e->dest->index);
7184 		  e->flags |= EDGE_EXECUTABLE;
7185 		  e->dest->flags |= BB_EXECUTABLE;
7186 		}
7187 	      else if (!(e->dest->flags & BB_EXECUTABLE))
7188 		{
7189 		  if (dump_file && (dump_flags & TDF_DETAILS))
7190 		    fprintf (dump_file,
7191 			     "marking destination block %d reachable\n",
7192 			     e->dest->index);
7193 		  e->dest->flags |= BB_EXECUTABLE;
7194 		}
7195 	    }
7196 	}
7197 
7198       /* Eliminate.  That also pushes to avail.  */
7199       if (eliminate && ! iterate)
7200 	avail.eliminate_stmt (bb, &gsi);
7201       else
7202 	/* If not eliminating, make all not already available defs
7203 	   available.  */
7204 	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7205 	  if (! avail.eliminate_avail (bb, op))
7206 	    avail.eliminate_push_avail (bb, op);
7207     }
7208 
7209   /* Eliminate in destination PHI arguments.  Always substitute in dest
7210      PHIs, even for non-executable edges.  This handles region
7211      exits PHIs.  */
7212   if (!iterate && eliminate)
7213     FOR_EACH_EDGE (e, ei, bb->succs)
7214       for (gphi_iterator gsi = gsi_start_phis (e->dest);
7215 	   !gsi_end_p (gsi); gsi_next (&gsi))
7216 	{
7217 	  gphi *phi = gsi.phi ();
7218 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7219 	  tree arg = USE_FROM_PTR (use_p);
7220 	  if (TREE_CODE (arg) != SSA_NAME
7221 	      || virtual_operand_p (arg))
7222 	    continue;
7223 	  tree sprime;
7224 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
7225 	    {
7226 	      sprime = SSA_VAL (arg);
7227 	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
7228 			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
7229 	    }
7230 	  else
7231 	    /* Look for sth available at the definition block of the argument.
7232 	       This avoids inconsistencies between availability there which
7233 	       decides if the stmt can be removed and availability at the
7234 	       use site.  The SSA property ensures that things available
7235 	       at the definition are also available at uses.  */
7236 	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7237 					    arg);
7238 	  if (sprime
7239 	      && sprime != arg
7240 	      && may_propagate_copy (arg, sprime))
7241 	    propagate_value (use_p, sprime);
7242 	}
7243 
7244   vn_context_bb = NULL;
7245   return todo;
7246 }
7247 
7248 /* Unwind state per basic-block.  */
7249 
7250 struct unwind_state
7251 {
7252   /* Times this block has been visited.  */
7253   unsigned visited;
7254   /* Whether to handle this as iteration point or whether to treat
7255      incoming backedge PHI values as varying.  */
7256   bool iterate;
7257   /* Maximum RPO index this block is reachable from.  */
7258   int max_rpo;
7259   /* Unwind state.  */
7260   void *ob_top;
7261   vn_reference_t ref_top;
7262   vn_phi_t phi_top;
7263   vn_nary_op_t nary_top;
7264 };
7265 
7266 /* Unwind the RPO VN state for iteration.  */
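/* Unwinding removes the hashtable entries inserted since the recorded
   tops (restoring previous predicated nary entries where needed), frees
   the obstack memory allocated since then and recycles availability
   entries recorded in blocks at or after RPO_IDX onto the freelist.  */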
7267 
7268 static void
7269 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
7270 {
7271   gcc_assert (to->iterate);
7272   for (; last_inserted_nary != to->nary_top;
7273        last_inserted_nary = last_inserted_nary->next)
7274     {
7275       vn_nary_op_t *slot;
7276       slot = valid_info->nary->find_slot_with_hash
7277 	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7278       /* Predication causes the need to restore previous state.  */
7279       if ((*slot)->unwind_to)
7280 	*slot = (*slot)->unwind_to;
7281       else
7282 	valid_info->nary->clear_slot (slot);
7283     }
7284   for (; last_inserted_phi != to->phi_top;
7285        last_inserted_phi = last_inserted_phi->next)
7286     {
7287       vn_phi_t *slot;
7288       slot = valid_info->phis->find_slot_with_hash
7289 	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7290       valid_info->phis->clear_slot (slot);
7291     }
7292   for (; last_inserted_ref != to->ref_top;
7293        last_inserted_ref = last_inserted_ref->next)
7294     {
7295       vn_reference_t *slot;
7296       slot = valid_info->references->find_slot_with_hash
7297 	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7298       (*slot)->operands.release ();
7299       valid_info->references->clear_slot (slot);
7300     }
7301   obstack_free (&vn_tables_obstack, to->ob_top);
7302 
7303   /* Prune [rpo_idx, ] from avail.  */
7304   /* ???  This is O(number-of-values-in-region) which is
7305      O(region-size) rather than O(iteration-piece).  */
7306   for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7307        i != vn_ssa_aux_hash->end (); ++i)
7308     {
7309       while ((*i)->avail)
7310 	{
7311 	  if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
7312 	    break;
7313 	  vn_avail *av = (*i)->avail;
7314 	  (*i)->avail = (*i)->avail->next;
7315 	  av->next = avail.m_avail_freelist;
7316 	  avail.m_avail_freelist = av;
7317 	}
7318     }
7319 }
7320 
7321 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7322    If ITERATE is true then treat backedges optimistically as not
7323    executed and iterate.  If ELIMINATE is true then perform
7324    elimination, otherwise leave that to the caller.  */
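/* Roughly: blocks are processed in RPO order.  When iterating, blocks
   with incoming backedges record an unwind point; if processing a block
   changes the value of a destination PHI across an executable backedge
   we unwind to that destination and continue from there.  */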
7325 
7326 static unsigned
7327 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7328 	   bool iterate, bool eliminate)
7329 {
7330   unsigned todo = 0;
7331 
7332   /* We currently do not support region-based iteration when
7333      elimination is requested.  */
7334   gcc_assert (!entry || !iterate || !eliminate);
7335   /* When iterating we need loop info up-to-date.  */
7336   gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7337 
7338   bool do_region = entry != NULL;
7339   if (!do_region)
7340     {
7341       entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7342       exit_bbs = BITMAP_ALLOC (NULL);
7343       bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7344     }
7345 
7346   /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7347      re-mark those that are contained in the region.  */
7348   edge_iterator ei;
7349   edge e;
7350   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7351     e->flags &= ~EDGE_DFS_BACK;
7352 
7353   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7354   int n = rev_post_order_and_mark_dfs_back_seme
7355     (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
7356   /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
7357   for (int i = 0; i < n / 2; ++i)
7358     std::swap (rpo[i], rpo[n-i-1]);
7359 
7360   if (!do_region)
7361     BITMAP_FREE (exit_bbs);
7362 
7363   /* If there are any non-DFS_BACK edges into entry->dest skip
7364      processing PHI nodes for that block.  This supports
7365      value-numbering loop bodies w/o the actual loop.  */
7366   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7367     if (e != entry
7368 	&& !(e->flags & EDGE_DFS_BACK))
7369       break;
7370   bool skip_entry_phis = e != NULL;
7371   if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7372     fprintf (dump_file, "Region does not contain all edges into "
7373 	     "the entry block, skipping its PHIs.\n");
7374 
7375   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7376   for (int i = 0; i < n; ++i)
7377     bb_to_rpo[rpo[i]] = i;
7378 
7379   unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7380 
7381   rpo_elim avail (entry->dest);
7382   rpo_avail = &avail;
7383 
7384   /* Verify we have no extra entries into the region.  */
7385   if (flag_checking && do_region)
7386     {
7387       auto_bb_flag bb_in_region (fn);
7388       for (int i = 0; i < n; ++i)
7389 	{
7390 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7391 	  bb->flags |= bb_in_region;
7392 	}
7393       /* We can't merge the first two loops because we cannot rely
7394          on EDGE_DFS_BACK for edges not within the region.  But if
7395 	 we decide to always have the bb_in_region flag we can
7396 	 do the checking during the RPO walk itself (but then it's
7397 	 also easy to handle MEME conservatively).  */
7398       for (int i = 0; i < n; ++i)
7399 	{
7400 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7401 	  edge e;
7402 	  edge_iterator ei;
7403 	  FOR_EACH_EDGE (e, ei, bb->preds)
7404 	    gcc_assert (e == entry
7405 			|| (skip_entry_phis && bb == entry->dest)
7406 			|| (e->src->flags & bb_in_region));
7407 	}
7408       for (int i = 0; i < n; ++i)
7409 	{
7410 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7411 	  bb->flags &= ~bb_in_region;
7412 	}
7413     }
7414 
7415   /* Create the VN state.  For the initial size of the various hashtables
7416      use a heuristic based on region size and number of SSA names.  */
7417   unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7418 			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7419   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7420   next_value_id = 1;
7421 
7422   vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7423   gcc_obstack_init (&vn_ssa_aux_obstack);
7424 
7425   gcc_obstack_init (&vn_tables_obstack);
7426   gcc_obstack_init (&vn_tables_insert_obstack);
7427   valid_info = XCNEW (struct vn_tables_s);
7428   allocate_vn_table (valid_info, region_size);
7429   last_inserted_ref = NULL;
7430   last_inserted_phi = NULL;
7431   last_inserted_nary = NULL;
7432 
7433   vn_valueize = rpo_vn_valueize;
7434 
7435   /* Initialize the unwind state and edge/BB executable state.  */
7436   bool need_max_rpo_iterate = false;
7437   for (int i = 0; i < n; ++i)
7438     {
7439       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7440       rpo_state[i].visited = 0;
7441       rpo_state[i].max_rpo = i;
7442       bb->flags &= ~BB_EXECUTABLE;
7443       bool has_backedges = false;
7444       edge e;
7445       edge_iterator ei;
7446       FOR_EACH_EDGE (e, ei, bb->preds)
7447 	{
7448 	  if (e->flags & EDGE_DFS_BACK)
7449 	    has_backedges = true;
7450 	  e->flags &= ~EDGE_EXECUTABLE;
7451 	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7452 	    continue;
7453 	  if (bb_to_rpo[e->src->index] > i)
7454 	    {
7455 	      rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
7456 					  bb_to_rpo[e->src->index]);
7457 	      need_max_rpo_iterate = true;
7458 	    }
7459 	  else
7460 	    rpo_state[i].max_rpo
7461 	      = MAX (rpo_state[i].max_rpo,
7462 		     rpo_state[bb_to_rpo[e->src->index]].max_rpo);
7463 	}
7464       rpo_state[i].iterate = iterate && has_backedges;
7465     }
7466   entry->flags |= EDGE_EXECUTABLE;
7467   entry->dest->flags |= BB_EXECUTABLE;
7468 
7469   /* When there are irreducible regions the simplistic max_rpo computation
7470      above for the case of backedges doesn't work and we need to iterate
7471      until there are no more changes.  */
7472   unsigned nit = 0;
7473   while (need_max_rpo_iterate)
7474     {
7475       nit++;
7476       need_max_rpo_iterate = false;
7477       for (int i = 0; i < n; ++i)
7478 	{
7479 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7480 	  edge e;
7481 	  edge_iterator ei;
7482 	  FOR_EACH_EDGE (e, ei, bb->preds)
7483 	    {
7484 	      if (e == entry || (skip_entry_phis && bb == entry->dest))
7485 		continue;
7486 	      int max_rpo = MAX (rpo_state[i].max_rpo,
7487 				 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
7488 	      if (rpo_state[i].max_rpo != max_rpo)
7489 		{
7490 		  rpo_state[i].max_rpo = max_rpo;
7491 		  need_max_rpo_iterate = true;
7492 		}
7493 	    }
7494 	}
7495     }
7496   statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
7497 
7498   /* As a heuristic to improve compile-time we handle only the N innermost
7499      loops and the outermost one optimistically.  */
7500   if (iterate)
7501     {
7502       loop_p loop;
7503       unsigned max_depth = param_rpo_vn_max_loop_depth;
7504       FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
7505 	if (loop_depth (loop) > max_depth)
7506 	  for (unsigned i = 2;
7507 	       i < loop_depth (loop) - max_depth; ++i)
7508 	    {
7509 	      basic_block header = superloop_at_depth (loop, i)->header;
7510 	      bool non_latch_backedge = false;
7511 	      edge e;
7512 	      edge_iterator ei;
7513 	      FOR_EACH_EDGE (e, ei, header->preds)
7514 		if (e->flags & EDGE_DFS_BACK)
7515 		  {
7516 		    /* There can be a non-latch backedge into the header
7517 		       which is part of an outer irreducible region.  We
7518 		       cannot avoid iterating this block then.  */
7519 		    if (!dominated_by_p (CDI_DOMINATORS,
7520 					 e->src, e->dest))
7521 		      {
7522 			if (dump_file && (dump_flags & TDF_DETAILS))
7523 			  fprintf (dump_file, "non-latch backedge %d -> %d "
7524 				   "forces iteration of loop %d\n",
7525 				   e->src->index, e->dest->index, loop->num);
7526 			non_latch_backedge = true;
7527 		      }
7528 		    else
7529 		      e->flags |= EDGE_EXECUTABLE;
7530 		  }
7531 	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7532 	    }
7533     }
7534 
7535   uint64_t nblk = 0;
7536   int idx = 0;
7537   if (iterate)
7538     /* Go and process all blocks, iterating as necessary.  */
7539     do
7540       {
7541 	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7542 
7543 	/* If the block has incoming backedges remember unwind state.  This
7544 	   is required even for non-executable blocks since in irreducible
7545 	   regions we might reach them via the backedge and re-start iterating
7546 	   from there.
7547 	   Note we can individually mark blocks with incoming backedges to
7548 	   not iterate where we then handle PHIs conservatively.  We do that
7549 	   heuristically to reduce compile-time for degenerate cases.  */
7550 	if (rpo_state[idx].iterate)
7551 	  {
7552 	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7553 	    rpo_state[idx].ref_top = last_inserted_ref;
7554 	    rpo_state[idx].phi_top = last_inserted_phi;
7555 	    rpo_state[idx].nary_top = last_inserted_nary;
7556 	  }
7557 
7558 	if (!(bb->flags & BB_EXECUTABLE))
7559 	  {
7560 	    if (dump_file && (dump_flags & TDF_DETAILS))
7561 	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
7562 		       idx, bb->index);
7563 	    idx++;
7564 	    continue;
7565 	  }
7566 
7567 	if (dump_file && (dump_flags & TDF_DETAILS))
7568 	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7569 	nblk++;
7570 	todo |= process_bb (avail, bb,
7571 			    rpo_state[idx].visited != 0,
7572 			    rpo_state[idx].iterate,
7573 			    iterate, eliminate, do_region, exit_bbs, false);
7574 	rpo_state[idx].visited++;
7575 
7576 	/* Verify if changed values flow over executable outgoing backedges
7577 	   and those change destination PHI values (that's the thing we
7578 	   can easily verify).  Reduce over all such edges to the farthest
7579 	   away PHI.  */
7580 	int iterate_to = -1;
7581 	edge_iterator ei;
7582 	edge e;
7583 	FOR_EACH_EDGE (e, ei, bb->succs)
7584 	  if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7585 	      == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7586 	      && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7587 	    {
7588 	      int destidx = bb_to_rpo[e->dest->index];
7589 	      if (!rpo_state[destidx].visited)
7590 		{
7591 		  if (dump_file && (dump_flags & TDF_DETAILS))
7592 		    fprintf (dump_file, "Unvisited destination %d\n",
7593 			     e->dest->index);
7594 		  if (iterate_to == -1 || destidx < iterate_to)
7595 		    iterate_to = destidx;
7596 		  continue;
7597 		}
7598 	      if (dump_file && (dump_flags & TDF_DETAILS))
7599 		fprintf (dump_file, "Looking for changed values of backedge"
7600 			 " %d->%d destination PHIs\n",
7601 			 e->src->index, e->dest->index);
7602 	      vn_context_bb = e->dest;
7603 	      gphi_iterator gsi;
7604 	      for (gsi = gsi_start_phis (e->dest);
7605 		   !gsi_end_p (gsi); gsi_next (&gsi))
7606 		{
7607 		  bool inserted = false;
7608 		  /* While we'd ideally just iterate on value changes
7609 		     we CSE PHIs and do that even across basic-block
7610 		     boundaries.  So even hashtable state changes can
7611 		     be important (which is roughly equivalent to
7612 		     PHI argument value changes).  To not excessively
7613 		     iterate because of that we track whether a PHI
7614 		     was CSEd to with GF_PLF_1.  */
7615 		  bool phival_changed;
7616 		  if ((phival_changed = visit_phi (gsi.phi (),
7617 						   &inserted, false))
7618 		      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7619 		    {
7620 		      if (!phival_changed
7621 			  && dump_file && (dump_flags & TDF_DETAILS))
7622 			fprintf (dump_file, "PHI was CSEd and hashtable "
7623 				 "state (changed)\n");
7624 		      if (iterate_to == -1 || destidx < iterate_to)
7625 			iterate_to = destidx;
7626 		      break;
7627 		    }
7628 		}
7629 	      vn_context_bb = NULL;
7630 	    }
7631 	if (iterate_to != -1)
7632 	  {
7633 	    do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7634 	    idx = iterate_to;
7635 	    if (dump_file && (dump_flags & TDF_DETAILS))
7636 	      fprintf (dump_file, "Iterating to %d BB%d\n",
7637 		       iterate_to, rpo[iterate_to]);
7638 	    continue;
7639 	  }
7640 
7641 	idx++;
7642       }
7643     while (idx < n);
7644 
7645   else /* !iterate */
7646     {
7647       /* Process all blocks greedily with a worklist that enforces RPO
7648          processing of reachable blocks.  */
7649       auto_bitmap worklist;
7650       bitmap_set_bit (worklist, 0);
7651       while (!bitmap_empty_p (worklist))
7652 	{
7653 	  int idx = bitmap_first_set_bit (worklist);
7654 	  bitmap_clear_bit (worklist, idx);
7655 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7656 	  gcc_assert ((bb->flags & BB_EXECUTABLE)
7657 		      && !rpo_state[idx].visited);
7658 
7659 	  if (dump_file && (dump_flags & TDF_DETAILS))
7660 	    fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7661 
7662 	  /* When we run into predecessor edges whose executable state we
7663 	     cannot trust, mark them executable so PHI processing will
7664 	     be conservative.
7665 	     ???  Do we need to force arguments flowing over that edge
7666 	     to be varying or will they even always be?  */
7667 	  edge_iterator ei;
7668 	  edge e;
7669 	  FOR_EACH_EDGE (e, ei, bb->preds)
7670 	    if (!(e->flags & EDGE_EXECUTABLE)
7671 		&& (bb == entry->dest
7672 		    || (!rpo_state[bb_to_rpo[e->src->index]].visited
7673 			&& (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7674 			    >= (int)idx))))
7675 	      {
7676 		if (dump_file && (dump_flags & TDF_DETAILS))
7677 		  fprintf (dump_file, "Cannot trust state of predecessor "
7678 			   "edge %d -> %d, marking executable\n",
7679 			   e->src->index, e->dest->index);
7680 		e->flags |= EDGE_EXECUTABLE;
7681 	      }
7682 
7683 	  nblk++;
7684 	  todo |= process_bb (avail, bb, false, false, false, eliminate,
7685 			      do_region, exit_bbs,
7686 			      skip_entry_phis && bb == entry->dest);
7687 	  rpo_state[idx].visited++;
7688 
7689 	  FOR_EACH_EDGE (e, ei, bb->succs)
7690 	    if ((e->flags & EDGE_EXECUTABLE)
7691 		&& e->dest->index != EXIT_BLOCK
7692 		&& (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7693 		&& !rpo_state[bb_to_rpo[e->dest->index]].visited)
7694 	      bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7695 	}
7696     }
7697 
7698   /* Gather statistics if a dump file or statistics are active.  */
7699   int nex = 0;
7700   unsigned max_visited = 1;
7701   for (int i = 0; i < n; ++i)
7702     {
7703       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7704       if (bb->flags & BB_EXECUTABLE)
7705 	nex++;
7706       statistics_histogram_event (cfun, "RPO block visited times",
7707 				  rpo_state[i].visited);
7708       if (rpo_state[i].visited > max_visited)
7709 	max_visited = rpo_state[i].visited;
7710     }
  unsigned nvalues = 0, navail = 0;
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
	{
	  navail++;
	  av = av->next;
	}
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
			      vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
	       " blocks in total discovering %d executable blocks iterating "
	       "%d.%d times, a block was visited max. %u times\n",
	       n, nblk, nex,
	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
	       max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
	       "and %" PRIu64 " lattice elements\n",
	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
	{
	  /* Elimination for region-based VN needs to be done within the
	     RPO walk.  */
	  gcc_assert (! do_region);
	  /* Note we can't use avail.walk here because that gets confused
	     by the existing availability and it will be less efficient
	     as well.  */
	  todo |= eliminate_with_rpo_vn (NULL);
	}
      else
	todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

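  /* Release the RPO order, the block-to-RPO mapping and the per-block
     state.  */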
  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}

/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region, PHI nodes in ENTRY->dest are not
   considered.  */
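/* For illustration only (a hypothetical sketch, not taken from an actual
   caller): a pass could value-number just the body of a single-exit LOOP
   that has a preheader like so:

     bitmap exit_bbs = BITMAP_ALLOC (NULL);
     bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
     unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
     BITMAP_FREE (exit_bbs);  */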

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}


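/* The FRE pass: full redundancy elimination, implemented on top of the
   RPO VN machinery above.  */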
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
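  /* Whether value numbering may iterate; when false the cheaper
     non-iterating mode is used unconditionally.  */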
  bool may_iterate;
}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

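  /* Value-number the whole function and perform elimination.  */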
  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE flags and rewrite variables into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE