1 /* SCC value numbering for trees
2    Copyright (C) 2006-2021 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "tree-ssa-sccvn.h"
74 
75 /* This algorithm is based on the SCC algorithm presented by Keith
76    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
77    (http://citeseer.ist.psu.edu/41805.html).  In
78    straight-line code, it is equivalent to regular hash-based value
79    numbering performed in reverse postorder.
80 
81    For code with cycles, there are two alternatives, both of which
82    require keeping the hashtables separate from the actual list of
83    value numbers for SSA names.
84 
85    1. Iterate value numbering in an RPO walk of the blocks, removing
86    all the entries from the hashtable after each iteration (but
87    keeping the SSA name->value number mapping between iterations).
88    Iterate until it does not change.
89 
90    2. Perform value numbering as part of an SCC walk on the SSA graph,
91    iterating only the cycles in the SSA graph until they do not change
92    (using a separate, optimistic hashtable for value numbering the SCC
93    operands).
94 
95    The second is not just faster in practice (because most SSA graph
96    cycles do not involve all the variables in the graph), it also has
97    some nice properties.
98 
99    One of these nice properties is that when we pop an SCC off the
100    stack, we are guaranteed to have processed all the operands coming from
101    *outside of that SCC*, so we do not need to do anything special to
102    ensure they have value numbers.
103 
104    Another nice property is that the SCC walk is done as part of a DFS
105    of the SSA graph, which makes it easy to perform combining and
106    simplifying operations at the same time.
107 
108    The code below is deliberately written in a way that makes it easy
109    to separate the SCC walk from the other work it does.
110 
111    In order to propagate constants through the code, we track which
112    expressions contain constants, and use those while folding.  In
113    theory, we could also track expressions whose value numbers are
114    replaced, in case we end up folding based on expression
115    identities.
116 
117    In order to value number memory, we assign value numbers to vuses.
118    This enables us to note that, for example, stores to the same
119    address of the same value from the same starting memory states are
120    equivalent.
121    TODO:
122 
123    1. We can iterate only the changing portions of the SCCs, but
124    I have not seen an SCC big enough for this to be a win.
125    2. If you differentiate between phi nodes for loops and phi nodes
126    for if-then-else, you can properly consider phi nodes in different
127    blocks for equivalence.
128    3. We could value number vuses in more cases, particularly, whole
129    structure copies.
130 */
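
/* An illustrative example of the cycle case discussed above (a sketch,
   not part of the implementation): in

       # i_1 = PHI <0(2), i_3(4)>
       ...
       i_3 = i_1 + 1;

   i_1 and i_3 form an SCC of the SSA graph, so their value numbers can
   only be determined by iterating that cycle until the numbers reach a
   fixed point.  Alternative 1 above achieves this by re-walking all
   blocks, alternative 2 by iterating just the two names of the SCC.  */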
131 
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
133 #define BB_EXECUTABLE BB_VISITED
134 
135 static vn_lookup_kind default_vn_walk_kind;
136 
137 /* vn_nary_op hashtable helpers.  */
138 
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 {
141   typedef vn_nary_op_s *compare_type;
142   static inline hashval_t hash (const vn_nary_op_s *);
143   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144 };
145 
146 /* Return the computed hashcode for nary operation P1.  */
147 
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 {
151   return vno1->hashcode;
152 }
153 
154 /* Compare nary operations P1 and P2 and return true if they are
155    equivalent.  */
156 
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 {
160   return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
161 }
162 
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
165 
166 
167 /* vn_phi hashtable helpers.  */
168 
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173 {
174   static inline hashval_t hash (const vn_phi_s *);
175   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 };
177 
178 /* Return the computed hashcode for phi operation P1.  */
179 
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
182 {
183   return vp1->hashcode;
184 }
185 
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
187 
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190 {
191   return vp1 == vp2 || vn_phi_eq (vp1, vp2);
192 }
193 
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 
197 
198 /* Compare two reference operands P1 and P2 for equality.  Return true if
199    they are equal, and false otherwise.  */
200 
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
203 {
204   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 
207   return (vro1->opcode == vro2->opcode
208 	  /* We do not care for differences in type qualification.  */
209 	  && (vro1->type == vro2->type
210 	      || (vro1->type && vro2->type
211 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 					 TYPE_MAIN_VARIANT (vro2->type))))
213 	  && expressions_equal_p (vro1->op0, vro2->op0)
214 	  && expressions_equal_p (vro1->op1, vro2->op1)
215 	  && expressions_equal_p (vro1->op2, vro2->op2));
216 }
217 
218 /* Free a reference operation structure VR.  */
219 
220 static inline void
221 free_reference (vn_reference_s *vr)
222 {
223   vr->operands.release ();
224 }
225 
226 
227 /* vn_reference hashtable helpers.  */
228 
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230 {
231   static inline hashval_t hash (const vn_reference_s *);
232   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
233 };
234 
235 /* Return the hashcode for a given reference operation P1.  */
236 
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 {
240   return vr1->hashcode;
241 }
242 
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 {
246   return v == c || vn_reference_eq (v, c);
247 }
248 
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
251 
252 /* Pretty-print OPS to OUTFILE.  */
253 
254 void
255 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
256 {
257   vn_reference_op_t vro;
258   unsigned int i;
259   fprintf (outfile, "{");
260   for (i = 0; ops.iterate (i, &vro); i++)
261     {
262       bool closebrace = false;
263       if (vro->opcode != SSA_NAME
264 	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
265 	{
266 	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
267 	  if (vro->op0)
268 	    {
269 	      fprintf (outfile, "<");
270 	      closebrace = true;
271 	    }
272 	}
273       if (vro->op0)
274 	{
275 	  print_generic_expr (outfile, vro->op0);
276 	  if (vro->op1)
277 	    {
278 	      fprintf (outfile, ",");
279 	      print_generic_expr (outfile, vro->op1);
280 	    }
281 	  if (vro->op2)
282 	    {
283 	      fprintf (outfile, ",");
284 	      print_generic_expr (outfile, vro->op2);
285 	    }
286 	}
287       if (closebrace)
288 	fprintf (outfile, ">");
289       if (i != ops.length () - 1)
290 	fprintf (outfile, ",");
291     }
292   fprintf (outfile, "}");
293 }
294 
295 DEBUG_FUNCTION void
296 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
297 {
298   print_vn_reference_ops (stderr, ops);
299   fputc ('\n', stderr);
300 }
301 
302 /* The set of VN hashtables.  */
303 
304 typedef struct vn_tables_s
305 {
306   vn_nary_op_table_type *nary;
307   vn_phi_table_type *phis;
308   vn_reference_table_type *references;
309 } *vn_tables_t;
310 
311 
312 /* vn_constant hashtable helpers.  */
313 
314 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
315 {
316   static inline hashval_t hash (const vn_constant_s *);
317   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
318 };
319 
320 /* Hash table hash function for vn_constant_t.  */
321 
322 inline hashval_t
323 vn_constant_hasher::hash (const vn_constant_s *vc1)
324 {
325   return vc1->hashcode;
326 }
327 
328 /* Hash table equality function for vn_constant_t.  */
329 
330 inline bool
331 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
332 {
333   if (vc1->hashcode != vc2->hashcode)
334     return false;
335 
336   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
337 }
338 
339 static hash_table<vn_constant_hasher> *constant_to_value_id;
340 
341 
342 /* Obstack we allocate the vn-tables elements from.  */
343 static obstack vn_tables_obstack;
344 /* Special obstack we never unwind.  */
345 static obstack vn_tables_insert_obstack;
346 
347 static vn_reference_t last_inserted_ref;
348 static vn_phi_t last_inserted_phi;
349 static vn_nary_op_t last_inserted_nary;
350 static vn_ssa_aux_t last_pushed_avail;
351 
352 /* Valid hashtables storing information we have proven to be
353    correct.  */
354 static vn_tables_t valid_info;
355 
356 
357 /* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
358    an SSA name, otherwise just return it.  */
359 tree (*vn_valueize) (tree);
360 static tree
361 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
362 {
363   basic_block saved_vn_context_bb = vn_context_bb;
364   /* Look for something available at the definition block of the argument.
365      This avoids inconsistencies between availability there which
366      decides if the stmt can be removed and availability at the
367      use site.  The SSA property ensures that things available
368      at the definition are also available at uses.  */
369   if (!SSA_NAME_IS_DEFAULT_DEF (t))
370     vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
371   tree res = vn_valueize (t);
372   vn_context_bb = saved_vn_context_bb;
373   return res;
374 }
375 
376 
377 /* This represents the top of the VN lattice, which is the universal
378    value.  */
379 
380 tree VN_TOP;
381 
382 /* Unique counter for our value ids.  */
383 
384 static unsigned int next_value_id;
385 static int next_constant_value_id;
386 
387 
388 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
389    are allocated on an obstack for locality reasons, and to free them
390    without looping over the vec.  */
391 
392 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
393 {
394   typedef vn_ssa_aux_t value_type;
395   typedef tree compare_type;
396   static inline hashval_t hash (const value_type &);
397   static inline bool equal (const value_type &, const compare_type &);
398   static inline void mark_deleted (value_type &) {}
399   static const bool empty_zero_p = true;
400   static inline void mark_empty (value_type &e) { e = NULL; }
401   static inline bool is_deleted (value_type &) { return false; }
402   static inline bool is_empty (value_type &e) { return e == NULL; }
403 };
404 
405 hashval_t
406 vn_ssa_aux_hasher::hash (const value_type &entry)
407 {
408   return SSA_NAME_VERSION (entry->name);
409 }
410 
411 bool
412 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
413 {
414   return name == entry->name;
415 }
416 
417 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
418 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
419 static struct obstack vn_ssa_aux_obstack;
420 
421 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
422 static unsigned int vn_nary_length_from_stmt (gimple *);
423 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
424 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
425 					    vn_nary_op_table_type *, bool);
426 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
427 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
428 					 enum tree_code, tree, tree *);
429 static tree vn_lookup_simplify_result (gimple_match_op *);
430 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
431 	  (tree, alias_set_type, alias_set_type, tree,
432 	   vec<vn_reference_op_s, va_heap>, tree);
433 
434 /* Return whether there is value numbering information for a given SSA name.  */
435 
436 bool
437 has_VN_INFO (tree name)
438 {
439   return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
440 }
441 
442 vn_ssa_aux_t
443 VN_INFO (tree name)
444 {
445   vn_ssa_aux_t *res
446     = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
447 					    INSERT);
448   if (*res != NULL)
449     return *res;
450 
451   vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
452   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
453   newinfo->name = name;
454   newinfo->valnum = VN_TOP;
455   /* We are using the visited flag to handle uses with defs not within the
456      region being value-numbered.  */
457   newinfo->visited = false;
458 
459   /* Given we now create the VN_INFOs on demand, we have to do
460      initialization different from plain VN_TOP here.  */
461   if (SSA_NAME_IS_DEFAULT_DEF (name))
462     switch (TREE_CODE (SSA_NAME_VAR (name)))
463       {
464       case VAR_DECL:
465         /* All undefined vars are VARYING.  */
466         newinfo->valnum = name;
467 	newinfo->visited = true;
468 	break;
469 
470       case PARM_DECL:
471 	/* Parameters are VARYING but we can record a condition
472 	   if we know it is a non-NULL pointer.  */
473 	newinfo->visited = true;
474 	newinfo->valnum = name;
475 	if (POINTER_TYPE_P (TREE_TYPE (name))
476 	    && nonnull_arg_p (SSA_NAME_VAR (name)))
477 	  {
478 	    tree ops[2];
479 	    ops[0] = name;
480 	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
481 	    vn_nary_op_t nary;
482 	    /* Allocate from non-unwinding stack.  */
483 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
484 	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
485 					 boolean_type_node, ops);
486 	    nary->predicated_values = 0;
487 	    nary->u.result = boolean_true_node;
488 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
489 	    gcc_assert (nary->unwind_to == NULL);
490 	    /* Also do not link it into the undo chain.  */
491 	    last_inserted_nary = nary->next;
492 	    nary->next = (vn_nary_op_t)(void *)-1;
493 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
494 	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
495 					 boolean_type_node, ops);
496 	    nary->predicated_values = 0;
497 	    nary->u.result = boolean_false_node;
498 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
499 	    gcc_assert (nary->unwind_to == NULL);
500 	    last_inserted_nary = nary->next;
501 	    nary->next = (vn_nary_op_t)(void *)-1;
502 	    if (dump_file && (dump_flags & TDF_DETAILS))
503 	      {
504 		fprintf (dump_file, "Recording ");
505 		print_generic_expr (dump_file, name, TDF_SLIM);
506 		fprintf (dump_file, " != 0\n");
507 	      }
508 	  }
509 	break;
510 
511       case RESULT_DECL:
512 	/* If the result is passed by invisible reference the default
513 	   def is initialized, otherwise it's uninitialized.  Either
514 	   way the value is VARYING.  */
515 	newinfo->visited = true;
516 	newinfo->valnum = name;
517 	break;
518 
519       default:
520 	gcc_unreachable ();
521       }
522   return newinfo;
523 }
524 
525 /* Return the SSA value of X.  */
526 
527 inline tree
528 SSA_VAL (tree x, bool *visited = NULL)
529 {
530   vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
531   if (visited)
532     *visited = tem && tem->visited;
533   return tem && tem->visited ? tem->valnum : x;
534 }
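
/* Usage sketch (illustrative only): if value numbering recorded the
   constant 1 as the value of x_2 and marked x_2 visited, SSA_VAL (x_2)
   returns that constant; for a name that has not been visited, for
   example because its definition lies outside the region being
   value-numbered, the name itself is returned.  */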
535 
536 /* Return the SSA value of the VUSE x, supporting released VDEFs
537    during elimination which will value-number the VDEF to the
538    associated VUSE (but not substitute in the whole lattice).  */
539 
540 static inline tree
541 vuse_ssa_val (tree x)
542 {
543   if (!x)
544     return NULL_TREE;
545 
546   do
547     {
548       x = SSA_VAL (x);
549       gcc_assert (x != VN_TOP);
550     }
551   while (SSA_NAME_IN_FREE_LIST (x));
552 
553   return x;
554 }
555 
556 /* Similar to the above but used as callback for walk_non_aliased_vuses
557    and thus should stop at unvisited VUSE to not walk across region
558    boundaries.  */
559 
560 static tree
561 vuse_valueize (tree vuse)
562 {
563   do
564     {
565       bool visited;
566       vuse = SSA_VAL (vuse, &visited);
567       if (!visited)
568 	return NULL_TREE;
569       gcc_assert (vuse != VN_TOP);
570     }
571   while (SSA_NAME_IN_FREE_LIST (vuse));
572   return vuse;
573 }
574 
575 
576 /* Return the vn_kind the expression computed by the stmt should be
577    associated with.  */
578 
579 enum vn_kind
580 vn_get_stmt_kind (gimple *stmt)
581 {
582   switch (gimple_code (stmt))
583     {
584     case GIMPLE_CALL:
585       return VN_REFERENCE;
586     case GIMPLE_PHI:
587       return VN_PHI;
588     case GIMPLE_ASSIGN:
589       {
590 	enum tree_code code = gimple_assign_rhs_code (stmt);
591 	tree rhs1 = gimple_assign_rhs1 (stmt);
592 	switch (get_gimple_rhs_class (code))
593 	  {
594 	  case GIMPLE_UNARY_RHS:
595 	  case GIMPLE_BINARY_RHS:
596 	  case GIMPLE_TERNARY_RHS:
597 	    return VN_NARY;
598 	  case GIMPLE_SINGLE_RHS:
599 	    switch (TREE_CODE_CLASS (code))
600 	      {
601 	      case tcc_reference:
602 		/* VOP-less references can go through unary case.  */
603 		if ((code == REALPART_EXPR
604 		     || code == IMAGPART_EXPR
605 		     || code == VIEW_CONVERT_EXPR
606 		     || code == BIT_FIELD_REF)
607 		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
608 			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
609 		  return VN_NARY;
610 
611 		/* Fallthrough.  */
612 	      case tcc_declaration:
613 		return VN_REFERENCE;
614 
615 	      case tcc_constant:
616 		return VN_CONSTANT;
617 
618 	      default:
619 		if (code == ADDR_EXPR)
620 		  return (is_gimple_min_invariant (rhs1)
621 			  ? VN_CONSTANT : VN_REFERENCE);
622 		else if (code == CONSTRUCTOR)
623 		  return VN_NARY;
624 		return VN_NONE;
625 	      }
626 	  default:
627 	    return VN_NONE;
628 	  }
629       }
630     default:
631       return VN_NONE;
632     }
633 }
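
/* A few illustrative classifications of the above (a sketch, not
   exhaustive):
     _1 = _2 + 1;            VN_NARY       (binary RHS)
     _1 = foo (_2);          VN_REFERENCE  (call)
     _1 = MEM[(int *)p_2];   VN_REFERENCE  (memory reference)
     _1 = &a[i_2];           VN_REFERENCE  (non-invariant address)
     _1 = 42;                VN_CONSTANT   (constant RHS)
   and PHI nodes are always VN_PHI.  */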
634 
635 /* Look up a value id for CONSTANT and return it.  If it does not
636    exist, return 0.  */
637 
638 unsigned int
639 get_constant_value_id (tree constant)
640 {
641   vn_constant_s **slot;
642   struct vn_constant_s vc;
643 
644   vc.hashcode = vn_hash_constant_with_type (constant);
645   vc.constant = constant;
646   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
647   if (slot)
648     return (*slot)->value_id;
649   return 0;
650 }
651 
652 /* Lookup a value id for CONSTANT, and if it does not exist, create a
653    new one and return it.  If it does exist, return it.  */
654 
655 unsigned int
656 get_or_alloc_constant_value_id (tree constant)
657 {
658   vn_constant_s **slot;
659   struct vn_constant_s vc;
660   vn_constant_t vcp;
661 
662   /* If the hashtable isn't initialized we're not running from PRE and thus
663      do not need value-ids.  */
664   if (!constant_to_value_id)
665     return 0;
666 
667   vc.hashcode = vn_hash_constant_with_type (constant);
668   vc.constant = constant;
669   slot = constant_to_value_id->find_slot (&vc, INSERT);
670   if (*slot)
671     return (*slot)->value_id;
672 
673   vcp = XNEW (struct vn_constant_s);
674   vcp->hashcode = vc.hashcode;
675   vcp->constant = constant;
676   vcp->value_id = get_next_constant_value_id ();
677   *slot = vcp;
678   return vcp->value_id;
679 }
680 
681 /* Compute the hash for a reference operand VRO1.  */
682 
683 static void
684 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
685 {
686   hstate.add_int (vro1->opcode);
687   if (vro1->op0)
688     inchash::add_expr (vro1->op0, hstate);
689   if (vro1->op1)
690     inchash::add_expr (vro1->op1, hstate);
691   if (vro1->op2)
692     inchash::add_expr (vro1->op2, hstate);
693 }
694 
695 /* Compute a hash for the reference operation VR1 and return it.  */
696 
697 static hashval_t
698 vn_reference_compute_hash (const vn_reference_t vr1)
699 {
700   inchash::hash hstate;
701   hashval_t result;
702   int i;
703   vn_reference_op_t vro;
704   poly_int64 off = -1;
705   bool deref = false;
706 
707   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
708     {
709       if (vro->opcode == MEM_REF)
710 	deref = true;
711       else if (vro->opcode != ADDR_EXPR)
712 	deref = false;
713       if (maybe_ne (vro->off, -1))
714 	{
715 	  if (known_eq (off, -1))
716 	    off = 0;
717 	  off += vro->off;
718 	}
719       else
720 	{
721 	  if (maybe_ne (off, -1)
722 	      && maybe_ne (off, 0))
723 	    hstate.add_poly_int (off);
724 	  off = -1;
725 	  if (deref
726 	      && vro->opcode == ADDR_EXPR)
727 	    {
728 	      if (vro->op0)
729 		{
730 		  tree op = TREE_OPERAND (vro->op0, 0);
731 		  hstate.add_int (TREE_CODE (op));
732 		  inchash::add_expr (op, hstate);
733 		}
734 	    }
735 	  else
736 	    vn_reference_op_compute_hash (vro, hstate);
737 	}
738     }
739   result = hstate.end ();
740   /* ??? We would ICE later if we hash instead of adding that in. */
741   if (vr1->vuse)
742     result += SSA_NAME_VERSION (vr1->vuse);
743 
744   return result;
745 }
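
/* Note that operands whose offset is known are only folded into a
   running byte offset above; whether two such runs really describe the
   same memory is left to vn_reference_eq below, which accumulates and
   compares the offsets of the corresponding operand runs.  */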
746 
747 /* Return true if reference operations VR1 and VR2 are equivalent.  This
748    means they have the same set of operands and vuses.  */
749 
750 bool
751 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
752 {
753   unsigned i, j;
754 
755   /* Early out if this is not a hash collision.  */
756   if (vr1->hashcode != vr2->hashcode)
757     return false;
758 
759   /* The VOP needs to be the same.  */
760   if (vr1->vuse != vr2->vuse)
761     return false;
762 
763   /* If the operands are the same we are done.  */
764   if (vr1->operands == vr2->operands)
765     return true;
766 
767   if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
768       || (COMPLETE_TYPE_P (vr1->type)
769 	  && !expressions_equal_p (TYPE_SIZE (vr1->type),
770 				   TYPE_SIZE (vr2->type))))
771     return false;
772 
773   if (INTEGRAL_TYPE_P (vr1->type)
774       && INTEGRAL_TYPE_P (vr2->type))
775     {
776       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
777 	return false;
778     }
779   else if (INTEGRAL_TYPE_P (vr1->type)
780 	   && (TYPE_PRECISION (vr1->type)
781 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
782     return false;
783   else if (INTEGRAL_TYPE_P (vr2->type)
784 	   && (TYPE_PRECISION (vr2->type)
785 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
786     return false;
787 
788   i = 0;
789   j = 0;
790   do
791     {
792       poly_int64 off1 = 0, off2 = 0;
793       vn_reference_op_t vro1, vro2;
794       vn_reference_op_s tem1, tem2;
795       bool deref1 = false, deref2 = false;
796       for (; vr1->operands.iterate (i, &vro1); i++)
797 	{
798 	  if (vro1->opcode == MEM_REF)
799 	    deref1 = true;
800 	  /* Do not look through a storage order barrier.  */
801 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
802 	    return false;
803 	  if (known_eq (vro1->off, -1))
804 	    break;
805 	  off1 += vro1->off;
806 	}
807       for (; vr2->operands.iterate (j, &vro2); j++)
808 	{
809 	  if (vro2->opcode == MEM_REF)
810 	    deref2 = true;
811 	  /* Do not look through a storage order barrier.  */
812 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
813 	    return false;
814 	  if (known_eq (vro2->off, -1))
815 	    break;
816 	  off2 += vro2->off;
817 	}
818       if (maybe_ne (off1, off2))
819 	return false;
820       if (deref1 && vro1->opcode == ADDR_EXPR)
821 	{
822 	  memset (&tem1, 0, sizeof (tem1));
823 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
824 	  tem1.type = TREE_TYPE (tem1.op0);
825 	  tem1.opcode = TREE_CODE (tem1.op0);
826 	  vro1 = &tem1;
827 	  deref1 = false;
828 	}
829       if (deref2 && vro2->opcode == ADDR_EXPR)
830 	{
831 	  memset (&tem2, 0, sizeof (tem2));
832 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
833 	  tem2.type = TREE_TYPE (tem2.op0);
834 	  tem2.opcode = TREE_CODE (tem2.op0);
835 	  vro2 = &tem2;
836 	  deref2 = false;
837 	}
838       if (deref1 != deref2)
839 	return false;
840       if (!vn_reference_op_eq (vro1, vro2))
841 	return false;
842       ++j;
843       ++i;
844     }
845   while (vr1->operands.length () != i
846 	 || vr2->operands.length () != j);
847 
848   return true;
849 }
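
/* The run-wise comparison above is what allows two references that
   spell the same address with a different number of components, for
   example a.b.c versus MEM[&a + 8] when .b.c lies at byte offset 8,
   to compare equal (illustrative example, assuming that layout).  */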
850 
851 /* Copy the operations present in load/store REF into RESULT, a vector of
852    vn_reference_op_s's.  */
853 
854 static void
855 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
856 {
857   /* For non-calls, store the information that makes up the address.  */
858   tree orig = ref;
859   while (ref)
860     {
861       vn_reference_op_s temp;
862 
863       memset (&temp, 0, sizeof (temp));
864       temp.type = TREE_TYPE (ref);
865       temp.opcode = TREE_CODE (ref);
866       temp.off = -1;
867 
868       switch (temp.opcode)
869 	{
870 	case MODIFY_EXPR:
871 	  temp.op0 = TREE_OPERAND (ref, 1);
872 	  break;
873 	case WITH_SIZE_EXPR:
874 	  temp.op0 = TREE_OPERAND (ref, 1);
875 	  temp.off = 0;
876 	  break;
877 	case MEM_REF:
878 	  /* The base address gets its own vn_reference_op_s structure.  */
879 	  temp.op0 = TREE_OPERAND (ref, 1);
880 	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
881 	    temp.off = -1;
882 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
883 	  temp.base = MR_DEPENDENCE_BASE (ref);
884 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
885 	  break;
886 	case TARGET_MEM_REF:
887 	  /* The base address gets its own vn_reference_op_s structure.  */
888 	  temp.op0 = TMR_INDEX (ref);
889 	  temp.op1 = TMR_STEP (ref);
890 	  temp.op2 = TMR_OFFSET (ref);
891 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
892 	  temp.base = MR_DEPENDENCE_BASE (ref);
893 	  result->safe_push (temp);
894 	  memset (&temp, 0, sizeof (temp));
895 	  temp.type = NULL_TREE;
896 	  temp.opcode = ERROR_MARK;
897 	  temp.op0 = TMR_INDEX2 (ref);
898 	  temp.off = -1;
899 	  break;
900 	case BIT_FIELD_REF:
901 	  /* Record bits, position and storage order.  */
902 	  temp.op0 = TREE_OPERAND (ref, 1);
903 	  temp.op1 = TREE_OPERAND (ref, 2);
904 	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
905 	    temp.off = -1;
906 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
907 	  break;
908 	case COMPONENT_REF:
909 	  /* The field decl is enough to unambiguously specify the field,
910 	     a matching type is not necessary and a mismatching type
911 	     is always a spurious difference.  */
912 	  temp.type = NULL_TREE;
913 	  temp.op0 = TREE_OPERAND (ref, 1);
914 	  temp.op1 = TREE_OPERAND (ref, 2);
915 	  {
916 	    tree this_offset = component_ref_field_offset (ref);
917 	    if (this_offset
918 		&& poly_int_tree_p (this_offset))
919 	      {
920 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
921 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
922 		  {
923 		    poly_offset_int off
924 		      = (wi::to_poly_offset (this_offset)
925 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
926 		    /* Prohibit value-numbering zero offset components
927 		       of addresses the same before the pass folding
928 		       __builtin_object_size had a chance to run
929 		       (checking cfun->after_inlining does the
930 		       trick here).  */
931 		    if (TREE_CODE (orig) != ADDR_EXPR
932 			|| maybe_ne (off, 0)
933 			|| cfun->after_inlining)
934 		      off.to_shwi (&temp.off);
935 		  }
936 	      }
937 	  }
938 	  break;
939 	case ARRAY_RANGE_REF:
940 	case ARRAY_REF:
941 	  {
942 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
943 	    /* Record index as operand.  */
944 	    temp.op0 = TREE_OPERAND (ref, 1);
945 	    /* Always record lower bounds and element size.  */
946 	    temp.op1 = array_ref_low_bound (ref);
947 	    /* But record element size in units of the type alignment.  */
948 	    temp.op2 = TREE_OPERAND (ref, 3);
949 	    temp.align = eltype->type_common.align;
950 	    if (! temp.op2)
951 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
952 				     size_int (TYPE_ALIGN_UNIT (eltype)));
953 	    if (poly_int_tree_p (temp.op0)
954 		&& poly_int_tree_p (temp.op1)
955 		&& TREE_CODE (temp.op2) == INTEGER_CST)
956 	      {
957 		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
958 					- wi::to_poly_offset (temp.op1))
959 				       * wi::to_offset (temp.op2)
960 				       * vn_ref_op_align_unit (&temp));
961 		off.to_shwi (&temp.off);
962 	      }
963 	  }
964 	  break;
965 	case VAR_DECL:
966 	  if (DECL_HARD_REGISTER (ref))
967 	    {
968 	      temp.op0 = ref;
969 	      break;
970 	    }
971 	  /* Fallthru.  */
972 	case PARM_DECL:
973 	case CONST_DECL:
974 	case RESULT_DECL:
975 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
976 	     when valueizing MEM[ptr] with ptr = &decl.  */
977 	  temp.opcode = MEM_REF;
978 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
979 	  temp.off = 0;
980 	  result->safe_push (temp);
981 	  temp.opcode = ADDR_EXPR;
982 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
983 	  temp.type = TREE_TYPE (temp.op0);
984 	  temp.off = -1;
985 	  break;
986 	case STRING_CST:
987 	case INTEGER_CST:
988 	case POLY_INT_CST:
989 	case COMPLEX_CST:
990 	case VECTOR_CST:
991 	case REAL_CST:
992 	case FIXED_CST:
993 	case CONSTRUCTOR:
994 	case SSA_NAME:
995 	  temp.op0 = ref;
996 	  break;
997 	case ADDR_EXPR:
998 	  if (is_gimple_min_invariant (ref))
999 	    {
1000 	      temp.op0 = ref;
1001 	      break;
1002 	    }
1003 	  break;
1004 	  /* These are only interesting for their operands, their
1005 	     existence, and their type.  They will never be the last
1006 	     ref in the chain of references (i.e. they require an
1007 	     operand), so we don't have to put anything
1008 	     for op* as it will be handled by the iteration.  */
1009 	case REALPART_EXPR:
1010 	  temp.off = 0;
1011 	  break;
1012 	case VIEW_CONVERT_EXPR:
1013 	  temp.off = 0;
1014 	  temp.reverse = storage_order_barrier_p (ref);
1015 	  break;
1016 	case IMAGPART_EXPR:
1017 	  /* This is only interesting for its constant offset.  */
1018 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1019 	  break;
1020 	default:
1021 	  gcc_unreachable ();
1022 	}
1023       result->safe_push (temp);
1024 
1025       if (REFERENCE_CLASS_P (ref)
1026 	  || TREE_CODE (ref) == MODIFY_EXPR
1027 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
1028 	  || (TREE_CODE (ref) == ADDR_EXPR
1029 	      && !is_gimple_min_invariant (ref)))
1030 	ref = TREE_OPERAND (ref, 0);
1031       else
1032 	ref = NULL_TREE;
1033     }
1034 }
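
/* As an illustration of the encoding above (a sketch), a load from
   a.b[i_1] for a global struct variable 'a' is pushed roughly as

     { ARRAY_REF<i_1, low-bound, elt-size>, COMPONENT_REF<b>,
       MEM_REF<0>, ADDR_EXPR<&a> }

   i.e. outermost handled component first, with the decl canonicalized
   to MEM[&a] as described for the VAR_DECL case.  */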
1035 
1036 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
1037    operands in *OPS, the reference alias set SET and the reference type TYPE.
1038    Return true if something useful was produced.  */
1039 
1040 bool
1041 ao_ref_init_from_vn_reference (ao_ref *ref,
1042 			       alias_set_type set, alias_set_type base_set,
1043 			       tree type, vec<vn_reference_op_s> ops)
1044 {
1045   vn_reference_op_t op;
1046   unsigned i;
1047   tree base = NULL_TREE;
1048   tree *op0_p = &base;
1049   poly_offset_int offset = 0;
1050   poly_offset_int max_size;
1051   poly_offset_int size = -1;
1052   tree size_tree = NULL_TREE;
1053 
1054   machine_mode mode = TYPE_MODE (type);
1055   if (mode == BLKmode)
1056     size_tree = TYPE_SIZE (type);
1057   else
1058     size = GET_MODE_BITSIZE (mode);
1059   if (size_tree != NULL_TREE
1060       && poly_int_tree_p (size_tree))
1061     size = wi::to_poly_offset (size_tree);
1062 
1063   /* Lower the final access size from the outermost expression.  */
1064   op = &ops[0];
1065   size_tree = NULL_TREE;
1066   if (op->opcode == COMPONENT_REF)
1067     size_tree = DECL_SIZE (op->op0);
1068   else if (op->opcode == BIT_FIELD_REF)
1069     size_tree = op->op0;
1070   if (size_tree != NULL_TREE
1071       && poly_int_tree_p (size_tree)
1072       && (!known_size_p (size)
1073 	  || known_lt (wi::to_poly_offset (size_tree), size)))
1074     size = wi::to_poly_offset (size_tree);
1075 
1076   /* Initially, maxsize is the same as the accessed element size.
1077      In the following it will only grow (or become -1).  */
1078   max_size = size;
1079 
1080   /* Compute cumulative bit-offset for nested component-refs and array-refs,
1081      and find the ultimate containing object.  */
1082   FOR_EACH_VEC_ELT (ops, i, op)
1083     {
1084       switch (op->opcode)
1085 	{
1086 	/* These may be in the reference ops, but we cannot do anything
1087 	   sensible with them here.  */
1088 	case ADDR_EXPR:
1089 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
1090 	  if (base != NULL_TREE
1091 	      && TREE_CODE (base) == MEM_REF
1092 	      && op->op0
1093 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
1094 	    {
1095 	      vn_reference_op_t pop = &ops[i-1];
1096 	      base = TREE_OPERAND (op->op0, 0);
1097 	      if (known_eq (pop->off, -1))
1098 		{
1099 		  max_size = -1;
1100 		  offset = 0;
1101 		}
1102 	      else
1103 		offset += pop->off * BITS_PER_UNIT;
1104 	      op0_p = NULL;
1105 	      break;
1106 	    }
1107 	  /* Fallthru.  */
1108 	case CALL_EXPR:
1109 	  return false;
1110 
1111 	/* Record the base objects.  */
1112 	case MEM_REF:
1113 	  *op0_p = build2 (MEM_REF, op->type,
1114 			   NULL_TREE, op->op0);
1115 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1116 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
1117 	  op0_p = &TREE_OPERAND (*op0_p, 0);
1118 	  break;
1119 
1120 	case VAR_DECL:
1121 	case PARM_DECL:
1122 	case RESULT_DECL:
1123 	case SSA_NAME:
1124 	  *op0_p = op->op0;
1125 	  op0_p = NULL;
1126 	  break;
1127 
1128 	/* And now the usual component-reference style ops.  */
1129 	case BIT_FIELD_REF:
1130 	  offset += wi::to_poly_offset (op->op1);
1131 	  break;
1132 
1133 	case COMPONENT_REF:
1134 	  {
1135 	    tree field = op->op0;
1136 	    /* We do not have a complete COMPONENT_REF tree here so we
1137 	       cannot use component_ref_field_offset.  Do the interesting
1138 	       parts manually.  */
1139 	    tree this_offset = DECL_FIELD_OFFSET (field);
1140 
1141 	    if (op->op1 || !poly_int_tree_p (this_offset))
1142 	      max_size = -1;
1143 	    else
1144 	      {
1145 		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1146 					   << LOG2_BITS_PER_UNIT);
1147 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1148 		offset += woffset;
1149 	      }
1150 	    break;
1151 	  }
1152 
1153 	case ARRAY_RANGE_REF:
1154 	case ARRAY_REF:
1155 	  /* We recorded the lower bound and the element size.  */
1156 	  if (!poly_int_tree_p (op->op0)
1157 	      || !poly_int_tree_p (op->op1)
1158 	      || TREE_CODE (op->op2) != INTEGER_CST)
1159 	    max_size = -1;
1160 	  else
1161 	    {
1162 	      poly_offset_int woffset
1163 		= wi::sext (wi::to_poly_offset (op->op0)
1164 			    - wi::to_poly_offset (op->op1),
1165 			    TYPE_PRECISION (sizetype));
1166 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1167 	      woffset <<= LOG2_BITS_PER_UNIT;
1168 	      offset += woffset;
1169 	    }
1170 	  break;
1171 
1172 	case REALPART_EXPR:
1173 	  break;
1174 
1175 	case IMAGPART_EXPR:
1176 	  offset += size;
1177 	  break;
1178 
1179 	case VIEW_CONVERT_EXPR:
1180 	  break;
1181 
1182 	case STRING_CST:
1183 	case INTEGER_CST:
1184 	case COMPLEX_CST:
1185 	case VECTOR_CST:
1186 	case REAL_CST:
1187 	case CONSTRUCTOR:
1188 	case CONST_DECL:
1189 	  return false;
1190 
1191 	default:
1192 	  return false;
1193 	}
1194     }
1195 
1196   if (base == NULL_TREE)
1197     return false;
1198 
1199   ref->ref = NULL_TREE;
1200   ref->base = base;
1201   ref->ref_alias_set = set;
1202   ref->base_alias_set = base_set;
1203   /* We discount volatiles from value-numbering elsewhere.  */
1204   ref->volatile_p = false;
1205 
1206   if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1207     {
1208       ref->offset = 0;
1209       ref->size = -1;
1210       ref->max_size = -1;
1211       return true;
1212     }
1213 
1214   if (!offset.to_shwi (&ref->offset))
1215     {
1216       ref->offset = 0;
1217       ref->max_size = -1;
1218       return true;
1219     }
1220 
1221   if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1222     ref->max_size = -1;
1223 
1224   return true;
1225 }
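
/* For example (illustrative only), for the operand vector of a.v[i_1]
   with a variable index i_1 the array step cannot be accounted for, so
   the resulting ao_ref gets max_size -1 (unknown), while base and
   offset still describe the enclosing object 'a' and the bit offset of
   the member 'v'.  */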
1226 
1227 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1228    vn_reference_op_s's.  */
1229 
1230 static void
1231 copy_reference_ops_from_call (gcall *call,
1232 			      vec<vn_reference_op_s> *result)
1233 {
1234   vn_reference_op_s temp;
1235   unsigned i;
1236   tree lhs = gimple_call_lhs (call);
1237   int lr;
1238 
1239   /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1240      different.  By adding the lhs here in the vector, we ensure that the
1241      hashcode is different, guaranteeing a different value number.  */
1242   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1243     {
1244       memset (&temp, 0, sizeof (temp));
1245       temp.opcode = MODIFY_EXPR;
1246       temp.type = TREE_TYPE (lhs);
1247       temp.op0 = lhs;
1248       temp.off = -1;
1249       result->safe_push (temp);
1250     }
1251 
1252   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1253   memset (&temp, 0, sizeof (temp));
1254   temp.type = gimple_call_fntype (call);
1255   temp.opcode = CALL_EXPR;
1256   temp.op0 = gimple_call_fn (call);
1257   temp.op1 = gimple_call_chain (call);
1258   if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1259     temp.op2 = size_int (lr);
1260   temp.off = -1;
1261   result->safe_push (temp);
1262 
1263   /* Copy the call arguments.  As they can be references as well,
1264      just chain them together.  */
1265   for (i = 0; i < gimple_call_num_args (call); ++i)
1266     {
1267       tree callarg = gimple_call_arg (call, i);
1268       copy_reference_ops_from_ref (callarg, result);
1269     }
1270 }
1271 
1272 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1273    *I_P to point to the last element of the replacement.  */
1274 static bool
1275 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1276 			    unsigned int *i_p)
1277 {
1278   unsigned int i = *i_p;
1279   vn_reference_op_t op = &(*ops)[i];
1280   vn_reference_op_t mem_op = &(*ops)[i - 1];
1281   tree addr_base;
1282   poly_int64 addr_offset = 0;
1283 
1284   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1285      from .foo.bar to the preceding MEM_REF offset and replace the
1286      address with &OBJ.  */
1287   addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1288 					       &addr_offset, vn_valueize);
1289   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1290   if (addr_base != TREE_OPERAND (op->op0, 0))
1291     {
1292       poly_offset_int off
1293 	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1294 				  SIGNED)
1295 	   + addr_offset);
1296       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1297       op->op0 = build_fold_addr_expr (addr_base);
1298       if (tree_fits_shwi_p (mem_op->op0))
1299 	mem_op->off = tree_to_shwi (mem_op->op0);
1300       else
1301 	mem_op->off = -1;
1302       return true;
1303     }
1304   return false;
1305 }
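
/* Example of the folding above (illustrative): given ops describing
   MEM[&a.b.c + 4] where a.b.c lies at byte offset 12 within 'a', the
   address operand is rewritten to &a and the MEM_REF offset becomes
   16, i.e. MEM[&a + 16].  */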
1306 
1307 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1308    *I_P to point to the last element of the replacement.  */
1309 static bool
1310 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1311 				     unsigned int *i_p)
1312 {
1313   bool changed = false;
1314   vn_reference_op_t op;
1315 
1316   do
1317     {
1318       unsigned int i = *i_p;
1319       op = &(*ops)[i];
1320       vn_reference_op_t mem_op = &(*ops)[i - 1];
1321       gimple *def_stmt;
1322       enum tree_code code;
1323       poly_offset_int off;
1324 
1325       def_stmt = SSA_NAME_DEF_STMT (op->op0);
1326       if (!is_gimple_assign (def_stmt))
1327 	return changed;
1328 
1329       code = gimple_assign_rhs_code (def_stmt);
1330       if (code != ADDR_EXPR
1331 	  && code != POINTER_PLUS_EXPR)
1332 	return changed;
1333 
1334       off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1335 
1336       /* The only thing we have to do is from &OBJ.foo.bar add the offset
1337 	 from .foo.bar to the preceding MEM_REF offset and replace the
1338 	 address with &OBJ.  */
1339       if (code == ADDR_EXPR)
1340 	{
1341 	  tree addr, addr_base;
1342 	  poly_int64 addr_offset;
1343 
1344 	  addr = gimple_assign_rhs1 (def_stmt);
1345 	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1346 						       &addr_offset,
1347 						       vn_valueize);
1348 	  /* If that didn't work because the address isn't invariant propagate
1349 	     the reference tree from the address operation in case the current
1350 	     dereference isn't offsetted.  */
1351 	  if (!addr_base
1352 	      && *i_p == ops->length () - 1
1353 	      && known_eq (off, 0)
1354 	      /* This makes us disable this transform for PRE where the
1355 		 reference ops might also be used for code insertion, which
1356 		 is invalid.  */
1357 	      && default_vn_walk_kind == VN_WALKREWRITE)
1358 	    {
1359 	      auto_vec<vn_reference_op_s, 32> tem;
1360 	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1361 	      /* Make sure to preserve TBAA info.  The only objects not
1362 		 wrapped in MEM_REFs that can have their address taken are
1363 		 STRING_CSTs.  */
1364 	      if (tem.length () >= 2
1365 		  && tem[tem.length () - 2].opcode == MEM_REF)
1366 		{
1367 		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1368 		  new_mem_op->op0
1369 		      = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1370 					  wi::to_poly_wide (new_mem_op->op0));
1371 		}
1372 	      else
1373 		gcc_assert (tem.last ().opcode == STRING_CST);
1374 	      ops->pop ();
1375 	      ops->pop ();
1376 	      ops->safe_splice (tem);
1377 	      --*i_p;
1378 	      return true;
1379 	    }
1380 	  if (!addr_base
1381 	      || TREE_CODE (addr_base) != MEM_REF
1382 	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1383 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1384 								    0))))
1385 	    return changed;
1386 
1387 	  off += addr_offset;
1388 	  off += mem_ref_offset (addr_base);
1389 	  op->op0 = TREE_OPERAND (addr_base, 0);
1390 	}
1391       else
1392 	{
1393 	  tree ptr, ptroff;
1394 	  ptr = gimple_assign_rhs1 (def_stmt);
1395 	  ptroff = gimple_assign_rhs2 (def_stmt);
1396 	  if (TREE_CODE (ptr) != SSA_NAME
1397 	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1398 	      /* Make sure to not endlessly recurse.
1399 		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
1400 		 happen when we value-number a PHI to its backedge value.  */
1401 	      || SSA_VAL (ptr) == op->op0
1402 	      || !poly_int_tree_p (ptroff))
1403 	    return changed;
1404 
1405 	  off += wi::to_poly_offset (ptroff);
1406 	  op->op0 = ptr;
1407 	}
1408 
1409       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1410       if (tree_fits_shwi_p (mem_op->op0))
1411 	mem_op->off = tree_to_shwi (mem_op->op0);
1412       else
1413 	mem_op->off = -1;
1414       /* ???  Can end up with endless recursion here!?
1415 	 gcc.c-torture/execute/strcmp-1.c  */
1416       if (TREE_CODE (op->op0) == SSA_NAME)
1417 	op->op0 = SSA_VAL (op->op0);
1418       if (TREE_CODE (op->op0) != SSA_NAME)
1419 	op->opcode = TREE_CODE (op->op0);
1420 
1421       changed = true;
1422     }
1423   /* Tail-recurse.  */
1424   while (TREE_CODE (op->op0) == SSA_NAME);
1425 
1426   /* Fold a remaining *&.  */
1427   if (TREE_CODE (op->op0) == ADDR_EXPR)
1428     vn_reference_fold_indirect (ops, i_p);
1429 
1430   return changed;
1431 }
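
/* For instance (a sketch), when the SSA pointer feeding a MEM_REF is
   defined as

     p_1 = q_2 + 16;    (POINTER_PLUS_EXPR)

   the ops pair { MEM_REF<0>, p_1 } is rewritten to { MEM_REF<16>, q_2 },
   and the walk continues through further ADDR_EXPR or POINTER_PLUS_EXPR
   definitions of the new pointer.  */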
1432 
1433 /* Optimize the reference REF to a constant if possible or return
1434    NULL_TREE if not.  */
1435 
1436 tree
1437 fully_constant_vn_reference_p (vn_reference_t ref)
1438 {
1439   vec<vn_reference_op_s> operands = ref->operands;
1440   vn_reference_op_t op;
1441 
1442   /* Try to simplify the translated expression if it is
1443      a call to a builtin function with at most two arguments.  */
1444   op = &operands[0];
1445   if (op->opcode == CALL_EXPR
1446       && TREE_CODE (op->op0) == ADDR_EXPR
1447       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1448       && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1449       && operands.length () >= 2
1450       && operands.length () <= 3)
1451     {
1452       vn_reference_op_t arg0, arg1 = NULL;
1453       bool anyconst = false;
1454       arg0 = &operands[1];
1455       if (operands.length () > 2)
1456 	arg1 = &operands[2];
1457       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1458 	  || (arg0->opcode == ADDR_EXPR
1459 	      && is_gimple_min_invariant (arg0->op0)))
1460 	anyconst = true;
1461       if (arg1
1462 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1463 	      || (arg1->opcode == ADDR_EXPR
1464 		  && is_gimple_min_invariant (arg1->op0))))
1465 	anyconst = true;
1466       if (anyconst)
1467 	{
1468 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1469 					 arg1 ? 2 : 1,
1470 					 arg0->op0,
1471 					 arg1 ? arg1->op0 : NULL);
1472 	  if (folded
1473 	      && TREE_CODE (folded) == NOP_EXPR)
1474 	    folded = TREE_OPERAND (folded, 0);
1475 	  if (folded
1476 	      && is_gimple_min_invariant (folded))
1477 	    return folded;
1478 	}
1479     }
1480 
1481   /* Simplify reads from constants or constant initializers.  */
1482   else if (BITS_PER_UNIT == 8
1483 	   && COMPLETE_TYPE_P (ref->type)
1484 	   && is_gimple_reg_type (ref->type))
1485     {
1486       poly_int64 off = 0;
1487       HOST_WIDE_INT size;
1488       if (INTEGRAL_TYPE_P (ref->type))
1489 	size = TYPE_PRECISION (ref->type);
1490       else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1491 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1492       else
1493 	return NULL_TREE;
1494       if (size % BITS_PER_UNIT != 0
1495 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1496 	return NULL_TREE;
1497       size /= BITS_PER_UNIT;
1498       unsigned i;
1499       for (i = 0; i < operands.length (); ++i)
1500 	{
1501 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1502 	    {
1503 	      ++i;
1504 	      break;
1505 	    }
1506 	  if (known_eq (operands[i].off, -1))
1507 	    return NULL_TREE;
1508 	  off += operands[i].off;
1509 	  if (operands[i].opcode == MEM_REF)
1510 	    {
1511 	      ++i;
1512 	      break;
1513 	    }
1514 	}
1515       vn_reference_op_t base = &operands[--i];
1516       tree ctor = error_mark_node;
1517       tree decl = NULL_TREE;
1518       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1519 	ctor = base->op0;
1520       else if (base->opcode == MEM_REF
1521 	       && base[1].opcode == ADDR_EXPR
1522 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1523 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1524 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1525 	{
1526 	  decl = TREE_OPERAND (base[1].op0, 0);
1527 	  if (TREE_CODE (decl) == STRING_CST)
1528 	    ctor = decl;
1529 	  else
1530 	    ctor = ctor_for_folding (decl);
1531 	}
1532       if (ctor == NULL_TREE)
1533 	return build_zero_cst (ref->type);
1534       else if (ctor != error_mark_node)
1535 	{
1536 	  HOST_WIDE_INT const_off;
1537 	  if (decl)
1538 	    {
1539 	      tree res = fold_ctor_reference (ref->type, ctor,
1540 					      off * BITS_PER_UNIT,
1541 					      size * BITS_PER_UNIT, decl);
1542 	      if (res)
1543 		{
1544 		  STRIP_USELESS_TYPE_CONVERSION (res);
1545 		  if (is_gimple_min_invariant (res))
1546 		    return res;
1547 		}
1548 	    }
1549 	  else if (off.is_constant (&const_off))
1550 	    {
1551 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1552 	      int len = native_encode_expr (ctor, buf, size, const_off);
1553 	      if (len > 0)
1554 		return native_interpret_expr (ref->type, buf, len);
1555 	    }
1556 	}
1557     }
1558 
1559   return NULL_TREE;
1560 }
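
/* Two illustrative cases the function above can resolve (a sketch):
   a call with constant arguments such as __builtin_popcount (8) may
   fold to 1 via build_call_expr, and a load like tab[1] from
     static const int tab[2] = { 7, 9 };
   folds to 9 through the constant initializer of 'tab'.  */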
1561 
1562 /* Return true if OPS contain a storage order barrier.  */
1563 
1564 static bool
1565 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1566 {
1567   vn_reference_op_t op;
1568   unsigned i;
1569 
1570   FOR_EACH_VEC_ELT (ops, i, op)
1571     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1572       return true;
1573 
1574   return false;
1575 }
1576 
1577 /* Transform any SSA_NAMEs in the vector of vn_reference_op_s
1578    structures *ORIG into their value numbers.  This is done in-place.
1579    *VALUEIZED_ANYTHING will specify whether any operands were
1580    valueized.  */
1581 
1582 static void
1583 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1584 		 bool with_avail = false)
1585 {
1586   vn_reference_op_t vro;
1587   unsigned int i;
1588 
1589   *valueized_anything = false;
1590 
1591   FOR_EACH_VEC_ELT (*orig, i, vro)
1592     {
1593       if (vro->opcode == SSA_NAME
1594 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1595 	{
1596 	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1597 	  if (tem != vro->op0)
1598 	    {
1599 	      *valueized_anything = true;
1600 	      vro->op0 = tem;
1601 	    }
1602 	  /* If it transforms from an SSA_NAME to a constant, update
1603 	     the opcode.  */
1604 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1605 	    vro->opcode = TREE_CODE (vro->op0);
1606 	}
1607       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1608 	{
1609 	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1610 	  if (tem != vro->op1)
1611 	    {
1612 	      *valueized_anything = true;
1613 	      vro->op1 = tem;
1614 	    }
1615 	}
1616       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1617 	{
1618 	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1619 	  if (tem != vro->op2)
1620 	    {
1621 	      *valueized_anything = true;
1622 	      vro->op2 = tem;
1623 	    }
1624 	}
1625       /* If it transforms from an SSA_NAME to an address, fold with
1626 	 a preceding indirect reference.  */
1627       if (i > 0
1628 	  && vro->op0
1629 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1630 	  && (*orig)[i - 1].opcode == MEM_REF)
1631 	{
1632 	  if (vn_reference_fold_indirect (orig, &i))
1633 	    *valueized_anything = true;
1634 	}
1635       else if (i > 0
1636 	       && vro->opcode == SSA_NAME
1637 	       && (*orig)[i - 1].opcode == MEM_REF)
1638 	{
1639 	  if (vn_reference_maybe_forwprop_address (orig, &i))
1640 	    *valueized_anything = true;
1641 	}
1642       /* If it transforms a non-constant ARRAY_REF into a constant
1643 	 one, adjust the constant offset.  */
1644       else if (vro->opcode == ARRAY_REF
1645 	       && known_eq (vro->off, -1)
1646 	       && poly_int_tree_p (vro->op0)
1647 	       && poly_int_tree_p (vro->op1)
1648 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1649 	{
1650 	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1651 				  - wi::to_poly_offset (vro->op1))
1652 				 * wi::to_offset (vro->op2)
1653 				 * vn_ref_op_align_unit (vro));
1654 	  off.to_shwi (&vro->off);
1655 	}
1656     }
1657 }
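
/* For example (illustrative only, assuming .f lies at byte offset 4),
   if _2 has been value-numbered to &a.f, a reference whose ops are
   { MEM_REF<0>, _2 } has its SSA operand replaced by &a.f here and is
   then folded with the preceding MEM_REF via vn_reference_fold_indirect
   into the canonical { MEM_REF<4>, &a } form.  */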
1658 
1659 static void
1660 valueize_refs (vec<vn_reference_op_s> *orig)
1661 {
1662   bool tem;
1663   valueize_refs_1 (orig, &tem);
1664 }
1665 
1666 static vec<vn_reference_op_s> shared_lookup_references;
1667 
1668 /* Create a vector of vn_reference_op_s structures from REF, a
1669    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1670    this function.  *VALUEIZED_ANYTHING will specify whether any
1671    operands were valueized.  */
1672 
1673 static vec<vn_reference_op_s>
1674 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1675 {
1676   if (!ref)
1677     return vNULL;
1678   shared_lookup_references.truncate (0);
1679   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1680   valueize_refs_1 (&shared_lookup_references, valueized_anything);
1681   return shared_lookup_references;
1682 }
1683 
1684 /* Create a vector of vn_reference_op_s structures from CALL, a
1685    call statement.  The vector is shared among all callers of
1686    this function.  */
1687 
1688 static vec<vn_reference_op_s>
1689 valueize_shared_reference_ops_from_call (gcall *call)
1690 {
1691   if (!call)
1692     return vNULL;
1693   shared_lookup_references.truncate (0);
1694   copy_reference_ops_from_call (call, &shared_lookup_references);
1695   valueize_refs (&shared_lookup_references);
1696   return shared_lookup_references;
1697 }
1698 
1699 /* Lookup a SCCVN reference operation VR in the current hash table.
1700    Returns the resulting value number if it exists in the hash table,
1701    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1702    vn_reference_t stored in the hashtable if something is found.  */
1703 
1704 static tree
1705 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1706 {
1707   vn_reference_s **slot;
1708   hashval_t hash;
1709 
1710   hash = vr->hashcode;
1711   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1712   if (slot)
1713     {
1714       if (vnresult)
1715 	*vnresult = (vn_reference_t)*slot;
1716       return ((vn_reference_t)*slot)->result;
1717     }
1718 
1719   return NULL_TREE;
1720 }
1721 
1722 
1723 /* Partial definition tracking support.  */
1724 
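/* A pd_range records a bit range [offset, offset + size) that is already
   covered by partial definitions; a pd_data additionally carries the
   defining RHS of a single partial store.  */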
1725 struct pd_range
1726 {
1727   HOST_WIDE_INT offset;
1728   HOST_WIDE_INT size;
1729 };
1730 
1731 struct pd_data
1732 {
1733   tree rhs;
1734   HOST_WIDE_INT offset;
1735   HOST_WIDE_INT size;
1736 };
1737 
1738 /* Context for alias walking.  */
1739 
1740 struct vn_walk_cb_data
1741 {
1742   vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1743 		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1744     : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1745       mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1746       tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1747       first_base_set (-2), known_ranges (NULL)
1748   {
1749     if (!last_vuse_ptr)
1750       last_vuse_ptr = &last_vuse;
1751     ao_ref_init (&orig_ref, orig_ref_);
1752     if (mask)
1753       {
1754 	wide_int w = wi::to_wide (mask);
1755 	unsigned int pos = 0, prec = w.get_precision ();
1756 	pd_data pd;
1757 	pd.rhs = build_constructor (NULL_TREE, NULL);
1758 	/* When a bitwise AND with a constant is done on a memory load,
1759 	   we don't really need all the bits to be defined or defined
1760 	   to constants; we don't care what is in the positions
1761 	   corresponding to 0 bits in the mask.
1762 	   So, push the ranges of those 0 bits in the mask as artificial
1763 	   zero stores and let the partial def handling code do the
1764 	   rest.  */
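	/* Illustrative example: for a 32-bit load masked with 0xff00,
	   mask bits [0, 8) and [16, 32) are zero, so artificial zero
	   partial defs covering those bit ranges are pushed below and
	   only bits [8, 16) need to be provided by actual stores (the
	   recorded offsets are mirrored for big-endian targets).  */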
1765 	while (pos < prec)
1766 	  {
1767 	    int tz = wi::ctz (w);
1768 	    if (pos + tz > prec)
1769 	      tz = prec - pos;
1770 	    if (tz)
1771 	      {
1772 		if (BYTES_BIG_ENDIAN)
1773 		  pd.offset = prec - pos - tz;
1774 		else
1775 		  pd.offset = pos;
1776 		pd.size = tz;
1777 		void *r = push_partial_def (pd, 0, 0, 0, prec);
1778 		gcc_assert (r == NULL_TREE);
1779 	      }
1780 	    pos += tz;
1781 	    if (pos == prec)
1782 	      break;
1783 	    w = wi::lrshift (w, tz);
1784 	    tz = wi::ctz (wi::bit_not (w));
1785 	    if (pos + tz > prec)
1786 	      tz = prec - pos;
1787 	    pos += tz;
1788 	    w = wi::lrshift (w, tz);
1789 	  }
1790       }
1791   }
1792   ~vn_walk_cb_data ();
1793   void *finish (alias_set_type, alias_set_type, tree);
1794   void *push_partial_def (pd_data pd,
1795 			  alias_set_type, alias_set_type, HOST_WIDE_INT,
1796 			  HOST_WIDE_INT);
1797 
1798   vn_reference_t vr;
1799   ao_ref orig_ref;
1800   tree *last_vuse_ptr;
1801   tree last_vuse;
1802   tree mask;
1803   tree masked_result;
1804   vn_lookup_kind vn_walk_kind;
1805   bool tbaa_p;
1806   vec<vn_reference_op_s> saved_operands;
1807 
1808   /* The partial defs we come across while walking the VDEF chain.  */
1809   auto_vec<pd_data, 2> partial_defs;
1810   /* The first def's range, to avoid splay tree setup in most cases.  */
1811   pd_range first_range;
1812   alias_set_type first_set;
1813   alias_set_type first_base_set;
1814   splay_tree known_ranges;
1815   obstack ranges_obstack;
1816 };
1817 
1818 vn_walk_cb_data::~vn_walk_cb_data ()
1819 {
1820   if (known_ranges)
1821     {
1822       splay_tree_delete (known_ranges);
1823       obstack_free (&ranges_obstack, NULL);
1824     }
1825   saved_operands.release ();
1826 }
1827 
1828 void *
1829 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1830 {
1831   if (first_set != -2)
1832     {
1833       set = first_set;
1834       base_set = first_base_set;
1835     }
1836   if (mask)
1837     {
1838       masked_result = val;
1839       return (void *) -1;
1840     }
1841   vec<vn_reference_op_s> &operands
1842     = saved_operands.exists () ? saved_operands : vr->operands;
1843   return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1844 						   vr->type, operands, val);
1845 }
1846 
1847 /* pd_range splay-tree helpers.  */
1848 
1849 static int
1850 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1851 {
1852   HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1853   HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1854   if (offset1 < offset2)
1855     return -1;
1856   else if (offset1 > offset2)
1857     return 1;
1858   return 0;
1859 }
1860 
1861 static void *
1862 pd_tree_alloc (int size, void *data_)
1863 {
1864   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1865   return obstack_alloc (&data->ranges_obstack, size);
1866 }
1867 
1868 static void
1869 pd_tree_dealloc (void *, void *)
1870 {
1871 }
1872 
1873 /* Push PD to the vector of partial definitions.  Returns a value
1874    when we are ready to combine things with VUSE, SET and MAXSIZEI,
1875    NULL when we want to continue looking for partial defs or
1876    (void *)-1 on failure.  */
1877 
1878 void *
1879 vn_walk_cb_data::push_partial_def (pd_data pd,
1880 				   alias_set_type set, alias_set_type base_set,
1881 				   HOST_WIDE_INT offseti,
1882 				   HOST_WIDE_INT maxsizei)
1883 {
1884   const HOST_WIDE_INT bufsize = 64;
1885   /* We're using a fixed buffer for encoding so fail early if the object
1886      we want to interpret is bigger.  */
1887   if (maxsizei > bufsize * BITS_PER_UNIT
1888       || CHAR_BIT != 8
1889       || BITS_PER_UNIT != 8
1890       /* Not prepared to handle PDP endian.  */
1891       || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1892     return (void *)-1;
1893 
1894   /* Turn too large constant stores into non-constant stores.  */
1895   if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1896     pd.rhs = error_mark_node;
1897 
1898   /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1899      most a partial byte before and/or after the region.  */
1900   if (!CONSTANT_CLASS_P (pd.rhs))
1901     {
1902       if (pd.offset < offseti)
1903 	{
1904 	  HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1905 	  gcc_assert (pd.size > o);
1906 	  pd.size -= o;
1907 	  pd.offset += o;
1908 	}
1909       if (pd.size > maxsizei)
1910 	pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1911     }
1912 
1913   pd.offset -= offseti;
1914 
1915   bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1916 			|| CONSTANT_CLASS_P (pd.rhs));
1917   if (partial_defs.is_empty ())
1918     {
1919       /* If we get a clobber upfront, fail.  */
1920       if (TREE_CLOBBER_P (pd.rhs))
1921 	return (void *)-1;
1922       if (!pd_constant_p)
1923 	return (void *)-1;
1924       partial_defs.safe_push (pd);
1925       first_range.offset = pd.offset;
1926       first_range.size = pd.size;
1927       first_set = set;
1928       first_base_set = base_set;
1929       last_vuse_ptr = NULL;
1930       /* Continue looking for partial defs.  */
1931       return NULL;
1932     }
1933 
1934   if (!known_ranges)
1935     {
1936       /* ???  Optimize the case where the 2nd partial def completes things.  */
1937       gcc_obstack_init (&ranges_obstack);
1938       known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1939 						    pd_tree_alloc,
1940 						    pd_tree_dealloc, this);
1941       splay_tree_insert (known_ranges,
1942 			 (splay_tree_key)&first_range.offset,
1943 			 (splay_tree_value)&first_range);
1944     }
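  /* Illustrative example: with an existing range [0, 16) and a new partial
     def covering bits [8, 24), the predecessor lookup below finds [0, 16),
     the two ranges overlap and the existing entry is widened to [0, 24).
     Once the merged range covers the whole read, the defs are combined.  */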
1945 
1946   pd_range newr = { pd.offset, pd.size };
1947   splay_tree_node n;
1948   pd_range *r;
1949   /* Lookup the predecessor of offset + 1 and see if we need to merge.  */
1950   HOST_WIDE_INT loffset = newr.offset + 1;
1951   if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1952       && ((r = (pd_range *)n->value), true)
1953       && ranges_known_overlap_p (r->offset, r->size + 1,
1954 				 newr.offset, newr.size))
1955     {
1956       /* Ignore partial defs already covered.  Here we also drop shadowed
1957          clobbers on the floor.  */
1958       if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1959 	return NULL;
1960       r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1961     }
1962   else
1963     {
1964       /* newr.offset wasn't covered yet, insert the range.  */
1965       r = XOBNEW (&ranges_obstack, pd_range);
1966       *r = newr;
1967       splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1968 			 (splay_tree_value)r);
1969     }
1970   /* Merge r which now contains newr and is a member of the splay tree with
1971      adjacent overlapping ranges.  */
1972   pd_range *rafter;
1973   while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1974 	 && ((rafter = (pd_range *)n->value), true)
1975 	 && ranges_known_overlap_p (r->offset, r->size + 1,
1976 				    rafter->offset, rafter->size))
1977     {
1978       r->size = MAX (r->offset + r->size,
1979 		     rafter->offset + rafter->size) - r->offset;
1980       splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1981     }
1982   /* If we get a clobber, fail.  */
1983   if (TREE_CLOBBER_P (pd.rhs))
1984     return (void *)-1;
1985   /* Non-constants are OK as long as they are shadowed by a constant.  */
1986   if (!pd_constant_p)
1987     return (void *)-1;
1988   partial_defs.safe_push (pd);
1989 
1990   /* Now we have merged newr into the range tree.  Once we have covered
1991      the whole access ([0, maxsizei], rebased to the read) the tree
1992      contains exactly one node with the desired properties and it is 'r'.  */
1993   if (!known_subrange_p (0, maxsizei, r->offset, r->size))
1994     /* Continue looking for partial defs.  */
1995     return NULL;
1996 
1997   /* Now simply native encode all partial defs in reverse order.  */
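  /* Illustrative example (little-endian): combining a 16-bit constant
     store of 0x1234 at bits [0, 16) with a zeroing store covering bits
     [16, 32) fills buffer[] with { 0x34, 0x12, 0x00, 0x00 } and the
     native_interpret_expr call below yields the 32-bit value 0x1234.  */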
1998   unsigned ndefs = partial_defs.length ();
1999   /* We support up to 512-bit values (for V8DFmode).  */
2000   unsigned char buffer[bufsize + 1];
2001   unsigned char this_buffer[bufsize + 1];
2002   int len;
2003 
2004   memset (buffer, 0, bufsize + 1);
2005   unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2006   while (!partial_defs.is_empty ())
2007     {
2008       pd_data pd = partial_defs.pop ();
2009       unsigned int amnt;
2010       if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2011 	{
2012 	  /* Empty CONSTRUCTOR.  */
2013 	  if (pd.size >= needed_len * BITS_PER_UNIT)
2014 	    len = needed_len;
2015 	  else
2016 	    len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2017 	  memset (this_buffer, 0, len);
2018 	}
2019       else
2020 	{
2021 	  len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2022 				    MAX (0, -pd.offset) / BITS_PER_UNIT);
2023 	  if (len <= 0
2024 	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2025 			- MAX (0, -pd.offset) / BITS_PER_UNIT))
2026 	    {
2027 	      if (dump_file && (dump_flags & TDF_DETAILS))
2028 		fprintf (dump_file, "Failed to encode %u "
2029 			 "partial definitions\n", ndefs);
2030 	      return (void *)-1;
2031 	    }
2032 	}
2033 
2034       unsigned char *p = buffer;
2035       HOST_WIDE_INT size = pd.size;
2036       if (pd.offset < 0)
2037 	size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2038       this_buffer[len] = 0;
2039       if (BYTES_BIG_ENDIAN)
2040 	{
2041 	  /* LSB of this_buffer[len - 1] byte should be at
2042 	     pd.offset + pd.size - 1 bits in buffer.  */
2043 	  amnt = ((unsigned HOST_WIDE_INT) pd.offset
2044 		  + pd.size) % BITS_PER_UNIT;
2045 	  if (amnt)
2046 	    shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2047 	  unsigned char *q = this_buffer;
2048 	  unsigned int off = 0;
2049 	  if (pd.offset >= 0)
2050 	    {
2051 	      unsigned int msk;
2052 	      off = pd.offset / BITS_PER_UNIT;
2053 	      gcc_assert (off < needed_len);
2054 	      p = buffer + off;
2055 	      if (size <= amnt)
2056 		{
2057 		  msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2058 		  *p = (*p & ~msk) | (this_buffer[len] & msk);
2059 		  size = 0;
2060 		}
2061 	      else
2062 		{
2063 		  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2064 		    q = (this_buffer + len
2065 			 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2066 			    / BITS_PER_UNIT));
2067 		  if (pd.offset % BITS_PER_UNIT)
2068 		    {
2069 		      msk = -1U << (BITS_PER_UNIT
2070 				    - (pd.offset % BITS_PER_UNIT));
2071 		      *p = (*p & msk) | (*q & ~msk);
2072 		      p++;
2073 		      q++;
2074 		      off++;
2075 		      size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2076 		      gcc_assert (size >= 0);
2077 		    }
2078 		}
2079 	    }
2080 	  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2081 	    {
2082 	      q = (this_buffer + len
2083 		   - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2084 		      / BITS_PER_UNIT));
2085 	      if (pd.offset % BITS_PER_UNIT)
2086 		{
2087 		  q++;
2088 		  size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2089 					   % BITS_PER_UNIT);
2090 		  gcc_assert (size >= 0);
2091 		}
2092 	    }
2093 	  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2094 	      > needed_len)
2095 	    size = (needed_len - off) * BITS_PER_UNIT;
2096 	  memcpy (p, q, size / BITS_PER_UNIT);
2097 	  if (size % BITS_PER_UNIT)
2098 	    {
2099 	      unsigned int msk
2100 		= -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2101 	      p += size / BITS_PER_UNIT;
2102 	      q += size / BITS_PER_UNIT;
2103 	      *p = (*q & msk) | (*p & ~msk);
2104 	    }
2105 	}
2106       else
2107 	{
2108 	  if (pd.offset >= 0)
2109 	    {
2110 	      /* LSB of this_buffer[0] byte should be at pd.offset bits
2111 		 in buffer.  */
2112 	      unsigned int msk;
2113 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2114 	      amnt = pd.offset % BITS_PER_UNIT;
2115 	      if (amnt)
2116 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2117 	      unsigned int off = pd.offset / BITS_PER_UNIT;
2118 	      gcc_assert (off < needed_len);
2119 	      size = MIN (size,
2120 			  (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2121 	      p = buffer + off;
2122 	      if (amnt + size < BITS_PER_UNIT)
2123 		{
2124 		  /* Low amnt bits come from *p, then size bits
2125 		     from this_buffer[0] and the remaining again from
2126 		     *p.  */
2127 		  msk = ((1 << size) - 1) << amnt;
2128 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2129 		  size = 0;
2130 		}
2131 	      else if (amnt)
2132 		{
2133 		  msk = -1U << amnt;
2134 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2135 		  p++;
2136 		  size -= (BITS_PER_UNIT - amnt);
2137 		}
2138 	    }
2139 	  else
2140 	    {
2141 	      amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2142 	      if (amnt)
2143 		size -= BITS_PER_UNIT - amnt;
2144 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2145 	      if (amnt)
2146 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2147 	    }
2148 	  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2149 	  p += size / BITS_PER_UNIT;
2150 	  if (size % BITS_PER_UNIT)
2151 	    {
2152 	      unsigned int msk = -1U << (size % BITS_PER_UNIT);
2153 	      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2154 		    & ~msk) | (*p & msk);
2155 	    }
2156 	}
2157     }
2158 
2159   tree type = vr->type;
2160   /* Make sure to interpret in a type that has a range covering the whole
2161      access size.  */
2162   if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2163     type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2164   tree val;
2165   if (BYTES_BIG_ENDIAN)
2166     {
2167       unsigned sz = needed_len;
2168       if (maxsizei % BITS_PER_UNIT)
2169 	shift_bytes_in_array_right (buffer, needed_len,
2170 				    BITS_PER_UNIT
2171 				    - (maxsizei % BITS_PER_UNIT));
2172       if (INTEGRAL_TYPE_P (type))
2173 	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2174       if (sz > needed_len)
2175 	{
2176 	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2177 	  val = native_interpret_expr (type, this_buffer, sz);
2178 	}
2179       else
2180 	val = native_interpret_expr (type, buffer, needed_len);
2181     }
2182   else
2183     val = native_interpret_expr (type, buffer, bufsize);
2184   /* If we chop off bits because the type's precision doesn't match the
2185      memory access size, this is OK when optimizing reads but not when
2186      called from the DSE code during elimination.  */
2187   if (val && type != vr->type)
2188     {
2189       if (! int_fits_type_p (val, vr->type))
2190 	val = NULL_TREE;
2191       else
2192 	val = fold_convert (vr->type, val);
2193     }
2194 
2195   if (val)
2196     {
2197       if (dump_file && (dump_flags & TDF_DETAILS))
2198 	fprintf (dump_file,
2199 		 "Successfully combined %u partial definitions\n", ndefs);
2200       /* We are using the alias-set of the first store we encounter which
2201 	 should be appropriate here.  */
2202       return finish (first_set, first_base_set, val);
2203     }
2204   else
2205     {
2206       if (dump_file && (dump_flags & TDF_DETAILS))
2207 	fprintf (dump_file,
2208 		 "Failed to interpret %u encoded partial definitions\n", ndefs);
2209       return (void *)-1;
2210     }
2211 }
2212 
2213 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
2214    with the current VUSE and performs the expression lookup.  */
2215 
2216 static void *
2217 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2218 {
2219   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2220   vn_reference_t vr = data->vr;
2221   vn_reference_s **slot;
2222   hashval_t hash;
2223 
2224   /* If we have partial definitions recorded we have to go through
2225      vn_reference_lookup_3.  */
2226   if (!data->partial_defs.is_empty ())
2227     return NULL;
2228 
2229   if (data->last_vuse_ptr)
2230     {
2231       *data->last_vuse_ptr = vuse;
2232       data->last_vuse = vuse;
2233     }
2234 
2235   /* Fixup vuse and hash.  */
2236   if (vr->vuse)
2237     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2238   vr->vuse = vuse_ssa_val (vuse);
2239   if (vr->vuse)
2240     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2241 
2242   hash = vr->hashcode;
2243   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2244   if (slot)
2245     {
2246       if ((*slot)->result && data->saved_operands.exists ())
2247 	return data->finish (vr->set, vr->base_set, (*slot)->result);
2248       return *slot;
2249     }
2250 
2251   return NULL;
2252 }
2253 
2254 /* Lookup an existing or insert a new vn_reference entry into the
2255    value table for the VUSE, SET, BASE_SET, TYPE, OPERANDS reference
2256    which has the value VALUE, either a constant or an SSA name.  */
2257 
2258 static vn_reference_t
2259 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2260 					  alias_set_type set,
2261 					  alias_set_type base_set,
2262 					  tree type,
2263 					  vec<vn_reference_op_s,
2264 					        va_heap> operands,
2265 					  tree value)
2266 {
2267   vn_reference_s vr1;
2268   vn_reference_t result;
2269   unsigned value_id;
2270   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2271   vr1.operands = operands;
2272   vr1.type = type;
2273   vr1.set = set;
2274   vr1.base_set = base_set;
2275   vr1.hashcode = vn_reference_compute_hash (&vr1);
2276   if (vn_reference_lookup_1 (&vr1, &result))
2277     return result;
2278   if (TREE_CODE (value) == SSA_NAME)
2279     value_id = VN_INFO (value)->value_id;
2280   else
2281     value_id = get_or_alloc_constant_value_id (value);
2282   return vn_reference_insert_pieces (vuse, set, base_set, type,
2283 				     operands.copy (), value, value_id);
2284 }
2285 
2286 /* Return a value-number for RCODE OPS... either by looking up an existing
2287    value-number for the simplified result or by inserting the operation if
2288    INSERT is true.  */
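/* For example, given a res_op representing x_1 + 0, resimplification
   yields x_1 and its current value number is returned.  Otherwise the
   (valueized) operation is looked up and, if INSERT is true and nothing
   is found, a new SSA name is created, value-numbered to itself and
   marked as needing insertion.  */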
2289 
2290 static tree
2291 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2292 {
2293   tree result = NULL_TREE;
2294   /* We will be creating a value number for
2295        RCODE (OPS...).
2296      So first simplify and lookup this expression to see if it
2297      is already available.  */
2298   /* For simplification valueize.  */
2299   unsigned i;
2300   for (i = 0; i < res_op->num_ops; ++i)
2301     if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2302       {
2303 	tree tem = vn_valueize (res_op->ops[i]);
2304 	if (!tem)
2305 	  break;
2306 	res_op->ops[i] = tem;
2307       }
2308   /* If valueization of an operand fails (it is not available), skip
2309      simplification.  */
2310   bool res = false;
2311   if (i == res_op->num_ops)
2312     {
2313       mprts_hook = vn_lookup_simplify_result;
2314       res = res_op->resimplify (NULL, vn_valueize);
2315       mprts_hook = NULL;
2316     }
2317   gimple *new_stmt = NULL;
2318   if (res
2319       && gimple_simplified_result_is_gimple_val (res_op))
2320     {
2321       /* The expression is already available.  */
2322       result = res_op->ops[0];
2323       /* Valueize it, simplification returns something in AVAIL only.  */
2324       if (TREE_CODE (result) == SSA_NAME)
2325 	result = SSA_VAL (result);
2326     }
2327   else
2328     {
2329       tree val = vn_lookup_simplify_result (res_op);
2330       if (!val && insert)
2331 	{
2332 	  gimple_seq stmts = NULL;
2333 	  result = maybe_push_res_to_seq (res_op, &stmts);
2334 	  if (result)
2335 	    {
2336 	      gcc_assert (gimple_seq_singleton_p (stmts));
2337 	      new_stmt = gimple_seq_first_stmt (stmts);
2338 	    }
2339 	}
2340       else
2341 	/* The expression is already available.  */
2342 	result = val;
2343     }
2344   if (new_stmt)
2345     {
2346       /* The expression is not yet available, value-number lhs to
2347 	 the new SSA_NAME we created.  */
2348       /* Initialize value-number information properly.  */
2349       vn_ssa_aux_t result_info = VN_INFO (result);
2350       result_info->valnum = result;
2351       result_info->value_id = get_next_value_id ();
2352       result_info->visited = 1;
2353       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2354 					  new_stmt);
2355       result_info->needs_insertion = true;
2356       /* ???  PRE phi-translation inserts NARYs without corresponding
2357          SSA name result.  Re-use those but set their result according
2358 	 to the stmt we just built.  */
2359       vn_nary_op_t nary = NULL;
2360       vn_nary_op_lookup_stmt (new_stmt, &nary);
2361       if (nary)
2362 	{
2363 	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2364 	  nary->u.result = gimple_assign_lhs (new_stmt);
2365 	}
2366       /* As all "inserted" statements are singleton SCCs, insert
2367 	 to the valid table.  This is strictly needed to
2368 	 avoid re-generating new value SSA_NAMEs for the same
2369 	 expression during SCC iteration over and over (the
2370 	 optimistic table gets cleared after each iteration).
2371 	 We do not need to insert into the optimistic table, as
2372 	 lookups there will fall back to the valid table.  */
2373       else
2374 	{
2375 	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
2376 	  vn_nary_op_t vno1
2377 	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2378 	  vno1->value_id = result_info->value_id;
2379 	  vno1->length = length;
2380 	  vno1->predicated_values = 0;
2381 	  vno1->u.result = result;
2382 	  init_vn_nary_op_from_stmt (vno1, new_stmt);
2383 	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
2384 	  /* Also do not link it into the undo chain.  */
2385 	  last_inserted_nary = vno1->next;
2386 	  vno1->next = (vn_nary_op_t)(void *)-1;
2387 	}
2388       if (dump_file && (dump_flags & TDF_DETAILS))
2389 	{
2390 	  fprintf (dump_file, "Inserting name ");
2391 	  print_generic_expr (dump_file, result);
2392 	  fprintf (dump_file, " for expression ");
2393 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2394 	  fprintf (dump_file, "\n");
2395 	}
2396     }
2397   return result;
2398 }
2399 
2400 /* Return a value-number for RCODE OPS... either by looking up an existing
2401    value-number for the simplified result or by inserting the operation.  */
2402 
2403 static tree
2404 vn_nary_build_or_lookup (gimple_match_op *res_op)
2405 {
2406   return vn_nary_build_or_lookup_1 (res_op, true);
2407 }
2408 
2409 /* Try to simplify the n-ary operation NARY and return its value if
2410    present.  */
2411 
2412 tree
2413 vn_nary_simplify (vn_nary_op_t nary)
2414 {
2415   if (nary->length > gimple_match_op::MAX_NUM_OPS)
2416     return NULL_TREE;
2417   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2418 		      nary->type, nary->length);
2419   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2420   return vn_nary_build_or_lookup_1 (&op, false);
2421 }
2422 
2423 /* Elimination engine.  */
2424 
2425 class eliminate_dom_walker : public dom_walker
2426 {
2427 public:
2428   eliminate_dom_walker (cdi_direction, bitmap);
2429   ~eliminate_dom_walker ();
2430 
2431   virtual edge before_dom_children (basic_block);
2432   virtual void after_dom_children (basic_block);
2433 
2434   virtual tree eliminate_avail (basic_block, tree op);
2435   virtual void eliminate_push_avail (basic_block, tree op);
2436   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2437 
2438   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2439 
2440   unsigned eliminate_cleanup (bool region_p = false);
2441 
2442   bool do_pre;
2443   unsigned int el_todo;
2444   unsigned int eliminations;
2445   unsigned int insertions;
2446 
2447   /* SSA names that had their defs inserted by PRE if do_pre.  */
2448   bitmap inserted_exprs;
2449 
2450   /* Blocks with statements that have had their EH properties changed.  */
2451   bitmap need_eh_cleanup;
2452 
2453   /* Blocks with statements that have had their AB properties changed.  */
2454   bitmap need_ab_cleanup;
2455 
2456   /* Local state for the eliminate domwalk.  */
2457   auto_vec<gimple *> to_remove;
2458   auto_vec<gimple *> to_fixup;
2459   auto_vec<tree> avail;
2460   auto_vec<tree> avail_stack;
2461 };
2462 
2463 /* Adaptor to the elimination engine using RPO availability.  */
2464 
2465 class rpo_elim : public eliminate_dom_walker
2466 {
2467 public:
2468   rpo_elim(basic_block entry_)
2469     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2470       m_avail_freelist (NULL) {}
2471 
2472   virtual tree eliminate_avail (basic_block, tree op);
2473 
2474   virtual void eliminate_push_avail (basic_block, tree);
2475 
2476   basic_block entry;
2477   /* Freelist of avail entries which are allocated from the vn_ssa_aux
2478      obstack.  */
2479   vn_avail *m_avail_freelist;
2480 };
2481 
2482 /* Global RPO state for access from hooks.  */
2483 static eliminate_dom_walker *rpo_avail;
2484 basic_block vn_context_bb;
2485 
2486 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2487    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2488    Otherwise return false.  */
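/* For example, MEM_REF bases MEM[p_1 + 4] and MEM[p_1 + 16] compare equal
   after stripping the MEM_REFs, with *OFFSET1 increased by 32 bits and
   *OFFSET2 by 128 bits (assuming 8-bit units).  Illustrative only.  */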
2489 
2490 static bool
2491 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2492 				       tree base2, poly_int64 *offset2)
2493 {
2494   poly_int64 soff;
2495   if (TREE_CODE (base1) == MEM_REF
2496       && TREE_CODE (base2) == MEM_REF)
2497     {
2498       if (mem_ref_offset (base1).to_shwi (&soff))
2499 	{
2500 	  base1 = TREE_OPERAND (base1, 0);
2501 	  *offset1 += soff * BITS_PER_UNIT;
2502 	}
2503       if (mem_ref_offset (base2).to_shwi (&soff))
2504 	{
2505 	  base2 = TREE_OPERAND (base2, 0);
2506 	  *offset2 += soff * BITS_PER_UNIT;
2507 	}
2508       return operand_equal_p (base1, base2, 0);
2509     }
2510   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2511 }
2512 
2513 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
2514    from the statement defining VUSE and if not successful tries to
2515    translate *REF and VR_ through an aggregate copy at the definition
2516    of VUSE.  If *DISAMBIGUATE_ONLY is not TR_TRANSLATE then only
2517    disambiguation is performed, no translation of *REF and *VR.  If only
2518    disambiguation was performed then *DISAMBIGUATE_ONLY is updated.  */
2519 
2520 static void *
2521 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2522 		       translate_flags *disambiguate_only)
2523 {
2524   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2525   vn_reference_t vr = data->vr;
2526   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2527   tree base = ao_ref_base (ref);
2528   HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2529   static vec<vn_reference_op_s> lhs_ops;
2530   ao_ref lhs_ref;
2531   bool lhs_ref_ok = false;
2532   poly_int64 copy_size;
2533 
2534   /* First try to disambiguate after value-replacing in the definition's LHS.  */
2535   if (is_gimple_assign (def_stmt))
2536     {
2537       tree lhs = gimple_assign_lhs (def_stmt);
2538       bool valueized_anything = false;
2539       /* Avoid re-allocation overhead.  */
2540       lhs_ops.truncate (0);
2541       basic_block saved_rpo_bb = vn_context_bb;
2542       vn_context_bb = gimple_bb (def_stmt);
2543       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2544 	{
2545 	  copy_reference_ops_from_ref (lhs, &lhs_ops);
2546 	  valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2547 	}
2548       vn_context_bb = saved_rpo_bb;
2549       ao_ref_init (&lhs_ref, lhs);
2550       lhs_ref_ok = true;
2551       if (valueized_anything
2552 	  && ao_ref_init_from_vn_reference
2553 	       (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2554 		ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2555 	  && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2556 	{
2557 	  *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2558 	  return NULL;
2559 	}
2560 
2561       /* Besides valueizing the LHS we can also use access-path based
2562          disambiguation on the original non-valueized ref.  */
2563       if (!ref->ref
2564 	  && lhs_ref_ok
2565 	  && data->orig_ref.ref)
2566 	{
2567 	  /* We want to use the non-valueized LHS for this, but avoid redundant
2568 	     work.  */
2569 	  ao_ref *lref = &lhs_ref;
2570 	  ao_ref lref_alt;
2571 	  if (valueized_anything)
2572 	    {
2573 	      ao_ref_init (&lref_alt, lhs);
2574 	      lref = &lref_alt;
2575 	    }
2576 	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2577 	    {
2578 	      *disambiguate_only = (valueized_anything
2579 				    ? TR_VALUEIZE_AND_DISAMBIGUATE
2580 				    : TR_DISAMBIGUATE);
2581 	      return NULL;
2582 	    }
2583 	}
2584 
2585       /* If we reach a clobbering statement try to skip it and see if
2586          we find a VN result with exactly the same value as the
2587 	 possible clobber.  In this case we can ignore the clobber
2588 	 and return the found value.  */
2589       if (is_gimple_reg_type (TREE_TYPE (lhs))
2590 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
2591 	  && (ref->ref || data->orig_ref.ref))
2592 	{
2593 	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2594 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
2595 	  data->last_vuse_ptr = NULL;
2596 	  tree saved_vuse = vr->vuse;
2597 	  hashval_t saved_hashcode = vr->hashcode;
2598 	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2599 	  /* Need to restore vr->vuse and vr->hashcode.  */
2600 	  vr->vuse = saved_vuse;
2601 	  vr->hashcode = saved_hashcode;
2602 	  data->last_vuse_ptr = saved_last_vuse_ptr;
2603 	  if (res && res != (void *)-1)
2604 	    {
2605 	      vn_reference_t vnresult = (vn_reference_t) res;
2606 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2607 	      if (TREE_CODE (rhs) == SSA_NAME)
2608 		rhs = SSA_VAL (rhs);
2609 	      if (vnresult->result
2610 		  && operand_equal_p (vnresult->result, rhs, 0)
2611 		  /* We have to honor our promise about union type punning
2612 		     and also support arbitrary overlaps with
2613 		     -fno-strict-aliasing.  So simply resort to alignment to
2614 		     rule out overlaps.  Do this check last because it is
2615 		     quite expensive compared to the hash-lookup above.  */
2616 		  && multiple_p (get_object_alignment
2617 				   (ref->ref ? ref->ref : data->orig_ref.ref),
2618 				 ref->size)
2619 		  && multiple_p (get_object_alignment (lhs), ref->size))
2620 		return res;
2621 	    }
2622 	}
2623     }
2624   else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2625 	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2626 	   && gimple_call_num_args (def_stmt) <= 4)
2627     {
2628       /* For builtin calls valueize their arguments and call the
2629          alias oracle again.  Valueization may improve points-to
2630 	 info of pointers and constify size and position arguments.
2631 	 Originally this was motivated by PR61034 which has
2632 	 conditional calls to free falsely clobbering ref because
2633 	 of imprecise points-to info of the argument.  */
2634       tree oldargs[4];
2635       bool valueized_anything = false;
2636       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2637 	{
2638 	  oldargs[i] = gimple_call_arg (def_stmt, i);
2639 	  tree val = vn_valueize (oldargs[i]);
2640 	  if (val != oldargs[i])
2641 	    {
2642 	      gimple_call_set_arg (def_stmt, i, val);
2643 	      valueized_anything = true;
2644 	    }
2645 	}
2646       if (valueized_anything)
2647 	{
2648 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2649 					       ref, data->tbaa_p);
2650 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2651 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
2652 	  if (!res)
2653 	    {
2654 	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2655 	      return NULL;
2656 	    }
2657 	}
2658     }
2659 
2660   if (*disambiguate_only > TR_TRANSLATE)
2661     return (void *)-1;
2662 
2663   /* If we cannot constrain the size of the reference we cannot
2664      test if anything kills it.  */
2665   if (!ref->max_size_known_p ())
2666     return (void *)-1;
2667 
2668   poly_int64 offset = ref->offset;
2669   poly_int64 maxsize = ref->max_size;
2670 
2671   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
2672      from that definition.
2673      1) Memset.  */
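  /* Illustrative example: after memset (p, 0, 16) a load of *(int *)p that
     lies within those 16 bytes is valued to zero; for a non-zero constant
     byte the value is reconstructed by replicating the byte and using
     native_interpret_expr, and reads only partially covered by a zeroing
     memset are handed to the partial-def tracker.  */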
2674   if (is_gimple_reg_type (vr->type)
2675       && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2676 	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2677       && (integer_zerop (gimple_call_arg (def_stmt, 1))
2678 	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2679 	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2680 	      && CHAR_BIT == 8
2681 	      && BITS_PER_UNIT == 8
2682 	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2683 	      && offset.is_constant (&offseti)
2684 	      && ref->size.is_constant (&sizei)
2685 	      && (offseti % BITS_PER_UNIT == 0
2686 		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2687       && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2688 	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2689 	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2690       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2691 	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2692     {
2693       tree base2;
2694       poly_int64 offset2, size2, maxsize2;
2695       bool reverse;
2696       tree ref2 = gimple_call_arg (def_stmt, 0);
2697       if (TREE_CODE (ref2) == SSA_NAME)
2698 	{
2699 	  ref2 = SSA_VAL (ref2);
2700 	  if (TREE_CODE (ref2) == SSA_NAME
2701 	      && (TREE_CODE (base) != MEM_REF
2702 		  || TREE_OPERAND (base, 0) != ref2))
2703 	    {
2704 	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2705 	      if (gimple_assign_single_p (def_stmt)
2706 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2707 		ref2 = gimple_assign_rhs1 (def_stmt);
2708 	    }
2709 	}
2710       if (TREE_CODE (ref2) == ADDR_EXPR)
2711 	{
2712 	  ref2 = TREE_OPERAND (ref2, 0);
2713 	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2714 					   &reverse);
2715 	  if (!known_size_p (maxsize2)
2716 	      || !known_eq (maxsize2, size2)
2717 	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2718 	    return (void *)-1;
2719 	}
2720       else if (TREE_CODE (ref2) == SSA_NAME)
2721 	{
2722 	  poly_int64 soff;
2723 	  if (TREE_CODE (base) != MEM_REF
2724 	      || !(mem_ref_offset (base)
2725 		   << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2726 	    return (void *)-1;
2727 	  offset += soff;
2728 	  offset2 = 0;
2729 	  if (TREE_OPERAND (base, 0) != ref2)
2730 	    {
2731 	      gimple *def = SSA_NAME_DEF_STMT (ref2);
2732 	      if (is_gimple_assign (def)
2733 		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2734 		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2735 		  && poly_int_tree_p (gimple_assign_rhs2 (def)))
2736 		{
2737 		  tree rhs2 = gimple_assign_rhs2 (def);
2738 		  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2739 					       SIGNED)
2740 			<< LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2741 		    return (void *)-1;
2742 		  ref2 = gimple_assign_rhs1 (def);
2743 		  if (TREE_CODE (ref2) == SSA_NAME)
2744 		    ref2 = SSA_VAL (ref2);
2745 		}
2746 	      else
2747 		return (void *)-1;
2748 	    }
2749 	}
2750       else
2751 	return (void *)-1;
2752       tree len = gimple_call_arg (def_stmt, 2);
2753       HOST_WIDE_INT leni, offset2i;
2754       if (TREE_CODE (len) == SSA_NAME)
2755 	len = SSA_VAL (len);
2756       /* Sometimes the above trickery is smarter than alias analysis.  Take
2757          advantage of that.  */
2758       if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2759 				   (wi::to_poly_offset (len)
2760 				    << LOG2_BITS_PER_UNIT)))
2761 	return NULL;
2762       if (data->partial_defs.is_empty ()
2763 	  && known_subrange_p (offset, maxsize, offset2,
2764 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2765 	{
2766 	  tree val;
2767 	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2768 	    val = build_zero_cst (vr->type);
2769 	  else if (INTEGRAL_TYPE_P (vr->type)
2770 		   && known_eq (ref->size, 8)
2771 		   && offseti % BITS_PER_UNIT == 0)
2772 	    {
2773 	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2774 				      vr->type, gimple_call_arg (def_stmt, 1));
2775 	      val = vn_nary_build_or_lookup (&res_op);
2776 	      if (!val
2777 		  || (TREE_CODE (val) == SSA_NAME
2778 		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2779 		return (void *)-1;
2780 	    }
2781 	  else
2782 	    {
2783 	      unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2784 	      if (INTEGRAL_TYPE_P (vr->type))
2785 		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2786 	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2787 	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2788 		      buflen);
2789 	      if (BYTES_BIG_ENDIAN)
2790 		{
2791 		  unsigned int amnt
2792 		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
2793 		       % BITS_PER_UNIT);
2794 		  if (amnt)
2795 		    {
2796 		      shift_bytes_in_array_right (buf, buflen,
2797 						  BITS_PER_UNIT - amnt);
2798 		      buf++;
2799 		      buflen--;
2800 		    }
2801 		}
2802 	      else if (offseti % BITS_PER_UNIT != 0)
2803 		{
2804 		  unsigned int amnt
2805 		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2806 				       % BITS_PER_UNIT);
2807 		  shift_bytes_in_array_left (buf, buflen, amnt);
2808 		  buf++;
2809 		  buflen--;
2810 		}
2811 	      val = native_interpret_expr (vr->type, buf, buflen);
2812 	      if (!val)
2813 		return (void *)-1;
2814 	    }
2815 	  return data->finish (0, 0, val);
2816 	}
2817       /* For now handle clearing memory with partial defs.  */
2818       else if (known_eq (ref->size, maxsize)
2819 	       && integer_zerop (gimple_call_arg (def_stmt, 1))
2820 	       && tree_fits_poly_int64_p (len)
2821 	       && tree_to_poly_int64 (len).is_constant (&leni)
2822 	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2823 	       && offset.is_constant (&offseti)
2824 	       && offset2.is_constant (&offset2i)
2825 	       && maxsize.is_constant (&maxsizei)
2826 	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2827 					  leni << LOG2_BITS_PER_UNIT))
2828 	{
2829 	  pd_data pd;
2830 	  pd.rhs = build_constructor (NULL_TREE, NULL);
2831 	  pd.offset = offset2i;
2832 	  pd.size = leni << LOG2_BITS_PER_UNIT;
2833 	  return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2834 	}
2835     }
2836 
2837   /* 2) Assignment from an empty CONSTRUCTOR.  */
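  /* For example, after an aggregate assignment a = (struct S){} a read of
     a.f that is fully covered by the store is value-numbered to zero of
     its type; partially overlapping reads are fed to the partial-def
     tracker (which may ignore clobbers shadowed by later defs).  */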
2838   else if (is_gimple_reg_type (vr->type)
2839 	   && gimple_assign_single_p (def_stmt)
2840 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2841 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2842     {
2843       tree base2;
2844       poly_int64 offset2, size2, maxsize2;
2845       HOST_WIDE_INT offset2i, size2i;
2846       gcc_assert (lhs_ref_ok);
2847       base2 = ao_ref_base (&lhs_ref);
2848       offset2 = lhs_ref.offset;
2849       size2 = lhs_ref.size;
2850       maxsize2 = lhs_ref.max_size;
2851       if (known_size_p (maxsize2)
2852 	  && known_eq (maxsize2, size2)
2853 	  && adjust_offsets_for_equal_base_address (base, &offset,
2854 						    base2, &offset2))
2855 	{
2856 	  if (data->partial_defs.is_empty ()
2857 	      && known_subrange_p (offset, maxsize, offset2, size2))
2858 	    {
2859 	      /* While technically undefined behavior, do not optimize
2860 	         a full read from a clobber.  */
2861 	      if (gimple_clobber_p (def_stmt))
2862 		return (void *)-1;
2863 	      tree val = build_zero_cst (vr->type);
2864 	      return data->finish (ao_ref_alias_set (&lhs_ref),
2865 				   ao_ref_base_alias_set (&lhs_ref), val);
2866 	    }
2867 	  else if (known_eq (ref->size, maxsize)
2868 		   && maxsize.is_constant (&maxsizei)
2869 		   && offset.is_constant (&offseti)
2870 		   && offset2.is_constant (&offset2i)
2871 		   && size2.is_constant (&size2i)
2872 		   && ranges_known_overlap_p (offseti, maxsizei,
2873 					      offset2i, size2i))
2874 	    {
2875 	      /* Let clobbers be consumed by the partial-def tracker
2876 	         which can choose to ignore them if they are shadowed
2877 		 by a later def.  */
2878 	      pd_data pd;
2879 	      pd.rhs = gimple_assign_rhs1 (def_stmt);
2880 	      pd.offset = offset2i;
2881 	      pd.size = size2i;
2882 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2883 					     ao_ref_base_alias_set (&lhs_ref),
2884 					     offseti, maxsizei);
2885 	    }
2886 	}
2887     }
2888 
2889   /* 3) Assignment from a constant.  We can use fold's native
2890      encode/interpret routines to extract the assigned bits.  */
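  /* Illustrative example (little-endian): for *(int *)p = 0x11223344
     followed by a load of ((char *)p)[1], native_encode_expr encodes the
     stored constant starting at byte 1 and native_interpret_expr yields
     the value 0x33.  */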
2891   else if (known_eq (ref->size, maxsize)
2892 	   && is_gimple_reg_type (vr->type)
2893 	   && !contains_storage_order_barrier_p (vr->operands)
2894 	   && gimple_assign_single_p (def_stmt)
2895 	   && CHAR_BIT == 8
2896 	   && BITS_PER_UNIT == 8
2897 	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2898 	   /* native_encode and native_decode operate on arrays of bytes
2899 	      and so fundamentally need a compile-time size and offset.  */
2900 	   && maxsize.is_constant (&maxsizei)
2901 	   && offset.is_constant (&offseti)
2902 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2903 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2904 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2905     {
2906       tree lhs = gimple_assign_lhs (def_stmt);
2907       tree base2;
2908       poly_int64 offset2, size2, maxsize2;
2909       HOST_WIDE_INT offset2i, size2i;
2910       bool reverse;
2911       gcc_assert (lhs_ref_ok);
2912       base2 = ao_ref_base (&lhs_ref);
2913       offset2 = lhs_ref.offset;
2914       size2 = lhs_ref.size;
2915       maxsize2 = lhs_ref.max_size;
2916       reverse = reverse_storage_order_for_component_p (lhs);
2917       if (base2
2918 	  && !reverse
2919 	  && !storage_order_barrier_p (lhs)
2920 	  && known_eq (maxsize2, size2)
2921 	  && adjust_offsets_for_equal_base_address (base, &offset,
2922 						    base2, &offset2)
2923 	  && offset.is_constant (&offseti)
2924 	  && offset2.is_constant (&offset2i)
2925 	  && size2.is_constant (&size2i))
2926 	{
2927 	  if (data->partial_defs.is_empty ()
2928 	      && known_subrange_p (offseti, maxsizei, offset2, size2))
2929 	    {
2930 	      /* We support up to 512-bit values (for V8DFmode).  */
2931 	      unsigned char buffer[65];
2932 	      int len;
2933 
2934 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2935 	      if (TREE_CODE (rhs) == SSA_NAME)
2936 		rhs = SSA_VAL (rhs);
2937 	      len = native_encode_expr (rhs,
2938 					buffer, sizeof (buffer) - 1,
2939 					(offseti - offset2i) / BITS_PER_UNIT);
2940 	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2941 		{
2942 		  tree type = vr->type;
2943 		  unsigned char *buf = buffer;
2944 		  unsigned int amnt = 0;
2945 		  /* Make sure to interpret in a type that has a range
2946 		     covering the whole access size.  */
2947 		  if (INTEGRAL_TYPE_P (vr->type)
2948 		      && maxsizei != TYPE_PRECISION (vr->type))
2949 		    type = build_nonstandard_integer_type (maxsizei,
2950 							   TYPE_UNSIGNED (type));
2951 		  if (BYTES_BIG_ENDIAN)
2952 		    {
2953 		      /* For big-endian native_encode_expr stored the rhs
2954 			 such that the LSB of it is the LSB of buffer[len - 1].
2955 			 That bit is stored into memory at position
2956 			 offset2 + size2 - 1, i.e. in byte
2957 			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2958 			 E.g. for offset2 1 and size2 14, rhs -1 and memory
2959 			 previously cleared that is:
2960 			 0        1
2961 			 01111111|11111110
2962 			 Now, if we want to extract offset 2 and size 12 from
2963 			 it using native_interpret_expr (which actually works
2964 			 for integral bitfield types in terms of byte size of
2965 			 the mode), the native_encode_expr stored the value
2966 			 into buffer as
2967 			 XX111111|11111111
2968 			 and returned len 2 (the X bits are outside of
2969 			 precision).
2970 			 Let sz be maxsize / BITS_PER_UNIT if not extracting
2971 			 a bitfield, and GET_MODE_SIZE otherwise.
2972 			 We need to align the LSB of the value we want to
2973 			 extract as the LSB of buf[sz - 1].
2974 			 The LSB from memory we need to read is at position
2975 			 offset + maxsize - 1.  */
2976 		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
2977 		      if (INTEGRAL_TYPE_P (type))
2978 			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2979 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2980 			      - offseti - maxsizei) % BITS_PER_UNIT;
2981 		      if (amnt)
2982 			shift_bytes_in_array_right (buffer, len, amnt);
2983 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
2984 			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
2985 		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
2986 			len = 0;
2987 		      else
2988 			{
2989 			  buf = buffer + len - sz - amnt;
2990 			  len -= (buf - buffer);
2991 			}
2992 		    }
2993 		  else
2994 		    {
2995 		      amnt = ((unsigned HOST_WIDE_INT) offset2i
2996 			      - offseti) % BITS_PER_UNIT;
2997 		      if (amnt)
2998 			{
2999 			  buffer[len] = 0;
3000 			  shift_bytes_in_array_left (buffer, len + 1, amnt);
3001 			  buf = buffer + 1;
3002 			}
3003 		    }
3004 		  tree val = native_interpret_expr (type, buf, len);
3005 		  /* If we chop off bits because the type's precision doesn't
3006 		     match the memory access size, this is OK when optimizing
3007 		     reads but not when called from the DSE code during
3008 		     elimination.  */
3009 		  if (val
3010 		      && type != vr->type)
3011 		    {
3012 		      if (! int_fits_type_p (val, vr->type))
3013 			val = NULL_TREE;
3014 		      else
3015 			val = fold_convert (vr->type, val);
3016 		    }
3017 
3018 		  if (val)
3019 		    return data->finish (ao_ref_alias_set (&lhs_ref),
3020 					 ao_ref_base_alias_set (&lhs_ref), val);
3021 		}
3022 	    }
3023 	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3024 					   size2i))
3025 	    {
3026 	      pd_data pd;
3027 	      tree rhs = gimple_assign_rhs1 (def_stmt);
3028 	      if (TREE_CODE (rhs) == SSA_NAME)
3029 		rhs = SSA_VAL (rhs);
3030 	      pd.rhs = rhs;
3031 	      pd.offset = offset2i;
3032 	      pd.size = size2i;
3033 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3034 					     ao_ref_base_alias_set (&lhs_ref),
3035 					     offseti, maxsizei);
3036 	    }
3037 	}
3038     }
3039 
3040   /* 4) Assignment from an SSA name whose definition we may be able
3041      to access pieces from or combine into a larger entity.  */
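  /* For example, after a whole-object store a = x_1; a later read of a
     sub-field of a is re-expressed as BIT_FIELD_REF <x_1, size, position>
     (or a VIEW_CONVERT_EXPR when the sizes match) and value-numbered via
     vn_nary_build_or_lookup; partially overlapping reads go to the
     partial-def tracker.  */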
3042   else if (known_eq (ref->size, maxsize)
3043 	   && is_gimple_reg_type (vr->type)
3044 	   && !contains_storage_order_barrier_p (vr->operands)
3045 	   && gimple_assign_single_p (def_stmt)
3046 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3047     {
3048       tree lhs = gimple_assign_lhs (def_stmt);
3049       tree base2;
3050       poly_int64 offset2, size2, maxsize2;
3051       HOST_WIDE_INT offset2i, size2i, offseti;
3052       bool reverse;
3053       gcc_assert (lhs_ref_ok);
3054       base2 = ao_ref_base (&lhs_ref);
3055       offset2 = lhs_ref.offset;
3056       size2 = lhs_ref.size;
3057       maxsize2 = lhs_ref.max_size;
3058       reverse = reverse_storage_order_for_component_p (lhs);
3059       tree def_rhs = gimple_assign_rhs1 (def_stmt);
3060       if (!reverse
3061 	  && !storage_order_barrier_p (lhs)
3062 	  && known_size_p (maxsize2)
3063 	  && known_eq (maxsize2, size2)
3064 	  && adjust_offsets_for_equal_base_address (base, &offset,
3065 						    base2, &offset2))
3066 	{
3067 	  if (data->partial_defs.is_empty ()
3068 	      && known_subrange_p (offset, maxsize, offset2, size2)
3069 	      /* ???  We can't handle bitfield precision extracts without
3070 		 either using an alternate type for the BIT_FIELD_REF and
3071 		 then doing a conversion or possibly adjusting the offset
3072 		 according to endianness.  */
3073 	      && (! INTEGRAL_TYPE_P (vr->type)
3074 		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3075 	      && multiple_p (ref->size, BITS_PER_UNIT))
3076 	    {
3077 	      tree val = NULL_TREE;
3078 	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3079 		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3080 		{
3081 		  gimple_match_op op (gimple_match_cond::UNCOND,
3082 				      BIT_FIELD_REF, vr->type,
3083 				      SSA_VAL (def_rhs),
3084 				      bitsize_int (ref->size),
3085 				      bitsize_int (offset - offset2));
3086 		  val = vn_nary_build_or_lookup (&op);
3087 		}
3088 	      else if (known_eq (ref->size, size2))
3089 		{
3090 		  gimple_match_op op (gimple_match_cond::UNCOND,
3091 				      VIEW_CONVERT_EXPR, vr->type,
3092 				      SSA_VAL (def_rhs));
3093 		  val = vn_nary_build_or_lookup (&op);
3094 		}
3095 	      if (val
3096 		  && (TREE_CODE (val) != SSA_NAME
3097 		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3098 		return data->finish (ao_ref_alias_set (&lhs_ref),
3099 				     ao_ref_base_alias_set (&lhs_ref), val);
3100 	    }
3101 	  else if (maxsize.is_constant (&maxsizei)
3102 		   && offset.is_constant (&offseti)
3103 		   && offset2.is_constant (&offset2i)
3104 		   && size2.is_constant (&size2i)
3105 		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3106 	    {
3107 	      pd_data pd;
3108 	      pd.rhs = SSA_VAL (def_rhs);
3109 	      pd.offset = offset2i;
3110 	      pd.size = size2i;
3111 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3112 					     ao_ref_base_alias_set (&lhs_ref),
3113 					     offseti, maxsizei);
3114 	    }
3115 	}
3116     }
3117 
3118   /* 5) For aggregate copies translate the reference through them if
3119      the copy kills ref.  */
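  /* For example, given the aggregate copy *p = *q; a later read p->f is
     rewritten into a lookup of q->f by splicing the operands of the copy's
     RHS in place of the common base, provided the copy kills the read.  */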
3120   else if (data->vn_walk_kind == VN_WALKREWRITE
3121 	   && gimple_assign_single_p (def_stmt)
3122 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
3123 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3124 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3125     {
3126       tree base2;
3127       int i, j, k;
3128       auto_vec<vn_reference_op_s> rhs;
3129       vn_reference_op_t vro;
3130       ao_ref r;
3131 
3132       gcc_assert (lhs_ref_ok);
3133 
3134       /* See if the assignment kills REF.  */
3135       base2 = ao_ref_base (&lhs_ref);
3136       if (!lhs_ref.max_size_known_p ()
3137 	  || (base != base2
3138 	      && (TREE_CODE (base) != MEM_REF
3139 		  || TREE_CODE (base2) != MEM_REF
3140 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3141 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3142 					  TREE_OPERAND (base2, 1))))
3143 	  || !stmt_kills_ref_p (def_stmt, ref))
3144 	return (void *)-1;
3145 
3146       /* Find the common base of ref and the lhs.  lhs_ops already
3147          contains valueized operands for the lhs.  */
3148       i = vr->operands.length () - 1;
3149       j = lhs_ops.length () - 1;
3150       while (j >= 0 && i >= 0
3151 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3152 	{
3153 	  i--;
3154 	  j--;
3155 	}
3156 
3157       /* ???  The innermost op should always be a MEM_REF and we already
3158          checked that the assignment to the lhs kills vr.  Thus for
3159 	 aggregate copies using char[] types the vn_reference_op_eq
3160 	 may fail when comparing types for compatibility.  But we really
3161 	 don't care here - further lookups with the rewritten operands
3162 	 will simply fail if we messed up types too badly.  */
3163       poly_int64 extra_off = 0;
3164       if (j == 0 && i >= 0
3165 	  && lhs_ops[0].opcode == MEM_REF
3166 	  && maybe_ne (lhs_ops[0].off, -1))
3167 	{
3168 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3169 	    i--, j--;
3170 	  else if (vr->operands[i].opcode == MEM_REF
3171 		   && maybe_ne (vr->operands[i].off, -1))
3172 	    {
3173 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
3174 	      i--, j--;
3175 	    }
3176 	}
3177 
3178       /* i now points to the first additional op.
3179 	 ???  LHS may not be completely contained in VR, one or more
3180 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
3181 	 try handling outermost VIEW_CONVERT_EXPRs.  */
3182       if (j != -1)
3183 	return (void *)-1;
3184 
3185       /* Punt if the additional ops contain a storage order barrier.  */
3186       for (k = i; k >= 0; k--)
3187 	{
3188 	  vro = &vr->operands[k];
3189 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3190 	    return (void *)-1;
3191 	}
3192 
3193       /* Now re-write REF to be based on the rhs of the assignment.  */
3194       tree rhs1 = gimple_assign_rhs1 (def_stmt);
3195       copy_reference_ops_from_ref (rhs1, &rhs);
3196 
3197       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
3198       if (maybe_ne (extra_off, 0))
3199 	{
3200 	  if (rhs.length () < 2)
3201 	    return (void *)-1;
3202 	  int ix = rhs.length () - 2;
3203 	  if (rhs[ix].opcode != MEM_REF
3204 	      || known_eq (rhs[ix].off, -1))
3205 	    return (void *)-1;
3206 	  rhs[ix].off += extra_off;
3207 	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3208 					 build_int_cst (TREE_TYPE (rhs[ix].op0),
3209 							extra_off));
3210 	}
3211 
3212       /* Save the operands since we need to use the original ones for
3213 	 the hash entry we use.  */
3214       if (!data->saved_operands.exists ())
3215 	data->saved_operands = vr->operands.copy ();
3216 
3217       /* We need to pre-pend vr->operands[0..i] to rhs.  */
3218       vec<vn_reference_op_s> old = vr->operands;
3219       if (i + 1 + rhs.length () > vr->operands.length ())
3220 	vr->operands.safe_grow (i + 1 + rhs.length (), true);
3221       else
3222 	vr->operands.truncate (i + 1 + rhs.length ());
3223       FOR_EACH_VEC_ELT (rhs, j, vro)
3224 	vr->operands[i + 1 + j] = *vro;
3225       valueize_refs (&vr->operands);
3226       if (old == shared_lookup_references)
3227 	shared_lookup_references = vr->operands;
3228       vr->hashcode = vn_reference_compute_hash (vr);
3229 
3230       /* Try folding the new reference to a constant.  */
3231       tree val = fully_constant_vn_reference_p (vr);
3232       if (val)
3233 	{
3234 	  if (data->partial_defs.is_empty ())
3235 	    return data->finish (ao_ref_alias_set (&lhs_ref),
3236 				 ao_ref_base_alias_set (&lhs_ref), val);
3237 	  /* This is the only interesting case for partial-def handling
3238 	     coming from targets that like to gimplify init-ctors as
3239 	     aggregate copies from constant data like aarch64 for
3240 	     PR83518.  */
3241 	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3242 	    {
3243 	      pd_data pd;
3244 	      pd.rhs = val;
3245 	      pd.offset = 0;
3246 	      pd.size = maxsizei;
3247 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3248 					     ao_ref_base_alias_set (&lhs_ref),
3249 					     0, maxsizei);
3250 	    }
3251 	}
3252 
3253       /* Continuing with partial defs isn't easily possible here; we
3254          have to find a full def from further lookups from here.  Probably
3255 	 not worth the special-casing everywhere.  */
3256       if (!data->partial_defs.is_empty ())
3257 	return (void *)-1;
3258 
3259       /* Adjust *ref from the new operands.  */
3260       ao_ref rhs1_ref;
3261       ao_ref_init (&rhs1_ref, rhs1);
3262       if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3263 					  ao_ref_base_alias_set (&rhs1_ref),
3264 					  vr->type, vr->operands))
3265 	return (void *)-1;
3266       /* This can happen with bitfields.  */
3267       if (maybe_ne (ref->size, r.size))
3268 	{
3269 	  /* If the access lacks some subsetting simply apply that by
3270 	     shortening it.  That in the end can only be successful
3271 	     if we can pun the lookup result which in turn requires
3272 	     exact offsets.  */
3273 	  if (known_eq (r.size, r.max_size)
3274 	      && known_lt (ref->size, r.size))
3275 	    r.size = r.max_size = ref->size;
3276 	  else
3277 	    return (void *)-1;
3278 	}
3279       *ref = r;
3280 
3281       /* Do not update last seen VUSE after translating.  */
3282       data->last_vuse_ptr = NULL;
3283       /* Invalidate the original access path since it now contains
3284          the wrong base.  */
3285       data->orig_ref.ref = NULL_TREE;
3286       /* Use the alias-set of this LHS for recording an eventual result.  */
3287       if (data->first_set == -2)
3288 	{
3289 	  data->first_set = ao_ref_alias_set (&lhs_ref);
3290 	  data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3291 	}
3292 
3293       /* Keep looking for the adjusted *REF / VR pair.  */
3294       return NULL;
3295     }
3296 
3297   /* 6) For memcpy copies translate the reference through them if the copy
3298      kills ref.  But we cannot (easily) do this translation if the memcpy is
3299      a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3300      can modify the storage order of objects (see storage_order_barrier_p).  */
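  /* For example, given

       memcpy (&a, &b, sizeof (a));

     a later scalar read of a.x that is fully contained in the copied
     region is rewritten into a lookup of the corresponding bytes of b,
     i.e. a MEM_REF based on &b at the translated offset, and the walk
     continues with that reference.  */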
3301   else if (data->vn_walk_kind == VN_WALKREWRITE
3302 	   && is_gimple_reg_type (vr->type)
3303 	   /* ???  Handle BCOPY as well.  */
3304 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3305 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3306 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3307 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3308 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3309 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3310 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3311 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3312 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3313 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3314 	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3315 	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3316 		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3317 				       &copy_size)))
3318 	   /* Handling this is more complicated, give up for now.  */
3319 	   && data->partial_defs.is_empty ())
3320     {
3321       tree lhs, rhs;
3322       ao_ref r;
3323       poly_int64 rhs_offset, lhs_offset;
3324       vn_reference_op_s op;
3325       poly_uint64 mem_offset;
3326       poly_int64 at, byte_maxsize;
3327 
3328       /* Only handle non-variable, addressable refs.  */
3329       if (maybe_ne (ref->size, maxsize)
3330 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
3331 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3332 	return (void *)-1;
3333 
3334       /* Extract a pointer base and an offset for the destination.  */
3335       lhs = gimple_call_arg (def_stmt, 0);
3336       lhs_offset = 0;
3337       if (TREE_CODE (lhs) == SSA_NAME)
3338 	{
3339 	  lhs = vn_valueize (lhs);
3340 	  if (TREE_CODE (lhs) == SSA_NAME)
3341 	    {
3342 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3343 	      if (gimple_assign_single_p (def_stmt)
3344 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3345 		lhs = gimple_assign_rhs1 (def_stmt);
3346 	    }
3347 	}
3348       if (TREE_CODE (lhs) == ADDR_EXPR)
3349 	{
3350 	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3351 	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3352 	    return (void *)-1;
3353 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3354 						    &lhs_offset);
3355 	  if (!tem)
3356 	    return (void *)-1;
3357 	  if (TREE_CODE (tem) == MEM_REF
3358 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3359 	    {
3360 	      lhs = TREE_OPERAND (tem, 0);
3361 	      if (TREE_CODE (lhs) == SSA_NAME)
3362 		lhs = vn_valueize (lhs);
3363 	      lhs_offset += mem_offset;
3364 	    }
3365 	  else if (DECL_P (tem))
3366 	    lhs = build_fold_addr_expr (tem);
3367 	  else
3368 	    return (void *)-1;
3369 	}
3370       if (TREE_CODE (lhs) != SSA_NAME
3371 	  && TREE_CODE (lhs) != ADDR_EXPR)
3372 	return (void *)-1;
3373 
3374       /* Extract a pointer base and an offset for the source.  */
3375       rhs = gimple_call_arg (def_stmt, 1);
3376       rhs_offset = 0;
3377       if (TREE_CODE (rhs) == SSA_NAME)
3378 	rhs = vn_valueize (rhs);
3379       if (TREE_CODE (rhs) == ADDR_EXPR)
3380 	{
3381 	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3382 	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3383 	    return (void *)-1;
3384 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3385 						    &rhs_offset);
3386 	  if (!tem)
3387 	    return (void *)-1;
3388 	  if (TREE_CODE (tem) == MEM_REF
3389 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3390 	    {
3391 	      rhs = TREE_OPERAND (tem, 0);
3392 	      rhs_offset += mem_offset;
3393 	    }
3394 	  else if (DECL_P (tem)
3395 		   || TREE_CODE (tem) == STRING_CST)
3396 	    rhs = build_fold_addr_expr (tem);
3397 	  else
3398 	    return (void *)-1;
3399 	}
3400       if (TREE_CODE (rhs) == SSA_NAME)
3401 	rhs = SSA_VAL (rhs);
3402       else if (TREE_CODE (rhs) != ADDR_EXPR)
3403 	return (void *)-1;
3404 
3405       /* The bases of the destination and the reference have to agree.  */
3406       if (TREE_CODE (base) == MEM_REF)
3407 	{
3408 	  if (TREE_OPERAND (base, 0) != lhs
3409 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3410 	    return (void *) -1;
3411 	  at += mem_offset;
3412 	}
3413       else if (!DECL_P (base)
3414 	       || TREE_CODE (lhs) != ADDR_EXPR
3415 	       || TREE_OPERAND (lhs, 0) != base)
3416 	return (void *)-1;
3417 
3418       /* If the access is completely outside of the memcpy destination
3419 	 area there is no aliasing.  */
3420       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3421 	return NULL;
3422       /* And the access has to be contained within the memcpy destination.  */
3423       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3424 	return (void *)-1;
3425 
3426       /* Save the operands since we need to use the original ones for
3427 	 the hash entry we use.  */
3428       if (!data->saved_operands.exists ())
3429 	data->saved_operands = vr->operands.copy ();
3430 
3431       /* Make room for 2 operands in the new reference.  */
3432       if (vr->operands.length () < 2)
3433 	{
3434 	  vec<vn_reference_op_s> old = vr->operands;
3435 	  vr->operands.safe_grow_cleared (2, true);
3436 	  if (old == shared_lookup_references)
3437 	    shared_lookup_references = vr->operands;
3438 	}
3439       else
3440 	vr->operands.truncate (2);
3441 
3442       /* The looked-through reference is a simple MEM_REF.  */
3443       memset (&op, 0, sizeof (op));
3444       op.type = vr->type;
3445       op.opcode = MEM_REF;
3446       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3447       op.off = at - lhs_offset + rhs_offset;
3448       vr->operands[0] = op;
3449       op.type = TREE_TYPE (rhs);
3450       op.opcode = TREE_CODE (rhs);
3451       op.op0 = rhs;
3452       op.off = -1;
3453       vr->operands[1] = op;
3454       vr->hashcode = vn_reference_compute_hash (vr);
3455 
3456       /* Try folding the new reference to a constant.  */
3457       tree val = fully_constant_vn_reference_p (vr);
3458       if (val)
3459 	return data->finish (0, 0, val);
3460 
3461       /* Adjust *ref from the new operands.  */
3462       if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3463 	return (void *)-1;
3464       /* This can happen with bitfields.  */
3465       if (maybe_ne (ref->size, r.size))
3466 	return (void *)-1;
3467       *ref = r;
3468 
3469       /* Do not update last seen VUSE after translating.  */
3470       data->last_vuse_ptr = NULL;
3471       /* Invalidate the original access path since it now contains
3472          the wrong base.  */
3473       data->orig_ref.ref = NULL_TREE;
3474       /* Use the alias-set of this stmt for recording an eventual result.  */
3475       if (data->first_set == -2)
3476 	{
3477 	  data->first_set = 0;
3478 	  data->first_base_set = 0;
3479 	}
3480 
3481       /* Keep looking for the adjusted *REF / VR pair.  */
3482       return NULL;
3483     }
3484 
3485   /* Bail out and stop walking.  */
3486   return (void *)-1;
3487 }
3488 
3489 /* Return a reference op vector from OP that can be used for
3490    vn_reference_lookup_pieces.  The caller is responsible for releasing
3491    the vector.  */
3492 
3493 vec<vn_reference_op_s>
3494 vn_reference_operands_for_lookup (tree op)
3495 {
3496   bool valueized;
3497   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3498 }
3499 
3500 /* Lookup a reference operation by its parts in the current hash table.
3501    Returns the resulting value number if it exists in the hash table,
3502    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
3503    vn_reference_t stored in the hashtable if something is found.  */
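/* A sketch of a possible caller (illustrative only), combining this with
   vn_reference_operands_for_lookup above, where REF and STMT stand for
   the reference tree and the statement using it:

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (ref);
     ao_ref r;
     ao_ref_init (&r, ref);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt),
					    ao_ref_alias_set (&r),
					    ao_ref_base_alias_set (&r),
					    TREE_TYPE (ref), ops,
					    &res, VN_WALK);
     ops.release ();  */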
3504 
3505 tree
3506 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3507 			    alias_set_type base_set, tree type,
3508 			    vec<vn_reference_op_s> operands,
3509 			    vn_reference_t *vnresult, vn_lookup_kind kind)
3510 {
3511   struct vn_reference_s vr1;
3512   vn_reference_t tmp;
3513   tree cst;
3514 
3515   if (!vnresult)
3516     vnresult = &tmp;
3517   *vnresult = NULL;
3518 
3519   vr1.vuse = vuse_ssa_val (vuse);
3520   shared_lookup_references.truncate (0);
3521   shared_lookup_references.safe_grow (operands.length (), true);
3522   memcpy (shared_lookup_references.address (),
3523 	  operands.address (),
3524 	  sizeof (vn_reference_op_s)
3525 	  * operands.length ());
3526   bool valueized_p;
3527   valueize_refs_1 (&shared_lookup_references, &valueized_p);
3528   vr1.operands = shared_lookup_references;
3529   vr1.type = type;
3530   vr1.set = set;
3531   vr1.base_set = base_set;
3532   vr1.hashcode = vn_reference_compute_hash (&vr1);
3533   if ((cst = fully_constant_vn_reference_p (&vr1)))
3534     return cst;
3535 
3536   vn_reference_lookup_1 (&vr1, vnresult);
3537   if (!*vnresult
3538       && kind != VN_NOWALK
3539       && vr1.vuse)
3540     {
3541       ao_ref r;
3542       unsigned limit = param_sccvn_max_alias_queries_per_access;
3543       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3544       vec<vn_reference_op_s> ops_for_ref;
3545       if (!valueized_p)
3546 	ops_for_ref = vr1.operands;
3547       else
3548 	{
3549 	  /* For ao_ref_from_mem we have to ensure only available SSA names
3550 	     end up in base and the only convenient way to make this work
3551 	     for PRE is to re-valueize with that in mind.  */
3552 	  ops_for_ref.create (operands.length ());
3553 	  ops_for_ref.quick_grow (operands.length ());
3554 	  memcpy (ops_for_ref.address (),
3555 		  operands.address (),
3556 		  sizeof (vn_reference_op_s)
3557 		  * operands.length ());
3558 	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3559 	}
3560       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3561 					 ops_for_ref))
3562 	*vnresult
3563 	  = ((vn_reference_t)
3564 	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3565 				     vn_reference_lookup_3, vuse_valueize,
3566 				     limit, &data));
3567       if (ops_for_ref != shared_lookup_references)
3568 	ops_for_ref.release ();
3569       gcc_checking_assert (vr1.operands == shared_lookup_references);
3570     }
3571 
3572   if (*vnresult)
3573      return (*vnresult)->result;
3574 
3575   return NULL_TREE;
3576 }
3577 
3578 /* Lookup OP in the current hash table, and return the resulting value
3579    number if it exists in the hash table.  Return NULL_TREE if it does
3580    not exist in the hash table or if the result field of the structure
3581    was NULL.  VNRESULT will be filled in with the vn_reference_t
3582    stored in the hashtable if one exists.  When TBAA_P is false assume
3583    we are looking up a store and treat it as having alias-set zero.
3584    *LAST_VUSE_PTR will be updated with the VUSE at which the value lookup
3585    succeeded.  MASK is either NULL_TREE or an INTEGER_CST; in the latter
3586    case the result of the load is bitwise anded with MASK, so we are only
3587    interested in that subset of the bits and can ignore whether the other
3588    bits are uninitialized or not initialized with constants.  */
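/* An illustrative sketch (not an existing caller): when the value of a
   load OP is only consumed through

     _1 = *p_2;
     _3 = _1 & 0xff;

   the load can be looked up with a mask, for example

     tree mask = build_int_cst (TREE_TYPE (op), 0xff);
     tree val = vn_reference_lookup (op, vuse, VN_WALKREWRITE,
				     NULL, true, NULL, mask);

   so that only the low eight bits of the stored value need to be known;
   a successful masked lookup is returned directly rather than via
   *VNRESULT.  */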
3589 
3590 tree
3591 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3592 		     vn_reference_t *vnresult, bool tbaa_p,
3593 		     tree *last_vuse_ptr, tree mask)
3594 {
3595   vec<vn_reference_op_s> operands;
3596   struct vn_reference_s vr1;
3597   bool valueized_anything;
3598 
3599   if (vnresult)
3600     *vnresult = NULL;
3601 
3602   vr1.vuse = vuse_ssa_val (vuse);
3603   vr1.operands = operands
3604     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3605   vr1.type = TREE_TYPE (op);
3606   ao_ref op_ref;
3607   ao_ref_init (&op_ref, op);
3608   vr1.set = ao_ref_alias_set (&op_ref);
3609   vr1.base_set = ao_ref_base_alias_set (&op_ref);
3610   vr1.hashcode = vn_reference_compute_hash (&vr1);
3611   if (mask == NULL_TREE)
3612     if (tree cst = fully_constant_vn_reference_p (&vr1))
3613       return cst;
3614 
3615   if (kind != VN_NOWALK && vr1.vuse)
3616     {
3617       vn_reference_t wvnresult;
3618       ao_ref r;
3619       unsigned limit = param_sccvn_max_alias_queries_per_access;
3620       auto_vec<vn_reference_op_s> ops_for_ref;
3621       if (valueized_anything)
3622 	{
3623 	  copy_reference_ops_from_ref (op, &ops_for_ref);
3624 	  bool tem;
3625 	  valueize_refs_1 (&ops_for_ref, &tem, true);
3626 	}
3627       /* Make sure to use a valueized reference if we valueized anything.
3628          Otherwise preserve the full reference for advanced TBAA.  */
3629       if (!valueized_anything
3630 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3631 					     vr1.type, ops_for_ref))
3632 	ao_ref_init (&r, op);
3633       vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3634 			    last_vuse_ptr, kind, tbaa_p, mask);
3635 
3636       wvnresult
3637 	= ((vn_reference_t)
3638 	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3639 				   vn_reference_lookup_3, vuse_valueize, limit,
3640 				   &data));
3641       gcc_checking_assert (vr1.operands == shared_lookup_references);
3642       if (wvnresult)
3643 	{
3644 	  gcc_assert (mask == NULL_TREE);
3645 	  if (vnresult)
3646 	    *vnresult = wvnresult;
3647 	  return wvnresult->result;
3648 	}
3649       else if (mask)
3650 	return data.masked_result;
3651 
3652       return NULL_TREE;
3653     }
3654 
3655   if (last_vuse_ptr)
3656     *last_vuse_ptr = vr1.vuse;
3657   if (mask)
3658     return NULL_TREE;
3659   return vn_reference_lookup_1 (&vr1, vnresult);
3660 }
3661 
3662 /* Lookup CALL in the current hash table and return the entry in
3663    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
3664 
3665 void
3666 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3667 			  vn_reference_t vr)
3668 {
3669   if (vnresult)
3670     *vnresult = NULL;
3671 
3672   tree vuse = gimple_vuse (call);
3673 
3674   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3675   vr->operands = valueize_shared_reference_ops_from_call (call);
3676   vr->type = gimple_expr_type (call);
3677   vr->punned = false;
3678   vr->set = 0;
3679   vr->base_set = 0;
3680   vr->hashcode = vn_reference_compute_hash (vr);
3681   vn_reference_lookup_1 (vr, vnresult);
3682 }
3683 
3684 /* Insert OP into the current hash table with a value number of RESULT.  */
3685 
3686 static void
3687 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3688 {
3689   vn_reference_s **slot;
3690   vn_reference_t vr1;
3691   bool tem;
3692 
3693   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3694   if (TREE_CODE (result) == SSA_NAME)
3695     vr1->value_id = VN_INFO (result)->value_id;
3696   else
3697     vr1->value_id = get_or_alloc_constant_value_id (result);
3698   vr1->vuse = vuse_ssa_val (vuse);
3699   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3700   vr1->type = TREE_TYPE (op);
3701   vr1->punned = false;
3702   ao_ref op_ref;
3703   ao_ref_init (&op_ref, op);
3704   vr1->set = ao_ref_alias_set (&op_ref);
3705   vr1->base_set = ao_ref_base_alias_set (&op_ref);
3706   vr1->hashcode = vn_reference_compute_hash (vr1);
3707   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3708   vr1->result_vdef = vdef;
3709 
3710   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3711 						      INSERT);
3712 
3713   /* Because IL walking on reference lookup can end up visiting
3714      a def that is only to be visited later in iteration order
3715      when we are about to make an irreducible region reducible,
3716      the def can effectively be processed and its ref already inserted
3717      by vn_reference_lookup_3.  So we cannot assert (!*slot),
3718      but we save a lookup if we deal with already inserted refs here.  */
3719   if (*slot)
3720     {
3721       /* We cannot assert that we have the same value either because
3722          when disentangling an irreducible region we may end up visiting
3723 	 a use before the corresponding def.  That's a missed optimization
3724 	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
3725       if (dump_file && (dump_flags & TDF_DETAILS)
3726 	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
3727 	{
3728 	  fprintf (dump_file, "Keeping old value ");
3729 	  print_generic_expr (dump_file, (*slot)->result);
3730 	  fprintf (dump_file, " because of collision\n");
3731 	}
3732       free_reference (vr1);
3733       obstack_free (&vn_tables_obstack, vr1);
3734       return;
3735     }
3736 
3737   *slot = vr1;
3738   vr1->next = last_inserted_ref;
3739   last_inserted_ref = vr1;
3740 }
3741 
3742 /* Insert a reference by its pieces into the current hash table with
3743    a value number of RESULT.  Return the resulting reference
3744    structure we created.  */
3745 
3746 vn_reference_t
3747 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3748 			    alias_set_type base_set, tree type,
3749 			    vec<vn_reference_op_s> operands,
3750 			    tree result, unsigned int value_id)
3751 
3752 {
3753   vn_reference_s **slot;
3754   vn_reference_t vr1;
3755 
3756   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3757   vr1->value_id = value_id;
3758   vr1->vuse = vuse_ssa_val (vuse);
3759   vr1->operands = operands;
3760   valueize_refs (&vr1->operands);
3761   vr1->type = type;
3762   vr1->punned = false;
3763   vr1->set = set;
3764   vr1->base_set = base_set;
3765   vr1->hashcode = vn_reference_compute_hash (vr1);
3766   if (result && TREE_CODE (result) == SSA_NAME)
3767     result = SSA_VAL (result);
3768   vr1->result = result;
3769 
3770   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3771 						      INSERT);
3772 
3773   /* At this point we should have all the things inserted that we have
3774      seen before, and we should never try inserting something that
3775      already exists.  */
3776   gcc_assert (!*slot);
3777 
3778   *slot = vr1;
3779   vr1->next = last_inserted_ref;
3780   last_inserted_ref = vr1;
3781   return vr1;
3782 }
3783 
3784 /* Compute and return the hash value for nary operation VNO1.  */
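/* Note the hash canonicalizes commutative operand order first, so that
   for example x_1 + y_2 and y_2 + x_1, or x_1 < y_2 and y_2 > x_1,
   receive the same hash value.  */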
3785 
3786 static hashval_t
3787 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3788 {
3789   inchash::hash hstate;
3790   unsigned i;
3791 
3792   for (i = 0; i < vno1->length; ++i)
3793     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3794       vno1->op[i] = SSA_VAL (vno1->op[i]);
3795 
3796   if (((vno1->length == 2
3797 	&& commutative_tree_code (vno1->opcode))
3798        || (vno1->length == 3
3799 	   && commutative_ternary_tree_code (vno1->opcode)))
3800       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3801     std::swap (vno1->op[0], vno1->op[1]);
3802   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3803 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3804     {
3805       std::swap (vno1->op[0], vno1->op[1]);
3806       vno1->opcode = swap_tree_comparison (vno1->opcode);
3807     }
3808 
3809   hstate.add_int (vno1->opcode);
3810   for (i = 0; i < vno1->length; ++i)
3811     inchash::add_expr (vno1->op[i], hstate);
3812 
3813   return hstate.end ();
3814 }
3815 
3816 /* Compare nary operations VNO1 and VNO2 and return true if they are
3817    equivalent.  */
3818 
3819 bool
3820 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3821 {
3822   unsigned i;
3823 
3824   if (vno1->hashcode != vno2->hashcode)
3825     return false;
3826 
3827   if (vno1->length != vno2->length)
3828     return false;
3829 
3830   if (vno1->opcode != vno2->opcode
3831       || !types_compatible_p (vno1->type, vno2->type))
3832     return false;
3833 
3834   for (i = 0; i < vno1->length; ++i)
3835     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3836       return false;
3837 
3838   /* BIT_INSERT_EXPR has an implicit operand as the type precision
3839      of op1.  Need to check to make sure they are the same.  */
3840   if (vno1->opcode == BIT_INSERT_EXPR
3841       && TREE_CODE (vno1->op[1]) == INTEGER_CST
3842       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3843 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3844     return false;
3845 
3846   return true;
3847 }
3848 
3849 /* Initialize VNO from the pieces provided.  */
3850 
3851 static void
3852 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3853 			     enum tree_code code, tree type, tree *ops)
3854 {
3855   vno->opcode = code;
3856   vno->length = length;
3857   vno->type = type;
3858   memcpy (&vno->op[0], ops, sizeof (tree) * length);
3859 }
3860 
3861 /* Return the number of operands for a vn_nary ops structure from STMT.  */
3862 
3863 static unsigned int
3864 vn_nary_length_from_stmt (gimple *stmt)
3865 {
3866   switch (gimple_assign_rhs_code (stmt))
3867     {
3868     case REALPART_EXPR:
3869     case IMAGPART_EXPR:
3870     case VIEW_CONVERT_EXPR:
3871       return 1;
3872 
3873     case BIT_FIELD_REF:
3874       return 3;
3875 
3876     case CONSTRUCTOR:
3877       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3878 
3879     default:
3880       return gimple_num_ops (stmt) - 1;
3881     }
3882 }
3883 
3884 /* Initialize VNO from STMT.  */
3885 
3886 static void
3887 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3888 {
3889   unsigned i;
3890 
3891   vno->opcode = gimple_assign_rhs_code (stmt);
3892   vno->type = gimple_expr_type (stmt);
3893   switch (vno->opcode)
3894     {
3895     case REALPART_EXPR:
3896     case IMAGPART_EXPR:
3897     case VIEW_CONVERT_EXPR:
3898       vno->length = 1;
3899       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3900       break;
3901 
3902     case BIT_FIELD_REF:
3903       vno->length = 3;
3904       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3905       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3906       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3907       break;
3908 
3909     case CONSTRUCTOR:
3910       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3911       for (i = 0; i < vno->length; ++i)
3912 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3913       break;
3914 
3915     default:
3916       gcc_checking_assert (!gimple_assign_single_p (stmt));
3917       vno->length = gimple_num_ops (stmt) - 1;
3918       for (i = 0; i < vno->length; ++i)
3919 	vno->op[i] = gimple_op (stmt, i + 1);
3920     }
3921 }
3922 
3923 /* Compute the hashcode for VNO and look for it in the hash table;
3924    return the resulting value number if it exists in the hash table.
3925    Return NULL_TREE if it does not exist in the hash table or if the
3926    result field of the operation is NULL.  VNRESULT will contain the
3927    vn_nary_op_t from the hashtable if it exists.  */
3928 
3929 static tree
3930 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3931 {
3932   vn_nary_op_s **slot;
3933 
3934   if (vnresult)
3935     *vnresult = NULL;
3936 
3937   vno->hashcode = vn_nary_op_compute_hash (vno);
3938   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3939   if (!slot)
3940     return NULL_TREE;
3941   if (vnresult)
3942     *vnresult = *slot;
3943   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3944 }
3945 
3946 /* Lookup an n-ary operation by its pieces and return the resulting value
3947    number if it exists in the hash table.  Return NULL_TREE if it does
3948    not exist in the hash table or if the result field of the operation
3949    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3950    if it exists.  */
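/* A sketch of a possible use (illustrative only), looking up x_1 + y_2:

     tree ops[2] = { x, y };
     vn_nary_op_t vnresult;
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, TREE_TYPE (x),
					  ops, &vnresult);  */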
3951 
3952 tree
3953 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3954 			  tree type, tree *ops, vn_nary_op_t *vnresult)
3955 {
3956   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3957 				  sizeof_vn_nary_op (length));
3958   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3959   return vn_nary_op_lookup_1 (vno1, vnresult);
3960 }
3961 
3962 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3963    value number if it exists in the hash table.  Return NULL_TREE if
3964    it does not exist in the hash table.  VNRESULT will contain the
3965    vn_nary_op_t from the hashtable if it exists.  */
3966 
3967 tree
3968 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3969 {
3970   vn_nary_op_t vno1
3971     = XALLOCAVAR (struct vn_nary_op_s,
3972 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3973   init_vn_nary_op_from_stmt (vno1, stmt);
3974   return vn_nary_op_lookup_1 (vno1, vnresult);
3975 }
3976 
3977 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
3978 
3979 static vn_nary_op_t
3980 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3981 {
3982   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3983 }
3984 
3985 /* Allocate and initialize a vn_nary_op_t on the
3986    vn_tables_obstack.  */
3987 
3988 static vn_nary_op_t
3989 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3990 {
3991   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3992 
3993   vno1->value_id = value_id;
3994   vno1->length = length;
3995   vno1->predicated_values = 0;
3996   vno1->u.result = result;
3997 
3998   return vno1;
3999 }
4000 
4001 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
4002    VNO->HASHCODE first.  */
4003 
4004 static vn_nary_op_t
4005 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
4006 			bool compute_hash)
4007 {
4008   vn_nary_op_s **slot;
4009 
4010   if (compute_hash)
4011     {
4012       vno->hashcode = vn_nary_op_compute_hash (vno);
4013       gcc_assert (! vno->predicated_values
4014 		  || (! vno->u.values->next
4015 		      && vno->u.values->n == 1));
4016     }
4017 
4018   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4019   vno->unwind_to = *slot;
4020   if (*slot)
4021     {
4022       /* Prefer non-predicated values.
4023          ???  Only if those are constant, otherwise, with constant predicated
4024 	 value, turn them into predicated values with entry-block validity
4025 	 (???  but we always find the first valid result currently).  */
4026       if ((*slot)->predicated_values
4027 	  && ! vno->predicated_values)
4028 	{
4029 	  /* ???  We cannot remove *slot from the unwind stack list.
4030 	     For the moment we deal with this by skipping not found
4031 	     entries but this isn't ideal ...  */
4032 	  *slot = vno;
4033 	  /* ???  Maintain a stack of states we can unwind in
4034 	     vn_nary_op_s?  But how far do we unwind?  In reality
4035 	     we need to push change records somewhere...  Or not
4036 	     unwind vn_nary_op_s and linking them but instead
4037 	     unwind the results "list", linking that, which also
4038 	     doesn't move on hashtable resize.  */
4039 	  /* We can also have a ->unwind_to recording *slot there.
4040 	     That way we can make u.values a fixed size array with
4041 	     recording the number of entries but of course we then
4042 	     have always N copies for each unwind_to-state.  Or we
4043              make sure to only ever append and each unwinding will
4044 	     pop off one entry (but how to deal with predicated
4045 	     replaced with non-predicated here?)  */
4046 	  vno->next = last_inserted_nary;
4047 	  last_inserted_nary = vno;
4048 	  return vno;
4049 	}
4050       else if (vno->predicated_values
4051 	       && ! (*slot)->predicated_values)
4052 	return *slot;
4053       else if (vno->predicated_values
4054 	       && (*slot)->predicated_values)
4055 	{
4056 	  /* ???  Factor this all into a insert_single_predicated_value
4057 	     routine.  */
4058 	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4059 	  basic_block vno_bb
4060 	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4061 	  vn_pval *nval = vno->u.values;
4062 	  vn_pval **next = &vno->u.values;
4063 	  bool found = false;
4064 	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4065 	    {
4066 	      if (expressions_equal_p (val->result, vno->u.values->result))
4067 		{
4068 		  found = true;
4069 		  for (unsigned i = 0; i < val->n; ++i)
4070 		    {
4071 		      basic_block val_bb
4072 			= BASIC_BLOCK_FOR_FN (cfun,
4073 					      val->valid_dominated_by_p[i]);
4074 		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4075 			/* Value registered with more generic predicate.  */
4076 			return *slot;
4077 		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4078 			/* Shouldn't happen, we insert in RPO order.  */
4079 			gcc_unreachable ();
4080 		    }
4081 		  /* Append value.  */
4082 		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4083 						     sizeof (vn_pval)
4084 						     + val->n * sizeof (int));
4085 		  (*next)->next = NULL;
4086 		  (*next)->result = val->result;
4087 		  (*next)->n = val->n + 1;
4088 		  memcpy ((*next)->valid_dominated_by_p,
4089 			  val->valid_dominated_by_p,
4090 			  val->n * sizeof (int));
4091 		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4092 		  next = &(*next)->next;
4093 		  if (dump_file && (dump_flags & TDF_DETAILS))
4094 		    fprintf (dump_file, "Appending predicate to value.\n");
4095 		  continue;
4096 		}
4097 	      /* Copy other predicated values.  */
4098 	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4099 						 sizeof (vn_pval)
4100 						 + (val->n-1) * sizeof (int));
4101 	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4102 	      (*next)->next = NULL;
4103 	      next = &(*next)->next;
4104 	    }
4105 	  if (!found)
4106 	    *next = nval;
4107 
4108 	  *slot = vno;
4109 	  vno->next = last_inserted_nary;
4110 	  last_inserted_nary = vno;
4111 	  return vno;
4112 	}
4113 
4114       /* While we do not want to insert things twice it's awkward to
4115 	 avoid it in the case where visit_nary_op pattern-matches stuff
4116 	 and ends up simplifying the replacement to itself.  We then
4117 	 get two inserts, one from visit_nary_op and one from
4118 	 vn_nary_build_or_lookup.
4119 	 So allow inserts with the same value number.  */
4120       if ((*slot)->u.result == vno->u.result)
4121 	return *slot;
4122     }
4123 
4124   /* ???  There's also optimistic vs. previously committed state merging
4125      that is problematic for the case of unwinding.  */
4126 
4127   /* ???  We should return NULL if we do not use 'vno' and have the
4128      caller release it.  */
4129   gcc_assert (!*slot);
4130 
4131   *slot = vno;
4132   vno->next = last_inserted_nary;
4133   last_inserted_nary = vno;
4134   return vno;
4135 }
4136 
4137 /* Insert an n-ary operation into the current hash table using its
4138    pieces.  Return the vn_nary_op_t structure we created and put in
4139    the hashtable.  */
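/* A sketch of a possible use (illustrative only), recording that
   x_1 + y_2 has the value of LHS:

     tree ops[2] = { x, y };
     vn_nary_op_insert_pieces (2, PLUS_EXPR, TREE_TYPE (lhs), ops,
			       lhs, VN_INFO (lhs)->value_id);  */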
4140 
4141 vn_nary_op_t
4142 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4143 			  tree type, tree *ops,
4144 			  tree result, unsigned int value_id)
4145 {
4146   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4147   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4148   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4149 }
4150 
4151 static vn_nary_op_t
4152 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4153 				     tree type, tree *ops,
4154 				     tree result, unsigned int value_id,
4155 				     edge pred_e)
4156 {
4157   /* ???  Currently tracking BBs.  */
4158   if (! single_pred_p (pred_e->dest))
4159     {
4160       /* Never record for backedges.  */
4161       if (pred_e->flags & EDGE_DFS_BACK)
4162 	return NULL;
4163       edge_iterator ei;
4164       edge e;
4165       int cnt = 0;
4166       /* Ignore backedges.  */
4167       FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4168 	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4169 	  cnt++;
4170       if (cnt != 1)
4171 	return NULL;
4172     }
4173   if (dump_file && (dump_flags & TDF_DETAILS)
4174       /* ???  Fix dumping, but currently we only get comparisons.  */
4175       && TREE_CODE_CLASS (code) == tcc_comparison)
4176     {
4177       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4178 	       pred_e->dest->index);
4179       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4180       fprintf (dump_file, " %s ", get_tree_code_name (code));
4181       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4182       fprintf (dump_file, " == %s\n",
4183 	       integer_zerop (result) ? "false" : "true");
4184     }
4185   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4186   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4187   vno1->predicated_values = 1;
4188   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4189 					      sizeof (vn_pval));
4190   vno1->u.values->next = NULL;
4191   vno1->u.values->result = result;
4192   vno1->u.values->n = 1;
4193   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4194   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4195 }
4196 
4197 static bool
4198 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4199 
4200 static tree
4201 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4202 {
4203   if (! vno->predicated_values)
4204     return vno->u.result;
4205   for (vn_pval *val = vno->u.values; val; val = val->next)
4206     for (unsigned i = 0; i < val->n; ++i)
4207       /* Do not handle backedge executability optimistically since
4208 	 when figuring out whether to iterate we do not consider
4209 	 changed predication.  */
4210       if (dominated_by_p_w_unex
4211 	    (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4212 	     false))
4213 	return val->result;
4214   return NULL_TREE;
4215 }
4216 
4217 /* Insert the rhs of STMT into the current hash table with a value number of
4218    RESULT.  */
4219 
4220 static vn_nary_op_t
4221 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4222 {
4223   vn_nary_op_t vno1
4224     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4225 			result, VN_INFO (result)->value_id);
4226   init_vn_nary_op_from_stmt (vno1, stmt);
4227   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4228 }
4229 
4230 /* Compute a hashcode for PHI operation VP1 and return it.  */
4231 
4232 static inline hashval_t
4233 vn_phi_compute_hash (vn_phi_t vp1)
4234 {
4235   inchash::hash hstate;
4236   tree phi1op;
4237   tree type;
4238   edge e;
4239   edge_iterator ei;
4240 
4241   hstate.add_int (EDGE_COUNT (vp1->block->preds));
4242   switch (EDGE_COUNT (vp1->block->preds))
4243     {
4244     case 1:
4245       break;
4246     case 2:
4247       if (vp1->block->loop_father->header == vp1->block)
4248 	;
4249       else
4250 	break;
4251       /* Fallthru.  */
4252     default:
4253       hstate.add_int (vp1->block->index);
4254     }
4255 
4256   /* If all PHI arguments are constants we need to distinguish
4257      the PHI node via its type.  */
4258   type = vp1->type;
4259   hstate.merge_hash (vn_hash_type (type));
4260 
4261   FOR_EACH_EDGE (e, ei, vp1->block->preds)
4262     {
4263       /* Don't hash backedge values; they need to be handled as VN_TOP
4264          for optimistic value-numbering.  */
4265       if (e->flags & EDGE_DFS_BACK)
4266 	continue;
4267 
4268       phi1op = vp1->phiargs[e->dest_idx];
4269       if (phi1op == VN_TOP)
4270 	continue;
4271       inchash::add_expr (phi1op, hstate);
4272     }
4273 
4274   return hstate.end ();
4275 }
4276 
4277 
4278 /* Return true if COND1 and COND2 represent the same condition, set
4279    *INVERTED_P if one needs to be inverted to make it the same as
4280    the other.  */
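/* For example, a_1 < b_2 and b_2 > a_1 represent the same condition with
   no inversion required, while a_1 < b_2 and a_1 >= b_2 match with
   *INVERTED_P set (modulo the HONOR_NANS checks for floating-point
   operands).  */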
4281 
4282 static bool
4283 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4284 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4285 {
4286   enum tree_code code1 = gimple_cond_code (cond1);
4287   enum tree_code code2 = gimple_cond_code (cond2);
4288 
4289   *inverted_p = false;
4290   if (code1 == code2)
4291     ;
4292   else if (code1 == swap_tree_comparison (code2))
4293     std::swap (lhs2, rhs2);
4294   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4295     *inverted_p = true;
4296   else if (code1 == invert_tree_comparison
4297 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4298     {
4299       std::swap (lhs2, rhs2);
4300       *inverted_p = true;
4301     }
4302   else
4303     return false;
4304 
4305   return ((expressions_equal_p (lhs1, lhs2)
4306 	   && expressions_equal_p (rhs1, rhs2))
4307 	  || (commutative_tree_code (code1)
4308 	      && expressions_equal_p (lhs1, rhs2)
4309 	      && expressions_equal_p (rhs1, lhs2)));
4310 }
4311 
4312 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
4313 
4314 static int
4315 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4316 {
4317   if (vp1->hashcode != vp2->hashcode)
4318     return false;
4319 
4320   if (vp1->block != vp2->block)
4321     {
4322       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4323 	return false;
4324 
4325       switch (EDGE_COUNT (vp1->block->preds))
4326 	{
4327 	case 1:
4328 	  /* Single-arg PHIs are just copies.  */
4329 	  break;
4330 
4331 	case 2:
4332 	  {
4333 	    /* Rule out backedges into the PHI.  */
4334 	    if (vp1->block->loop_father->header == vp1->block
4335 		|| vp2->block->loop_father->header == vp2->block)
4336 	      return false;
4337 
4338 	    /* If the PHI nodes do not have compatible types
4339 	       they are not the same.  */
4340 	    if (!types_compatible_p (vp1->type, vp2->type))
4341 	      return false;
4342 
4343 	    basic_block idom1
4344 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4345 	    basic_block idom2
4346 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4347 	    /* If the immediate dominators end in switch stmts, multiple
4348 	       values may end up in the same PHI arg via intermediate
4349 	       CFG merges.  */
4350 	    if (EDGE_COUNT (idom1->succs) != 2
4351 		|| EDGE_COUNT (idom2->succs) != 2)
4352 	      return false;
4353 
4354 	    /* Verify the controlling stmt is the same.  */
4355 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4356 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4357 	    if (! last1 || ! last2)
4358 	      return false;
4359 	    bool inverted_p;
4360 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4361 				      last2, vp2->cclhs, vp2->ccrhs,
4362 				      &inverted_p))
4363 	      return false;
4364 
4365 	    /* Get at true/false controlled edges into the PHI.  */
4366 	    edge te1, te2, fe1, fe2;
4367 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
4368 						       &te1, &fe1)
4369 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
4370 							  &te2, &fe2))
4371 	      return false;
4372 
4373 	    /* Swap edges if the second condition is the inverted of the
4374 	       first.  */
4375 	    if (inverted_p)
4376 	      std::swap (te2, fe2);
4377 
4378 	    /* ???  Handle VN_TOP specially.  */
4379 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4380 				       vp2->phiargs[te2->dest_idx])
4381 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4382 					  vp2->phiargs[fe2->dest_idx]))
4383 	      return false;
4384 
4385 	    return true;
4386 	  }
4387 
4388 	default:
4389 	  return false;
4390 	}
4391     }
4392 
4393   /* If the PHI nodes do not have compatible types
4394      they are not the same.  */
4395   if (!types_compatible_p (vp1->type, vp2->type))
4396     return false;
4397 
4398   /* Any phi in the same block will have its arguments in the
4399      same edge order, because of how we store phi nodes.  */
4400   unsigned nargs = EDGE_COUNT (vp1->block->preds);
4401   for (unsigned i = 0; i < nargs; ++i)
4402     {
4403       tree phi1op = vp1->phiargs[i];
4404       tree phi2op = vp2->phiargs[i];
4405       if (phi1op == phi2op)
4406 	continue;
4407       if (!expressions_equal_p (phi1op, phi2op))
4408 	return false;
4409     }
4410 
4411   return true;
4412 }
4413 
4414 /* Lookup PHI in the current hash table, and return the resulting
4415    value number if it exists in the hash table.  Return NULL_TREE if
4416    it does not exist in the hash table. */
4417 
4418 static tree
4419 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4420 {
4421   vn_phi_s **slot;
4422   struct vn_phi_s *vp1;
4423   edge e;
4424   edge_iterator ei;
4425 
4426   vp1 = XALLOCAVAR (struct vn_phi_s,
4427 		    sizeof (struct vn_phi_s)
4428 		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4429 
4430   /* Canonicalize the SSA_NAME's to their value number.  */
4431   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4432     {
4433       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4434       if (TREE_CODE (def) == SSA_NAME
4435 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4436 	def = SSA_VAL (def);
4437       vp1->phiargs[e->dest_idx] = def;
4438     }
4439   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4440   vp1->block = gimple_bb (phi);
4441   /* Extract values of the controlling condition.  */
4442   vp1->cclhs = NULL_TREE;
4443   vp1->ccrhs = NULL_TREE;
4444   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4445   if (EDGE_COUNT (idom1->succs) == 2)
4446     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4447       {
4448 	/* ???  We want to use SSA_VAL here.  But possibly not
4449 	   allow VN_TOP.  */
4450 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4451 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4452       }
4453   vp1->hashcode = vn_phi_compute_hash (vp1);
4454   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4455   if (!slot)
4456     return NULL_TREE;
4457   return (*slot)->result;
4458 }
4459 
4460 /* Insert PHI into the current hash table with a value number of
4461    RESULT.  */
4462 
4463 static vn_phi_t
4464 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4465 {
4466   vn_phi_s **slot;
4467   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4468 					   sizeof (vn_phi_s)
4469 					   + ((gimple_phi_num_args (phi) - 1)
4470 					      * sizeof (tree)));
4471   edge e;
4472   edge_iterator ei;
4473 
4474   /* Canonicalize the SSA_NAME's to their value number.  */
4475   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4476     {
4477       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4478       if (TREE_CODE (def) == SSA_NAME
4479 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4480 	def = SSA_VAL (def);
4481       vp1->phiargs[e->dest_idx] = def;
4482     }
4483   vp1->value_id = VN_INFO (result)->value_id;
4484   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4485   vp1->block = gimple_bb (phi);
4486   /* Extract values of the controlling condition.  */
4487   vp1->cclhs = NULL_TREE;
4488   vp1->ccrhs = NULL_TREE;
4489   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4490   if (EDGE_COUNT (idom1->succs) == 2)
4491     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4492       {
4493 	/* ???  We want to use SSA_VAL here.  But possibly not
4494 	   allow VN_TOP.  */
4495 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4496 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4497       }
4498   vp1->result = result;
4499   vp1->hashcode = vn_phi_compute_hash (vp1);
4500 
4501   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4502   gcc_assert (!*slot);
4503 
4504   *slot = vp1;
4505   vp1->next = last_inserted_phi;
4506   last_inserted_phi = vp1;
4507   return vp1;
4508 }
4509 
4510 
4511 /* Return true if BB1 is dominated by BB2 taking into account edges
4512    that are not executable.  When ALLOW_BACK is false consider
4513    non-executable backedges as executable.  */
4514 
4515 static bool
4516 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4517 {
4518   edge_iterator ei;
4519   edge e;
4520 
4521   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4522     return true;
4523 
4524   /* Before iterating we'd like to know if there exists an
4525      (executable) path from bb2 to bb1 at all; if not we can
4526      directly return false.  For now simply iterate once.  */
4527 
4528   /* Iterate to the single executable bb1 predecessor.  */
4529   if (EDGE_COUNT (bb1->preds) > 1)
4530     {
4531       edge prede = NULL;
4532       FOR_EACH_EDGE (e, ei, bb1->preds)
4533 	if ((e->flags & EDGE_EXECUTABLE)
4534 	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4535 	  {
4536 	    if (prede)
4537 	      {
4538 		prede = NULL;
4539 		break;
4540 	      }
4541 	    prede = e;
4542 	  }
4543       if (prede)
4544 	{
4545 	  bb1 = prede->src;
4546 
4547 	  /* Re-do the dominance check with changed bb1.  */
4548 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4549 	    return true;
4550 	}
4551     }
4552 
4553   /* Iterate to the single executable bb2 successor.  */
4554   edge succe = NULL;
4555   FOR_EACH_EDGE (e, ei, bb2->succs)
4556     if ((e->flags & EDGE_EXECUTABLE)
4557 	|| (!allow_back && (e->flags & EDGE_DFS_BACK)))
4558       {
4559 	if (succe)
4560 	  {
4561 	    succe = NULL;
4562 	    break;
4563 	  }
4564 	succe = e;
4565       }
4566   if (succe)
4567     {
4568       /* Verify the reached block is only reached through succe.
4569 	 If there is only one edge we can spare us the dominator
4570 	 check and iterate directly.  */
4571       if (EDGE_COUNT (succe->dest->preds) > 1)
4572 	{
4573 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
4574 	    if (e != succe
4575 		&& ((e->flags & EDGE_EXECUTABLE)
4576 		    || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4577 	      {
4578 		succe = NULL;
4579 		break;
4580 	      }
4581 	}
4582       if (succe)
4583 	{
4584 	  bb2 = succe->dest;
4585 
4586 	  /* Re-do the dominance check with changed bb2.  */
4587 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4588 	    return true;
4589 	}
4590     }
4591 
4592   /* We could now iterate updating bb1 / bb2.  */
4593   return false;
4594 }
4595 
4596 /* Set the value number of FROM to TO, return true if it has changed
4597    as a result.  */
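/* For example, once x_1 has been valued to the constant 4, a later
   attempt to change its value to a different constant does not switch
   values but instead forces x_1 to value number itself (VARYING);
   allowing arbitrary constant-to-constant lattice transitions could
   keep the iteration from terminating (see the PR95049 comment
   below).  */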
4598 
4599 static inline bool
4600 set_ssa_val_to (tree from, tree to)
4601 {
4602   vn_ssa_aux_t from_info = VN_INFO (from);
4603   tree currval = from_info->valnum; // SSA_VAL (from)
4604   poly_int64 toff, coff;
4605   bool curr_undefined = false;
4606   bool curr_invariant = false;
4607 
4608   /* The only thing we allow as value numbers are ssa_names
4609      and invariants.  So assert that here.  We don't allow VN_TOP
4610      as visiting a stmt should produce a value-number other than
4611      that.
4612      ???  Still VN_TOP can happen for unreachable code, so force
4613      it to varying in that case.  Not all code is prepared to
4614      get VN_TOP on valueization.  */
4615   if (to == VN_TOP)
4616     {
4617       /* ???  When iterating and visiting PHI <undef, backedge-value>
4618          for the first time we rightfully get VN_TOP and we need to
4619 	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4620 	 With SCCVN we were simply lucky we iterated the other PHI
4621 	 cycles first and thus visited the backedge-value DEF.  */
4622       if (currval == VN_TOP)
4623 	goto set_and_exit;
4624       if (dump_file && (dump_flags & TDF_DETAILS))
4625 	fprintf (dump_file, "Forcing value number to varying on "
4626 		 "receiving VN_TOP\n");
4627       to = from;
4628     }
4629 
4630   gcc_checking_assert (to != NULL_TREE
4631 		       && ((TREE_CODE (to) == SSA_NAME
4632 			    && (to == from || SSA_VAL (to) == to))
4633 			   || is_gimple_min_invariant (to)));
4634 
4635   if (from != to)
4636     {
4637       if (currval == from)
4638 	{
4639 	  if (dump_file && (dump_flags & TDF_DETAILS))
4640 	    {
4641 	      fprintf (dump_file, "Not changing value number of ");
4642 	      print_generic_expr (dump_file, from);
4643 	      fprintf (dump_file, " from VARYING to ");
4644 	      print_generic_expr (dump_file, to);
4645 	      fprintf (dump_file, "\n");
4646 	    }
4647 	  return false;
4648 	}
4649       curr_invariant = is_gimple_min_invariant (currval);
4650       curr_undefined = (TREE_CODE (currval) == SSA_NAME
4651 			&& ssa_undefined_value_p (currval, false));
4652       if (currval != VN_TOP
4653 	  && !curr_invariant
4654 	  && !curr_undefined
4655 	  && is_gimple_min_invariant (to))
4656 	{
4657 	  if (dump_file && (dump_flags & TDF_DETAILS))
4658 	    {
4659 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4660 		       "value number of ");
4661 	      print_generic_expr (dump_file, from);
4662 	      fprintf (dump_file, " from ");
4663 	      print_generic_expr (dump_file, currval);
4664 	      fprintf (dump_file, " (non-constant) to ");
4665 	      print_generic_expr (dump_file, to);
4666 	      fprintf (dump_file, " (constant)\n");
4667 	    }
4668 	  to = from;
4669 	}
4670       else if (currval != VN_TOP
4671 	       && !curr_undefined
4672 	       && TREE_CODE (to) == SSA_NAME
4673 	       && ssa_undefined_value_p (to, false))
4674 	{
4675 	  if (dump_file && (dump_flags & TDF_DETAILS))
4676 	    {
4677 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4678 		       "value number of ");
4679 	      print_generic_expr (dump_file, from);
4680 	      fprintf (dump_file, " from ");
4681 	      print_generic_expr (dump_file, currval);
4682 	      fprintf (dump_file, " (non-undefined) to ");
4683 	      print_generic_expr (dump_file, to);
4684 	      fprintf (dump_file, " (undefined)\n");
4685 	    }
4686 	  to = from;
4687 	}
4688       else if (TREE_CODE (to) == SSA_NAME
4689 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4690 	to = from;
4691     }
4692 
4693 set_and_exit:
4694   if (dump_file && (dump_flags & TDF_DETAILS))
4695     {
4696       fprintf (dump_file, "Setting value number of ");
4697       print_generic_expr (dump_file, from);
4698       fprintf (dump_file, " to ");
4699       print_generic_expr (dump_file, to);
4700     }
4701 
4702   if (currval != to
4703       && !operand_equal_p (currval, to, 0)
4704       /* Different undefined SSA names are not actually different.  See
4705          PR82320 for a testcase where we'd otherwise not terminate iteration.  */
4706       && !(curr_undefined
4707 	   && TREE_CODE (to) == SSA_NAME
4708 	   && ssa_undefined_value_p (to, false))
4709       /* ???  For addresses involving volatile objects or types operand_equal_p
4710          does not reliably detect ADDR_EXPRs as equal.  We know we are only
4711 	 getting invariant gimple addresses here, so can use
4712 	 get_addr_base_and_unit_offset to do this comparison.  */
4713       && !(TREE_CODE (currval) == ADDR_EXPR
4714 	   && TREE_CODE (to) == ADDR_EXPR
4715 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4716 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4717 	   && known_eq (coff, toff)))
4718     {
4719       if (to != from
4720 	  && currval != VN_TOP
4721 	  && !curr_undefined
4722 	  /* We do not want to allow lattice transitions from one value
4723 	     to another since that may lead to not terminating iteration
4724 	     (see PR95049).  Since there's no convenient way to check
4725 	     for the allowed transition of VAL -> PHI (loop entry value,
4726 	     same on two PHIs, to same PHI result) we restrict the check
4727 	     to invariants.  */
4728 	  && curr_invariant
4729 	  && is_gimple_min_invariant (to))
4730 	{
4731 	  if (dump_file && (dump_flags & TDF_DETAILS))
4732 	    fprintf (dump_file, " forced VARYING");
4733 	  to = from;
4734 	}
4735       if (dump_file && (dump_flags & TDF_DETAILS))
4736 	fprintf (dump_file, " (changed)\n");
4737       from_info->valnum = to;
4738       return true;
4739     }
4740   if (dump_file && (dump_flags & TDF_DETAILS))
4741     fprintf (dump_file, "\n");
4742   return false;
4743 }
4744 
4745 /* Value number all definitions in STMT to themselves.
4746    Return true if a value number changed.  */
4747 
4748 static bool
4749 defs_to_varying (gimple *stmt)
4750 {
4751   bool changed = false;
4752   ssa_op_iter iter;
4753   def_operand_p defp;
4754 
4755   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4756     {
4757       tree def = DEF_FROM_PTR (defp);
4758       changed |= set_ssa_val_to (def, def);
4759     }
4760   return changed;
4761 }
4762 
4763 /* Visit a copy between LHS and RHS, return true if the value number
4764    changed.  */
4765 
4766 static bool
4767 visit_copy (tree lhs, tree rhs)
4768 {
4769   /* Valueize.  */
4770   rhs = SSA_VAL (rhs);
4771 
4772   return set_ssa_val_to (lhs, rhs);
4773 }
4774 
4775 /* Lookup a value for OP in type WIDE_TYPE that is the same as the
4776    value of OP in OP's own type.  */
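/* Editorial example: for a short s_1 with an already value-numbered
   int t_2 = (int) s_1, looking up s_1 in type int yields t_2's value;
   an INTEGER_CST op is simply extended to WIDE_TYPE.  */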
4777 
4778 static tree
4779 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4780 {
4781   if (TREE_CODE (op) == SSA_NAME)
4782     op = vn_valueize (op);
4783 
4784   /* Either we have the op widened available.  */
4785   tree ops[3] = {};
4786   ops[0] = op;
4787   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4788 				       wide_type, ops, NULL);
4789   if (tem)
4790     return tem;
4791 
4792   /* Or the op is truncated from some existing value.  */
4793   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4794     {
4795       gimple *def = SSA_NAME_DEF_STMT (op);
4796       if (is_gimple_assign (def)
4797 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4798 	{
4799 	  tem = gimple_assign_rhs1 (def);
4800 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4801 	    {
4802 	      if (TREE_CODE (tem) == SSA_NAME)
4803 		tem = vn_valueize (tem);
4804 	      return tem;
4805 	    }
4806 	}
4807     }
4808 
4809   /* For constants simply extend it.  */
4810   if (TREE_CODE (op) == INTEGER_CST)
4811     return wide_int_to_tree (wide_type, wi::to_wide (op));
4812 
4813   return NULL_TREE;
4814 }
4815 
4816 /* Visit a nary operator RHS, value number it, and return true if the
4817    value number of LHS has changed as a result.  */
4818 
4819 static bool
4820 visit_nary_op (tree lhs, gassign *stmt)
4821 {
4822   vn_nary_op_t vnresult;
4823   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4824   if (! result && vnresult)
4825     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4826   if (result)
4827     return set_ssa_val_to (lhs, result);
4828 
4829   /* Do some special pattern matching for redundancies of operations
4830      in different types.  */
4831   enum tree_code code = gimple_assign_rhs_code (stmt);
4832   tree type = TREE_TYPE (lhs);
4833   tree rhs1 = gimple_assign_rhs1 (stmt);
4834   switch (code)
4835     {
4836     CASE_CONVERT:
4837       /* Match arithmetic done in a different type where we can easily
4838          substitute the result from some earlier sign-changed or widened
4839 	 operation.  */
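      /* Editorial GIMPLE example:
	   short _1 = a_2 + b_3;
	   int _4 = (int) _1;
	 can reuse an already available int-typed sum of a and b as the
	 value of _4, masked to the narrow precision for zero-extensions.  */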
4840       if (INTEGRAL_TYPE_P (type)
4841 	  && TREE_CODE (rhs1) == SSA_NAME
4842 	  /* We only handle sign-changes, zero-extension -> & mask or
4843 	     sign-extension if we know the inner operation doesn't
4844 	     overflow.  */
4845 	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4846 		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4847 		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4848 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4849 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4850 	{
4851 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4852 	  if (def
4853 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
4854 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
4855 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
4856 	    {
4857 	      tree ops[3] = {};
4858 	      /* When requiring a sign-extension we cannot model a
4859 		 previous truncation with a single op so don't bother.  */
4860 	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4861 	      /* Either we have the op widened available.  */
4862 	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4863 					   allow_truncate);
4864 	      if (ops[0])
4865 		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4866 					     allow_truncate);
4867 	      if (ops[0] && ops[1])
4868 		{
4869 		  ops[0] = vn_nary_op_lookup_pieces
4870 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
4871 		  /* We have wider operation available.  */
4872 		  if (ops[0]
4873 		      /* If the leader is a wrapping operation we can
4874 		         insert it for code hoisting w/o introducing
4875 			 undefined overflow.  If it is not it has to
4876 			 be available.  See PR86554.  */
4877 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4878 			  || (rpo_avail && vn_context_bb
4879 			      && rpo_avail->eliminate_avail (vn_context_bb,
4880 							     ops[0]))))
4881 		    {
4882 		      unsigned lhs_prec = TYPE_PRECISION (type);
4883 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4884 		      if (lhs_prec == rhs_prec
4885 			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4886 			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4887 			{
4888 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4889 						    NOP_EXPR, type, ops[0]);
4890 			  result = vn_nary_build_or_lookup (&match_op);
4891 			  if (result)
4892 			    {
4893 			      bool changed = set_ssa_val_to (lhs, result);
4894 			      vn_nary_op_insert_stmt (stmt, result);
4895 			      return changed;
4896 			    }
4897 			}
4898 		      else
4899 			{
4900 			  tree mask = wide_int_to_tree
4901 			    (type, wi::mask (rhs_prec, false, lhs_prec));
4902 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4903 						    BIT_AND_EXPR,
4904 						    TREE_TYPE (lhs),
4905 						    ops[0], mask);
4906 			  result = vn_nary_build_or_lookup (&match_op);
4907 			  if (result)
4908 			    {
4909 			      bool changed = set_ssa_val_to (lhs, result);
4910 			      vn_nary_op_insert_stmt (stmt, result);
4911 			      return changed;
4912 			    }
4913 			}
4914 		    }
4915 		}
4916 	    }
4917 	}
4918       break;
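    /* Editorial note for the case below: NAME & CST where NAME is defined
       by a non-volatile memory load re-does the reference lookup with the
       constant mask applied, so a store covering only the masked bits can
       still provide the value.  */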
4919     case BIT_AND_EXPR:
4920       if (INTEGRAL_TYPE_P (type)
4921 	  && TREE_CODE (rhs1) == SSA_NAME
4922 	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4923 	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4924 	  && default_vn_walk_kind != VN_NOWALK
4925 	  && CHAR_BIT == 8
4926 	  && BITS_PER_UNIT == 8
4927 	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4928 	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4929 	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
4930 	{
4931 	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4932 	  if (ass
4933 	      && !gimple_has_volatile_ops (ass)
4934 	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
4935 	    {
4936 	      tree last_vuse = gimple_vuse (ass);
4937 	      tree op = gimple_assign_rhs1 (ass);
4938 	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
4939 						 default_vn_walk_kind,
4940 						 NULL, true, &last_vuse,
4941 						 gimple_assign_rhs2 (stmt));
4942 	      if (result
4943 		  && useless_type_conversion_p (TREE_TYPE (result),
4944 						TREE_TYPE (op)))
4945 		return set_ssa_val_to (lhs, result);
4946 	    }
4947 	}
4948       break;
4949     case TRUNC_DIV_EXPR:
4950       if (TYPE_UNSIGNED (type))
4951 	break;
4952       /* Fallthru.  */
4953     case RDIV_EXPR:
4954     case MULT_EXPR:
4955       /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
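      /* Editorial example: for y_1 = n_2 * b_3 where n_2 is -a_4 and a
	 value v_5 = a_4 * b_3 already exists, y_1 is valued as -v_5,
	 built or looked up via NEGATE_EXPR.  */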
4956       if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
4957 	{
4958 	  tree rhs[2];
4959 	  rhs[0] = rhs1;
4960 	  rhs[1] = gimple_assign_rhs2 (stmt);
4961 	  for (unsigned i = 0; i <= 1; ++i)
4962 	    {
4963 	      unsigned j = i == 0 ? 1 : 0;
4964 	      tree ops[2];
4965 	      gimple_match_op match_op (gimple_match_cond::UNCOND,
4966 					NEGATE_EXPR, type, rhs[i]);
4967 	      ops[i] = vn_nary_build_or_lookup_1 (&match_op, false);
4968 	      ops[j] = rhs[j];
4969 	      if (ops[i]
4970 		  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
4971 							 type, ops, NULL)))
4972 		{
4973 		  gimple_match_op match_op (gimple_match_cond::UNCOND,
4974 					    NEGATE_EXPR, type, ops[0]);
4975 		  result = vn_nary_build_or_lookup (&match_op);
4976 		  if (result)
4977 		    {
4978 		      bool changed = set_ssa_val_to (lhs, result);
4979 		      vn_nary_op_insert_stmt (stmt, result);
4980 		      return changed;
4981 		    }
4982 		}
4983 	    }
4984 	}
4985       break;
4986     default:
4987       break;
4988     }
4989 
4990   bool changed = set_ssa_val_to (lhs, lhs);
4991   vn_nary_op_insert_stmt (stmt, lhs);
4992   return changed;
4993 }
4994 
4995 /* Visit a call STMT storing into LHS.  Return true if the value number
4996    of the LHS has changed as a result.  */
4997 
4998 static bool
4999 visit_reference_op_call (tree lhs, gcall *stmt)
5000 {
5001   bool changed = false;
5002   struct vn_reference_s vr1;
5003   vn_reference_t vnresult = NULL;
5004   tree vdef = gimple_vdef (stmt);
5005 
5006   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
5007   if (lhs && TREE_CODE (lhs) != SSA_NAME)
5008     lhs = NULL_TREE;
5009 
5010   vn_reference_lookup_call (stmt, &vnresult, &vr1);
5011   if (vnresult)
5012     {
5013       if (vnresult->result_vdef && vdef)
5014 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5015       else if (vdef)
5016 	/* If the call was discovered to be pure or const reflect
5017 	   that as far as possible.  */
5018 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
5019 
5020       if (!vnresult->result && lhs)
5021 	vnresult->result = lhs;
5022 
5023       if (vnresult->result && lhs)
5024 	changed |= set_ssa_val_to (lhs, vnresult->result);
5025     }
5026   else
5027     {
5028       vn_reference_t vr2;
5029       vn_reference_s **slot;
5030       tree vdef_val = vdef;
5031       if (vdef)
5032 	{
5033 	  /* If we value numbered an indirect call's function to one
5034 	     not clobbering memory, value number its VDEF to its
5035 	     VUSE.  */
5036 	  tree fn = gimple_call_fn (stmt);
5037 	  if (fn && TREE_CODE (fn) == SSA_NAME)
5038 	    {
5039 	      fn = SSA_VAL (fn);
5040 	      if (TREE_CODE (fn) == ADDR_EXPR
5041 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5042 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5043 		      & (ECF_CONST | ECF_PURE)))
5044 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5045 	    }
5046 	  changed |= set_ssa_val_to (vdef, vdef_val);
5047 	}
5048       if (lhs)
5049 	changed |= set_ssa_val_to (lhs, lhs);
5050       vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5051       vr2->vuse = vr1.vuse;
5052       /* As we are not walking the virtual operand chain we know the
5053 	 shared_lookup_references are still original so we can re-use
5054 	 them here.  */
5055       vr2->operands = vr1.operands.copy ();
5056       vr2->type = vr1.type;
5057       vr2->punned = vr1.punned;
5058       vr2->set = vr1.set;
5059       vr2->base_set = vr1.base_set;
5060       vr2->hashcode = vr1.hashcode;
5061       vr2->result = lhs;
5062       vr2->result_vdef = vdef_val;
5063       vr2->value_id = 0;
5064       slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5065 							  INSERT);
5066       gcc_assert (!*slot);
5067       *slot = vr2;
5068       vr2->next = last_inserted_ref;
5069       last_inserted_ref = vr2;
5070     }
5071 
5072   return changed;
5073 }
5074 
5075 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5076    and return true if the value number of the LHS has changed as a result.  */
5077 
5078 static bool
5079 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5080 {
5081   bool changed = false;
5082   tree last_vuse;
5083   tree result;
5084   vn_reference_t res;
5085 
5086   last_vuse = gimple_vuse (stmt);
5087   result = vn_reference_lookup (op, gimple_vuse (stmt),
5088 				default_vn_walk_kind, &res, true, &last_vuse);
5089 
5090   /* We handle type-punning through unions by value-numbering based
5091      on offset and size of the access.  Be prepared to handle a
5092      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
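  /* Editorial example: reading an int through a union member whose
     memory was last written as a float values the load as
     VIEW_CONVERT_EXPR <int> of the stored float value.  */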
5093   if (result
5094       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5095     {
5096       /* Avoid the type punning in case the result mode has padding where
5097 	 the op we lookup has not.  */
5098       if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5099 		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5100 	result = NULL_TREE;
5101       else
5102 	{
5103 	  /* We will be setting the value number of lhs to the value number
5104 	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5105 	     So first simplify and lookup this expression to see if it
5106 	     is already available.  */
5107 	  gimple_match_op res_op (gimple_match_cond::UNCOND,
5108 				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5109 	  result = vn_nary_build_or_lookup (&res_op);
5110 	  if (result
5111 	      && TREE_CODE (result) == SSA_NAME
5112 	      && VN_INFO (result)->needs_insertion)
5113 	    /* Track whether this is the canonical expression for different
5114 	       typed loads.  We use that as a stopgap measure for code
5115 	       hoisting when dealing with floating point loads.  */
5116 	    res->punned = true;
5117 	}
5118 
5119       /* When building the conversion fails avoid inserting the reference
5120          again.  */
5121       if (!result)
5122 	return set_ssa_val_to (lhs, lhs);
5123     }
5124 
5125   if (result)
5126     changed = set_ssa_val_to (lhs, result);
5127   else
5128     {
5129       changed = set_ssa_val_to (lhs, lhs);
5130       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5131     }
5132 
5133   return changed;
5134 }
5135 
5136 
5137 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5138    and return true if the value number of the LHS has changed as a result.  */
5139 
5140 static bool
5141 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5142 {
5143   bool changed = false;
5144   vn_reference_t vnresult = NULL;
5145   tree assign;
5146   bool resultsame = false;
5147   tree vuse = gimple_vuse (stmt);
5148   tree vdef = gimple_vdef (stmt);
5149 
5150   if (TREE_CODE (op) == SSA_NAME)
5151     op = SSA_VAL (op);
5152 
5153   /* First we want to lookup using the *vuses* from the store and see
5154      whether the last store to this location with the same address
5155      had the same value.
5156 
5157      The vuses represent the memory state before the store.  If the
5158      memory state, address, and value of the store is the same as the
5159      last store to this location, then this store will produce the
5160      same memory state as that store.
5161 
5162      In this case the vdef versions for this store are value numbered to those
5163      vuse versions, since they represent the same memory state after
5164      this store.
5165 
5166      Otherwise, the vdefs for the store are used when inserting into
5167      the table, since the store generates a new memory state.  */
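  /* Editorial example:
       *p_1 = x_2;
       ... no intervening store aliasing *p_1 ...
       *p_1 = x_2;
     values the second store's VDEF to its VUSE, i.e. it does not change
     the observable memory state and can later be elided.  */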
5168 
5169   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5170   if (vnresult
5171       && vnresult->result)
5172     {
5173       tree result = vnresult->result;
5174       gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5175 			   || result == SSA_VAL (result));
5176       resultsame = expressions_equal_p (result, op);
5177       if (resultsame)
5178 	{
5179 	  /* If the TBAA state isn't compatible for downstream reads
5180 	     we cannot value-number the VDEFs the same.  */
5181 	  ao_ref lhs_ref;
5182 	  ao_ref_init (&lhs_ref, lhs);
5183 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
5184 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5185 	  if ((vnresult->set != set
5186 	       && ! alias_set_subset_of (set, vnresult->set))
5187 	      || (vnresult->base_set != base_set
5188 		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
5189 	    resultsame = false;
5190 	}
5191     }
5192 
5193   if (!resultsame)
5194     {
5195       /* Only perform the following when being called from PRE
5196 	 which embeds tail merging.  */
5197       if (default_vn_walk_kind == VN_WALK)
5198 	{
5199 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5200 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5201 	  if (vnresult)
5202 	    {
5203 	      VN_INFO (vdef)->visited = true;
5204 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
5205 	    }
5206 	}
5207 
5208       if (dump_file && (dump_flags & TDF_DETAILS))
5209 	{
5210 	  fprintf (dump_file, "No store match\n");
5211 	  fprintf (dump_file, "Value numbering store ");
5212 	  print_generic_expr (dump_file, lhs);
5213 	  fprintf (dump_file, " to ");
5214 	  print_generic_expr (dump_file, op);
5215 	  fprintf (dump_file, "\n");
5216 	}
5217       /* Have to set value numbers before insert, since insert is
5218 	 going to valueize the references in-place.  */
5219       if (vdef)
5220 	changed |= set_ssa_val_to (vdef, vdef);
5221 
5222       /* Do not insert structure copies into the tables.  */
5223       if (is_gimple_min_invariant (op)
5224 	  || is_gimple_reg (op))
5225         vn_reference_insert (lhs, op, vdef, NULL);
5226 
5227       /* Only perform the following when being called from PRE
5228 	 which embeds tail merging.  */
5229       if (default_vn_walk_kind == VN_WALK)
5230 	{
5231 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5232 	  vn_reference_insert (assign, lhs, vuse, vdef);
5233 	}
5234     }
5235   else
5236     {
5237       /* We had a match, so value number the vdef to have the value
5238 	 number of the vuse it came from.  */
5239 
5240       if (dump_file && (dump_flags & TDF_DETAILS))
5241 	fprintf (dump_file, "Store matched earlier value, "
5242 		 "value numbering store vdefs to matching vuses.\n");
5243 
5244       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5245     }
5246 
5247   return changed;
5248 }
5249 
5250 /* Visit and value number PHI, return true if the value number
5251    changed.  When BACKEDGES_VARYING_P is true then assume all
5252    backedge values are varying.  When INSERTED is not NULL then
5253    this is just an ahead query for a possible iteration, set INSERTED
5254    to true if we'd insert into the hashtable.  */
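/* Editorial summary of the result selection below: special backedge and
   virtual-operand cases drop to VARYING; with at most one executable or
   only undefined arguments the PHI keeps that single or undefined value;
   a PHI already recorded as equivalent provides the value via CSE; all
   arguments agreeing provide the common value; anything else is VARYING
   and the PHI is inserted into the hash table.  */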
5255 
5256 static bool
5257 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5258 {
5259   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5260   tree backedge_val = NULL_TREE;
5261   bool seen_non_backedge = false;
5262   tree sameval_base = NULL_TREE;
5263   poly_int64 soff, doff;
5264   unsigned n_executable = 0;
5265   edge_iterator ei;
5266   edge e;
5267 
5268   /* TODO: We could check for this in initialization, and replace this
5269      with a gcc_assert.  */
5270   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5271     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5272 
5273   /* We track whether a PHI was CSEd in order to avoid excessive iterations
5274      that would be necessary only because the PHI changed arguments
5275      but not value.  */
5276   if (!inserted)
5277     gimple_set_plf (phi, GF_PLF_1, false);
5278 
5279   /* See if all non-TOP arguments have the same value.  TOP is
5280      equivalent to everything, so we can ignore it.  */
5281   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5282     if (e->flags & EDGE_EXECUTABLE)
5283       {
5284 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5285 
5286 	if (def == PHI_RESULT (phi))
5287 	  continue;
5288 	++n_executable;
5289 	if (TREE_CODE (def) == SSA_NAME)
5290 	  {
5291 	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5292 	      def = SSA_VAL (def);
5293 	    if (e->flags & EDGE_DFS_BACK)
5294 	      backedge_val = def;
5295 	  }
5296 	if (!(e->flags & EDGE_DFS_BACK))
5297 	  seen_non_backedge = true;
5298 	if (def == VN_TOP)
5299 	  ;
5300 	/* Ignore undefined defs for sameval but record one.  */
5301 	else if (TREE_CODE (def) == SSA_NAME
5302 		 && ! virtual_operand_p (def)
5303 		 && ssa_undefined_value_p (def, false))
5304 	  seen_undef = def;
5305 	else if (sameval == VN_TOP)
5306 	  sameval = def;
5307 	else if (!expressions_equal_p (def, sameval))
5308 	  {
5309 	    /* We know we're arriving only with invariant addresses here,
5310 	       try harder comparing them.  We can do some caching here
5311 	       which we cannot do in expressions_equal_p.  */
5312 	    if (TREE_CODE (def) == ADDR_EXPR
5313 		&& TREE_CODE (sameval) == ADDR_EXPR
5314 		&& sameval_base != (void *)-1)
5315 	      {
5316 		if (!sameval_base)
5317 		  sameval_base = get_addr_base_and_unit_offset
5318 				   (TREE_OPERAND (sameval, 0), &soff);
5319 		if (!sameval_base)
5320 		  sameval_base = (tree)(void *)-1;
5321 		else if ((get_addr_base_and_unit_offset
5322 			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
5323 			 && known_eq (soff, doff))
5324 		  continue;
5325 	      }
5326 	    sameval = NULL_TREE;
5327 	    break;
5328 	  }
5329       }
5330 
5331   /* If the value we want to use is flowing over the backedge and we
5332      should take it as VARYING but it has a non-VARYING value drop to
5333      VARYING.
5334      If we value-number a virtual operand never value-number to the
5335      value from the backedge as that confuses the alias-walking code.
5336      See gcc.dg/torture/pr87176.c.  If the value is the same on a
5337      non-backedge everything is OK though.  */
5338   bool visited_p;
5339   if ((backedge_val
5340        && !seen_non_backedge
5341        && TREE_CODE (backedge_val) == SSA_NAME
5342        && sameval == backedge_val
5343        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5344 	   || SSA_VAL (backedge_val) != backedge_val))
5345       /* Do not value-number a virtual operand to something not visited
5346 	 though, as that would allow us to escape a region in alias walking.  */
5347       || (sameval
5348 	  && TREE_CODE (sameval) == SSA_NAME
5349 	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5350 	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5351 	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
5352     /* Note this just drops to VARYING without inserting the PHI into
5353        the hashes.  */
5354     result = PHI_RESULT (phi);
5355   /* If none of the edges was executable keep the value-number at VN_TOP,
5356      if only a single edge is executable use its value.  */
5357   else if (n_executable <= 1)
5358     result = seen_undef ? seen_undef : sameval;
5359   /* If we saw only undefined values and VN_TOP use one of the
5360      undefined values.  */
5361   else if (sameval == VN_TOP)
5362     result = seen_undef ? seen_undef : sameval;
5363   /* First see if it is equivalent to a phi node in this block.  We prefer
5364      this as it allows IV elimination - see PRs 66502 and 67167.  */
5365   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5366     {
5367       if (!inserted
5368 	  && TREE_CODE (result) == SSA_NAME
5369 	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5370 	{
5371 	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5372 	  if (dump_file && (dump_flags & TDF_DETAILS))
5373 	    {
5374 	      fprintf (dump_file, "Marking CSEd to PHI node ");
5375 	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5376 				 0, TDF_SLIM);
5377 	      fprintf (dump_file, "\n");
5378 	    }
5379 	}
5380     }
5381   /* If all values are the same use that, unless we've seen undefined
5382      values as well and the value isn't constant.
5383      CCP/copyprop have the same restriction to not remove uninit warnings.  */
5384   else if (sameval
5385 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
5386     result = sameval;
5387   else
5388     {
5389       result = PHI_RESULT (phi);
5390       /* Only insert PHIs that are varying; for constant value numbers
5391          we would mess up equivalences otherwise, as we are only comparing
5392 	 the immediate controlling predicates.  */
5393       vn_phi_insert (phi, result, backedges_varying_p);
5394       if (inserted)
5395 	*inserted = true;
5396     }
5397 
5398   return set_ssa_val_to (PHI_RESULT (phi), result);
5399 }
5400 
5401 /* Try to simplify RHS using equivalences and constant folding.  */
5402 
5403 static tree
5404 try_to_simplify (gassign *stmt)
5405 {
5406   enum tree_code code = gimple_assign_rhs_code (stmt);
5407   tree tem;
5408 
5409   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
5410      in this case; there is no point in doing extra work.  */
5411   if (code == SSA_NAME)
5412     return NULL_TREE;
5413 
5414   /* First try constant folding based on our current lattice.  */
5415   mprts_hook = vn_lookup_simplify_result;
5416   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5417   mprts_hook = NULL;
5418   if (tem
5419       && (TREE_CODE (tem) == SSA_NAME
5420 	  || is_gimple_min_invariant (tem)))
5421     return tem;
5422 
5423   return NULL_TREE;
5424 }
5425 
5426 /* Visit and value number STMT, return true if the value number
5427    changed.  */
5428 
5429 static bool
5430 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5431 {
5432   bool changed = false;
5433 
5434   if (dump_file && (dump_flags & TDF_DETAILS))
5435     {
5436       fprintf (dump_file, "Value numbering stmt = ");
5437       print_gimple_stmt (dump_file, stmt, 0);
5438     }
5439 
5440   if (gimple_code (stmt) == GIMPLE_PHI)
5441     changed = visit_phi (stmt, NULL, backedges_varying_p);
5442   else if (gimple_has_volatile_ops (stmt))
5443     changed = defs_to_varying (stmt);
5444   else if (gassign *ass = dyn_cast <gassign *> (stmt))
5445     {
5446       enum tree_code code = gimple_assign_rhs_code (ass);
5447       tree lhs = gimple_assign_lhs (ass);
5448       tree rhs1 = gimple_assign_rhs1 (ass);
5449       tree simplified;
5450 
5451       /* Shortcut for copies. Simplifying copies is pointless,
5452 	 since we copy the expression and value they represent.  */
5453       if (code == SSA_NAME
5454 	  && TREE_CODE (lhs) == SSA_NAME)
5455 	{
5456 	  changed = visit_copy (lhs, rhs1);
5457 	  goto done;
5458 	}
5459       simplified = try_to_simplify (ass);
5460       if (simplified)
5461 	{
5462 	  if (dump_file && (dump_flags & TDF_DETAILS))
5463 	    {
5464 	      fprintf (dump_file, "RHS ");
5465 	      print_gimple_expr (dump_file, ass, 0);
5466 	      fprintf (dump_file, " simplified to ");
5467 	      print_generic_expr (dump_file, simplified);
5468 	      fprintf (dump_file, "\n");
5469 	    }
5470 	}
5471       /* Setting value numbers to constants will occasionally
5472 	 screw up phi congruence because constants are not
5473 	 uniquely associated with a single ssa name that can be
5474 	 looked up.  */
5475       if (simplified
5476 	  && is_gimple_min_invariant (simplified)
5477 	  && TREE_CODE (lhs) == SSA_NAME)
5478 	{
5479 	  changed = set_ssa_val_to (lhs, simplified);
5480 	  goto done;
5481 	}
5482       else if (simplified
5483 	       && TREE_CODE (simplified) == SSA_NAME
5484 	       && TREE_CODE (lhs) == SSA_NAME)
5485 	{
5486 	  changed = visit_copy (lhs, simplified);
5487 	  goto done;
5488 	}
5489 
5490       if ((TREE_CODE (lhs) == SSA_NAME
5491 	   /* We can substitute SSA_NAMEs that are live over
5492 	      abnormal edges with their constant value.  */
5493 	   && !(gimple_assign_copy_p (ass)
5494 		&& is_gimple_min_invariant (rhs1))
5495 	   && !(simplified
5496 		&& is_gimple_min_invariant (simplified))
5497 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5498 	  /* Stores or copies from SSA_NAMEs that are live over
5499 	     abnormal edges are a problem.  */
5500 	  || (code == SSA_NAME
5501 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5502 	changed = defs_to_varying (ass);
5503       else if (REFERENCE_CLASS_P (lhs)
5504 	       || DECL_P (lhs))
5505 	changed = visit_reference_op_store (lhs, rhs1, ass);
5506       else if (TREE_CODE (lhs) == SSA_NAME)
5507 	{
5508 	  if ((gimple_assign_copy_p (ass)
5509 	       && is_gimple_min_invariant (rhs1))
5510 	      || (simplified
5511 		  && is_gimple_min_invariant (simplified)))
5512 	    {
5513 	      if (simplified)
5514 		changed = set_ssa_val_to (lhs, simplified);
5515 	      else
5516 		changed = set_ssa_val_to (lhs, rhs1);
5517 	    }
5518 	  else
5519 	    {
5520 	      /* Visit the original statement.  */
5521 	      switch (vn_get_stmt_kind (ass))
5522 		{
5523 		case VN_NARY:
5524 		  changed = visit_nary_op (lhs, ass);
5525 		  break;
5526 		case VN_REFERENCE:
5527 		  changed = visit_reference_op_load (lhs, rhs1, ass);
5528 		  break;
5529 		default:
5530 		  changed = defs_to_varying (ass);
5531 		  break;
5532 		}
5533 	    }
5534 	}
5535       else
5536 	changed = defs_to_varying (ass);
5537     }
5538   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5539     {
5540       tree lhs = gimple_call_lhs (call_stmt);
5541       if (lhs && TREE_CODE (lhs) == SSA_NAME)
5542 	{
5543 	  /* Try constant folding based on our current lattice.  */
5544 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5545 							    vn_valueize);
5546 	  if (simplified)
5547 	    {
5548 	      if (dump_file && (dump_flags & TDF_DETAILS))
5549 		{
5550 		  fprintf (dump_file, "call ");
5551 		  print_gimple_expr (dump_file, call_stmt, 0);
5552 		  fprintf (dump_file, " simplified to ");
5553 		  print_generic_expr (dump_file, simplified);
5554 		  fprintf (dump_file, "\n");
5555 		}
5556 	    }
5557 	  /* Setting value numbers to constants will occasionally
5558 	     screw up phi congruence because constants are not
5559 	     uniquely associated with a single ssa name that can be
5560 	     looked up.  */
5561 	  if (simplified
5562 	      && is_gimple_min_invariant (simplified))
5563 	    {
5564 	      changed = set_ssa_val_to (lhs, simplified);
5565 	      if (gimple_vdef (call_stmt))
5566 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5567 					   SSA_VAL (gimple_vuse (call_stmt)));
5568 	      goto done;
5569 	    }
5570 	  else if (simplified
5571 		   && TREE_CODE (simplified) == SSA_NAME)
5572 	    {
5573 	      changed = visit_copy (lhs, simplified);
5574 	      if (gimple_vdef (call_stmt))
5575 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5576 					   SSA_VAL (gimple_vuse (call_stmt)));
5577 	      goto done;
5578 	    }
5579 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5580 	    {
5581 	      changed = defs_to_varying (call_stmt);
5582 	      goto done;
5583 	    }
5584 	}
5585 
5586       /* Pick up flags from a devirtualization target.  */
5587       tree fn = gimple_call_fn (stmt);
5588       int extra_fnflags = 0;
5589       if (fn && TREE_CODE (fn) == SSA_NAME)
5590 	{
5591 	  fn = SSA_VAL (fn);
5592 	  if (TREE_CODE (fn) == ADDR_EXPR
5593 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5594 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5595 	}
5596       if (!gimple_call_internal_p (call_stmt)
5597 	  && (/* Calls to the same function with the same vuse
5598 		 and the same operands do not necessarily return the same
5599 		 value, unless they're pure or const.  */
5600 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
5601 	       & (ECF_PURE | ECF_CONST))
5602 	      /* If calls have a vdef, subsequent calls won't have
5603 		 the same incoming vuse.  So, if 2 calls with vdef have the
5604 		 same vuse, we know they're not subsequent.
5605 		 We can value number two non-subsequent calls to the same
5606 		 function with the same vuse and the same operands as equal,
5607 		 because there is no code in the program that can compare
5608 		 the two values...  */
5609 	      || (gimple_vdef (call_stmt)
5610 		  /* ... unless the call returns a pointer which does
5611 		     not alias with anything else.  In which case the
5612 		     information that the values are distinct is encoded
5613 		     in the IL.  */
5614 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5615 		  /* Only perform the following when being called from PRE
5616 		     which embeds tail merging.  */
5617 		  && default_vn_walk_kind == VN_WALK)))
5618 	changed = visit_reference_op_call (lhs, call_stmt);
5619       else
5620 	changed = defs_to_varying (call_stmt);
5621     }
5622   else
5623     changed = defs_to_varying (stmt);
5624  done:
5625   return changed;
5626 }
5627 
5628 
5629 /* Allocate a value number table.  */
5630 
5631 static void
5632 allocate_vn_table (vn_tables_t table, unsigned size)
5633 {
5634   table->phis = new vn_phi_table_type (size);
5635   table->nary = new vn_nary_op_table_type (size);
5636   table->references = new vn_reference_table_type (size);
5637 }
5638 
5639 /* Free a value number table.  */
5640 
5641 static void
5642 free_vn_table (vn_tables_t table)
5643 {
5644   /* Walk over elements and release vectors.  */
5645   vn_reference_iterator_type hir;
5646   vn_reference_t vr;
5647   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5648     vr->operands.release ();
5649   delete table->phis;
5650   table->phis = NULL;
5651   delete table->nary;
5652   table->nary = NULL;
5653   delete table->references;
5654   table->references = NULL;
5655 }
5656 
5657 /* Set *ID according to RESULT.  */
5658 
5659 static void
5660 set_value_id_for_result (tree result, unsigned int *id)
5661 {
5662   if (result && TREE_CODE (result) == SSA_NAME)
5663     *id = VN_INFO (result)->value_id;
5664   else if (result && is_gimple_min_invariant (result))
5665     *id = get_or_alloc_constant_value_id (result);
5666   else
5667     *id = get_next_value_id ();
5668 }
5669 
5670 /* Set the value ids in the valid hash tables.  */
5671 
5672 static void
5673 set_hashtable_value_ids (void)
5674 {
5675   vn_nary_op_iterator_type hin;
5676   vn_phi_iterator_type hip;
5677   vn_reference_iterator_type hir;
5678   vn_nary_op_t vno;
5679   vn_reference_t vr;
5680   vn_phi_t vp;
5681 
5682   /* Now set the value ids of the things we had put in the hash
5683      table.  */
5684 
5685   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5686     if (! vno->predicated_values)
5687       set_value_id_for_result (vno->u.result, &vno->value_id);
5688 
5689   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5690     set_value_id_for_result (vp->result, &vp->value_id);
5691 
5692   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5693 			       hir)
5694     set_value_id_for_result (vr->result, &vr->value_id);
5695 }
5696 
5697 /* Return the maximum value id we have ever seen.  */
5698 
5699 unsigned int
5700 get_max_value_id (void)
5701 {
5702   return next_value_id;
5703 }
5704 
5705 /* Return the maximum constant value id we have ever seen.  */
5706 
5707 unsigned int
5708 get_max_constant_value_id (void)
5709 {
5710   return -next_constant_value_id;
5711 }
5712 
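/* Editorial note: value ids for non-constants are positive and count
   upwards in next_value_id, while constant value ids are negative and
   count downwards in next_constant_value_id; the sign separates the two
   spaces and get_max_constant_value_id negates the counter to report a
   positive maximum.  */
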
5713 /* Return the next unique value id.  */
5714 
5715 unsigned int
5716 get_next_value_id (void)
5717 {
5718   gcc_checking_assert ((int)next_value_id > 0);
5719   return next_value_id++;
5720 }
5721 
5722 /* Return the next unique value id for constants.  */
5723 
5724 unsigned int
5725 get_next_constant_value_id (void)
5726 {
5727   gcc_checking_assert (next_constant_value_id < 0);
5728   return next_constant_value_id--;
5729 }
5730 
5731 
5732 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5733 
5734 bool
5735 expressions_equal_p (tree e1, tree e2)
5736 {
5737   /* The obvious case.  */
5738   if (e1 == e2)
5739     return true;
5740 
5741   /* If either one is VN_TOP consider them equal.  */
5742   if (e1 == VN_TOP || e2 == VN_TOP)
5743     return true;
5744 
5745   /* SSA_NAMEs compare pointer equal.  */
5746   if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
5747     return false;
5748 
5749   /* Now perform the actual comparison.  */
5750   if (TREE_CODE (e1) == TREE_CODE (e2)
5751       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5752     return true;
5753 
5754   return false;
5755 }
5756 
5757 
5758 /* Return true if the nary operation NARY may trap.  This is a copy
5759    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5760 
5761 bool
5762 vn_nary_may_trap (vn_nary_op_t nary)
5763 {
5764   tree type;
5765   tree rhs2 = NULL_TREE;
5766   bool honor_nans = false;
5767   bool honor_snans = false;
5768   bool fp_operation = false;
5769   bool honor_trapv = false;
5770   bool handled, ret;
5771   unsigned i;
5772 
5773   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5774       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5775       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5776     {
5777       type = nary->type;
5778       fp_operation = FLOAT_TYPE_P (type);
5779       if (fp_operation)
5780 	{
5781 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5782 	  honor_snans = flag_signaling_nans != 0;
5783 	}
5784       else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5785 	honor_trapv = true;
5786     }
5787   if (nary->length >= 2)
5788     rhs2 = nary->op[1];
5789   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5790 				       honor_trapv, honor_nans, honor_snans,
5791 				       rhs2, &handled);
5792   if (handled && ret)
5793     return true;
5794 
5795   for (i = 0; i < nary->length; ++i)
5796     if (tree_could_trap_p (nary->op[i]))
5797       return true;
5798 
5799   return false;
5800 }
5801 
5802 /* Return true if the reference operation REF may trap.  */
5803 
5804 bool
5805 vn_reference_may_trap (vn_reference_t ref)
5806 {
5807   switch (ref->operands[0].opcode)
5808     {
5809     case MODIFY_EXPR:
5810     case CALL_EXPR:
5811       /* We do not handle calls.  */
5812     case ADDR_EXPR:
5813       /* And toplevel address computations never trap.  */
5814       return false;
5815     default:;
5816     }
5817 
5818   vn_reference_op_t op;
5819   unsigned i;
5820   FOR_EACH_VEC_ELT (ref->operands, i, op)
5821     {
5822       switch (op->opcode)
5823 	{
5824 	case WITH_SIZE_EXPR:
5825 	case TARGET_MEM_REF:
5826 	  /* Always variable.  */
5827 	  return true;
5828 	case COMPONENT_REF:
5829 	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5830 	    return true;
5831 	  break;
5832 	case ARRAY_RANGE_REF:
5833 	case ARRAY_REF:
5834 	  if (TREE_CODE (op->op0) == SSA_NAME)
5835 	    return true;
5836 	  break;
5837 	case MEM_REF:
5838 	  /* Nothing interesting in itself, the base is separate.  */
5839 	  break;
5840 	/* The following are the address bases.  */
5841 	case SSA_NAME:
5842 	  return true;
5843 	case ADDR_EXPR:
5844 	  if (op->op0)
5845 	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5846 	  return false;
5847 	default:;
5848 	}
5849     }
5850   return false;
5851 }
5852 
5853 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5854 					    bitmap inserted_exprs_)
5855   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5856     el_todo (0), eliminations (0), insertions (0),
5857     inserted_exprs (inserted_exprs_)
5858 {
5859   need_eh_cleanup = BITMAP_ALLOC (NULL);
5860   need_ab_cleanup = BITMAP_ALLOC (NULL);
5861 }
5862 
5863 eliminate_dom_walker::~eliminate_dom_walker ()
5864 {
5865   BITMAP_FREE (need_eh_cleanup);
5866   BITMAP_FREE (need_ab_cleanup);
5867 }
5868 
5869 /* Return a leader for OP that is available at the current point of the
5870    eliminate domwalk.  */
5871 
5872 tree
5873 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5874 {
5875   tree valnum = VN_INFO (op)->valnum;
5876   if (TREE_CODE (valnum) == SSA_NAME)
5877     {
5878       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5879 	return valnum;
5880       if (avail.length () > SSA_NAME_VERSION (valnum))
5881 	return avail[SSA_NAME_VERSION (valnum)];
5882     }
5883   else if (is_gimple_min_invariant (valnum))
5884     return valnum;
5885   return NULL_TREE;
5886 }
5887 
5888 /* At the current point of the eliminate domwalk make OP available.  */
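/* Editorial note: AVAIL is indexed by the SSA version of OP's value
   number, and the previous leader for that value is pushed onto
   AVAIL_STACK so that availability can presumably be unwound when the
   domwalk leaves the dominated region (the unwinding itself is not part
   of this function).  */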
5889 
5890 void
5891 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5892 {
5893   tree valnum = VN_INFO (op)->valnum;
5894   if (TREE_CODE (valnum) == SSA_NAME)
5895     {
5896       if (avail.length () <= SSA_NAME_VERSION (valnum))
5897 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
5898       tree pushop = op;
5899       if (avail[SSA_NAME_VERSION (valnum)])
5900 	pushop = avail[SSA_NAME_VERSION (valnum)];
5901       avail_stack.safe_push (pushop);
5902       avail[SSA_NAME_VERSION (valnum)] = op;
5903     }
5904 }
5905 
5906 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
5907    the leader for the expression if insertion was successful.  */
5908 
5909 tree
5910 eliminate_dom_walker::eliminate_insert (basic_block bb,
5911 					gimple_stmt_iterator *gsi, tree val)
5912 {
5913   /* We can insert a sequence with a single assignment only.  */
5914   gimple_seq stmts = VN_INFO (val)->expr;
5915   if (!gimple_seq_singleton_p (stmts))
5916     return NULL_TREE;
5917   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5918   if (!stmt
5919       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5920 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5921 	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
5922 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5923 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5924 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5925     return NULL_TREE;
5926 
5927   tree op = gimple_assign_rhs1 (stmt);
5928   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5929       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5930     op = TREE_OPERAND (op, 0);
5931   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5932   if (!leader)
5933     return NULL_TREE;
5934 
5935   tree res;
5936   stmts = NULL;
5937   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5938     res = gimple_build (&stmts, BIT_FIELD_REF,
5939 			TREE_TYPE (val), leader,
5940 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5941 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5942   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5943     res = gimple_build (&stmts, BIT_AND_EXPR,
5944 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5945   else
5946     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5947 			TREE_TYPE (val), leader);
5948   if (TREE_CODE (res) != SSA_NAME
5949       || SSA_NAME_IS_DEFAULT_DEF (res)
5950       || gimple_bb (SSA_NAME_DEF_STMT (res)))
5951     {
5952       gimple_seq_discard (stmts);
5953 
5954       /* During propagation we have to treat SSA info conservatively
5955          and thus we can end up simplifying the inserted expression
5956 	 at elimination time to something not defined in stmts.  */
5957       /* But then this is a redundancy we failed to detect.  Which means
5958          res now has two values.  That doesn't play well with how
5959 	 we track availability here, so give up.  */
5960       if (dump_file && (dump_flags & TDF_DETAILS))
5961 	{
5962 	  if (TREE_CODE (res) == SSA_NAME)
5963 	    res = eliminate_avail (bb, res);
5964 	  if (res)
5965 	    {
5966 	      fprintf (dump_file, "Failed to insert expression for value ");
5967 	      print_generic_expr (dump_file, val);
5968 	      fprintf (dump_file, " which is really fully redundant to ");
5969 	      print_generic_expr (dump_file, res);
5970 	      fprintf (dump_file, "\n");
5971 	    }
5972 	}
5973 
5974       return NULL_TREE;
5975     }
5976   else
5977     {
5978       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5979       vn_ssa_aux_t vn_info = VN_INFO (res);
5980       vn_info->valnum = val;
5981       vn_info->visited = true;
5982     }
5983 
5984   insertions++;
5985   if (dump_file && (dump_flags & TDF_DETAILS))
5986     {
5987       fprintf (dump_file, "Inserted ");
5988       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5989     }
5990 
5991   return res;
5992 }
5993 
5994 void
5995 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5996 {
5997   tree sprime = NULL_TREE;
5998   gimple *stmt = gsi_stmt (*gsi);
5999   tree lhs = gimple_get_lhs (stmt);
6000   if (lhs && TREE_CODE (lhs) == SSA_NAME
6001       && !gimple_has_volatile_ops (stmt)
6002       /* See PR43491.  Do not replace a global register variable when
6003 	 it is the RHS of an assignment.  Do replace local register
6004 	 variables since gcc does not guarantee a local variable will
6005 	 be allocated in register.
6006 	 ???  The fix isn't effective here.  This should instead
6007 	 be ensured by not value-numbering them the same but treating
6008 	 them like volatiles?  */
6009       && !(gimple_assign_single_p (stmt)
6010 	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6011 	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6012 	       && is_global_var (gimple_assign_rhs1 (stmt)))))
6013     {
6014       sprime = eliminate_avail (b, lhs);
6015       if (!sprime)
6016 	{
6017 	  /* If there is no existing usable leader but SCCVN thinks
6018 	     it has an expression it wants to use as replacement,
6019 	     insert that.  */
6020 	  tree val = VN_INFO (lhs)->valnum;
6021 	  vn_ssa_aux_t vn_info;
6022 	  if (val != VN_TOP
6023 	      && TREE_CODE (val) == SSA_NAME
6024 	      && (vn_info = VN_INFO (val), true)
6025 	      && vn_info->needs_insertion
6026 	      && vn_info->expr != NULL
6027 	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6028 	    eliminate_push_avail (b, sprime);
6029 	}
6030 
6031       /* If this now constitutes a copy duplicate points-to
6032 	 and range info appropriately.  This is especially
6033 	 important for inserted code.  See tree-ssa-copy.c
6034 	 for similar code.  */
6035       if (sprime
6036 	  && TREE_CODE (sprime) == SSA_NAME)
6037 	{
6038 	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6039 	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
6040 	      && SSA_NAME_PTR_INFO (lhs)
6041 	      && ! SSA_NAME_PTR_INFO (sprime))
6042 	    {
6043 	      duplicate_ssa_name_ptr_info (sprime,
6044 					   SSA_NAME_PTR_INFO (lhs));
6045 	      if (b != sprime_b)
6046 		reset_flow_sensitive_info (sprime);
6047 	    }
6048 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6049 		   && SSA_NAME_RANGE_INFO (lhs)
6050 		   && ! SSA_NAME_RANGE_INFO (sprime)
6051 		   && b == sprime_b)
6052 	    duplicate_ssa_name_range_info (sprime,
6053 					   SSA_NAME_RANGE_TYPE (lhs),
6054 					   SSA_NAME_RANGE_INFO (lhs));
6055 	}
6056 
6057       /* Inhibit the use of an inserted PHI on a loop header when
6058 	 the address of the memory reference is a simple induction
6059 	 variable.  In other cases the vectorizer won't do anything
6060 	 anyway (either it's loop invariant or a complicated
6061 	 expression).  */
6062       if (sprime
6063 	  && TREE_CODE (sprime) == SSA_NAME
6064 	  && do_pre
6065 	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6066 	  && loop_outer (b->loop_father)
6067 	  && has_zero_uses (sprime)
6068 	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6069 	  && gimple_assign_load_p (stmt))
6070 	{
6071 	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6072 	  basic_block def_bb = gimple_bb (def_stmt);
6073 	  if (gimple_code (def_stmt) == GIMPLE_PHI
6074 	      && def_bb->loop_father->header == def_bb)
6075 	    {
6076 	      loop_p loop = def_bb->loop_father;
6077 	      ssa_op_iter iter;
6078 	      tree op;
6079 	      bool found = false;
6080 	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6081 		{
6082 		  affine_iv iv;
6083 		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6084 		  if (def_bb
6085 		      && flow_bb_inside_loop_p (loop, def_bb)
6086 		      && simple_iv (loop, loop, op, &iv, true))
6087 		    {
6088 		      found = true;
6089 		      break;
6090 		    }
6091 		}
6092 	      if (found)
6093 		{
6094 		  if (dump_file && (dump_flags & TDF_DETAILS))
6095 		    {
6096 		      fprintf (dump_file, "Not replacing ");
6097 		      print_gimple_expr (dump_file, stmt, 0);
6098 		      fprintf (dump_file, " with ");
6099 		      print_generic_expr (dump_file, sprime);
6100 		      fprintf (dump_file, " which would add a loop"
6101 			       " carried dependence to loop %d\n",
6102 			       loop->num);
6103 		    }
6104 		  /* Don't keep sprime available.  */
6105 		  sprime = NULL_TREE;
6106 		}
6107 	    }
6108 	}
6109 
6110       if (sprime)
6111 	{
6112 	  /* If we can propagate the value computed for LHS into
6113 	     all uses don't bother doing anything with this stmt.  */
6114 	  if (may_propagate_copy (lhs, sprime))
6115 	    {
6116 	      /* Mark it for removal.  */
6117 	      to_remove.safe_push (stmt);
6118 
6119 	      /* ???  Don't count copy/constant propagations.  */
6120 	      if (gimple_assign_single_p (stmt)
6121 		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6122 		      || gimple_assign_rhs1 (stmt) == sprime))
6123 		return;
6124 
6125 	      if (dump_file && (dump_flags & TDF_DETAILS))
6126 		{
6127 		  fprintf (dump_file, "Replaced ");
6128 		  print_gimple_expr (dump_file, stmt, 0);
6129 		  fprintf (dump_file, " with ");
6130 		  print_generic_expr (dump_file, sprime);
6131 		  fprintf (dump_file, " in all uses of ");
6132 		  print_gimple_stmt (dump_file, stmt, 0);
6133 		}
6134 
6135 	      eliminations++;
6136 	      return;
6137 	    }
6138 
6139 	  /* If this is an assignment from our leader (which
6140 	     happens in the case the value-number is a constant)
6141 	     then there is nothing to do.  Likewise if we run into
6142 	     inserted code that needed a conversion because of
6143 	     our type-agnostic value-numbering of loads.  */
6144 	  if ((gimple_assign_single_p (stmt)
6145 	       || (is_gimple_assign (stmt)
6146 		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6147 		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6148 	      && sprime == gimple_assign_rhs1 (stmt))
6149 	    return;
6150 
6151 	  /* Else replace its RHS.  */
6152 	  if (dump_file && (dump_flags & TDF_DETAILS))
6153 	    {
6154 	      fprintf (dump_file, "Replaced ");
6155 	      print_gimple_expr (dump_file, stmt, 0);
6156 	      fprintf (dump_file, " with ");
6157 	      print_generic_expr (dump_file, sprime);
6158 	      fprintf (dump_file, " in ");
6159 	      print_gimple_stmt (dump_file, stmt, 0);
6160 	    }
6161 	  eliminations++;
6162 
6163 	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
6164 					 && stmt_can_make_abnormal_goto (stmt));
6165 	  gimple *orig_stmt = stmt;
6166 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
6167 					  TREE_TYPE (sprime)))
6168 	    {
6169 	      /* We preserve conversions to but not from function or method
6170 		 types.  This asymmetry makes it necessary to re-instantiate
6171 		 conversions here.  */
6172 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
6173 		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6174 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
6175 	      else
6176 		gcc_unreachable ();
6177 	    }
6178 	  tree vdef = gimple_vdef (stmt);
6179 	  tree vuse = gimple_vuse (stmt);
6180 	  propagate_tree_value_into_stmt (gsi, sprime);
6181 	  stmt = gsi_stmt (*gsi);
6182 	  update_stmt (stmt);
6183 	  /* In case the VDEF on the original stmt was released, value-number
6184 	     it to the VUSE.  This is to make vuse_ssa_val able to skip
6185 	     released virtual operands.  */
6186 	  if (vdef != gimple_vdef (stmt))
6187 	    {
6188 	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6189 	      VN_INFO (vdef)->valnum = vuse;
6190 	    }
6191 
6192 	  /* If we removed EH side-effects from the statement, clean
6193 	     its EH information.  */
6194 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6195 	    {
6196 	      bitmap_set_bit (need_eh_cleanup,
6197 			      gimple_bb (stmt)->index);
6198 	      if (dump_file && (dump_flags & TDF_DETAILS))
6199 		fprintf (dump_file, "  Removed EH side-effects.\n");
6200 	    }
6201 
6202 	  /* Likewise for AB side-effects.  */
6203 	  if (can_make_abnormal_goto
6204 	      && !stmt_can_make_abnormal_goto (stmt))
6205 	    {
6206 	      bitmap_set_bit (need_ab_cleanup,
6207 			      gimple_bb (stmt)->index);
6208 	      if (dump_file && (dump_flags & TDF_DETAILS))
6209 		fprintf (dump_file, "  Removed AB side-effects.\n");
6210 	    }
6211 
6212 	  return;
6213 	}
6214     }
6215 
6216   /* If the statement is a scalar store, see if the expression
6217      has the same value number as its rhs.  If so, the store is
6218      dead.  */
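  /* Editorial example:
       x_1 = *p_2;
       ...
       *p_2 = x_1;
     gives the stored RHS the same value number as what is already in
     memory, so the store is queued for removal (subject to the alias-set
     checks below).  */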
6219   if (gimple_assign_single_p (stmt)
6220       && !gimple_has_volatile_ops (stmt)
6221       && !is_gimple_reg (gimple_assign_lhs (stmt))
6222       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6223 	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6224     {
6225       tree rhs = gimple_assign_rhs1 (stmt);
6226       vn_reference_t vnresult;
6227       /* ???  gcc.dg/torture/pr91445.c shows that we look up a boolean
6228          typed load of a byte known to be 0x11 as 1 so a store of
6229 	 a boolean 1 is detected as redundant.  Because of this we
6230 	 have to make sure to look up with a ref whose size
6231 	 matches the precision.  */
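      /* A sketch of that situation: storing a boolean 1 into a byte that
	 currently holds 0x11 would be flagged as redundant if we looked
	 the LHS up with its boolean type, because that lookup yields 1;
	 using an integer type of the full access size below avoids
	 this.  */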
6232       tree lookup_lhs = lhs;
6233       if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6234 	  && (TREE_CODE (lhs) != COMPONENT_REF
6235 	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6236 	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6237 	{
6238 	  if (TREE_CODE (lhs) == COMPONENT_REF
6239 	      || TREE_CODE (lhs) == MEM_REF)
6240 	    {
6241 	      tree ltype = build_nonstandard_integer_type
6242 				(TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6243 				 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6244 	      if (TREE_CODE (lhs) == COMPONENT_REF)
6245 		{
6246 		  tree foff = component_ref_field_offset (lhs);
6247 		  tree f = TREE_OPERAND (lhs, 1);
6248 		  if (!poly_int_tree_p (foff))
6249 		    lookup_lhs = NULL_TREE;
6250 		  else
6251 		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6252 					 TREE_OPERAND (lhs, 0),
6253 					 TYPE_SIZE (TREE_TYPE (lhs)),
6254 					 bit_from_pos
6255 					   (foff, DECL_FIELD_BIT_OFFSET (f)));
6256 		}
6257 	      else
6258 		lookup_lhs = build2 (MEM_REF, ltype,
6259 				     TREE_OPERAND (lhs, 0),
6260 				     TREE_OPERAND (lhs, 1));
6261 	    }
6262 	  else
6263 	    lookup_lhs = NULL_TREE;
6264 	}
6265       tree val = NULL_TREE;
6266       if (lookup_lhs)
6267 	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6268 				   VN_WALKREWRITE, &vnresult, false);
6269       if (TREE_CODE (rhs) == SSA_NAME)
6270 	rhs = VN_INFO (rhs)->valnum;
6271       if (val
6272 	  && (operand_equal_p (val, rhs, 0)
6273 	      /* Due to the bitfield lookups above we can get bit
6274 		 interpretations of the same RHS as values here.  Those
6275 		 are redundant as well.  */
6276 	      || (TREE_CODE (val) == SSA_NAME
6277 		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6278 		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6279 		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
6280 		  && TREE_OPERAND (val, 0) == rhs)))
6281 	{
6282 	  /* We can only remove the later store if the former aliases
6283 	     at least all accesses the later one does or if the store
6284 	     was to readonly memory storing the same value.  */
6285 	  ao_ref lhs_ref;
6286 	  ao_ref_init (&lhs_ref, lhs);
6287 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
6288 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6289 	  if (! vnresult
6290 	      || ((vnresult->set == set
6291 		   || alias_set_subset_of (set, vnresult->set))
6292 		  && (vnresult->base_set == base_set
6293 		      || alias_set_subset_of (base_set, vnresult->base_set))))
6294 	    {
6295 	      if (dump_file && (dump_flags & TDF_DETAILS))
6296 		{
6297 		  fprintf (dump_file, "Deleted redundant store ");
6298 		  print_gimple_stmt (dump_file, stmt, 0);
6299 		}
6300 
6301 	      /* Queue stmt for removal.  */
6302 	      to_remove.safe_push (stmt);
6303 	      return;
6304 	    }
6305 	}
6306     }
6307 
6308   /* If this is a control statement and value numbering left one of
6309      its outgoing edges unexecuted, force the condition in a way
6310      consistent with that.  */
6311   if (gcond *cond = dyn_cast <gcond *> (stmt))
6312     {
6313       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6314 	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6315 	{
6316 	  if (dump_file && (dump_flags & TDF_DETAILS))
6317 	    {
6318 	      fprintf (dump_file, "Removing unexecutable edge from ");
6319 	      print_gimple_stmt (dump_file, stmt, 0);
6320 	    }
6321 	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6322 	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6323 	    gimple_cond_make_true (cond);
6324 	  else
6325 	    gimple_cond_make_false (cond);
6326 	  update_stmt (cond);
6327 	  el_todo |= TODO_cleanup_cfg;
6328 	  return;
6329 	}
6330     }
6331 
6332   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6333   bool was_noreturn = (is_gimple_call (stmt)
6334 		       && gimple_call_noreturn_p (stmt));
6335   tree vdef = gimple_vdef (stmt);
6336   tree vuse = gimple_vuse (stmt);
6337 
6338   /* If we didn't replace the whole stmt (or propagate the result
6339      into all uses), replace all uses on this stmt with their
6340      leaders.  */
6341   bool modified = false;
6342   use_operand_p use_p;
6343   ssa_op_iter iter;
6344   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6345     {
6346       tree use = USE_FROM_PTR (use_p);
6347       /* ???  The call code above leaves stmt operands un-updated.  */
6348       if (TREE_CODE (use) != SSA_NAME)
6349 	continue;
6350       tree sprime;
6351       if (SSA_NAME_IS_DEFAULT_DEF (use))
6352 	/* ???  For default defs BB shouldn't matter, but we have to
6353 	   solve the inconsistency between rpo eliminate and
6354 	   dom eliminate avail valueization first.  */
6355 	sprime = eliminate_avail (b, use);
6356       else
6357 	/* Look for something available at the definition block of the argument.
6358 	   This avoids inconsistencies between availability there which
6359 	   decides if the stmt can be removed and availability at the
6360 	   use site.  The SSA property ensures that things available
6361 	   at the definition are also available at uses.  */
6362 	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6363       if (sprime && sprime != use
6364 	  && may_propagate_copy (use, sprime)
6365 	  /* We substitute into debug stmts to avoid excessive
6366 	     debug temporaries created by removed stmts, but we need
6367 	     to avoid doing so for inserted sprimes as we never want
6368 	     to create debug temporaries for them.  */
6369 	  && (!inserted_exprs
6370 	      || TREE_CODE (sprime) != SSA_NAME
6371 	      || !is_gimple_debug (stmt)
6372 	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6373 	{
6374 	  propagate_value (use_p, sprime);
6375 	  modified = true;
6376 	}
6377     }
6378 
6379   /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
6380      into, which is a requirement for the IPA devirt machinery.  */
6381   gimple *old_stmt = stmt;
6382   if (modified)
6383     {
6384       /* If a formerly non-invariant ADDR_EXPR is turned into an
6385 	 invariant one it was on a separate stmt.  */
6386       if (gimple_assign_single_p (stmt)
6387 	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6388 	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6389       gimple_stmt_iterator prev = *gsi;
6390       gsi_prev (&prev);
6391       if (fold_stmt (gsi))
6392 	{
6393 	  /* fold_stmt may have created new stmts in between
6394 	     the previous stmt and the folded stmt.  Mark
6395 	     all defs created there as varying to not confuse
6396 	     the SCCVN machinery as we're using that even during
6397 	     elimination.  */
6398 	  if (gsi_end_p (prev))
6399 	    prev = gsi_start_bb (b);
6400 	  else
6401 	    gsi_next (&prev);
6402 	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
6403 	    do
6404 	      {
6405 		tree def;
6406 		ssa_op_iter dit;
6407 		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6408 					   dit, SSA_OP_ALL_DEFS)
6409 		    /* As existing DEFs may move between stmts
6410 		       only process new ones.  */
6411 		    if (! has_VN_INFO (def))
6412 		      {
6413 			vn_ssa_aux_t vn_info = VN_INFO (def);
6414 			vn_info->valnum = def;
6415 			vn_info->visited = true;
6416 		      }
6417 		if (gsi_stmt (prev) == gsi_stmt (*gsi))
6418 		  break;
6419 		gsi_next (&prev);
6420 	      }
6421 	    while (1);
6422 	}
6423       stmt = gsi_stmt (*gsi);
6424       /* In case we folded the stmt away schedule the NOP for removal.  */
6425       if (gimple_nop_p (stmt))
6426 	to_remove.safe_push (stmt);
6427     }
6428 
6429   /* Visit indirect calls and turn them into direct calls if
6430      possible using the devirtualization machinery.  Do this before
6431      checking for required EH/abnormal/noreturn cleanup as devirt
6432      may expose more of those.  */
6433   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6434     {
6435       tree fn = gimple_call_fn (call_stmt);
6436       if (fn
6437 	  && flag_devirtualize
6438 	  && virtual_method_call_p (fn))
6439 	{
6440 	  tree otr_type = obj_type_ref_class (fn);
6441 	  unsigned HOST_WIDE_INT otr_tok
6442 	      = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6443 	  tree instance;
6444 	  ipa_polymorphic_call_context context (current_function_decl,
6445 						fn, stmt, &instance);
6446 	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6447 				    otr_type, stmt, NULL);
6448 	  bool final;
6449 	  vec <cgraph_node *> targets
6450 	      = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6451 						   otr_tok, context, &final);
6452 	  if (dump_file)
6453 	    dump_possible_polymorphic_call_targets (dump_file,
6454 						    obj_type_ref_class (fn),
6455 						    otr_tok, context);
6456 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
6457 	    {
6458 	      tree fn;
6459 	      if (targets.length () == 1)
6460 		fn = targets[0]->decl;
6461 	      else
6462 		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6463 	      if (dump_enabled_p ())
6464 		{
6465 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6466 				   "converting indirect call to "
6467 				   "function %s\n",
6468 				   lang_hooks.decl_printable_name (fn, 2));
6469 		}
6470 	      gimple_call_set_fndecl (call_stmt, fn);
6471 	      /* If changing the call to __builtin_unreachable
6472 		 or similar noreturn function, adjust gimple_call_fntype
6473 		 too.  */
6474 	      if (gimple_call_noreturn_p (call_stmt)
6475 		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6476 		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
6477 		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6478 		      == void_type_node))
6479 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6480 	      maybe_remove_unused_call_args (cfun, call_stmt);
6481 	      modified = true;
6482 	    }
6483 	}
6484     }
6485 
6486   if (modified)
6487     {
6488       /* When changing a call into a noreturn call, cfg cleanup
6489 	 is needed to fix up the noreturn call.  */
6490       if (!was_noreturn
6491 	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6492 	to_fixup.safe_push  (stmt);
6493       /* When changing a condition or switch into one we know what
6494 	 edge will be executed, schedule a cfg cleanup.  */
6495       if ((gimple_code (stmt) == GIMPLE_COND
6496 	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
6497 	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
6498 	  || (gimple_code (stmt) == GIMPLE_SWITCH
6499 	      && TREE_CODE (gimple_switch_index
6500 			    (as_a <gswitch *> (stmt))) == INTEGER_CST))
6501 	el_todo |= TODO_cleanup_cfg;
6502       /* If we removed EH side-effects from the statement, clean
6503 	 its EH information.  */
6504       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6505 	{
6506 	  bitmap_set_bit (need_eh_cleanup,
6507 			  gimple_bb (stmt)->index);
6508 	  if (dump_file && (dump_flags & TDF_DETAILS))
6509 	    fprintf (dump_file, "  Removed EH side-effects.\n");
6510 	}
6511       /* Likewise for AB side-effects.  */
6512       if (can_make_abnormal_goto
6513 	  && !stmt_can_make_abnormal_goto (stmt))
6514 	{
6515 	  bitmap_set_bit (need_ab_cleanup,
6516 			  gimple_bb (stmt)->index);
6517 	  if (dump_file && (dump_flags & TDF_DETAILS))
6518 	    fprintf (dump_file, "  Removed AB side-effects.\n");
6519 	}
6520       update_stmt (stmt);
6521       /* In case the VDEF on the original stmt was released, value-number
6522          it to the VUSE.  This is to make vuse_ssa_val able to skip
6523 	 released virtual operands.  */
6524       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6525 	VN_INFO (vdef)->valnum = vuse;
6526     }
6527 
6528   /* Make new values available - for fully redundant LHS we
6529      continue with the next stmt above and skip this.  */
6530   def_operand_p defp;
6531   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6532     eliminate_push_avail (b, DEF_FROM_PTR (defp));
6533 }
6534 
6535 /* Perform elimination for the basic-block B during the domwalk.  */
6536 
6537 edge
6538 eliminate_dom_walker::before_dom_children (basic_block b)
6539 {
6540   /* Mark the start of a new block; after_dom_children pops the
     avail stack back to this NULL_TREE marker.  */
6541   avail_stack.safe_push (NULL_TREE);
6542 
6543   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
6544   if (!(b->flags & BB_EXECUTABLE))
6545     return NULL;
6546 
6547   vn_context_bb = b;
6548 
6549   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6550     {
6551       gphi *phi = gsi.phi ();
6552       tree res = PHI_RESULT (phi);
6553 
6554       if (virtual_operand_p (res))
6555 	{
6556 	  gsi_next (&gsi);
6557 	  continue;
6558 	}
6559 
6560       tree sprime = eliminate_avail (b, res);
6561       if (sprime
6562 	  && sprime != res)
6563 	{
6564 	  if (dump_file && (dump_flags & TDF_DETAILS))
6565 	    {
6566 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
6567 	      print_generic_expr (dump_file, res);
6568 	      fprintf (dump_file, " with ");
6569 	      print_generic_expr (dump_file, sprime);
6570 	      fprintf (dump_file, "\n");
6571 	    }
6572 
6573 	  /* If we inserted this PHI node ourself, it's not an elimination.  */
6574 	  if (! inserted_exprs
6575 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6576 	    eliminations++;
6577 
6578 	  /* If we will propagate into all uses don't bother to do
6579 	     anything.  */
6580 	  if (may_propagate_copy (res, sprime))
6581 	    {
6582 	      /* Mark the PHI for removal.  */
6583 	      to_remove.safe_push (phi);
6584 	      gsi_next (&gsi);
6585 	      continue;
6586 	    }
6587 
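	  /* Otherwise materialize the replacement as an explicit copy
	     res = sprime at the start of the block.  */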
6588 	  remove_phi_node (&gsi, false);
6589 
6590 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6591 	    sprime = fold_convert (TREE_TYPE (res), sprime);
6592 	  gimple *stmt = gimple_build_assign (res, sprime);
6593 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6594 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6595 	  continue;
6596 	}
6597 
6598       eliminate_push_avail (b, res);
6599       gsi_next (&gsi);
6600     }
6601 
6602   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6603        !gsi_end_p (gsi);
6604        gsi_next (&gsi))
6605     eliminate_stmt (b, &gsi);
6606 
6607   /* Replace destination PHI arguments.  */
6608   edge_iterator ei;
6609   edge e;
6610   FOR_EACH_EDGE (e, ei, b->succs)
6611     if (e->flags & EDGE_EXECUTABLE)
6612       for (gphi_iterator gsi = gsi_start_phis (e->dest);
6613 	   !gsi_end_p (gsi);
6614 	   gsi_next (&gsi))
6615 	{
6616 	  gphi *phi = gsi.phi ();
6617 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6618 	  tree arg = USE_FROM_PTR (use_p);
6619 	  if (TREE_CODE (arg) != SSA_NAME
6620 	      || virtual_operand_p (arg))
6621 	    continue;
6622 	  tree sprime = eliminate_avail (b, arg);
6623 	  if (sprime && may_propagate_copy (arg, sprime))
6624 	    propagate_value (use_p, sprime);
6625 	}
6626 
6627   vn_context_bb = NULL;
6628 
6629   return NULL;
6630 }
6631 
6632 /* Make leaders that went out of scope no longer available.  */
6633 
6634 void
6635 eliminate_dom_walker::after_dom_children (basic_block)
6636 {
6637   tree entry;
6638   while ((entry = avail_stack.pop ()) != NULL_TREE)
6639     {
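      /* If the popped entry is still the current leader for its value it
	 was the first one made available, so clear the slot; otherwise
	 the popped entry is the leader it shadowed, so restore it.  */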
6640       tree valnum = VN_INFO (entry)->valnum;
6641       tree old = avail[SSA_NAME_VERSION (valnum)];
6642       if (old == entry)
6643 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6644       else
6645 	avail[SSA_NAME_VERSION (valnum)] = entry;
6646     }
6647 }
6648 
6649 /* Remove queued stmts and perform delayed cleanups.  */
6650 
6651 unsigned
6652 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6653 {
6654   statistics_counter_event (cfun, "Eliminated", eliminations);
6655   statistics_counter_event (cfun, "Insertions", insertions);
6656 
6657   /* We cannot remove stmts during BB walk, especially not release SSA
6658      names there as this confuses the VN machinery.  The stmts ending
6659      up in to_remove are either stores or simple copies.
6660      Remove stmts in reverse order to make debug stmt creation possible.  */
6661   while (!to_remove.is_empty ())
6662     {
6663       bool do_release_defs = true;
6664       gimple *stmt = to_remove.pop ();
6665 
6666       /* When we are value-numbering a region we do not require exit PHIs to
6667 	 be present so we have to make sure to deal with out-of-region uses
6668 	 of stmts that we thought were eliminated.
6669 	 ??? Note we may be confused by uses in dead regions we didn't run
6670 	 elimination on.  Rather than checking individual uses we accept
6671 	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6672 	 contains such an example).  */
6673       if (region_p)
6674 	{
6675 	  if (gphi *phi = dyn_cast <gphi *> (stmt))
6676 	    {
6677 	      tree lhs = gimple_phi_result (phi);
6678 	      if (!has_zero_uses (lhs))
6679 		{
6680 		  if (dump_file && (dump_flags & TDF_DETAILS))
6681 		    fprintf (dump_file, "Keeping eliminated stmt live "
6682 			     "as copy because of out-of-region uses\n");
6683 		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6684 		  gimple *copy = gimple_build_assign (lhs, sprime);
6685 		  gimple_stmt_iterator gsi
6686 		    = gsi_after_labels (gimple_bb (stmt));
6687 		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6688 		  do_release_defs = false;
6689 		}
6690 	    }
6691 	  else if (tree lhs = gimple_get_lhs (stmt))
6692 	    if (TREE_CODE (lhs) == SSA_NAME
6693 		&& !has_zero_uses (lhs))
6694 	      {
6695 		if (dump_file && (dump_flags & TDF_DETAILS))
6696 		  fprintf (dump_file, "Keeping eliminated stmt live "
6697 			   "as copy because of out-of-region uses\n");
6698 		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6699 		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6700 		if (is_gimple_assign (stmt))
6701 		  {
6702 		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
6703 		    stmt = gsi_stmt (gsi);
6704 		    update_stmt (stmt);
6705 		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6706 		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6707 		    continue;
6708 		  }
6709 		else
6710 		  {
6711 		    gimple *copy = gimple_build_assign (lhs, sprime);
6712 		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6713 		    do_release_defs = false;
6714 		  }
6715 	      }
6716 	}
6717 
6718       if (dump_file && (dump_flags & TDF_DETAILS))
6719 	{
6720 	  fprintf (dump_file, "Removing dead stmt ");
6721 	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6722 	}
6723 
6724       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6725       if (gimple_code (stmt) == GIMPLE_PHI)
6726 	remove_phi_node (&gsi, do_release_defs);
6727       else
6728 	{
6729 	  basic_block bb = gimple_bb (stmt);
6730 	  unlink_stmt_vdef (stmt);
6731 	  if (gsi_remove (&gsi, true))
6732 	    bitmap_set_bit (need_eh_cleanup, bb->index);
6733 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6734 	    bitmap_set_bit (need_ab_cleanup, bb->index);
6735 	  if (do_release_defs)
6736 	    release_defs (stmt);
6737 	}
6738 
6739       /* Removing a stmt may expose a forwarder block.  */
6740       el_todo |= TODO_cleanup_cfg;
6741     }
6742 
6743   /* Fixup stmts that became noreturn calls.  This may require splitting
6744      blocks and thus isn't possible during the dominator walk.  Do this
6745      in reverse order so we don't inadvertently remove a stmt we want to
6746      fixup by visiting a dominating now noreturn call first.  */
6747   while (!to_fixup.is_empty ())
6748     {
6749       gimple *stmt = to_fixup.pop ();
6750 
6751       if (dump_file && (dump_flags & TDF_DETAILS))
6752 	{
6753 	  fprintf (dump_file, "Fixing up noreturn call ");
6754 	  print_gimple_stmt (dump_file, stmt, 0);
6755 	}
6756 
6757       if (fixup_noreturn_call (stmt))
6758 	el_todo |= TODO_cleanup_cfg;
6759     }
6760 
6761   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6762   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6763 
6764   if (do_eh_cleanup)
6765     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6766 
6767   if (do_ab_cleanup)
6768     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6769 
6770   if (do_eh_cleanup || do_ab_cleanup)
6771     el_todo |= TODO_cleanup_cfg;
6772 
6773   return el_todo;
6774 }
6775 
6776 /* Eliminate fully redundant computations.  */
6777 
6778 unsigned
6779 eliminate_with_rpo_vn (bitmap inserted_exprs)
6780 {
6781   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6782 
6783   eliminate_dom_walker *saved_rpo_avail = rpo_avail;
6784   rpo_avail = &walker;
6785   walker.walk (cfun->cfg->x_entry_block_ptr);
6786   rpo_avail = saved_rpo_avail;
6787 
6788   return walker.eliminate_cleanup ();
6789 }
6790 
6791 static unsigned
6792 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6793 	   bool iterate, bool eliminate);
6794 
6795 void
6796 run_rpo_vn (vn_lookup_kind kind)
6797 {
6798   default_vn_walk_kind = kind;
6799   do_rpo_vn (cfun, NULL, NULL, true, false);
6800 
6801   /* ???  Prune requirement of these.  */
6802   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6803 
6804   /* Initialize the value ids and prune out remaining VN_TOPs
6805      from dead code.  */
6806   tree name;
6807   unsigned i;
6808   FOR_EACH_SSA_NAME (i, name, cfun)
6809     {
6810       vn_ssa_aux_t info = VN_INFO (name);
6811       if (!info->visited
6812 	  || info->valnum == VN_TOP)
6813 	info->valnum = name;
6814       if (info->valnum == name)
6815 	info->value_id = get_next_value_id ();
6816       else if (is_gimple_min_invariant (info->valnum))
6817 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
6818     }
6819 
6820   /* Propagate value ids from leaders so that all names sharing a value
     number share its value id.  */
6821   FOR_EACH_SSA_NAME (i, name, cfun)
6822     {
6823       vn_ssa_aux_t info = VN_INFO (name);
6824       if (TREE_CODE (info->valnum) == SSA_NAME
6825 	  && info->valnum != name
6826 	  && info->value_id != VN_INFO (info->valnum)->value_id)
6827 	info->value_id = VN_INFO (info->valnum)->value_id;
6828     }
6829 
6830   set_hashtable_value_ids ();
6831 
6832   if (dump_file && (dump_flags & TDF_DETAILS))
6833     {
6834       fprintf (dump_file, "Value numbers:\n");
6835       FOR_EACH_SSA_NAME (i, name, cfun)
6836 	{
6837 	  if (VN_INFO (name)->visited
6838 	      && SSA_VAL (name) != name)
6839 	    {
6840 	      print_generic_expr (dump_file, name);
6841 	      fprintf (dump_file, " = ");
6842 	      print_generic_expr (dump_file, SSA_VAL (name));
6843 	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6844 	    }
6845 	}
6846     }
6847 }
6848 
6849 /* Free VN associated data structures.  */
6850 
6851 void
6852 free_rpo_vn (void)
6853 {
6854   free_vn_table (valid_info);
6855   XDELETE (valid_info);
6856   obstack_free (&vn_tables_obstack, NULL);
6857   obstack_free (&vn_tables_insert_obstack, NULL);
6858 
6859   vn_ssa_aux_iterator_type it;
6860   vn_ssa_aux_t info;
6861   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6862     if (info->needs_insertion)
6863       release_ssa_name (info->name);
6864   obstack_free (&vn_ssa_aux_obstack, NULL);
6865   delete vn_ssa_aux_hash;
6866 
6867   delete constant_to_value_id;
6868   constant_to_value_id = NULL;
6869 }
6870 
6871 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
6872 
6873 static tree
6874 vn_lookup_simplify_result (gimple_match_op *res_op)
6875 {
6876   if (!res_op->code.is_tree_code ())
6877     return NULL_TREE;
6878   tree *ops = res_op->ops;
6879   unsigned int length = res_op->num_ops;
6880   if (res_op->code == CONSTRUCTOR
6881       /* ???  We arrive here with SCCVN's view, a decomposed CONSTRUCTOR,
6882          while GIMPLE / match-and-simplify expect the CONSTRUCTOR as a
         GENERIC tree.  */
6883       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6884     {
6885       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6886       ops = XALLOCAVEC (tree, length);
6887       for (unsigned i = 0; i < length; ++i)
6888 	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6889     }
6890   vn_nary_op_t vnresult = NULL;
6891   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6892 				       res_op->type, ops, &vnresult);
6893   /* If this is used from expression simplification make sure to
6894      return an available expression.  */
6895   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6896     res = rpo_avail->eliminate_avail (vn_context_bb, res);
6897   return res;
6898 }
6899 
6900 /* Return a leader for OP's value that is valid at BB.  */
6901 
6902 tree
6903 rpo_elim::eliminate_avail (basic_block bb, tree op)
6904 {
6905   bool visited;
6906   tree valnum = SSA_VAL (op, &visited);
6907   /* If we didn't visit OP then it must be defined outside of the
6908      region we process and also dominate it.  So it is available.  */
6909   if (!visited)
6910     return op;
6911   if (TREE_CODE (valnum) == SSA_NAME)
6912     {
6913       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6914 	return valnum;
6915       vn_avail *av = VN_INFO (valnum)->avail;
6916       if (!av)
6917 	return NULL_TREE;
6918       if (av->location == bb->index)
6919 	/* On tramp3d 90% of the cases are here.  */
6920 	return ssa_name (av->leader);
6921       do
6922 	{
6923 	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6924 	  /* ???  During elimination we have to use availability at the
6925 	     definition site of a use we try to replace.  This
6926 	     is required to not run into inconsistencies because
6927 	     of dominated_by_p_w_unex behavior and removing a definition
6928 	     while not replacing all uses.
6929 	     ???  We could try to consistently walk dominators
6930 	     ignoring non-executable regions.  The nearest common
6931 	     dominator of bb and abb is where we can stop walking.  We
6932 	     may also be able to "pre-compute" (bits of) the next immediate
6933 	     (non-)dominator during the RPO walk when marking edges as
6934 	     executable.  */
6935 	  if (dominated_by_p_w_unex (bb, abb, true))
6936 	    {
6937 	      tree leader = ssa_name (av->leader);
6938 	      /* Prevent eliminations that break loop-closed SSA.  */
6939 	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6940 		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6941 		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6942 							 (leader))->loop_father,
6943 					      bb))
6944 		return NULL_TREE;
6945 	      if (dump_file && (dump_flags & TDF_DETAILS))
6946 		{
6947 		  print_generic_expr (dump_file, leader);
6948 		  fprintf (dump_file, " is available for ");
6949 		  print_generic_expr (dump_file, valnum);
6950 		  fprintf (dump_file, "\n");
6951 		}
6952 	      /* On tramp3d 99% of the _remaining_ cases succeed at
6953 	         the first entry.  */
6954 	      return leader;
6955 	    }
6956 	  /* ???  Can we somehow skip to the immediate dominator
6957 	     RPO index (bb_to_rpo)?  Again, maybe not worth, on
6958 	     tramp3d the worst number of elements in the vector is 9.  */
6959 	  av = av->next;
6960 	}
6961       while (av);
6962     }
6963   else if (valnum != VN_TOP)
6964     /* valnum is is_gimple_min_invariant.  */
6965     return valnum;
6966   return NULL_TREE;
6967 }
6968 
6969 /* Make LEADER a leader for its value at BB.  */
6970 
6971 void
6972 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6973 {
6974   tree valnum = VN_INFO (leader)->valnum;
6975   if (valnum == VN_TOP
6976       || is_gimple_min_invariant (valnum))
6977     return;
6978   if (dump_file && (dump_flags & TDF_DETAILS))
6979     {
6980       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6981       print_generic_expr (dump_file, leader);
6982       fprintf (dump_file, " for value ");
6983       print_generic_expr (dump_file, valnum);
6984       fprintf (dump_file, "\n");
6985     }
6986   vn_ssa_aux_t value = VN_INFO (valnum);
6987   vn_avail *av;
6988   if (m_avail_freelist)
6989     {
6990       av = m_avail_freelist;
6991       m_avail_freelist = m_avail_freelist->next;
6992     }
6993   else
6994     av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
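  /* Chain the new entry onto the value's availability list and onto the
     global undo chain (next_undo) which do_unwind walks when the RPO
     walk has to iterate.  */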
6995   av->location = bb->index;
6996   av->leader = SSA_NAME_VERSION (leader);
6997   av->next = value->avail;
6998   av->next_undo = last_pushed_avail;
6999   last_pushed_avail = value;
7000   value->avail = av;
7001 }
7002 
7003 /* Valueization hook for RPO VN plus required state.  */
7004 
7005 tree
7006 rpo_vn_valueize (tree name)
7007 {
7008   if (TREE_CODE (name) == SSA_NAME)
7009     {
7010       vn_ssa_aux_t val = VN_INFO (name);
7011       if (val)
7012 	{
7013 	  tree tem = val->valnum;
7014 	  if (tem != VN_TOP && tem != name)
7015 	    {
7016 	      if (TREE_CODE (tem) != SSA_NAME)
7017 		return tem;
7018 	      /* For all values we only valueize to an available leader
7019 		 which means we can use SSA name info without restriction.  */
7020 	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7021 	      if (tem)
7022 		return tem;
7023 	    }
7024 	}
7025     }
7026   return name;
7027 }
7028 
7029 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7030    inverted condition.  */
7031 
7032 static void
7033 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7034 {
7035   switch (code)
7036     {
7037     case LT_EXPR:
7038       /* a < b -> a {!,<}= b */
7039       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7040 					   ops, boolean_true_node, 0, pred_e);
7041       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7042 					   ops, boolean_true_node, 0, pred_e);
7043       /* a < b -> ! a {>,=} b */
7044       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7045 					   ops, boolean_false_node, 0, pred_e);
7046       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7047 					   ops, boolean_false_node, 0, pred_e);
7048       break;
7049     case GT_EXPR:
7050       /* a > b -> a {!,>}= b */
7051       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7052 					   ops, boolean_true_node, 0, pred_e);
7053       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7054 					   ops, boolean_true_node, 0, pred_e);
7055       /* a > b -> ! a {<,=} b */
7056       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7057 					   ops, boolean_false_node, 0, pred_e);
7058       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7059 					   ops, boolean_false_node, 0, pred_e);
7060       break;
7061     case EQ_EXPR:
7062       /* a == b -> ! a {<,>} b */
7063       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7064 					   ops, boolean_false_node, 0, pred_e);
7065       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7066 					   ops, boolean_false_node, 0, pred_e);
7067       break;
7068     case LE_EXPR:
7069     case GE_EXPR:
7070     case NE_EXPR:
7071       /* Nothing besides inverted condition.  */
7072       break;
7073     default:;
7074     }
7075 }
7076 
7077 /* Main stmt worker for RPO VN, process BB.  */
7078 
7079 static unsigned
7080 process_bb (rpo_elim &avail, basic_block bb,
7081 	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7082 	    bool do_region, bitmap exit_bbs, bool skip_phis)
7083 {
7084   unsigned todo = 0;
7085   edge_iterator ei;
7086   edge e;
7087 
7088   vn_context_bb = bb;
7089 
7090   /* If we are in loop-closed SSA preserve this state.  This is
7091      relevant when called on regions from outside of FRE/PRE.  */
7092   bool lc_phi_nodes = false;
7093   if (!skip_phis
7094       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7095     FOR_EACH_EDGE (e, ei, bb->preds)
7096       if (e->src->loop_father != e->dest->loop_father
7097 	  && flow_loop_nested_p (e->dest->loop_father,
7098 				 e->src->loop_father))
7099 	{
7100 	  lc_phi_nodes = true;
7101 	  break;
7102 	}
7103 
7104   /* When we visit a loop header substitute into loop info.  */
7105   if (!iterate && eliminate && bb->loop_father->header == bb)
7106     {
7107       /* Keep fields in sync with substitute_in_loop_info.  */
7108       if (bb->loop_father->nb_iterations)
7109 	bb->loop_father->nb_iterations
7110 	  = simplify_replace_tree (bb->loop_father->nb_iterations,
7111 				   NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7112     }
7113 
7114   /* Value-number all defs in the basic-block.  */
7115   if (!skip_phis)
7116     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7117 	 gsi_next (&gsi))
7118       {
7119 	gphi *phi = gsi.phi ();
7120 	tree res = PHI_RESULT (phi);
7121 	vn_ssa_aux_t res_info = VN_INFO (res);
7122 	if (!bb_visited)
7123 	  {
7124 	    gcc_assert (!res_info->visited);
7125 	    res_info->valnum = VN_TOP;
7126 	    res_info->visited = true;
7127 	  }
7128 
7129 	/* When not iterating force backedge values to varying.  */
7130 	visit_stmt (phi, !iterate_phis);
7131 	if (virtual_operand_p (res))
7132 	  continue;
7133 
7134 	/* Eliminate */
7135 	/* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
7136 	   of how we handle backedges and availability.
7137 	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
7138 	tree val = res_info->valnum;
7139 	if (res != val && !iterate && eliminate)
7140 	  {
7141 	    if (tree leader = avail.eliminate_avail (bb, res))
7142 	      {
7143 		if (leader != res
7144 		    /* Preserve loop-closed SSA form.  */
7145 		    && (! lc_phi_nodes
7146 			|| is_gimple_min_invariant (leader)))
7147 		  {
7148 		    if (dump_file && (dump_flags & TDF_DETAILS))
7149 		      {
7150 			fprintf (dump_file, "Replaced redundant PHI node "
7151 				 "defining ");
7152 			print_generic_expr (dump_file, res);
7153 			fprintf (dump_file, " with ");
7154 			print_generic_expr (dump_file, leader);
7155 			fprintf (dump_file, "\n");
7156 		      }
7157 		    avail.eliminations++;
7158 
7159 		    if (may_propagate_copy (res, leader))
7160 		      {
7161 			/* Schedule for removal.  */
7162 			avail.to_remove.safe_push (phi);
7163 			continue;
7164 		      }
7165 		    /* ???  Else generate a copy stmt.  */
7166 		  }
7167 	      }
7168 	  }
7169 	/* Only make defs available that are not already available.  But make
7170 	   sure loop-closed SSA PHI node defs are picked up for
7171 	   downstream uses.  */
7172 	if (lc_phi_nodes
7173 	    || res == val
7174 	    || ! avail.eliminate_avail (bb, res))
7175 	  avail.eliminate_push_avail (bb, res);
7176       }
7177 
7178   /* For empty BBs mark outgoing edges executable.  For non-empty BBs
7179      we do this when processing the last stmt, since it has to happen
7180      before elimination, which would otherwise force GIMPLE_CONDs into
7181      if (1 != 0) style when it sees non-executable edges.  */
7182   if (gsi_end_p (gsi_start_bb (bb)))
7183     {
7184       FOR_EACH_EDGE (e, ei, bb->succs)
7185 	{
7186 	  if (!(e->flags & EDGE_EXECUTABLE))
7187 	    {
7188 	      if (dump_file && (dump_flags & TDF_DETAILS))
7189 		fprintf (dump_file,
7190 			 "marking outgoing edge %d -> %d executable\n",
7191 			 e->src->index, e->dest->index);
7192 	      e->flags |= EDGE_EXECUTABLE;
7193 	      e->dest->flags |= BB_EXECUTABLE;
7194 	    }
7195 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7196 	    {
7197 	      if (dump_file && (dump_flags & TDF_DETAILS))
7198 		fprintf (dump_file,
7199 			 "marking destination block %d reachable\n",
7200 			 e->dest->index);
7201 	      e->dest->flags |= BB_EXECUTABLE;
7202 	    }
7203 	}
7204     }
7205   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7206        !gsi_end_p (gsi); gsi_next (&gsi))
7207     {
7208       ssa_op_iter i;
7209       tree op;
7210       if (!bb_visited)
7211 	{
7212 	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7213 	    {
7214 	      vn_ssa_aux_t op_info = VN_INFO (op);
7215 	      gcc_assert (!op_info->visited);
7216 	      op_info->valnum = VN_TOP;
7217 	      op_info->visited = true;
7218 	    }
7219 
7220 	  /* We somehow have to deal with uses that are not defined
7221 	     in the processed region.  Forcing unvisited uses to
7222 	     varying here doesn't play well with def-use following during
7223 	     expression simplification, so we deal with this by checking
7224 	     the visited flag in SSA_VAL.  */
7225 	}
7226 
7227       visit_stmt (gsi_stmt (gsi));
7228 
7229       gimple *last = gsi_stmt (gsi);
7230       e = NULL;
7231       switch (gimple_code (last))
7232 	{
7233 	case GIMPLE_SWITCH:
7234 	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7235 						(as_a <gswitch *> (last))));
7236 	  break;
7237 	case GIMPLE_COND:
7238 	  {
7239 	    tree lhs = vn_valueize (gimple_cond_lhs (last));
7240 	    tree rhs = vn_valueize (gimple_cond_rhs (last));
7241 	    tree val = gimple_simplify (gimple_cond_code (last),
7242 					boolean_type_node, lhs, rhs,
7243 					NULL, vn_valueize);
7244 	    /* If the condition didn't simplify see if we have recorded
7245 	       an expression from edges taken so far.  */
7246 	    if (! val || TREE_CODE (val) != INTEGER_CST)
7247 	      {
7248 		vn_nary_op_t vnresult;
7249 		tree ops[2];
7250 		ops[0] = lhs;
7251 		ops[1] = rhs;
7252 		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7253 						boolean_type_node, ops,
7254 						&vnresult);
7255 		/* Did we get a predicated value?  */
7256 		if (! val && vnresult && vnresult->predicated_values)
7257 		  {
7258 		    val = vn_nary_op_get_predicated_value (vnresult, bb);
7259 		    if (val && dump_file && (dump_flags & TDF_DETAILS))
7260 		      {
7261 			fprintf (dump_file, "Got predicated value ");
7262 			print_generic_expr (dump_file, val, TDF_NONE);
7263 			fprintf (dump_file, " for ");
7264 			print_gimple_stmt (dump_file, last, TDF_SLIM);
7265 		      }
7266 		  }
7267 	      }
7268 	    if (val)
7269 	      e = find_taken_edge (bb, val);
7270 	    if (! e)
7271 	      {
7272 		/* If we didn't manage to compute the taken edge then
7273 		   push predicated expressions for the condition itself
7274 		   and related conditions to the hashtables.  This allows
7275 		   simplification of redundant conditions which is
7276 		   important as early cleanup.  */
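		/* For example, on the true edge of if (a < b) we record
		   a != b and a <= b as true and a > b and a == b as false,
		   so a dominated if (a == b) can later be answered by the
		   predicated lookup above.  */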
7277 		edge true_e, false_e;
7278 		extract_true_false_edges_from_block (bb, &true_e, &false_e);
7279 		enum tree_code code = gimple_cond_code (last);
7280 		enum tree_code icode
7281 		  = invert_tree_comparison (code, HONOR_NANS (lhs));
7282 		tree ops[2];
7283 		ops[0] = lhs;
7284 		ops[1] = rhs;
7285 		if (do_region
7286 		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
7287 		  true_e = NULL;
7288 		if (do_region
7289 		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
7290 		  false_e = NULL;
7291 		if (true_e)
7292 		  vn_nary_op_insert_pieces_predicated
7293 		    (2, code, boolean_type_node, ops,
7294 		     boolean_true_node, 0, true_e);
7295 		if (false_e)
7296 		  vn_nary_op_insert_pieces_predicated
7297 		    (2, code, boolean_type_node, ops,
7298 		     boolean_false_node, 0, false_e);
7299 		if (icode != ERROR_MARK)
7300 		  {
7301 		    if (true_e)
7302 		      vn_nary_op_insert_pieces_predicated
7303 			(2, icode, boolean_type_node, ops,
7304 			 boolean_false_node, 0, true_e);
7305 		    if (false_e)
7306 		      vn_nary_op_insert_pieces_predicated
7307 			(2, icode, boolean_type_node, ops,
7308 			 boolean_true_node, 0, false_e);
7309 		  }
7310 		/* Relax for non-integers, inverted condition handled
7311 		   above.  */
7312 		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7313 		  {
7314 		    if (true_e)
7315 		      insert_related_predicates_on_edge (code, ops, true_e);
7316 		    if (false_e)
7317 		      insert_related_predicates_on_edge (icode, ops, false_e);
7318 		  }
7319 	      }
7320 	    break;
7321 	  }
7322 	case GIMPLE_GOTO:
7323 	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7324 	  break;
7325 	default:
7326 	  e = NULL;
7327 	}
7328       if (e)
7329 	{
7330 	  todo = TODO_cleanup_cfg;
7331 	  if (!(e->flags & EDGE_EXECUTABLE))
7332 	    {
7333 	      if (dump_file && (dump_flags & TDF_DETAILS))
7334 		fprintf (dump_file,
7335 			 "marking known outgoing %sedge %d -> %d executable\n",
7336 			 e->flags & EDGE_DFS_BACK ? "back-" : "",
7337 			 e->src->index, e->dest->index);
7338 	      e->flags |= EDGE_EXECUTABLE;
7339 	      e->dest->flags |= BB_EXECUTABLE;
7340 	    }
7341 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7342 	    {
7343 	      if (dump_file && (dump_flags & TDF_DETAILS))
7344 		fprintf (dump_file,
7345 			 "marking destination block %d reachable\n",
7346 			 e->dest->index);
7347 	      e->dest->flags |= BB_EXECUTABLE;
7348 	    }
7349 	}
7350       else if (gsi_one_before_end_p (gsi))
7351 	{
7352 	  FOR_EACH_EDGE (e, ei, bb->succs)
7353 	    {
7354 	      if (!(e->flags & EDGE_EXECUTABLE))
7355 		{
7356 		  if (dump_file && (dump_flags & TDF_DETAILS))
7357 		    fprintf (dump_file,
7358 			     "marking outgoing edge %d -> %d executable\n",
7359 			     e->src->index, e->dest->index);
7360 		  e->flags |= EDGE_EXECUTABLE;
7361 		  e->dest->flags |= BB_EXECUTABLE;
7362 		}
7363 	      else if (!(e->dest->flags & BB_EXECUTABLE))
7364 		{
7365 		  if (dump_file && (dump_flags & TDF_DETAILS))
7366 		    fprintf (dump_file,
7367 			     "marking destination block %d reachable\n",
7368 			     e->dest->index);
7369 		  e->dest->flags |= BB_EXECUTABLE;
7370 		}
7371 	    }
7372 	}
7373 
7374       /* Eliminate.  That also pushes to avail.  */
7375       if (eliminate && ! iterate)
7376 	avail.eliminate_stmt (bb, &gsi);
7377       else
7378 	/* If not eliminating, make all not already available defs
7379 	   available.  */
7380 	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7381 	  if (! avail.eliminate_avail (bb, op))
7382 	    avail.eliminate_push_avail (bb, op);
7383     }
7384 
7385   /* Eliminate in destination PHI arguments.  Always substitute in dest
7386      PHIs, even for non-executable edges.  This handles region
7387      exits PHIs.  */
7388   if (!iterate && eliminate)
7389     FOR_EACH_EDGE (e, ei, bb->succs)
7390       for (gphi_iterator gsi = gsi_start_phis (e->dest);
7391 	   !gsi_end_p (gsi); gsi_next (&gsi))
7392 	{
7393 	  gphi *phi = gsi.phi ();
7394 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7395 	  tree arg = USE_FROM_PTR (use_p);
7396 	  if (TREE_CODE (arg) != SSA_NAME
7397 	      || virtual_operand_p (arg))
7398 	    continue;
7399 	  tree sprime;
7400 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
7401 	    {
7402 	      sprime = SSA_VAL (arg);
7403 	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
7404 			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
7405 	    }
7406 	  else
7407 	    /* Look for something available at the definition block of the argument.
7408 	       This avoids inconsistencies between availability there which
7409 	       decides if the stmt can be removed and availability at the
7410 	       use site.  The SSA property ensures that things available
7411 	       at the definition are also available at uses.  */
7412 	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7413 					    arg);
7414 	  if (sprime
7415 	      && sprime != arg
7416 	      && may_propagate_copy (arg, sprime))
7417 	    propagate_value (use_p, sprime);
7418 	}
7419 
7420   vn_context_bb = NULL;
7421   return todo;
7422 }
7423 
7424 /* Unwind state per basic-block.  */
7425 
7426 struct unwind_state
7427 {
7428   /* Times this block has been visited.  */
7429   unsigned visited;
7430   /* Whether to handle this as iteration point or whether to treat
7431      incoming backedge PHI values as varying.  */
7432   bool iterate;
7433   /* Maximum RPO index this block is reachable from.  */
7434   int max_rpo;
7435   /* Unwind state.  */
7436   void *ob_top;
7437   vn_reference_t ref_top;
7438   vn_phi_t phi_top;
7439   vn_nary_op_t nary_top;
7440   vn_avail *avail_top;
7441 };
7442 
7443 /* Unwind the RPO VN state for iteration.  */
7444 
7445 static void
7446 do_unwind (unwind_state *to, rpo_elim &avail)
7447 {
7448   gcc_assert (to->iterate);
7449   for (; last_inserted_nary != to->nary_top;
7450        last_inserted_nary = last_inserted_nary->next)
7451     {
7452       vn_nary_op_t *slot;
7453       slot = valid_info->nary->find_slot_with_hash
7454 	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7455       /* Predication causes the need to restore previous state.  */
7456       if ((*slot)->unwind_to)
7457 	*slot = (*slot)->unwind_to;
7458       else
7459 	valid_info->nary->clear_slot (slot);
7460     }
7461   for (; last_inserted_phi != to->phi_top;
7462        last_inserted_phi = last_inserted_phi->next)
7463     {
7464       vn_phi_t *slot;
7465       slot = valid_info->phis->find_slot_with_hash
7466 	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7467       valid_info->phis->clear_slot (slot);
7468     }
7469   for (; last_inserted_ref != to->ref_top;
7470        last_inserted_ref = last_inserted_ref->next)
7471     {
7472       vn_reference_t *slot;
7473       slot = valid_info->references->find_slot_with_hash
7474 	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7475       (*slot)->operands.release ();
7476       valid_info->references->clear_slot (slot);
7477     }
7478   obstack_free (&vn_tables_obstack, to->ob_top);
7479 
7480   /* Prune [rpo_idx, ] from avail.  */
7481   for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
7482     {
7483       vn_ssa_aux_t val = last_pushed_avail;
7484       vn_avail *av = val->avail;
7485       val->avail = av->next;
7486       last_pushed_avail = av->next_undo;
7487       av->next = avail.m_avail_freelist;
7488       avail.m_avail_freelist = av;
7489     }
7490 }
7491 
7492 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7493    If ITERATE is true then treat backedges optimistically as not
7494    executed and iterate.  If ELIMINATE is true then perform
7495    elimination, otherwise leave that to the caller.  */
7496 
7497 static unsigned
7498 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7499 	   bool iterate, bool eliminate)
7500 {
7501   unsigned todo = 0;
7502 
7503   /* We currently do not support region-based iteration when
7504      elimination is requested.  */
7505   gcc_assert (!entry || !iterate || !eliminate);
7506   /* When iterating we need loop info up-to-date.  */
7507   gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7508 
7509   bool do_region = entry != NULL;
7510   if (!do_region)
7511     {
7512       entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7513       exit_bbs = BITMAP_ALLOC (NULL);
7514       bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7515     }
7516 
7517   /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7518      re-mark those that are contained in the region.  */
7519   edge_iterator ei;
7520   edge e;
7521   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7522     e->flags &= ~EDGE_DFS_BACK;
7523 
7524   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7525   auto_vec<std::pair<int, int> > toplevel_scc_extents;
7526   int n = rev_post_order_and_mark_dfs_back_seme
7527     (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7528 
7529   if (!do_region)
7530     BITMAP_FREE (exit_bbs);
7531 
7532   /* If there are any non-DFS_BACK edges into entry->dest skip
7533      processing PHI nodes for that block.  This supports
7534      value-numbering loop bodies w/o the actual loop.  */
7535   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7536     if (e != entry
7537 	&& !(e->flags & EDGE_DFS_BACK))
7538       break;
7539   bool skip_entry_phis = e != NULL;
7540   if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7541     fprintf (dump_file, "Region does not contain all edges into "
7542 	     "the entry block, skipping its PHIs.\n");
7543 
7544   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7545   for (int i = 0; i < n; ++i)
7546     bb_to_rpo[rpo[i]] = i;
7547 
7548   unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7549 
7550   rpo_elim avail (entry->dest);
7551   rpo_avail = &avail;
7552 
7553   /* Verify we have no extra entries into the region.  */
7554   if (flag_checking && do_region)
7555     {
7556       auto_bb_flag bb_in_region (fn);
7557       for (int i = 0; i < n; ++i)
7558 	{
7559 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7560 	  bb->flags |= bb_in_region;
7561 	}
7562       /* We can't merge the first two loops because we cannot rely
7563          on EDGE_DFS_BACK for edges not within the region.  But if
7564 	 we decide to always have the bb_in_region flag we can
7565 	 do the checking during the RPO walk itself (but then it's
7566 	 also easy to handle MEME conservatively).  */
7567       for (int i = 0; i < n; ++i)
7568 	{
7569 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7570 	  edge e;
7571 	  edge_iterator ei;
7572 	  FOR_EACH_EDGE (e, ei, bb->preds)
7573 	    gcc_assert (e == entry
7574 			|| (skip_entry_phis && bb == entry->dest)
7575 			|| (e->src->flags & bb_in_region));
7576 	}
7577       for (int i = 0; i < n; ++i)
7578 	{
7579 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7580 	  bb->flags &= ~bb_in_region;
7581 	}
7582     }
7583 
7584   /* Create the VN state.  For the initial size of the various hashtables
7585      use a heuristic based on region size and number of SSA names.  */
7586   unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7587 			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7588   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7589   next_value_id = 1;
7590   next_constant_value_id = -1;
7591 
7592   vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7593   gcc_obstack_init (&vn_ssa_aux_obstack);
7594 
7595   gcc_obstack_init (&vn_tables_obstack);
7596   gcc_obstack_init (&vn_tables_insert_obstack);
7597   valid_info = XCNEW (struct vn_tables_s);
7598   allocate_vn_table (valid_info, region_size);
7599   last_inserted_ref = NULL;
7600   last_inserted_phi = NULL;
7601   last_inserted_nary = NULL;
7602   last_pushed_avail = NULL;
7603 
7604   vn_valueize = rpo_vn_valueize;
7605 
7606   /* Initialize the unwind state and edge/BB executable state.  */
7607   unsigned curr_scc = 0;
7608   for (int i = 0; i < n; ++i)
7609     {
7610       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7611       rpo_state[i].visited = 0;
7612       rpo_state[i].max_rpo = i;
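      /* In the non-iterating mode widen max_rpo to the end of the
	 enclosing toplevel SCC; the greedy worklist processing below uses
	 this to spot predecessor edges whose executable state cannot yet
	 be trusted.  */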
7613       if (!iterate && curr_scc < toplevel_scc_extents.length ())
7614 	{
7615 	  if (i >= toplevel_scc_extents[curr_scc].first
7616 	      && i <= toplevel_scc_extents[curr_scc].second)
7617 	    rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7618 	  if (i == toplevel_scc_extents[curr_scc].second)
7619 	    curr_scc++;
7620 	}
7621       bb->flags &= ~BB_EXECUTABLE;
7622       bool has_backedges = false;
7623       edge e;
7624       edge_iterator ei;
7625       FOR_EACH_EDGE (e, ei, bb->preds)
7626 	{
7627 	  if (e->flags & EDGE_DFS_BACK)
7628 	    has_backedges = true;
7629 	  e->flags &= ~EDGE_EXECUTABLE;
7630 	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7631 	    continue;
7632 	}
7633       rpo_state[i].iterate = iterate && has_backedges;
7634     }
7635   entry->flags |= EDGE_EXECUTABLE;
7636   entry->dest->flags |= BB_EXECUTABLE;
7637 
7638   /* As a heuristic to improve compile-time we handle only the N innermost
7639      loops and the outermost one optimistically.  */
7640   if (iterate)
7641     {
7642       loop_p loop;
7643       unsigned max_depth = param_rpo_vn_max_loop_depth;
7644       FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
7645 	if (loop_depth (loop) > max_depth)
7646 	  for (unsigned i = 2;
7647 	       i < loop_depth (loop) - max_depth; ++i)
7648 	    {
7649 	      basic_block header = superloop_at_depth (loop, i)->header;
7650 	      bool non_latch_backedge = false;
7651 	      edge e;
7652 	      edge_iterator ei;
7653 	      FOR_EACH_EDGE (e, ei, header->preds)
7654 		if (e->flags & EDGE_DFS_BACK)
7655 		  {
7656 		    /* There can be a non-latch backedge into the header
7657 		       which is part of an outer irreducible region.  We
7658 		       cannot avoid iterating this block then.  */
7659 		    if (!dominated_by_p (CDI_DOMINATORS,
7660 					 e->src, e->dest))
7661 		      {
7662 			if (dump_file && (dump_flags & TDF_DETAILS))
7663 			  fprintf (dump_file, "non-latch backedge %d -> %d "
7664 				   "forces iteration of loop %d\n",
7665 				   e->src->index, e->dest->index, loop->num);
7666 			non_latch_backedge = true;
7667 		      }
7668 		    else
7669 		      e->flags |= EDGE_EXECUTABLE;
7670 		  }
7671 	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7672 	    }
7673     }
7674 
7675   uint64_t nblk = 0;
7676   int idx = 0;
7677   if (iterate)
7678     /* Go and process all blocks, iterating as necessary.  */
7679     do
7680       {
7681 	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7682 
7683 	/* If the block has incoming backedges remember unwind state.  This
7684 	   is required even for non-executable blocks since in irreducible
7685 	   regions we might reach them via the backedge and re-start iterating
7686 	   from there.
7687 	   Note we can individually mark blocks with incoming backedges to
7688 	   not iterate, in which case we then handle PHIs conservatively.  We do that
7689 	   heuristically to reduce compile-time for degenerate cases.  */
7690 	if (rpo_state[idx].iterate)
7691 	  {
7692 	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7693 	    rpo_state[idx].ref_top = last_inserted_ref;
7694 	    rpo_state[idx].phi_top = last_inserted_phi;
7695 	    rpo_state[idx].nary_top = last_inserted_nary;
7696 	    rpo_state[idx].avail_top
7697 	      = last_pushed_avail ? last_pushed_avail->avail : NULL;
7698 	  }
7699 
7700 	if (!(bb->flags & BB_EXECUTABLE))
7701 	  {
7702 	    if (dump_file && (dump_flags & TDF_DETAILS))
7703 	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
7704 		       idx, bb->index);
7705 	    idx++;
7706 	    continue;
7707 	  }
7708 
7709 	if (dump_file && (dump_flags & TDF_DETAILS))
7710 	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7711 	nblk++;
7712 	todo |= process_bb (avail, bb,
7713 			    rpo_state[idx].visited != 0,
7714 			    rpo_state[idx].iterate,
7715 			    iterate, eliminate, do_region, exit_bbs, false);
7716 	rpo_state[idx].visited++;
7717 
7718 	/* Verify whether changed values flow over executable outgoing
7719 	   backedges and whether those change destination PHI values
7720 	   (that's the thing we can easily verify).  Reduce over all such
7721 	   edges to the farthest away PHI.  */
7722 	int iterate_to = -1;
7723 	edge_iterator ei;
7724 	edge e;
7725 	FOR_EACH_EDGE (e, ei, bb->succs)
7726 	  if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7727 	      == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7728 	      && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7729 	    {
7730 	      int destidx = bb_to_rpo[e->dest->index];
7731 	      if (!rpo_state[destidx].visited)
7732 		{
7733 		  if (dump_file && (dump_flags & TDF_DETAILS))
7734 		    fprintf (dump_file, "Unvisited destination %d\n",
7735 			     e->dest->index);
7736 		  if (iterate_to == -1 || destidx < iterate_to)
7737 		    iterate_to = destidx;
7738 		  continue;
7739 		}
7740 	      if (dump_file && (dump_flags & TDF_DETAILS))
7741 		fprintf (dump_file, "Looking for changed values of backedge"
7742 			 " %d->%d destination PHIs\n",
7743 			 e->src->index, e->dest->index);
7744 	      vn_context_bb = e->dest;
7745 	      gphi_iterator gsi;
7746 	      for (gsi = gsi_start_phis (e->dest);
7747 		   !gsi_end_p (gsi); gsi_next (&gsi))
7748 		{
7749 		  bool inserted = false;
		  /* While we'd ideally just iterate on value changes
		     we CSE PHIs and do that even across basic-block
		     boundaries.  So even hashtable state changes can
		     be important (they are roughly equivalent to
		     PHI argument value changes).  To avoid iterating
		     excessively because of that we use GF_PLF_1 to
		     track whether a PHI was CSEd to.  */
		  bool phival_changed;
		  if ((phival_changed = visit_phi (gsi.phi (),
						   &inserted, false))
		      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
		    {
		      if (!phival_changed
			  && dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "PHI was CSEd and hashtable "
				 "state changed\n");
		      if (iterate_to == -1 || destidx < iterate_to)
			iterate_to = destidx;
		      break;
		    }
		}
	      vn_context_bb = NULL;
	    }
	if (iterate_to != -1)
	  {
	    do_unwind (&rpo_state[iterate_to], avail);
	    idx = iterate_to;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Iterating to %d BB%d\n",
		       iterate_to, rpo[iterate_to]);
	    continue;
	  }

	idx++;
      }
    while (idx < n);

  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
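      /* The worklist is keyed by RPO index and bitmap_first_set_bit
	 extracts the smallest index first, so reachable blocks are
	 still processed in reverse postorder.  */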
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
	{
	  int idx = bitmap_first_set_bit (worklist);
	  bitmap_clear_bit (worklist, idx);
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
	  gcc_assert ((bb->flags & BB_EXECUTABLE)
		      && !rpo_state[idx].visited);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

	  /* When we run into predecessor edges whose executable state we
	     cannot trust, mark them executable so PHI processing will
	     be conservative.
	     ???  Do we need to force arguments flowing over that edge
	     to be varying or will they even always be?  */
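	  /* Marking the edge executable makes PHI processing take its
	     argument into account instead of ignoring it, which can only
	     push the merged result towards VARYING.  */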
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!(e->flags & EDGE_EXECUTABLE)
		&& (bb == entry->dest
		    || (!rpo_state[bb_to_rpo[e->src->index]].visited
			&& (rpo_state[bb_to_rpo[e->src->index]].max_rpo
			    >= (int)idx))))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Cannot trust state of predecessor "
			   "edge %d -> %d, marking executable\n",
			   e->src->index, e->dest->index);
		e->flags |= EDGE_EXECUTABLE;
	      }

	  nblk++;
	  todo |= process_bb (avail, bb, false, false, false, eliminate,
			      do_region, exit_bbs,
			      skip_entry_phis && bb == entry->dest);
	  rpo_state[idx].visited++;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if ((e->flags & EDGE_EXECUTABLE)
		&& e->dest->index != EXIT_BLOCK
		&& (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
		&& !rpo_state[bb_to_rpo[e->dest->index]].visited)
	      bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
	}
    }

  /* Collect statistics about the RPO walk for the statistics machinery
     and the dump file.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
	nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
				  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
	max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
	{
	  navail++;
	  av = av->next;
	}
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
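  /* 10*nblk / nex is the average number of visits per executable block
     in fixed point with one decimal digit; for example 23 visits of 10
     executable blocks is recorded (and dumped below) as 2.3 iterations.  */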
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
			      vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
	       " blocks in total discovering %d executable blocks iterating "
	       "%d.%d times, a block was visited max. %u times\n",
	       n, nblk, nex,
	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
	       max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
	       "and %" PRIu64 " lattice elements\n",
	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
	{
	  /* Elimination for region-based VN needs to be done within the
	     RPO walk.  */
	  gcc_assert (! do_region);
	  /* Note we can't use avail.walk here because that gets confused
	     by the existing availability and it will be less efficient
	     as well.  */
	  todo |= eliminate_with_rpo_vn (NULL);
	}
      else
	todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}

/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
   are not considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
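
/* A rough usage sketch (not taken from an actual caller): to value-number
   a single-exit loop body as a SEME region one would collect the blocks
   terminating the region in a bitmap and hand the preheader edge to the
   entry point above, e.g.

     bitmap exit_bbs = BITMAP_ALLOC (NULL);
     bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
     unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
     BITMAP_FREE (exit_bbs);

   Exact region setup differs between callers.  */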


namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
  bool may_iterate;
}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  if (scev_initialized_p ())
    scev_reset_htab ();

  /* For late FRE after IVOPTs and unrolling, see if we can clear some
     TREE_ADDRESSABLE flags and rewrite more memory references into SSA
     form.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE