1 /* SCC value numbering for trees
2    Copyright (C) 2006-2021 Free Software Foundation, Inc.
3    Contributed by Daniel Berlin <dan@dberlin.org>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "tree-ssa-sccvn.h"
74 
75 /* This algorithm is based on the SCC algorithm presented by Keith
76    Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
77    (http://citeseer.ist.psu.edu/41805.html).  In
78    straight-line code, it is equivalent to a regular hash-based value
79    numbering performed in reverse postorder.
80 
81    For code with cycles, there are two alternatives, both of which
82    require keeping the hashtables separate from the actual list of
83    value numbers for SSA names.
84 
85    1. Iterate value numbering in an RPO walk of the blocks, removing
86    all the entries from the hashtable after each iteration (but
87    keeping the SSA name->value number mapping between iterations).
88    Iterate until it does not change.
89 
90    2. Perform value numbering as part of an SCC walk on the SSA graph,
91    iterating only the cycles in the SSA graph until they do not change
92    (using a separate, optimistic hashtable for value numbering the SCC
93    operands).
94 
95    The second is not just faster in practice (because most SSA graph
96    cycles do not involve all the variables in the graph), it also has
97    some nice properties.
98 
99    One of these nice properties is that when we pop an SCC off the
100    stack, we are guaranteed to have processed all the operands coming from
101    *outside of that SCC*, so we do not need to do anything special to
102    ensure they have value numbers.
103 
104    Another nice property is that the SCC walk is done as part of a DFS
105    of the SSA graph, which makes it easy to perform combining and
106    simplifying operations at the same time.
107 
108    The code below is deliberately written in a way that makes it easy
109    to separate the SCC walk from the other work it does.
110 
111    In order to propagate constants through the code, we track which
112    expressions contain constants, and use those while folding.  In
113    theory, we could also track expressions whose value numbers are
114    replaced, in case we end up folding based on expression
115    identities.
116 
117    In order to value number memory, we assign value numbers to vuses.
118    This enables us to note that, for example, stores to the same
119    address of the same value from the same starting memory states are
120    equivalent.
121    TODO:
122 
123    1. We can iterate only the changing portions of the SCCs, but
124    I have not seen an SCC big enough for this to be a win.
125    2. If you differentiate between phi nodes for loops and phi nodes
126    for if-then-else, you can properly consider phi nodes in different
127    blocks for equivalence.
128    3. We could value number vuses in more cases, particularly, whole
129    structure copies.
130 */
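
/* As a rough illustration of alternative 1 above, the following
   non-compiled sketch performs plain hash-based value numbering over a
   statement list assumed to be in reverse postorder, discarding the
   expression table after every sweep while keeping the name -> value
   mapping, and iterating to a fixed point.  It is deliberately
   simplified (expressions are token vectors, names are strings) and is
   not part of GCC.  */
#if 0
#include <string>
#include <unordered_map>
#include <vector>

/* One toy statement: an SSA name and its right-hand side as tokens,
   e.g. { "x_1", { "+", "a_2", "b_3" } }.  */
struct toy_stmt
{
  std::string lhs;
  std::vector<std::string> rhs;
};

static std::unordered_map<std::string, std::string>
toy_rpo_vn (const std::vector<toy_stmt> &stmts)
{
  std::unordered_map<std::string, std::string> valnum;
  /* Pessimistic initialization: every name values to itself.  */
  for (const toy_stmt &s : stmts)
    valnum[s.lhs] = s.lhs;
  bool changed = true;
  while (changed)
    {
      changed = false;
      /* Fresh expression table for each iteration.  */
      std::unordered_map<std::string, std::string> exprs;
      for (const toy_stmt &s : stmts)
	{
	  /* Valueize the operands and build a lookup key.  */
	  std::string key;
	  for (const std::string &op : s.rhs)
	    {
	      auto it = valnum.find (op);
	      key += (it != valnum.end () ? it->second : op) + " ";
	    }
	  /* The first name recorded for a key supplies the value
	     number for all later occurrences of that expression.  */
	  auto ins = exprs.emplace (key, valnum[s.lhs]);
	  if (valnum[s.lhs] != ins.first->second)
	    {
	      valnum[s.lhs] = ins.first->second;
	      changed = true;
	    }
	}
    }
  return valnum;
}
#endif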
131 
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
133 #define BB_EXECUTABLE BB_VISITED
134 
135 static vn_lookup_kind default_vn_walk_kind;
136 
137 /* vn_nary_op hashtable helpers.  */
138 
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
140 {
141   typedef vn_nary_op_s *compare_type;
142   static inline hashval_t hash (const vn_nary_op_s *);
143   static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
144 };
145 
146 /* Return the computed hashcode for nary operation VNO1.  */
147 
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
150 {
151   return vno1->hashcode;
152 }
153 
154 /* Compare nary operations VNO1 and VNO2 and return true if they are
155    equivalent.  */
156 
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
159 {
160   return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
161 }
162 
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
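
/* A minimal, non-compiled sketch of how a hasher like the above is
   consumed: lookups and insertions go through find_slot_with_hash with
   the precomputed hashcode, and the hasher's equal () resolves
   collisions.  The real helpers for this table (vn_nary_op_insert_into
   and friends) appear further down in this file.  */
#if 0
static tree
sketch_nary_lookup_or_insert (vn_nary_op_table_type *table, vn_nary_op_t vno)
{
  vn_nary_op_s **slot
    = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  if (*slot)
    /* An equivalent operation is already recorded; reuse its result.  */
    return (*slot)->u.result;
  *slot = vno;
  return vno->u.result;
}
#endif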
165 
166 
167 /* vn_phi hashtable helpers.  */
168 
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
171 
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
173 {
174   static inline hashval_t hash (const vn_phi_s *);
175   static inline bool equal (const vn_phi_s *, const vn_phi_s *);
176 };
177 
178 /* Return the computed hashcode for phi operation VP1.  */
179 
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
182 {
183   return vp1->hashcode;
184 }
185 
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
187 
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
190 {
191   return vp1 == vp2 || vn_phi_eq (vp1, vp2);
192 }
193 
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
196 
197 
198 /* Compare two reference operands P1 and P2 for equality.  Return true if
199    they are equal, and false otherwise.  */
200 
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
203 {
204   const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205   const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
206 
207   return (vro1->opcode == vro2->opcode
208 	  /* We do not care for differences in type qualification.  */
209 	  && (vro1->type == vro2->type
210 	      || (vro1->type && vro2->type
211 		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 					 TYPE_MAIN_VARIANT (vro2->type))))
213 	  && expressions_equal_p (vro1->op0, vro2->op0)
214 	  && expressions_equal_p (vro1->op1, vro2->op1)
215 	  && expressions_equal_p (vro1->op2, vro2->op2));
216 }
217 
218 /* Free a reference operation structure VR.  */
219 
220 static inline void
221 free_reference (vn_reference_s *vr)
222 {
223   vr->operands.release ();
224 }
225 
226 
227 /* vn_reference hashtable helpers.  */
228 
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
230 {
231   static inline hashval_t hash (const vn_reference_s *);
232   static inline bool equal (const vn_reference_s *, const vn_reference_s *);
233 };
234 
235 /* Return the hashcode for a given reference operation VR1.  */
236 
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
239 {
240   return vr1->hashcode;
241 }
242 
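/* Return true if reference operations V and C are equal.  */
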
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
245 {
246   return v == c || vn_reference_eq (v, c);
247 }
248 
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
251 
252 /* Pretty-print OPS to OUTFILE.  */
253 
254 void
255 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
256 {
257   vn_reference_op_t vro;
258   unsigned int i;
259   fprintf (outfile, "{");
260   for (i = 0; ops.iterate (i, &vro); i++)
261     {
262       bool closebrace = false;
263       if (vro->opcode != SSA_NAME
264 	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
265 	{
266 	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
267 	  if (vro->op0)
268 	    {
269 	      fprintf (outfile, "<");
270 	      closebrace = true;
271 	    }
272 	}
273       if (vro->op0)
274 	{
275 	  print_generic_expr (outfile, vro->op0);
276 	  if (vro->op1)
277 	    {
278 	      fprintf (outfile, ",");
279 	      print_generic_expr (outfile, vro->op1);
280 	    }
281 	  if (vro->op2)
282 	    {
283 	      fprintf (outfile, ",");
284 	      print_generic_expr (outfile, vro->op2);
285 	    }
286 	}
287       if (closebrace)
288 	fprintf (outfile, ">");
289       if (i != ops.length () - 1)
290 	fprintf (outfile, ",");
291     }
292   fprintf (outfile, "}");
293 }
294 
295 DEBUG_FUNCTION void
296 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
297 {
298   print_vn_reference_ops (stderr, ops);
299   fputc ('\n', stderr);
300 }
301 
302 /* The set of VN hashtables.  */
303 
304 typedef struct vn_tables_s
305 {
306   vn_nary_op_table_type *nary;
307   vn_phi_table_type *phis;
308   vn_reference_table_type *references;
309 } *vn_tables_t;
310 
311 
312 /* vn_constant hashtable helpers.  */
313 
314 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
315 {
316   static inline hashval_t hash (const vn_constant_s *);
317   static inline bool equal (const vn_constant_s *, const vn_constant_s *);
318 };
319 
320 /* Hash table hash function for vn_constant_t.  */
321 
322 inline hashval_t
323 vn_constant_hasher::hash (const vn_constant_s *vc1)
324 {
325   return vc1->hashcode;
326 }
327 
328 /* Hash table equality function for vn_constant_t.  */
329 
330 inline bool
331 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
332 {
333   if (vc1->hashcode != vc2->hashcode)
334     return false;
335 
336   return vn_constant_eq_with_type (vc1->constant, vc2->constant);
337 }
338 
339 static hash_table<vn_constant_hasher> *constant_to_value_id;
340 
341 
342 /* Obstack we allocate the vn-tables elements from.  */
343 static obstack vn_tables_obstack;
344 /* Special obstack we never unwind.  */
345 static obstack vn_tables_insert_obstack;
346 
347 static vn_reference_t last_inserted_ref;
348 static vn_phi_t last_inserted_phi;
349 static vn_nary_op_t last_inserted_nary;
350 static vn_ssa_aux_t last_pushed_avail;
351 
352 /* Valid hashtables storing information we have proven to be
353    correct.  */
354 static vn_tables_t valid_info;
355 
356 
357 /* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
358    an SSA name, otherwise just return it.  */
359 tree (*vn_valueize) (tree);
360 static tree
361 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
362 {
363   basic_block saved_vn_context_bb = vn_context_bb;
364   /* Look for something available at the definition block of the argument.
365      This avoids inconsistencies between availability there which
366      decides if the stmt can be removed and availability at the
367      use site.  The SSA property ensures that things available
368      at the definition are also available at uses.  */
369   if (!SSA_NAME_IS_DEFAULT_DEF (t))
370     vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
371   tree res = vn_valueize (t);
372   vn_context_bb = saved_vn_context_bb;
373   return res;
374 }
375 
376 
377 /* This represents the top of the VN lattice, which is the universal
378    value.  */
379 
380 tree VN_TOP;
381 
382 /* Unique counter for our value ids.  */
383 
384 static unsigned int next_value_id;
385 static int next_constant_value_id;
386 
387 
388 /* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
389    are allocated on an obstack for locality reasons, and to free them
390    without looping over the vec.  */
391 
392 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
393 {
394   typedef vn_ssa_aux_t value_type;
395   typedef tree compare_type;
396   static inline hashval_t hash (const value_type &);
397   static inline bool equal (const value_type &, const compare_type &);
398   static inline void mark_deleted (value_type &) {}
399   static const bool empty_zero_p = true;
400   static inline void mark_empty (value_type &e) { e = NULL; }
401   static inline bool is_deleted (value_type &) { return false; }
402   static inline bool is_empty (value_type &e) { return e == NULL; }
403 };
404 
405 hashval_t
406 vn_ssa_aux_hasher::hash (const value_type &entry)
407 {
408   return SSA_NAME_VERSION (entry->name);
409 }
410 
411 bool
412 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
413 {
414   return name == entry->name;
415 }
416 
417 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
418 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
419 static struct obstack vn_ssa_aux_obstack;
420 
421 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
422 static unsigned int vn_nary_length_from_stmt (gimple *);
423 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
424 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
425 					    vn_nary_op_table_type *, bool);
426 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
427 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
428 					 enum tree_code, tree, tree *);
429 static tree vn_lookup_simplify_result (gimple_match_op *);
430 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
431 	  (tree, alias_set_type, alias_set_type, tree,
432 	   vec<vn_reference_op_s, va_heap>, tree);
433 
434 /* Return whether there is value numbering information for a given SSA name.  */
435 
436 bool
437 has_VN_INFO (tree name)
438 {
439   return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
440 }
441 
442 vn_ssa_aux_t
443 VN_INFO (tree name)
444 {
445   vn_ssa_aux_t *res
446     = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
447 					    INSERT);
448   if (*res != NULL)
449     return *res;
450 
451   vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
452   memset (newinfo, 0, sizeof (struct vn_ssa_aux));
453   newinfo->name = name;
454   newinfo->valnum = VN_TOP;
455   /* We are using the visited flag to handle uses with defs not within the
456      region being value-numbered.  */
457   newinfo->visited = false;
458 
459   /* Given we create the VN_INFOs on demand now, we have to do
460      initialization different from VN_TOP here.  */
461   if (SSA_NAME_IS_DEFAULT_DEF (name))
462     switch (TREE_CODE (SSA_NAME_VAR (name)))
463       {
464       case VAR_DECL:
465         /* All undefined vars are VARYING.  */
466         newinfo->valnum = name;
467 	newinfo->visited = true;
468 	break;
469 
470       case PARM_DECL:
471 	/* Parameters are VARYING but we can record a condition
472 	   if we know it is a non-NULL pointer.  */
473 	newinfo->visited = true;
474 	newinfo->valnum = name;
475 	if (POINTER_TYPE_P (TREE_TYPE (name))
476 	    && nonnull_arg_p (SSA_NAME_VAR (name)))
477 	  {
478 	    tree ops[2];
479 	    ops[0] = name;
480 	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
481 	    vn_nary_op_t nary;
482 	    /* Allocate from non-unwinding stack.  */
483 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
484 	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
485 					 boolean_type_node, ops);
486 	    nary->predicated_values = 0;
487 	    nary->u.result = boolean_true_node;
488 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
489 	    gcc_assert (nary->unwind_to == NULL);
490 	    /* Also do not link it into the undo chain.  */
491 	    last_inserted_nary = nary->next;
492 	    nary->next = (vn_nary_op_t)(void *)-1;
493 	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
494 	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
495 					 boolean_type_node, ops);
496 	    nary->predicated_values = 0;
497 	    nary->u.result = boolean_false_node;
498 	    vn_nary_op_insert_into (nary, valid_info->nary, true);
499 	    gcc_assert (nary->unwind_to == NULL);
500 	    last_inserted_nary = nary->next;
501 	    nary->next = (vn_nary_op_t)(void *)-1;
502 	    if (dump_file && (dump_flags & TDF_DETAILS))
503 	      {
504 		fprintf (dump_file, "Recording ");
505 		print_generic_expr (dump_file, name, TDF_SLIM);
506 		fprintf (dump_file, " != 0\n");
507 	      }
508 	  }
509 	break;
510 
511       case RESULT_DECL:
512 	/* If the result is passed by invisible reference the default
513 	   def is initialized, otherwise it's uninitialized.  Still
514 	   undefined is varying.  */
515 	newinfo->visited = true;
516 	newinfo->valnum = name;
517 	break;
518 
519       default:
520 	gcc_unreachable ();
521       }
522   return newinfo;
523 }
524 
525 /* Return the SSA value of X.  */
526 
527 inline tree
528 SSA_VAL (tree x, bool *visited = NULL)
529 {
530   vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
531   if (visited)
532     *visited = tem && tem->visited;
533   return tem && tem->visited ? tem->valnum : x;
534 }
535 
536 /* Return the SSA value of the VUSE x, supporting released VDEFs
537    during elimination which will value-number the VDEF to the
538    associated VUSE (but not substitute in the whole lattice).  */
539 
540 static inline tree
541 vuse_ssa_val (tree x)
542 {
543   if (!x)
544     return NULL_TREE;
545 
546   do
547     {
548       x = SSA_VAL (x);
549       gcc_assert (x != VN_TOP);
550     }
551   while (SSA_NAME_IN_FREE_LIST (x));
552 
553   return x;
554 }
555 
556 /* Similar to the above but used as callback for walk_non_aliased_vuses
557    and thus should stop at unvisited VUSE to not walk across region
558    boundaries.  */
559 
560 static tree
561 vuse_valueize (tree vuse)
562 {
563   do
564     {
565       bool visited;
566       vuse = SSA_VAL (vuse, &visited);
567       if (!visited)
568 	return NULL_TREE;
569       gcc_assert (vuse != VN_TOP);
570     }
571   while (SSA_NAME_IN_FREE_LIST (vuse));
572   return vuse;
573 }
574 
575 
576 /* Return the vn_kind the expression computed by STMT should be
577    associated with.  */
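
/* A few illustrative (not exhaustive) classifications:
     x_1 = a_2 + b_3;          -> VN_NARY
     x_1 = ~a_2;               -> VN_NARY
     x_1 = *p_2;               -> VN_REFERENCE
     x_1 = foo (a_2);          -> VN_REFERENCE (GIMPLE_CALL)
     x_1 = PHI <a_2, b_3>      -> VN_PHI
     x_1 = &a;                 -> VN_CONSTANT if invariant, else VN_REFERENCE  */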
578 
579 enum vn_kind
580 vn_get_stmt_kind (gimple *stmt)
581 {
582   switch (gimple_code (stmt))
583     {
584     case GIMPLE_CALL:
585       return VN_REFERENCE;
586     case GIMPLE_PHI:
587       return VN_PHI;
588     case GIMPLE_ASSIGN:
589       {
590 	enum tree_code code = gimple_assign_rhs_code (stmt);
591 	tree rhs1 = gimple_assign_rhs1 (stmt);
592 	switch (get_gimple_rhs_class (code))
593 	  {
594 	  case GIMPLE_UNARY_RHS:
595 	  case GIMPLE_BINARY_RHS:
596 	  case GIMPLE_TERNARY_RHS:
597 	    return VN_NARY;
598 	  case GIMPLE_SINGLE_RHS:
599 	    switch (TREE_CODE_CLASS (code))
600 	      {
601 	      case tcc_reference:
602 		/* VOP-less references can go through unary case.  */
603 		if ((code == REALPART_EXPR
604 		     || code == IMAGPART_EXPR
605 		     || code == VIEW_CONVERT_EXPR
606 		     || code == BIT_FIELD_REF)
607 		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
608 			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
609 		  return VN_NARY;
610 
611 		/* Fallthrough.  */
612 	      case tcc_declaration:
613 		return VN_REFERENCE;
614 
615 	      case tcc_constant:
616 		return VN_CONSTANT;
617 
618 	      default:
619 		if (code == ADDR_EXPR)
620 		  return (is_gimple_min_invariant (rhs1)
621 			  ? VN_CONSTANT : VN_REFERENCE);
622 		else if (code == CONSTRUCTOR)
623 		  return VN_NARY;
624 		return VN_NONE;
625 	      }
626 	  default:
627 	    return VN_NONE;
628 	  }
629       }
630     default:
631       return VN_NONE;
632     }
633 }
634 
635 /* Lookup a value id for CONSTANT and return it.  If it does not
636    exist returns 0.  */
637 
638 unsigned int
639 get_constant_value_id (tree constant)
640 {
641   vn_constant_s **slot;
642   struct vn_constant_s vc;
643 
644   vc.hashcode = vn_hash_constant_with_type (constant);
645   vc.constant = constant;
646   slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
647   if (slot)
648     return (*slot)->value_id;
649   return 0;
650 }
651 
652 /* Lookup a value id for CONSTANT, and if it does not exist, create a
653    new one and return it.  If it does exist, return it.  */
654 
655 unsigned int
656 get_or_alloc_constant_value_id (tree constant)
657 {
658   vn_constant_s **slot;
659   struct vn_constant_s vc;
660   vn_constant_t vcp;
661 
662   /* If the hashtable isn't initialized we're not running from PRE and thus
663      do not need value-ids.  */
664   if (!constant_to_value_id)
665     return 0;
666 
667   vc.hashcode = vn_hash_constant_with_type (constant);
668   vc.constant = constant;
669   slot = constant_to_value_id->find_slot (&vc, INSERT);
670   if (*slot)
671     return (*slot)->value_id;
672 
673   vcp = XNEW (struct vn_constant_s);
674   vcp->hashcode = vc.hashcode;
675   vcp->constant = constant;
676   vcp->value_id = get_next_constant_value_id ();
677   *slot = vcp;
678   return vcp->value_id;
679 }
680 
681 /* Compute the hash for a reference operand VRO1.  */
682 
683 static void
684 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
685 {
686   hstate.add_int (vro1->opcode);
687   if (vro1->op0)
688     inchash::add_expr (vro1->op0, hstate);
689   if (vro1->op1)
690     inchash::add_expr (vro1->op1, hstate);
691   if (vro1->op2)
692     inchash::add_expr (vro1->op2, hstate);
693 }
694 
695 /* Compute a hash for the reference operation VR1 and return it.  */
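/* Runs of operands with known constant offsets are accumulated into a
   single offset before being added to the hash, so that for instance
   a.b (with b at byte offset 4) and MEM[&a + 4] can end up with the
   same hash value; vn_reference_eq below does the corresponding
   structural comparison.  This example is illustrative only.  */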
696 
697 static hashval_t
698 vn_reference_compute_hash (const vn_reference_t vr1)
699 {
700   inchash::hash hstate;
701   hashval_t result;
702   int i;
703   vn_reference_op_t vro;
704   poly_int64 off = -1;
705   bool deref = false;
706 
707   FOR_EACH_VEC_ELT (vr1->operands, i, vro)
708     {
709       if (vro->opcode == MEM_REF)
710 	deref = true;
711       else if (vro->opcode != ADDR_EXPR)
712 	deref = false;
713       if (maybe_ne (vro->off, -1))
714 	{
715 	  if (known_eq (off, -1))
716 	    off = 0;
717 	  off += vro->off;
718 	}
719       else
720 	{
721 	  if (maybe_ne (off, -1)
722 	      && maybe_ne (off, 0))
723 	    hstate.add_poly_int (off);
724 	  off = -1;
725 	  if (deref
726 	      && vro->opcode == ADDR_EXPR)
727 	    {
728 	      if (vro->op0)
729 		{
730 		  tree op = TREE_OPERAND (vro->op0, 0);
731 		  hstate.add_int (TREE_CODE (op));
732 		  inchash::add_expr (op, hstate);
733 		}
734 	    }
735 	  else
736 	    vn_reference_op_compute_hash (vro, hstate);
737 	}
738     }
739   result = hstate.end ();
740   /* ??? We would ICE later if we hash instead of adding that in. */
741   if (vr1->vuse)
742     result += SSA_NAME_VERSION (vr1->vuse);
743 
744   return result;
745 }
746 
747 /* Return true if reference operations VR1 and VR2 are equivalent.  This
748    means they have the same set of operands and vuses.  */
749 
750 bool
751 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
752 {
753   unsigned i, j;
754 
755   /* Early out if this is not a hash collision.  */
756   if (vr1->hashcode != vr2->hashcode)
757     return false;
758 
759   /* The VOP needs to be the same.  */
760   if (vr1->vuse != vr2->vuse)
761     return false;
762 
763   /* If the operands are the same we are done.  */
764   if (vr1->operands == vr2->operands)
765     return true;
766 
767   if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
768       || (COMPLETE_TYPE_P (vr1->type)
769 	  && !expressions_equal_p (TYPE_SIZE (vr1->type),
770 				   TYPE_SIZE (vr2->type))))
771     return false;
772 
773   if (INTEGRAL_TYPE_P (vr1->type)
774       && INTEGRAL_TYPE_P (vr2->type))
775     {
776       if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
777 	return false;
778     }
779   else if (INTEGRAL_TYPE_P (vr1->type)
780 	   && (TYPE_PRECISION (vr1->type)
781 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
782     return false;
783   else if (INTEGRAL_TYPE_P (vr2->type)
784 	   && (TYPE_PRECISION (vr2->type)
785 	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
786     return false;
787 
788   i = 0;
789   j = 0;
790   do
791     {
792       poly_int64 off1 = 0, off2 = 0;
793       vn_reference_op_t vro1, vro2;
794       vn_reference_op_s tem1, tem2;
795       bool deref1 = false, deref2 = false;
796       bool reverse1 = false, reverse2 = false;
797       for (; vr1->operands.iterate (i, &vro1); i++)
798 	{
799 	  if (vro1->opcode == MEM_REF)
800 	    deref1 = true;
801 	  /* Do not look through a storage order barrier.  */
802 	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
803 	    return false;
804 	  reverse1 |= vro1->reverse;
805 	  if (known_eq (vro1->off, -1))
806 	    break;
807 	  off1 += vro1->off;
808 	}
809       for (; vr2->operands.iterate (j, &vro2); j++)
810 	{
811 	  if (vro2->opcode == MEM_REF)
812 	    deref2 = true;
813 	  /* Do not look through a storage order barrier.  */
814 	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
815 	    return false;
816 	  reverse2 |= vro2->reverse;
817 	  if (known_eq (vro2->off, -1))
818 	    break;
819 	  off2 += vro2->off;
820 	}
821       if (maybe_ne (off1, off2) || reverse1 != reverse2)
822 	return false;
823       if (deref1 && vro1->opcode == ADDR_EXPR)
824 	{
825 	  memset (&tem1, 0, sizeof (tem1));
826 	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
827 	  tem1.type = TREE_TYPE (tem1.op0);
828 	  tem1.opcode = TREE_CODE (tem1.op0);
829 	  vro1 = &tem1;
830 	  deref1 = false;
831 	}
832       if (deref2 && vro2->opcode == ADDR_EXPR)
833 	{
834 	  memset (&tem2, 0, sizeof (tem2));
835 	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
836 	  tem2.type = TREE_TYPE (tem2.op0);
837 	  tem2.opcode = TREE_CODE (tem2.op0);
838 	  vro2 = &tem2;
839 	  deref2 = false;
840 	}
841       if (deref1 != deref2)
842 	return false;
843       if (!vn_reference_op_eq (vro1, vro2))
844 	return false;
845       ++j;
846       ++i;
847     }
848   while (vr1->operands.length () != i
849 	 || vr2->operands.length () != j);
850 
851   return true;
852 }
853 
854 /* Copy the operations present in load/store REF into RESULT, a vector of
855    vn_reference_op_s's.  */
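/* For example (types and offsets elided), a load from s.b[i_1], with S
   a local aggregate, decomposes outermost component first roughly into
     { ARRAY_REF<i_1>, COMPONENT_REF<b>, MEM_REF<0>, ADDR_EXPR<&s> }
   where the decl has been canonicalized to MEM[&s] as done in the
   VAR_DECL case below.  This is an illustrative sketch only.  */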
856 
857 static void
858 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
859 {
860   /* For non-calls, store the information that makes up the address.  */
861   tree orig = ref;
862   while (ref)
863     {
864       vn_reference_op_s temp;
865 
866       memset (&temp, 0, sizeof (temp));
867       temp.type = TREE_TYPE (ref);
868       temp.opcode = TREE_CODE (ref);
869       temp.off = -1;
870 
871       switch (temp.opcode)
872 	{
873 	case MODIFY_EXPR:
874 	  temp.op0 = TREE_OPERAND (ref, 1);
875 	  break;
876 	case WITH_SIZE_EXPR:
877 	  temp.op0 = TREE_OPERAND (ref, 1);
878 	  temp.off = 0;
879 	  break;
880 	case MEM_REF:
881 	  /* The base address gets its own vn_reference_op_s structure.  */
882 	  temp.op0 = TREE_OPERAND (ref, 1);
883 	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
884 	    temp.off = -1;
885 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
886 	  temp.base = MR_DEPENDENCE_BASE (ref);
887 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
888 	  break;
889 	case TARGET_MEM_REF:
890 	  /* The base address gets its own vn_reference_op_s structure.  */
891 	  temp.op0 = TMR_INDEX (ref);
892 	  temp.op1 = TMR_STEP (ref);
893 	  temp.op2 = TMR_OFFSET (ref);
894 	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
895 	  temp.base = MR_DEPENDENCE_BASE (ref);
896 	  result->safe_push (temp);
897 	  memset (&temp, 0, sizeof (temp));
898 	  temp.type = NULL_TREE;
899 	  temp.opcode = ERROR_MARK;
900 	  temp.op0 = TMR_INDEX2 (ref);
901 	  temp.off = -1;
902 	  break;
903 	case BIT_FIELD_REF:
904 	  /* Record bits, position and storage order.  */
905 	  temp.op0 = TREE_OPERAND (ref, 1);
906 	  temp.op1 = TREE_OPERAND (ref, 2);
907 	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
908 	    temp.off = -1;
909 	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
910 	  break;
911 	case COMPONENT_REF:
912 	  /* The field decl is enough to unambiguously specify the field,
913 	     a matching type is not necessary and a mismatching type
914 	     is always a spurious difference.  */
915 	  temp.type = NULL_TREE;
916 	  temp.op0 = TREE_OPERAND (ref, 1);
917 	  temp.op1 = TREE_OPERAND (ref, 2);
918 	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
919 			  && TYPE_REVERSE_STORAGE_ORDER
920 			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
921 	  {
922 	    tree this_offset = component_ref_field_offset (ref);
923 	    if (this_offset
924 		&& poly_int_tree_p (this_offset))
925 	      {
926 		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
927 		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
928 		  {
929 		    poly_offset_int off
930 		      = (wi::to_poly_offset (this_offset)
931 			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
932 		    /* Prohibit value-numbering zero-offset components
933 		       of addresses the same before the pass folding
934 		       __builtin_object_size had a chance to run
935 		       (checking cfun->after_inlining does the
936 		       trick here).  */
937 		    if (TREE_CODE (orig) != ADDR_EXPR
938 			|| maybe_ne (off, 0)
939 			|| cfun->after_inlining)
940 		      off.to_shwi (&temp.off);
941 		  }
942 	      }
943 	  }
944 	  break;
945 	case ARRAY_RANGE_REF:
946 	case ARRAY_REF:
947 	  {
948 	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
949 	    /* Record index as operand.  */
950 	    temp.op0 = TREE_OPERAND (ref, 1);
951 	    /* Always record lower bounds and element size.  */
952 	    temp.op1 = array_ref_low_bound (ref);
953 	    /* But record element size in units of the type alignment.  */
954 	    temp.op2 = TREE_OPERAND (ref, 3);
955 	    temp.align = eltype->type_common.align;
956 	    if (! temp.op2)
957 	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
958 				     size_int (TYPE_ALIGN_UNIT (eltype)));
959 	    if (poly_int_tree_p (temp.op0)
960 		&& poly_int_tree_p (temp.op1)
961 		&& TREE_CODE (temp.op2) == INTEGER_CST)
962 	      {
963 		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
964 					- wi::to_poly_offset (temp.op1))
965 				       * wi::to_offset (temp.op2)
966 				       * vn_ref_op_align_unit (&temp));
967 		off.to_shwi (&temp.off);
968 	      }
969 	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
970 			    && TYPE_REVERSE_STORAGE_ORDER
971 				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
972 	  }
973 	  break;
974 	case VAR_DECL:
975 	  if (DECL_HARD_REGISTER (ref))
976 	    {
977 	      temp.op0 = ref;
978 	      break;
979 	    }
980 	  /* Fallthru.  */
981 	case PARM_DECL:
982 	case CONST_DECL:
983 	case RESULT_DECL:
984 	  /* Canonicalize decls to MEM[&decl] which is what we end up with
985 	     when valueizing MEM[ptr] with ptr = &decl.  */
986 	  temp.opcode = MEM_REF;
987 	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
988 	  temp.off = 0;
989 	  result->safe_push (temp);
990 	  temp.opcode = ADDR_EXPR;
991 	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
992 	  temp.type = TREE_TYPE (temp.op0);
993 	  temp.off = -1;
994 	  break;
995 	case STRING_CST:
996 	case INTEGER_CST:
997 	case POLY_INT_CST:
998 	case COMPLEX_CST:
999 	case VECTOR_CST:
1000 	case REAL_CST:
1001 	case FIXED_CST:
1002 	case CONSTRUCTOR:
1003 	case SSA_NAME:
1004 	  temp.op0 = ref;
1005 	  break;
1006 	case ADDR_EXPR:
1007 	  if (is_gimple_min_invariant (ref))
1008 	    {
1009 	      temp.op0 = ref;
1010 	      break;
1011 	    }
1012 	  break;
1013 	  /* These are only interesting for their operands, their
1014 	     existence, and their type.  They will never be the last
1015 	     ref in the chain of references (i.e. they require an
1016 	     operand), so we don't have to put anything
1017 	     for op* as it will be handled by the iteration.  */
1018 	case REALPART_EXPR:
1019 	  temp.off = 0;
1020 	  break;
1021 	case VIEW_CONVERT_EXPR:
1022 	  temp.off = 0;
1023 	  temp.reverse = storage_order_barrier_p (ref);
1024 	  break;
1025 	case IMAGPART_EXPR:
1026 	  /* This is only interesting for its constant offset.  */
1027 	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1028 	  break;
1029 	default:
1030 	  gcc_unreachable ();
1031 	}
1032       result->safe_push (temp);
1033 
1034       if (REFERENCE_CLASS_P (ref)
1035 	  || TREE_CODE (ref) == MODIFY_EXPR
1036 	  || TREE_CODE (ref) == WITH_SIZE_EXPR
1037 	  || (TREE_CODE (ref) == ADDR_EXPR
1038 	      && !is_gimple_min_invariant (ref)))
1039 	ref = TREE_OPERAND (ref, 0);
1040       else
1041 	ref = NULL_TREE;
1042     }
1043 }
1044 
1045 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
1046    operands in *OPS, the reference alias set SET and the reference type TYPE.
1047    Return true if something useful was produced.  */
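/* As an illustration, given the operands
     { COMPONENT_REF<b>, MEM_REF<0>, ADDR_EXPR<&s> }
   with B an int field at byte offset 4 and TYPE int, this produces an
   ao_ref with base S, offset 32, size 32 and max_size 32 (all in bits).
   The numbers are only a sketch of the intended effect.  */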
1048 
1049 bool
1050 ao_ref_init_from_vn_reference (ao_ref *ref,
1051 			       alias_set_type set, alias_set_type base_set,
1052 			       tree type, vec<vn_reference_op_s> ops)
1053 {
1054   vn_reference_op_t op;
1055   unsigned i;
1056   tree base = NULL_TREE;
1057   tree *op0_p = &base;
1058   poly_offset_int offset = 0;
1059   poly_offset_int max_size;
1060   poly_offset_int size = -1;
1061   tree size_tree = NULL_TREE;
1062 
1063   machine_mode mode = TYPE_MODE (type);
1064   if (mode == BLKmode)
1065     size_tree = TYPE_SIZE (type);
1066   else
1067     size = GET_MODE_BITSIZE (mode);
1068   if (size_tree != NULL_TREE
1069       && poly_int_tree_p (size_tree))
1070     size = wi::to_poly_offset (size_tree);
1071 
1072   /* Lower the final access size from the outermost expression.  */
1073   op = &ops[0];
1074   size_tree = NULL_TREE;
1075   if (op->opcode == COMPONENT_REF)
1076     size_tree = DECL_SIZE (op->op0);
1077   else if (op->opcode == BIT_FIELD_REF)
1078     size_tree = op->op0;
1079   if (size_tree != NULL_TREE
1080       && poly_int_tree_p (size_tree)
1081       && (!known_size_p (size)
1082 	  || known_lt (wi::to_poly_offset (size_tree), size)))
1083     size = wi::to_poly_offset (size_tree);
1084 
1085   /* Initially, maxsize is the same as the accessed element size.
1086      In the following it will only grow (or become -1).  */
1087   max_size = size;
1088 
1089   /* Compute cumulative bit-offset for nested component-refs and array-refs,
1090      and find the ultimate containing object.  */
1091   FOR_EACH_VEC_ELT (ops, i, op)
1092     {
1093       switch (op->opcode)
1094 	{
1095 	/* These may be in the reference ops, but we cannot do anything
1096 	   sensible with them here.  */
1097 	case ADDR_EXPR:
1098 	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
1099 	  if (base != NULL_TREE
1100 	      && TREE_CODE (base) == MEM_REF
1101 	      && op->op0
1102 	      && DECL_P (TREE_OPERAND (op->op0, 0)))
1103 	    {
1104 	      vn_reference_op_t pop = &ops[i-1];
1105 	      base = TREE_OPERAND (op->op0, 0);
1106 	      if (known_eq (pop->off, -1))
1107 		{
1108 		  max_size = -1;
1109 		  offset = 0;
1110 		}
1111 	      else
1112 		offset += pop->off * BITS_PER_UNIT;
1113 	      op0_p = NULL;
1114 	      break;
1115 	    }
1116 	  /* Fallthru.  */
1117 	case CALL_EXPR:
1118 	  return false;
1119 
1120 	/* Record the base objects.  */
1121 	case MEM_REF:
1122 	  *op0_p = build2 (MEM_REF, op->type,
1123 			   NULL_TREE, op->op0);
1124 	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1125 	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
1126 	  op0_p = &TREE_OPERAND (*op0_p, 0);
1127 	  break;
1128 
1129 	case VAR_DECL:
1130 	case PARM_DECL:
1131 	case RESULT_DECL:
1132 	case SSA_NAME:
1133 	  *op0_p = op->op0;
1134 	  op0_p = NULL;
1135 	  break;
1136 
1137 	/* And now the usual component-reference style ops.  */
1138 	case BIT_FIELD_REF:
1139 	  offset += wi::to_poly_offset (op->op1);
1140 	  break;
1141 
1142 	case COMPONENT_REF:
1143 	  {
1144 	    tree field = op->op0;
1145 	    /* We do not have a complete COMPONENT_REF tree here so we
1146 	       cannot use component_ref_field_offset.  Do the interesting
1147 	       parts manually.  */
1148 	    tree this_offset = DECL_FIELD_OFFSET (field);
1149 
1150 	    if (op->op1 || !poly_int_tree_p (this_offset))
1151 	      max_size = -1;
1152 	    else
1153 	      {
1154 		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1155 					   << LOG2_BITS_PER_UNIT);
1156 		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1157 		offset += woffset;
1158 	      }
1159 	    break;
1160 	  }
1161 
1162 	case ARRAY_RANGE_REF:
1163 	case ARRAY_REF:
1164 	  /* We recorded the lower bound and the element size.  */
1165 	  if (!poly_int_tree_p (op->op0)
1166 	      || !poly_int_tree_p (op->op1)
1167 	      || TREE_CODE (op->op2) != INTEGER_CST)
1168 	    max_size = -1;
1169 	  else
1170 	    {
1171 	      poly_offset_int woffset
1172 		= wi::sext (wi::to_poly_offset (op->op0)
1173 			    - wi::to_poly_offset (op->op1),
1174 			    TYPE_PRECISION (sizetype));
1175 	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1176 	      woffset <<= LOG2_BITS_PER_UNIT;
1177 	      offset += woffset;
1178 	    }
1179 	  break;
1180 
1181 	case REALPART_EXPR:
1182 	  break;
1183 
1184 	case IMAGPART_EXPR:
1185 	  offset += size;
1186 	  break;
1187 
1188 	case VIEW_CONVERT_EXPR:
1189 	  break;
1190 
1191 	case STRING_CST:
1192 	case INTEGER_CST:
1193 	case COMPLEX_CST:
1194 	case VECTOR_CST:
1195 	case REAL_CST:
1196 	case CONSTRUCTOR:
1197 	case CONST_DECL:
1198 	  return false;
1199 
1200 	default:
1201 	  return false;
1202 	}
1203     }
1204 
1205   if (base == NULL_TREE)
1206     return false;
1207 
1208   ref->ref = NULL_TREE;
1209   ref->base = base;
1210   ref->ref_alias_set = set;
1211   ref->base_alias_set = base_set;
1212   /* We discount volatiles from value-numbering elsewhere.  */
1213   ref->volatile_p = false;
1214 
1215   if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1216     {
1217       ref->offset = 0;
1218       ref->size = -1;
1219       ref->max_size = -1;
1220       return true;
1221     }
1222 
1223   if (!offset.to_shwi (&ref->offset))
1224     {
1225       ref->offset = 0;
1226       ref->max_size = -1;
1227       return true;
1228     }
1229 
1230   if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1231     ref->max_size = -1;
1232 
1233   return true;
1234 }
1235 
1236 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1237    vn_reference_op_s's.  */
1238 
1239 static void
1240 copy_reference_ops_from_call (gcall *call,
1241 			      vec<vn_reference_op_s> *result)
1242 {
1243   vn_reference_op_s temp;
1244   unsigned i;
1245   tree lhs = gimple_call_lhs (call);
1246   int lr;
1247 
1248   /* If two calls have different non-SSA lhs, vdef value numbers should be
1249      different.  By adding the lhs here in the vector, we ensure that the
1250      hashcode is different, guaranteeing a different value number.  */
1251   if (lhs && TREE_CODE (lhs) != SSA_NAME)
1252     {
1253       memset (&temp, 0, sizeof (temp));
1254       temp.opcode = MODIFY_EXPR;
1255       temp.type = TREE_TYPE (lhs);
1256       temp.op0 = lhs;
1257       temp.off = -1;
1258       result->safe_push (temp);
1259     }
1260 
1261   /* Copy the type, opcode, function, static chain and EH region, if any.  */
1262   memset (&temp, 0, sizeof (temp));
1263   temp.type = gimple_call_fntype (call);
1264   temp.opcode = CALL_EXPR;
1265   temp.op0 = gimple_call_fn (call);
1266   temp.op1 = gimple_call_chain (call);
1267   if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1268     temp.op2 = size_int (lr);
1269   temp.off = -1;
1270   result->safe_push (temp);
1271 
1272   /* Copy the call arguments.  As they can be references as well,
1273      just chain them together.  */
1274   for (i = 0; i < gimple_call_num_args (call); ++i)
1275     {
1276       tree callarg = gimple_call_arg (call, i);
1277       copy_reference_ops_from_ref (callarg, result);
1278     }
1279 }
1280 
1281 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1282    *I_P to point to the last element of the replacement.  */
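/* E.g. the operand pair { MEM_REF<4>, ADDR_EXPR<&s.b> }, with B at byte
   offset 8 in S, is rewritten to { MEM_REF<12>, ADDR_EXPR<&s> }: the
   component offset is folded into the MEM_REF offset (offsets here are
   only illustrative).  */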
1283 static bool
1284 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1285 			    unsigned int *i_p)
1286 {
1287   unsigned int i = *i_p;
1288   vn_reference_op_t op = &(*ops)[i];
1289   vn_reference_op_t mem_op = &(*ops)[i - 1];
1290   tree addr_base;
1291   poly_int64 addr_offset = 0;
1292 
1293   /* The only thing we have to do is from &OBJ.foo.bar add the offset
1294      from .foo.bar to the preceding MEM_REF offset and replace the
1295      address with &OBJ.  */
1296   addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1297 					       &addr_offset, vn_valueize);
1298   gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1299   if (addr_base != TREE_OPERAND (op->op0, 0))
1300     {
1301       poly_offset_int off
1302 	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1303 				  SIGNED)
1304 	   + addr_offset);
1305       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1306       op->op0 = build_fold_addr_expr (addr_base);
1307       if (tree_fits_shwi_p (mem_op->op0))
1308 	mem_op->off = tree_to_shwi (mem_op->op0);
1309       else
1310 	mem_op->off = -1;
1311       return true;
1312     }
1313   return false;
1314 }
1315 
1316 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
1317    *I_P to point to the last element of the replacement.  */
1318 static bool
1319 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1320 				     unsigned int *i_p)
1321 {
1322   bool changed = false;
1323   vn_reference_op_t op;
1324 
1325   do
1326     {
1327       unsigned int i = *i_p;
1328       op = &(*ops)[i];
1329       vn_reference_op_t mem_op = &(*ops)[i - 1];
1330       gimple *def_stmt;
1331       enum tree_code code;
1332       poly_offset_int off;
1333 
1334       def_stmt = SSA_NAME_DEF_STMT (op->op0);
1335       if (!is_gimple_assign (def_stmt))
1336 	return changed;
1337 
1338       code = gimple_assign_rhs_code (def_stmt);
1339       if (code != ADDR_EXPR
1340 	  && code != POINTER_PLUS_EXPR)
1341 	return changed;
1342 
1343       off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1344 
1345       /* The only thing we have to do is from &OBJ.foo.bar add the offset
1346 	 from .foo.bar to the preceding MEM_REF offset and replace the
1347 	 address with &OBJ.  */
1348       if (code == ADDR_EXPR)
1349 	{
1350 	  tree addr, addr_base;
1351 	  poly_int64 addr_offset;
1352 
1353 	  addr = gimple_assign_rhs1 (def_stmt);
1354 	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1355 						       &addr_offset,
1356 						       vn_valueize);
1357 	  /* If that didn't work because the address isn't invariant, propagate
1358 	     the reference tree from the address operation in case the current
1359 	     dereference isn't offset.  */
1360 	  if (!addr_base
1361 	      && *i_p == ops->length () - 1
1362 	      && known_eq (off, 0)
1363 	      /* This makes us disable this transform for PRE where the
1364 		 reference ops might be also used for code insertion which
1365 		 is invalid.  */
1366 	      && default_vn_walk_kind == VN_WALKREWRITE)
1367 	    {
1368 	      auto_vec<vn_reference_op_s, 32> tem;
1369 	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1370 	      /* Make sure to preserve TBAA info.  The only objects not
1371 		 wrapped in MEM_REFs that can have their address taken are
1372 		 STRING_CSTs.  */
1373 	      if (tem.length () >= 2
1374 		  && tem[tem.length () - 2].opcode == MEM_REF)
1375 		{
1376 		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1377 		  new_mem_op->op0
1378 		      = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1379 					  wi::to_poly_wide (new_mem_op->op0));
1380 		}
1381 	      else
1382 		gcc_assert (tem.last ().opcode == STRING_CST);
1383 	      ops->pop ();
1384 	      ops->pop ();
1385 	      ops->safe_splice (tem);
1386 	      --*i_p;
1387 	      return true;
1388 	    }
1389 	  if (!addr_base
1390 	      || TREE_CODE (addr_base) != MEM_REF
1391 	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1392 		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1393 								    0))))
1394 	    return changed;
1395 
1396 	  off += addr_offset;
1397 	  off += mem_ref_offset (addr_base);
1398 	  op->op0 = TREE_OPERAND (addr_base, 0);
1399 	}
1400       else
1401 	{
1402 	  tree ptr, ptroff;
1403 	  ptr = gimple_assign_rhs1 (def_stmt);
1404 	  ptroff = gimple_assign_rhs2 (def_stmt);
1405 	  if (TREE_CODE (ptr) != SSA_NAME
1406 	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1407 	      /* Make sure to not endlessly recurse.
1408 		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
1409 		 happen when we value-number a PHI to its backedge value.  */
1410 	      || SSA_VAL (ptr) == op->op0
1411 	      || !poly_int_tree_p (ptroff))
1412 	    return changed;
1413 
1414 	  off += wi::to_poly_offset (ptroff);
1415 	  op->op0 = ptr;
1416 	}
1417 
1418       mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1419       if (tree_fits_shwi_p (mem_op->op0))
1420 	mem_op->off = tree_to_shwi (mem_op->op0);
1421       else
1422 	mem_op->off = -1;
1423       /* ???  Can end up with endless recursion here!?
1424 	 gcc.c-torture/execute/strcmp-1.c  */
1425       if (TREE_CODE (op->op0) == SSA_NAME)
1426 	op->op0 = SSA_VAL (op->op0);
1427       if (TREE_CODE (op->op0) != SSA_NAME)
1428 	op->opcode = TREE_CODE (op->op0);
1429 
1430       changed = true;
1431     }
1432   /* Tail-recurse.  */
1433   while (TREE_CODE (op->op0) == SSA_NAME);
1434 
1435   /* Fold a remaining *&.  */
1436   if (TREE_CODE (op->op0) == ADDR_EXPR)
1437     vn_reference_fold_indirect (ops, i_p);
1438 
1439   return changed;
1440 }
1441 
1442 /* Optimize the reference REF to a constant if possible or return
1443    NULL_TREE if not.  */
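/* For instance a load such as
     static const int tab[4] = { 1, 2, 3, 5 };
     ... = tab[2];
   can be simplified to 3 here via ctor_for_folding/fold_ctor_reference,
   provided all offsets in the operand vector are compile-time
   constants.  */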
1444 
1445 tree
1446 fully_constant_vn_reference_p (vn_reference_t ref)
1447 {
1448   vec<vn_reference_op_s> operands = ref->operands;
1449   vn_reference_op_t op;
1450 
1451   /* Try to simplify the translated expression if it is
1452      a call to a builtin function with at most two arguments.  */
1453   op = &operands[0];
1454   if (op->opcode == CALL_EXPR
1455       && TREE_CODE (op->op0) == ADDR_EXPR
1456       && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1457       && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1458       && operands.length () >= 2
1459       && operands.length () <= 3)
1460     {
1461       vn_reference_op_t arg0, arg1 = NULL;
1462       bool anyconst = false;
1463       arg0 = &operands[1];
1464       if (operands.length () > 2)
1465 	arg1 = &operands[2];
1466       if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1467 	  || (arg0->opcode == ADDR_EXPR
1468 	      && is_gimple_min_invariant (arg0->op0)))
1469 	anyconst = true;
1470       if (arg1
1471 	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1472 	      || (arg1->opcode == ADDR_EXPR
1473 		  && is_gimple_min_invariant (arg1->op0))))
1474 	anyconst = true;
1475       if (anyconst)
1476 	{
1477 	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1478 					 arg1 ? 2 : 1,
1479 					 arg0->op0,
1480 					 arg1 ? arg1->op0 : NULL);
1481 	  if (folded
1482 	      && TREE_CODE (folded) == NOP_EXPR)
1483 	    folded = TREE_OPERAND (folded, 0);
1484 	  if (folded
1485 	      && is_gimple_min_invariant (folded))
1486 	    return folded;
1487 	}
1488     }
1489 
1490   /* Simplify reads from constants or constant initializers.  */
1491   else if (BITS_PER_UNIT == 8
1492 	   && COMPLETE_TYPE_P (ref->type)
1493 	   && is_gimple_reg_type (ref->type))
1494     {
1495       poly_int64 off = 0;
1496       HOST_WIDE_INT size;
1497       if (INTEGRAL_TYPE_P (ref->type))
1498 	size = TYPE_PRECISION (ref->type);
1499       else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1500 	size = tree_to_shwi (TYPE_SIZE (ref->type));
1501       else
1502 	return NULL_TREE;
1503       if (size % BITS_PER_UNIT != 0
1504 	  || size > MAX_BITSIZE_MODE_ANY_MODE)
1505 	return NULL_TREE;
1506       size /= BITS_PER_UNIT;
1507       unsigned i;
1508       for (i = 0; i < operands.length (); ++i)
1509 	{
1510 	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1511 	    {
1512 	      ++i;
1513 	      break;
1514 	    }
1515 	  if (known_eq (operands[i].off, -1))
1516 	    return NULL_TREE;
1517 	  off += operands[i].off;
1518 	  if (operands[i].opcode == MEM_REF)
1519 	    {
1520 	      ++i;
1521 	      break;
1522 	    }
1523 	}
1524       vn_reference_op_t base = &operands[--i];
1525       tree ctor = error_mark_node;
1526       tree decl = NULL_TREE;
1527       if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1528 	ctor = base->op0;
1529       else if (base->opcode == MEM_REF
1530 	       && base[1].opcode == ADDR_EXPR
1531 	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1532 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1533 		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1534 	{
1535 	  decl = TREE_OPERAND (base[1].op0, 0);
1536 	  if (TREE_CODE (decl) == STRING_CST)
1537 	    ctor = decl;
1538 	  else
1539 	    ctor = ctor_for_folding (decl);
1540 	}
1541       if (ctor == NULL_TREE)
1542 	return build_zero_cst (ref->type);
1543       else if (ctor != error_mark_node)
1544 	{
1545 	  HOST_WIDE_INT const_off;
1546 	  if (decl)
1547 	    {
1548 	      tree res = fold_ctor_reference (ref->type, ctor,
1549 					      off * BITS_PER_UNIT,
1550 					      size * BITS_PER_UNIT, decl);
1551 	      if (res)
1552 		{
1553 		  STRIP_USELESS_TYPE_CONVERSION (res);
1554 		  if (is_gimple_min_invariant (res))
1555 		    return res;
1556 		}
1557 	    }
1558 	  else if (off.is_constant (&const_off))
1559 	    {
1560 	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1561 	      int len = native_encode_expr (ctor, buf, size, const_off);
1562 	      if (len > 0)
1563 		return native_interpret_expr (ref->type, buf, len);
1564 	    }
1565 	}
1566     }
1567 
1568   return NULL_TREE;
1569 }
1570 
1571 /* Return true if OPS contain a storage order barrier.  */
1572 
1573 static bool
1574 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1575 {
1576   vn_reference_op_t op;
1577   unsigned i;
1578 
1579   FOR_EACH_VEC_ELT (ops, i, op)
1580     if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1581       return true;
1582 
1583   return false;
1584 }
1585 
1586 /* Return true if OPS represent an access with reverse storage order.  */
1587 
1588 static bool
1589 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1590 {
1591   unsigned i = 0;
1592   if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1593     ++i;
1594   switch (ops[i].opcode)
1595     {
1596     case ARRAY_REF:
1597     case COMPONENT_REF:
1598     case BIT_FIELD_REF:
1599     case MEM_REF:
1600       return ops[i].reverse;
1601     default:
1602       return false;
1603     }
1604 }
1605 
1606 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1607    structures into their value numbers.  This is done in-place, and
1608    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
1609    whether any operands were valueized.  */
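/* For instance an ARRAY_REF operand indexed by i_2 has no constant
   offset; if i_2 valueizes to 3, the code switches the recorded operand
   to the constant and recomputes the offset, so the access can later be
   treated as a constant-offset reference (illustrative example).  */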
1610 
1611 static void
1612 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1613 		 bool with_avail = false)
1614 {
1615   vn_reference_op_t vro;
1616   unsigned int i;
1617 
1618   *valueized_anything = false;
1619 
1620   FOR_EACH_VEC_ELT (*orig, i, vro)
1621     {
1622       if (vro->opcode == SSA_NAME
1623 	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1624 	{
1625 	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1626 	  if (tem != vro->op0)
1627 	    {
1628 	      *valueized_anything = true;
1629 	      vro->op0 = tem;
1630 	    }
1631 	  /* If it transforms from an SSA_NAME to a constant, update
1632 	     the opcode.  */
1633 	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1634 	    vro->opcode = TREE_CODE (vro->op0);
1635 	}
1636       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1637 	{
1638 	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1639 	  if (tem != vro->op1)
1640 	    {
1641 	      *valueized_anything = true;
1642 	      vro->op1 = tem;
1643 	    }
1644 	}
1645       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1646 	{
1647 	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1648 	  if (tem != vro->op2)
1649 	    {
1650 	      *valueized_anything = true;
1651 	      vro->op2 = tem;
1652 	    }
1653 	}
1654       /* If it transforms from an SSA_NAME to an address, fold with
1655 	 a preceding indirect reference.  */
1656       if (i > 0
1657 	  && vro->op0
1658 	  && TREE_CODE (vro->op0) == ADDR_EXPR
1659 	  && (*orig)[i - 1].opcode == MEM_REF)
1660 	{
1661 	  if (vn_reference_fold_indirect (orig, &i))
1662 	    *valueized_anything = true;
1663 	}
1664       else if (i > 0
1665 	       && vro->opcode == SSA_NAME
1666 	       && (*orig)[i - 1].opcode == MEM_REF)
1667 	{
1668 	  if (vn_reference_maybe_forwprop_address (orig, &i))
1669 	    *valueized_anything = true;
1670 	}
1671       /* If it transforms a non-constant ARRAY_REF into a constant
1672 	 one, adjust the constant offset.  */
1673       else if (vro->opcode == ARRAY_REF
1674 	       && known_eq (vro->off, -1)
1675 	       && poly_int_tree_p (vro->op0)
1676 	       && poly_int_tree_p (vro->op1)
1677 	       && TREE_CODE (vro->op2) == INTEGER_CST)
1678 	{
1679 	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1680 				  - wi::to_poly_offset (vro->op1))
1681 				 * wi::to_offset (vro->op2)
1682 				 * vn_ref_op_align_unit (vro));
1683 	  off.to_shwi (&vro->off);
1684 	}
1685     }
1686 }
1687 
1688 static void
1689 valueize_refs (vec<vn_reference_op_s> *orig)
1690 {
1691   bool tem;
1692   valueize_refs_1 (orig, &tem);
1693 }
1694 
1695 static vec<vn_reference_op_s> shared_lookup_references;
1696 
1697 /* Create a vector of vn_reference_op_s structures from REF, a
1698    REFERENCE_CLASS_P tree.  The vector is shared among all callers of
1699    this function.  *VALUEIZED_ANYTHING will specify whether any
1700    operands were valueized.  */
1701 
1702 static vec<vn_reference_op_s>
1703 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1704 {
1705   if (!ref)
1706     return vNULL;
1707   shared_lookup_references.truncate (0);
1708   copy_reference_ops_from_ref (ref, &shared_lookup_references);
1709   valueize_refs_1 (&shared_lookup_references, valueized_anything);
1710   return shared_lookup_references;
1711 }
1712 
1713 /* Create a vector of vn_reference_op_s structures from CALL, a
1714    call statement.  The vector is shared among all callers of
1715    this function.  */
1716 
1717 static vec<vn_reference_op_s>
1718 valueize_shared_reference_ops_from_call (gcall *call)
1719 {
1720   if (!call)
1721     return vNULL;
1722   shared_lookup_references.truncate (0);
1723   copy_reference_ops_from_call (call, &shared_lookup_references);
1724   valueize_refs (&shared_lookup_references);
1725   return shared_lookup_references;
1726 }
1727 
1728 /* Lookup an SCCVN reference operation VR in the current hash table.
1729    Returns the resulting value number if it exists in the hash table,
1730    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
1731    vn_reference_t stored in the hashtable if something is found.  */
1732 
1733 static tree
1734 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1735 {
1736   vn_reference_s **slot;
1737   hashval_t hash;
1738 
1739   hash = vr->hashcode;
1740   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1741   if (slot)
1742     {
1743       if (vnresult)
1744 	*vnresult = (vn_reference_t)*slot;
1745       return ((vn_reference_t)*slot)->result;
1746     }
1747 
1748   return NULL_TREE;
1749 }
1750 
1751 
1752 /* Partial definition tracking support.  */
1753 
1754 struct pd_range
1755 {
1756   HOST_WIDE_INT offset;
1757   HOST_WIDE_INT size;
1758 };
1759 
1760 struct pd_data
1761 {
1762   tree rhs;
1763   HOST_WIDE_INT offset;
1764   HOST_WIDE_INT size;
1765 };
1766 
1767 /* Context for alias walking.  */
1768 
1769 struct vn_walk_cb_data
1770 {
1771   vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1772 		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1773     : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1774       mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1775       tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1776       first_base_set (-2), known_ranges (NULL)
1777   {
1778     if (!last_vuse_ptr)
1779       last_vuse_ptr = &last_vuse;
1780     ao_ref_init (&orig_ref, orig_ref_);
1781     if (mask)
1782       {
1783 	wide_int w = wi::to_wide (mask);
1784 	unsigned int pos = 0, prec = w.get_precision ();
1785 	pd_data pd;
1786 	pd.rhs = build_constructor (NULL_TREE, NULL);
1787 	/* When a bitwise AND with a constant is done on a memory load,
1788 	   we don't really need all the bits to be defined or defined
1789 	   to constants; we don't care what is in the positions
1790 	   corresponding to 0 bits in the mask.
1791 	   So, push the ranges of those 0 bits in the mask as artificial
1792 	   zero stores and let the partial def handling code do the
1793 	   rest.  */
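	/* For example, on a little-endian target a load masked with
	   0x00ff at 16-bit precision gets a single artificial zero
	   store covering bits [8, 16), so only bits [0, 8) need to be
	   provided by real partial definitions.  */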
1794 	while (pos < prec)
1795 	  {
1796 	    int tz = wi::ctz (w);
1797 	    if (pos + tz > prec)
1798 	      tz = prec - pos;
1799 	    if (tz)
1800 	      {
1801 		if (BYTES_BIG_ENDIAN)
1802 		  pd.offset = prec - pos - tz;
1803 		else
1804 		  pd.offset = pos;
1805 		pd.size = tz;
1806 		void *r = push_partial_def (pd, 0, 0, 0, prec);
1807 		gcc_assert (r == NULL_TREE);
1808 	      }
1809 	    pos += tz;
1810 	    if (pos == prec)
1811 	      break;
1812 	    w = wi::lrshift (w, tz);
1813 	    tz = wi::ctz (wi::bit_not (w));
1814 	    if (pos + tz > prec)
1815 	      tz = prec - pos;
1816 	    pos += tz;
1817 	    w = wi::lrshift (w, tz);
1818 	  }
1819       }
1820   }
1821   ~vn_walk_cb_data ();
1822   void *finish (alias_set_type, alias_set_type, tree);
1823   void *push_partial_def (pd_data pd,
1824 			  alias_set_type, alias_set_type, HOST_WIDE_INT,
1825 			  HOST_WIDE_INT);
1826 
1827   vn_reference_t vr;
1828   ao_ref orig_ref;
1829   tree *last_vuse_ptr;
1830   tree last_vuse;
1831   tree mask;
1832   tree masked_result;
1833   vn_lookup_kind vn_walk_kind;
1834   bool tbaa_p;
1835   vec<vn_reference_op_s> saved_operands;
1836 
1837   /* The partial defs we have encountered so far during the walk.  */
1838   auto_vec<pd_data, 2> partial_defs;
1839   /* The first def's range, to avoid splay tree setup in most cases.  */
1840   pd_range first_range;
1841   alias_set_type first_set;
1842   alias_set_type first_base_set;
1843   splay_tree known_ranges;
1844   obstack ranges_obstack;
1845 };
1846 
1847 vn_walk_cb_data::~vn_walk_cb_data ()
1848 {
1849   if (known_ranges)
1850     {
1851       splay_tree_delete (known_ranges);
1852       obstack_free (&ranges_obstack, NULL);
1853     }
1854   saved_operands.release ();
1855 }
1856 
1857 void *
1858 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1859 {
1860   if (first_set != -2)
1861     {
1862       set = first_set;
1863       base_set = first_base_set;
1864     }
1865   if (mask)
1866     {
1867       masked_result = val;
1868       return (void *) -1;
1869     }
1870   vec<vn_reference_op_s> &operands
1871     = saved_operands.exists () ? saved_operands : vr->operands;
1872   return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1873 						   vr->type, operands, val);
1874 }
1875 
1876 /* pd_range splay-tree helpers.  */
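/* Note the splay tree is keyed on pointers to the offset member of
   live pd_range objects (first_range or obstack-allocated copies),
   so the comparison function below dereferences its keys.  */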
1877 
1878 static int
1879 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1880 {
1881   HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1882   HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1883   if (offset1 < offset2)
1884     return -1;
1885   else if (offset1 > offset2)
1886     return 1;
1887   return 0;
1888 }
1889 
1890 static void *
1891 pd_tree_alloc (int size, void *data_)
1892 {
1893   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1894   return obstack_alloc (&data->ranges_obstack, size);
1895 }
1896 
1897 static void
1898 pd_tree_dealloc (void *, void *)
1899 {
1900 }
1901 
1902 /* Push PD to the vector of partial definitions, returning a value when
1903    we are ready to combine things with SET, BASE_SET, OFFSETI and
1904    MAXSIZEI, NULL when we want to continue looking for partial defs or
1905    (void *)-1 on failure.  */
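/* Partial defs are accumulated while walking the VUSE chain; their bit
   ranges, made relative to OFFSETI, are tracked in FIRST_RANGE and then in
   the KNOWN_RANGES splay tree and merged as they come in.  Once [0, MAXSIZEI)
   is fully covered the defs are native-encoded into a byte buffer and the
   combined value is interpreted in VR->TYPE.  */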
1906 
1907 void *
1908 vn_walk_cb_data::push_partial_def (pd_data pd,
1909 				   alias_set_type set, alias_set_type base_set,
1910 				   HOST_WIDE_INT offseti,
1911 				   HOST_WIDE_INT maxsizei)
1912 {
1913   const HOST_WIDE_INT bufsize = 64;
1914   /* We're using a fixed buffer for encoding so fail early if the object
1915      we want to interpret is bigger.  */
1916   if (maxsizei > bufsize * BITS_PER_UNIT
1917       || CHAR_BIT != 8
1918       || BITS_PER_UNIT != 8
1919       /* Not prepared to handle PDP endian.  */
1920       || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1921     return (void *)-1;
1922 
1923   /* Turn too large constant stores into non-constant stores.  */
1924   if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1925     pd.rhs = error_mark_node;
1926 
1927   /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1928      most a partial byte before and/or after the region.  */
1929   if (!CONSTANT_CLASS_P (pd.rhs))
1930     {
1931       if (pd.offset < offseti)
1932 	{
1933 	  HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1934 	  gcc_assert (pd.size > o);
1935 	  pd.size -= o;
1936 	  pd.offset += o;
1937 	}
1938       if (pd.size > maxsizei)
1939 	pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1940     }
1941 
1942   pd.offset -= offseti;
1943 
1944   bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1945 			|| CONSTANT_CLASS_P (pd.rhs));
1946   if (partial_defs.is_empty ())
1947     {
1948       /* If we get a clobber upfront, fail.  */
1949       if (TREE_CLOBBER_P (pd.rhs))
1950 	return (void *)-1;
1951       if (!pd_constant_p)
1952 	return (void *)-1;
1953       partial_defs.safe_push (pd);
1954       first_range.offset = pd.offset;
1955       first_range.size = pd.size;
1956       first_set = set;
1957       first_base_set = base_set;
1958       last_vuse_ptr = NULL;
1959       /* Continue looking for partial defs.  */
1960       return NULL;
1961     }
1962 
1963   if (!known_ranges)
1964     {
1965       /* ???  Optimize the case where the 2nd partial def completes things.  */
1966       gcc_obstack_init (&ranges_obstack);
1967       known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1968 						    pd_tree_alloc,
1969 						    pd_tree_dealloc, this);
1970       splay_tree_insert (known_ranges,
1971 			 (splay_tree_key)&first_range.offset,
1972 			 (splay_tree_value)&first_range);
1973     }
1974 
1975   pd_range newr = { pd.offset, pd.size };
1976   splay_tree_node n;
1977   pd_range *r;
1978   /* Lookup the predecessor of offset + 1 and see if we need to merge.  */
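  /* Using r->size + 1 below also merges ranges that are merely adjacent,
     e.g. an existing range covering bits [0, 16) and a new def covering
     [16, 24) or [8, 24) both extend it to [0, 24).  */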
1979   HOST_WIDE_INT loffset = newr.offset + 1;
1980   if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1981       && ((r = (pd_range *)n->value), true)
1982       && ranges_known_overlap_p (r->offset, r->size + 1,
1983 				 newr.offset, newr.size))
1984     {
1985       /* Ignore partial defs already covered.  Shadowed clobbers that
1986          arrive here are likewise dropped on the floor.  */
1987       if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1988 	return NULL;
1989       r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1990     }
1991   else
1992     {
1993       /* newr.offset wasn't covered yet, insert the range.  */
1994       r = XOBNEW (&ranges_obstack, pd_range);
1995       *r = newr;
1996       splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1997 			 (splay_tree_value)r);
1998     }
1999   /* Merge r which now contains newr and is a member of the splay tree with
2000      adjacent overlapping ranges.  */
2001   pd_range *rafter;
2002   while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
2003 	 && ((rafter = (pd_range *)n->value), true)
2004 	 && ranges_known_overlap_p (r->offset, r->size + 1,
2005 				    rafter->offset, rafter->size))
2006     {
2007       r->size = MAX (r->offset + r->size,
2008 		     rafter->offset + rafter->size) - r->offset;
2009       splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2010     }
2011   /* If we get a clobber, fail.  */
2012   if (TREE_CLOBBER_P (pd.rhs))
2013     return (void *)-1;
2014   /* Non-constants are OK as long as they are shadowed by a constant.  */
2015   if (!pd_constant_p)
2016     return (void *)-1;
2017   partial_defs.safe_push (pd);
2018 
2019   /* Now we have merged newr into the range tree.  When we have covered
2020      the whole access [0, maxsizei] then the tree will contain exactly
2021      one node which has the desired properties and it will be 'r'.  */
2022   if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2023     /* Continue looking for partial defs.  */
2024     return NULL;
2025 
2026   /* Now simply native encode all partial defs in reverse order.  */
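  /* The defs were pushed in the order the walk found them, latest store
     first, so popping processes the earliest store first and later stores
     overwrite its bytes in BUFFER (artificial zero defs from a mask were
     pushed first and thus are applied last).  */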
2027   unsigned ndefs = partial_defs.length ();
2028   /* We support up to 512-bit values (for V8DFmode).  */
2029   unsigned char buffer[bufsize + 1];
2030   unsigned char this_buffer[bufsize + 1];
2031   int len;
2032 
2033   memset (buffer, 0, bufsize + 1);
2034   unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2035   while (!partial_defs.is_empty ())
2036     {
2037       pd_data pd = partial_defs.pop ();
2038       unsigned int amnt;
2039       if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2040 	{
2041 	  /* Empty CONSTRUCTOR.  */
2042 	  if (pd.size >= needed_len * BITS_PER_UNIT)
2043 	    len = needed_len;
2044 	  else
2045 	    len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2046 	  memset (this_buffer, 0, len);
2047 	}
2048       else
2049 	{
2050 	  len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2051 				    MAX (0, -pd.offset) / BITS_PER_UNIT);
2052 	  if (len <= 0
2053 	      || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2054 			- MAX (0, -pd.offset) / BITS_PER_UNIT))
2055 	    {
2056 	      if (dump_file && (dump_flags & TDF_DETAILS))
2057 		fprintf (dump_file, "Failed to encode %u "
2058 			 "partial definitions\n", ndefs);
2059 	      return (void *)-1;
2060 	    }
2061 	}
2062 
2063       unsigned char *p = buffer;
2064       HOST_WIDE_INT size = pd.size;
2065       if (pd.offset < 0)
2066 	size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2067       this_buffer[len] = 0;
2068       if (BYTES_BIG_ENDIAN)
2069 	{
2070 	  /* LSB of this_buffer[len - 1] byte should be at
2071 	     pd.offset + pd.size - 1 bits in buffer.  */
2072 	  amnt = ((unsigned HOST_WIDE_INT) pd.offset
2073 		  + pd.size) % BITS_PER_UNIT;
2074 	  if (amnt)
2075 	    shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2076 	  unsigned char *q = this_buffer;
2077 	  unsigned int off = 0;
2078 	  if (pd.offset >= 0)
2079 	    {
2080 	      unsigned int msk;
2081 	      off = pd.offset / BITS_PER_UNIT;
2082 	      gcc_assert (off < needed_len);
2083 	      p = buffer + off;
2084 	      if (size <= amnt)
2085 		{
2086 		  msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2087 		  *p = (*p & ~msk) | (this_buffer[len] & msk);
2088 		  size = 0;
2089 		}
2090 	      else
2091 		{
2092 		  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2093 		    q = (this_buffer + len
2094 			 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2095 			    / BITS_PER_UNIT));
2096 		  if (pd.offset % BITS_PER_UNIT)
2097 		    {
2098 		      msk = -1U << (BITS_PER_UNIT
2099 				    - (pd.offset % BITS_PER_UNIT));
2100 		      *p = (*p & msk) | (*q & ~msk);
2101 		      p++;
2102 		      q++;
2103 		      off++;
2104 		      size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2105 		      gcc_assert (size >= 0);
2106 		    }
2107 		}
2108 	    }
2109 	  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2110 	    {
2111 	      q = (this_buffer + len
2112 		   - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2113 		      / BITS_PER_UNIT));
2114 	      if (pd.offset % BITS_PER_UNIT)
2115 		{
2116 		  q++;
2117 		  size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2118 					   % BITS_PER_UNIT);
2119 		  gcc_assert (size >= 0);
2120 		}
2121 	    }
2122 	  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2123 	      > needed_len)
2124 	    size = (needed_len - off) * BITS_PER_UNIT;
2125 	  memcpy (p, q, size / BITS_PER_UNIT);
2126 	  if (size % BITS_PER_UNIT)
2127 	    {
2128 	      unsigned int msk
2129 		= -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2130 	      p += size / BITS_PER_UNIT;
2131 	      q += size / BITS_PER_UNIT;
2132 	      *p = (*q & msk) | (*p & ~msk);
2133 	    }
2134 	}
2135       else
2136 	{
2137 	  if (pd.offset >= 0)
2138 	    {
2139 	      /* LSB of this_buffer[0] byte should be at pd.offset bits
2140 		 in buffer.  */
2141 	      unsigned int msk;
2142 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2143 	      amnt = pd.offset % BITS_PER_UNIT;
2144 	      if (amnt)
2145 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2146 	      unsigned int off = pd.offset / BITS_PER_UNIT;
2147 	      gcc_assert (off < needed_len);
2148 	      size = MIN (size,
2149 			  (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2150 	      p = buffer + off;
2151 	      if (amnt + size < BITS_PER_UNIT)
2152 		{
2153 		  /* Low amnt bits come from *p, then size bits
2154 		     from this_buffer[0] and the remaining again from
2155 		     *p.  */
2156 		  msk = ((1 << size) - 1) << amnt;
2157 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2158 		  size = 0;
2159 		}
2160 	      else if (amnt)
2161 		{
2162 		  msk = -1U << amnt;
2163 		  *p = (*p & ~msk) | (this_buffer[0] & msk);
2164 		  p++;
2165 		  size -= (BITS_PER_UNIT - amnt);
2166 		}
2167 	    }
2168 	  else
2169 	    {
2170 	      amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2171 	      if (amnt)
2172 		size -= BITS_PER_UNIT - amnt;
2173 	      size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2174 	      if (amnt)
2175 		shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2176 	    }
2177 	  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2178 	  p += size / BITS_PER_UNIT;
2179 	  if (size % BITS_PER_UNIT)
2180 	    {
2181 	      unsigned int msk = -1U << (size % BITS_PER_UNIT);
2182 	      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2183 		    & ~msk) | (*p & msk);
2184 	    }
2185 	}
2186     }
2187 
2188   tree type = vr->type;
2189   /* Make sure to interpret in a type that has a range covering the whole
2190      access size.  */
2191   if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2192     type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2193   tree val;
2194   if (BYTES_BIG_ENDIAN)
2195     {
2196       unsigned sz = needed_len;
2197       if (maxsizei % BITS_PER_UNIT)
2198 	shift_bytes_in_array_right (buffer, needed_len,
2199 				    BITS_PER_UNIT
2200 				    - (maxsizei % BITS_PER_UNIT));
2201       if (INTEGRAL_TYPE_P (type))
2202 	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2203       if (sz > needed_len)
2204 	{
2205 	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2206 	  val = native_interpret_expr (type, this_buffer, sz);
2207 	}
2208       else
2209 	val = native_interpret_expr (type, buffer, needed_len);
2210     }
2211   else
2212     val = native_interpret_expr (type, buffer, bufsize);
2213   /* If we chop off bits because the type's precision doesn't match the
2214      memory access size, this is OK when optimizing reads but not when
2215      called from the DSE code during elimination.  */
2216   if (val && type != vr->type)
2217     {
2218       if (! int_fits_type_p (val, vr->type))
2219 	val = NULL_TREE;
2220       else
2221 	val = fold_convert (vr->type, val);
2222     }
2223 
2224   if (val)
2225     {
2226       if (dump_file && (dump_flags & TDF_DETAILS))
2227 	fprintf (dump_file,
2228 		 "Successfully combined %u partial definitions\n", ndefs);
2229       /* We are using the alias-set of the first store we encounter which
2230 	 should be appropriate here.  */
2231       return finish (first_set, first_base_set, val);
2232     }
2233   else
2234     {
2235       if (dump_file && (dump_flags & TDF_DETAILS))
2236 	fprintf (dump_file,
2237 		 "Failed to interpret %u encoded partial definitions\n", ndefs);
2238       return (void *)-1;
2239     }
2240 }
2241 
2242 /* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
2243    with the current VUSE and performs the expression lookup.  */
2244 
2245 static void *
2246 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2247 {
2248   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2249   vn_reference_t vr = data->vr;
2250   vn_reference_s **slot;
2251   hashval_t hash;
2252 
2253   /* If we have partial definitions recorded we have to go through
2254      vn_reference_lookup_3.  */
2255   if (!data->partial_defs.is_empty ())
2256     return NULL;
2257 
2258   if (data->last_vuse_ptr)
2259     {
2260       *data->last_vuse_ptr = vuse;
2261       data->last_vuse = vuse;
2262     }
2263 
2264   /* Fixup vuse and hash.  */
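  /* The vuse only enters the hash additively via its SSA_NAME_VERSION
     (see vn_reference_compute_hash), which is what makes this incremental
     adjustment possible.  */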
2265   if (vr->vuse)
2266     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2267   vr->vuse = vuse_ssa_val (vuse);
2268   if (vr->vuse)
2269     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2270 
2271   hash = vr->hashcode;
2272   slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2273   if (slot)
2274     {
2275       if ((*slot)->result && data->saved_operands.exists ())
2276 	return data->finish (vr->set, vr->base_set, (*slot)->result);
2277       return *slot;
2278     }
2279 
2280   return NULL;
2281 }
2282 
2283 /* Lookup an existing or insert a new vn_reference entry into the
2284    value table for the VUSE, SET, BASE_SET, TYPE, OPERANDS reference
2285    which has the value VALUE, either a constant or an SSA name.  */
2286 
2287 static vn_reference_t
2288 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2289 					  alias_set_type set,
2290 					  alias_set_type base_set,
2291 					  tree type,
2292 					  vec<vn_reference_op_s,
2293 					        va_heap> operands,
2294 					  tree value)
2295 {
2296   vn_reference_s vr1;
2297   vn_reference_t result;
2298   unsigned value_id;
2299   vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2300   vr1.operands = operands;
2301   vr1.type = type;
2302   vr1.set = set;
2303   vr1.base_set = base_set;
2304   vr1.hashcode = vn_reference_compute_hash (&vr1);
2305   if (vn_reference_lookup_1 (&vr1, &result))
2306     return result;
2307   if (TREE_CODE (value) == SSA_NAME)
2308     value_id = VN_INFO (value)->value_id;
2309   else
2310     value_id = get_or_alloc_constant_value_id (value);
2311   return vn_reference_insert_pieces (vuse, set, base_set, type,
2312 				     operands.copy (), value, value_id);
2313 }
2314 
2315 /* Return a value-number for the operation described by RES_OP, either
2316    by looking up an existing value-number for the simplified result or
2317    by inserting the operation if INSERT is true.  */
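/* The operands are valueized first; the operation is then re-simplified
   via match-and-simplify.  A simplification to a GIMPLE value is used
   directly, otherwise the (simplified) operation is looked up and, with
   INSERT, materialized as a new statement whose LHS becomes the value
   number and is marked as needing insertion.  */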
2318 
2319 static tree
2320 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2321 {
2322   tree result = NULL_TREE;
2323   /* We will be creating a value number for
2324        RCODE (OPS...).
2325      So first simplify and lookup this expression to see if it
2326      is already available.  */
2327   /* For simplification valueize.  */
2328   unsigned i;
2329   for (i = 0; i < res_op->num_ops; ++i)
2330     if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2331       {
2332 	tree tem = vn_valueize (res_op->ops[i]);
2333 	if (!tem)
2334 	  break;
2335 	res_op->ops[i] = tem;
2336       }
2337   /* If valueization of an operand fails (it is not available), skip
2338      simplification.  */
2339   bool res = false;
2340   if (i == res_op->num_ops)
2341     {
2342       mprts_hook = vn_lookup_simplify_result;
2343       res = res_op->resimplify (NULL, vn_valueize);
2344       mprts_hook = NULL;
2345     }
2346   gimple *new_stmt = NULL;
2347   if (res
2348       && gimple_simplified_result_is_gimple_val (res_op))
2349     {
2350       /* The expression is already available.  */
2351       result = res_op->ops[0];
2352       /* Valueize it; simplification only returns something in AVAIL.  */
2353       if (TREE_CODE (result) == SSA_NAME)
2354 	result = SSA_VAL (result);
2355     }
2356   else
2357     {
2358       tree val = vn_lookup_simplify_result (res_op);
2359       if (!val && insert)
2360 	{
2361 	  gimple_seq stmts = NULL;
2362 	  result = maybe_push_res_to_seq (res_op, &stmts);
2363 	  if (result)
2364 	    {
2365 	      gcc_assert (gimple_seq_singleton_p (stmts));
2366 	      new_stmt = gimple_seq_first_stmt (stmts);
2367 	    }
2368 	}
2369       else
2370 	/* The expression is already available.  */
2371 	result = val;
2372     }
2373   if (new_stmt)
2374     {
2375       /* The expression is not yet available, value-number lhs to
2376 	 the new SSA_NAME we created.  */
2377       /* Initialize value-number information properly.  */
2378       vn_ssa_aux_t result_info = VN_INFO (result);
2379       result_info->valnum = result;
2380       result_info->value_id = get_next_value_id ();
2381       result_info->visited = 1;
2382       gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2383 					  new_stmt);
2384       result_info->needs_insertion = true;
2385       /* ???  PRE phi-translation inserts NARYs without corresponding
2386          SSA name result.  Re-use those but set their result according
2387 	 to the stmt we just built.  */
2388       vn_nary_op_t nary = NULL;
2389       vn_nary_op_lookup_stmt (new_stmt, &nary);
2390       if (nary)
2391 	{
2392 	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2393 	  nary->u.result = gimple_assign_lhs (new_stmt);
2394 	}
2395       /* As all "inserted" statements are singleton SCCs, insert
2396 	 to the valid table.  This is strictly needed to
2397 	 avoid re-generating new value SSA_NAMEs for the same
2398 	 expression during SCC iteration over and over (the
2399 	 optimistic table gets cleared after each iteration).
2400 	 We do not need to insert into the optimistic table, as
2401 	 lookups there will fall back to the valid table.  */
2402       else
2403 	{
2404 	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
2405 	  vn_nary_op_t vno1
2406 	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2407 	  vno1->value_id = result_info->value_id;
2408 	  vno1->length = length;
2409 	  vno1->predicated_values = 0;
2410 	  vno1->u.result = result;
2411 	  init_vn_nary_op_from_stmt (vno1, new_stmt);
2412 	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
2413 	  /* Also do not link it into the undo chain.  */
2414 	  last_inserted_nary = vno1->next;
2415 	  vno1->next = (vn_nary_op_t)(void *)-1;
2416 	}
2417       if (dump_file && (dump_flags & TDF_DETAILS))
2418 	{
2419 	  fprintf (dump_file, "Inserting name ");
2420 	  print_generic_expr (dump_file, result);
2421 	  fprintf (dump_file, " for expression ");
2422 	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2423 	  fprintf (dump_file, "\n");
2424 	}
2425     }
2426   return result;
2427 }
2428 
2429 /* Return a value-number for the operation RES_OP, either by looking up
2430    an existing value-number for the simplified result or by inserting it.  */
2431 
2432 static tree
2433 vn_nary_build_or_lookup (gimple_match_op *res_op)
2434 {
2435   return vn_nary_build_or_lookup_1 (res_op, true);
2436 }
2437 
2438 /* Try to simplify the n-ary operation NARY and return its value if
2439    present.  */
2440 
2441 tree
2442 vn_nary_simplify (vn_nary_op_t nary)
2443 {
2444   if (nary->length > gimple_match_op::MAX_NUM_OPS)
2445     return NULL_TREE;
2446   gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2447 		      nary->type, nary->length);
2448   memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2449   return vn_nary_build_or_lookup_1 (&op, false);
2450 }
2451 
2452 /* Elimination engine.  */
2453 
2454 class eliminate_dom_walker : public dom_walker
2455 {
2456 public:
2457   eliminate_dom_walker (cdi_direction, bitmap);
2458   ~eliminate_dom_walker ();
2459 
2460   virtual edge before_dom_children (basic_block);
2461   virtual void after_dom_children (basic_block);
2462 
2463   virtual tree eliminate_avail (basic_block, tree op);
2464   virtual void eliminate_push_avail (basic_block, tree op);
2465   tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2466 
2467   void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2468 
2469   unsigned eliminate_cleanup (bool region_p = false);
2470 
2471   bool do_pre;
2472   unsigned int el_todo;
2473   unsigned int eliminations;
2474   unsigned int insertions;
2475 
2476   /* SSA names that had their defs inserted by PRE if do_pre.  */
2477   bitmap inserted_exprs;
2478 
2479   /* Blocks with statements that have had their EH properties changed.  */
2480   bitmap need_eh_cleanup;
2481 
2482   /* Blocks with statements that have had their AB properties changed.  */
2483   bitmap need_ab_cleanup;
2484 
2485   /* Local state for the eliminate domwalk.  */
2486   auto_vec<gimple *> to_remove;
2487   auto_vec<gimple *> to_fixup;
2488   auto_vec<tree> avail;
2489   auto_vec<tree> avail_stack;
2490 };
2491 
2492 /* Adaptor to the elimination engine using RPO availability.  */
2493 
2494 class rpo_elim : public eliminate_dom_walker
2495 {
2496 public:
2497   rpo_elim(basic_block entry_)
2498     : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2499       m_avail_freelist (NULL) {}
2500 
2501   virtual tree eliminate_avail (basic_block, tree op);
2502 
2503   virtual void eliminate_push_avail (basic_block, tree);
2504 
2505   basic_block entry;
2506   /* Freelist of avail entries which are allocated from the vn_ssa_aux
2507      obstack.  */
2508   vn_avail *m_avail_freelist;
2509 };
2510 
2511 /* Global RPO state for access from hooks.  */
2512 static eliminate_dom_walker *rpo_avail;
2513 basic_block vn_context_bb;
2514 
2515 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2516    same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2517    Otherwise return false.  */
2518 
2519 static bool
2520 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2521 				       tree base2, poly_int64 *offset2)
2522 {
2523   poly_int64 soff;
2524   if (TREE_CODE (base1) == MEM_REF
2525       && TREE_CODE (base2) == MEM_REF)
2526     {
2527       if (mem_ref_offset (base1).to_shwi (&soff))
2528 	{
2529 	  base1 = TREE_OPERAND (base1, 0);
2530 	  *offset1 += soff * BITS_PER_UNIT;
2531 	}
2532       if (mem_ref_offset (base2).to_shwi (&soff))
2533 	{
2534 	  base2 = TREE_OPERAND (base2, 0);
2535 	  *offset2 += soff * BITS_PER_UNIT;
2536 	}
2537       return operand_equal_p (base1, base2, 0);
2538     }
2539   return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2540 }
2541 
2542 /* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
2543    from the statement defining VUSE and if not successful tries to
2544    translate *REF and VR_ through an aggregate copy at the definition
2545    of VUSE.  If *DISAMBIGUATE_ONLY is larger than TR_TRANSLATE then do
2546    not perform translation of *REF and *VR.  If only disambiguation was
2547    performed then *DISAMBIGUATE_ONLY is adjusted accordingly.  */
2548 
2549 static void *
2550 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2551 		       translate_flags *disambiguate_only)
2552 {
2553   vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2554   vn_reference_t vr = data->vr;
2555   gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2556   tree base = ao_ref_base (ref);
2557   HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2558   static vec<vn_reference_op_s> lhs_ops;
2559   ao_ref lhs_ref;
2560   bool lhs_ref_ok = false;
2561   poly_int64 copy_size;
2562 
2563   /* First try to disambiguate after value-replacing in the definitions LHS.  */
2564   if (is_gimple_assign (def_stmt))
2565     {
2566       tree lhs = gimple_assign_lhs (def_stmt);
2567       bool valueized_anything = false;
2568       /* Avoid re-allocation overhead.  */
2569       lhs_ops.truncate (0);
2570       basic_block saved_rpo_bb = vn_context_bb;
2571       vn_context_bb = gimple_bb (def_stmt);
2572       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2573 	{
2574 	  copy_reference_ops_from_ref (lhs, &lhs_ops);
2575 	  valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2576 	}
2577       vn_context_bb = saved_rpo_bb;
2578       ao_ref_init (&lhs_ref, lhs);
2579       lhs_ref_ok = true;
2580       if (valueized_anything
2581 	  && ao_ref_init_from_vn_reference
2582 	       (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2583 		ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2584 	  && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2585 	{
2586 	  *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2587 	  return NULL;
2588 	}
2589 
2590       /* Besides valueizing the LHS we can also use access-path based
2591          disambiguation on the original non-valueized ref.  */
2592       if (!ref->ref
2593 	  && lhs_ref_ok
2594 	  && data->orig_ref.ref)
2595 	{
2596 	  /* We want to use the non-valueized LHS for this, but avoid redundant
2597 	     work.  */
2598 	  ao_ref *lref = &lhs_ref;
2599 	  ao_ref lref_alt;
2600 	  if (valueized_anything)
2601 	    {
2602 	      ao_ref_init (&lref_alt, lhs);
2603 	      lref = &lref_alt;
2604 	    }
2605 	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2606 	    {
2607 	      *disambiguate_only = (valueized_anything
2608 				    ? TR_VALUEIZE_AND_DISAMBIGUATE
2609 				    : TR_DISAMBIGUATE);
2610 	      return NULL;
2611 	    }
2612 	}
2613 
2614       /* If we reach a clobbering statement try to skip it and see if
2615          we find a VN result with exactly the same value as the
2616 	 possible clobber.  In this case we can ignore the clobber
2617 	 and return the found value.  */
2618       if (is_gimple_reg_type (TREE_TYPE (lhs))
2619 	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
2620 	  && (ref->ref || data->orig_ref.ref))
2621 	{
2622 	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2623 	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
2624 	  data->last_vuse_ptr = NULL;
2625 	  tree saved_vuse = vr->vuse;
2626 	  hashval_t saved_hashcode = vr->hashcode;
2627 	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2628 	  /* Need to restore vr->vuse and vr->hashcode.  */
2629 	  vr->vuse = saved_vuse;
2630 	  vr->hashcode = saved_hashcode;
2631 	  data->last_vuse_ptr = saved_last_vuse_ptr;
2632 	  if (res && res != (void *)-1)
2633 	    {
2634 	      vn_reference_t vnresult = (vn_reference_t) res;
2635 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2636 	      if (TREE_CODE (rhs) == SSA_NAME)
2637 		rhs = SSA_VAL (rhs);
2638 	      if (vnresult->result
2639 		  && operand_equal_p (vnresult->result, rhs, 0)
2640 		  /* We have to honor our promise about union type punning
2641 		     and also support arbitrary overlaps with
2642 		     -fno-strict-aliasing.  So simply resort to alignment to
2643 		     rule out overlaps.  Do this check last because it is
2644 		     quite expensive compared to the hash-lookup above.  */
2645 		  && multiple_p (get_object_alignment
2646 				   (ref->ref ? ref->ref : data->orig_ref.ref),
2647 				 ref->size)
2648 		  && multiple_p (get_object_alignment (lhs), ref->size))
2649 		return res;
2650 	    }
2651 	}
2652     }
2653   else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2654 	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2655 	   && gimple_call_num_args (def_stmt) <= 4)
2656     {
2657       /* For builtin calls valueize its arguments and call the
2658          alias oracle again.  Valueization may improve points-to
2659 	 info of pointers and constify size and position arguments.
2660 	 Originally this was motivated by PR61034 which has
2661 	 conditional calls to free falsely clobbering ref because
2662 	 of imprecise points-to info of the argument.  */
2663       tree oldargs[4];
2664       bool valueized_anything = false;
2665       for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2666 	{
2667 	  oldargs[i] = gimple_call_arg (def_stmt, i);
2668 	  tree val = vn_valueize (oldargs[i]);
2669 	  if (val != oldargs[i])
2670 	    {
2671 	      gimple_call_set_arg (def_stmt, i, val);
2672 	      valueized_anything = true;
2673 	    }
2674 	}
2675       if (valueized_anything)
2676 	{
2677 	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2678 					       ref, data->tbaa_p);
2679 	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2680 	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
2681 	  if (!res)
2682 	    {
2683 	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2684 	      return NULL;
2685 	    }
2686 	}
2687     }
2688 
2689   if (*disambiguate_only > TR_TRANSLATE)
2690     return (void *)-1;
2691 
2692   /* If we cannot constrain the size of the reference we cannot
2693      test if anything kills it.  */
2694   if (!ref->max_size_known_p ())
2695     return (void *)-1;
2696 
2697   poly_int64 offset = ref->offset;
2698   poly_int64 maxsize = ref->max_size;
2699 
2700   /* def_stmt may-defs *ref.  See if we can derive a value for *ref
2701      from that definition.
2702      1) Memset.  */
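  /* For example, after memset (p, 0, 16) a four-byte integer load from
     p + 4 valueizes to zero; for a non-zero constant byte the value is
     reconstructed from a buffer filled with that byte using
     native_interpret_expr.  */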
2703   if (is_gimple_reg_type (vr->type)
2704       && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2705 	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2706       && (integer_zerop (gimple_call_arg (def_stmt, 1))
2707 	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2708 	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2709 	      && CHAR_BIT == 8
2710 	      && BITS_PER_UNIT == 8
2711 	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2712 	      && offset.is_constant (&offseti)
2713 	      && ref->size.is_constant (&sizei)
2714 	      && (offseti % BITS_PER_UNIT == 0
2715 		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2716       && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2717 	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2718 	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2719       && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2720 	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2721     {
2722       tree base2;
2723       poly_int64 offset2, size2, maxsize2;
2724       bool reverse;
2725       tree ref2 = gimple_call_arg (def_stmt, 0);
2726       if (TREE_CODE (ref2) == SSA_NAME)
2727 	{
2728 	  ref2 = SSA_VAL (ref2);
2729 	  if (TREE_CODE (ref2) == SSA_NAME
2730 	      && (TREE_CODE (base) != MEM_REF
2731 		  || TREE_OPERAND (base, 0) != ref2))
2732 	    {
2733 	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2734 	      if (gimple_assign_single_p (def_stmt)
2735 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2736 		ref2 = gimple_assign_rhs1 (def_stmt);
2737 	    }
2738 	}
2739       if (TREE_CODE (ref2) == ADDR_EXPR)
2740 	{
2741 	  ref2 = TREE_OPERAND (ref2, 0);
2742 	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2743 					   &reverse);
2744 	  if (!known_size_p (maxsize2)
2745 	      || !known_eq (maxsize2, size2)
2746 	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2747 	    return (void *)-1;
2748 	}
2749       else if (TREE_CODE (ref2) == SSA_NAME)
2750 	{
2751 	  poly_int64 soff;
2752 	  if (TREE_CODE (base) != MEM_REF
2753 	      || !(mem_ref_offset (base)
2754 		   << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2755 	    return (void *)-1;
2756 	  offset += soff;
2757 	  offset2 = 0;
2758 	  if (TREE_OPERAND (base, 0) != ref2)
2759 	    {
2760 	      gimple *def = SSA_NAME_DEF_STMT (ref2);
2761 	      if (is_gimple_assign (def)
2762 		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2763 		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2764 		  && poly_int_tree_p (gimple_assign_rhs2 (def)))
2765 		{
2766 		  tree rhs2 = gimple_assign_rhs2 (def);
2767 		  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2768 					       SIGNED)
2769 			<< LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2770 		    return (void *)-1;
2771 		  ref2 = gimple_assign_rhs1 (def);
2772 		  if (TREE_CODE (ref2) == SSA_NAME)
2773 		    ref2 = SSA_VAL (ref2);
2774 		}
2775 	      else
2776 		return (void *)-1;
2777 	    }
2778 	}
2779       else
2780 	return (void *)-1;
2781       tree len = gimple_call_arg (def_stmt, 2);
2782       HOST_WIDE_INT leni, offset2i;
2783       if (TREE_CODE (len) == SSA_NAME)
2784 	len = SSA_VAL (len);
2785       /* Sometimes the above trickery is smarter than alias analysis.  Take
2786          advantage of that.  */
2787       if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2788 				   (wi::to_poly_offset (len)
2789 				    << LOG2_BITS_PER_UNIT)))
2790 	return NULL;
2791       if (data->partial_defs.is_empty ()
2792 	  && known_subrange_p (offset, maxsize, offset2,
2793 			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2794 	{
2795 	  tree val;
2796 	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2797 	    val = build_zero_cst (vr->type);
2798 	  else if (INTEGRAL_TYPE_P (vr->type)
2799 		   && known_eq (ref->size, 8)
2800 		   && offseti % BITS_PER_UNIT == 0)
2801 	    {
2802 	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2803 				      vr->type, gimple_call_arg (def_stmt, 1));
2804 	      val = vn_nary_build_or_lookup (&res_op);
2805 	      if (!val
2806 		  || (TREE_CODE (val) == SSA_NAME
2807 		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2808 		return (void *)-1;
2809 	    }
2810 	  else
2811 	    {
2812 	      unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2813 	      if (INTEGRAL_TYPE_P (vr->type))
2814 		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2815 	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2816 	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2817 		      buflen);
2818 	      if (BYTES_BIG_ENDIAN)
2819 		{
2820 		  unsigned int amnt
2821 		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
2822 		       % BITS_PER_UNIT);
2823 		  if (amnt)
2824 		    {
2825 		      shift_bytes_in_array_right (buf, buflen,
2826 						  BITS_PER_UNIT - amnt);
2827 		      buf++;
2828 		      buflen--;
2829 		    }
2830 		}
2831 	      else if (offseti % BITS_PER_UNIT != 0)
2832 		{
2833 		  unsigned int amnt
2834 		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2835 				       % BITS_PER_UNIT);
2836 		  shift_bytes_in_array_left (buf, buflen, amnt);
2837 		  buf++;
2838 		  buflen--;
2839 		}
2840 	      val = native_interpret_expr (vr->type, buf, buflen);
2841 	      if (!val)
2842 		return (void *)-1;
2843 	    }
2844 	  return data->finish (0, 0, val);
2845 	}
2846       /* For now handle clearing memory with partial defs.  */
2847       else if (known_eq (ref->size, maxsize)
2848 	       && integer_zerop (gimple_call_arg (def_stmt, 1))
2849 	       && tree_fits_poly_int64_p (len)
2850 	       && tree_to_poly_int64 (len).is_constant (&leni)
2851 	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2852 	       && offset.is_constant (&offseti)
2853 	       && offset2.is_constant (&offset2i)
2854 	       && maxsize.is_constant (&maxsizei)
2855 	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2856 					  leni << LOG2_BITS_PER_UNIT))
2857 	{
2858 	  pd_data pd;
2859 	  pd.rhs = build_constructor (NULL_TREE, NULL);
2860 	  pd.offset = offset2i;
2861 	  pd.size = leni << LOG2_BITS_PER_UNIT;
2862 	  return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2863 	}
2864     }
2865 
2866   /* 2) Assignment from an empty CONSTRUCTOR.  */
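  /* For example, after clearing an aggregate with tmp = {}; a load of a
     field of tmp that is fully covered valueizes to zero, while partial
     overlaps feed the partial-def tracker.  */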
2867   else if (is_gimple_reg_type (vr->type)
2868 	   && gimple_assign_single_p (def_stmt)
2869 	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2870 	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2871     {
2872       tree base2;
2873       poly_int64 offset2, size2, maxsize2;
2874       HOST_WIDE_INT offset2i, size2i;
2875       gcc_assert (lhs_ref_ok);
2876       base2 = ao_ref_base (&lhs_ref);
2877       offset2 = lhs_ref.offset;
2878       size2 = lhs_ref.size;
2879       maxsize2 = lhs_ref.max_size;
2880       if (known_size_p (maxsize2)
2881 	  && known_eq (maxsize2, size2)
2882 	  && adjust_offsets_for_equal_base_address (base, &offset,
2883 						    base2, &offset2))
2884 	{
2885 	  if (data->partial_defs.is_empty ()
2886 	      && known_subrange_p (offset, maxsize, offset2, size2))
2887 	    {
2888 	      /* While it is technically undefined behavior, do not optimize
2889 	         a full read from a clobber.  */
2890 	      if (gimple_clobber_p (def_stmt))
2891 		return (void *)-1;
2892 	      tree val = build_zero_cst (vr->type);
2893 	      return data->finish (ao_ref_alias_set (&lhs_ref),
2894 				   ao_ref_base_alias_set (&lhs_ref), val);
2895 	    }
2896 	  else if (known_eq (ref->size, maxsize)
2897 		   && maxsize.is_constant (&maxsizei)
2898 		   && offset.is_constant (&offseti)
2899 		   && offset2.is_constant (&offset2i)
2900 		   && size2.is_constant (&size2i)
2901 		   && ranges_known_overlap_p (offseti, maxsizei,
2902 					      offset2i, size2i))
2903 	    {
2904 	      /* Let clobbers be consumed by the partial-def tracker
2905 	         which can choose to ignore them if they are shadowed
2906 		 by a later def.  */
2907 	      pd_data pd;
2908 	      pd.rhs = gimple_assign_rhs1 (def_stmt);
2909 	      pd.offset = offset2i;
2910 	      pd.size = size2i;
2911 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2912 					     ao_ref_base_alias_set (&lhs_ref),
2913 					     offseti, maxsizei);
2914 	    }
2915 	}
2916     }
2917 
2918   /* 3) Assignment from a constant.  We can use fold's native
2919      encode/interpret routines to extract the assigned bits.  */
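  /* For example, after *(short *)p = 0x1234; a one-byte load from p can
     be computed by native-encoding the stored constant and interpreting
     the addressed byte in the type of the load.  */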
2920   else if (known_eq (ref->size, maxsize)
2921 	   && is_gimple_reg_type (vr->type)
2922 	   && !reverse_storage_order_for_component_p (vr->operands)
2923 	   && !contains_storage_order_barrier_p (vr->operands)
2924 	   && gimple_assign_single_p (def_stmt)
2925 	   && CHAR_BIT == 8
2926 	   && BITS_PER_UNIT == 8
2927 	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2928 	   /* native_encode and native_interpret operate on arrays of bytes
2929 	      and so fundamentally need a compile-time size and offset.  */
2930 	   && maxsize.is_constant (&maxsizei)
2931 	   && offset.is_constant (&offseti)
2932 	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2933 	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2934 		   && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2935     {
2936       tree lhs = gimple_assign_lhs (def_stmt);
2937       tree base2;
2938       poly_int64 offset2, size2, maxsize2;
2939       HOST_WIDE_INT offset2i, size2i;
2940       bool reverse;
2941       gcc_assert (lhs_ref_ok);
2942       base2 = ao_ref_base (&lhs_ref);
2943       offset2 = lhs_ref.offset;
2944       size2 = lhs_ref.size;
2945       maxsize2 = lhs_ref.max_size;
2946       reverse = reverse_storage_order_for_component_p (lhs);
2947       if (base2
2948 	  && !reverse
2949 	  && !storage_order_barrier_p (lhs)
2950 	  && known_eq (maxsize2, size2)
2951 	  && adjust_offsets_for_equal_base_address (base, &offset,
2952 						    base2, &offset2)
2953 	  && offset.is_constant (&offseti)
2954 	  && offset2.is_constant (&offset2i)
2955 	  && size2.is_constant (&size2i))
2956 	{
2957 	  if (data->partial_defs.is_empty ()
2958 	      && known_subrange_p (offseti, maxsizei, offset2, size2))
2959 	    {
2960 	      /* We support up to 512-bit values (for V8DFmode).  */
2961 	      unsigned char buffer[65];
2962 	      int len;
2963 
2964 	      tree rhs = gimple_assign_rhs1 (def_stmt);
2965 	      if (TREE_CODE (rhs) == SSA_NAME)
2966 		rhs = SSA_VAL (rhs);
2967 	      len = native_encode_expr (rhs,
2968 					buffer, sizeof (buffer) - 1,
2969 					(offseti - offset2i) / BITS_PER_UNIT);
2970 	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2971 		{
2972 		  tree type = vr->type;
2973 		  unsigned char *buf = buffer;
2974 		  unsigned int amnt = 0;
2975 		  /* Make sure to interpret in a type that has a range
2976 		     covering the whole access size.  */
2977 		  if (INTEGRAL_TYPE_P (vr->type)
2978 		      && maxsizei != TYPE_PRECISION (vr->type))
2979 		    type = build_nonstandard_integer_type (maxsizei,
2980 							   TYPE_UNSIGNED (type));
2981 		  if (BYTES_BIG_ENDIAN)
2982 		    {
2983 		      /* For big-endian native_encode_expr stored the rhs
2984 			 such that the LSB of it is the LSB of buffer[len - 1].
2985 			 That bit is stored into memory at position
2986 			 offset2 + size2 - 1, i.e. in byte
2987 			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2988 			 E.g. for offset2 1 and size2 14, rhs -1 and memory
2989 			 previously cleared that is:
2990 			 0        1
2991 			 01111111|11111110
2992 			 Now, if we want to extract offset 2 and size 12 from
2993 			 it using native_interpret_expr (which actually works
2994 			 for integral bitfield types in terms of byte size of
2995 			 the mode), the native_encode_expr stored the value
2996 			 into buffer as
2997 			 XX111111|11111111
2998 			 and returned len 2 (the X bits are outside of
2999 			 precision).
3000 			 Let sz be maxsize / BITS_PER_UNIT if not extracting
3001 			 a bitfield, and GET_MODE_SIZE otherwise.
3002 			 We need to align the LSB of the value we want to
3003 			 extract as the LSB of buf[sz - 1].
3004 			 The LSB from memory we need to read is at position
3005 			 offset + maxsize - 1.  */
3006 		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3007 		      if (INTEGRAL_TYPE_P (type))
3008 			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3009 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3010 			      - offseti - maxsizei) % BITS_PER_UNIT;
3011 		      if (amnt)
3012 			shift_bytes_in_array_right (buffer, len, amnt);
3013 		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3014 			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3015 		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3016 			len = 0;
3017 		      else
3018 			{
3019 			  buf = buffer + len - sz - amnt;
3020 			  len -= (buf - buffer);
3021 			}
3022 		    }
3023 		  else
3024 		    {
3025 		      amnt = ((unsigned HOST_WIDE_INT) offset2i
3026 			      - offseti) % BITS_PER_UNIT;
3027 		      if (amnt)
3028 			{
3029 			  buffer[len] = 0;
3030 			  shift_bytes_in_array_left (buffer, len + 1, amnt);
3031 			  buf = buffer + 1;
3032 			}
3033 		    }
3034 		  tree val = native_interpret_expr (type, buf, len);
3035 		  /* If we chop off bits because the type's precision doesn't
3036 		     match the memory access size, this is OK when optimizing
3037 		     reads but not when called from the DSE code during
3038 		     elimination.  */
3039 		  if (val
3040 		      && type != vr->type)
3041 		    {
3042 		      if (! int_fits_type_p (val, vr->type))
3043 			val = NULL_TREE;
3044 		      else
3045 			val = fold_convert (vr->type, val);
3046 		    }
3047 
3048 		  if (val)
3049 		    return data->finish (ao_ref_alias_set (&lhs_ref),
3050 					 ao_ref_base_alias_set (&lhs_ref), val);
3051 		}
3052 	    }
3053 	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3054 					   size2i))
3055 	    {
3056 	      pd_data pd;
3057 	      tree rhs = gimple_assign_rhs1 (def_stmt);
3058 	      if (TREE_CODE (rhs) == SSA_NAME)
3059 		rhs = SSA_VAL (rhs);
3060 	      pd.rhs = rhs;
3061 	      pd.offset = offset2i;
3062 	      pd.size = size2i;
3063 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3064 					     ao_ref_base_alias_set (&lhs_ref),
3065 					     offseti, maxsizei);
3066 	    }
3067 	}
3068     }
3069 
3070   /* 4) Assignment from an SSA name whose definition we may be able
3071      to access pieces from, or which we can combine to a larger entity.  */
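  /* For example, after *p = x_1; a smaller load from *p can be
     re-expressed as BIT_FIELD_REF <x_1, size, position> (or a
     VIEW_CONVERT_EXPR for a same-sized load) and value-numbered with
     vn_nary_build_or_lookup.  */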
3072   else if (known_eq (ref->size, maxsize)
3073 	   && is_gimple_reg_type (vr->type)
3074 	   && !reverse_storage_order_for_component_p (vr->operands)
3075 	   && !contains_storage_order_barrier_p (vr->operands)
3076 	   && gimple_assign_single_p (def_stmt)
3077 	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3078     {
3079       tree lhs = gimple_assign_lhs (def_stmt);
3080       tree base2;
3081       poly_int64 offset2, size2, maxsize2;
3082       HOST_WIDE_INT offset2i, size2i, offseti;
3083       bool reverse;
3084       gcc_assert (lhs_ref_ok);
3085       base2 = ao_ref_base (&lhs_ref);
3086       offset2 = lhs_ref.offset;
3087       size2 = lhs_ref.size;
3088       maxsize2 = lhs_ref.max_size;
3089       reverse = reverse_storage_order_for_component_p (lhs);
3090       tree def_rhs = gimple_assign_rhs1 (def_stmt);
3091       if (!reverse
3092 	  && !storage_order_barrier_p (lhs)
3093 	  && known_size_p (maxsize2)
3094 	  && known_eq (maxsize2, size2)
3095 	  && adjust_offsets_for_equal_base_address (base, &offset,
3096 						    base2, &offset2))
3097 	{
3098 	  if (data->partial_defs.is_empty ()
3099 	      && known_subrange_p (offset, maxsize, offset2, size2)
3100 	      /* ???  We can't handle bitfield precision extracts without
3101 		 either using an alternate type for the BIT_FIELD_REF and
3102 		 then doing a conversion or possibly adjusting the offset
3103 		 according to endianness.  */
3104 	      && (! INTEGRAL_TYPE_P (vr->type)
3105 		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3106 	      && multiple_p (ref->size, BITS_PER_UNIT))
3107 	    {
3108 	      tree val = NULL_TREE;
3109 	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3110 		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3111 		{
3112 		  gimple_match_op op (gimple_match_cond::UNCOND,
3113 				      BIT_FIELD_REF, vr->type,
3114 				      SSA_VAL (def_rhs),
3115 				      bitsize_int (ref->size),
3116 				      bitsize_int (offset - offset2));
3117 		  val = vn_nary_build_or_lookup (&op);
3118 		}
3119 	      else if (known_eq (ref->size, size2))
3120 		{
3121 		  gimple_match_op op (gimple_match_cond::UNCOND,
3122 				      VIEW_CONVERT_EXPR, vr->type,
3123 				      SSA_VAL (def_rhs));
3124 		  val = vn_nary_build_or_lookup (&op);
3125 		}
3126 	      if (val
3127 		  && (TREE_CODE (val) != SSA_NAME
3128 		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3129 		return data->finish (ao_ref_alias_set (&lhs_ref),
3130 				     ao_ref_base_alias_set (&lhs_ref), val);
3131 	    }
3132 	  else if (maxsize.is_constant (&maxsizei)
3133 		   && offset.is_constant (&offseti)
3134 		   && offset2.is_constant (&offset2i)
3135 		   && size2.is_constant (&size2i)
3136 		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3137 	    {
3138 	      pd_data pd;
3139 	      pd.rhs = SSA_VAL (def_rhs);
3140 	      pd.offset = offset2i;
3141 	      pd.size = size2i;
3142 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3143 					     ao_ref_base_alias_set (&lhs_ref),
3144 					     offseti, maxsizei);
3145 	    }
3146 	}
3147     }
3148 
3149   /* 5) For aggregate copies translate the reference through them if
3150      the copy kills ref.  */
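  /* For example, for a struct copy b = a; a later load of b.f keeps its
     outer component ops, substitutes the operands of a for the common
     base b, and retries the lookup as a.f.  */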
3151   else if (data->vn_walk_kind == VN_WALKREWRITE
3152 	   && gimple_assign_single_p (def_stmt)
3153 	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
3154 	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3155 	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3156     {
3157       tree base2;
3158       int i, j, k;
3159       auto_vec<vn_reference_op_s> rhs;
3160       vn_reference_op_t vro;
3161       ao_ref r;
3162 
3163       gcc_assert (lhs_ref_ok);
3164 
3165       /* See if the assignment kills REF.  */
3166       base2 = ao_ref_base (&lhs_ref);
3167       if (!lhs_ref.max_size_known_p ()
3168 	  || (base != base2
3169 	      && (TREE_CODE (base) != MEM_REF
3170 		  || TREE_CODE (base2) != MEM_REF
3171 		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3172 		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3173 					  TREE_OPERAND (base2, 1))))
3174 	  || !stmt_kills_ref_p (def_stmt, ref))
3175 	return (void *)-1;
3176 
3177       /* Find the common base of ref and the lhs.  lhs_ops already
3178          contains valueized operands for the lhs.  */
3179       i = vr->operands.length () - 1;
3180       j = lhs_ops.length () - 1;
3181       while (j >= 0 && i >= 0
3182 	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3183 	{
3184 	  i--;
3185 	  j--;
3186 	}
3187 
3188       /* ???  The innermost op should always be a MEM_REF and we already
3189          checked that the assignment to the lhs kills vr.  Thus for
3190 	 aggregate copies using char[] types the vn_reference_op_eq
3191 	 may fail when comparing types for compatibility.  But we really
3192 	 don't care here - further lookups with the rewritten operands
3193 	 will simply fail if we messed up types too badly.  */
3194       poly_int64 extra_off = 0;
3195       if (j == 0 && i >= 0
3196 	  && lhs_ops[0].opcode == MEM_REF
3197 	  && maybe_ne (lhs_ops[0].off, -1))
3198 	{
3199 	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3200 	    i--, j--;
3201 	  else if (vr->operands[i].opcode == MEM_REF
3202 		   && maybe_ne (vr->operands[i].off, -1))
3203 	    {
3204 	      extra_off = vr->operands[i].off - lhs_ops[0].off;
3205 	      i--, j--;
3206 	    }
3207 	}
3208 
3209       /* i now points to the first additional op.
3210 	 ???  LHS may not be completely contained in VR, one or more
3211 	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
3212 	 try handling outermost VIEW_CONVERT_EXPRs.  */
3213       if (j != -1)
3214 	return (void *)-1;
3215 
3216       /* Punt if the additional ops contain a storage order barrier.  */
3217       for (k = i; k >= 0; k--)
3218 	{
3219 	  vro = &vr->operands[k];
3220 	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3221 	    return (void *)-1;
3222 	}
3223 
3224       /* Now re-write REF to be based on the rhs of the assignment.  */
3225       tree rhs1 = gimple_assign_rhs1 (def_stmt);
3226       copy_reference_ops_from_ref (rhs1, &rhs);
3227 
3228       /* Apply an extra offset to the inner MEM_REF of the RHS.  */
3229       if (maybe_ne (extra_off, 0))
3230 	{
3231 	  if (rhs.length () < 2)
3232 	    return (void *)-1;
3233 	  int ix = rhs.length () - 2;
3234 	  if (rhs[ix].opcode != MEM_REF
3235 	      || known_eq (rhs[ix].off, -1))
3236 	    return (void *)-1;
3237 	  rhs[ix].off += extra_off;
3238 	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3239 					 build_int_cst (TREE_TYPE (rhs[ix].op0),
3240 							extra_off));
3241 	}
3242 
3243       /* Save the operands since we need to use the original ones for
3244 	 the hash entry we use.  */
3245       if (!data->saved_operands.exists ())
3246 	data->saved_operands = vr->operands.copy ();
3247 
3248       /* We need to prepend vr->operands[0..i] to rhs.  */
3249       vec<vn_reference_op_s> old = vr->operands;
3250       if (i + 1 + rhs.length () > vr->operands.length ())
3251 	vr->operands.safe_grow (i + 1 + rhs.length (), true);
3252       else
3253 	vr->operands.truncate (i + 1 + rhs.length ());
3254       FOR_EACH_VEC_ELT (rhs, j, vro)
3255 	vr->operands[i + 1 + j] = *vro;
3256       valueize_refs (&vr->operands);
3257       if (old == shared_lookup_references)
3258 	shared_lookup_references = vr->operands;
3259       vr->hashcode = vn_reference_compute_hash (vr);
3260 
3261       /* Try folding the new reference to a constant.  */
3262       tree val = fully_constant_vn_reference_p (vr);
3263       if (val)
3264 	{
3265 	  if (data->partial_defs.is_empty ())
3266 	    return data->finish (ao_ref_alias_set (&lhs_ref),
3267 				 ao_ref_base_alias_set (&lhs_ref), val);
3268 	  /* This is the only interesting case for partial-def handling
3269 	     coming from targets that like to gimplify init-ctors as
3270 	     aggregate copies from constant data like aarch64 for
3271 	     PR83518.  */
3272 	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3273 	    {
3274 	      pd_data pd;
3275 	      pd.rhs = val;
3276 	      pd.offset = 0;
3277 	      pd.size = maxsizei;
3278 	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3279 					     ao_ref_base_alias_set (&lhs_ref),
3280 					     0, maxsizei);
3281 	    }
3282 	}
3283 
3284       /* Continuing with partial defs isn't easily possible here; we
3285          have to find a full def from further lookups from here.  Probably
3286 	 not worth special-casing this everywhere.  */
3287       if (!data->partial_defs.is_empty ())
3288 	return (void *)-1;
3289 
3290       /* Adjust *ref from the new operands.  */
3291       ao_ref rhs1_ref;
3292       ao_ref_init (&rhs1_ref, rhs1);
3293       if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3294 					  ao_ref_base_alias_set (&rhs1_ref),
3295 					  vr->type, vr->operands))
3296 	return (void *)-1;
3297       /* This can happen with bitfields.  */
3298       if (maybe_ne (ref->size, r.size))
3299 	{
3300 	  /* If the access lacks some subsetting simply apply that by
3301 	     shortening it.  That in the end can only be successful
3302 	     if we can pun the lookup result which in turn requires
3303 	     exact offsets.  */
3304 	  if (known_eq (r.size, r.max_size)
3305 	      && known_lt (ref->size, r.size))
3306 	    r.size = r.max_size = ref->size;
3307 	  else
3308 	    return (void *)-1;
3309 	}
3310       *ref = r;
3311 
3312       /* Do not update last seen VUSE after translating.  */
3313       data->last_vuse_ptr = NULL;
3314       /* Invalidate the original access path since it now contains
3315          the wrong base.  */
3316       data->orig_ref.ref = NULL_TREE;
3317       /* Use the alias-set of this LHS for recording an eventual result.  */
3318       if (data->first_set == -2)
3319 	{
3320 	  data->first_set = ao_ref_alias_set (&lhs_ref);
3321 	  data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3322 	}
3323 
3324       /* Keep looking for the adjusted *REF / VR pair.  */
3325       return NULL;
3326     }
3327 
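  /* Illustrative sketch for case 5 (hypothetical names, not taken from a
     testcase): for an aggregate copy

       a = b;
       ... = a.f;

     the common base operands of the lookup reference and the copy's LHS
     are stripped and the remaining component is re-based on b, so the
     walk continues searching for a dominating definition of b.f and the
     load can reuse its value.  */
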
3328   /* 6) For memcpy copies translate the reference through them if the copy
3329      kills ref.  But we cannot (easily) do this translation if the memcpy is
3330      a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3331      can modify the storage order of objects (see storage_order_barrier_p).  */
3332   else if (data->vn_walk_kind == VN_WALKREWRITE
3333 	   && is_gimple_reg_type (vr->type)
3334 	   /* ???  Handle BCOPY as well.  */
3335 	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3336 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3337 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3338 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3339 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3340 	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3341 	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3342 	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3343 	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3344 	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3345 	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3346 	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3347 		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3348 				       &copy_size)))
3349 	   /* Handling this is more complicated, give up for now.  */
3350 	   && data->partial_defs.is_empty ())
3351     {
3352       tree lhs, rhs;
3353       ao_ref r;
3354       poly_int64 rhs_offset, lhs_offset;
3355       vn_reference_op_s op;
3356       poly_uint64 mem_offset;
3357       poly_int64 at, byte_maxsize;
3358 
3359       /* Only handle non-variable, addressable refs.  */
3360       if (maybe_ne (ref->size, maxsize)
3361 	  || !multiple_p (offset, BITS_PER_UNIT, &at)
3362 	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3363 	return (void *)-1;
3364 
3365       /* Extract a pointer base and an offset for the destination.  */
3366       lhs = gimple_call_arg (def_stmt, 0);
3367       lhs_offset = 0;
3368       if (TREE_CODE (lhs) == SSA_NAME)
3369 	{
3370 	  lhs = vn_valueize (lhs);
3371 	  if (TREE_CODE (lhs) == SSA_NAME)
3372 	    {
3373 	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3374 	      if (gimple_assign_single_p (def_stmt)
3375 		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3376 		lhs = gimple_assign_rhs1 (def_stmt);
3377 	    }
3378 	}
3379       if (TREE_CODE (lhs) == ADDR_EXPR)
3380 	{
3381 	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3382 	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3383 	    return (void *)-1;
3384 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3385 						    &lhs_offset);
3386 	  if (!tem)
3387 	    return (void *)-1;
3388 	  if (TREE_CODE (tem) == MEM_REF
3389 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3390 	    {
3391 	      lhs = TREE_OPERAND (tem, 0);
3392 	      if (TREE_CODE (lhs) == SSA_NAME)
3393 		lhs = vn_valueize (lhs);
3394 	      lhs_offset += mem_offset;
3395 	    }
3396 	  else if (DECL_P (tem))
3397 	    lhs = build_fold_addr_expr (tem);
3398 	  else
3399 	    return (void *)-1;
3400 	}
3401       if (TREE_CODE (lhs) != SSA_NAME
3402 	  && TREE_CODE (lhs) != ADDR_EXPR)
3403 	return (void *)-1;
3404 
3405       /* Extract a pointer base and an offset for the source.  */
3406       rhs = gimple_call_arg (def_stmt, 1);
3407       rhs_offset = 0;
3408       if (TREE_CODE (rhs) == SSA_NAME)
3409 	rhs = vn_valueize (rhs);
3410       if (TREE_CODE (rhs) == ADDR_EXPR)
3411 	{
3412 	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3413 	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3414 	    return (void *)-1;
3415 	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3416 						    &rhs_offset);
3417 	  if (!tem)
3418 	    return (void *)-1;
3419 	  if (TREE_CODE (tem) == MEM_REF
3420 	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3421 	    {
3422 	      rhs = TREE_OPERAND (tem, 0);
3423 	      rhs_offset += mem_offset;
3424 	    }
3425 	  else if (DECL_P (tem)
3426 		   || TREE_CODE (tem) == STRING_CST)
3427 	    rhs = build_fold_addr_expr (tem);
3428 	  else
3429 	    return (void *)-1;
3430 	}
3431       if (TREE_CODE (rhs) == SSA_NAME)
3432 	rhs = SSA_VAL (rhs);
3433       else if (TREE_CODE (rhs) != ADDR_EXPR)
3434 	return (void *)-1;
3435 
3436       /* The bases of the destination and the reference have to agree.  */
3437       if (TREE_CODE (base) == MEM_REF)
3438 	{
3439 	  if (TREE_OPERAND (base, 0) != lhs
3440 	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3441 	    return (void *) -1;
3442 	  at += mem_offset;
3443 	}
3444       else if (!DECL_P (base)
3445 	       || TREE_CODE (lhs) != ADDR_EXPR
3446 	       || TREE_OPERAND (lhs, 0) != base)
3447 	return (void *)-1;
3448 
3449       /* If the access is completely outside of the memcpy destination
3450 	 area there is no aliasing.  */
3451       if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3452 	return NULL;
3453       /* And the access has to be contained within the memcpy destination.  */
3454       if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3455 	return (void *)-1;
3456 
3457       /* Save the operands since we need to use the original ones for
3458 	 the hash entry we use.  */
3459       if (!data->saved_operands.exists ())
3460 	data->saved_operands = vr->operands.copy ();
3461 
3462       /* Make room for 2 operands in the new reference.  */
3463       if (vr->operands.length () < 2)
3464 	{
3465 	  vec<vn_reference_op_s> old = vr->operands;
3466 	  vr->operands.safe_grow_cleared (2, true);
3467 	  if (old == shared_lookup_references)
3468 	    shared_lookup_references = vr->operands;
3469 	}
3470       else
3471 	vr->operands.truncate (2);
3472 
3473       /* The looked-through reference is a simple MEM_REF.  */
3474       memset (&op, 0, sizeof (op));
3475       op.type = vr->type;
3476       op.opcode = MEM_REF;
3477       op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3478       op.off = at - lhs_offset + rhs_offset;
3479       vr->operands[0] = op;
3480       op.type = TREE_TYPE (rhs);
3481       op.opcode = TREE_CODE (rhs);
3482       op.op0 = rhs;
3483       op.off = -1;
3484       vr->operands[1] = op;
3485       vr->hashcode = vn_reference_compute_hash (vr);
3486 
3487       /* Try folding the new reference to a constant.  */
3488       tree val = fully_constant_vn_reference_p (vr);
3489       if (val)
3490 	return data->finish (0, 0, val);
3491 
3492       /* Adjust *ref from the new operands.  */
3493       if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3494 	return (void *)-1;
3495       /* This can happen with bitfields.  */
3496       if (maybe_ne (ref->size, r.size))
3497 	return (void *)-1;
3498       *ref = r;
3499 
3500       /* Do not update last seen VUSE after translating.  */
3501       data->last_vuse_ptr = NULL;
3502       /* Invalidate the original access path since it now contains
3503          the wrong base.  */
3504       data->orig_ref.ref = NULL_TREE;
3505       /* Use the alias-set of this stmt for recording an eventual result.  */
3506       if (data->first_set == -2)
3507 	{
3508 	  data->first_set = 0;
3509 	  data->first_base_set = 0;
3510 	}
3511 
3512       /* Keep looking for the adjusted *REF / VR pair.  */
3513       return NULL;
3514     }
3515 
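  /* Illustrative sketch for case 6 (hypothetical names): given

       memcpy (&d, &s, sizeof (d));
       ... = d.f;

     and a read fully contained in the copied destination range, the
     reference is rewritten into a MEM_REF on the copy source at byte
     offset "at - lhs_offset + rhs_offset", and the walk continues
     looking for a dominating definition of those bytes of s.  */
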
3516   /* Bail out and stop walking.  */
3517   return (void *)-1;
3518 }
3519 
3520 /* Return a reference op vector from OP that can be used for
3521    vn_reference_lookup_pieces.  The caller is responsible for releasing
3522    the vector.  */
3523 
3524 vec<vn_reference_op_s>
3525 vn_reference_operands_for_lookup (tree op)
3526 {
3527   bool valueized;
3528   return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3529 }
3530 
3531 /* Lookup a reference operation by its parts in the current hash table.
3532    Returns the resulting value number if it exists in the hash table,
3533    NULL_TREE otherwise.  VNRESULT will be filled in with the actual
3534    vn_reference_t stored in the hashtable if something is found.  */
3535 
3536 tree
3537 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3538 			    alias_set_type base_set, tree type,
3539 			    vec<vn_reference_op_s> operands,
3540 			    vn_reference_t *vnresult, vn_lookup_kind kind)
3541 {
3542   struct vn_reference_s vr1;
3543   vn_reference_t tmp;
3544   tree cst;
3545 
3546   if (!vnresult)
3547     vnresult = &tmp;
3548   *vnresult = NULL;
3549 
3550   vr1.vuse = vuse_ssa_val (vuse);
3551   shared_lookup_references.truncate (0);
3552   shared_lookup_references.safe_grow (operands.length (), true);
3553   memcpy (shared_lookup_references.address (),
3554 	  operands.address (),
3555 	  sizeof (vn_reference_op_s)
3556 	  * operands.length ());
3557   bool valueized_p;
3558   valueize_refs_1 (&shared_lookup_references, &valueized_p);
3559   vr1.operands = shared_lookup_references;
3560   vr1.type = type;
3561   vr1.set = set;
3562   vr1.base_set = base_set;
3563   vr1.hashcode = vn_reference_compute_hash (&vr1);
3564   if ((cst = fully_constant_vn_reference_p (&vr1)))
3565     return cst;
3566 
3567   vn_reference_lookup_1 (&vr1, vnresult);
3568   if (!*vnresult
3569       && kind != VN_NOWALK
3570       && vr1.vuse)
3571     {
3572       ao_ref r;
3573       unsigned limit = param_sccvn_max_alias_queries_per_access;
3574       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3575       vec<vn_reference_op_s> ops_for_ref;
3576       if (!valueized_p)
3577 	ops_for_ref = vr1.operands;
3578       else
3579 	{
3580 	  /* For ao_ref_from_mem we have to ensure only available SSA names
3581 	     end up in base and the only convenient way to make this work
3582 	     for PRE is to re-valueize with that in mind.  */
3583 	  ops_for_ref.create (operands.length ());
3584 	  ops_for_ref.quick_grow (operands.length ());
3585 	  memcpy (ops_for_ref.address (),
3586 		  operands.address (),
3587 		  sizeof (vn_reference_op_s)
3588 		  * operands.length ());
3589 	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3590 	}
3591       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3592 					 ops_for_ref))
3593 	*vnresult
3594 	  = ((vn_reference_t)
3595 	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3596 				     vn_reference_lookup_3, vuse_valueize,
3597 				     limit, &data));
3598       if (ops_for_ref != shared_lookup_references)
3599 	ops_for_ref.release ();
3600       gcc_checking_assert (vr1.operands == shared_lookup_references);
3601     }
3602 
3603   if (*vnresult)
3604      return (*vnresult)->result;
3605 
3606   return NULL_TREE;
3607 }
3608 
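/* Minimal usage sketch for the by-pieces API (an illustration only;
   EXPR and STMT are placeholder names, and the same alias set is used
   for SET and BASE_SET for brevity):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (expr);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (gimple_vuse (stmt),
					    get_alias_set (expr),
					    get_alias_set (expr),
					    TREE_TYPE (expr), ops, &res,
					    VN_WALKREWRITE);
     ops.release ();

   As documented above, the caller owns and releases the operand
   vector.  */
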
3609 /* Lookup OP in the current hash table, and return the resulting value
3610    number if it exists in the hash table.  Return NULL_TREE if it does
3611    not exist in the hash table or if the result field of the structure
3612    was NULL.  VNRESULT will be filled in with the vn_reference_t
3613    stored in the hashtable if one exists.  When TBAA_P is false assume
3614    we are looking up a store and treat it as having alias-set zero.
3615    *LAST_VUSE_PTR will be updated with the VUSE at which the value lookup
3616    succeeded.  MASK is either NULL_TREE or an INTEGER_CST; in the latter
3617    case the result of the load is bitwise ANDed with MASK, so we are only
3618    interested in that subset of the bits and can ignore whether the other
3619    bits are uninitialized or not initialized with constants.  */
3620 
3621 tree
3622 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3623 		     vn_reference_t *vnresult, bool tbaa_p,
3624 		     tree *last_vuse_ptr, tree mask)
3625 {
3626   vec<vn_reference_op_s> operands;
3627   struct vn_reference_s vr1;
3628   bool valueized_anything;
3629 
3630   if (vnresult)
3631     *vnresult = NULL;
3632 
3633   vr1.vuse = vuse_ssa_val (vuse);
3634   vr1.operands = operands
3635     = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3636   vr1.type = TREE_TYPE (op);
3637   ao_ref op_ref;
3638   ao_ref_init (&op_ref, op);
3639   vr1.set = ao_ref_alias_set (&op_ref);
3640   vr1.base_set = ao_ref_base_alias_set (&op_ref);
3641   vr1.hashcode = vn_reference_compute_hash (&vr1);
3642   if (mask == NULL_TREE)
3643     if (tree cst = fully_constant_vn_reference_p (&vr1))
3644       return cst;
3645 
3646   if (kind != VN_NOWALK && vr1.vuse)
3647     {
3648       vn_reference_t wvnresult;
3649       ao_ref r;
3650       unsigned limit = param_sccvn_max_alias_queries_per_access;
3651       auto_vec<vn_reference_op_s> ops_for_ref;
3652       if (valueized_anything)
3653 	{
3654 	  copy_reference_ops_from_ref (op, &ops_for_ref);
3655 	  bool tem;
3656 	  valueize_refs_1 (&ops_for_ref, &tem, true);
3657 	}
3658       /* Make sure to use a valueized reference if we valueized anything.
3659          Otherwise preserve the full reference for advanced TBAA.  */
3660       if (!valueized_anything
3661 	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3662 					     vr1.type, ops_for_ref))
3663 	ao_ref_init (&r, op);
3664       vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3665 			    last_vuse_ptr, kind, tbaa_p, mask);
3666 
3667       wvnresult
3668 	= ((vn_reference_t)
3669 	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3670 				   vn_reference_lookup_3, vuse_valueize, limit,
3671 				   &data));
3672       gcc_checking_assert (vr1.operands == shared_lookup_references);
3673       if (wvnresult)
3674 	{
3675 	  gcc_assert (mask == NULL_TREE);
3676 	  if (vnresult)
3677 	    *vnresult = wvnresult;
3678 	  return wvnresult->result;
3679 	}
3680       else if (mask)
3681 	return data.masked_result;
3682 
3683       return NULL_TREE;
3684     }
3685 
3686   if (last_vuse_ptr)
3687     *last_vuse_ptr = vr1.vuse;
3688   if (mask)
3689     return NULL_TREE;
3690   return vn_reference_lookup_1 (&vr1, vnresult);
3691 }
3692 
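/* Minimal usage sketch (illustrative; STMT is a placeholder for a
   load statement LHS = REF):

     tree last_vuse = gimple_vuse (stmt);
     vn_reference_t vnresult = NULL;
     tree val = vn_reference_lookup (gimple_assign_rhs1 (stmt),
				     gimple_vuse (stmt), VN_WALKREWRITE,
				     &vnresult, true, &last_vuse, NULL_TREE);

   Passing an INTEGER_CST as MASK instead of NULL_TREE asks only for
   the masked bits; in that case the value is returned via the walker
   data (masked_result) rather than as a hashtable entry.  */
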
3693 /* Lookup CALL in the current hash table and return the entry in
3694    *VNRESULT if found.  Populates *VR for the hashtable lookup.  */
3695 
3696 void
3697 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3698 			  vn_reference_t vr)
3699 {
3700   if (vnresult)
3701     *vnresult = NULL;
3702 
3703   tree vuse = gimple_vuse (call);
3704 
3705   vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3706   vr->operands = valueize_shared_reference_ops_from_call (call);
3707   vr->type = gimple_expr_type (call);
3708   vr->punned = false;
3709   vr->set = 0;
3710   vr->base_set = 0;
3711   vr->hashcode = vn_reference_compute_hash (vr);
3712   vn_reference_lookup_1 (vr, vnresult);
3713 }
3714 
3715 /* Insert OP into the current hash table with a value number of RESULT.  */
3716 
3717 static void
3718 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3719 {
3720   vn_reference_s **slot;
3721   vn_reference_t vr1;
3722   bool tem;
3723 
3724   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3725   if (TREE_CODE (result) == SSA_NAME)
3726     vr1->value_id = VN_INFO (result)->value_id;
3727   else
3728     vr1->value_id = get_or_alloc_constant_value_id (result);
3729   vr1->vuse = vuse_ssa_val (vuse);
3730   vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3731   vr1->type = TREE_TYPE (op);
3732   vr1->punned = false;
3733   ao_ref op_ref;
3734   ao_ref_init (&op_ref, op);
3735   vr1->set = ao_ref_alias_set (&op_ref);
3736   vr1->base_set = ao_ref_base_alias_set (&op_ref);
3737   vr1->hashcode = vn_reference_compute_hash (vr1);
3738   vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3739   vr1->result_vdef = vdef;
3740 
3741   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3742 						      INSERT);
3743 
3744   /* Because IL walking on reference lookup can end up visiting
3745      a def that is only to be visited later in iteration order
3746      when we are about to make an irreducible region reducible,
3747      the def can be effectively processed and its ref already
3748      inserted by vn_reference_lookup_3.  So we cannot assert (!*slot)
3749      but instead save a lookup if we deal with already inserted refs here.  */
3750   if (*slot)
3751     {
3752       /* We cannot assert that we have the same value either because
3753          when disentangling an irreducible region we may end up visiting
3754 	 a use before the corresponding def.  That's a missed optimization
3755 	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
3756       if (dump_file && (dump_flags & TDF_DETAILS)
3757 	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
3758 	{
3759 	  fprintf (dump_file, "Keeping old value ");
3760 	  print_generic_expr (dump_file, (*slot)->result);
3761 	  fprintf (dump_file, " because of collision\n");
3762 	}
3763       free_reference (vr1);
3764       obstack_free (&vn_tables_obstack, vr1);
3765       return;
3766     }
3767 
3768   *slot = vr1;
3769   vr1->next = last_inserted_ref;
3770   last_inserted_ref = vr1;
3771 }
3772 
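/* Illustrative pairing (a sketch, not the exact code of the visit_*
   routines): after a load LHS = REF fails to value-number to anything
   known, it is recorded so later equivalent loads can reuse LHS:

     vn_reference_insert (ref, lhs, gimple_vuse (stmt), NULL_TREE);

   A store would instead record the stored value under the LHS
   reference together with the statement's VDEF.  */
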
3773 /* Insert a reference by its pieces into the current hash table with
3774    a value number of RESULT.  Return the resulting reference
3775    structure we created.  */
3776 
3777 vn_reference_t
3778 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3779 			    alias_set_type base_set, tree type,
3780 			    vec<vn_reference_op_s> operands,
3781 			    tree result, unsigned int value_id)
3782 
3783 {
3784   vn_reference_s **slot;
3785   vn_reference_t vr1;
3786 
3787   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3788   vr1->value_id = value_id;
3789   vr1->vuse = vuse_ssa_val (vuse);
3790   vr1->operands = operands;
3791   valueize_refs (&vr1->operands);
3792   vr1->type = type;
3793   vr1->punned = false;
3794   vr1->set = set;
3795   vr1->base_set = base_set;
3796   vr1->hashcode = vn_reference_compute_hash (vr1);
3797   if (result && TREE_CODE (result) == SSA_NAME)
3798     result = SSA_VAL (result);
3799   vr1->result = result;
3800   vr1->result_vdef = NULL_TREE;
3801 
3802   slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3803 						      INSERT);
3804 
3805   /* At this point we should have all the things inserted that we have
3806      seen before, and we should never try inserting something that
3807      already exists.  */
3808   gcc_assert (!*slot);
3809 
3810   *slot = vr1;
3811   vr1->next = last_inserted_ref;
3812   last_inserted_ref = vr1;
3813   return vr1;
3814 }
3815 
3816 /* Compute and return the hash value for nary operation VNO1.  */
3817 
3818 static hashval_t
3819 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3820 {
3821   inchash::hash hstate;
3822   unsigned i;
3823 
3824   for (i = 0; i < vno1->length; ++i)
3825     if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3826       vno1->op[i] = SSA_VAL (vno1->op[i]);
3827 
3828   if (((vno1->length == 2
3829 	&& commutative_tree_code (vno1->opcode))
3830        || (vno1->length == 3
3831 	   && commutative_ternary_tree_code (vno1->opcode)))
3832       && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3833     std::swap (vno1->op[0], vno1->op[1]);
3834   else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3835 	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3836     {
3837       std::swap (vno1->op[0], vno1->op[1]);
3838       vno1->opcode = swap_tree_comparison (vno1->opcode);
3839     }
3840 
3841   hstate.add_int (vno1->opcode);
3842   for (i = 0; i < vno1->length; ++i)
3843     inchash::add_expr (vno1->op[i], hstate);
3844 
3845   return hstate.end ();
3846 }
3847 
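/* Worked example for the canonicalization above (hypothetical SSA
   names): a_1 + b_2 and b_2 + a_1 receive the same hash because
   commutative operands are brought into the order tree_swap_operands_p
   prefers; for comparisons the swap additionally replaces the code via
   swap_tree_comparison, so one of a_1 < b_2 / b_2 > a_1 is rewritten
   into the other before hashing.  */
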
3848 /* Compare nary operations VNO1 and VNO2 and return true if they are
3849    equivalent.  */
3850 
3851 bool
3852 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3853 {
3854   unsigned i;
3855 
3856   if (vno1->hashcode != vno2->hashcode)
3857     return false;
3858 
3859   if (vno1->length != vno2->length)
3860     return false;
3861 
3862   if (vno1->opcode != vno2->opcode
3863       || !types_compatible_p (vno1->type, vno2->type))
3864     return false;
3865 
3866   for (i = 0; i < vno1->length; ++i)
3867     if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3868       return false;
3869 
3870   /* BIT_INSERT_EXPR has an implicit operand as the type precision
3871      of op1.  Need to check to make sure they are the same.  */
3872   if (vno1->opcode == BIT_INSERT_EXPR
3873       && TREE_CODE (vno1->op[1]) == INTEGER_CST
3874       && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3875 	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3876     return false;
3877 
3878   return true;
3879 }
3880 
3881 /* Initialize VNO from the pieces provided.  */
3882 
3883 static void
3884 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3885 			     enum tree_code code, tree type, tree *ops)
3886 {
3887   vno->opcode = code;
3888   vno->length = length;
3889   vno->type = type;
3890   memcpy (&vno->op[0], ops, sizeof (tree) * length);
3891 }
3892 
3893 /* Return the number of operands for a vn_nary ops structure from STMT.  */
3894 
3895 static unsigned int
3896 vn_nary_length_from_stmt (gimple *stmt)
3897 {
3898   switch (gimple_assign_rhs_code (stmt))
3899     {
3900     case REALPART_EXPR:
3901     case IMAGPART_EXPR:
3902     case VIEW_CONVERT_EXPR:
3903       return 1;
3904 
3905     case BIT_FIELD_REF:
3906       return 3;
3907 
3908     case CONSTRUCTOR:
3909       return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3910 
3911     default:
3912       return gimple_num_ops (stmt) - 1;
3913     }
3914 }
3915 
3916 /* Initialize VNO from STMT.  */
3917 
3918 static void
3919 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3920 {
3921   unsigned i;
3922 
3923   vno->opcode = gimple_assign_rhs_code (stmt);
3924   vno->type = gimple_expr_type (stmt);
3925   switch (vno->opcode)
3926     {
3927     case REALPART_EXPR:
3928     case IMAGPART_EXPR:
3929     case VIEW_CONVERT_EXPR:
3930       vno->length = 1;
3931       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3932       break;
3933 
3934     case BIT_FIELD_REF:
3935       vno->length = 3;
3936       vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3937       vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3938       vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3939       break;
3940 
3941     case CONSTRUCTOR:
3942       vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3943       for (i = 0; i < vno->length; ++i)
3944 	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3945       break;
3946 
3947     default:
3948       gcc_checking_assert (!gimple_assign_single_p (stmt));
3949       vno->length = gimple_num_ops (stmt) - 1;
3950       for (i = 0; i < vno->length; ++i)
3951 	vno->op[i] = gimple_op (stmt, i + 1);
3952     }
3953 }
3954 
3955 /* Compute the hashcode for VNO and look for it in the hash table;
3956    return the resulting value number if it exists in the hash table.
3957    Return NULL_TREE if it does not exist in the hash table or if the
3958    result field of the operation is NULL.  VNRESULT will contain the
3959    vn_nary_op_t from the hashtable if it exists.  */
3960 
3961 static tree
3962 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3963 {
3964   vn_nary_op_s **slot;
3965 
3966   if (vnresult)
3967     *vnresult = NULL;
3968 
3969   vno->hashcode = vn_nary_op_compute_hash (vno);
3970   slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3971   if (!slot)
3972     return NULL_TREE;
3973   if (vnresult)
3974     *vnresult = *slot;
3975   return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3976 }
3977 
3978 /* Lookup an n-ary operation by its pieces and return the resulting value
3979    number if it exists in the hash table.  Return NULL_TREE if it does
3980    not exist in the hash table or if the result field of the operation
3981    is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3982    if it exists.  */
3983 
3984 tree
3985 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3986 			  tree type, tree *ops, vn_nary_op_t *vnresult)
3987 {
3988   vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3989 				  sizeof_vn_nary_op (length));
3990   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3991   return vn_nary_op_lookup_1 (vno1, vnresult);
3992 }
3993 
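/* Minimal usage sketch (illustrative; X, Y, TYPE and LHS are
   placeholders for the valueized operands, the expression type and the
   defining SSA name):

     tree ops[2] = { x, y };
     vn_nary_op_t vnresult;
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, ops, &vnresult);
     if (!val)
       vn_nary_op_insert_pieces (2, PLUS_EXPR, type, ops, lhs,
				 VN_INFO (lhs)->value_id);

   vn_nary_op_insert_pieces is defined further below.  */
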
3994 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3995    value number if it exists in the hash table.  Return NULL_TREE if
3996    it does not exist in the hash table.  VNRESULT will contain the
3997    vn_nary_op_t from the hashtable if it exists.  */
3998 
3999 tree
4000 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4001 {
4002   vn_nary_op_t vno1
4003     = XALLOCAVAR (struct vn_nary_op_s,
4004 		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4005   init_vn_nary_op_from_stmt (vno1, stmt);
4006   return vn_nary_op_lookup_1 (vno1, vnresult);
4007 }
4008 
4009 /* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */
4010 
4011 static vn_nary_op_t
4012 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4013 {
4014   return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4015 }
4016 
4017 /* Allocate and initialize a vn_nary_op_t with LENGTH operands on
4018    the vn_tables_obstack.  */
4019 
4020 static vn_nary_op_t
4021 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4022 {
4023   vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4024 
4025   vno1->value_id = value_id;
4026   vno1->length = length;
4027   vno1->predicated_values = 0;
4028   vno1->u.result = result;
4029 
4030   return vno1;
4031 }
4032 
4033 /* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
4034    VNO->HASHCODE first.  */
4035 
4036 static vn_nary_op_t
4037 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
4038 			bool compute_hash)
4039 {
4040   vn_nary_op_s **slot;
4041 
4042   if (compute_hash)
4043     {
4044       vno->hashcode = vn_nary_op_compute_hash (vno);
4045       gcc_assert (! vno->predicated_values
4046 		  || (! vno->u.values->next
4047 		      && vno->u.values->n == 1));
4048     }
4049 
4050   slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4051   vno->unwind_to = *slot;
4052   if (*slot)
4053     {
4054       /* Prefer non-predicated values.
4055          ???  Only if those are constant, otherwise, with constant predicated
4056 	 value, turn them into predicated values with entry-block validity
4057 	 (???  but we always find the first valid result currently).  */
4058       if ((*slot)->predicated_values
4059 	  && ! vno->predicated_values)
4060 	{
4061 	  /* ???  We cannot remove *slot from the unwind stack list.
4062 	     For the moment we deal with this by skipping not found
4063 	     entries but this isn't ideal ...  */
4064 	  *slot = vno;
4065 	  /* ???  Maintain a stack of states we can unwind in
4066 	     vn_nary_op_s?  But how far do we unwind?  In reality
4067 	     we need to push change records somewhere...  Or not
4068 	     unwind vn_nary_op_s and linking them but instead
4069 	     unwind the results "list", linking that, which also
4070 	     doesn't move on hashtable resize.  */
4071 	  /* We can also have a ->unwind_to recording *slot there.
4072 	     That way we can make u.values a fixed size array with
4073 	     recording the number of entries but of course we then
4074 	     have always N copies for each unwind_to-state.  Or we
4075              make sure to only ever append and each unwinding will
4076 	     pop off one entry (but how to deal with predicated
4077 	     replaced with non-predicated here?)  */
4078 	  vno->next = last_inserted_nary;
4079 	  last_inserted_nary = vno;
4080 	  return vno;
4081 	}
4082       else if (vno->predicated_values
4083 	       && ! (*slot)->predicated_values)
4084 	return *slot;
4085       else if (vno->predicated_values
4086 	       && (*slot)->predicated_values)
4087 	{
4088 	  /* ???  Factor this all into a insert_single_predicated_value
4089 	     routine.  */
4090 	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4091 	  basic_block vno_bb
4092 	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4093 	  vn_pval *nval = vno->u.values;
4094 	  vn_pval **next = &vno->u.values;
4095 	  bool found = false;
4096 	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4097 	    {
4098 	      if (expressions_equal_p (val->result, vno->u.values->result))
4099 		{
4100 		  found = true;
4101 		  for (unsigned i = 0; i < val->n; ++i)
4102 		    {
4103 		      basic_block val_bb
4104 			= BASIC_BLOCK_FOR_FN (cfun,
4105 					      val->valid_dominated_by_p[i]);
4106 		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4107 			/* Value registered with more generic predicate.  */
4108 			return *slot;
4109 		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4110 			/* Shouldn't happen, we insert in RPO order.  */
4111 			gcc_unreachable ();
4112 		    }
4113 		  /* Append value.  */
4114 		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4115 						     sizeof (vn_pval)
4116 						     + val->n * sizeof (int));
4117 		  (*next)->next = NULL;
4118 		  (*next)->result = val->result;
4119 		  (*next)->n = val->n + 1;
4120 		  memcpy ((*next)->valid_dominated_by_p,
4121 			  val->valid_dominated_by_p,
4122 			  val->n * sizeof (int));
4123 		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4124 		  next = &(*next)->next;
4125 		  if (dump_file && (dump_flags & TDF_DETAILS))
4126 		    fprintf (dump_file, "Appending predicate to value.\n");
4127 		  continue;
4128 		}
4129 	      /* Copy other predicated values.  */
4130 	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4131 						 sizeof (vn_pval)
4132 						 + (val->n-1) * sizeof (int));
4133 	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4134 	      (*next)->next = NULL;
4135 	      next = &(*next)->next;
4136 	    }
4137 	  if (!found)
4138 	    *next = nval;
4139 
4140 	  *slot = vno;
4141 	  vno->next = last_inserted_nary;
4142 	  last_inserted_nary = vno;
4143 	  return vno;
4144 	}
4145 
4146       /* While we do not want to insert things twice, it's awkward to
4147 	 avoid it in the case where visit_nary_op pattern-matches stuff
4148 	 and ends up simplifying the replacement to itself.  We then
4149 	 get two inserts, one from visit_nary_op and one from
4150 	 vn_nary_build_or_lookup.
4151 	 So allow inserts with the same value number.  */
4152       if ((*slot)->u.result == vno->u.result)
4153 	return *slot;
4154     }
4155 
4156   /* ???  There's also optimistic vs. previous committed state merging
4157      that is problematic for the case of unwinding.  */
4158 
4159   /* ???  We should return NULL if we do not use 'vno' and have the
4160      caller release it.  */
4161   gcc_assert (!*slot);
4162 
4163   *slot = vno;
4164   vno->next = last_inserted_nary;
4165   last_inserted_nary = vno;
4166   return vno;
4167 }
4168 
4169 /* Insert an n-ary operation into the current hash table using its
4170    pieces.  Return the vn_nary_op_t structure we created and put in
4171    the hashtable.  */
4172 
4173 vn_nary_op_t
4174 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4175 			  tree type, tree *ops,
4176 			  tree result, unsigned int value_id)
4177 {
4178   vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4179   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4180   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4181 }
4182 
4183 static vn_nary_op_t
4184 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4185 				     tree type, tree *ops,
4186 				     tree result, unsigned int value_id,
4187 				     edge pred_e)
4188 {
4189   /* ???  Currently tracking BBs.  */
4190   if (! single_pred_p (pred_e->dest))
4191     {
4192       /* Never record for backedges.  */
4193       if (pred_e->flags & EDGE_DFS_BACK)
4194 	return NULL;
4195       edge_iterator ei;
4196       edge e;
4197       int cnt = 0;
4198       /* Ignore backedges.  */
4199       FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4200 	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4201 	  cnt++;
4202       if (cnt != 1)
4203 	return NULL;
4204     }
4205   if (dump_file && (dump_flags & TDF_DETAILS)
4206       /* ???  Fix dumping, but currently we only get comparisons.  */
4207       && TREE_CODE_CLASS (code) == tcc_comparison)
4208     {
4209       fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4210 	       pred_e->dest->index);
4211       print_generic_expr (dump_file, ops[0], TDF_SLIM);
4212       fprintf (dump_file, " %s ", get_tree_code_name (code));
4213       print_generic_expr (dump_file, ops[1], TDF_SLIM);
4214       fprintf (dump_file, " == %s\n",
4215 	       integer_zerop (result) ? "false" : "true");
4216     }
4217   vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4218   init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4219   vno1->predicated_values = 1;
4220   vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4221 					      sizeof (vn_pval));
4222   vno1->u.values->next = NULL;
4223   vno1->u.values->result = result;
4224   vno1->u.values->n = 1;
4225   vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4226   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4227 }
4228 
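/* Illustrative example (hypothetical names): for the true outgoing
   edge of "if (a_1 == b_2)" a predicated value can be recorded saying
   that a_1 == b_2 evaluates to true in all blocks dominated by that
   edge's destination.  vn_nary_op_get_predicated_value below then
   returns the recorded result only for lookups from such dominated
   blocks.  */
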
4229 static bool
4230 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4231 
4232 static tree
4233 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4234 {
4235   if (! vno->predicated_values)
4236     return vno->u.result;
4237   for (vn_pval *val = vno->u.values; val; val = val->next)
4238     for (unsigned i = 0; i < val->n; ++i)
4239       /* Do not handle backedge executability optimistically since
4240 	 when figuring out whether to iterate we do not consider
4241 	 changed predication.  */
4242       if (dominated_by_p_w_unex
4243 	    (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4244 	     false))
4245 	return val->result;
4246   return NULL_TREE;
4247 }
4248 
4249 /* Insert the rhs of STMT into the current hash table with a value number of
4250    RESULT.  */
4251 
4252 static vn_nary_op_t
4253 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4254 {
4255   vn_nary_op_t vno1
4256     = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4257 			result, VN_INFO (result)->value_id);
4258   init_vn_nary_op_from_stmt (vno1, stmt);
4259   return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4260 }
4261 
4262 /* Compute a hashcode for PHI operation VP1 and return it.  */
4263 
4264 static inline hashval_t
4265 vn_phi_compute_hash (vn_phi_t vp1)
4266 {
4267   inchash::hash hstate;
4268   tree phi1op;
4269   tree type;
4270   edge e;
4271   edge_iterator ei;
4272 
4273   hstate.add_int (EDGE_COUNT (vp1->block->preds));
4274   switch (EDGE_COUNT (vp1->block->preds))
4275     {
4276     case 1:
4277       break;
4278     case 2:
4279       if (vp1->block->loop_father->header == vp1->block)
4280 	;
4281       else
4282 	break;
4283       /* Fallthru.  */
4284     default:
4285       hstate.add_int (vp1->block->index);
4286     }
4287 
4288   /* If all PHI arguments are constants we need to distinguish
4289      the PHI node via its type.  */
4290   type = vp1->type;
4291   hstate.merge_hash (vn_hash_type (type));
4292 
4293   FOR_EACH_EDGE (e, ei, vp1->block->preds)
4294     {
4295       /* Don't hash backedge values; they need to be handled as VN_TOP
4296          for optimistic value-numbering.  */
4297       if (e->flags & EDGE_DFS_BACK)
4298 	continue;
4299 
4300       phi1op = vp1->phiargs[e->dest_idx];
4301       if (phi1op == VN_TOP)
4302 	continue;
4303       inchash::add_expr (phi1op, hstate);
4304     }
4305 
4306   return hstate.end ();
4307 }
4308 
4309 
4310 /* Return true if COND1 and COND2 represent the same condition, set
4311    *INVERTED_P if one needs to be inverted to make it the same as
4312    the other.  */
4313 
4314 static bool
4315 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4316 		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4317 {
4318   enum tree_code code1 = gimple_cond_code (cond1);
4319   enum tree_code code2 = gimple_cond_code (cond2);
4320 
4321   *inverted_p = false;
4322   if (code1 == code2)
4323     ;
4324   else if (code1 == swap_tree_comparison (code2))
4325     std::swap (lhs2, rhs2);
4326   else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4327     *inverted_p = true;
4328   else if (code1 == invert_tree_comparison
4329 	   	      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4330     {
4331       std::swap (lhs2, rhs2);
4332       *inverted_p = true;
4333     }
4334   else
4335     return false;
4336 
4337   return ((expressions_equal_p (lhs1, lhs2)
4338 	   && expressions_equal_p (rhs1, rhs2))
4339 	  || (commutative_tree_code (code1)
4340 	      && expressions_equal_p (lhs1, rhs2)
4341 	      && expressions_equal_p (rhs1, lhs2)));
4342 }
4343 
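/* Worked example (hypothetical operands): "a_1 < b_2" and "b_2 > a_1"
   compare equal with *INVERTED_P left false (the operands are swapped
   and the codes match via swap_tree_comparison), while "a_1 < b_2"
   and "a_1 >= b_2" compare equal with *INVERTED_P set, provided NaNs
   do not prevent the inversion.  */
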
4344 /* Compare two phi entries for equality, ignoring VN_TOP arguments.  */
4345 
4346 static int
4347 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4348 {
4349   if (vp1->hashcode != vp2->hashcode)
4350     return false;
4351 
4352   if (vp1->block != vp2->block)
4353     {
4354       if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4355 	return false;
4356 
4357       switch (EDGE_COUNT (vp1->block->preds))
4358 	{
4359 	case 1:
4360 	  /* Single-arg PHIs are just copies.  */
4361 	  break;
4362 
4363 	case 2:
4364 	  {
4365 	    /* Rule out backedges into the PHI.  */
4366 	    if (vp1->block->loop_father->header == vp1->block
4367 		|| vp2->block->loop_father->header == vp2->block)
4368 	      return false;
4369 
4370 	    /* If the PHI nodes do not have compatible types
4371 	       they are not the same.  */
4372 	    if (!types_compatible_p (vp1->type, vp2->type))
4373 	      return false;
4374 
4375 	    basic_block idom1
4376 	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4377 	    basic_block idom2
4378 	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4379 	    /* If the immediate dominators end in switch stmts, multiple
4380 	       values may end up in the same PHI arg via intermediate
4381 	       CFG merges.  */
4382 	    if (EDGE_COUNT (idom1->succs) != 2
4383 		|| EDGE_COUNT (idom2->succs) != 2)
4384 	      return false;
4385 
4386 	    /* Verify the controlling stmt is the same.  */
4387 	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4388 	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4389 	    if (! last1 || ! last2)
4390 	      return false;
4391 	    bool inverted_p;
4392 	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4393 				      last2, vp2->cclhs, vp2->ccrhs,
4394 				      &inverted_p))
4395 	      return false;
4396 
4397 	    /* Get at true/false controlled edges into the PHI.  */
4398 	    edge te1, te2, fe1, fe2;
4399 	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
4400 						       &te1, &fe1)
4401 		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
4402 							  &te2, &fe2))
4403 	      return false;
4404 
4405 	    /* Swap edges if the second condition is the inverse of the
4406 	       first.  */
4407 	    if (inverted_p)
4408 	      std::swap (te2, fe2);
4409 
4410 	    /* ???  Handle VN_TOP specially.  */
4411 	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4412 				       vp2->phiargs[te2->dest_idx])
4413 		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4414 					  vp2->phiargs[fe2->dest_idx]))
4415 	      return false;
4416 
4417 	    return true;
4418 	  }
4419 
4420 	default:
4421 	  return false;
4422 	}
4423     }
4424 
4425   /* If the PHI nodes do not have compatible types
4426      they are not the same.  */
4427   if (!types_compatible_p (vp1->type, vp2->type))
4428     return false;
4429 
4430   /* Any phi in the same block will have its arguments in the
4431      same edge order, because of how we store phi nodes.  */
4432   unsigned nargs = EDGE_COUNT (vp1->block->preds);
4433   for (unsigned i = 0; i < nargs; ++i)
4434     {
4435       tree phi1op = vp1->phiargs[i];
4436       tree phi2op = vp2->phiargs[i];
4437       if (phi1op == phi2op)
4438 	continue;
4439       if (!expressions_equal_p (phi1op, phi2op))
4440 	return false;
4441     }
4442 
4443   return true;
4444 }
4445 
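/* Illustrative case for the different-block path above (hypothetical
   names): two join blocks whose immediate dominators end in the
   equivalent conditions "if (a_1 < b_2)" and "if (b_2 > a_1)", and
   whose PHIs receive c_4 on the true-controlled edge and d_5 on the
   false-controlled edge, compare equal here, so both PHI results can
   get the same value number.  */
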
4446 /* Lookup PHI in the current hash table, and return the resulting
4447    value number if it exists in the hash table.  Return NULL_TREE if
4448    it does not exist in the hash table. */
4449 
4450 static tree
4451 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4452 {
4453   vn_phi_s **slot;
4454   struct vn_phi_s *vp1;
4455   edge e;
4456   edge_iterator ei;
4457 
4458   vp1 = XALLOCAVAR (struct vn_phi_s,
4459 		    sizeof (struct vn_phi_s)
4460 		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4461 
4462   /* Canonicalize the SSA_NAME's to their value number.  */
4463   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4464     {
4465       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4466       if (TREE_CODE (def) == SSA_NAME
4467 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4468 	def = SSA_VAL (def);
4469       vp1->phiargs[e->dest_idx] = def;
4470     }
4471   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4472   vp1->block = gimple_bb (phi);
4473   /* Extract values of the controlling condition.  */
4474   vp1->cclhs = NULL_TREE;
4475   vp1->ccrhs = NULL_TREE;
4476   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4477   if (EDGE_COUNT (idom1->succs) == 2)
4478     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4479       {
4480 	/* ???  We want to use SSA_VAL here.  But possibly not
4481 	   allow VN_TOP.  */
4482 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4483 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4484       }
4485   vp1->hashcode = vn_phi_compute_hash (vp1);
4486   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4487   if (!slot)
4488     return NULL_TREE;
4489   return (*slot)->result;
4490 }
4491 
4492 /* Insert PHI into the current hash table with a value number of
4493    RESULT.  */
4494 
4495 static vn_phi_t
4496 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4497 {
4498   vn_phi_s **slot;
4499   vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4500 					   sizeof (vn_phi_s)
4501 					   + ((gimple_phi_num_args (phi) - 1)
4502 					      * sizeof (tree)));
4503   edge e;
4504   edge_iterator ei;
4505 
4506   /* Canonicalize the SSA_NAME's to their value number.  */
4507   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4508     {
4509       tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4510       if (TREE_CODE (def) == SSA_NAME
4511 	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4512 	def = SSA_VAL (def);
4513       vp1->phiargs[e->dest_idx] = def;
4514     }
4515   vp1->value_id = VN_INFO (result)->value_id;
4516   vp1->type = TREE_TYPE (gimple_phi_result (phi));
4517   vp1->block = gimple_bb (phi);
4518   /* Extract values of the controlling condition.  */
4519   vp1->cclhs = NULL_TREE;
4520   vp1->ccrhs = NULL_TREE;
4521   basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4522   if (EDGE_COUNT (idom1->succs) == 2)
4523     if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4524       {
4525 	/* ???  We want to use SSA_VAL here.  But possibly not
4526 	   allow VN_TOP.  */
4527 	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4528 	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4529       }
4530   vp1->result = result;
4531   vp1->hashcode = vn_phi_compute_hash (vp1);
4532 
4533   slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4534   gcc_assert (!*slot);
4535 
4536   *slot = vp1;
4537   vp1->next = last_inserted_phi;
4538   last_inserted_phi = vp1;
4539   return vp1;
4540 }
4541 
4542 
4543 /* Return true if BB1 is dominated by BB2 taking into account edges
4544    that are not executable.  When ALLOW_BACK is false consider
4545    non-executable backedges as executable.  */
4546 
4547 static bool
4548 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4549 {
4550   edge_iterator ei;
4551   edge e;
4552 
4553   if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4554     return true;
4555 
4556   /* Before iterating we'd like to know if there exists an
4557      (executable) path from bb2 to bb1 at all; if not we can
4558      directly return false.  For now simply iterate once.  */
4559 
4560   /* Iterate to the single executable bb1 predecessor.  */
4561   if (EDGE_COUNT (bb1->preds) > 1)
4562     {
4563       edge prede = NULL;
4564       FOR_EACH_EDGE (e, ei, bb1->preds)
4565 	if ((e->flags & EDGE_EXECUTABLE)
4566 	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4567 	  {
4568 	    if (prede)
4569 	      {
4570 		prede = NULL;
4571 		break;
4572 	      }
4573 	    prede = e;
4574 	  }
4575       if (prede)
4576 	{
4577 	  bb1 = prede->src;
4578 
4579 	  /* Re-do the dominance check with changed bb1.  */
4580 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4581 	    return true;
4582 	}
4583     }
4584 
4585   /* Iterate to the single executable bb2 successor.  */
4586   edge succe = NULL;
4587   FOR_EACH_EDGE (e, ei, bb2->succs)
4588     if ((e->flags & EDGE_EXECUTABLE)
4589 	|| (!allow_back && (e->flags & EDGE_DFS_BACK)))
4590       {
4591 	if (succe)
4592 	  {
4593 	    succe = NULL;
4594 	    break;
4595 	  }
4596 	succe = e;
4597       }
4598   if (succe)
4599     {
4600       /* Verify the reached block is only reached through succe.
4601 	 If there is only one edge we can spare us the dominator
4602 	 check and iterate directly.  */
4603       if (EDGE_COUNT (succe->dest->preds) > 1)
4604 	{
4605 	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
4606 	    if (e != succe
4607 		&& ((e->flags & EDGE_EXECUTABLE)
4608 		    || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4609 	      {
4610 		succe = NULL;
4611 		break;
4612 	      }
4613 	}
4614       if (succe)
4615 	{
4616 	  bb2 = succe->dest;
4617 
4618 	  /* Re-do the dominance check with changed bb2.  */
4619 	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4620 	    return true;
4621 	}
4622     }
4623 
4624   /* We could now iterate updating bb1 / bb2.  */
4625   return false;
4626 }
4627 
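/* Illustrative scenario (hypothetical CFG): if BB1 has a single
   executable predecessor edge, BB1 is replaced by that edge's source
   before retrying the dominance query; similarly, if BB2 has a single
   executable successor edge whose destination is reached only through
   it, BB2 is replaced by that destination.  This lets the query
   succeed when BB2 dominates BB1 on the executable subgraph but not
   in the plain CFG.  */
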
4628 /* Set the value number of FROM to TO, return true if it has changed
4629    as a result.  */
4630 
4631 static inline bool
4632 set_ssa_val_to (tree from, tree to)
4633 {
4634   vn_ssa_aux_t from_info = VN_INFO (from);
4635   tree currval = from_info->valnum; // SSA_VAL (from)
4636   poly_int64 toff, coff;
4637   bool curr_undefined = false;
4638   bool curr_invariant = false;
4639 
4640   /* The only thing we allow as value numbers are ssa_names
4641      and invariants.  So assert that here.  We don't allow VN_TOP
4642      as visiting a stmt should produce a value-number other than
4643      that.
4644      ???  Still VN_TOP can happen for unreachable code, so force
4645      it to varying in that case.  Not all code is prepared to
4646      get VN_TOP on valueization.  */
4647   if (to == VN_TOP)
4648     {
4649       /* ???  When iterating and visiting PHI <undef, backedge-value>
4650          for the first time we rightfully get VN_TOP and we need to
4651 	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4652 	 With SCCVN we were simply lucky we iterated the other PHI
4653 	 cycles first and thus visited the backedge-value DEF.  */
4654       if (currval == VN_TOP)
4655 	goto set_and_exit;
4656       if (dump_file && (dump_flags & TDF_DETAILS))
4657 	fprintf (dump_file, "Forcing value number to varying on "
4658 		 "receiving VN_TOP\n");
4659       to = from;
4660     }
4661 
4662   gcc_checking_assert (to != NULL_TREE
4663 		       && ((TREE_CODE (to) == SSA_NAME
4664 			    && (to == from || SSA_VAL (to) == to))
4665 			   || is_gimple_min_invariant (to)));
4666 
4667   if (from != to)
4668     {
4669       if (currval == from)
4670 	{
4671 	  if (dump_file && (dump_flags & TDF_DETAILS))
4672 	    {
4673 	      fprintf (dump_file, "Not changing value number of ");
4674 	      print_generic_expr (dump_file, from);
4675 	      fprintf (dump_file, " from VARYING to ");
4676 	      print_generic_expr (dump_file, to);
4677 	      fprintf (dump_file, "\n");
4678 	    }
4679 	  return false;
4680 	}
4681       curr_invariant = is_gimple_min_invariant (currval);
4682       curr_undefined = (TREE_CODE (currval) == SSA_NAME
4683 			&& ssa_undefined_value_p (currval, false));
4684       if (currval != VN_TOP
4685 	  && !curr_invariant
4686 	  && !curr_undefined
4687 	  && is_gimple_min_invariant (to))
4688 	{
4689 	  if (dump_file && (dump_flags & TDF_DETAILS))
4690 	    {
4691 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4692 		       "value number of ");
4693 	      print_generic_expr (dump_file, from);
4694 	      fprintf (dump_file, " from ");
4695 	      print_generic_expr (dump_file, currval);
4696 	      fprintf (dump_file, " (non-constant) to ");
4697 	      print_generic_expr (dump_file, to);
4698 	      fprintf (dump_file, " (constant)\n");
4699 	    }
4700 	  to = from;
4701 	}
4702       else if (currval != VN_TOP
4703 	       && !curr_undefined
4704 	       && TREE_CODE (to) == SSA_NAME
4705 	       && ssa_undefined_value_p (to, false))
4706 	{
4707 	  if (dump_file && (dump_flags & TDF_DETAILS))
4708 	    {
4709 	      fprintf (dump_file, "Forcing VARYING instead of changing "
4710 		       "value number of ");
4711 	      print_generic_expr (dump_file, from);
4712 	      fprintf (dump_file, " from ");
4713 	      print_generic_expr (dump_file, currval);
4714 	      fprintf (dump_file, " (non-undefined) to ");
4715 	      print_generic_expr (dump_file, to);
4716 	      fprintf (dump_file, " (undefined)\n");
4717 	    }
4718 	  to = from;
4719 	}
4720       else if (TREE_CODE (to) == SSA_NAME
4721 	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4722 	to = from;
4723     }
4724 
4725 set_and_exit:
4726   if (dump_file && (dump_flags & TDF_DETAILS))
4727     {
4728       fprintf (dump_file, "Setting value number of ");
4729       print_generic_expr (dump_file, from);
4730       fprintf (dump_file, " to ");
4731       print_generic_expr (dump_file, to);
4732     }
4733 
4734   if (currval != to
4735       && !operand_equal_p (currval, to, 0)
4736       /* Different undefined SSA names are not actually different.  See
4737 	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
4738       && !(curr_undefined
4739 	   && TREE_CODE (to) == SSA_NAME
4740 	   && ssa_undefined_value_p (to, false))
4741       /* ???  For addresses involving volatile objects or types operand_equal_p
4742          does not reliably detect ADDR_EXPRs as equal.  We know we are only
4743 	 getting invariant gimple addresses here, so can use
4744 	 get_addr_base_and_unit_offset to do this comparison.  */
4745       && !(TREE_CODE (currval) == ADDR_EXPR
4746 	   && TREE_CODE (to) == ADDR_EXPR
4747 	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4748 	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4749 	   && known_eq (coff, toff)))
4750     {
4751       if (to != from
4752 	  && currval != VN_TOP
4753 	  && !curr_undefined
4754 	  /* We do not want to allow lattice transitions from one value
4755 	     to another since that may lead to not terminating iteration
4756 	     (see PR95049).  Since there's no convenient way to check
4757 	     for the allowed transition of VAL -> PHI (loop entry value,
4758 	     same on two PHIs, to same PHI result) we restrict the check
4759 	     to invariants.  */
4760 	  && curr_invariant
4761 	  && is_gimple_min_invariant (to))
4762 	{
4763 	  if (dump_file && (dump_flags & TDF_DETAILS))
4764 	    fprintf (dump_file, " forced VARYING");
4765 	  to = from;
4766 	}
4767       if (dump_file && (dump_flags & TDF_DETAILS))
4768 	fprintf (dump_file, " (changed)\n");
4769       from_info->valnum = to;
4770       return true;
4771     }
4772   if (dump_file && (dump_flags & TDF_DETAILS))
4773     fprintf (dump_file, "\n");
4774   return false;
4775 }
4776 
4777 /* Set all definitions in STMT to value number to themselves.
4778    Return true if a value number changed. */
4779 
4780 static bool
4781 defs_to_varying (gimple *stmt)
4782 {
4783   bool changed = false;
4784   ssa_op_iter iter;
4785   def_operand_p defp;
4786 
4787   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4788     {
4789       tree def = DEF_FROM_PTR (defp);
4790       changed |= set_ssa_val_to (def, def);
4791     }
4792   return changed;
4793 }
4794 
4795 /* Visit a copy between LHS and RHS, return true if the value number
4796    changed.  */
4797 
4798 static bool
4799 visit_copy (tree lhs, tree rhs)
4800 {
4801   /* Valueize.  */
4802   rhs = SSA_VAL (rhs);
4803 
4804   return set_ssa_val_to (lhs, rhs);
4805 }
4806 
4807 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
4808    is the same.  */
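/* A rough sketch of the cases handled below when looking up a
   short-typed OP in WIDE_TYPE int: we may find an existing
   (int) OP, or, when OP was itself truncated from an int value,
   return that value, or for an INTEGER_CST simply return the
   constant extended to int.  */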
4809 
4810 static tree
4811 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4812 {
4813   if (TREE_CODE (op) == SSA_NAME)
4814     op = vn_valueize (op);
4815 
4816   /* Either we have the op widened available.  */
4817   tree ops[3] = {};
4818   ops[0] = op;
4819   tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4820 				       wide_type, ops, NULL);
4821   if (tem)
4822     return tem;
4823 
4824   /* Or the op is truncated from some existing value.  */
4825   if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4826     {
4827       gimple *def = SSA_NAME_DEF_STMT (op);
4828       if (is_gimple_assign (def)
4829 	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4830 	{
4831 	  tem = gimple_assign_rhs1 (def);
4832 	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4833 	    {
4834 	      if (TREE_CODE (tem) == SSA_NAME)
4835 		tem = vn_valueize (tem);
4836 	      return tem;
4837 	    }
4838 	}
4839     }
4840 
4841   /* For constants simply extend it.  */
4842   if (TREE_CODE (op) == INTEGER_CST)
4843     return wide_int_to_tree (wide_type, wi::to_wide (op));
4844 
4845   return NULL_TREE;
4846 }
4847 
4848 /* Visit a nary operator RHS, value number it, and return true if the
4849    value number of LHS has changed as a result.  */
4850 
4851 static bool
4852 visit_nary_op (tree lhs, gassign *stmt)
4853 {
4854   vn_nary_op_t vnresult;
4855   tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4856   if (! result && vnresult)
4857     result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4858   if (result)
4859     return set_ssa_val_to (lhs, result);
4860 
4861   /* Do some special pattern matching for redundancies of operations
4862      in different types.  */
4863   enum tree_code code = gimple_assign_rhs_code (stmt);
4864   tree type = TREE_TYPE (lhs);
4865   tree rhs1 = gimple_assign_rhs1 (stmt);
4866   switch (code)
4867     {
4868     CASE_CONVERT:
4869       /* Match arithmetic done in a different type where we can easily
4870          substitute the result from some earlier sign-changed or widened
4871 	 operation.  */
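      /* E.g. for
	   _1 = a_2 + b_3;            (in unsigned short)
	   _4 = (unsigned int) _1;
	 an available unsigned int addition of widened a_2 and b_3 can
	 be reused, possibly masked back to 16 bits (a sketch of the
	 transform attempted below; names are illustrative).  */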
4872       if (INTEGRAL_TYPE_P (type)
4873 	  && TREE_CODE (rhs1) == SSA_NAME
4874 	  /* We only handle sign-changes, zero-extension -> & mask or
4875 	     sign-extension if we know the inner operation doesn't
4876 	     overflow.  */
4877 	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4878 		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4879 		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4880 	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4881 	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4882 	{
4883 	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4884 	  if (def
4885 	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
4886 		  || gimple_assign_rhs_code (def) == MINUS_EXPR
4887 		  || gimple_assign_rhs_code (def) == MULT_EXPR))
4888 	    {
4889 	      tree ops[3] = {};
4890 	      /* When requiring a sign-extension we cannot model a
4891 		 previous truncation with a single op so don't bother.  */
4892 	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4893 	      /* Either we have the op widened available.  */
4894 	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4895 					   allow_truncate);
4896 	      if (ops[0])
4897 		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4898 					     allow_truncate);
4899 	      if (ops[0] && ops[1])
4900 		{
4901 		  ops[0] = vn_nary_op_lookup_pieces
4902 		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
4903 		  /* We have wider operation available.  */
4904 		  if (ops[0]
4905 		      /* If the leader is a wrapping operation we can
4906 		         insert it for code hoisting w/o introducing
4907 			 undefined overflow.  If it is not it has to
4908 			 be available.  See PR86554.  */
4909 		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4910 			  || (rpo_avail && vn_context_bb
4911 			      && rpo_avail->eliminate_avail (vn_context_bb,
4912 							     ops[0]))))
4913 		    {
4914 		      unsigned lhs_prec = TYPE_PRECISION (type);
4915 		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4916 		      if (lhs_prec == rhs_prec
4917 			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4918 			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4919 			{
4920 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4921 						    NOP_EXPR, type, ops[0]);
4922 			  result = vn_nary_build_or_lookup (&match_op);
4923 			  if (result)
4924 			    {
4925 			      bool changed = set_ssa_val_to (lhs, result);
4926 			      vn_nary_op_insert_stmt (stmt, result);
4927 			      return changed;
4928 			    }
4929 			}
4930 		      else
4931 			{
4932 			  tree mask = wide_int_to_tree
4933 			    (type, wi::mask (rhs_prec, false, lhs_prec));
4934 			  gimple_match_op match_op (gimple_match_cond::UNCOND,
4935 						    BIT_AND_EXPR,
4936 						    TREE_TYPE (lhs),
4937 						    ops[0], mask);
4938 			  result = vn_nary_build_or_lookup (&match_op);
4939 			  if (result)
4940 			    {
4941 			      bool changed = set_ssa_val_to (lhs, result);
4942 			      vn_nary_op_insert_stmt (stmt, result);
4943 			      return changed;
4944 			    }
4945 			}
4946 		    }
4947 		}
4948 	    }
4949 	}
4950       break;
4951     case BIT_AND_EXPR:
4952       if (INTEGRAL_TYPE_P (type)
4953 	  && TREE_CODE (rhs1) == SSA_NAME
4954 	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4955 	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4956 	  && default_vn_walk_kind != VN_NOWALK
4957 	  && CHAR_BIT == 8
4958 	  && BITS_PER_UNIT == 8
4959 	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4960 	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4961 	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
4962 	{
4963 	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4964 	  if (ass
4965 	      && !gimple_has_volatile_ops (ass)
4966 	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
4967 	    {
4968 	      tree last_vuse = gimple_vuse (ass);
4969 	      tree op = gimple_assign_rhs1 (ass);
4970 	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
4971 						 default_vn_walk_kind,
4972 						 NULL, true, &last_vuse,
4973 						 gimple_assign_rhs2 (stmt));
4974 	      if (result
4975 		  && useless_type_conversion_p (TREE_TYPE (result),
4976 						TREE_TYPE (op)))
4977 		return set_ssa_val_to (lhs, result);
4978 	    }
4979 	}
4980       break;
4981     case TRUNC_DIV_EXPR:
4982       if (TYPE_UNSIGNED (type))
4983 	break;
4984       /* Fallthru.  */
4985     case RDIV_EXPR:
4986     case MULT_EXPR:
4987       /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
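      /* E.g. with _5 = -a_2 and an available v_1 = a_2 / b_3,
	 _4 = _5 / b_3 can be valued as -v_1 (illustrative names).  */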
4988       if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
4989 	{
4990 	  tree rhs[2];
4991 	  rhs[0] = rhs1;
4992 	  rhs[1] = gimple_assign_rhs2 (stmt);
4993 	  for (unsigned i = 0; i <= 1; ++i)
4994 	    {
4995 	      unsigned j = i == 0 ? 1 : 0;
4996 	      tree ops[2];
4997 	      gimple_match_op match_op (gimple_match_cond::UNCOND,
4998 					NEGATE_EXPR, type, rhs[i]);
4999 	      ops[i] = vn_nary_build_or_lookup_1 (&match_op, false);
5000 	      ops[j] = rhs[j];
5001 	      if (ops[i]
5002 		  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5003 							 type, ops, NULL)))
5004 		{
5005 		  gimple_match_op match_op (gimple_match_cond::UNCOND,
5006 					    NEGATE_EXPR, type, ops[0]);
5007 		  result = vn_nary_build_or_lookup (&match_op);
5008 		  if (result)
5009 		    {
5010 		      bool changed = set_ssa_val_to (lhs, result);
5011 		      vn_nary_op_insert_stmt (stmt, result);
5012 		      return changed;
5013 		    }
5014 		}
5015 	    }
5016 	}
5017       break;
5018     default:
5019       break;
5020     }
5021 
5022   bool changed = set_ssa_val_to (lhs, lhs);
5023   vn_nary_op_insert_stmt (stmt, lhs);
5024   return changed;
5025 }
5026 
5027 /* Visit a call STMT storing into LHS.  Return true if the value number
5028    of the LHS has changed as a result.  */
5029 
5030 static bool
5031 visit_reference_op_call (tree lhs, gcall *stmt)
5032 {
5033   bool changed = false;
5034   struct vn_reference_s vr1;
5035   vn_reference_t vnresult = NULL;
5036   tree vdef = gimple_vdef (stmt);
5037 
5038   /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
5039   if (lhs && TREE_CODE (lhs) != SSA_NAME)
5040     lhs = NULL_TREE;
5041 
5042   vn_reference_lookup_call (stmt, &vnresult, &vr1);
5043   if (vnresult)
5044     {
5045       if (vnresult->result_vdef && vdef)
5046 	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5047       else if (vdef)
5048 	/* If the call was discovered to be pure or const reflect
5049 	   that as far as possible.  */
5050 	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
5051 
5052       if (!vnresult->result && lhs)
5053 	vnresult->result = lhs;
5054 
5055       if (vnresult->result && lhs)
5056 	changed |= set_ssa_val_to (lhs, vnresult->result);
5057     }
5058   else
5059     {
5060       vn_reference_t vr2;
5061       vn_reference_s **slot;
5062       tree vdef_val = vdef;
5063       if (vdef)
5064 	{
5065 	  /* If we value numbered an indirect call's function to
5066 	     one not clobbering memory, value number its VDEF to its
5067 	     VUSE.  */
5068 	  tree fn = gimple_call_fn (stmt);
5069 	  if (fn && TREE_CODE (fn) == SSA_NAME)
5070 	    {
5071 	      fn = SSA_VAL (fn);
5072 	      if (TREE_CODE (fn) == ADDR_EXPR
5073 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5074 		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5075 		      & (ECF_CONST | ECF_PURE)))
5076 		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5077 	    }
5078 	  changed |= set_ssa_val_to (vdef, vdef_val);
5079 	}
5080       if (lhs)
5081 	changed |= set_ssa_val_to (lhs, lhs);
5082       vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5083       vr2->vuse = vr1.vuse;
5084       /* As we are not walking the virtual operand chain we know the
5085 	 shared_lookup_references are still original so we can re-use
5086 	 them here.  */
5087       vr2->operands = vr1.operands.copy ();
5088       vr2->type = vr1.type;
5089       vr2->punned = vr1.punned;
5090       vr2->set = vr1.set;
5091       vr2->base_set = vr1.base_set;
5092       vr2->hashcode = vr1.hashcode;
5093       vr2->result = lhs;
5094       vr2->result_vdef = vdef_val;
5095       vr2->value_id = 0;
5096       slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5097 							  INSERT);
5098       gcc_assert (!*slot);
5099       *slot = vr2;
5100       vr2->next = last_inserted_ref;
5101       last_inserted_ref = vr2;
5102     }
5103 
5104   return changed;
5105 }
5106 
5107 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5108    and return true if the value number of the LHS has changed as a result.  */
5109 
5110 static bool
5111 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5112 {
5113   bool changed = false;
5114   tree last_vuse;
5115   tree result;
5116   vn_reference_t res;
5117 
5118   last_vuse = gimple_vuse (stmt);
5119   result = vn_reference_lookup (op, gimple_vuse (stmt),
5120 				default_vn_walk_kind, &res, true, &last_vuse);
5121 
5122   /* We handle type-punning through unions by value-numbering based
5123      on offset and size of the access.  Be prepared to handle a
5124      type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
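  /* E.g. a float load from memory whose current contents were stored
     as a same-sized int value i_1 may be valued as
     VIEW_CONVERT_EXPR<float>(i_1) (an illustrative sketch).  */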
5125   if (result
5126       && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5127     {
5128       /* Avoid the type punning in case the result mode has padding where
5129 	 the op we look up has not.  */
5130       if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5131 		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5132 	result = NULL_TREE;
5133       else
5134 	{
5135 	  /* We will be setting the value number of lhs to the value number
5136 	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5137 	     So first simplify and lookup this expression to see if it
5138 	     is already available.  */
5139 	  gimple_match_op res_op (gimple_match_cond::UNCOND,
5140 				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5141 	  result = vn_nary_build_or_lookup (&res_op);
5142 	  if (result
5143 	      && TREE_CODE (result) == SSA_NAME
5144 	      && VN_INFO (result)->needs_insertion)
5145 	    /* Track whether this is the canonical expression for different
5146 	       typed loads.  We use that as a stopgap measure for code
5147 	       hoisting when dealing with floating point loads.  */
5148 	    res->punned = true;
5149 	}
5150 
5151       /* When building the conversion fails avoid inserting the reference
5152          again.  */
5153       if (!result)
5154 	return set_ssa_val_to (lhs, lhs);
5155     }
5156 
5157   if (result)
5158     changed = set_ssa_val_to (lhs, result);
5159   else
5160     {
5161       changed = set_ssa_val_to (lhs, lhs);
5162       vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5163     }
5164 
5165   return changed;
5166 }
5167 
5168 
5169 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5170    and return true if the value number of the LHS has changed as a result.  */
5171 
5172 static bool
5173 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5174 {
5175   bool changed = false;
5176   vn_reference_t vnresult = NULL;
5177   tree assign;
5178   bool resultsame = false;
5179   tree vuse = gimple_vuse (stmt);
5180   tree vdef = gimple_vdef (stmt);
5181 
5182   if (TREE_CODE (op) == SSA_NAME)
5183     op = SSA_VAL (op);
5184 
5185   /* First we want to look up, using the *vuses* from the store,
5186      whether the last store to this location with the same address
5187      had the same value.
5188 
5189      The vuses represent the memory state before the store.  If the
5190      memory state, address, and value of the store is the same as the
5191      last store to this location, then this store will produce the
5192      same memory state as that store.
5193 
5194      In this case the vdef versions for this store are value numbered to those
5195      vuse versions, since they represent the same memory state after
5196      this store.
5197 
5198      Otherwise, the vdefs for the store are used when inserting into
5199      the table, since the store generates a new memory state.  */
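  /* Sketch: for a store  *p_1 = x_2  with VUSE .MEM_3 and VDEF .MEM_4,
     if the lookup shows *p_1 already holds x_2's value in .MEM_3 the
     VDEF .MEM_4 is value numbered to .MEM_3; otherwise .MEM_4 gets a
     new value and the store is recorded (names are illustrative).  */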
5200 
5201   vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5202   if (vnresult
5203       && vnresult->result)
5204     {
5205       tree result = vnresult->result;
5206       gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5207 			   || result == SSA_VAL (result));
5208       resultsame = expressions_equal_p (result, op);
5209       if (resultsame)
5210 	{
5211 	  /* If the TBAA state isn't compatible for downstream reads
5212 	     we cannot value-number the VDEFs the same.  */
5213 	  ao_ref lhs_ref;
5214 	  ao_ref_init (&lhs_ref, lhs);
5215 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
5216 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5217 	  if ((vnresult->set != set
5218 	       && ! alias_set_subset_of (set, vnresult->set))
5219 	      || (vnresult->base_set != base_set
5220 		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
5221 	    resultsame = false;
5222 	}
5223     }
5224 
5225   if (!resultsame)
5226     {
5227       /* Only perform the following when being called from PRE
5228 	 which embeds tail merging.  */
5229       if (default_vn_walk_kind == VN_WALK)
5230 	{
5231 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5232 	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5233 	  if (vnresult)
5234 	    {
5235 	      VN_INFO (vdef)->visited = true;
5236 	      return set_ssa_val_to (vdef, vnresult->result_vdef);
5237 	    }
5238 	}
5239 
5240       if (dump_file && (dump_flags & TDF_DETAILS))
5241 	{
5242 	  fprintf (dump_file, "No store match\n");
5243 	  fprintf (dump_file, "Value numbering store ");
5244 	  print_generic_expr (dump_file, lhs);
5245 	  fprintf (dump_file, " to ");
5246 	  print_generic_expr (dump_file, op);
5247 	  fprintf (dump_file, "\n");
5248 	}
5249       /* Have to set value numbers before insert, since insert is
5250 	 going to valueize the references in-place.  */
5251       if (vdef)
5252 	changed |= set_ssa_val_to (vdef, vdef);
5253 
5254       /* Do not insert structure copies into the tables.  */
5255       if (is_gimple_min_invariant (op)
5256 	  || is_gimple_reg (op))
5257         vn_reference_insert (lhs, op, vdef, NULL);
5258 
5259       /* Only perform the following when being called from PRE
5260 	 which embeds tail merging.  */
5261       if (default_vn_walk_kind == VN_WALK)
5262 	{
5263 	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5264 	  vn_reference_insert (assign, lhs, vuse, vdef);
5265 	}
5266     }
5267   else
5268     {
5269       /* We had a match, so value number the vdef to have the value
5270 	 number of the vuse it came from.  */
5271 
5272       if (dump_file && (dump_flags & TDF_DETAILS))
5273 	fprintf (dump_file, "Store matched earlier value, "
5274 		 "value numbering store vdefs to matching vuses.\n");
5275 
5276       changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5277     }
5278 
5279   return changed;
5280 }
5281 
5282 /* Visit and value number PHI, return true if the value number
5283    changed.  When BACKEDGES_VARYING_P is true then assume all
5284    backedge values are varying.  When INSERTED is not NULL then
5285    this is just an ahead-of-time query for a possible iteration; set INSERTED
5286    to true if we'd insert into the hashtable.  */
5287 
5288 static bool
5289 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5290 {
5291   tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5292   tree backedge_val = NULL_TREE;
5293   bool seen_non_backedge = false;
5294   tree sameval_base = NULL_TREE;
5295   poly_int64 soff, doff;
5296   unsigned n_executable = 0;
5297   edge_iterator ei;
5298   edge e;
5299 
5300   /* TODO: We could check for this in initialization, and replace this
5301      with a gcc_assert.  */
5302   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5303     return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5304 
5305   /* We track whether a PHI was CSEd to another PHI, to avoid excessive
5306      iterations that would be necessary only because the PHI changed
5307      arguments but not value.  */
5308   if (!inserted)
5309     gimple_set_plf (phi, GF_PLF_1, false);
5310 
5311   /* See if all non-TOP arguments have the same value.  TOP is
5312      equivalent to everything, so we can ignore it.  */
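  /* E.g. x_1 = PHI <a_2(2), VN_TOP(3), a_2(4)> can be valued as a_2,
     while mixing different non-TOP values drops sameval to NULL and
     usually results in VARYING (illustrative example).  */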
5313   FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5314     if (e->flags & EDGE_EXECUTABLE)
5315       {
5316 	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5317 
5318 	if (def == PHI_RESULT (phi))
5319 	  continue;
5320 	++n_executable;
5321 	if (TREE_CODE (def) == SSA_NAME)
5322 	  {
5323 	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5324 	      def = SSA_VAL (def);
5325 	    if (e->flags & EDGE_DFS_BACK)
5326 	      backedge_val = def;
5327 	  }
5328 	if (!(e->flags & EDGE_DFS_BACK))
5329 	  seen_non_backedge = true;
5330 	if (def == VN_TOP)
5331 	  ;
5332 	/* Ignore undefined defs for sameval but record one.  */
5333 	else if (TREE_CODE (def) == SSA_NAME
5334 		 && ! virtual_operand_p (def)
5335 		 && ssa_undefined_value_p (def, false))
5336 	  seen_undef = def;
5337 	else if (sameval == VN_TOP)
5338 	  sameval = def;
5339 	else if (!expressions_equal_p (def, sameval))
5340 	  {
5341 	    /* We know we're arriving only with invariant addresses here,
5342 	       try harder comparing them.  We can do some caching here
5343 	       which we cannot do in expressions_equal_p.  */
5344 	    if (TREE_CODE (def) == ADDR_EXPR
5345 		&& TREE_CODE (sameval) == ADDR_EXPR
5346 		&& sameval_base != (void *)-1)
5347 	      {
5348 		if (!sameval_base)
5349 		  sameval_base = get_addr_base_and_unit_offset
5350 				   (TREE_OPERAND (sameval, 0), &soff);
5351 		if (!sameval_base)
5352 		  sameval_base = (tree)(void *)-1;
5353 		else if ((get_addr_base_and_unit_offset
5354 			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
5355 			 && known_eq (soff, doff))
5356 		  continue;
5357 	      }
5358 	    sameval = NULL_TREE;
5359 	    break;
5360 	  }
5361       }
5362 
5363   /* If the value we want to use is flowing over the backedge and we
5364      should take it as VARYING but it has a non-VARYING value drop to
5365      VARYING.
5366      If we value-number a virtual operand never value-number to the
5367      value from the backedge as that confuses the alias-walking code.
5368      See gcc.dg/torture/pr87176.c.  If the value is the same on a
5369      non-backedge everything is OK though.  */
5370   bool visited_p;
5371   if ((backedge_val
5372        && !seen_non_backedge
5373        && TREE_CODE (backedge_val) == SSA_NAME
5374        && sameval == backedge_val
5375        && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5376 	   || SSA_VAL (backedge_val) != backedge_val))
5377       /* Do not value-number a virtual operand to something not visited
5378 	 though, given that would allow escaping a region in alias walking.  */
5379       || (sameval
5380 	  && TREE_CODE (sameval) == SSA_NAME
5381 	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5382 	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5383 	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
5384     /* Note this just drops to VARYING without inserting the PHI into
5385        the hashes.  */
5386     result = PHI_RESULT (phi);
5387   /* If none of the edges was executable keep the value-number at VN_TOP,
5388      if only a single edge is executable use its value.  */
5389   else if (n_executable <= 1)
5390     result = seen_undef ? seen_undef : sameval;
5391   /* If we saw only undefined values and VN_TOP use one of the
5392      undefined values.  */
5393   else if (sameval == VN_TOP)
5394     result = seen_undef ? seen_undef : sameval;
5395   /* First see if it is equivalent to a phi node in this block.  We prefer
5396      this as it allows IV elimination - see PRs 66502 and 67167.  */
5397   else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5398     {
5399       if (!inserted
5400 	  && TREE_CODE (result) == SSA_NAME
5401 	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5402 	{
5403 	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5404 	  if (dump_file && (dump_flags & TDF_DETAILS))
5405 	    {
5406 	      fprintf (dump_file, "Marking CSEd to PHI node ");
5407 	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5408 				 0, TDF_SLIM);
5409 	      fprintf (dump_file, "\n");
5410 	    }
5411 	}
5412     }
5413   /* If all values are the same use that, unless we've seen undefined
5414      values as well and the value isn't constant.
5415      CCP/copyprop have the same restriction to not remove uninit warnings.  */
5416   else if (sameval
5417 	   && (! seen_undef || is_gimple_min_invariant (sameval)))
5418     result = sameval;
5419   else
5420     {
5421       result = PHI_RESULT (phi);
5422       /* Only insert PHIs that are varying; for constant value numbers
5423          we would mess up equivalences otherwise, as we are only comparing
5424 	 the immediate controlling predicates.  */
5425       vn_phi_insert (phi, result, backedges_varying_p);
5426       if (inserted)
5427 	*inserted = true;
5428     }
5429 
5430   return set_ssa_val_to (PHI_RESULT (phi), result);
5431 }
5432 
5433 /* Try to simplify RHS using equivalences and constant folding.  */
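/* E.g. _1 = 2 + 3 simplifies to 5, and _2 = x_3 + 0 may simplify to
   the current value of x_3 via vn_valueize (an illustrative sketch).  */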
5434 
5435 static tree
5436 try_to_simplify (gassign *stmt)
5437 {
5438   enum tree_code code = gimple_assign_rhs_code (stmt);
5439   tree tem;
5440 
5441   /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
5442      in this case, there is no point in doing extra work.  */
5443   if (code == SSA_NAME)
5444     return NULL_TREE;
5445 
5446   /* First try constant folding based on our current lattice.  */
5447   mprts_hook = vn_lookup_simplify_result;
5448   tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5449   mprts_hook = NULL;
5450   if (tem
5451       && (TREE_CODE (tem) == SSA_NAME
5452 	  || is_gimple_min_invariant (tem)))
5453     return tem;
5454 
5455   return NULL_TREE;
5456 }
5457 
5458 /* Visit and value number STMT, return true if the value number
5459    changed.  */
5460 
5461 static bool
5462 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5463 {
5464   bool changed = false;
5465 
5466   if (dump_file && (dump_flags & TDF_DETAILS))
5467     {
5468       fprintf (dump_file, "Value numbering stmt = ");
5469       print_gimple_stmt (dump_file, stmt, 0);
5470     }
5471 
5472   if (gimple_code (stmt) == GIMPLE_PHI)
5473     changed = visit_phi (stmt, NULL, backedges_varying_p);
5474   else if (gimple_has_volatile_ops (stmt))
5475     changed = defs_to_varying (stmt);
5476   else if (gassign *ass = dyn_cast <gassign *> (stmt))
5477     {
5478       enum tree_code code = gimple_assign_rhs_code (ass);
5479       tree lhs = gimple_assign_lhs (ass);
5480       tree rhs1 = gimple_assign_rhs1 (ass);
5481       tree simplified;
5482 
5483       /* Shortcut for copies. Simplifying copies is pointless,
5484 	 since we copy the expression and value they represent.  */
5485       if (code == SSA_NAME
5486 	  && TREE_CODE (lhs) == SSA_NAME)
5487 	{
5488 	  changed = visit_copy (lhs, rhs1);
5489 	  goto done;
5490 	}
5491       simplified = try_to_simplify (ass);
5492       if (simplified)
5493 	{
5494 	  if (dump_file && (dump_flags & TDF_DETAILS))
5495 	    {
5496 	      fprintf (dump_file, "RHS ");
5497 	      print_gimple_expr (dump_file, ass, 0);
5498 	      fprintf (dump_file, " simplified to ");
5499 	      print_generic_expr (dump_file, simplified);
5500 	      fprintf (dump_file, "\n");
5501 	    }
5502 	}
5503       /* Setting value numbers to constants will occasionally
5504 	 screw up phi congruence because constants are not
5505 	 uniquely associated with a single ssa name that can be
5506 	 looked up.  */
5507       if (simplified
5508 	  && is_gimple_min_invariant (simplified)
5509 	  && TREE_CODE (lhs) == SSA_NAME)
5510 	{
5511 	  changed = set_ssa_val_to (lhs, simplified);
5512 	  goto done;
5513 	}
5514       else if (simplified
5515 	       && TREE_CODE (simplified) == SSA_NAME
5516 	       && TREE_CODE (lhs) == SSA_NAME)
5517 	{
5518 	  changed = visit_copy (lhs, simplified);
5519 	  goto done;
5520 	}
5521 
5522       if ((TREE_CODE (lhs) == SSA_NAME
5523 	   /* We can substitute SSA_NAMEs that are live over
5524 	      abnormal edges with their constant value.  */
5525 	   && !(gimple_assign_copy_p (ass)
5526 		&& is_gimple_min_invariant (rhs1))
5527 	   && !(simplified
5528 		&& is_gimple_min_invariant (simplified))
5529 	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5530 	  /* Stores or copies from SSA_NAMEs that are live over
5531 	     abnormal edges are a problem.  */
5532 	  || (code == SSA_NAME
5533 	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5534 	changed = defs_to_varying (ass);
5535       else if (REFERENCE_CLASS_P (lhs)
5536 	       || DECL_P (lhs))
5537 	changed = visit_reference_op_store (lhs, rhs1, ass);
5538       else if (TREE_CODE (lhs) == SSA_NAME)
5539 	{
5540 	  if ((gimple_assign_copy_p (ass)
5541 	       && is_gimple_min_invariant (rhs1))
5542 	      || (simplified
5543 		  && is_gimple_min_invariant (simplified)))
5544 	    {
5545 	      if (simplified)
5546 		changed = set_ssa_val_to (lhs, simplified);
5547 	      else
5548 		changed = set_ssa_val_to (lhs, rhs1);
5549 	    }
5550 	  else
5551 	    {
5552 	      /* Visit the original statement.  */
5553 	      switch (vn_get_stmt_kind (ass))
5554 		{
5555 		case VN_NARY:
5556 		  changed = visit_nary_op (lhs, ass);
5557 		  break;
5558 		case VN_REFERENCE:
5559 		  changed = visit_reference_op_load (lhs, rhs1, ass);
5560 		  break;
5561 		default:
5562 		  changed = defs_to_varying (ass);
5563 		  break;
5564 		}
5565 	    }
5566 	}
5567       else
5568 	changed = defs_to_varying (ass);
5569     }
5570   else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5571     {
5572       tree lhs = gimple_call_lhs (call_stmt);
5573       if (lhs && TREE_CODE (lhs) == SSA_NAME)
5574 	{
5575 	  /* Try constant folding based on our current lattice.  */
5576 	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5577 							    vn_valueize);
5578 	  if (simplified)
5579 	    {
5580 	      if (dump_file && (dump_flags & TDF_DETAILS))
5581 		{
5582 		  fprintf (dump_file, "call ");
5583 		  print_gimple_expr (dump_file, call_stmt, 0);
5584 		  fprintf (dump_file, " simplified to ");
5585 		  print_generic_expr (dump_file, simplified);
5586 		  fprintf (dump_file, "\n");
5587 		}
5588 	    }
5589 	  /* Setting value numbers to constants will occasionally
5590 	     screw up phi congruence because constants are not
5591 	     uniquely associated with a single ssa name that can be
5592 	     looked up.  */
5593 	  if (simplified
5594 	      && is_gimple_min_invariant (simplified))
5595 	    {
5596 	      changed = set_ssa_val_to (lhs, simplified);
5597 	      if (gimple_vdef (call_stmt))
5598 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5599 					   SSA_VAL (gimple_vuse (call_stmt)));
5600 	      goto done;
5601 	    }
5602 	  else if (simplified
5603 		   && TREE_CODE (simplified) == SSA_NAME)
5604 	    {
5605 	      changed = visit_copy (lhs, simplified);
5606 	      if (gimple_vdef (call_stmt))
5607 		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5608 					   SSA_VAL (gimple_vuse (call_stmt)));
5609 	      goto done;
5610 	    }
5611 	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5612 	    {
5613 	      changed = defs_to_varying (call_stmt);
5614 	      goto done;
5615 	    }
5616 	}
5617 
5618       /* Pick up flags from a devirtualization target.  */
5619       tree fn = gimple_call_fn (stmt);
5620       int extra_fnflags = 0;
5621       if (fn && TREE_CODE (fn) == SSA_NAME)
5622 	{
5623 	  fn = SSA_VAL (fn);
5624 	  if (TREE_CODE (fn) == ADDR_EXPR
5625 	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5626 	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5627 	}
5628       if (!gimple_call_internal_p (call_stmt)
5629 	  && (/* Calls to the same function with the same vuse
5630 		 and the same operands do not necessarily return the same
5631 		 value, unless they're pure or const.  */
5632 	      ((gimple_call_flags (call_stmt) | extra_fnflags)
5633 	       & (ECF_PURE | ECF_CONST))
5634 	      /* If calls have a vdef, subsequent calls won't have
5635 		 the same incoming vuse.  So, if 2 calls with vdef have the
5636 		 same vuse, we know they're not subsequent.
5637 		 We can value number 2 calls to the same function with the
5638 		 same vuse and the same operands which are not subsequent
5639 		 the same, because there is no code in the program that can
5640 		 compare the 2 values...  */
5641 	      || (gimple_vdef (call_stmt)
5642 		  /* ... unless the call returns a pointer which does
5643 		     not alias with anything else.  In which case the
5644 		     information that the values are distinct is encoded
5645 		     in the IL.  */
5646 		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5647 		  /* Only perform the following when being called from PRE
5648 		     which embeds tail merging.  */
5649 		  && default_vn_walk_kind == VN_WALK)))
5650 	changed = visit_reference_op_call (lhs, call_stmt);
5651       else
5652 	changed = defs_to_varying (call_stmt);
5653     }
5654   else
5655     changed = defs_to_varying (stmt);
5656  done:
5657   return changed;
5658 }
5659 
5660 
5661 /* Allocate a value number table.  */
5662 
5663 static void
5664 allocate_vn_table (vn_tables_t table, unsigned size)
5665 {
5666   table->phis = new vn_phi_table_type (size);
5667   table->nary = new vn_nary_op_table_type (size);
5668   table->references = new vn_reference_table_type (size);
5669 }
5670 
5671 /* Free a value number table.  */
5672 
5673 static void
5674 free_vn_table (vn_tables_t table)
5675 {
5676   /* Walk over elements and release vectors.  */
5677   vn_reference_iterator_type hir;
5678   vn_reference_t vr;
5679   FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5680     vr->operands.release ();
5681   delete table->phis;
5682   table->phis = NULL;
5683   delete table->nary;
5684   table->nary = NULL;
5685   delete table->references;
5686   table->references = NULL;
5687 }
5688 
5689 /* Set *ID according to RESULT.  */
5690 
5691 static void
5692 set_value_id_for_result (tree result, unsigned int *id)
5693 {
5694   if (result && TREE_CODE (result) == SSA_NAME)
5695     *id = VN_INFO (result)->value_id;
5696   else if (result && is_gimple_min_invariant (result))
5697     *id = get_or_alloc_constant_value_id (result);
5698   else
5699     *id = get_next_value_id ();
5700 }
5701 
5702 /* Set the value ids in the valid hash tables.  */
5703 
5704 static void
5705 set_hashtable_value_ids (void)
5706 {
5707   vn_nary_op_iterator_type hin;
5708   vn_phi_iterator_type hip;
5709   vn_reference_iterator_type hir;
5710   vn_nary_op_t vno;
5711   vn_reference_t vr;
5712   vn_phi_t vp;
5713 
5714   /* Now set the value ids of the things we had put in the hash
5715      table.  */
5716 
5717   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5718     if (! vno->predicated_values)
5719       set_value_id_for_result (vno->u.result, &vno->value_id);
5720 
5721   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5722     set_value_id_for_result (vp->result, &vp->value_id);
5723 
5724   FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5725 			       hir)
5726     set_value_id_for_result (vr->result, &vr->value_id);
5727 }
5728 
5729 /* Return the maximum value id we have ever seen.  */
5730 
5731 unsigned int
5732 get_max_value_id (void)
5733 {
5734   return next_value_id;
5735 }
5736 
5737 /* Return the maximum constant value id we have ever seen.  */
5738 
5739 unsigned int
5740 get_max_constant_value_id (void)
5741 {
5742   return -next_constant_value_id;
5743 }
5744 
5745 /* Return the next unique value id.  */
5746 
5747 unsigned int
5748 get_next_value_id (void)
5749 {
5750   gcc_checking_assert ((int)next_value_id > 0);
5751   return next_value_id++;
5752 }
5753 
5754 /* Return the next unique value id for constants.  */
5755 
5756 unsigned int
5757 get_next_constant_value_id (void)
5758 {
5759   gcc_checking_assert (next_constant_value_id < 0);
5760   return next_constant_value_id--;
5761 }
5762 
5763 
5764 /* Compare two expressions E1 and E2 and return true if they are equal.  */
5765 
5766 bool
5767 expressions_equal_p (tree e1, tree e2)
5768 {
5769   /* The obvious case.  */
5770   if (e1 == e2)
5771     return true;
5772 
5773   /* If either one is VN_TOP consider them equal.  */
5774   if (e1 == VN_TOP || e2 == VN_TOP)
5775     return true;
5776 
5777   /* SSA_NAME compare pointer equal.  */
5778   if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
5779     return false;
5780 
5781   /* Now perform the actual comparison.  */
5782   if (TREE_CODE (e1) == TREE_CODE (e2)
5783       && operand_equal_p (e1, e2, OEP_PURE_SAME))
5784     return true;
5785 
5786   return false;
5787 }
5788 
5789 
5790 /* Return true if the nary operation NARY may trap.  This is a copy
5791    of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
5792 
5793 bool
5794 vn_nary_may_trap (vn_nary_op_t nary)
5795 {
5796   tree type;
5797   tree rhs2 = NULL_TREE;
5798   bool honor_nans = false;
5799   bool honor_snans = false;
5800   bool fp_operation = false;
5801   bool honor_trapv = false;
5802   bool handled, ret;
5803   unsigned i;
5804 
5805   if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5806       || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5807       || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5808     {
5809       type = nary->type;
5810       fp_operation = FLOAT_TYPE_P (type);
5811       if (fp_operation)
5812 	{
5813 	  honor_nans = flag_trapping_math && !flag_finite_math_only;
5814 	  honor_snans = flag_signaling_nans != 0;
5815 	}
5816       else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5817 	honor_trapv = true;
5818     }
5819   if (nary->length >= 2)
5820     rhs2 = nary->op[1];
5821   ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5822 				       honor_trapv, honor_nans, honor_snans,
5823 				       rhs2, &handled);
5824   if (handled && ret)
5825     return true;
5826 
5827   for (i = 0; i < nary->length; ++i)
5828     if (tree_could_trap_p (nary->op[i]))
5829       return true;
5830 
5831   return false;
5832 }
5833 
5834 /* Return true if the reference operation REF may trap.  */
5835 
5836 bool
5837 vn_reference_may_trap (vn_reference_t ref)
5838 {
5839   switch (ref->operands[0].opcode)
5840     {
5841     case MODIFY_EXPR:
5842     case CALL_EXPR:
5843       /* We do not handle calls.  */
5844     case ADDR_EXPR:
5845       /* And toplevel address computations never trap.  */
5846       return false;
5847     default:;
5848     }
5849 
5850   vn_reference_op_t op;
5851   unsigned i;
5852   FOR_EACH_VEC_ELT (ref->operands, i, op)
5853     {
5854       switch (op->opcode)
5855 	{
5856 	case WITH_SIZE_EXPR:
5857 	case TARGET_MEM_REF:
5858 	  /* Always variable.  */
5859 	  return true;
5860 	case COMPONENT_REF:
5861 	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5862 	    return true;
5863 	  break;
5864 	case ARRAY_RANGE_REF:
5865 	case ARRAY_REF:
5866 	  if (TREE_CODE (op->op0) == SSA_NAME)
5867 	    return true;
5868 	  break;
5869 	case MEM_REF:
5870 	  /* Nothing interesting in itself, the base is separate.  */
5871 	  break;
5872 	/* The following are the address bases.  */
5873 	case SSA_NAME:
5874 	  return true;
5875 	case ADDR_EXPR:
5876 	  if (op->op0)
5877 	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5878 	  return false;
5879 	default:;
5880 	}
5881     }
5882   return false;
5883 }
5884 
5885 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5886 					    bitmap inserted_exprs_)
5887   : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5888     el_todo (0), eliminations (0), insertions (0),
5889     inserted_exprs (inserted_exprs_)
5890 {
5891   need_eh_cleanup = BITMAP_ALLOC (NULL);
5892   need_ab_cleanup = BITMAP_ALLOC (NULL);
5893 }
5894 
5895 eliminate_dom_walker::~eliminate_dom_walker ()
5896 {
5897   BITMAP_FREE (need_eh_cleanup);
5898   BITMAP_FREE (need_ab_cleanup);
5899 }
5900 
5901 /* Return a leader for OP that is available at the current point of the
5902    eliminate domwalk.  */
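/* E.g. if VN_INFO (op)->valnum is x_1 and x_1 currently has a leader
   recorded in the avail vector, that leader is returned; constants
   are returned directly (a brief sketch of the lookup below).  */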
5903 
5904 tree
5905 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5906 {
5907   tree valnum = VN_INFO (op)->valnum;
5908   if (TREE_CODE (valnum) == SSA_NAME)
5909     {
5910       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5911 	return valnum;
5912       if (avail.length () > SSA_NAME_VERSION (valnum))
5913 	return avail[SSA_NAME_VERSION (valnum)];
5914     }
5915   else if (is_gimple_min_invariant (valnum))
5916     return valnum;
5917   return NULL_TREE;
5918 }
5919 
5920 /* At the current point of the eliminate domwalk make OP available.  */
5921 
5922 void
5923 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5924 {
5925   tree valnum = VN_INFO (op)->valnum;
5926   if (TREE_CODE (valnum) == SSA_NAME)
5927     {
5928       if (avail.length () <= SSA_NAME_VERSION (valnum))
5929 	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
5930       tree pushop = op;
5931       if (avail[SSA_NAME_VERSION (valnum)])
5932 	pushop = avail[SSA_NAME_VERSION (valnum)];
5933       avail_stack.safe_push (pushop);
5934       avail[SSA_NAME_VERSION (valnum)] = op;
5935     }
5936 }
5937 
5938 /* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
5939    the leader for the expression if insertion was successful.  */
5940 
5941 tree
5942 eliminate_dom_walker::eliminate_insert (basic_block bb,
5943 					gimple_stmt_iterator *gsi, tree val)
5944 {
5945   /* We can insert a sequence with a single assignment only.  */
5946   gimple_seq stmts = VN_INFO (val)->expr;
5947   if (!gimple_seq_singleton_p (stmts))
5948     return NULL_TREE;
5949   gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5950   if (!stmt
5951       || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5952 	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5953 	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
5954 	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5955 	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5956 	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5957     return NULL_TREE;
5958 
5959   tree op = gimple_assign_rhs1 (stmt);
5960   if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5961       || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5962     op = TREE_OPERAND (op, 0);
5963   tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5964   if (!leader)
5965     return NULL_TREE;
5966 
5967   tree res;
5968   stmts = NULL;
5969   if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5970     res = gimple_build (&stmts, BIT_FIELD_REF,
5971 			TREE_TYPE (val), leader,
5972 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5973 			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5974   else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5975     res = gimple_build (&stmts, BIT_AND_EXPR,
5976 			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5977   else
5978     res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5979 			TREE_TYPE (val), leader);
5980   if (TREE_CODE (res) != SSA_NAME
5981       || SSA_NAME_IS_DEFAULT_DEF (res)
5982       || gimple_bb (SSA_NAME_DEF_STMT (res)))
5983     {
5984       gimple_seq_discard (stmts);
5985 
5986       /* During propagation we have to treat SSA info conservatively
5987          and thus we can end up simplifying the inserted expression
5988 	 at elimination time to something not defined in stmts.  */
5989       /* But then this is a redundancy we failed to detect, which means
5990          res now has two values.  That doesn't play well with how
5991 	 we track availability here, so give up.  */
5992       if (dump_file && (dump_flags & TDF_DETAILS))
5993 	{
5994 	  if (TREE_CODE (res) == SSA_NAME)
5995 	    res = eliminate_avail (bb, res);
5996 	  if (res)
5997 	    {
5998 	      fprintf (dump_file, "Failed to insert expression for value ");
5999 	      print_generic_expr (dump_file, val);
6000 	      fprintf (dump_file, " which is really fully redundant to ");
6001 	      print_generic_expr (dump_file, res);
6002 	      fprintf (dump_file, "\n");
6003 	    }
6004 	}
6005 
6006       return NULL_TREE;
6007     }
6008   else
6009     {
6010       gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6011       vn_ssa_aux_t vn_info = VN_INFO (res);
6012       vn_info->valnum = val;
6013       vn_info->visited = true;
6014     }
6015 
6016   insertions++;
6017   if (dump_file && (dump_flags & TDF_DETAILS))
6018     {
6019       fprintf (dump_file, "Inserted ");
6020       print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6021     }
6022 
6023   return res;
6024 }
6025 
6026 void
6027 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6028 {
6029   tree sprime = NULL_TREE;
6030   gimple *stmt = gsi_stmt (*gsi);
6031   tree lhs = gimple_get_lhs (stmt);
6032   if (lhs && TREE_CODE (lhs) == SSA_NAME
6033       && !gimple_has_volatile_ops (stmt)
6034       /* See PR43491.  Do not replace a global register variable when
6035 	 it is the RHS of an assignment.  Do replace local register
6036 	 variables since gcc does not guarantee a local variable will
6037 	 be allocated in a register.
6038 	 ???  The fix isn't effective here.  This should instead
6039 	 be ensured by not value-numbering them the same but treating
6040 	 them like volatiles?  */
6041       && !(gimple_assign_single_p (stmt)
6042 	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6043 	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6044 	       && is_global_var (gimple_assign_rhs1 (stmt)))))
6045     {
6046       sprime = eliminate_avail (b, lhs);
6047       if (!sprime)
6048 	{
6049 	  /* If there is no existing usable leader but SCCVN thinks
6050 	     it has an expression it wants to use as replacement,
6051 	     insert that.  */
6052 	  tree val = VN_INFO (lhs)->valnum;
6053 	  vn_ssa_aux_t vn_info;
6054 	  if (val != VN_TOP
6055 	      && TREE_CODE (val) == SSA_NAME
6056 	      && (vn_info = VN_INFO (val), true)
6057 	      && vn_info->needs_insertion
6058 	      && vn_info->expr != NULL
6059 	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6060 	    eliminate_push_avail (b, sprime);
6061 	}
6062 
6063       /* If this now constitutes a copy duplicate points-to
6064 	 and range info appropriately.  This is especially
6065 	 important for inserted code.  See tree-ssa-copy.c
6066 	 for similar code.  */
6067       if (sprime
6068 	  && TREE_CODE (sprime) == SSA_NAME)
6069 	{
6070 	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6071 	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
6072 	      && SSA_NAME_PTR_INFO (lhs)
6073 	      && ! SSA_NAME_PTR_INFO (sprime))
6074 	    {
6075 	      duplicate_ssa_name_ptr_info (sprime,
6076 					   SSA_NAME_PTR_INFO (lhs));
6077 	      if (b != sprime_b)
6078 		reset_flow_sensitive_info (sprime);
6079 	    }
6080 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6081 		   && SSA_NAME_RANGE_INFO (lhs)
6082 		   && ! SSA_NAME_RANGE_INFO (sprime)
6083 		   && b == sprime_b)
6084 	    duplicate_ssa_name_range_info (sprime,
6085 					   SSA_NAME_RANGE_TYPE (lhs),
6086 					   SSA_NAME_RANGE_INFO (lhs));
6087 	}
6088 
6089       /* Inhibit the use of an inserted PHI on a loop header when
6090 	 the address of the memory reference is a simple induction
6091 	 variable.  In other cases the vectorizer won't do anything
6092 	 anyway (either it's loop invariant or a complicated
6093 	 expression).  */
6094       if (sprime
6095 	  && TREE_CODE (sprime) == SSA_NAME
6096 	  && do_pre
6097 	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6098 	  && loop_outer (b->loop_father)
6099 	  && has_zero_uses (sprime)
6100 	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6101 	  && gimple_assign_load_p (stmt))
6102 	{
6103 	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6104 	  basic_block def_bb = gimple_bb (def_stmt);
6105 	  if (gimple_code (def_stmt) == GIMPLE_PHI
6106 	      && def_bb->loop_father->header == def_bb)
6107 	    {
6108 	      loop_p loop = def_bb->loop_father;
6109 	      ssa_op_iter iter;
6110 	      tree op;
6111 	      bool found = false;
6112 	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6113 		{
6114 		  affine_iv iv;
6115 		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6116 		  if (def_bb
6117 		      && flow_bb_inside_loop_p (loop, def_bb)
6118 		      && simple_iv (loop, loop, op, &iv, true))
6119 		    {
6120 		      found = true;
6121 		      break;
6122 		    }
6123 		}
6124 	      if (found)
6125 		{
6126 		  if (dump_file && (dump_flags & TDF_DETAILS))
6127 		    {
6128 		      fprintf (dump_file, "Not replacing ");
6129 		      print_gimple_expr (dump_file, stmt, 0);
6130 		      fprintf (dump_file, " with ");
6131 		      print_generic_expr (dump_file, sprime);
6132 		      fprintf (dump_file, " which would add a loop"
6133 			       " carried dependence to loop %d\n",
6134 			       loop->num);
6135 		    }
6136 		  /* Don't keep sprime available.  */
6137 		  sprime = NULL_TREE;
6138 		}
6139 	    }
6140 	}
6141 
6142       if (sprime)
6143 	{
6144 	  /* If we can propagate the value computed for LHS into
6145 	     all uses don't bother doing anything with this stmt.  */
6146 	  if (may_propagate_copy (lhs, sprime))
6147 	    {
6148 	      /* Mark it for removal.  */
6149 	      to_remove.safe_push (stmt);
6150 
6151 	      /* ???  Don't count copy/constant propagations.  */
6152 	      if (gimple_assign_single_p (stmt)
6153 		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6154 		      || gimple_assign_rhs1 (stmt) == sprime))
6155 		return;
6156 
6157 	      if (dump_file && (dump_flags & TDF_DETAILS))
6158 		{
6159 		  fprintf (dump_file, "Replaced ");
6160 		  print_gimple_expr (dump_file, stmt, 0);
6161 		  fprintf (dump_file, " with ");
6162 		  print_generic_expr (dump_file, sprime);
6163 		  fprintf (dump_file, " in all uses of ");
6164 		  print_gimple_stmt (dump_file, stmt, 0);
6165 		}
6166 
6167 	      eliminations++;
6168 	      return;
6169 	    }
6170 
6171 	  /* If this is an assignment from our leader (which
6172 	     happens in the case the value-number is a constant)
6173 	     then there is nothing to do.  Likewise if we run into
6174 	     inserted code that needed a conversion because of
6175 	     our type-agnostic value-numbering of loads.  */
6176 	  if ((gimple_assign_single_p (stmt)
6177 	       || (is_gimple_assign (stmt)
6178 		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6179 		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6180 	      && sprime == gimple_assign_rhs1 (stmt))
6181 	    return;
6182 
6183 	  /* Else replace its RHS.  */
6184 	  if (dump_file && (dump_flags & TDF_DETAILS))
6185 	    {
6186 	      fprintf (dump_file, "Replaced ");
6187 	      print_gimple_expr (dump_file, stmt, 0);
6188 	      fprintf (dump_file, " with ");
6189 	      print_generic_expr (dump_file, sprime);
6190 	      fprintf (dump_file, " in ");
6191 	      print_gimple_stmt (dump_file, stmt, 0);
6192 	    }
6193 	  eliminations++;
6194 
6195 	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
6196 					 && stmt_can_make_abnormal_goto (stmt));
6197 	  gimple *orig_stmt = stmt;
6198 	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
6199 					  TREE_TYPE (sprime)))
6200 	    {
6201 	      /* We preserve conversions to but not from function or method
6202 		 types.  This asymmetry makes it necessary to re-instantiate
6203 		 conversions here.  */
6204 	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
6205 		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6206 		sprime = fold_convert (TREE_TYPE (lhs), sprime);
6207 	      else
6208 		gcc_unreachable ();
6209 	    }
6210 	  tree vdef = gimple_vdef (stmt);
6211 	  tree vuse = gimple_vuse (stmt);
6212 	  propagate_tree_value_into_stmt (gsi, sprime);
6213 	  stmt = gsi_stmt (*gsi);
6214 	  update_stmt (stmt);
6215 	  /* In case the VDEF on the original stmt was released, value-number
6216 	     it to the VUSE.  This is to make vuse_ssa_val able to skip
6217 	     released virtual operands.  */
6218 	  if (vdef != gimple_vdef (stmt))
6219 	    {
6220 	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6221 	      VN_INFO (vdef)->valnum = vuse;
6222 	    }
6223 
6224 	  /* If we removed EH side-effects from the statement, clean
6225 	     its EH information.  */
6226 	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6227 	    {
6228 	      bitmap_set_bit (need_eh_cleanup,
6229 			      gimple_bb (stmt)->index);
6230 	      if (dump_file && (dump_flags & TDF_DETAILS))
6231 		fprintf (dump_file, "  Removed EH side-effects.\n");
6232 	    }
6233 
6234 	  /* Likewise for AB side-effects.  */
6235 	  if (can_make_abnormal_goto
6236 	      && !stmt_can_make_abnormal_goto (stmt))
6237 	    {
6238 	      bitmap_set_bit (need_ab_cleanup,
6239 			      gimple_bb (stmt)->index);
6240 	      if (dump_file && (dump_flags & TDF_DETAILS))
6241 		fprintf (dump_file, "  Removed AB side-effects.\n");
6242 	    }
6243 
6244 	  return;
6245 	}
6246     }
6247 
6248   /* If the statement is a scalar store, see if the expression
6249      has the same value number as its rhs.  If so, the store is
6250      dead.  */
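  /* A minimal sketch of such a redundant store (hypothetical names):

       *p_1 = x_2;
       ...              <- no intervening clobber of *p_1
       *p_1 = x_2;      <- the stored location already holds this value

     The same holds when an earlier load established that *p_1 already
     contains the value of the rhs.  */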
6251   if (gimple_assign_single_p (stmt)
6252       && !gimple_has_volatile_ops (stmt)
6253       && !is_gimple_reg (gimple_assign_lhs (stmt))
6254       && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6255 	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6256     {
6257       tree rhs = gimple_assign_rhs1 (stmt);
6258       vn_reference_t vnresult;
6259       /* ???  gcc.dg/torture/pr91445.c shows that a boolean-typed load of
6260          a byte known to hold 0x11 is looked up as 1, so a store of a
6261 	 boolean 1 would be detected as redundant.  Because of this we
6262 	 have to make sure to look up with a ref whose size matches
6263 	 the precision.  */
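      /* Simplified sketch of that situation (hypothetical names): with a
	 _Bool lhs whose underlying byte happens to contain 0x11, a store of
	 (_Bool) 1 must not be treated as redundant even though a _Bool-typed
	 lookup of the lhs yields 1, because the byte patterns 0x11 and 0x01
	 differ.  Hence the full-size integer lookup built below.  */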
6264       tree lookup_lhs = lhs;
6265       if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6266 	  && (TREE_CODE (lhs) != COMPONENT_REF
6267 	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6268 	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6269 	{
6270 	  if (TREE_CODE (lhs) == COMPONENT_REF
6271 	      || TREE_CODE (lhs) == MEM_REF)
6272 	    {
6273 	      tree ltype = build_nonstandard_integer_type
6274 				(TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6275 				 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6276 	      if (TREE_CODE (lhs) == COMPONENT_REF)
6277 		{
6278 		  tree foff = component_ref_field_offset (lhs);
6279 		  tree f = TREE_OPERAND (lhs, 1);
6280 		  if (!poly_int_tree_p (foff))
6281 		    lookup_lhs = NULL_TREE;
6282 		  else
6283 		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6284 					 TREE_OPERAND (lhs, 0),
6285 					 TYPE_SIZE (TREE_TYPE (lhs)),
6286 					 bit_from_pos
6287 					   (foff, DECL_FIELD_BIT_OFFSET (f)));
6288 		}
6289 	      else
6290 		lookup_lhs = build2 (MEM_REF, ltype,
6291 				     TREE_OPERAND (lhs, 0),
6292 				     TREE_OPERAND (lhs, 1));
6293 	    }
6294 	  else
6295 	    lookup_lhs = NULL_TREE;
6296 	}
6297       tree val = NULL_TREE;
6298       if (lookup_lhs)
6299 	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6300 				   VN_WALKREWRITE, &vnresult, false);
6301       if (TREE_CODE (rhs) == SSA_NAME)
6302 	rhs = VN_INFO (rhs)->valnum;
6303       if (val
6304 	  && (operand_equal_p (val, rhs, 0)
6305 	      /* Due to the bitfield lookups above we can get bit
6306 		 interpretations of the same RHS as values here.  Those
6307 		 are redundant as well.  */
6308 	      || (TREE_CODE (val) == SSA_NAME
6309 		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6310 		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6311 		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
6312 		  && TREE_OPERAND (val, 0) == rhs)))
6313 	{
6314 	  /* We can only remove the later store if the former aliases
6315 	     at least all accesses the later one does or if the store
6316 	     was to readonly memory storing the same value.  */
6317 	  ao_ref lhs_ref;
6318 	  ao_ref_init (&lhs_ref, lhs);
6319 	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
6320 	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6321 	  if (! vnresult
6322 	      || ((vnresult->set == set
6323 		   || alias_set_subset_of (set, vnresult->set))
6324 		  && (vnresult->base_set == base_set
6325 		      || alias_set_subset_of (base_set, vnresult->base_set))))
6326 	    {
6327 	      if (dump_file && (dump_flags & TDF_DETAILS))
6328 		{
6329 		  fprintf (dump_file, "Deleted redundant store ");
6330 		  print_gimple_stmt (dump_file, stmt, 0);
6331 		}
6332 
6333 	      /* Queue stmt for removal.  */
6334 	      to_remove.safe_push (stmt);
6335 	      return;
6336 	    }
6337 	}
6338     }
6339 
6340   /* If this is a control statement for which value numbering left one
6341      of its outgoing edges marked not executable, force the condition
6342      in a way consistent with that.  */
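  /* E.g. (illustrative): if value numbering proved only the true edge of
     if (a_1 != 0) executable, the condition is forced to constant-true
     form so that CFG cleanup removes the dead false edge.  */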
6343   if (gcond *cond = dyn_cast <gcond *> (stmt))
6344     {
6345       if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6346 	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6347 	{
6348 	  if (dump_file && (dump_flags & TDF_DETAILS))
6349 	    {
6350 	      fprintf (dump_file, "Removing unexecutable edge from ");
6351 	      print_gimple_stmt (dump_file, stmt, 0);
6352 	    }
6353 	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6354 	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6355 	    gimple_cond_make_true (cond);
6356 	  else
6357 	    gimple_cond_make_false (cond);
6358 	  update_stmt (cond);
6359 	  el_todo |= TODO_cleanup_cfg;
6360 	  return;
6361 	}
6362     }
6363 
6364   bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6365   bool was_noreturn = (is_gimple_call (stmt)
6366 		       && gimple_call_noreturn_p (stmt));
6367   tree vdef = gimple_vdef (stmt);
6368   tree vuse = gimple_vuse (stmt);
6369 
6370   /* If we didn't replace the whole stmt (or propagate the result
6371      into all uses), replace all uses on this stmt with their
6372      leaders.  */
6373   bool modified = false;
6374   use_operand_p use_p;
6375   ssa_op_iter iter;
6376   FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6377     {
6378       tree use = USE_FROM_PTR (use_p);
6379       /* ???  The call code above leaves stmt operands un-updated.  */
6380       if (TREE_CODE (use) != SSA_NAME)
6381 	continue;
6382       tree sprime;
6383       if (SSA_NAME_IS_DEFAULT_DEF (use))
6384 	/* ???  For default defs BB shouldn't matter, but we have to
6385 	   solve the inconsistency between rpo eliminate and
6386 	   dom eliminate avail valueization first.  */
6387 	sprime = eliminate_avail (b, use);
6388       else
6389 	/* Look for something available at the definition block of the argument.
6390 	   This avoids inconsistencies between availability there which
6391 	   decides if the stmt can be removed and availability at the
6392 	   use site.  The SSA property ensures that things available
6393 	   at the definition are also available at uses.  */
6394 	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6395       if (sprime && sprime != use
6396 	  && may_propagate_copy (use, sprime)
6397 	  /* We substitute into debug stmts to avoid excessive
6398 	     debug temporaries created by removed stmts, but we need
6399 	     to avoid doing so for inserted sprimes as we never want
6400 	     to create debug temporaries for them.  */
6401 	  && (!inserted_exprs
6402 	      || TREE_CODE (sprime) != SSA_NAME
6403 	      || !is_gimple_debug (stmt)
6404 	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6405 	{
6406 	  propagate_value (use_p, sprime);
6407 	  modified = true;
6408 	}
6409     }
6410 
6411   /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
6412      into, which is a requirement for the IPA devirt machinery.  */
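  /* A hedged example: if &a_obj was propagated into a dereference above,
     fold_stmt can turn the resulting MEM_REF based on &a_obj back into a
     direct reference to a_obj, the canonical form the later OBJ_TYPE_REF
     analysis expects.  */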
6413   gimple *old_stmt = stmt;
6414   if (modified)
6415     {
6416       /* If a formerly non-invariant ADDR_EXPR is turned into an
6417 	 invariant one it was on a separate stmt.  */
6418       if (gimple_assign_single_p (stmt)
6419 	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6420 	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6421       gimple_stmt_iterator prev = *gsi;
6422       gsi_prev (&prev);
6423       if (fold_stmt (gsi))
6424 	{
6425 	  /* fold_stmt may have created new stmts in between
6426 	     the previous stmt and the folded stmt.  Mark
6427 	     all defs created there as varying to not confuse
6428 	     the SCCVN machinery as we're using that even during
6429 	     elimination.  */
6430 	  if (gsi_end_p (prev))
6431 	    prev = gsi_start_bb (b);
6432 	  else
6433 	    gsi_next (&prev);
6434 	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
6435 	    do
6436 	      {
6437 		tree def;
6438 		ssa_op_iter dit;
6439 		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6440 					   dit, SSA_OP_ALL_DEFS)
6441 		    /* As existing DEFs may move between stmts
6442 		       only process new ones.  */
6443 		    if (! has_VN_INFO (def))
6444 		      {
6445 			vn_ssa_aux_t vn_info = VN_INFO (def);
6446 			vn_info->valnum = def;
6447 			vn_info->visited = true;
6448 		      }
6449 		if (gsi_stmt (prev) == gsi_stmt (*gsi))
6450 		  break;
6451 		gsi_next (&prev);
6452 	      }
6453 	    while (1);
6454 	}
6455       stmt = gsi_stmt (*gsi);
6456       /* In case we folded the stmt away schedule the NOP for removal.  */
6457       if (gimple_nop_p (stmt))
6458 	to_remove.safe_push (stmt);
6459     }
6460 
6461   /* Visit indirect calls and turn them into direct calls if
6462      possible using the devirtualization machinery.  Do this before
6463      checking for required EH/abnormal/noreturn cleanup as devirt
6464      may expose more of those.  */
6465   if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6466     {
6467       tree fn = gimple_call_fn (call_stmt);
6468       if (fn
6469 	  && flag_devirtualize
6470 	  && virtual_method_call_p (fn))
6471 	{
6472 	  tree otr_type = obj_type_ref_class (fn);
6473 	  unsigned HOST_WIDE_INT otr_tok
6474 	      = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6475 	  tree instance;
6476 	  ipa_polymorphic_call_context context (current_function_decl,
6477 						fn, stmt, &instance);
6478 	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6479 				    otr_type, stmt, NULL);
6480 	  bool final;
6481 	  vec <cgraph_node *> targets
6482 	      = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6483 						   otr_tok, context, &final);
6484 	  if (dump_file)
6485 	    dump_possible_polymorphic_call_targets (dump_file,
6486 						    obj_type_ref_class (fn),
6487 						    otr_tok, context);
6488 	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
6489 	    {
6490 	      tree fn;
6491 	      if (targets.length () == 1)
6492 		fn = targets[0]->decl;
6493 	      else
6494 		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6495 	      if (dump_enabled_p ())
6496 		{
6497 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6498 				   "converting indirect call to "
6499 				   "function %s\n",
6500 				   lang_hooks.decl_printable_name (fn, 2));
6501 		}
6502 	      gimple_call_set_fndecl (call_stmt, fn);
6503 	      /* If changing the call to __builtin_unreachable
6504 		 or similar noreturn function, adjust gimple_call_fntype
6505 		 too.  */
6506 	      if (gimple_call_noreturn_p (call_stmt)
6507 		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6508 		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
6509 		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6510 		      == void_type_node))
6511 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6512 	      maybe_remove_unused_call_args (cfun, call_stmt);
6513 	      modified = true;
6514 	    }
6515 	}
6516     }
6517 
6518   if (modified)
6519     {
6520       /* When changing a call into a noreturn call, cfg cleanup
6521 	 is needed to fix up the noreturn call.  */
6522       if (!was_noreturn
6523 	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6524 	to_fixup.safe_push  (stmt);
6525       /* When changing a condition or switch into one we know what
6526 	 edge will be executed, schedule a cfg cleanup.  */
6527       if ((gimple_code (stmt) == GIMPLE_COND
6528 	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
6529 	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
6530 	  || (gimple_code (stmt) == GIMPLE_SWITCH
6531 	      && TREE_CODE (gimple_switch_index
6532 			    (as_a <gswitch *> (stmt))) == INTEGER_CST))
6533 	el_todo |= TODO_cleanup_cfg;
6534       /* If we removed EH side-effects from the statement, clean
6535 	 its EH information.  */
6536       if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6537 	{
6538 	  bitmap_set_bit (need_eh_cleanup,
6539 			  gimple_bb (stmt)->index);
6540 	  if (dump_file && (dump_flags & TDF_DETAILS))
6541 	    fprintf (dump_file, "  Removed EH side-effects.\n");
6542 	}
6543       /* Likewise for AB side-effects.  */
6544       if (can_make_abnormal_goto
6545 	  && !stmt_can_make_abnormal_goto (stmt))
6546 	{
6547 	  bitmap_set_bit (need_ab_cleanup,
6548 			  gimple_bb (stmt)->index);
6549 	  if (dump_file && (dump_flags & TDF_DETAILS))
6550 	    fprintf (dump_file, "  Removed AB side-effects.\n");
6551 	}
6552       update_stmt (stmt);
6553       /* In case the VDEF on the original stmt was released, value-number
6554          it to the VUSE.  This is to make vuse_ssa_val able to skip
6555 	 released virtual operands.  */
6556       if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6557 	VN_INFO (vdef)->valnum = vuse;
6558     }
6559 
6560   /* Make new values available - for fully redundant LHS we
6561      continue with the next stmt above and skip this.  */
6562   def_operand_p defp;
6563   FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6564     eliminate_push_avail (b, DEF_FROM_PTR (defp));
6565 }
6566 
6567 /* Perform elimination for the basic-block B during the domwalk.  */
6568 
6569 edge
6570 eliminate_dom_walker::before_dom_children (basic_block b)
6571 {
6572   /* Mark new bb.  */
6573   avail_stack.safe_push (NULL_TREE);
6574 
6575   /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
6576   if (!(b->flags & BB_EXECUTABLE))
6577     return NULL;
6578 
6579   vn_context_bb = b;
6580 
6581   for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6582     {
6583       gphi *phi = gsi.phi ();
6584       tree res = PHI_RESULT (phi);
6585 
6586       if (virtual_operand_p (res))
6587 	{
6588 	  gsi_next (&gsi);
6589 	  continue;
6590 	}
6591 
6592       tree sprime = eliminate_avail (b, res);
6593       if (sprime
6594 	  && sprime != res)
6595 	{
6596 	  if (dump_file && (dump_flags & TDF_DETAILS))
6597 	    {
6598 	      fprintf (dump_file, "Replaced redundant PHI node defining ");
6599 	      print_generic_expr (dump_file, res);
6600 	      fprintf (dump_file, " with ");
6601 	      print_generic_expr (dump_file, sprime);
6602 	      fprintf (dump_file, "\n");
6603 	    }
6604 
6605 	  /* If we inserted this PHI node ourself, it's not an elimination.  */
6606 	  if (! inserted_exprs
6607 	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6608 	    eliminations++;
6609 
6610 	  /* If we will propagate into all uses don't bother to do
6611 	     anything.  */
6612 	  if (may_propagate_copy (res, sprime))
6613 	    {
6614 	      /* Mark the PHI for removal.  */
6615 	      to_remove.safe_push (phi);
6616 	      gsi_next (&gsi);
6617 	      continue;
6618 	    }
6619 
6620 	  remove_phi_node (&gsi, false);
6621 
6622 	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6623 	    sprime = fold_convert (TREE_TYPE (res), sprime);
6624 	  gimple *stmt = gimple_build_assign (res, sprime);
6625 	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6626 	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6627 	  continue;
6628 	}
6629 
6630       eliminate_push_avail (b, res);
6631       gsi_next (&gsi);
6632     }
6633 
6634   for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6635        !gsi_end_p (gsi);
6636        gsi_next (&gsi))
6637     eliminate_stmt (b, &gsi);
6638 
6639   /* Replace destination PHI arguments.  */
6640   edge_iterator ei;
6641   edge e;
6642   FOR_EACH_EDGE (e, ei, b->succs)
6643     if (e->flags & EDGE_EXECUTABLE)
6644       for (gphi_iterator gsi = gsi_start_phis (e->dest);
6645 	   !gsi_end_p (gsi);
6646 	   gsi_next (&gsi))
6647 	{
6648 	  gphi *phi = gsi.phi ();
6649 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6650 	  tree arg = USE_FROM_PTR (use_p);
6651 	  if (TREE_CODE (arg) != SSA_NAME
6652 	      || virtual_operand_p (arg))
6653 	    continue;
6654 	  tree sprime = eliminate_avail (b, arg);
6655 	  if (sprime && may_propagate_copy (arg, sprime))
6656 	    propagate_value (use_p, sprime);
6657 	}
6658 
6659   vn_context_bb = NULL;
6660 
6661   return NULL;
6662 }
6663 
6664 /* Make no longer available leaders no longer available.  */
6665 
6666 void
6667 eliminate_dom_walker::after_dom_children (basic_block)
6668 {
6669   tree entry;
6670   while ((entry = avail_stack.pop ()) != NULL_TREE)
6671     {
6672       tree valnum = VN_INFO (entry)->valnum;
6673       tree old = avail[SSA_NAME_VERSION (valnum)];
6674       if (old == entry)
6675 	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6676       else
6677 	avail[SSA_NAME_VERSION (valnum)] = entry;
6678     }
6679 }
6680 
6681 /* Remove queued stmts and perform delayed cleanups.  */
6682 
6683 unsigned
6684 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6685 {
6686   statistics_counter_event (cfun, "Eliminated", eliminations);
6687   statistics_counter_event (cfun, "Insertions", insertions);
6688 
6689   /* We cannot remove stmts during BB walk, especially not release SSA
6690      names there as this confuses the VN machinery.  The stmts ending
6691      up in to_remove are either stores or simple copies.
6692      Remove stmts in reverse order to make debug stmt creation possible.  */
6693   while (!to_remove.is_empty ())
6694     {
6695       bool do_release_defs = true;
6696       gimple *stmt = to_remove.pop ();
6697 
6698       /* When we are value-numbering a region we do not require exit PHIs to
6699 	 be present so we have to make sure to deal with uses outside of the
6700 	 region of stmts that we thought are eliminated.
6701 	 ??? Note we may be confused by uses in dead regions we didn't run
6702 	 elimination on.  Rather than checking individual uses we accept
6703 	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6704 	 contains such an example).  */
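      /* Sketch (hypothetical): if  x_5 = ...;  inside the region was deemed
	 redundant but x_5 is still used by a PHI outside the region, we
	 emit  x_5 = leader;  instead of releasing x_5, as done below.  */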
6705       if (region_p)
6706 	{
6707 	  if (gphi *phi = dyn_cast <gphi *> (stmt))
6708 	    {
6709 	      tree lhs = gimple_phi_result (phi);
6710 	      if (!has_zero_uses (lhs))
6711 		{
6712 		  if (dump_file && (dump_flags & TDF_DETAILS))
6713 		    fprintf (dump_file, "Keeping eliminated stmt live "
6714 			     "as copy because of out-of-region uses\n");
6715 		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6716 		  gimple *copy = gimple_build_assign (lhs, sprime);
6717 		  gimple_stmt_iterator gsi
6718 		    = gsi_after_labels (gimple_bb (stmt));
6719 		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6720 		  do_release_defs = false;
6721 		}
6722 	    }
6723 	  else if (tree lhs = gimple_get_lhs (stmt))
6724 	    if (TREE_CODE (lhs) == SSA_NAME
6725 		&& !has_zero_uses (lhs))
6726 	      {
6727 		if (dump_file && (dump_flags & TDF_DETAILS))
6728 		  fprintf (dump_file, "Keeping eliminated stmt live "
6729 			   "as copy because of out-of-region uses\n");
6730 		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6731 		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6732 		if (is_gimple_assign (stmt))
6733 		  {
6734 		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
6735 		    stmt = gsi_stmt (gsi);
6736 		    update_stmt (stmt);
6737 		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6738 		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6739 		    continue;
6740 		  }
6741 		else
6742 		  {
6743 		    gimple *copy = gimple_build_assign (lhs, sprime);
6744 		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6745 		    do_release_defs = false;
6746 		  }
6747 	      }
6748 	}
6749 
6750       if (dump_file && (dump_flags & TDF_DETAILS))
6751 	{
6752 	  fprintf (dump_file, "Removing dead stmt ");
6753 	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6754 	}
6755 
6756       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6757       if (gimple_code (stmt) == GIMPLE_PHI)
6758 	remove_phi_node (&gsi, do_release_defs);
6759       else
6760 	{
6761 	  basic_block bb = gimple_bb (stmt);
6762 	  unlink_stmt_vdef (stmt);
6763 	  if (gsi_remove (&gsi, true))
6764 	    bitmap_set_bit (need_eh_cleanup, bb->index);
6765 	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6766 	    bitmap_set_bit (need_ab_cleanup, bb->index);
6767 	  if (do_release_defs)
6768 	    release_defs (stmt);
6769 	}
6770 
6771       /* Removing a stmt may expose a forwarder block.  */
6772       el_todo |= TODO_cleanup_cfg;
6773     }
6774 
6775   /* Fixup stmts that became noreturn calls.  This may require splitting
6776      blocks and thus isn't possible during the dominator walk.  Do this
6777      in reverse order so we don't inadvertently remove a stmt we want to
6778      fixup by visiting a dominating now noreturn call first.  */
6779   while (!to_fixup.is_empty ())
6780     {
6781       gimple *stmt = to_fixup.pop ();
6782 
6783       if (dump_file && (dump_flags & TDF_DETAILS))
6784 	{
6785 	  fprintf (dump_file, "Fixing up noreturn call ");
6786 	  print_gimple_stmt (dump_file, stmt, 0);
6787 	}
6788 
6789       if (fixup_noreturn_call (stmt))
6790 	el_todo |= TODO_cleanup_cfg;
6791     }
6792 
6793   bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6794   bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6795 
6796   if (do_eh_cleanup)
6797     gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6798 
6799   if (do_ab_cleanup)
6800     gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6801 
6802   if (do_eh_cleanup || do_ab_cleanup)
6803     el_todo |= TODO_cleanup_cfg;
6804 
6805   return el_todo;
6806 }
6807 
6808 /* Eliminate fully redundant computations.  */
6809 
6810 unsigned
6811 eliminate_with_rpo_vn (bitmap inserted_exprs)
6812 {
6813   eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6814 
6815   eliminate_dom_walker *saved_rpo_avail = rpo_avail;
6816   rpo_avail = &walker;
6817   walker.walk (cfun->cfg->x_entry_block_ptr);
6818   rpo_avail = saved_rpo_avail;
6819 
6820   return walker.eliminate_cleanup ();
6821 }
6822 
6823 static unsigned
6824 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6825 	   bool iterate, bool eliminate);
6826 
6827 void
6828 run_rpo_vn (vn_lookup_kind kind)
6829 {
6830   default_vn_walk_kind = kind;
6831   do_rpo_vn (cfun, NULL, NULL, true, false);
6832 
6833   /* ???  Prune requirement of these.  */
6834   constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6835 
6836   /* Initialize the value ids and prune out remaining VN_TOPs
6837      from dead code.  */
6838   tree name;
6839   unsigned i;
6840   FOR_EACH_SSA_NAME (i, name, cfun)
6841     {
6842       vn_ssa_aux_t info = VN_INFO (name);
6843       if (!info->visited
6844 	  || info->valnum == VN_TOP)
6845 	info->valnum = name;
6846       if (info->valnum == name)
6847 	info->value_id = get_next_value_id ();
6848       else if (is_gimple_min_invariant (info->valnum))
6849 	info->value_id = get_or_alloc_constant_value_id (info->valnum);
6850     }
6851 
6852   /* Propagate.  */
6853   FOR_EACH_SSA_NAME (i, name, cfun)
6854     {
6855       vn_ssa_aux_t info = VN_INFO (name);
6856       if (TREE_CODE (info->valnum) == SSA_NAME
6857 	  && info->valnum != name
6858 	  && info->value_id != VN_INFO (info->valnum)->value_id)
6859 	info->value_id = VN_INFO (info->valnum)->value_id;
6860     }
6861 
6862   set_hashtable_value_ids ();
6863 
6864   if (dump_file && (dump_flags & TDF_DETAILS))
6865     {
6866       fprintf (dump_file, "Value numbers:\n");
6867       FOR_EACH_SSA_NAME (i, name, cfun)
6868 	{
6869 	  if (VN_INFO (name)->visited
6870 	      && SSA_VAL (name) != name)
6871 	    {
6872 	      print_generic_expr (dump_file, name);
6873 	      fprintf (dump_file, " = ");
6874 	      print_generic_expr (dump_file, SSA_VAL (name));
6875 	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6876 	    }
6877 	}
6878     }
6879 }
6880 
6881 /* Free VN associated data structures.  */
6882 
6883 void
6884 free_rpo_vn (void)
6885 {
6886   free_vn_table (valid_info);
6887   XDELETE (valid_info);
6888   obstack_free (&vn_tables_obstack, NULL);
6889   obstack_free (&vn_tables_insert_obstack, NULL);
6890 
6891   vn_ssa_aux_iterator_type it;
6892   vn_ssa_aux_t info;
6893   FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6894     if (info->needs_insertion)
6895       release_ssa_name (info->name);
6896   obstack_free (&vn_ssa_aux_obstack, NULL);
6897   delete vn_ssa_aux_hash;
6898 
6899   delete constant_to_value_id;
6900   constant_to_value_id = NULL;
6901 }
6902 
6903 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */
6904 
6905 static tree
6906 vn_lookup_simplify_result (gimple_match_op *res_op)
6907 {
6908   if (!res_op->code.is_tree_code ())
6909     return NULL_TREE;
6910   tree *ops = res_op->ops;
6911   unsigned int length = res_op->num_ops;
6912   if (res_op->code == CONSTRUCTOR
6913       /* ???  SCCVN's view is the CONSTRUCTOR decomposed into its elements,
6914          while GIMPLE / match-and-simplify hand it to us as a GENERIC tree.  */
6915       && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6916     {
6917       length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6918       ops = XALLOCAVEC (tree, length);
6919       for (unsigned i = 0; i < length; ++i)
6920 	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6921     }
6922   vn_nary_op_t vnresult = NULL;
6923   tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6924 				       res_op->type, ops, &vnresult);
6925   /* If this is used from expression simplification make sure to
6926      return an available expression.  */
6927   if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6928     res = rpo_avail->eliminate_avail (vn_context_bb, res);
6929   return res;
6930 }
6931 
6932 /* Return a leader for OPs value that is valid at BB.  */
6933 
6934 tree
6935 rpo_elim::eliminate_avail (basic_block bb, tree op)
6936 {
6937   bool visited;
6938   tree valnum = SSA_VAL (op, &visited);
6939   /* If we didn't visit OP then it must be defined outside of the
6940      region we process and also dominate it.  So it is available.  */
6941   if (!visited)
6942     return op;
6943   if (TREE_CODE (valnum) == SSA_NAME)
6944     {
6945       if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6946 	return valnum;
6947       vn_avail *av = VN_INFO (valnum)->avail;
6948       if (!av)
6949 	return NULL_TREE;
6950       if (av->location == bb->index)
6951 	/* On tramp3d 90% of the cases are here.  */
6952 	return ssa_name (av->leader);
6953       do
6954 	{
6955 	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6956 	  /* ???  During elimination we have to use availability at the
6957 	     definition site of a use we try to replace.  This
6958 	     is required to not run into inconsistencies because
6959 	     of dominated_by_p_w_unex behavior and removing a definition
6960 	     while not replacing all uses.
6961 	     ???  We could try to consistently walk dominators
6962 	     ignoring non-executable regions.  The nearest common
6963 	     dominator of bb and abb is where we can stop walking.  We
6964 	     may also be able to "pre-compute" (bits of) the next immediate
6965 	     (non-)dominator during the RPO walk when marking edges as
6966 	     executable.  */
6967 	  if (dominated_by_p_w_unex (bb, abb, true))
6968 	    {
6969 	      tree leader = ssa_name (av->leader);
6970 	      /* Prevent eliminations that break loop-closed SSA.  */
6971 	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6972 		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6973 		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6974 							 (leader))->loop_father,
6975 					      bb))
6976 		return NULL_TREE;
6977 	      if (dump_file && (dump_flags & TDF_DETAILS))
6978 		{
6979 		  print_generic_expr (dump_file, leader);
6980 		  fprintf (dump_file, " is available for ");
6981 		  print_generic_expr (dump_file, valnum);
6982 		  fprintf (dump_file, "\n");
6983 		}
6984 	      /* On tramp3d 99% of the _remaining_ cases succeed at
6985 	         the first entry.  */
6986 	      return leader;
6987 	    }
6988 	  /* ???  Can we somehow skip to the immediate dominator
6989 	     RPO index (bb_to_rpo)?  Again, maybe not worth, on
6990 	     tramp3d the worst number of elements in the vector is 9.  */
6991 	  av = av->next;
6992 	}
6993       while (av);
6994     }
6995   else if (valnum != VN_TOP)
6996     /* valnum is is_gimple_min_invariant.  */
6997     return valnum;
6998   return NULL_TREE;
6999 }
7000 
7001 /* Make LEADER a leader for its value at BB.  */
7002 
7003 void
7004 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7005 {
7006   tree valnum = VN_INFO (leader)->valnum;
7007   if (valnum == VN_TOP
7008       || is_gimple_min_invariant (valnum))
7009     return;
7010   if (dump_file && (dump_flags & TDF_DETAILS))
7011     {
7012       fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7013       print_generic_expr (dump_file, leader);
7014       fprintf (dump_file, " for value ");
7015       print_generic_expr (dump_file, valnum);
7016       fprintf (dump_file, "\n");
7017     }
7018   vn_ssa_aux_t value = VN_INFO (valnum);
7019   vn_avail *av;
7020   if (m_avail_freelist)
7021     {
7022       av = m_avail_freelist;
7023       m_avail_freelist = m_avail_freelist->next;
7024     }
7025   else
7026     av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7027   av->location = bb->index;
7028   av->leader = SSA_NAME_VERSION (leader);
7029   av->next = value->avail;
7030   av->next_undo = last_pushed_avail;
7031   last_pushed_avail = value;
7032   value->avail = av;
7033 }
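/* The avail records thus form two singly-linked chains: a per-value chain
   through ->next with the newest record first, and a global undo chain
   through ->next_undo rooted at last_pushed_avail.  do_unwind below pops
   the undo chain back to a saved point, unlinking each record from its
   value and recycling it on m_avail_freelist (a sketch of the existing
   mechanism, not new behavior).  */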
7034 
7035 /* Valueization hook for RPO VN plus required state.  */
7036 
7037 tree
7038 rpo_vn_valueize (tree name)
7039 {
7040   if (TREE_CODE (name) == SSA_NAME)
7041     {
7042       vn_ssa_aux_t val = VN_INFO (name);
7043       if (val)
7044 	{
7045 	  tree tem = val->valnum;
7046 	  if (tem != VN_TOP && tem != name)
7047 	    {
7048 	      if (TREE_CODE (tem) != SSA_NAME)
7049 		return tem;
7050 	      /* For all values we only valueize to an available leader
7051 		 which means we can use SSA name info without restriction.  */
7052 	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7053 	      if (tem)
7054 		return tem;
7055 	    }
7056 	}
7057     }
7058   return name;
7059 }
7060 
7061 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7062    inverted condition.  */
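/* For example (mirroring the cases below): on the edge where  a < b  holds,
   we additionally record  a != b  and  a <= b  as true and  a > b  and
   a == b  as false.  */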
7063 
7064 static void
7065 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7066 {
7067   switch (code)
7068     {
7069     case LT_EXPR:
7070       /* a < b -> a {!,<}= b */
7071       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7072 					   ops, boolean_true_node, 0, pred_e);
7073       vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7074 					   ops, boolean_true_node, 0, pred_e);
7075       /* a < b -> ! a {>,=} b */
7076       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7077 					   ops, boolean_false_node, 0, pred_e);
7078       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7079 					   ops, boolean_false_node, 0, pred_e);
7080       break;
7081     case GT_EXPR:
7082       /* a > b -> a {!,>}= b */
7083       vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7084 					   ops, boolean_true_node, 0, pred_e);
7085       vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7086 					   ops, boolean_true_node, 0, pred_e);
7087       /* a > b -> ! a {<,=} b */
7088       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7089 					   ops, boolean_false_node, 0, pred_e);
7090       vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7091 					   ops, boolean_false_node, 0, pred_e);
7092       break;
7093     case EQ_EXPR:
7094       /* a == b -> ! a {<,>} b */
7095       vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7096 					   ops, boolean_false_node, 0, pred_e);
7097       vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7098 					   ops, boolean_false_node, 0, pred_e);
7099       break;
7100     case LE_EXPR:
7101     case GE_EXPR:
7102     case NE_EXPR:
7103       /* Nothing besides inverted condition.  */
7104       break;
7105     default:;
7106     }
7107 }
7108 
7109 /* Main stmt worker for RPO VN, process BB.  */
7110 
7111 static unsigned
7112 process_bb (rpo_elim &avail, basic_block bb,
7113 	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7114 	    bool do_region, bitmap exit_bbs, bool skip_phis)
7115 {
7116   unsigned todo = 0;
7117   edge_iterator ei;
7118   edge e;
7119 
7120   vn_context_bb = bb;
7121 
7122   /* If we are in loop-closed SSA preserve this state.  This is
7123      relevant when called on regions from outside of FRE/PRE.  */
7124   bool lc_phi_nodes = false;
7125   if (!skip_phis
7126       && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7127     FOR_EACH_EDGE (e, ei, bb->preds)
7128       if (e->src->loop_father != e->dest->loop_father
7129 	  && flow_loop_nested_p (e->dest->loop_father,
7130 				 e->src->loop_father))
7131 	{
7132 	  lc_phi_nodes = true;
7133 	  break;
7134 	}
7135 
7136   /* When we visit a loop header substitute into loop info.  */
7137   if (!iterate && eliminate && bb->loop_father->header == bb)
7138     {
7139       /* Keep fields in sync with substitute_in_loop_info.  */
7140       if (bb->loop_father->nb_iterations)
7141 	bb->loop_father->nb_iterations
7142 	  = simplify_replace_tree (bb->loop_father->nb_iterations,
7143 				   NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7144     }
7145 
7146   /* Value-number all defs in the basic-block.  */
7147   if (!skip_phis)
7148     for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7149 	 gsi_next (&gsi))
7150       {
7151 	gphi *phi = gsi.phi ();
7152 	tree res = PHI_RESULT (phi);
7153 	vn_ssa_aux_t res_info = VN_INFO (res);
7154 	if (!bb_visited)
7155 	  {
7156 	    gcc_assert (!res_info->visited);
7157 	    res_info->valnum = VN_TOP;
7158 	    res_info->visited = true;
7159 	  }
7160 
7161 	/* When not iterating force backedge values to varying.  */
7162 	visit_stmt (phi, !iterate_phis);
7163 	if (virtual_operand_p (res))
7164 	  continue;
7165 
7166 	/* Eliminate */
7167 	/* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
7168 	   how we handle backedges and availability.
7169 	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
7170 	tree val = res_info->valnum;
7171 	if (res != val && !iterate && eliminate)
7172 	  {
7173 	    if (tree leader = avail.eliminate_avail (bb, res))
7174 	      {
7175 		if (leader != res
7176 		    /* Preserve loop-closed SSA form.  */
7177 		    && (! lc_phi_nodes
7178 			|| is_gimple_min_invariant (leader)))
7179 		  {
7180 		    if (dump_file && (dump_flags & TDF_DETAILS))
7181 		      {
7182 			fprintf (dump_file, "Replaced redundant PHI node "
7183 				 "defining ");
7184 			print_generic_expr (dump_file, res);
7185 			fprintf (dump_file, " with ");
7186 			print_generic_expr (dump_file, leader);
7187 			fprintf (dump_file, "\n");
7188 		      }
7189 		    avail.eliminations++;
7190 
7191 		    if (may_propagate_copy (res, leader))
7192 		      {
7193 			/* Schedule for removal.  */
7194 			avail.to_remove.safe_push (phi);
7195 			continue;
7196 		      }
7197 		    /* ???  Else generate a copy stmt.  */
7198 		  }
7199 	      }
7200 	  }
7201 	/* Only make defs available that not already are.  But make
7202 	   sure loop-closed SSA PHI node defs are picked up for
7203 	   downstream uses.  */
7204 	if (lc_phi_nodes
7205 	    || res == val
7206 	    || ! avail.eliminate_avail (bb, res))
7207 	  avail.eliminate_push_avail (bb, res);
7208       }
7209 
7210   /* For empty BBs mark outgoing edges executable.  For non-empty BBs
7211      we do this when processing the last stmt as we have to do this
7212      before elimination which otherwise forces GIMPLE_CONDs to
7213      if (1 != 0) style when seeing non-executable edges.  */
7214   if (gsi_end_p (gsi_start_bb (bb)))
7215     {
7216       FOR_EACH_EDGE (e, ei, bb->succs)
7217 	{
7218 	  if (!(e->flags & EDGE_EXECUTABLE))
7219 	    {
7220 	      if (dump_file && (dump_flags & TDF_DETAILS))
7221 		fprintf (dump_file,
7222 			 "marking outgoing edge %d -> %d executable\n",
7223 			 e->src->index, e->dest->index);
7224 	      e->flags |= EDGE_EXECUTABLE;
7225 	      e->dest->flags |= BB_EXECUTABLE;
7226 	    }
7227 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7228 	    {
7229 	      if (dump_file && (dump_flags & TDF_DETAILS))
7230 		fprintf (dump_file,
7231 			 "marking destination block %d reachable\n",
7232 			 e->dest->index);
7233 	      e->dest->flags |= BB_EXECUTABLE;
7234 	    }
7235 	}
7236     }
7237   for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7238        !gsi_end_p (gsi); gsi_next (&gsi))
7239     {
7240       ssa_op_iter i;
7241       tree op;
7242       if (!bb_visited)
7243 	{
7244 	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7245 	    {
7246 	      vn_ssa_aux_t op_info = VN_INFO (op);
7247 	      gcc_assert (!op_info->visited);
7248 	      op_info->valnum = VN_TOP;
7249 	      op_info->visited = true;
7250 	    }
7251 
7252 	  /* We somehow have to deal with uses that are not defined
7253 	     in the processed region.  Forcing unvisited uses to
7254 	     varying here doesn't play well with def-use following during
7255 	     expression simplification, so we deal with this by checking
7256 	     the visited flag in SSA_VAL.  */
7257 	}
7258 
7259       visit_stmt (gsi_stmt (gsi));
7260 
7261       gimple *last = gsi_stmt (gsi);
7262       e = NULL;
7263       switch (gimple_code (last))
7264 	{
7265 	case GIMPLE_SWITCH:
7266 	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7267 						(as_a <gswitch *> (last))));
7268 	  break;
7269 	case GIMPLE_COND:
7270 	  {
7271 	    tree lhs = vn_valueize (gimple_cond_lhs (last));
7272 	    tree rhs = vn_valueize (gimple_cond_rhs (last));
7273 	    tree val = gimple_simplify (gimple_cond_code (last),
7274 					boolean_type_node, lhs, rhs,
7275 					NULL, vn_valueize);
7276 	    /* If the condition didn't simplify see if we have recorded
7277 	       an expression from the edges taken so far.  */
7278 	    if (! val || TREE_CODE (val) != INTEGER_CST)
7279 	      {
7280 		vn_nary_op_t vnresult;
7281 		tree ops[2];
7282 		ops[0] = lhs;
7283 		ops[1] = rhs;
7284 		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7285 						boolean_type_node, ops,
7286 						&vnresult);
7287 		/* Did we get a predicated value?  */
7288 		if (! val && vnresult && vnresult->predicated_values)
7289 		  {
7290 		    val = vn_nary_op_get_predicated_value (vnresult, bb);
7291 		    if (val && dump_file && (dump_flags & TDF_DETAILS))
7292 		      {
7293 			fprintf (dump_file, "Got predicated value ");
7294 			print_generic_expr (dump_file, val, TDF_NONE);
7295 			fprintf (dump_file, " for ");
7296 			print_gimple_stmt (dump_file, last, TDF_SLIM);
7297 		      }
7298 		  }
7299 	      }
7300 	    if (val)
7301 	      e = find_taken_edge (bb, val);
7302 	    if (! e)
7303 	      {
7304 		/* If we didn't manage to compute the taken edge then
7305 		   push predicated expressions for the condition itself
7306 		   and related conditions to the hashtables.  This allows
7307 		   simplification of redundant conditions which is
7308 		   important as early cleanup.  */
7309 		edge true_e, false_e;
7310 		extract_true_false_edges_from_block (bb, &true_e, &false_e);
7311 		enum tree_code code = gimple_cond_code (last);
7312 		enum tree_code icode
7313 		  = invert_tree_comparison (code, HONOR_NANS (lhs));
7314 		tree ops[2];
7315 		ops[0] = lhs;
7316 		ops[1] = rhs;
7317 		if (do_region
7318 		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
7319 		  true_e = NULL;
7320 		if (do_region
7321 		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
7322 		  false_e = NULL;
7323 		if (true_e)
7324 		  vn_nary_op_insert_pieces_predicated
7325 		    (2, code, boolean_type_node, ops,
7326 		     boolean_true_node, 0, true_e);
7327 		if (false_e)
7328 		  vn_nary_op_insert_pieces_predicated
7329 		    (2, code, boolean_type_node, ops,
7330 		     boolean_false_node, 0, false_e);
7331 		if (icode != ERROR_MARK)
7332 		  {
7333 		    if (true_e)
7334 		      vn_nary_op_insert_pieces_predicated
7335 			(2, icode, boolean_type_node, ops,
7336 			 boolean_false_node, 0, true_e);
7337 		    if (false_e)
7338 		      vn_nary_op_insert_pieces_predicated
7339 			(2, icode, boolean_type_node, ops,
7340 			 boolean_true_node, 0, false_e);
7341 		  }
7342 		/* Relax for non-integers, inverted condition handled
7343 		   above.  */
7344 		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7345 		  {
7346 		    if (true_e)
7347 		      insert_related_predicates_on_edge (code, ops, true_e);
7348 		    if (false_e)
7349 		      insert_related_predicates_on_edge (icode, ops, false_e);
7350 		  }
7351 	      }
7352 	    break;
7353 	  }
7354 	case GIMPLE_GOTO:
7355 	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7356 	  break;
7357 	default:
7358 	  e = NULL;
7359 	}
7360       if (e)
7361 	{
7362 	  todo = TODO_cleanup_cfg;
7363 	  if (!(e->flags & EDGE_EXECUTABLE))
7364 	    {
7365 	      if (dump_file && (dump_flags & TDF_DETAILS))
7366 		fprintf (dump_file,
7367 			 "marking known outgoing %sedge %d -> %d executable\n",
7368 			 e->flags & EDGE_DFS_BACK ? "back-" : "",
7369 			 e->src->index, e->dest->index);
7370 	      e->flags |= EDGE_EXECUTABLE;
7371 	      e->dest->flags |= BB_EXECUTABLE;
7372 	    }
7373 	  else if (!(e->dest->flags & BB_EXECUTABLE))
7374 	    {
7375 	      if (dump_file && (dump_flags & TDF_DETAILS))
7376 		fprintf (dump_file,
7377 			 "marking destination block %d reachable\n",
7378 			 e->dest->index);
7379 	      e->dest->flags |= BB_EXECUTABLE;
7380 	    }
7381 	}
7382       else if (gsi_one_before_end_p (gsi))
7383 	{
7384 	  FOR_EACH_EDGE (e, ei, bb->succs)
7385 	    {
7386 	      if (!(e->flags & EDGE_EXECUTABLE))
7387 		{
7388 		  if (dump_file && (dump_flags & TDF_DETAILS))
7389 		    fprintf (dump_file,
7390 			     "marking outgoing edge %d -> %d executable\n",
7391 			     e->src->index, e->dest->index);
7392 		  e->flags |= EDGE_EXECUTABLE;
7393 		  e->dest->flags |= BB_EXECUTABLE;
7394 		}
7395 	      else if (!(e->dest->flags & BB_EXECUTABLE))
7396 		{
7397 		  if (dump_file && (dump_flags & TDF_DETAILS))
7398 		    fprintf (dump_file,
7399 			     "marking destination block %d reachable\n",
7400 			     e->dest->index);
7401 		  e->dest->flags |= BB_EXECUTABLE;
7402 		}
7403 	    }
7404 	}
7405 
7406       /* Eliminate.  That also pushes to avail.  */
7407       if (eliminate && ! iterate)
7408 	avail.eliminate_stmt (bb, &gsi);
7409       else
7410 	/* If not eliminating, make all not already available defs
7411 	   available.  */
7412 	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7413 	  if (! avail.eliminate_avail (bb, op))
7414 	    avail.eliminate_push_avail (bb, op);
7415     }
7416 
7417   /* Eliminate in destination PHI arguments.  Always substitute in dest
7418      PHIs, even for non-executable edges.  This handles region
7419      exits PHIs.  */
7420   if (!iterate && eliminate)
7421     FOR_EACH_EDGE (e, ei, bb->succs)
7422       for (gphi_iterator gsi = gsi_start_phis (e->dest);
7423 	   !gsi_end_p (gsi); gsi_next (&gsi))
7424 	{
7425 	  gphi *phi = gsi.phi ();
7426 	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7427 	  tree arg = USE_FROM_PTR (use_p);
7428 	  if (TREE_CODE (arg) != SSA_NAME
7429 	      || virtual_operand_p (arg))
7430 	    continue;
7431 	  tree sprime;
7432 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
7433 	    {
7434 	      sprime = SSA_VAL (arg);
7435 	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
7436 			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
7437 	    }
7438 	  else
7439 	    /* Look for something available at the definition block of the argument.
7440 	       This avoids inconsistencies between availability there which
7441 	       decides if the stmt can be removed and availability at the
7442 	       use site.  The SSA property ensures that things available
7443 	       at the definition are also available at uses.  */
7444 	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7445 					    arg);
7446 	  if (sprime
7447 	      && sprime != arg
7448 	      && may_propagate_copy (arg, sprime))
7449 	    propagate_value (use_p, sprime);
7450 	}
7451 
7452   vn_context_bb = NULL;
7453   return todo;
7454 }
7455 
7456 /* Unwind state per basic-block.  */
7457 
7458 struct unwind_state
7459 {
7460   /* Times this block has been visited.  */
7461   unsigned visited;
7462   /* Whether to handle this as iteration point or whether to treat
7463      incoming backedge PHI values as varying.  */
7464   bool iterate;
7465   /* Maximum RPO index this block is reachable from.  */
7466   int max_rpo;
7467   /* Unwind state.  */
7468   void *ob_top;
7469   vn_reference_t ref_top;
7470   vn_phi_t phi_top;
7471   vn_nary_op_t nary_top;
7472   vn_avail *avail_top;
7473 };
7474 
7475 /* Unwind the RPO VN state for iteration.  */
7476 
7477 static void
7478 do_unwind (unwind_state *to, rpo_elim &avail)
7479 {
7480   gcc_assert (to->iterate);
7481   for (; last_inserted_nary != to->nary_top;
7482        last_inserted_nary = last_inserted_nary->next)
7483     {
7484       vn_nary_op_t *slot;
7485       slot = valid_info->nary->find_slot_with_hash
7486 	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7487       /* Predication causes the need to restore previous state.  */
7488       if ((*slot)->unwind_to)
7489 	*slot = (*slot)->unwind_to;
7490       else
7491 	valid_info->nary->clear_slot (slot);
7492     }
7493   for (; last_inserted_phi != to->phi_top;
7494        last_inserted_phi = last_inserted_phi->next)
7495     {
7496       vn_phi_t *slot;
7497       slot = valid_info->phis->find_slot_with_hash
7498 	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7499       valid_info->phis->clear_slot (slot);
7500     }
7501   for (; last_inserted_ref != to->ref_top;
7502        last_inserted_ref = last_inserted_ref->next)
7503     {
7504       vn_reference_t *slot;
7505       slot = valid_info->references->find_slot_with_hash
7506 	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7507       (*slot)->operands.release ();
7508       valid_info->references->clear_slot (slot);
7509     }
7510   obstack_free (&vn_tables_obstack, to->ob_top);
7511 
7512   /* Prune [rpo_idx, ] from avail.  */
7513   for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
7514     {
7515       vn_ssa_aux_t val = last_pushed_avail;
7516       vn_avail *av = val->avail;
7517       val->avail = av->next;
7518       last_pushed_avail = av->next_undo;
7519       av->next = avail.m_avail_freelist;
7520       avail.m_avail_freelist = av;
7521     }
7522 }
7523 
7524 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7525    If ITERATE is true then treat backedges optimistically as not
7526    executed and iterate.  If ELIMINATE is true then perform
7527    elimination, otherwise leave that to the caller.  */
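/* Usage sketch (matching the callers above): run_rpo_vn calls
   do_rpo_vn (cfun, NULL, NULL, true, false) to value-number the whole
   function, iterating over backedges without eliminating; region-based
   callers pass an entry edge and EXIT_BBS and, per the assert below, do
   not combine iteration with elimination.  */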
7528 
7529 static unsigned
7530 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7531 	   bool iterate, bool eliminate)
7532 {
7533   unsigned todo = 0;
7534 
7535   /* We currently do not support region-based iteration when
7536      elimination is requested.  */
7537   gcc_assert (!entry || !iterate || !eliminate);
7538   /* When iterating we need loop info up-to-date.  */
7539   gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7540 
7541   bool do_region = entry != NULL;
7542   if (!do_region)
7543     {
7544       entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7545       exit_bbs = BITMAP_ALLOC (NULL);
7546       bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7547     }
7548 
7549   /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7550      re-mark those that are contained in the region.  */
7551   edge_iterator ei;
7552   edge e;
7553   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7554     e->flags &= ~EDGE_DFS_BACK;
7555 
7556   int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7557   auto_vec<std::pair<int, int> > toplevel_scc_extents;
7558   int n = rev_post_order_and_mark_dfs_back_seme
7559     (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7560 
7561   if (!do_region)
7562     BITMAP_FREE (exit_bbs);
7563 
7564   /* If there are any non-DFS_BACK edges into entry->dest skip
7565      processing PHI nodes for that block.  This supports
7566      value-numbering loop bodies w/o the actual loop.  */
7567   FOR_EACH_EDGE (e, ei, entry->dest->preds)
7568     if (e != entry
7569 	&& !(e->flags & EDGE_DFS_BACK))
7570       break;
7571   bool skip_entry_phis = e != NULL;
7572   if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7573     fprintf (dump_file, "Region does not contain all edges into "
7574 	     "the entry block, skipping its PHIs.\n");
7575 
7576   int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7577   for (int i = 0; i < n; ++i)
7578     bb_to_rpo[rpo[i]] = i;
7579 
7580   unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7581 
7582   rpo_elim avail (entry->dest);
7583   rpo_avail = &avail;
7584 
7585   /* Verify we have no extra entries into the region.  */
7586   if (flag_checking && do_region)
7587     {
7588       auto_bb_flag bb_in_region (fn);
7589       for (int i = 0; i < n; ++i)
7590 	{
7591 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7592 	  bb->flags |= bb_in_region;
7593 	}
7594       /* We can't merge the first two loops because we cannot rely
7595          on EDGE_DFS_BACK for edges not within the region.  But if
7596 	 we decide to always have the bb_in_region flag we can
7597 	 do the checking during the RPO walk itself (but then it's
7598 	 also easy to handle MEME conservatively).  */
7599       for (int i = 0; i < n; ++i)
7600 	{
7601 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7602 	  edge e;
7603 	  edge_iterator ei;
7604 	  FOR_EACH_EDGE (e, ei, bb->preds)
7605 	    gcc_assert (e == entry
7606 			|| (skip_entry_phis && bb == entry->dest)
7607 			|| (e->src->flags & bb_in_region));
7608 	}
7609       for (int i = 0; i < n; ++i)
7610 	{
7611 	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7612 	  bb->flags &= ~bb_in_region;
7613 	}
7614     }
7615 
7616   /* Create the VN state.  For the initial size of the various hashtables
7617      use a heuristic based on region size and number of SSA names.  */
7618   unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7619 			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7620   VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7621   next_value_id = 1;
7622   next_constant_value_id = -1;
7623 
7624   vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7625   gcc_obstack_init (&vn_ssa_aux_obstack);
7626 
7627   gcc_obstack_init (&vn_tables_obstack);
7628   gcc_obstack_init (&vn_tables_insert_obstack);
7629   valid_info = XCNEW (struct vn_tables_s);
7630   allocate_vn_table (valid_info, region_size);
7631   last_inserted_ref = NULL;
7632   last_inserted_phi = NULL;
7633   last_inserted_nary = NULL;
7634   last_pushed_avail = NULL;
7635 
7636   vn_valueize = rpo_vn_valueize;
7637 
7638   /* Initialize the unwind state and edge/BB executable state.  */
7639   unsigned curr_scc = 0;
7640   for (int i = 0; i < n; ++i)
7641     {
7642       basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7643       rpo_state[i].visited = 0;
7644       rpo_state[i].max_rpo = i;
7645       if (!iterate && curr_scc < toplevel_scc_extents.length ())
7646 	{
7647 	  if (i >= toplevel_scc_extents[curr_scc].first
7648 	      && i <= toplevel_scc_extents[curr_scc].second)
7649 	    rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7650 	  if (i == toplevel_scc_extents[curr_scc].second)
7651 	    curr_scc++;
7652 	}
7653       bb->flags &= ~BB_EXECUTABLE;
7654       bool has_backedges = false;
7655       edge e;
7656       edge_iterator ei;
7657       FOR_EACH_EDGE (e, ei, bb->preds)
7658 	{
7659 	  if (e->flags & EDGE_DFS_BACK)
7660 	    has_backedges = true;
7661 	  e->flags &= ~EDGE_EXECUTABLE;
7662 	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7663 	    continue;
7664 	}
7665       rpo_state[i].iterate = iterate && has_backedges;
7666     }
7667   entry->flags |= EDGE_EXECUTABLE;
7668   entry->dest->flags |= BB_EXECUTABLE;
7669 
7670   /* As heuristic to improve compile-time we handle only the N innermost
7671      loops and the outermost one optimistically.  */
7672   if (iterate)
7673     {
7674       loop_p loop;
7675       unsigned max_depth = param_rpo_vn_max_loop_depth;
7676       FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
7677 	if (loop_depth (loop) > max_depth)
7678 	  for (unsigned i = 2;
7679 	       i < loop_depth (loop) - max_depth; ++i)
7680 	    {
7681 	      basic_block header = superloop_at_depth (loop, i)->header;
7682 	      bool non_latch_backedge = false;
7683 	      edge e;
7684 	      edge_iterator ei;
7685 	      FOR_EACH_EDGE (e, ei, header->preds)
7686 		if (e->flags & EDGE_DFS_BACK)
7687 		  {
7688 		    /* There can be a non-latch backedge into the header
7689 		       which is part of an outer irreducible region.  We
7690 		       cannot avoid iterating this block then.  */
7691 		    if (!dominated_by_p (CDI_DOMINATORS,
7692 					 e->src, e->dest))
7693 		      {
7694 			if (dump_file && (dump_flags & TDF_DETAILS))
7695 			  fprintf (dump_file, "non-latch backedge %d -> %d "
7696 				   "forces iteration of loop %d\n",
7697 				   e->src->index, e->dest->index, loop->num);
7698 			non_latch_backedge = true;
7699 		      }
7700 		    else
7701 		      e->flags |= EDGE_EXECUTABLE;
7702 		  }
7703 	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7704 	    }
7705     }
7706 
7707   uint64_t nblk = 0;
7708   int idx = 0;
7709   if (iterate)
7710     /* Go and process all blocks, iterating as necessary.  */
7711     do
7712       {
7713 	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7714 
7715 	/* If the block has incoming backedges remember unwind state.  This
7716 	   is required even for non-executable blocks since in irreducible
7717 	   regions we might reach them via the backedge and re-start iterating
7718 	   from there.
7719 	   Note we can individually mark blocks with incoming backedges to
7720 	   not iterate where we then handle PHIs conservatively.  We do that
7721 	   heuristically to reduce compile-time for degenerate cases.  */
7722 	if (rpo_state[idx].iterate)
7723 	  {
7724 	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7725 	    rpo_state[idx].ref_top = last_inserted_ref;
7726 	    rpo_state[idx].phi_top = last_inserted_phi;
7727 	    rpo_state[idx].nary_top = last_inserted_nary;
7728 	    rpo_state[idx].avail_top
7729 	      = last_pushed_avail ? last_pushed_avail->avail : NULL;
7730 	  }
7731 
7732 	if (!(bb->flags & BB_EXECUTABLE))
7733 	  {
7734 	    if (dump_file && (dump_flags & TDF_DETAILS))
7735 	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
7736 		       idx, bb->index);
7737 	    idx++;
7738 	    continue;
7739 	  }
7740 
7741 	if (dump_file && (dump_flags & TDF_DETAILS))
7742 	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7743 	nblk++;
7744 	todo |= process_bb (avail, bb,
7745 			    rpo_state[idx].visited != 0,
7746 			    rpo_state[idx].iterate,
7747 			    iterate, eliminate, do_region, exit_bbs, false);
7748 	rpo_state[idx].visited++;
7749 
        /* Verify whether changed values flow over executable outgoing
           backedges and whether those changes affect destination PHI values
           (that is the thing we can easily verify).  Reduce over all such
           edges to the farthest-away PHI.  */
        int iterate_to = -1;
        edge_iterator ei;
        edge e;
        FOR_EACH_EDGE (e, ei, bb->succs)
          if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
              == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
              && rpo_state[bb_to_rpo[e->dest->index]].iterate)
            {
              int destidx = bb_to_rpo[e->dest->index];
              if (!rpo_state[destidx].visited)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Unvisited destination %d\n",
                             e->dest->index);
                  if (iterate_to == -1 || destidx < iterate_to)
                    iterate_to = destidx;
                  continue;
                }
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Looking for changed values of backedge"
                         " %d->%d destination PHIs\n",
                         e->src->index, e->dest->index);
              vn_context_bb = e->dest;
              gphi_iterator gsi;
              for (gsi = gsi_start_phis (e->dest);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                {
                  bool inserted = false;
                  /* While we would ideally iterate just on value changes,
                     we CSE PHIs and do that even across basic-block
                     boundaries.  So hashtable state changes can be
                     important as well (they are roughly equivalent to
                     PHI argument value changes).  To avoid iterating
                     excessively because of that we track with GF_PLF_1
                     whether a PHI was CSEd to.  */
                  bool phival_changed;
                  if ((phival_changed = visit_phi (gsi.phi (),
                                                   &inserted, false))
                      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                    {
                      if (!phival_changed
                          && dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "PHI was CSEd and hashtable "
                                 "state changed\n");
                      if (iterate_to == -1 || destidx < iterate_to)
                        iterate_to = destidx;
                      break;
                    }
                }
              vn_context_bb = NULL;
            }
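        /* If a destination PHI changed over an executable backedge, unwind
           the tables to the state saved for that block and restart the
           iteration from there.  */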
        if (iterate_to != -1)
          {
            do_unwind (&rpo_state[iterate_to], avail);
            idx = iterate_to;
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Iterating to %d BB%d\n",
                       iterate_to, rpo[iterate_to]);
            continue;
          }

        idx++;
      }
    while (idx < n);

  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
        {
          int idx = bitmap_first_set_bit (worklist);
          bitmap_clear_bit (worklist, idx);
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
          gcc_assert ((bb->flags & BB_EXECUTABLE)
                      && !rpo_state[idx].visited);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

          /* When we run into predecessor edges whose executable state we
             cannot trust, mark them executable so PHI processing will be
             conservative.
             ???  Do we need to force arguments flowing over such an edge
             to be varying, or will they always be?  */
          edge_iterator ei;
          edge e;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!(e->flags & EDGE_EXECUTABLE)
                && (bb == entry->dest
                    || (!rpo_state[bb_to_rpo[e->src->index]].visited
                        && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
                            >= (int)idx))))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Cannot trust state of predecessor "
                           "edge %d -> %d, marking executable\n",
                           e->src->index, e->dest->index);
                e->flags |= EDGE_EXECUTABLE;
              }

          nblk++;
          todo |= process_bb (avail, bb, false, false, false, eliminate,
                              do_region, exit_bbs,
                              skip_entry_phis && bb == entry->dest);
          rpo_state[idx].visited++;

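          /* Queue all successors inside the region that became executable
             and have not been visited yet.  */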
          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & EDGE_EXECUTABLE)
                && e->dest->index != EXIT_BLOCK
                && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
                && !rpo_state[bb_to_rpo[e->dest->index]].visited)
              bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
        }
    }

  /* Collect statistics for the statistics machinery and the dump file.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
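  /* Count the recorded values and the entries on their availability
     chains for the statistics below.  */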
  unsigned nvalues = 0, navail = 0;
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
        {
          navail++;
          av = av->next;
        }
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
        {
          /* Elimination for region-based VN needs to be done within the
             RPO walk.  */
          gcc_assert (! do_region);
          /* Note we can't use avail.walk here because that gets confused
             by the existing availability and it will be less efficient
             as well.  */
          todo |= eliminate_with_rpo_vn (NULL);
        }
      else
        todo |= avail.eliminate_cleanup (do_region);
    }

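  /* Clear the hooks and release the RPO bookkeeping arrays.  */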
  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}

/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region, PHI nodes in ENTRY->dest are not
   considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}


namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
  bool may_iterate;
}; // class pass_fre

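/* Execute the FRE pass on FUN, iterating the RPO VN walk only when
   MAY_ITERATE is set and we optimize above -O1.  */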
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  if (scev_initialized_p ())
    scev_reset_htab ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE flags and rewrite more memory into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE