/* Scoped tables for SSA dominator optimizations.
   Copyright (C) 2013-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
				   const struct hashable_expr *);

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
	break;

      /* This must precede the actual removal from the hash table,
	 as ELEMENT and the table entry may share a call argument
	 vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< ");
	  victim.first->print (dump_file);
	}

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
	{
	  delete *slot;
	  *slot = victim.second;
	}
      else
	m_avail_exprs->clear_slot (slot);
    }
}

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  TYPE is a single character used only to
   annotate the dump output.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
				class expr_hash_elt *elt2,
				char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  Returns non-NULL (terminating the walk)
   when VUSE1 is the virtual operand we are looking for.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know
   something about the operands' relationship to each other which
   would allow us to derive a constant value for the RHS of STMT.  */
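
/* For example (illustrative): if the table already records that
   "a_1 == b_2" is true, then

     x_3 = a_1 - b_2;

   simplifies to the constant 0, and "x_4 = a_1 / b_2" simplifies
   to 1, even though the runtime values of a_1 and b_2 are unknown.  */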

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
					      class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
	{
	  enum tree_code code = expr->ops.binary.op;

	  switch (code)
	    {
	    /* For these cases, if we know the operands
	       are equal, then we know the result.  */
	    case MIN_EXPR:
	    case MAX_EXPR:
	    case BIT_IOR_EXPR:
	    case BIT_AND_EXPR:
	    case BIT_XOR_EXPR:
	    case MINUS_EXPR:
	    case TRUNC_DIV_EXPR:
	    case CEIL_DIV_EXPR:
	    case FLOOR_DIV_EXPR:
	    case ROUND_DIV_EXPR:
	    case EXACT_DIV_EXPR:
	    case TRUNC_MOD_EXPR:
	    case CEIL_MOD_EXPR:
	    case FLOOR_MOD_EXPR:
	    case ROUND_MOD_EXPR:
	      {
		/* Build a simple equality expr and query the hash table
		   for it.  */
		struct hashable_expr expr;
		expr.type = boolean_type_node;
		expr.kind = EXPR_BINARY;
		expr.ops.binary.op = EQ_EXPR;
		expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
		expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
		class expr_hash_elt element2 (&expr, NULL_TREE);
		expr_hash_elt **slot
		  = m_avail_exprs->find_slot (&element2, NO_INSERT);
		tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

		/* If the query was successful and returned a nonzero
		   result, then we know that the operands of the binary
		   expression are the same.  In many cases this allows
		   us to compute a constant result of the expression
		   at compile time, even if we do not know the exact
		   values of the operands.  */
		if (slot && *slot && integer_onep ((*slot)->lhs ()))
		  {
		    switch (code)
		      {
		      case MIN_EXPR:
		      case MAX_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_AND_EXPR:
			return gimple_assign_rhs1 (stmt);

		      case MINUS_EXPR:
			/* This is unsafe for certain floats even in non-IEEE
			   formats.  In IEEE, it is unsafe because it gives
			   the wrong result for NaNs.  */
			if (FLOAT_TYPE_P (result_type)
			    && HONOR_NANS (result_type))
			  break;
			/* FALLTHRU */
		      case BIT_XOR_EXPR:
		      case TRUNC_MOD_EXPR:
		      case CEIL_MOD_EXPR:
		      case FLOOR_MOD_EXPR:
		      case ROUND_MOD_EXPR:
			return build_zero_cst (result_type);

		      case TRUNC_DIV_EXPR:
		      case CEIL_DIV_EXPR:
		      case FLOOR_DIV_EXPR:
		      case ROUND_DIV_EXPR:
		      case EXACT_DIV_EXPR:
			/* Avoid _Fract types where we can't build 1.  */
			if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
			  break;
			return build_one_cst (result_type);

		      default:
			gcc_unreachable ();
		      }
		  }
		break;
	      }

	    default:
	      break;
	    }
	}
    }
  return NULL_TREE;
}

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is also
   added to AVAIL_EXPRS_STACK, so that it can be removed when we finish
   processing this block and its children.  */
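
/* For example (illustrative): if a dominating block contained
   "y_3 = a_1 + 1" and the current statement is "x_2 = a_1 + 1",
   the lookup returns y_3, allowing the caller to replace the
   redundant computation with a copy from y_3.  */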

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p,
				      expr_hash_elt **elt)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
	 be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
								  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
	 We must initialize *SLOT to a real entry, even if we found a
	 way to prove ELEMENT was a constant after not finding ELEMENT
	 in the hash table.

	 An uninitialized or empty slot is an indication that no prior
	 objects entered into the hash table had a hash collision with
	 ELEMENT.

	 If we fail to do so and had such entries in the table, they
	 would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
	 hash with vuse1 then try to reach its stmt by walking
	 up the virtual use-def chain using walk_non_aliased_vuses.
	 But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      if (!(vuse1 && vuse2
	    && gimple_assign_single_p (stmt)
	    && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	    && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
		ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
	    && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
				       limit, vuse1) != NULL))
	{
	  if (insert)
	    {
	      class expr_hash_elt *element2 = new expr_hash_elt (element);

	      /* Insert the expr into the hash by replacing the current
		 entry and recording the value to restore in the
		 avail_exprs_stack.  */
	      record_expr (element2, *slot, '2');
	      *slot = element2;
	    }
	  return NULL_TREE;
	}
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();
  if (elt)
    *elt = *slot;

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
	lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */
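
/* For example (illustrative): after traversing the true edge of
   "if (a_1 < b_2)", the dominator optimizer records "a_1 < b_2"
   with value true, so a later lookup of that condition folds to
   true throughout the dominated region.  */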

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */
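
/* For example (illustrative): for a commutative code such as PLUS_EXPR,
   "a_1 + b_2" and "b_2 + a_1" must receive the same hash, matching
   hashable_expr_equal_p, which accepts either operand order for
   commutative codes.  */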

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
	 Don't hash the type, that can lead to having nodes which
	 compare equal according to operand_equal_p, but which
	 have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
				       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
				       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
	{
	  /* Make equivalent statements of both these kinds hash together.
	     Dealing with both MEM_REF and ARRAY_REF allows us not to care
	     about equivalence with other statements not considered here.  */
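	  /* For example (illustrative): "a[2]" and the equivalent
	     "MEM[(int *)&a + 8]" (assuming a 4-byte int) denote the same
	     base, offset and size, and therefore hash together.  */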
	  bool reverse;
	  poly_int64 offset, size, max_size;
	  tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
					       &reverse);
	  /* Strictly, we could try to normalize variable-sized accesses too,
	     but here we just deal with the common case.  */
	  if (known_size_p (max_size)
	      && known_eq (size, max_size))
	    {
	      enum tree_code code = MEM_REF;
	      hstate.add_object (code);
	      inchash::add_expr (base, hstate,
				 TREE_CODE (base) == MEM_REF
				 ? OEP_ADDRESS_OF : 0);
	      hstate.add_object (offset);
	      hstate.add_object (size);
	      return hstate.end ();
	    }
	}
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}

/* Compare trees T0 and T1 to see if they are MEM_REFs or ARRAY_REFs
   equivalent to each other.  (That is, they return the value of the
   same bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could
   still mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;
  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1,
			  (TREE_CODE (base0) == MEM_REF
			   || TREE_CODE (base0) == TARGET_MEM_REF)
			  && (TREE_CODE (base1) == MEM_REF
			      || TREE_CODE (base1) == TARGET_MEM_REF)
			  ? OEP_ADDRESS_OF : 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
		       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If exactly one of the types is NULL, the expressions cannot
     be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types do not have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
	  || TREE_CODE (type1) == ERROR_MARK
	  || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
	  || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
	  || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
				    expr1->ops.single.rhs)
	     || operand_equal_p (expr0->ops.single.rhs,
				 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
	return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
			   expr1->ops.binary.opnd0, 0)
	  && operand_equal_p (expr0->ops.binary.opnd1,
			      expr1->ops.binary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
	      && operand_equal_p (expr0->ops.binary.opnd0,
				  expr1->ops.binary.opnd1, 0)
	      && operand_equal_p (expr0->ops.binary.opnd1,
				  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
	  || !operand_equal_p (expr0->ops.ternary.opnd2,
			       expr1->ops.ternary.opnd2, 0))
	return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
	 of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
	  && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
	  && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
	      != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
	return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
			   expr1->ops.ternary.opnd0, 0)
	  && operand_equal_p (expr0->ops.ternary.opnd1,
			      expr1->ops.ternary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
	      && operand_equal_p (expr0->ops.ternary.opnd0,
				  expr1->ops.ternary.opnd1, 0)
	      && operand_equal_p (expr0->ops.ternary.opnd1,
				  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

	if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
	  {
	    int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
	    int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
	    if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
	      return false;
	  }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Given a statement STMT, construct a hash table element.  */
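
/* Note (illustrative): only the expression itself participates in
   hashing and equality; the LHS is carried along as data.  That is
   why "y_3 = a_1 + 1" and a later "x_2 = a_1 + 1" map to the same
   table entry despite their different LHSs.  */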

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;
        case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;
        case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;
        case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
      case EXPR_SINGLE:
	print_generic_expr (stream, m_expr.ops.single.rhs);
	break;

      case EXPR_UNARY:
	fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
	print_generic_expr (stream, m_expr.ops.unary.opnd);
	break;

      case EXPR_BINARY:
	print_generic_expr (stream, m_expr.ops.binary.opnd0);
	fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
	print_generic_expr (stream, m_expr.ops.binary.opnd1);
	break;

      case EXPR_TERNARY:
	fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
	print_generic_expr (stream, m_expr.ops.ternary.opnd0);
	fputs (", ", stream);
	print_generic_expr (stream, m_expr.ops.ternary.opnd1);
	fputs (", ", stream);
	print_generic_expr (stream, m_expr.ops.ternary.opnd2);
	fputs (">", stream);
	break;

      case EXPR_CALL:
        {
          size_t i;
          size_t nargs = m_expr.ops.call.nargs;
          gcall *fn_from;

          fn_from = m_expr.ops.call.fn_from;
          if (gimple_call_internal_p (fn_from))
	    fprintf (stream, ".%s",
		     internal_fn_name (gimple_call_internal_fn (fn_from)));
          else
	    print_generic_expr (stream, gimple_call_fn (fn_from));
          fprintf (stream, " (");
          for (i = 0; i < nargs; i++)
            {
	      print_generic_expr (stream, m_expr.ops.call.args[i]);
              if (i + 1 < nargs)
                fprintf (stream, ", ");
            }
          fprintf (stream, ")");
        }
        break;

      case EXPR_PHI:
        {
          size_t i;
          size_t nargs = m_expr.ops.phi.nargs;

          fprintf (stream, "PHI <");
          for (i = 0; i < nargs; i++)
            {
	      print_generic_expr (stream, m_expr.ops.phi.args[i]);
              if (i + 1 < nargs)
                fprintf (stream, ", ");
            }
          fprintf (stream, ">");
        }
        break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the recorded DEST/PREV pair to
   restore DEST to its prior value.  */
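
/* For example (illustrative): on entering a block dominated by the true
   edge of "if (x_1 == 0)", the optimizer records x_1's value as 0 after
   pushing its previous value; popping to the marker when leaving the
   block restores x_1's previous value.  */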

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
	 pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
	break;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< COPY ");
	  print_generic_expr (dump_file, dest);
	  fprintf (dump_file, " = ");
	  print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
	  fprintf (dump_file, "\n");
	}

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */
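
/* The pair is pushed PREV_X first and X second, so that pop_to_marker
   pops X first and can use a NULL in that slot as the frame marker.  */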

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */
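
/* For example (illustrative): if SSA_NAME_VALUE (b_2) is already the
   constant 5, then recording "a_1 = b_2" records a_1's value as 5
   directly, collapsing the copy chain.  */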

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const class expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const class expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1, with the boolean value
   selected by VAL, and push it onto the vector P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
			   bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	  build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
	}

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now record the original true and false conditions themselves.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}