/* Scoped tables used by the SSA dominator optimizations.
   Copyright (C) 2013-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
				   const struct hashable_expr *);

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

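      /* A NULL first element is the marker pushed when this block's
	 scope was opened; stop unwinding when we reach it.  */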
      if (victim.first == NULL)
	break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
	  victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
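      /* If VICTIM shadowed an earlier entry, restore that entry;
	 otherwise the expression was newly added in this block, so
	 clear the slot entirely.  */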
      if (victim.second != NULL)
	{
	  delete *slot;
	  *slot = victim.second;
	}
      else
	m_avail_exprs->clear_slot (slot);
    }
}

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
				class expr_hash_elt *elt2,
				char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

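  /* TYPE is only a tag for dump output.  pop_to_marker will remove
     ELT1 from the hash table and restore ELT2 (if non-NULL) in its
     place.  */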
  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores leading to a candidate.  We re-use the SCCVN param
     for this as it is basically the same complexity.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  return NULL;
}

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the relationship of its operands to each other, which would allow
   us to derive a constant value for the RHS of STMT.  */

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
					      class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
	{
	  enum tree_code code = expr->ops.binary.op;

	  switch (code)
	    {
	    /* For these cases, if we know the operands
	       are equal, then we know the result.  */
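	    /* For example, if a_1 == b_2 is known to hold, then
	       a_1 - b_2 must be zero and a_1 / b_2 must be one.  */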
	    case MIN_EXPR:
	    case MAX_EXPR:
	    case BIT_IOR_EXPR:
	    case BIT_AND_EXPR:
	    case BIT_XOR_EXPR:
	    case MINUS_EXPR:
	    case TRUNC_DIV_EXPR:
	    case CEIL_DIV_EXPR:
	    case FLOOR_DIV_EXPR:
	    case ROUND_DIV_EXPR:
	    case EXACT_DIV_EXPR:
	    case TRUNC_MOD_EXPR:
	    case CEIL_MOD_EXPR:
	    case FLOOR_MOD_EXPR:
	    case ROUND_MOD_EXPR:
	      {
		/* Build a simple equality expr and query the hash table
		   for it.  */
		struct hashable_expr expr;
		expr.type = boolean_type_node;
		expr.kind = EXPR_BINARY;
		expr.ops.binary.op = EQ_EXPR;
		expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
		expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
		class expr_hash_elt element2 (&expr, NULL_TREE);
		expr_hash_elt **slot
		  = m_avail_exprs->find_slot (&element2, NO_INSERT);
		tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

		/* If the query was successful and returned a nonzero
		   result, then we know that the operands of the binary
		   expression are the same.  In many cases this allows
		   us to compute a constant result of the expression
		   at compile time, even if we do not know the exact
		   values of the operands.  */
		if (slot && *slot && integer_onep ((*slot)->lhs ()))
		  {
		    switch (code)
		      {
		      case MIN_EXPR:
		      case MAX_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_AND_EXPR:
			return gimple_assign_rhs1 (stmt);

		      case MINUS_EXPR:
			/* This is unsafe for certain floats even in non-IEEE
			   formats.  In IEEE, it is unsafe because it gives
			   the wrong result for NaNs.  */
			if (FLOAT_TYPE_P (result_type)
			    && HONOR_NANS (result_type))
			  break;
			/* FALLTHRU */
		      case BIT_XOR_EXPR:
		      case TRUNC_MOD_EXPR:
		      case CEIL_MOD_EXPR:
		      case FLOOR_MOD_EXPR:
		      case ROUND_MOD_EXPR:
			return build_zero_cst (result_type);

		      case TRUNC_DIV_EXPR:
		      case CEIL_DIV_EXPR:
		      case FLOOR_DIV_EXPR:
		      case ROUND_DIV_EXPR:
		      case EXACT_DIV_EXPR:
			/* Avoid _Fract types where we can't build 1.  */
			if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
			  break;
			return build_one_cst (result_type);

		      default:
			gcc_unreachable ();
		      }
		  }
		break;
	      }

	    default:
	      break;
	    }
	}
    }
  return NULL_TREE;
}

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is also
   added to AVAIL_EXPRS_STACK, so that it can be removed when we finish
   processing this block and its children.  */

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
	 be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
								  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
	 We must initialize *SLOT to a real entry, even if we found a
	 way to prove ELEMENT was a constant after not finding ELEMENT
	 in the hash table.

	 An uninitialized or empty slot is an indication no prior objects
	 entered into the hash table had a hash collision with ELEMENT.

	 If we fail to do so and had such entries in the table, they
	 would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation, do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
	 hash with vuse1 then try to reach its stmt by walking
	 up the virtual use-def chain using walk_non_aliased_vuses.
	 But don't do this when removing expressions from the hash.  */
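      /* The parenthesized comma expression below initializes REF and,
	 when TBAA_P is false, zeroes its alias sets so that the walk
	 does not rely on type-based alias analysis; it always
	 evaluates to true so the && chain continues.  */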
      ao_ref ref;
      if (!(vuse1 && vuse2
	    && gimple_assign_single_p (stmt)
	    && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	    && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
		ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
	    && walk_non_aliased_vuses (&ref, vuse2,
				       vuse_eq, NULL, NULL, vuse1) != NULL))
	{
	  if (insert)
	    {
	      class expr_hash_elt *element2 = new expr_hash_elt (element);

	      /* Insert the expr into the hash by replacing the current
		 entry and recording the value to restore in the
		 avail_exprs_stack.  */
	      record_expr (element2, *slot, '2');
	      *slot = element2;
	    }
	  return NULL_TREE;
	}
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
	lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
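  /* Only record ELEMENT on the unwinding stack when it was newly
     inserted; if an equivalent entry already exists, ELEMENT is
     redundant.  */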
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value in HSTATE.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical initial HSTATE.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, as that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
				       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
				       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
	{
	  /* Make equivalent statements of both these kinds hash together.
	     Dealing with both MEM_REF and ARRAY_REF allows us not to care
	     about equivalence with other statements not considered here.  */
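	  /* For example, a COMPONENT_REF and a MEM_REF that cover the
	     same base, offset and size hash identically here.  */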
	  bool reverse;
	  poly_int64 offset, size, max_size;
	  tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
					       &reverse);
	  /* Strictly, we could try to normalize variable-sized accesses too,
	     but here we just deal with the common case.  */
	  if (known_size_p (max_size)
	      && known_eq (size, max_size))
	    {
	      enum tree_code code = MEM_REF;
	      hstate.add_object (code);
	      inchash::add_expr (base, hstate);
	      hstate.add_object (offset);
	      hstate.add_object (size);
	      return hstate.end ();
	    }
	}
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}

/* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
   to each other.  (That is, they return the value of the same bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;
  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1)
    return false;

  /* Types were compatible, so this is a sanity check.  */
  gcc_assert (known_eq (sz0, sz1));

  return known_eq (off0, off1) && operand_equal_p (base0, base1, 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
		       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If only one type is NULL, the expressions cannot be equal.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
	  || TREE_CODE (type1) == ERROR_MARK
	  || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
	  || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
	  || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
				    expr1->ops.single.rhs)
	     || operand_equal_p (expr0->ops.single.rhs,
				 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
	return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
			   expr1->ops.binary.opnd0, 0)
	  && operand_equal_p (expr0->ops.binary.opnd1,
			      expr1->ops.binary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
	      && operand_equal_p (expr0->ops.binary.opnd0,
				  expr1->ops.binary.opnd1, 0)
	      && operand_equal_p (expr0->ops.binary.opnd1,
				  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
	  || !operand_equal_p (expr0->ops.ternary.opnd2,
			       expr1->ops.ternary.opnd2, 0))
	return false;

      /* BIT_INSERT_EXPR has an implicit operand: the type precision
         of op1.  Check that it is the same for both expressions.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
	  && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
              != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
			   expr1->ops.ternary.opnd0, 0)
	  && operand_equal_p (expr0->ops.ternary.opnd1,
			      expr1->ops.ternary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
	      && operand_equal_p (expr0->ops.ternary.opnd0,
				  expr1->ops.ternary.opnd1, 0)
	      && operand_equal_p (expr0->ops.ternary.opnd1,
				  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

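	/* Calls that can throw must also have matching EH landing
	   pads (or none at all) to be considered equal.  */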
	if (stmt_could_throw_p (expr0->ops.call.fn_from))
	  {
	    int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
	    int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
	    if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
	      return false;
	  }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;
        case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
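	  /* Canonicalize all conversion codes to NOP_EXPR so that
	     equivalent conversions hash and compare equal.  */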
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;
        case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;
        case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
      case EXPR_SINGLE:
	print_generic_expr (stream, m_expr.ops.single.rhs);
	break;

      case EXPR_UNARY:
	fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
	print_generic_expr (stream, m_expr.ops.unary.opnd);
	break;

      case EXPR_BINARY:
	print_generic_expr (stream, m_expr.ops.binary.opnd0);
	fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
	print_generic_expr (stream, m_expr.ops.binary.opnd1);
	break;

      case EXPR_TERNARY:
	fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
	print_generic_expr (stream, m_expr.ops.ternary.opnd0);
	fputs (", ", stream);
	print_generic_expr (stream, m_expr.ops.ternary.opnd1);
	fputs (", ", stream);
	print_generic_expr (stream, m_expr.ops.ternary.opnd2);
	fputs (">", stream);
	break;

      case EXPR_CALL:
        {
          size_t i;
          size_t nargs = m_expr.ops.call.nargs;
          gcall *fn_from;

          fn_from = m_expr.ops.call.fn_from;
          if (gimple_call_internal_p (fn_from))
            fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                   stream);
          else
	    print_generic_expr (stream, gimple_call_fn (fn_from));
          fprintf (stream, " (");
          for (i = 0; i < nargs; i++)
            {
	      print_generic_expr (stream, m_expr.ops.call.args[i]);
              if (i + 1 < nargs)
                fprintf (stream, ", ");
            }
          fprintf (stream, ")");
        }
        break;

      case EXPR_PHI:
        {
          size_t i;
          size_t nargs = m_expr.ops.phi.nargs;

          fprintf (stream, "PHI <");
          for (i = 0; i < nargs; i++)
            {
	      print_generic_expr (stream, m_expr.ops.phi.args[i]);
              if (i + 1 < nargs)
                fprintf (stream, ", ");
            }
          fprintf (stream, ">");
        }
        break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding; otherwise
	 pop off the next entry, as they're recorded in pairs.  */
      if (dest == NULL)
	break;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< COPY ");
	  print_generic_expr (dump_file, dest);
	  fprintf (dump_file, " = ");
	  print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
	  fprintf (dump_file, "\n");
	}

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
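  /* The pair is pushed in <previous value, name> order so that
     pop_to_marker pops the name first.  */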
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

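/* Hash table equality function for expression hash table entries.
   Entries with the same stamp are identical; otherwise the hashes
   must match and the expressions must be equivalent with compatible
   types.  */
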
bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const struct expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const struct expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it onto *P.
   The recorded value of the comparison is VAL (true by default).  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
			   bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record into *P that COND is true and INVERTED is false.  Also record
   that any conditions implied by COND are true as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
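      /* For floating-point operands, a true a < b or a > b also
	 implies that the operands are ordered and compare unequal
	 under LTGT_EXPR; record those derived conditions too.  */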
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	  build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
	}

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Finally, record the original condition as true and INVERTED
     as false.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}