1 /* Header file for SSA dominator optimizations.
2    Copyright (C) 2013-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14  for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "function.h"
24 #include "basic-block.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "tree-pretty-print.h"
29 #include "tree-ssa-scopedtables.h"
30 #include "tree-ssa-threadedge.h"
31 #include "stor-layout.h"
32 #include "fold-const.h"
33 #include "tree-eh.h"
34 #include "internal-fn.h"
35 #include "tree-dfa.h"
36 
37 static bool hashable_expr_equal_p (const struct hashable_expr *,
38 				   const struct hashable_expr *);
39 
40 /* Initialize local stacks for this optimizer and record equivalences
41    upon entry to BB.  Equivalences can come from the edge traversed to
42    reach BB or they may come from PHI nodes at the start of BB.  */
43 
44 /* Pop items off the unwinding stack, removing each from the hash table
45    until a marker is encountered.  */
46 
47 void
pop_to_marker()48 avail_exprs_stack::pop_to_marker ()
49 {
50   /* Remove all the expressions made available in this block.  */
51   while (m_stack.length () > 0)
52     {
53       std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
54       expr_hash_elt **slot;
55 
56       if (victim.first == NULL)
57 	break;
58 
59       /* This must precede the actual removal from the hash table,
60          as ELEMENT and the table entry may share a call argument
61          vector which will be freed during removal.  */
62       if (dump_file && (dump_flags & TDF_DETAILS))
63         {
64           fprintf (dump_file, "<<<< ");
65 	  victim.first->print (dump_file);
66         }
67 
68       slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
69       gcc_assert (slot && *slot == victim.first);
70       if (victim.second != NULL)
71 	{
72 	  delete *slot;
73 	  *slot = victim.second;
74 	}
75       else
76 	m_avail_exprs->clear_slot (slot);
77     }
78 }
79 
80 /* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
81    from the hash table.  */
82 
83 void
record_expr(class expr_hash_elt * elt1,class expr_hash_elt * elt2,char type)84 avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
85 				class expr_hash_elt *elt2,
86 				char type)
87 {
88   if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
89     {
90       fprintf (dump_file, "%c>>> ", type);
91       elt1->print (dump_file);
92     }
93 
94   m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
95 }
96 
/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  /* Hash each operand independently, then combine the two partial
     hashes in an order-insensitive way.  */
  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      /* A bare RHS; hash the tree directly.  */
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      /* Commutative operators hash order-independently so that
	 A OP B and B OP A land in the same bucket; this mirrors
	 the symmetric comparison in hashable_expr_equal_p.  */
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
					  expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      /* Only the first two operands may commute; the third is always
	 hashed in order, matching hashable_expr_equal_p.  */
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
					  expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
	/* Internal functions have no callee expression; hash the
	   internal function code instead.  */
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

	/* A PHI is hashed purely by its arguments.  */
        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}
203 
204 /* Hashing and equality functions.  We compute a value number for expressions
205    using the code of the expression and the SSA numbers of its operands.  */
206 
207 static hashval_t
avail_expr_hash(class expr_hash_elt * p)208 avail_expr_hash (class expr_hash_elt *p)
209 {
210   const struct hashable_expr *expr = p->expr ();
211   inchash::hash hstate;
212 
213   if (expr->kind == EXPR_SINGLE)
214     {
215       /* T could potentially be a switch index or a goto dest.  */
216       tree t = expr->ops.single.rhs;
217       if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
218 	{
219 	  /* Make equivalent statements of both these kinds hash together.
220 	     Dealing with both MEM_REF and ARRAY_REF allows us not to care
221 	     about equivalence with other statements not considered here.  */
222 	  bool reverse;
223 	  HOST_WIDE_INT offset, size, max_size;
224 	  tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
225 					       &reverse);
226 	  /* Strictly, we could try to normalize variable-sized accesses too,
227 	    but here we just deal with the common case.  */
228 	  if (size != -1
229 	      && size == max_size)
230 	    {
231 	      enum tree_code code = MEM_REF;
232 	      hstate.add_object (code);
233 	      inchash::add_expr (base, hstate);
234 	      hstate.add_object (offset);
235 	      hstate.add_object (size);
236 	      return hstate.end ();
237 	    }
238 	}
239     }
240 
241   inchash::add_hashable_expr (expr, hstate);
242 
243   return hstate.end ();
244 }
245 
246 /* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
247    to each other.  (That is, they return the value of the same bit of memory.)
248 
249    Return TRUE if the two are so equivalent; FALSE if not (which could still
250    mean the two are equivalent by other means).  */
251 
252 static bool
equal_mem_array_ref_p(tree t0,tree t1)253 equal_mem_array_ref_p (tree t0, tree t1)
254 {
255   if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
256     return false;
257   if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
258     return false;
259 
260   if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
261     return false;
262   bool rev0;
263   HOST_WIDE_INT off0, sz0, max0;
264   tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
265   if (sz0 == -1
266       || sz0 != max0)
267     return false;
268 
269   bool rev1;
270   HOST_WIDE_INT off1, sz1, max1;
271   tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
272   if (sz1 == -1
273       || sz1 != max1)
274     return false;
275 
276   if (rev0 != rev1)
277     return false;
278 
279   /* Types were compatible, so this is a sanity check.  */
280   gcc_assert (sz0 == sz1);
281 
282   return (off0 == off1) && operand_equal_p (base0, base1, 0);
283 }
284 
285 /* Compare two hashable_expr structures for equivalence.  They are
286    considered equivalent when the expressions they denote must
287    necessarily be equal.  The logic is intended to follow that of
288    operand_equal_p in fold-const.c */
289 
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
		       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
	  || TREE_CODE (type1) == ERROR_MARK
	  || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
	  || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
	  || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      /* Memory references may denote the same bit of memory even when
	 operand_equal_p would not consider them equal; try the
	 normalized comparison first.  */
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
				    expr1->ops.single.rhs)
	     || operand_equal_p (expr0->ops.single.rhs,
				 expr1->ops.single.rhs, 0);
    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      /* Conversions must agree in signedness of the converted-to type;
	 add_hashable_expr makes the same distinction when hashing.  */
      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
	return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
			   expr1->ops.binary.opnd0, 0)
	  && operand_equal_p (expr0->ops.binary.opnd1,
			      expr1->ops.binary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
	      && operand_equal_p (expr0->ops.binary.opnd0,
				  expr1->ops.binary.opnd1, 0)
	      && operand_equal_p (expr0->ops.binary.opnd1,
				  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      /* The third operand never commutes; check it up front.  */
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
	  || !operand_equal_p (expr0->ops.ternary.opnd2,
			       expr1->ops.ternary.opnd2, 0))
	return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
			   expr1->ops.ternary.opnd0, 0)
	  && operand_equal_p (expr0->ops.ternary.opnd1,
			      expr1->ops.ternary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
	      && operand_equal_p (expr0->ops.ternary.opnd0,
				  expr1->ops.ternary.opnd1, 0)
	      && operand_equal_p (expr0->ops.ternary.opnd1,
				  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

	/* Only const/pure calls (see the expr_hash_elt constructor)
	   participate in equivalence.  */
        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs !=  expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

	/* Throwing calls must additionally share the same EH landing
	   pad (or both have none) to be interchangeable.  */
	if (stmt_could_throw_p (expr0->ops.call.fn_from))
	  {
	    int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
	    int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
	    if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
	      return false;
	  }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs !=  expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
420 
421 /* Given a statement STMT, construct a hash table element.  */
422 
expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      /* Classify the RHS by its operand count and record the
	 operands accordingly.  */
      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;
        case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  /* Canonicalize all conversion codes to NOP_EXPR so that
	     equivalent conversions hash and compare alike.  */
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;
        case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;
        case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      /* A condition is treated as a boolean binary comparison.  */
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      /* Only calls with an LHS are hashed here.  */
      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      /* PURE gates equivalence in hashable_expr_equal_p; only
	 const/pure calls may be considered redundant.  */
      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      /* Snapshot the arguments into a heap vector owned by this
	 element (released by the destructor).  */
      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      /* A switch is represented by its index expression.  */
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      /* A computed goto is represented by its destination.  */
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      /* Snapshot the PHI arguments into a heap vector owned by this
	 element (released by the destructor).  */
      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  /* Record the LHS and virtual operand, then cache the hash.  */
  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
527 
528 /* Given a hashable_expr expression ORIG and an ORIG_LHS,
529    construct a hash table element.  */
530 
expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  /* Struct assignment copies ORIG by value; for CALL/PHI kinds the
     embedded argument-vector pointer is copied, not duplicated.  */
  m_expr = *orig;
  m_lhs = orig_lhs;
  /* No originating statement here, hence no virtual operand.  */
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
539 
540 /* Copy constructor for a hash table element.  */
541 
expr_hash_elt(class expr_hash_elt & old_elt)542 expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
543 {
544   m_expr = old_elt.m_expr;
545   m_lhs = old_elt.m_lhs;
546   m_vop = old_elt.m_vop;
547   m_hash = old_elt.m_hash;
548   m_stamp = this;
549 
550   /* Now deep copy the malloc'd space for CALL and PHI args.  */
551   if (old_elt.m_expr.kind == EXPR_CALL)
552     {
553       size_t nargs = old_elt.m_expr.ops.call.nargs;
554       size_t i;
555 
556       m_expr.ops.call.args = XCNEWVEC (tree, nargs);
557       for (i = 0; i < nargs; i++)
558         m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
559     }
560   else if (old_elt.m_expr.kind == EXPR_PHI)
561     {
562       size_t nargs = old_elt.m_expr.ops.phi.nargs;
563       size_t i;
564 
565       m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
566       for (i = 0; i < nargs; i++)
567         m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
568     }
569 }
570 
571 /* Calls and PHIs have a variable number of arguments that are allocated
572    on the heap.  Thus we have to have a special dtor to release them.  */
573 
~expr_hash_elt()574 expr_hash_elt::~expr_hash_elt ()
575 {
576   if (m_expr.kind == EXPR_CALL)
577     free (m_expr.ops.call.args);
578   else if (m_expr.kind == EXPR_PHI)
579     free (m_expr.ops.phi.args);
580 }
581 
582 /* Print a diagnostic dump of an expression hash table entry.  */
583 
void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  /* Show the LHS, if one was recorded.  */
  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs, 0);
      fprintf (stream, " = ");
    }

  /* Render the expression according to its kind.  */
  switch (m_expr.kind)
    {
      case EXPR_SINGLE:
        print_generic_expr (stream, m_expr.ops.single.rhs, 0);
        break;

      case EXPR_UNARY:
	fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
        print_generic_expr (stream, m_expr.ops.unary.opnd, 0);
        break;

      case EXPR_BINARY:
        print_generic_expr (stream, m_expr.ops.binary.opnd0, 0);
	fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
        print_generic_expr (stream, m_expr.ops.binary.opnd1, 0);
        break;

      case EXPR_TERNARY:
	fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
        print_generic_expr (stream, m_expr.ops.ternary.opnd0, 0);
	fputs (", ", stream);
        print_generic_expr (stream, m_expr.ops.ternary.opnd1, 0);
	fputs (", ", stream);
        print_generic_expr (stream, m_expr.ops.ternary.opnd2, 0);
	fputs (">", stream);
        break;

      case EXPR_CALL:
        {
          size_t i;
          size_t nargs = m_expr.ops.call.nargs;
          gcall *fn_from;

          fn_from = m_expr.ops.call.fn_from;
	  /* Internal functions have no callee tree; print their
	     name directly.  */
          if (gimple_call_internal_p (fn_from))
            fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                   stream);
          else
            print_generic_expr (stream, gimple_call_fn (fn_from), 0);
          fprintf (stream, " (");
          for (i = 0; i < nargs; i++)
            {
              print_generic_expr (stream, m_expr.ops.call.args[i], 0);
              if (i + 1 < nargs)
                fprintf (stream, ", ");
            }
          fprintf (stream, ")");
        }
        break;

      case EXPR_PHI:
        {
          size_t i;
          size_t nargs = m_expr.ops.phi.nargs;

          fprintf (stream, "PHI <");
          for (i = 0; i < nargs; i++)
            {
              print_generic_expr (stream, m_expr.ops.phi.args[i], 0);
              if (i + 1 < nargs)
                fprintf (stream, ", ");
            }
          fprintf (stream, ">");
        }
        break;
    }

  /* Append the virtual operand, if any.  */
  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop, 0);
    }

  fprintf (stream, "\n");
}
670 
671 /* Pop entries off the stack until we hit the NULL marker.
672    For each entry popped, use the SRC/DEST pair to restore
673    SRC to its prior value.  */
674 
675 void
pop_to_marker(void)676 const_and_copies::pop_to_marker (void)
677 {
678   while (m_stack.length () > 0)
679     {
680       tree prev_value, dest;
681 
682       dest = m_stack.pop ();
683 
684       /* A NULL value indicates we should stop unwinding, otherwise
685 	 pop off the next entry as they're recorded in pairs.  */
686       if (dest == NULL)
687 	break;
688 
689       if (dump_file && (dump_flags & TDF_DETAILS))
690 	{
691 	  fprintf (dump_file, "<<<< COPY ");
692 	  print_generic_expr (dump_file, dest, 0);
693 	  fprintf (dump_file, " = ");
694 	  print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
695 	  fprintf (dump_file, "\n");
696 	}
697 
698       prev_value = m_stack.pop ();
699       set_ssa_name_value (dest, prev_value);
700     }
701 }
702 
703 /* Record that X has the value Y and that X's previous value is PREV_X.
704 
705    This variant does not follow the value chain for Y.  */
706 
707 void
record_const_or_copy_raw(tree x,tree y,tree prev_x)708 const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
709 {
710   if (dump_file && (dump_flags & TDF_DETAILS))
711     {
712       fprintf (dump_file, "0>>> COPY ");
713       print_generic_expr (dump_file, x, 0);
714       fprintf (dump_file, " = ");
715       print_generic_expr (dump_file, y, 0);
716       fprintf (dump_file, "\n");
717     }
718 
719   set_ssa_name_value (x, y);
720   m_stack.reserve (2);
721   m_stack.quick_push (prev_x);
722   m_stack.quick_push (x);
723 }
724 
725 /* Record that X has the value Y.  */
726 
void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  /* Convenience wrapper: the value to restore on unwind is X's
     current value.  */
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}
732 
733 /* Record that X has the value Y and that X's previous value is PREV_X.
734 
735    This variant follow's Y value chain.  */
736 
737 void
record_const_or_copy(tree x,tree y,tree prev_x)738 const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
739 {
740   /* Y may be NULL if we are invalidating entries in the table.  */
741   if (y && TREE_CODE (y) == SSA_NAME)
742     {
743       tree tmp = SSA_NAME_VALUE (y);
744       y = tmp ? tmp : y;
745     }
746 
747   record_const_or_copy_raw (x, y, prev_x);
748 }
749 
750 bool
equal(const value_type & p1,const compare_type & p2)751 expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
752 {
753   const struct hashable_expr *expr1 = p1->expr ();
754   const struct expr_hash_elt *stamp1 = p1->stamp ();
755   const struct hashable_expr *expr2 = p2->expr ();
756   const struct expr_hash_elt *stamp2 = p2->stamp ();
757 
758   /* This case should apply only when removing entries from the table.  */
759   if (stamp1 == stamp2)
760     return true;
761 
762   if (p1->hash () != p2->hash ())
763     return false;
764 
765   /* In case of a collision, both RHS have to be identical and have the
766      same VUSE operands.  */
767   if (hashable_expr_equal_p (expr1, expr2)
768       && types_compatible_p (expr1->type, expr2->type))
769     return true;
770 
771   return false;
772 }
773 
774 /* Given a conditional expression COND as a tree, initialize
775    a hashable_expr expression EXPR.  The conditional must be a
776    comparison or logical negation.  A constant or a variable is
777    not permitted.  */
778 
779 void
initialize_expr_from_cond(tree cond,struct hashable_expr * expr)780 initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
781 {
782   expr->type = boolean_type_node;
783 
784   if (COMPARISON_CLASS_P (cond))
785     {
786       expr->kind = EXPR_BINARY;
787       expr->ops.binary.op = TREE_CODE (cond);
788       expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
789       expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
790     }
791   else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
792     {
793       expr->kind = EXPR_UNARY;
794       expr->ops.unary.op = TRUTH_NOT_EXPR;
795       expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
796     }
797   else
798     gcc_unreachable ();
799 }
800 
801