1 /* Global constant/copy propagation for RTL.
2    Copyright (C) 1997-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "cfghooks.h"
26 #include "df.h"
27 #include "insn-config.h"
28 #include "memmodel.h"
29 #include "emit-rtl.h"
30 #include "recog.h"
31 #include "diagnostic-core.h"
32 #include "toplev.h"
33 #include "cfgrtl.h"
34 #include "cfganal.h"
35 #include "lcm.h"
36 #include "cfgcleanup.h"
37 #include "cselib.h"
38 #include "intl.h"
39 #include "tree-pass.h"
40 #include "dbgcnt.h"
41 #include "cfgloop.h"
42 #include "gcse.h"
43 
44 
45 /* An obstack for our working variables.  */
46 static struct obstack cprop_obstack;
47 
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used.  */

struct cprop_occr
{
  /* Next occurrence of this expression (in another basic block).  */
  struct cprop_occr *next;
  /* The insn that computes the expression.  */
  rtx_insn *insn;
};
59 
/* Hash table entry for assignment expressions.  */

struct cprop_expr
{
  /* The expression (DEST := SRC).  */
  rtx dest;
  rtx src;

  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct cprop_expr *next_same_hash;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and whose operands are not modified by following statements
     in the basic block [including this insn].  */
  struct cprop_occr *avail_occr;
};
78 
/* Hash table for copy propagation expressions.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table_d
{
  /* The table itself.
     This is an array of `set_hash_table_size' elements.  */
  struct cprop_expr **table;

  /* Size of the hash table, in elements (buckets).  */
  unsigned int size;

  /* Number of hash table elements (distinct DEST := SRC expressions).  */
  unsigned int n_elems;
};
100 
/* Copy propagation hash table.  */
static struct hash_table_d set_hash_table;

/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Array of indexes of expressions for implicit set patterns indexed by basic
   block index.  In other words, implicit_set_indexes[i] is the bitmap_index
   of the expression whose RTX is implicit_sets[i].  */
static int *implicit_set_indexes;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start or end of the basic block while traversing that block.  */
static regset reg_set_bitmap;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* Typed allocation helpers on top of cprop_alloc (obstack-backed).  */
#define GOBNEW(T)		((T *) cprop_alloc (sizeof (T)))
#define GOBNEWVAR(T, S)		((T *) cprop_alloc ((S)))
135 
136 /* Cover function to obstack_alloc.  */
137 
138 static void *
cprop_alloc(unsigned long size)139 cprop_alloc (unsigned long size)
140 {
141   bytes_used += size;
142   return obstack_alloc (&cprop_obstack, size);
143 }
144 
/* Return nonzero if register X is unchanged from INSN to the end
   of INSN's basic block.  Relies on reg_set_bitmap, which during
   hash-table construction records the registers set between INSN and
   the block end (the block is scanned in reverse).  */

static int
reg_available_p (const_rtx x, const rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
}
153 
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_mod (int regno, int hash_table_size)
{
  unsigned int bucket_count = (unsigned int) hash_table_size;
  return (unsigned int) regno % bucket_count;
}
166 
/* Insert assignment DEST:=SRC from INSN in the hash table.
   DEST is a register and SRC is a register or a suitable constant.
   If the assignment is already present in the table, record it as
   the last occurrence in INSN's basic block.
   IMPLICIT is true if it's an implicit set, false otherwise.  */

static void
insert_set_in_table (rtx dest, rtx src, rtx_insn *insn,
		     struct hash_table_d *table, bool implicit)
{
  bool found = false;
  unsigned int hash;
  struct cprop_expr *cur_expr, *last_expr = NULL;
  struct cprop_occr *cur_occr;

  hash = hash_mod (REGNO (dest), table->size);

  /* Walk the bucket looking for an existing DEST := SRC entry.  Note the
     comparison is by rtx pointer identity, not structural equality.  */
  for (cur_expr = table->table[hash]; cur_expr;
       cur_expr = cur_expr->next_same_hash)
    {
      if (dest == cur_expr->dest
	  && src == cur_expr->src)
	{
	  found = true;
	  break;
	}
      last_expr = cur_expr;
    }

  if (! found)
    {
      /* Not in the table yet: allocate a fresh expression entry and
	 append it to the hash chain.  */
      cur_expr = GOBNEW (struct cprop_expr);
      bytes_used += sizeof (struct cprop_expr);
      if (table->table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	table->table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      cur_expr->dest = copy_rtx (dest);
      cur_expr->src = copy_rtx (src);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr
      && BLOCK_FOR_INSN (cur_occr->insn) == BLOCK_FOR_INSN (insn))
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = GOBNEW (struct cprop_occr);
      bytes_used += sizeof (struct cprop_occr);
      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_expr->avail_occr = cur_occr;
    }

  /* Record bitmap_index of the implicit set in implicit_set_indexes.  */
  if (implicit)
    implicit_set_indexes[BLOCK_FOR_INSN (insn)->index]
      = cur_expr->bitmap_index;
}
244 
245 /* Determine whether the rtx X should be treated as a constant for CPROP.
246    Since X might be inserted more than once we have to take care that it
247    is sharable.  */
248 
249 static bool
cprop_constant_p(const_rtx x)250 cprop_constant_p (const_rtx x)
251 {
252   return CONSTANT_P (x) && (GET_CODE (x) != CONST || shared_const_p (x));
253 }
254 
255 /* Determine whether the rtx X should be treated as a register that can
256    be propagated.  Any pseudo-register is fine.  */
257 
258 static bool
cprop_reg_p(const_rtx x)259 cprop_reg_p (const_rtx x)
260 {
261   return REG_P (x) && !HARD_REGISTER_P (x);
262 }
263 
/* Scan SET present in INSN and add an entry to the hash TABLE.
   IMPLICIT is true if it's an implicit set, false otherwise.  */

static void
hash_scan_set (rtx set, rtx_insn *insn, struct hash_table_d *table,
	       bool implicit)
{
  rtx src = SET_SRC (set);
  rtx dest = SET_DEST (set);

  /* Only record sets to a pseudo that is still available at INSN and
     whose mode can actually be copied.  */
  if (cprop_reg_p (dest)
      && reg_available_p (dest, insn)
      && can_copy_p (GET_MODE (dest)))
    {
      /* See if a REG_EQUAL note shows this equivalent to a simpler expression.

	 This allows us to do a single CPROP pass and still eliminate
	 redundant constants, addresses or other expressions that are
	 constructed with multiple instructions.

	 However, keep the original SRC if INSN is a simple reg-reg move.
	 In this case, there will almost always be a REG_EQUAL note on the
	 insn that sets SRC.  By recording the REG_EQUAL value here as SRC
	 for INSN, we miss copy propagation opportunities.

	 Note that this does not impede profitable constant propagations.  We
	 "look through" reg-reg sets in lookup_set.  */
      rtx note = find_reg_equal_equiv_note (insn);
      if (note != 0
	  && REG_NOTE_KIND (note) == REG_EQUAL
	  && !REG_P (src)
	  && cprop_constant_p (XEXP (note, 0)))
	src = XEXP (note, 0), set = gen_rtx_SET (dest, src);

      /* Record sets for constant/copy propagation.  */
      if ((cprop_reg_p (src)
	   && src != dest
	   && reg_available_p (src, insn))
	  || cprop_constant_p (src))
	insert_set_in_table (dest, src, insn, table, implicit);
    }
}
306 
307 /* Process INSN and add hash table entries as appropriate.  */
308 
309 static void
hash_scan_insn(rtx_insn * insn,struct hash_table_d * table)310 hash_scan_insn (rtx_insn *insn, struct hash_table_d *table)
311 {
312   rtx pat = PATTERN (insn);
313   int i;
314 
315   /* Pick out the sets of INSN and for other forms of instructions record
316      what's been modified.  */
317 
318   if (GET_CODE (pat) == SET)
319     hash_scan_set (pat, insn, table, false);
320   else if (GET_CODE (pat) == PARALLEL)
321     for (i = 0; i < XVECLEN (pat, 0); i++)
322       {
323 	rtx x = XVECEXP (pat, 0, i);
324 
325 	if (GET_CODE (x) == SET)
326 	  hash_scan_set (x, insn, table, false);
327       }
328 }
329 
330 /* Dump the hash table TABLE to file FILE under the name NAME.  */
331 
332 static void
dump_hash_table(FILE * file,const char * name,struct hash_table_d * table)333 dump_hash_table (FILE *file, const char *name, struct hash_table_d *table)
334 {
335   int i;
336   /* Flattened out table, so it's printed in proper order.  */
337   struct cprop_expr **flat_table;
338   unsigned int *hash_val;
339   struct cprop_expr *expr;
340 
341   flat_table = XCNEWVEC (struct cprop_expr *, table->n_elems);
342   hash_val = XNEWVEC (unsigned int, table->n_elems);
343 
344   for (i = 0; i < (int) table->size; i++)
345     for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
346       {
347 	flat_table[expr->bitmap_index] = expr;
348 	hash_val[expr->bitmap_index] = i;
349       }
350 
351   fprintf (file, "%s hash table (%d buckets, %d entries)\n",
352 	   name, table->size, table->n_elems);
353 
354   for (i = 0; i < (int) table->n_elems; i++)
355     if (flat_table[i] != 0)
356       {
357 	expr = flat_table[i];
358 	fprintf (file, "Index %d (hash value %d)\n  ",
359 		 expr->bitmap_index, hash_val[i]);
360 	print_rtl (file, expr->dest);
361 	fprintf (file, " := ");
362 	print_rtl (file, expr->src);
363 	fprintf (file, "\n");
364       }
365 
366   fprintf (file, "\n");
367 
368   free (flat_table);
369   free (hash_val);
370 }
371 
/* Record as unavailable all registers that are DEF operands of INSN,
   by setting their bits in reg_set_bitmap.  (Same operation as
   mark_oprs_set; kept separate because the bitmap has a different
   meaning during hash-table construction.)  */

static void
make_set_regs_unavailable (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def));
}
382 
/* Top level function to create an assignment hash table.

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table_d *table)
{
  basic_block bb;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = ALLOC_REG_SET (NULL);

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      /* Reset tables used to keep track of what's not yet invalid [since
	 the end of the block].  */
      CLEAR_REG_SET (reg_set_bitmap);

      /* Go over all insns from the last to the first.  This is convenient
	 for tracking available registers, i.e. not set between INSN and
	 the end of the basic block BB.  */
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
	  /* Only real insns are interesting.  */
	  if (!NONDEBUG_INSN_P (insn))
	    continue;

	  /* Record interesting sets from INSN in the hash table.  */
	  hash_scan_insn (insn, table);

	  /* Any registers set in INSN will make SETs above it not AVAIL.  */
	  make_set_regs_unavailable (insn);
	}

      /* Insert implicit sets in the hash table, pretending they appear as
	 insns at the head of the basic block.  */
      if (implicit_sets[bb->index] != NULL_RTX)
	hash_scan_set (implicit_sets[bb->index], BB_HEAD (bb), table, true);
    }

  FREE_REG_SET (reg_set_bitmap);
}
434 
435 /* Allocate space for the set/expr hash TABLE.
436    It is used to determine the number of buckets to use.  */
437 
438 static void
alloc_hash_table(struct hash_table_d * table)439 alloc_hash_table (struct hash_table_d *table)
440 {
441   int n;
442 
443   n = get_max_insn_count ();
444 
445   table->size = n / 4;
446   if (table->size < 11)
447     table->size = 11;
448 
449   /* Attempt to maintain efficient use of hash table.
450      Making it an odd number is simplest for now.
451      ??? Later take some measurements.  */
452   table->size |= 1;
453   n = table->size * sizeof (struct cprop_expr *);
454   table->table = XNEWVAR (struct cprop_expr *, n);
455 }
456 
/* Free things allocated by alloc_hash_table (the bucket array only;
   the entries themselves live on the pass obstack).  */

static void
free_hash_table (struct hash_table_d *table)
{
  free (table->table);
}
464 
/* Compute the hash TABLE for doing copy/const propagation or
   expression hash table.  Clears any previous contents first.  */

static void
compute_hash_table (struct hash_table_d *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct cprop_expr *));

  compute_hash_table_work (table);
}
477 
478 /* Expression tracking support.  */
479 
480 /* Lookup REGNO in the set TABLE.  The result is a pointer to the
481    table entry, or NULL if not found.  */
482 
483 static struct cprop_expr *
lookup_set(unsigned int regno,struct hash_table_d * table)484 lookup_set (unsigned int regno, struct hash_table_d *table)
485 {
486   unsigned int hash = hash_mod (regno, table->size);
487   struct cprop_expr *expr;
488 
489   expr = table->table[hash];
490 
491   while (expr && REGNO (expr->dest) != regno)
492     expr = expr->next_same_hash;
493 
494   return expr;
495 }
496 
497 /* Return the next entry for REGNO in list EXPR.  */
498 
499 static struct cprop_expr *
next_set(unsigned int regno,struct cprop_expr * expr)500 next_set (unsigned int regno, struct cprop_expr *expr)
501 {
502   do
503     expr = expr->next_same_hash;
504   while (expr && REGNO (expr->dest) != regno);
505 
506   return expr;
507 }
508 
/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);
}
519 
/* Return nonzero if the register X has not been set yet [since the
   start of the basic block containing INSN].  Consults reg_set_bitmap,
   which mark_oprs_set maintains during a forward scan of the block.  */

static int
reg_not_set_p (const_rtx x, const rtx_insn *insn ATTRIBUTE_UNUSED)
{
  return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));
}
528 
/* Record things set by INSN: set the reg_set_bitmap bit for every
   register DEF of INSN.  This data is used by reg_not_set_p.  */

static void
mark_oprs_set (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    SET_REGNO_REG_SET (reg_set_bitmap, DF_REF_REGNO (def));
}
540 
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments: per-block COMP (locally available)
   and KILL bitmaps, one bit per hash-table expression.  */
static sbitmap *cprop_avloc;
static sbitmap *cprop_kill;

/* Global properties of assignments (computed from the local properties
   by the LCM availability solver).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;
550 
/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets (expressions), i.e. the
   width of each per-block bitmap.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_avloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_kill = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}
563 
/* Free vars used by copy/const propagation (the four bitmap vectors
   allocated by alloc_cprop_mem).  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_avloc);
  sbitmap_vector_free (cprop_kill);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}
574 
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is killed in a block if its operands, either DEST or SRC, are
   modified in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   KILL and COMP are destination sbitmaps for recording local properties,
   indexed by basic block, one bit per expression in TABLE.  */

static void
compute_local_properties (sbitmap *kill, sbitmap *comp,
			  struct hash_table_d *table)
{
  unsigned int i;

  /* Initialize the bitmaps that were passed in.  */
  bitmap_vector_clear (kill, last_basic_block_for_fn (cfun));
  bitmap_vector_clear (comp, last_basic_block_for_fn (cfun));

  for (i = 0; i < table->size; i++)
    {
      struct cprop_expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  df_ref def;
	  struct cprop_occr *occr;

	  /* For each definition of the destination pseudo-reg, the expression
	     is killed in the block where the definition is.  The DF def
	     chains give all definitions of the register in the function.  */
	  for (def = DF_REG_DEF_CHAIN (REGNO (expr->dest));
	       def; def = DF_REF_NEXT_REG (def))
	    bitmap_set_bit (kill[DF_REF_BB (def)->index], indx);

	  /* If the source is a pseudo-reg, for each definition of the source,
	     the expression is killed in the block where the definition is.  */
	  if (REG_P (expr->src))
	    for (def = DF_REG_DEF_CHAIN (REGNO (expr->src));
		 def; def = DF_REF_NEXT_REG (def))
	      bitmap_set_bit (kill[DF_REF_BB (def)->index], indx);

	  /* The occurrences recorded in avail_occr are exactly those that
	     are locally available in the block where they are.  */
	  for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	    {
	      bitmap_set_bit (comp[BLOCK_FOR_INSN (occr->insn)->index], indx);
	    }
	}
    }
}
631 
632 /* Hash table support.  */
633 
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation: compute local KILL/COMP properties, solve global
   availability, then account for implicit sets.  */

static void
compute_cprop_data (void)
{
  basic_block bb;

  compute_local_properties (cprop_kill, cprop_avloc, &set_hash_table);
  compute_available (cprop_avloc, cprop_kill, cprop_avout, cprop_avin);

  /* Merge implicit sets into CPROP_AVIN.  They are always available at the
     entry of their basic block.  We need to do this because 1) implicit sets
     aren't recorded for the local pass so they cannot be propagated within
     their basic block by this pass and 2) the global pass would otherwise
     propagate them only in the successors of their basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int index = implicit_set_indexes[bb->index];
      if (index != -1)
	bitmap_set_bit (cprop_avin[bb->index], index);
    }
}
657 
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses (registers, both hard and pseudo) found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.
   Filled by find_used_regs; entries beyond MAX_USES are dropped.  */
static rtx reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static unsigned reg_use_count;
669 
/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   XPTR points at the rtx to scan; DATA is unused (kept for the callback
   signature).  Once MAX_USES registers have been recorded, further uses
   are silently dropped.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count] = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression, using the rtx
     format string to find sub-expressions ('e') and vectors ('E').  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}
723 
/* Try to replace all uses of FROM in INSN with TO.
   Return nonzero if successful.

   When TO is a constant and INSN is a simple single set, the replacement
   is rejected if it would make the set more expensive (rtx cost check),
   unless a REG_EQUAL note already proves the value constant.  As a side
   effect, an existing REG_EQUAL note is updated with the substitution,
   and one may be added to preserve information when the in-place
   replacement fails.  */

static int
try_replace_reg (rtx from, rtx to, rtx_insn *insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  bool check_rtx_costs = true;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int old_cost = set ? set_rtx_cost (set, speed) : 0;

  /* Skip the cost check when there is no single set, the source is
     already constant, or a REG_EQUAL note shows a constant value.  */
  if (!set
      || CONSTANT_P (SET_SRC (set))
      || (note != 0
	  && REG_NOTE_KIND (note) == REG_EQUAL
	  && (GET_CODE (XEXP (note, 0)) == CONST
	      || CONSTANT_P (XEXP (note, 0)))))
    check_rtx_costs = false;

  /* Usually we substitute easy stuff, so we won't copy everything.
     We however need to take care to not duplicate non-trivial CONST
     expressions.  */
  to = copy_rtx (to);

  validate_replace_src_group (from, to, insn);

  /* If TO is a constant, check the cost of the set after propagation
     to the cost of the set before the propagation.  If the cost is
     higher, then do not replace FROM with TO.  */

  if (check_rtx_costs
      && CONSTANT_P (to)
      && set_rtx_cost (set, speed) > old_cost)
    {
      cancel_changes (0);
      return false;
    }


  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
	validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a REG_EQUAL note, update the expression in it
     with our replacement.  */
  if (note != 0 && REG_NOTE_KIND (note) == REG_EQUAL)
    set_unique_reg_note (insn, REG_EQUAL,
			 simplify_replace_rtx (XEXP (note, 0), from, to));
  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If above failed and this is a single set, try to simplify the source
	 of the set given our substitution.  We could perhaps try this for
	 multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;

      /* If we've failed perform the replacement, have a single SET to
	 a REG destination and don't yet have a note, add a REG_EQUAL note
	 to not lose information.  */
      if (!success && note == 0 && set != 0 && REG_P (SET_DEST (set)))
	note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
    }

  if (set && MEM_P (SET_DEST (set)) && reg_mentioned_p (from, SET_DEST (set)))
    {
      /* Registers can also appear as uses in SET_DEST if it is a MEM.
         We could perhaps try this for multiple SETs, but it probably
         won't buy us anything.  */
      rtx dest = simplify_replace_rtx (SET_DEST (set), from, to);

      if (!rtx_equal_p (dest, SET_DEST (set))
          && validate_change (insn, &SET_DEST (set), dest, 0))
        success = 1;
    }

  /* REG_EQUAL may get simplified into register.
     We don't allow that. Remove that note. This code ought
     not to happen, because previous code ought to synthesize
     reg-reg move, but be on the safe side.  */
  if (note && REG_NOTE_KIND (note) == REG_EQUAL && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
823 
/* Find a set of REGNO that is available on entry to INSN's block.  If found,
   SET_RET[0] will be assigned a set with a register source and SET_RET[1] a
   set with a constant source.  If not found the corresponding entry is set to
   NULL.  The copy chain (reg X := reg Y := ...) is followed, so the result
   may come from a transitive copy rather than a direct set of REGNO.  */

static void
find_avail_set (int regno, rtx_insn *insn, struct cprop_expr *set_ret[2])
{
  set_ret[0] = set_ret[1] = NULL;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This cannot happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct cprop_expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (bitmap_bit_p (cprop_avin[BLOCK_FOR_INSN (insn)->index],
			set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      src = set->src;

      /* We know the set is available.
	 Now check that SRC is locally anticipatable (i.e. none of the
	 source operands have changed since the start of the block).

         If the source operand changed, we may still use it for the next
         iteration of this loop, but we may not use it for substitutions.  */

      if (cprop_constant_p (src))
	set_ret[1] = set;
      else if (reg_not_set_p (src, insn))
	set_ret[0] = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (! REG_P (src))
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }
}
887 
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Return nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx_insn *setcc, rtx_insn *jump, rtx from, rtx src)
{
  rtx new_rtx, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  /* Extract the source of the jump's REG_EQUAL note, if any.  An
     EXPR_LIST note is not a single equivalent value and is unusable.  */
  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
	note_src = NULL_RTX;
    }
  else note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute that given values into this expanded JUMP.  This is
     only valid if neither FROM nor SRC is modified between SETCC and
     JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      /* As above, prefer SETCC's REG_EQUAL note unless it is an
	 EXPR_LIST.  */
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
		? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
				      setcc_src);
    }
  else
    setcc = NULL;

  new_rtx = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new_rtx, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new_rtx == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn to be equivalent
         to one computed by setcc.  */
      if (setcc && modified_in_p (new_rtx, setcc))
	return 0;
      if (! validate_unshare_change (jump, &SET_SRC (set), new_rtx, 0))
	{
	  /* When (some) constants are not valid in a comparison, and there
	     are two registers to be replaced by constants before the entire
	     comparison can be folded into a constant, we need to keep
	     intermediate information in REG_EQUAL notes.  For targets with
	     separate compare insns, such notes are added by try_replace_reg.
	     When we have a combined compare-and-branch instruction, however,
	     we need to attach a note to the branch itself to make this
	     optimization work.  */

	  if (!rtx_equal_p (new_rtx, note_src))
	    set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new_rtx));
	  return 0;
	}

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
	remove_note (jump, note);
     }

  /* Delete the cc0 setter.  */
  if (HAVE_cc0 && setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);

  global_const_prop_count++;
  if (dump_file != NULL)
    {
      fprintf (dump_file,
	       "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with "
	       "constant ", REGNO (from), INSN_UID (jump));
      print_rtl (dump_file, src);
      fprintf (dump_file, "\n");
    }
  purge_dead_edges (bb);

  /* If a conditional jump has been changed into unconditional jump, remove
     the jump and make the edge fallthru - this is always called in
     cfglayout mode.  */
  if (new_rtx != pc_rtx && simplejump_p (jump))
    {
      edge e;
      edge_iterator ei;

      /* Mark the edge to the (former) jump target as fallthru before
	 deleting the now-unconditional jump.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	    && BB_HEAD (e->dest) == JUMP_LABEL (jump))
	  {
	    e->flags |= EDGE_FALLTHRU;
	    break;
	  }
      delete_insn (jump);
    }

  return 1;
}
1001 
1002 /* Subroutine of cprop_insn that tries to propagate constants.  FROM is what
1003    we will try to replace, SRC is the constant we will try to substitute for
1004    it and INSN is the instruction where this will be happening.  */
1005 
1006 static int
constprop_register(rtx from,rtx src,rtx_insn * insn)1007 constprop_register (rtx from, rtx src, rtx_insn *insn)
1008 {
1009   rtx sset;
1010   rtx_insn *next_insn;
1011 
1012   /* Check for reg or cc0 setting instructions followed by
1013      conditional branch instructions first.  */
1014   if ((sset = single_set (insn)) != NULL
1015       && (next_insn = next_nondebug_insn (insn)) != NULL
1016       && any_condjump_p (next_insn)
1017       && onlyjump_p (next_insn))
1018     {
1019       rtx dest = SET_DEST (sset);
1020       if ((REG_P (dest) || CC0_P (dest))
1021 	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, next_insn,
1022 			 from, src))
1023 	return 1;
1024     }
1025 
1026   /* Handle normal insns next.  */
1027   if (NONJUMP_INSN_P (insn) && try_replace_reg (from, src, insn))
1028     return 1;
1029 
1030   /* Try to propagate a CONST_INT into a conditional jump.
1031      We're pretty specific about what we will handle in this
1032      code, we can extend this as necessary over time.
1033 
1034      Right now the insn in question must look like
1035      (set (pc) (if_then_else ...))  */
1036   else if (any_condjump_p (insn) && onlyjump_p (insn))
1037     return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, src);
1038   return 0;
1039 }
1040 
/* Perform constant and copy propagation on INSN.
   Return nonzero if a change was made.  */

static int
cprop_insn (rtx_insn *insn)
{
  unsigned i;
  int changed = 0, changed_this_round;
  rtx note;

  do
    {
      changed_this_round = 0;
      reg_use_count = 0;
      /* Collect all register uses in INSN's pattern into
	 reg_use_table.  */
      note_uses (&PATTERN (insn), find_used_regs, NULL);

      /* We may win even when propagating constants into notes.  */
      note = find_reg_equal_equiv_note (insn);
      if (note)
	find_used_regs (&XEXP (note, 0), NULL);

      for (i = 0; i < reg_use_count; i++)
	{
	  rtx reg_used = reg_use_table[i];
	  unsigned int regno = REGNO (reg_used);
	  rtx src_cst = NULL, src_reg = NULL;
	  struct cprop_expr *set[2];

	  /* If the register has already been set in this block, there's
	     nothing we can do.  */
	  if (! reg_not_set_p (reg_used, insn))
	    continue;

	  /* Find an assignment that sets reg_used and is available
	     at the start of the block.  set[0] is a register source
	     (copy propagation candidate), set[1] a constant source
	     (constant propagation candidate).  */
	  find_avail_set (regno, insn, set);
	  if (set[0])
	    src_reg = set[0]->src;
	  if (set[1])
	    src_cst = set[1]->src;

	  /* Constant propagation.  */
	  if (src_cst && cprop_constant_p (src_cst)
	      && constprop_register (reg_used, src_cst, insn))
	    {
	      changed_this_round = changed = 1;
	      global_const_prop_count++;
	      if (dump_file != NULL)
		{
		  fprintf (dump_file,
			   "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (dump_file, "insn %d with constant ",
			   INSN_UID (insn));
		  print_rtl (dump_file, src_cst);
		  fprintf (dump_file, "\n");
		}
	      /* Propagation may have turned INSN into a no-op that was
		 deleted; nothing more can be done with it.  */
	      if (insn->deleted ())
		return 1;
	    }
	  /* Copy propagation.  */
	  else if (src_reg && cprop_reg_p (src_reg)
		   && REGNO (src_reg) != regno
		   && try_replace_reg (reg_used, src_reg, insn))
	    {
	      changed_this_round = changed = 1;
	      global_copy_prop_count++;
	      if (dump_file != NULL)
		{
		  fprintf (dump_file,
			   "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (dump_file, " with reg %d\n", REGNO (src_reg));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to DCE.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }
  /* If try_replace_reg simplified the insn, the regs found by find_used_regs
     may not be valid anymore.  Start over.  */
  while (changed_this_round);

  /* Changes to debug insns don't count as real transformations for the
     caller's purposes.  */
  if (changed && DEBUG_INSN_P (insn))
    return 0;

  return changed;
}
1132 
1133 /* Like find_used_regs, but avoid recording uses that appear in
1134    input-output contexts such as zero_extract or pre_dec.  This
1135    restricts the cases we consider to those for which local cprop
1136    can legitimately make replacements.  */
1137 
1138 static void
local_cprop_find_used_regs(rtx * xptr,void * data)1139 local_cprop_find_used_regs (rtx *xptr, void *data)
1140 {
1141   rtx x = *xptr;
1142 
1143   if (x == 0)
1144     return;
1145 
1146   switch (GET_CODE (x))
1147     {
1148     case ZERO_EXTRACT:
1149     case SIGN_EXTRACT:
1150     case STRICT_LOW_PART:
1151       return;
1152 
1153     case PRE_DEC:
1154     case PRE_INC:
1155     case POST_DEC:
1156     case POST_INC:
1157     case PRE_MODIFY:
1158     case POST_MODIFY:
1159       /* Can only legitimately appear this early in the context of
1160 	 stack pushes for function arguments, but handle all of the
1161 	 codes nonetheless.  */
1162       return;
1163 
1164     case SUBREG:
1165       if (read_modify_subreg_p (x))
1166 	return;
1167       break;
1168 
1169     default:
1170       break;
1171     }
1172 
1173   find_used_regs (xptr, data);
1174 }
1175 
/* Try to perform local const/copy propagation on X in INSN.
   Return true if a replacement was made.  */

static bool
do_local_cprop (rtx x, rtx_insn *insn)
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (REG_P (x)
      && (cprop_reg_p (x)
          || (GET_CODE (PATTERN (insn)) != USE
	      && asm_noperands (PATTERN (insn)) < 0)))
    {
      /* Ask cselib for the set of locations known to hold the same
	 value as X, and pick out a constant and/or register among
	 them.  */
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
      struct elt_loc_list *l;

      if (!val)
	return false;
      for (l = val->locs; l; l = l->next)
	{
	  rtx this_rtx = l->loc;
	  rtx note;

	  if (cprop_constant_p (this_rtx))
	    newcnst = this_rtx;
	  if (cprop_reg_p (this_rtx)
	      /* Don't copy propagate if it has attached REG_EQUIV note.
		 At this point this only function parameters should have
		 REG_EQUIV notes and if the argument slot is used somewhere
		 explicitly, it means address of parameter has been taken,
		 so we should not extend the lifetime of the pseudo.  */
	      && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
		  || ! MEM_P (XEXP (note, 0))))
	    newreg = this_rtx;
	}
      /* Constant propagation is preferred over copy propagation.  */
      if (newcnst && constprop_register (x, newcnst, insn))
	{
	  if (dump_file != NULL)
	    {
	      fprintf (dump_file, "LOCAL CONST-PROP: Replacing reg %d in ",
		       REGNO (x));
	      fprintf (dump_file, "insn %d with constant ",
		       INSN_UID (insn));
	      print_rtl (dump_file, newcnst);
	      fprintf (dump_file, "\n");
	    }
	  local_const_prop_count++;
	  return true;
	}
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
	{
	  if (dump_file != NULL)
	    {
	      fprintf (dump_file,
		       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
		       REGNO (x), INSN_UID (insn));
	      fprintf (dump_file, " with reg %d\n", REGNO (newreg));
	    }
	  local_copy_prop_count++;
	  return true;
	}
    }
  return false;
}
1241 
/* Do local const/copy propagation (i.e. within each basic block).
   Return nonzero if a change was made.  */

static int
local_cprop_pass (void)
{
  basic_block bb;
  rtx_insn *insn;
  bool changed = false;
  unsigned i;

  /* Insns that became unconditional traps during propagation; the
     blocks containing them are split after the scan, since we cannot
     modify the CFG while cselib is live.  */
  auto_vec<rtx_insn *> uncond_traps;

  cselib_init (0);
  FOR_EACH_BB_FN (bb, cfun)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      /* Remember whether INSN already was an unconditional
		 trap, so we only record newly-created ones below.  */
	      bool was_uncond_trap
		= (GET_CODE (PATTERN (insn)) == TRAP_IF
		   && XEXP (PATTERN (insn), 0) == const1_rtx);
	      rtx note = find_reg_equal_equiv_note (insn);
	      do
		{
		  /* Re-collect register uses on every iteration: a
		     successful replacement may invalidate the table.  */
		  reg_use_count = 0;
		  note_uses (&PATTERN (insn), local_cprop_find_used_regs,
			     NULL);
		  if (note)
		    local_cprop_find_used_regs (&XEXP (note, 0), NULL);

		  for (i = 0; i < reg_use_count; i++)
		    {
		      if (do_local_cprop (reg_use_table[i], insn))
			{
			  if (!DEBUG_INSN_P (insn))
			    changed = true;
			  break;
			}
		    }
		  /* Propagation turned INSN into an unconditional trap;
		     defer splitting its block until after the scan.  */
		  if (!was_uncond_trap
		      && GET_CODE (PATTERN (insn)) == TRAP_IF
		      && XEXP (PATTERN (insn), 0) == const1_rtx)
		    {
		      uncond_traps.safe_push (insn);
		      break;
		    }
		  if (insn->deleted ())
		    break;
		}
	      /* i < reg_use_count iff a replacement was made above, in
		 which case we rescan INSN from scratch.  */
	      while (i < reg_use_count);
	    }
	  cselib_process_insn (insn);
	}

      /* Forget everything at the end of a basic block.  */
      cselib_clear_table ();
    }

  cselib_finish ();

  /* Now that cselib is finished, split the blocks after each new
     unconditional trap and remove the dead outgoing edge.  */
  while (!uncond_traps.is_empty ())
    {
      rtx_insn *insn = uncond_traps.pop ();
      basic_block to_split = BLOCK_FOR_INSN (insn);
      remove_edge (split_block (to_split, insn));
      emit_barrier_after_bb (to_split);
    }

  return changed;
}
1313 
1314 /* Similar to get_condition, only the resulting condition must be
1315    valid at JUMP, instead of at EARLIEST.
1316 
1317    This differs from noce_get_condition in ifcvt.c in that we prefer not to
1318    settle for the condition variable in the jump instruction being integral.
1319    We prefer to be able to record the value of a user variable, rather than
1320    the value of a temporary used in a condition.  This could be solved by
1321    recording the value of *every* register scanned by canonicalize_condition,
1322    but this would require some code reorganization.  */
1323 
1324 rtx
fis_get_condition(rtx_insn * jump)1325 fis_get_condition (rtx_insn *jump)
1326 {
1327   return get_condition (jump, NULL, false, true);
1328 }
1329 
1330 /* Check the comparison COND to see if we can safely form an implicit
1331    set from it.  */
1332 
1333 static bool
implicit_set_cond_p(const_rtx cond)1334 implicit_set_cond_p (const_rtx cond)
1335 {
1336   machine_mode mode;
1337   rtx cst;
1338 
1339   /* COND must be either an EQ or NE comparison.  */
1340   if (GET_CODE (cond) != EQ && GET_CODE (cond) != NE)
1341     return false;
1342 
1343   /* The first operand of COND must be a register we can propagate.  */
1344   if (!cprop_reg_p (XEXP (cond, 0)))
1345     return false;
1346 
1347   /* The second operand of COND must be a suitable constant.  */
1348   mode = GET_MODE (XEXP (cond, 0));
1349   cst = XEXP (cond, 1);
1350 
1351   /* We can't perform this optimization if either operand might be or might
1352      contain a signed zero.  */
1353   if (HONOR_SIGNED_ZEROS (mode))
1354     {
1355       /* It is sufficient to check if CST is or contains a zero.  We must
1356 	 handle float, complex, and vector.  If any subpart is a zero, then
1357 	 the optimization can't be performed.  */
1358       /* ??? The complex and vector checks are not implemented yet.  We just
1359 	 always return zero for them.  */
1360       if (CONST_DOUBLE_AS_FLOAT_P (cst)
1361 	  && real_equal (CONST_DOUBLE_REAL_VALUE (cst), &dconst0))
1362 	return 0;
1363       else
1364 	return 0;
1365     }
1366 
1367   return cprop_constant_p (cst);
1368 }
1369 
/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.

   If an implicit set is found but the set is implicit on a critical edge,
   this critical edge is split.

   Return true if the CFG was modified, false otherwise.  */

static bool
find_implicit_sets (void)
{
  basic_block bb, dest;
  rtx cond, new_rtx;
  unsigned int count = 0;
  bool edges_split = false;
  /* implicit_sets is indexed by basic block index; leave headroom for
     blocks created by edge splitting below.  */
  size_t implicit_sets_size = last_basic_block_for_fn (cfun) + 10;

  implicit_sets = XCNEWVEC (rtx, implicit_sets_size);

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Check for more than one successor.  */
      if (EDGE_COUNT (bb->succs) <= 1)
	continue;

      cond = fis_get_condition (BB_END (bb));

      /* If no condition is found or if it isn't of a suitable form,
	 ignore it.  */
      if (! cond || ! implicit_set_cond_p (cond))
	continue;

      /* The set is implicit on the edge where the comparison is known
	 true: the branch edge for EQ, the fallthru edge for NE.  */
      dest = GET_CODE (cond) == EQ
	? BRANCH_EDGE (bb)->dest : FALLTHRU_EDGE (bb)->dest;

      /* If DEST doesn't go anywhere, ignore it.  */
      if (! dest || dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      /* We have found a suitable implicit set.  Try to record it now as
	 a SET in DEST.  If DEST has more than one predecessor, the edge
	 between BB and DEST is a critical edge and we must split it,
	 because we can only record one implicit set per DEST basic block.  */
      if (! single_pred_p (dest))
        {
	  dest = split_edge (find_edge (bb, dest));
	  edges_split = true;
	}

      /* Grow the array if edge splitting created a block whose index is
	 beyond the current allocation; new entries are zeroed.  */
      if (implicit_sets_size <= (size_t) dest->index)
      {
        size_t old_implicit_sets_size = implicit_sets_size;
	implicit_sets_size *= 2;
	implicit_sets = XRESIZEVEC (rtx, implicit_sets, implicit_sets_size);
	memset (implicit_sets + old_implicit_sets_size, 0,
		(implicit_sets_size - old_implicit_sets_size) * sizeof (rtx));
      }

      new_rtx = gen_rtx_SET (XEXP (cond, 0), XEXP (cond, 1));
      implicit_sets[dest->index] = new_rtx;
      if (dump_file)
	{
	  fprintf (dump_file, "Implicit set of reg %d in ",
		   REGNO (XEXP (cond, 0)));
	  fprintf (dump_file, "basic block %d\n", dest->index);
	}
      count++;
    }

  if (dump_file)
    fprintf (dump_file, "Found %d implicit sets\n", count);

  /* Confess our sins.  */
  return edges_split;
}
1449 
/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  It is set by
   one_cprop_pass before bypass_conditional_jumps runs.  */

static int bypass_last_basic_block;
1458 
1459 /* Find a set of REGNO to a constant that is available at the end of basic
1460    block BB.  Return NULL if no such set is found.  Based heavily upon
1461    find_avail_set.  */
1462 
1463 static struct cprop_expr *
find_bypass_set(int regno,int bb)1464 find_bypass_set (int regno, int bb)
1465 {
1466   struct cprop_expr *result = 0;
1467 
1468   for (;;)
1469     {
1470       rtx src;
1471       struct cprop_expr *set = lookup_set (regno, &set_hash_table);
1472 
1473       while (set)
1474 	{
1475 	  if (bitmap_bit_p (cprop_avout[bb], set->bitmap_index))
1476 	    break;
1477 	  set = next_set (regno, set);
1478 	}
1479 
1480       if (set == 0)
1481 	break;
1482 
1483       src = set->src;
1484       if (cprop_constant_p (src))
1485 	result = set;
1486 
1487       if (! REG_P (src))
1488 	break;
1489 
1490       regno = REGNO (src);
1491     }
1492   return result;
1493 }
1494 
1495 /* Subroutine of bypass_block that checks whether a pseudo is killed by
1496    any of the instructions inserted on an edge.  Jump bypassing places
1497    condition code setters on CFG edges using insert_insn_on_edge.  This
1498    function is required to check that our data flow analysis is still
1499    valid prior to commit_edge_insertions.  */
1500 
1501 static bool
reg_killed_on_edge(const_rtx reg,const_edge e)1502 reg_killed_on_edge (const_rtx reg, const_edge e)
1503 {
1504   rtx_insn *insn;
1505 
1506   for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
1507     if (INSN_P (insn) && reg_set_p (reg, insn))
1508       return true;
1509 
1510   return false;
1511 }
1512 
/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.

   During the jump bypassing pass, we may place copies of SETCC instructions
   on CFG edges.  The following routine must be careful to pay attention to
   these inserted insns when performing its transformations.  */

static int
bypass_block (basic_block bb, rtx_insn *setcc, rtx_insn *jump)
{
  rtx_insn *insn;
  rtx note;
  edge e, edest;
  int change;
  int may_be_loop_header = false;
  unsigned removed_p;
  unsigned i;
  edge_iterator ei;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  if (current_loops)
    {
      /* If we are to preserve loop structure then do not bypass
         a loop header.  This will either rotate the loop, create
	 multiple entry loops or even irreducible regions.  */
      if (bb == bb->loop_father->header)
	return 0;
    }
  else
    {
      /* Without loop info, approximate "loop header" by the presence
	 of an incoming DFS back edge.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->flags & EDGE_DFS_BACK)
	  {
	    may_be_loop_header = true;
	    break;
	  }
    }

  change = 0;
  /* Consider each incoming edge in turn; redirecting E removes it from
     bb->preds, so only advance the iterator when nothing was removed.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      removed_p = 0;

      if (e->flags & EDGE_COMPLEX)
	{
	  ei_next (&ei);
	  continue;
	}

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
	{
	  ei_next (&ei);
	  continue;
	}

      /* The irreducible loops created by redirecting of edges entering the
	 loop from outside would decrease effectiveness of some of the
	 following optimizations, so prevent this.  */
      if (may_be_loop_header
	  && !(e->flags & EDGE_DFS_BACK))
	{
	  ei_next (&ei);
	  continue;
	}

      for (i = 0; i < reg_use_count; i++)
	{
	  rtx reg_used = reg_use_table[i];
	  unsigned int regno = REGNO (reg_used);
	  basic_block dest, old_dest;
	  struct cprop_expr *set;
	  rtx src, new_rtx;

	  /* Look for a constant value of REGNO available at the end of
	     the predecessor block.  */
	  set = find_bypass_set (regno, e->src->index);

	  if (! set)
	    continue;

	  /* Check the data flow is valid after edge insertions.  */
	  if (e->insns.r && reg_killed_on_edge (reg_used, e))
	    continue;

	  src = SET_SRC (pc_set (jump));

	  /* Fold the SETCC result into the jump condition first.  */
	  if (setcc != NULL)
	    src = simplify_replace_rtx (src,
					SET_DEST (PATTERN (setcc)),
					SET_SRC (PATTERN (setcc)));

	  /* Then substitute the known constant for the register use.  */
	  new_rtx = simplify_replace_rtx (src, reg_used, set->src);

	  /* Jump bypassing may have already placed instructions on
	     edges of the CFG.  We can't bypass an outgoing edge that
	     has instructions associated with it, as these insns won't
	     get executed if the incoming edge is redirected.  */
	  if (new_rtx == pc_rtx)
	    {
	      /* Condition folded to "fall through".  */
	      edest = FALLTHRU_EDGE (bb);
	      dest = edest->insns.r ? NULL : edest->dest;
	    }
	  else if (GET_CODE (new_rtx) == LABEL_REF)
	    {
	      /* Condition folded to "always take the branch".  */
	      dest = BLOCK_FOR_INSN (XEXP (new_rtx, 0));
	      /* Don't bypass edges containing instructions.  */
	      edest = find_edge (bb, dest);
	      if (edest && edest->insns.r)
		dest = NULL;
	    }
	  else
	    dest = NULL;

	  /* Avoid unification of the edge with other edges from original
	     branch.  We would end up emitting the instruction on "both"
	     edges.  */
	  if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
	      && find_edge (e->src, dest))
	    dest = NULL;

	  old_dest = e->dest;
	  if (dest != NULL
	      && dest != old_dest
	      && dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
            {
	      redirect_edge_and_branch_force (e, dest);

	      /* Copy the register setter to the redirected edge.
		 Don't copy CC0 setters, as CC0 is dead after jump.  */
	      if (setcc)
		{
		  rtx pat = PATTERN (setcc);
		  if (!CC0_P (SET_DEST (pat)))
		    insert_insn_on_edge (copy_insn (pat), e);
		}

	      if (dump_file != NULL)
		{
		  fprintf (dump_file, "JUMP-BYPASS: Proved reg %d "
				      "in jump_insn %d equals constant ",
			   regno, INSN_UID (jump));
		  print_rtl (dump_file, set->src);
		  fprintf (dump_file, "\n\t     when BB %d is entered from "
				      "BB %d.  Redirect edge %d->%d to %d.\n",
			   old_dest->index, e->src->index, e->src->index,
			   old_dest->index, dest->index);
		}
	      change = 1;
	      removed_p = 1;
	      break;
	    }
	}
      if (!removed_p)
	ei_next (&ei);
    }
  return change;
}
1680 
/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Return nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx_insn *setcc;
  rtx_insn *insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return 0;

  /* Mark DFS back edges; bypass_block uses them to avoid creating
     irreducible loops.  */
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      /* Check for more than one predecessor.  */
      if (!single_pred_p (bb))
	{
	  /* Scan the block: accept at most one condition-setting
	     nonjump insn (SETCC) immediately followed by the jump;
	     anything else disqualifies the block.  */
	  setcc = NULL;
	  FOR_BB_INSNS (bb, insn)
	    if (DEBUG_INSN_P (insn))
	      continue;
	    else if (NONJUMP_INSN_P (insn))
	      {
		if (setcc)
		  break;
		if (GET_CODE (PATTERN (insn)) != SET)
		  break;

		dest = SET_DEST (PATTERN (insn));
		if (REG_P (dest) || CC0_P (dest))
		  setcc = insn;
		else
		  break;
	      }
	    else if (JUMP_P (insn))
	      {
		if ((any_condjump_p (insn) || computed_jump_p (insn))
		    && onlyjump_p (insn))
		  changed |= bypass_block (bb, setcc, insn);
		break;
	      }
	    else if (INSN_P (insn))
	      break;
	}
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
1746 
1747 /* Main function for the CPROP pass.  */
1748 
1749 static int
one_cprop_pass(void)1750 one_cprop_pass (void)
1751 {
1752   int i;
1753   int changed = 0;
1754 
1755   /* Return if there's nothing to do, or it is too expensive.  */
1756   if (n_basic_blocks_for_fn (cfun) <= NUM_FIXED_BLOCKS + 1
1757       || gcse_or_cprop_is_too_expensive (_ ("const/copy propagation disabled")))
1758     return 0;
1759 
1760   global_const_prop_count = local_const_prop_count = 0;
1761   global_copy_prop_count = local_copy_prop_count = 0;
1762 
1763   bytes_used = 0;
1764   gcc_obstack_init (&cprop_obstack);
1765 
1766   /* Do a local const/copy propagation pass first.  The global pass
1767      only handles global opportunities.
1768      If the local pass changes something, remove any unreachable blocks
1769      because the CPROP global dataflow analysis may get into infinite
1770      loops for CFGs with unreachable blocks.
1771 
1772      FIXME: This local pass should not be necessary after CSE (but for
1773 	    some reason it still is).  It is also (proven) not necessary
1774 	    to run the local pass right after FWPWOP.
1775 
1776      FIXME: The global analysis would not get into infinite loops if it
1777 	    would use the DF solver (via df_simple_dataflow) instead of
1778 	    the solver implemented in this file.  */
1779   changed |= local_cprop_pass ();
1780   if (changed)
1781     delete_unreachable_blocks ();
1782 
1783   /* Determine implicit sets.  This may change the CFG (split critical
1784      edges if that exposes an implicit set).
1785      Note that find_implicit_sets() does not rely on up-to-date DF caches
1786      so that we do not have to re-run df_analyze() even if local CPROP
1787      changed something.
1788      ??? This could run earlier so that any uncovered implicit sets
1789 	 sets could be exploited in local_cprop_pass() also.  Later.  */
1790   changed |= find_implicit_sets ();
1791 
1792   /* If local_cprop_pass() or find_implicit_sets() changed something,
1793      run df_analyze() to bring all insn caches up-to-date, and to take
1794      new basic blocks from edge splitting on the DF radar.
1795      NB: This also runs the fast DCE pass, because execute_rtl_cprop
1796      sets DF_LR_RUN_DCE.  */
1797   if (changed)
1798     df_analyze ();
1799 
1800   /* Initialize implicit_set_indexes array.  */
1801   implicit_set_indexes = XNEWVEC (int, last_basic_block_for_fn (cfun));
1802   for (i = 0; i < last_basic_block_for_fn (cfun); i++)
1803     implicit_set_indexes[i] = -1;
1804 
1805   alloc_hash_table (&set_hash_table);
1806   compute_hash_table (&set_hash_table);
1807 
1808   /* Free implicit_sets before peak usage.  */
1809   free (implicit_sets);
1810   implicit_sets = NULL;
1811 
1812   if (dump_file)
1813     dump_hash_table (dump_file, "SET", &set_hash_table);
1814   if (set_hash_table.n_elems > 0)
1815     {
1816       basic_block bb;
1817       auto_vec<rtx_insn *> uncond_traps;
1818 
1819       alloc_cprop_mem (last_basic_block_for_fn (cfun),
1820 		       set_hash_table.n_elems);
1821       compute_cprop_data ();
1822 
1823       free (implicit_set_indexes);
1824       implicit_set_indexes = NULL;
1825 
1826       /* Allocate vars to track sets of regs.  */
1827       reg_set_bitmap = ALLOC_REG_SET (NULL);
1828 
1829       FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
1830 		      EXIT_BLOCK_PTR_FOR_FN (cfun),
1831 		      next_bb)
1832 	{
1833 	  bool seen_uncond_trap = false;
1834 	  rtx_insn *insn;
1835 
1836 	  /* Reset tables used to keep track of what's still valid [since
1837 	     the start of the block].  */
1838 	  reset_opr_set_tables ();
1839 
1840 	  FOR_BB_INSNS (bb, insn)
1841 	    if (INSN_P (insn))
1842 	      {
1843 		bool was_uncond_trap
1844 		  = (GET_CODE (PATTERN (insn)) == TRAP_IF
1845 		     && XEXP (PATTERN (insn), 0) == const1_rtx);
1846 
1847 		changed |= cprop_insn (insn);
1848 
1849 		/* Keep track of everything modified by this insn.  */
1850 		/* ??? Need to be careful w.r.t. mods done to INSN.
1851 		       Don't call mark_oprs_set if we turned the
1852 		       insn into a NOTE, or deleted the insn.  */
1853 		if (! NOTE_P (insn) && ! insn->deleted ())
1854 		  mark_oprs_set (insn);
1855 
1856 		if (!was_uncond_trap
1857 		    && GET_CODE (PATTERN (insn)) == TRAP_IF
1858 		    && XEXP (PATTERN (insn), 0) == const1_rtx)
1859 		  {
1860 		    /* If we have already seen an unconditional trap
1861 		       earlier, the rest of the bb is going to be removed
1862 		       as unreachable.  Just turn it into a note, so that
1863 		       RTL verification doesn't complain about it before
1864 		       it is finally removed.  */
1865 		    if (seen_uncond_trap)
1866 		      set_insn_deleted (insn);
1867 		    else
1868 		      {
1869 			seen_uncond_trap = true;
1870 			uncond_traps.safe_push (insn);
1871 		      }
1872 		  }
1873 	      }
1874 	}
1875 
1876       /* Make sure bypass_conditional_jumps will ignore not just its new
1877 	 basic blocks, but also the ones after unconditional traps (those are
1878 	 unreachable and will be eventually removed as such).  */
1879       bypass_last_basic_block = last_basic_block_for_fn (cfun);
1880 
1881       while (!uncond_traps.is_empty ())
1882 	{
1883 	  rtx_insn *insn = uncond_traps.pop ();
1884 	  basic_block to_split = BLOCK_FOR_INSN (insn);
1885 	  remove_edge (split_block (to_split, insn));
1886 	  emit_barrier_after_bb (to_split);
1887 	}
1888 
1889       changed |= bypass_conditional_jumps ();
1890 
1891       FREE_REG_SET (reg_set_bitmap);
1892       free_cprop_mem ();
1893     }
1894   else
1895     {
1896       free (implicit_set_indexes);
1897       implicit_set_indexes = NULL;
1898     }
1899 
1900   free_hash_table (&set_hash_table);
1901   obstack_free (&cprop_obstack, NULL);
1902 
1903   if (dump_file)
1904     {
1905       fprintf (dump_file, "CPROP of %s, %d basic blocks, %d bytes needed, ",
1906 	       current_function_name (), n_basic_blocks_for_fn (cfun),
1907 	       bytes_used);
1908       fprintf (dump_file, "%d local const props, %d local copy props, ",
1909 	       local_const_prop_count, local_copy_prop_count);
1910       fprintf (dump_file, "%d global const props, %d global copy props\n\n",
1911 	       global_const_prop_count, global_copy_prop_count);
1912     }
1913 
1914   return changed;
1915 }
1916 
1917 /* All the passes implemented in this file.  Each pass has its
1918    own gate and execute function, and at the end of the file a
1919    pass definition for passes.c.
1920 
1921    We do not construct an accurate cfg in functions which call
1922    setjmp, so none of these passes runs if the function calls
1923    setjmp.
1924    FIXME: Should just handle setjmp via REG_SETJMP notes.  */
1925 
1926 static unsigned int
execute_rtl_cprop(void)1927 execute_rtl_cprop (void)
1928 {
1929   int changed;
1930   delete_unreachable_blocks ();
1931   df_set_flags (DF_LR_RUN_DCE);
1932   df_analyze ();
1933   changed = one_cprop_pass ();
1934   flag_rerun_cse_after_global_opts |= changed;
1935   if (changed)
1936     cleanup_cfg (CLEANUP_CFG_CHANGED);
1937   return 0;
1938 }
1939 
1940 namespace {
1941 
1942 const pass_data pass_data_rtl_cprop =
1943 {
1944   RTL_PASS, /* type */
1945   "cprop", /* name */
1946   OPTGROUP_NONE, /* optinfo_flags */
1947   TV_CPROP, /* tv_id */
1948   PROP_cfglayout, /* properties_required */
1949   0, /* properties_provided */
1950   0, /* properties_destroyed */
1951   0, /* todo_flags_start */
1952   TODO_df_finish, /* todo_flags_finish */
1953 };
1954 
1955 class pass_rtl_cprop : public rtl_opt_pass
1956 {
1957 public:
pass_rtl_cprop(gcc::context * ctxt)1958   pass_rtl_cprop (gcc::context *ctxt)
1959     : rtl_opt_pass (pass_data_rtl_cprop, ctxt)
1960   {}
1961 
1962   /* opt_pass methods: */
clone()1963   opt_pass * clone () { return new pass_rtl_cprop (m_ctxt); }
gate(function * fun)1964   virtual bool gate (function *fun)
1965     {
1966       return optimize > 0 && flag_gcse
1967 	&& !fun->calls_setjmp
1968 	&& dbg_cnt (cprop);
1969     }
1970 
execute(function *)1971   virtual unsigned int execute (function *) { return execute_rtl_cprop (); }
1972 
1973 }; // class pass_rtl_cprop
1974 
1975 } // anon namespace
1976 
1977 rtl_opt_pass *
make_pass_rtl_cprop(gcc::context * ctxt)1978 make_pass_rtl_cprop (gcc::context *ctxt)
1979 {
1980   return new pass_rtl_cprop (ctxt);
1981 }
1982