/* Post reload partially redundant load elimination
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"

#include "cfgrtl.h"
#include "profile.h"
#include "expr.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "intl.h"
#include "gcse-common.h"
#include "gcse.h"
#include "regs.h"
#include "function-abi.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination, i.e. try to eliminate
   redundant loads created by the reload pass.  We look for fully or
   partially redundant loads fed by one or more loads/stores in
   predecessor BBs, and try adding loads to make them fully redundant.
   We also check whether it's worth adding loads to be able to delete
   the redundant load.

   Algorithm:
   1. Build available expressions hash table:
       For each load/store instruction, if the loaded/stored memory didn't
       change until the end of the basic block, add this memory expression
       to the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
	 perform partial redundancy elimination, check if it's worth adding
	 loads to make the load fully redundant.  If so, add loads and
	 register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between the load and some store,
     look for some other free register between the load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.
*/
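
/* As an illustration (a hypothetical sketch, not lifted from any real
   target), consider a block BB3 with two predecessors, only one of
   which makes the loaded value available:

     BB1: [sp + 16] := r3       BB2: (does not touch [sp + 16])
	     \			    /
	      \			   /
	       BB3: r3 := [sp + 16]

   The load in BB3 is redundant on the path through BB1 but not on the
   path through BB2.  Since the value already lives in r3 at the end of
   BB1, no move is needed on that edge; the pass inserts a copy of the
   load on the edge BB2->BB3 and deletes the now fully redundant load
   in BB3.  */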


/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a single linked
   list of occurrences.  */

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The cached hash value for this entry.  */
  hashval_t hash;

  /* Index in the transparent bitmaps.  */
  unsigned int bitmap_index;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

/* Hashtable helpers.  */

struct expr_hasher : nofree_ptr_hash <expr>
{
  static inline hashval_t hash (const expr *);
  static inline bool equal (const expr *, const expr *);
};


/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
		   NULL,  /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here; we just return the cached hash value.  */

inline hashval_t
expr_hasher::hash (const expr *exp)
{
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

inline bool
expr_hasher::equal (const expr *exp1, const expr *exp2)
{
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}

/* The table itself.  */
static hash_table<expr_hasher> *expr_table;


static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx_insn *insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx_insn *insn;
};

static struct obstack unoccr_obstack;

/* Array where each element is the CUID of the insn that last set the hard
   register with the number of the element, since the start of the current
   basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;
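
/* For example (with made-up numbers): if the insn with CUID 7 was the
   last in the current block to set hard register 3, then
   reg_avail_info[3] == 7, and asking whether reg 3 is unchanged after
   the insn with CUID 5 fails because 7 > 5.  The entries are zeroed at
   the start of each new basic block by reset_opr_set_tables.  */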

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx_insn *insn;
  struct modifies_mem *next;
};
static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem  *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
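
/* For example (hypothetical numbers): if a block contains the insns
   with UIDs 11 (a real insn), 14 (a note) and 17 (a real insn), they
   are assigned the CUIDs 1, 2 and 2 respectively; alloc_mem advances
   the counter only for real insns, so a non-insn is mapped to the CUID
   of the next real insn.  */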

/* Bitmap of blocks which have memory stores.  */
static bitmap modify_mem_list_set;

/* Bitmap of blocks which have calls.  */
static bitmap blocks_with_calls;

/* Vector indexed by block # with a list of all the insns that
   modify memory within the block.  */
static vec<rtx_insn *> *modify_mem_list;

/* Vector indexed by block # with a canonicalized list of insns
   that modify memory in the block.  */
static vec<modify_pair> *canon_modify_mem_list;

/* Vector of simple bitmaps indexed by block number.  Each component sbitmap
   indicates which expressions are transparent through the block.  */
static sbitmap *transp;


/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
static void record_last_reg_set_info (rtx_insn *, rtx);
static void record_last_reg_set_info_regno (rtx_insn *, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx_insn *);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx_insn *);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);

/* The work horses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
						rtx_insn *,
						struct expr *);
static void eliminate_partially_redundant_loads (void);


/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx_insn *insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));

  blocks_with_calls = BITMAP_ALLOC (NULL);
  modify_mem_list_set = BITMAP_ALLOC (NULL);

  modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
					      sizeof (vec_rtx_heap));
  canon_modify_mem_list
    = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
					sizeof (vec_modify_pair_heap));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  delete expr_table;
  expr_table = NULL;

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      modify_mem_list[i].release ();
      canon_modify_mem_list[i].release ();
    }

  BITMAP_FREE (blocks_with_calls);
  BITMAP_FREE (modify_mem_list_set);
  free (reg_avail_info);
  free (modify_mem_list);
  free (canon_modify_mem_list);
}

/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx_insn *insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);

  if (! (*slot))
    {
      /* The expression isn't found, so insert it.  */
      *slot = cur_expr;

      /* Anytime we add an entry to the table, record the index
	 of the new entry.  The bitmap index starts counting
	 at zero.  */
      cur_expr->bitmap_index = expr_table->elements () - 1;
    }
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  We insert
     insns blockwise from start to end and prepend new occurrences to
     the head of the list, so we only have to check the first element.  */
  avail_occr = cur_expr->avail_occr;
  if (avail_occr
      && BLOCK_FOR_INSN (avail_occr->insn) == BLOCK_FOR_INSN (insn))
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));
      avail_occr->insn = insn;
      avail_occr->next = cur_expr->avail_occr;
      avail_occr->deleted_p = 0;
      cur_expr->avail_occr = avail_occr;
    }
}


/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}


/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
int
dump_expr_hash_table_entry (expr **slot, FILE *file)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, exprs->expr);
  fprintf (file, "\nhashcode: %u\n", exprs->hash);
  fprintf (file, "list of occurrences:\n");
  occr = exprs->avail_occr;
  while (occr)
    {
      rtx_insn *insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
           (long) expr_table->size (),
           (long) expr_table->elements (),
           expr_table->collisions ());
  if (!expr_table->is_empty ())
    {
      fprintf (file, "\n\ntable entries:\n");
      expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */

static bool
oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}


/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}


/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx_insn *setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
	   && INSN_CUID (setter) < uid_limit)
	  || (! after_insn
	      && INSN_CUID (setter) > uid_limit))
	{
	  list_entry = list_entry->next;
	  continue;
	}

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.
	 It will set mems_conflict_p to nonzero if there may be a
	 conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (setter, find_mem_conflicts, x);
      if (mems_conflict_p)
	return 1;

      list_entry = list_entry->next;
    }
  return 0;
}


/* Record register first/last/block set information for REGNO in INSN.  */

static inline void
record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}


/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set (consider
   a CALL_INSN).  We merely need to record which insns modify memory.  */

static void
record_last_mem_set_info (rtx_insn *insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
						      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;

  record_last_mem_set_info_common (insn, modify_mem_list,
				   canon_modify_mem_list,
				   modify_mem_list_set,
				   blocks_with_calls);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *last_set_insn = (rtx_insn *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
	 clobber the stack pointer though.  Some targets do argument
	 pushes without adding REG_INC notes.  See e.g. PR25196,
	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
	 such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}


/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}


/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx_insn *insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (insn, record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      hard_reg_set_iterator hrsi;
      /* We don't track modes of hard registers, so we need to be
	 conservative and assume that partial kills are full kills.  */
      HARD_REG_SET callee_clobbers
	= insn_callee_abi (insn).full_and_partial_reg_clobbers ();
      EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, regno, hrsi)
	record_last_reg_set_info_regno (insn, regno);

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}


/* Scan the pattern of INSN and add an entry to the hash TABLE.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (dest))
	  /* Is SET_SRC something we want to gcse?  */
	  && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
	  /* Never consider insns touching the register stack.  It may
	     create situations that reg-stack cannot handle (e.g. a stack
	     register live across an abnormal edge).  */
	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
	}
    }
  else if (REG_P (src))
    {
      /* Only record sets of pseudo-regs in the hash table.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (src))
	  /* Is SET_DEST something we want to gcse?  */
	  && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
	  /* As above for STACK_REGS.  */
	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
	  /* Check if the memory expression is killed after insn.  */
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
	}
    }
}


/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      /* First pass over the instructions records information used to
	 determine when registers and memory are last set.
	 Since we compute a "local" AVAIL_OUT, reset the tables that
	 help us keep track of what has been modified since the start
	 of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
	  hash_scan_set (insn);
    }
}


/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx_insn *insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      /* commit_one_edge_insertion refuses to insert on abnormal edges even if
	 the source has only one successor so EDGE_CRITICAL_P is too weak.  */
      if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest))
	return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	return false;

      if (tablejump_p (BB_END (pred->src), NULL, NULL))
	return false;
    }
  return true;
}


/* Search for an occurrence of the expression in BB.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
{
  struct occr *occr = orig_occr;

  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;

  /* If we could not find an occurrence in BB, see if BB
     has a single predecessor with an occurrence that is
     transparent through BB.  */
  if (transp
      && single_pred_p (bb)
      && bitmap_bit_p (transp[bb->index], bitmap_index)
      && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
    {
      rtx avail_reg = get_avail_load_store_reg (occr->insn);
      if (!reg_set_between_p (avail_reg,
			      PREV_INSN (BB_HEAD (bb)),
			      NEXT_INSN (BB_END (bb)))
	  && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
	return occr;
    }

  return NULL;
}


/* This helper is called via htab_traverse.  */
int
compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
{
  struct expr *expr = *slot;

  compute_transp (expr->expr, expr->bitmap_index, transp,
		  blocks_with_calls, modify_mem_list_set,
		  canon_modify_mem_list);
  return 1;
}

/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */
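
/* A worked example of the first profitability check below (the
   parameter value is the default at the time of writing and can be
   changed with --param): with gcse-after-reload-partial-fraction == 3,
   a load whose available (ok) edges are executed 600 times and whose
   unavailable (not-ok) edges are executed 100 times passes the test,
   since 600 >= 3 * 100; with a combined ok count of only 250 it would
   fail and the transformation would be rolled back.  */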

static void
eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
				    struct expr *expr)
{
  edge pred;
  rtx_insn *avail_insn = NULL;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  /* Redundant load execution count.  */
  profile_count ok_count = profile_count::zero ();
  /* Execution count of critical edges.  */
  profile_count critical_count = profile_count::zero ();
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  profile_count not_ok_count = profile_count::zero ();
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx_insn *next_pred_bb_end;

      avail_insn = NULL;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      for (a_occr = get_bb_avail_insn (pred_bb,
				       expr->avail_occr,
				       expr->bitmap_index);
	   a_occr;
	   a_occr = get_bb_avail_insn (pred_bb,
				       a_occr->next,
				       expr->bitmap_index))
	{
	  /* Check if the loaded register is not used.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  rtx_insn *move = gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg));
	  extract_insn (move);
	  if (! constrain_operands (1, get_preferred_alternatives (insn,
								   pred_bb))
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred) && pred->count ().initialized_p ())
	critical_count += pred->count ();

      if (avail_insn != NULL_RTX)
	{
	  npred_ok++;
	  if (pred->count ().initialized_p ())
	    ok_count = ok_count + pred->count ();
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move; no need to generate it.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  if (pred->count ().initialized_p ())
	    not_ok_count = not_ok_count + pred->count ();
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
         a critical edge is profitable or not so don't do it.  */
      || ((!profile_info || profile_status_for_fn (cfun) != PROFILE_READ
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count.to_gcov_type ()
      < param_gcse_after_reload_partial_fraction * not_ok_count.to_gcov_type ())
    goto cleanup;

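  /* The second profitability test guards the critical-edge splits
     counted above: the redundant load must be executed at least
     gcse-after-reload-critical-fraction times as often as the critical
     edges it would cause to be split.  The multiplication can overflow
     gcov_type, so saturate the threshold where the host compiler
     provides __builtin_mul_overflow.  */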
  gcov_type threshold;
#if (GCC_VERSION >= 5000)
  if (__builtin_mul_overflow (param_gcse_after_reload_critical_fraction,
			      critical_count.to_gcov_type (), &threshold))
    threshold = profile_count::max_count;
#else
  threshold
    = (param_gcse_after_reload_critical_fraction
       * critical_count.to_gcov_type ());
#endif

  if (ok_count.to_gcov_type () < threshold)
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* Delete the insn if it is not available in this block, otherwise
     only mark it for later deletion: while still available it may help
     discover additional redundancies.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
	  print_rtl_single (dump_file, insn);
	  fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}
123510d565efSmrg 
/* Perform the redundancy elimination as described above.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx_insn *insn;
  basic_block bb;

  /* Nothing to do for a function with no real basic blocks.  Note that
     the walk below skips the first real basic block after the entry
     block and starts at the second one.  */

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  FOR_BB_BETWEEN (bb,
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun),
		  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
	continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
	continue;

      /* Reset the table of things changed since the start of the current
	 basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
	 any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
	{
	  /* Is it a load - of the form (set (reg) (mem))?  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SET
	      && REG_P (SET_DEST (PATTERN (insn)))
	      && MEM_P (SET_SRC (PATTERN (insn))))
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (!MEM_VOLATILE_P (src)
		  && GET_MODE (src) != BLKmode
		  && general_operand (src, GET_MODE (src))
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_unchanged_p (src, insn, false)
		  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
		  && !side_effects_p (src)
		  /* Is the expression recorded?  */
		  && (expr = lookup_expr_in_table (src)) != NULL)
		{
		  /* We now have a load (INSN) and a memory expression that
		     is available at the start of its block (EXPR).  Try to
		     remove the load if it is redundant.  */
		  eliminate_partially_redundant_load (bb, insn, expr);
		}
	    }

	  /* Keep track of everything modified by this insn, so that we
	     know what has been modified since the start of the current
	     basic block.  */
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}
    }

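  /* Commit all the insns queued on the edges above; this may split
     critical edges to create a place to hold the new code.  */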
  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
int
delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *exprs = *slot;
  struct occr *occr;

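  /* Delete every occurrence that the elimination step marked, gated by
     the gcse2_delete debug counter so problematic deletions can be
     bisected.  */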
  for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	{
	  delete_insn (occr->insn);
	  stats.insns_deleted++;

	  if (dump_file)
	    {
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE-after-reload pass: eliminate redundant
   loads created by spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  /* Disable computing transparency if it is too expensive.  */
  bool do_transp
    = !gcse_or_cprop_is_too_expensive (_("using simple load CSE after register "
					 "allocation"));

  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (!expr_table->is_empty ())
    {
      /* Knowing which MEMs are transparent through a block can significantly
	 increase the number of redundant loads found.  So compute transparency
	 information for each memory expression in the hash table.  */
      df_analyze ();
      if (do_transp)
	{
	  /* This cannot be part of the normal allocation routine because
	     we have to know the number of elements in the hash table.  */
	  transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
					 expr_table->elements ());
	  bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
	  expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
	}
      else
	transp = NULL;
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();
      if (do_transp)
	sbitmap_vector_free (transp);

      if (dump_file)
	{
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted:  %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted:   %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
	}

      statistics_counter_event (cfun, "copies inserted",
				stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
				stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
				stats.insns_deleted);
    }

  /* We are finished with alias.  */
  end_alias_analysis ();

  free_mem ();
}


static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

namespace {

const pass_data pass_data_gcse2 =
{
  RTL_PASS, /* type */
  "gcse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GCSE_AFTER_RELOAD, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_gcse2 : public rtl_opt_pass
{
public:
  pass_gcse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_gcse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      return (optimize > 0 && flag_gcse_after_reload
	      && optimize_function_for_speed_p (fun));
    }

  virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }

}; // class pass_gcse2

} // anon namespace

rtl_opt_pass *
make_pass_gcse2 (gcc::context *ctxt)
{
  return new pass_gcse2 (ctxt);
}