1 /* RTL dead store elimination.
2    Copyright (C) 2005-2013 Free Software Foundation, Inc.
3 
4    Contributed by Richard Sandiford <rsandifo@codesourcery.com>
5    and Kenneth Zadeck <zadeck@naturalbridge.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #undef BASELINE
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "hash-table.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "tm_p.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "regset.h"
36 #include "flags.h"
37 #include "df.h"
38 #include "cselib.h"
39 #include "tree-pass.h"
40 #include "alloc-pool.h"
41 #include "alias.h"
42 #include "insn-config.h"
43 #include "expr.h"
44 #include "recog.h"
45 #include "optabs.h"
46 #include "dbgcnt.h"
47 #include "target.h"
48 #include "params.h"
49 #include "tree-flow.h" /* for may_be_aliased */
50 
51 /* This file contains three techniques for performing Dead Store
52    Elimination (dse).
53 
54    * The first technique performs dse locally on any base address.  It
55    is based on cselib, which is a local value numbering technique.
56    This technique is local to a basic block but deals with fairly
57    general addresses.
58 
59    * The second technique performs dse globally but is restricted to
60    base addresses that are either constant or are relative to the
61    frame_pointer.
62 
63    * The third technique (which is only done after register allocation)
64    processes the spill slots.  This differs from the second
65    technique because it takes advantage of the fact that spilling is
66    completely free from the effects of aliasing.
67 
68    Logically, dse is a backwards dataflow problem.  A store can be
69    deleted if it cannot be reached in the backward direction by any
70    use of the value being stored.  However, the local technique uses a
71    forwards scan of the basic block because cselib requires that the
72    block be processed in that order.
73 
74    The pass is logically broken into the following steps:
75 
76    0) Initialization.
77 
78    1) The local algorithm, as well as scanning the insns for the two
79    global algorithms.
80 
81    2) Analysis to see if the global algorithms are necessary.  In the
82    case of stores based on a constant address, there must be at least two
83    stores to that address, to make it possible to delete some of the
84    stores.  In the case of stores off of the frame or spill related
85    stores, only one store to an address is necessary because those
86    stores die at the end of the function.
87 
88    3) Set up the global dataflow equations based on processing the
89    info parsed in the first step.
90 
91    4) Solve the dataflow equations.
92 
93    5) Delete the insns that the global analysis has indicated are
94    unnecessary.
95 
96    6) Delete insns that store the same value as a preceding store,
97    where the earlier store couldn't be eliminated.
98 
99    7) Cleanup.
100 
101    Step 1 uses cselib and canon_rtx to build the largest expression
102    possible for each address.  This pass is a forwards pass through
103    each basic block.  From the point of view of the global technique,
104    the first pass could examine a block in either direction.  The
105    forwards ordering is to accommodate cselib.
106 
107    We make a simplifying assumption: addresses fall into four broad
108    categories:
109 
110    1) base has rtx_varies_p == false, offset is constant.
111    2) base has rtx_varies_p == false, offset variable.
112    3) base has rtx_varies_p == true, offset constant.
113    4) base has rtx_varies_p == true, offset variable.
114 
115    The local passes are able to process all 4 kinds of addresses.  The
116    global pass only handles 1).
117 
118    The global problem is formulated as follows:
119 
120      A store, S1, to address A, where A is not relative to the stack
121      frame, can be eliminated if all paths from S1 to the end of the
122      function contain another store to A before a read to A.
123 
124      If the address A is relative to the stack frame, a store S2 to A
125      can be eliminated if there are no paths from S2 that reach the
126      end of the function that read A before another store to A.  In
127      this case S2 can be deleted if there are paths from S2 to the
128      end of the function that have no reads or writes to A.  This
129      second case allows stores to the stack frame to be deleted that
130      would otherwise die when the function returns.  This cannot be
131      done if stores_off_frame_dead_at_return is not true.  See the
132      documentation for that variable for the cases in which it is false.
133 
134      The global problem is formulated as a backwards set union
135      dataflow problem where the stores are the gens and reads are the
136      kills.  Set union problems are rare and require some special
137      handling given our representation of bitmaps.  A straightforward
138      implementation requires a lot of bitmaps filled with 1s.
139      These are expensive and cumbersome in our bitmap formulation so
140      care has been taken to avoid large vectors filled with 1s.  See
141      the comments in bb_info and in the dataflow confluence functions
142      for details.
143 
144    There are two places for further enhancements to this algorithm:
145 
146    1) The original dse which was embedded in a pass called flow also
147    did local address forwarding.  For example in
148 
149    A <- r100
150    ... <- A
151 
152    flow would replace the right hand side of the second insn with a
153    reference to r100.  Most of the information needed to add this to
154    this pass is available.  It has not been done because it is a lot
155    of work to handle the cases where r100 is assigned to between the
156    first and second insn and/or the second insn is a load of part of
157    the value stored by the first insn.
158 
159    insn 5 in gcc.c-torture/compile/990203-1.c is a simple case.
160    insn 15 in gcc.c-torture/execute/20001017-2.c is a simple case.
161    insn 25 in gcc.c-torture/execute/20001026-1.c is a simple case.
162    insn 44 in gcc.c-torture/execute/20010910-1.c is a simple case.
163 
164    2) The cleaning up of spill code is quite profitable.  It currently
165    depends on reading tea leaves and chicken entrails left by reload.
166    This pass depends on reload creating a singleton alias set for each
167    spill slot and telling the next dse pass which of these alias sets
168    are the singletons.  Rather than analyze the addresses of the
169    spills, dse's spill processing just does analysis of the loads and
170    stores that use those alias sets.  There are three cases where this
171    falls short:
172 
173      a) Reload sometimes creates the slot for one mode of access, and
174      then inserts loads and/or stores for a smaller mode.  In this
175      case, the current code just punts on the slot.  The proper thing
176      to do is to back out and use one bit vector position for each
177      byte of the entity associated with the slot.  This depends on
178      KNOWING that reload always generates the accesses for each of the
179      bytes in some canonical (read that easy to understand several
180      passes after reload happens) way.
181 
182      b) Reload sometimes decides that the spill slot it allocated was not
183      large enough for the mode and goes back and allocates more slots
184      with the same mode and alias set.  The backout in this case is a
185      little more graceful than (a).  In this case the slot is unmarked
186      as being a spill slot and if the final address comes out to be based
187      off the frame pointer, the global algorithm handles this slot.
188 
189      c) For any pass that may prespill, there is currently no
190      mechanism to tell the dse pass that the slot being used has the
191      special properties that reload uses.  It may be that all that is
192      required is to have those passes make the same calls that reload
193      does, assuming that the alias sets can be manipulated in the same
194      way.  */
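
/* As a worked illustration (not part of the pass), consider the kind of
   code each technique targets, assuming this hypothetical source:

     static int g;

     int
     f (int *p)
     {
       int local = 3;
       g = 1;
       g = 2;
       *p = 4;
       return g;
     }

   The store g = 1 is dead because every path from it reaches the store
   g = 2 before any read of g; g has a constant base address, so either
   the global technique or (within one block) the local technique can
   delete it.  The store local = 3 is dead because local is never read
   before f returns, which is the frame-store rule described above.
   Stores through *p have a varying base, so earlier shadowed *p stores
   can only be matched up by the cselib-based local technique.  */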
195 
196 /* There are limits to the size of constant offsets we model for the
197    global problem.  There are certainly test cases that exceed this
198    limit; however, it is unlikely that there are important programs
199    that really have constant offsets this large.  */
200 #define MAX_OFFSET (64 * 1024)
201 
202 /* Obstack for the DSE dataflow bitmaps.  We don't want to put these
203    on the default obstack because these bitmaps can grow quite large
204    (~2GB for the small (!) test case of PR54146) and we'll hold on to
205    all that memory until the end of the compiler run.
206    As a bonus, delete_tree_live_info can destroy all the bitmaps by just
207    releasing the whole obstack.  */
208 static bitmap_obstack dse_bitmap_obstack;
209 
210 /* Obstack for other data.  As for above: Kinda nice to be able to
211    throw it all away at the end in one big sweep.  */
212 static struct obstack dse_obstack;
213 
214 /* Scratch bitmap for cselib's cselib_expand_value_rtx.  */
215 static bitmap scratch = NULL;
216 
217 struct insn_info;
218 
219 /* This structure holds information about a candidate store.  */
220 struct store_info
221 {
222 
223   /* False means this is a clobber.  */
224   bool is_set;
225 
226   /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
227   bool is_large;
228 
229   /* The id of the mem group of the base address.  If rtx_varies_p is
230      true, this is -1.  Otherwise, it is the index into the group
231      table.  */
232   int group_id;
233 
234   /* This is the cselib value.  */
235   cselib_val *cse_base;
236 
237   /* This is the canonized mem.  */
238   rtx mem;
239 
240   /* Canonized MEM address for use by canon_true_dependence.  */
241   rtx mem_addr;
242 
243   /* If this is non-zero, it is the alias set of a spill location.  */
244   alias_set_type alias_set;
245 
246   /* The offsets of the first byte and the byte after the last byte
247      associated with the operation.  */
248   HOST_WIDE_INT begin, end;
249 
250   union
251     {
252       /* A bitmask as wide as the number of bytes in the word that
253 	 contains a 1 if the byte may be needed.  The store is unused if
254 	 all of the bits are 0.  This is used if IS_LARGE is false.  */
255       unsigned HOST_WIDE_INT small_bitmask;
256 
257       struct
258 	{
259 	  /* A bitmap with one bit per byte.  Cleared bit means the position
260 	     is needed.  Used if IS_LARGE is true.  */
261 	  bitmap bmap;
262 
263 	  /* Number of set bits (i.e. unneeded bytes) in BMAP.  If it is
264 	     equal to END - BEGIN, the whole store is unused.  */
265 	  int count;
266 	} large;
267     } positions_needed;
268 
269   /* The next store info for this insn.  */
270   struct store_info *next;
271 
272   /* The right hand side of the store.  This is used if there is a
273      subsequent reload of the mem's address somewhere later in the
274      basic block.  */
275   rtx rhs;
276 
277   /* If rhs is or holds a constant, this contains that constant,
278      otherwise NULL.  */
279   rtx const_rhs;
280 
281   /* Set if this store stores the same constant value as the
282      REDUNDANT_REASON insn stored.  These aren't eliminated early, because
283      doing that might prevent the earlier, larger store from being eliminated.  */
284   struct insn_info *redundant_reason;
285 };
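
/* An illustrative example of the positions_needed encoding above: a
   4-byte store with BEGIN == 0 is small (IS_LARGE false) and starts
   with small_bitmask == 0xf.  If a later 2-byte store covers bytes 2
   and 3, those bits are cleared, leaving 0x3; the store only becomes
   deletable once every bit has been cleared.  Stores wider than a
   HOST_WIDE_INT use the large form, where set bits mark unneeded bytes
   and COUNT caches how many are set.  */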
286 
287 /* Return a bitmask with the first N low bits set.  */
288 
289 static unsigned HOST_WIDE_INT
290 lowpart_bitmask (int n)
291 {
292   unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
293   return mask >> (HOST_BITS_PER_WIDE_INT - n);
294 }
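
/* For instance, with a 64-bit HOST_WIDE_INT, lowpart_bitmask (4)
   yields 0xf and lowpart_bitmask (HOST_BITS_PER_WIDE_INT) yields all
   ones.  Callers must pass 1 <= N <= HOST_BITS_PER_WIDE_INT; N == 0
   would shift by the full bit width, which is undefined behavior in
   C.  */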
295 
296 typedef struct store_info *store_info_t;
297 static alloc_pool cse_store_info_pool;
298 static alloc_pool rtx_store_info_pool;
299 
300 /* This structure holds information about a load.  These are only
301    built for rtx bases.  */
302 struct read_info
303 {
304   /* The id of the mem group of the base address.  */
305   int group_id;
306 
307   /* If this is non-zero, it is the alias set of a spill location.  */
308   alias_set_type alias_set;
309 
310   /* The offset of the first and byte after the last byte associated
311      with the operation.  If begin == end == 0, the read did not have
312      a constant offset.  */
313   int begin, end;
314 
315   /* The mem being read.  */
316   rtx mem;
317 
318   /* The next read_info for this insn.  */
319   struct read_info *next;
320 };
321 typedef struct read_info *read_info_t;
322 static alloc_pool read_info_pool;
323 
324 
325 /* One of these records is created for each insn.  */
326 
327 struct insn_info
328 {
329   /* Set true if the insn contains a store but the insn itself cannot
330      be deleted.  This is set if the insn is a parallel and there is
331      more than one non-dead output or if the insn is in some way
332      volatile.  */
333   bool cannot_delete;
334 
335   /* This field is only used by the global algorithm.  It is set true
336      if the insn contains any read of mem other than through a
337      category (1) address above.  This is also set if the insn is a call
338      or has a clobber mem.  If the insn contains a wild read, read_rec will be null.  */
339   bool wild_read;
340 
341   /* This is true only for CALL instructions which could potentially read
342      any non-frame memory location. This field is used by the global
343      algorithm.  */
344   bool non_frame_wild_read;
345 
346   /* This field is only used for the processing of const functions.
347      These functions cannot read memory, but they can read the stack
348      because that is where they may get their parms.  We need to be
349      this conservative because, like the store motion pass, we don't
350      consider CALL_INSN_FUNCTION_USAGE when processing call insns.
351      Moreover, we need to distinguish two cases:
352      1. Before reload (register elimination), the stores related to
353 	outgoing arguments are stack pointer based and thus deemed
354 	of non-constant base in this pass.  This requires special
355 	handling but also means that the frame pointer based stores
356 	need not be killed upon encountering a const function call.
357      2. After reload, the stores related to outgoing arguments can be
358 	either stack pointer or hard frame pointer based.  This means
359 	that we have no other choice than also killing all the frame
360 	pointer based stores upon encountering a const function call.
361      This field is set after reload for const function calls.  Having
362      this set is less severe than a wild read, it just means that all
363      the frame related stores are killed rather than all the stores.  */
364   bool frame_read;
365 
366   /* This field is only used for the processing of const functions.
367      It is set if the insn may contain a stack pointer based store.  */
368   bool stack_pointer_based;
369 
370   /* This is true if any of the sets within the store contains a
371      cselib base.  Such stores can only be deleted by the local
372      algorithm.  */
373   bool contains_cselib_groups;
374 
375   /* The insn. */
376   rtx insn;
377 
378   /* The list of mem sets or mem clobbers that are contained in this
379      insn.  If the insn is deletable, it contains only one mem set.
380      But it could also contain clobbers.  Insns that contain more than
381      one mem set are not deletable, but each of those mems is here in
382      order to provide info to delete other insns.  */
383   store_info_t store_rec;
384 
385   /* The linked list of mem uses in this insn.  Only the reads from
386      rtx bases are listed here.  The reads from cselib bases are
387      completely processed during the first scan and so are never
388      created.  */
389   read_info_t read_rec;
390 
391   /* The live fixed registers.  We assume only fixed registers can
392      cause trouble by being clobbered from an expanded pattern;
393      storing only the live fixed registers (rather than all registers)
394      means less memory needs to be allocated / copied for the individual
395      stores.  */
396   regset fixed_regs_live;
397 
398   /* The prev insn in the basic block.  */
399   struct insn_info * prev_insn;
400 
401   /* The linked list of insns that are in consideration for removal in
402      the forwards pass through the basic block.  This pointer may be
403      trash as it is not cleared when a wild read occurs.  The only
404      time it is guaranteed to be correct is when the traversal starts
405      at active_local_stores.  */
406   struct insn_info * next_local_store;
407 };
408 
409 typedef struct insn_info *insn_info_t;
410 static alloc_pool insn_info_pool;
411 
412 /* The linked list of stores that are under consideration in this
413    basic block.  */
414 static insn_info_t active_local_stores;
415 static int active_local_stores_len;
416 
417 struct bb_info
418 {
419 
420   /* Pointer to the insn info for the last insn in the block.  These
421      are linked so this is how all of the insns are reached.  During
422      scanning this is the current insn being scanned.  */
423   insn_info_t last_insn;
424 
425   /* The info for the global dataflow problem.  */
426 
427 
428   /* This is set if the transfer function should AND in the wild_read
429      bitmap before applying the kill and gen sets.  That vector knocks
430      out most of the bits in the bitmap and thus speeds up the
431      operations.  */
432   bool apply_wild_read;
433 
434   /* The following 4 bitvectors hold information about which positions
435      of which stores are live or dead.  They are indexed by
436      get_bitmap_index.  */
437 
438   /* The set of store positions that exist in this block before a wild read.  */
439   bitmap gen;
440 
441   /* The set of load positions that exist in this block above the
442      same position of a store.  */
443   bitmap kill;
444 
445   /* The set of stores that reach the top of the block without being
446      killed by a read.
447 
448      Do not represent the in set if it is all ones.  Note that this is
449      what the bitvector should logically be initialized to for a set
450      intersection problem.  However, like the kill set, this is too
451      expensive.  So initially, the in set will only be created for the
452      exit block and any block that contains a wild read.  */
453   bitmap in;
454 
455   /* The set of stores that reach the bottom of the block from its
456      successors.
457 
458      Do not represent the out set if it is all ones.  Note that this is
459      what the bitvector should logically be initialized to for a set
460      intersection problem.  However, like the kill and in set, this is
461      too expensive.  So what is done is that the confluence operator
462      just initializes the vector from one of the out sets of the
463      successors of the block.  */
464   bitmap out;
465 
466   /* The following bitvector is indexed by the reg number.  It
467      contains the set of regs that are live at the current instruction
468      being processed.  While it contains info for all of the
469      registers, only the hard registers are actually examined.  It is used
470      to assure that shift and/or add sequences that are inserted do not
471      accidentally clobber live hard regs.  */
472   bitmap regs_live;
473 };
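
/* A sketch of the propagation these fields feed (the real code lives
   in the dataflow steps later in this file and also copes with the
   missing-bitmap encodings described above):

     in(bb)  = gen(bb) | (out(bb) & ~kill(bb))
     out(bb) = intersection of in(s) over all successors s

   where a missing IN or OUT bitmap stands for the all-ones vector and
   APPLY_WILD_READ means the incoming OUT is first ANDed with a
   mostly-zero mask before GEN and KILL are applied.  */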
474 
475 typedef struct bb_info *bb_info_t;
476 static alloc_pool bb_info_pool;
477 
478 /* Table to hold all bb_infos.  */
479 static bb_info_t *bb_table;
480 
481 /* There is a group_info for each rtx base that is used to reference
482    memory.  There are not many of these rtx bases because they are
483    very limited in scope.  */
484 
485 struct group_info
486 {
487   /* The actual base of the address.  */
488   rtx rtx_base;
489 
490   /* The sequential id of the base.  This allows us to have a
491      canonical ordering of these that is not based on addresses.  */
492   int id;
493 
494   /* True if there are any positions that are to be processed
495      globally.  */
496   bool process_globally;
497 
498   /* True if the base of this group is either the frame_pointer or
499      hard_frame_pointer.  */
500   bool frame_related;
501 
502   /* A mem wrapped around the base pointer for the group in order to do
503      read dependency.  It must be given BLKmode in order to encompass all
504      the possible offsets from the base.  */
505   rtx base_mem;
506 
507   /* Canonized version of base_mem's address.  */
508   rtx canon_base_addr;
509 
510   /* These two sets of two bitmaps are used to keep track of how many
511      stores are actually referencing that position from this base.  We
512      only do this for rtx bases as this will be used to assign
513      positions in the bitmaps for the global problem.  Bit N is set in
514      store1 on the first store for offset N.  Bit N is set in store2
515      for the second store to offset N.  This is all we need since we
516      only care about offsets that have two or more stores for them.
517 
518      The "_n" suffix is for offsets less than 0 and the "_p" suffix is
519      for 0 and greater offsets.
520 
521      There is one special case here: for stores into the stack frame,
522      we will OR store1 into store2 before deciding which stores to look
523      at globally.  This is because stores to the stack frame that have
524      no other reads before the end of the function can also be
525      deleted.  */
526   bitmap store1_n, store1_p, store2_n, store2_p;
527 
528   /* These bitmaps keep track of which offsets in this group escape
529      this function.  An offset escapes if it corresponds to a named
530      variable whose addressable flag is set.  */
531   bitmap escaped_n, escaped_p;
532 
533   /* The positions in this bitmap have the same assignments as the in,
534      out, gen and kill bitmaps.  This bitmap is all zeros except for
535      the positions that are occupied by stores for this group.  */
536   bitmap group_kill;
537 
538   /* The offset_map is used to map the offsets from this base into
539      positions in the global bitmaps.  It is only created after all of
540      the stores have been scanned and we know which ones we
541      care about.  */
542   int *offset_map_n, *offset_map_p;
543   int offset_map_size_n, offset_map_size_p;
544 };
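
/* An illustrative trace of the store1/store2 protocol for a non-frame
   group G:

     G[4] = x;   sets bit 4 in store1_p
     G[4] = y;   bit 4 already set, so bit 4 goes into store2_p
     G[8] = z;   sets bit 8 in store1_p only

   Only offset 4 becomes a candidate for the global problem, because
   deleting a store to a constant address needs a second store covering
   the same bytes.  If G were frame related, store1 would be ORed into
   store2 first, so offset 8 would participate as well.  */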
545 typedef struct group_info *group_info_t;
546 typedef const struct group_info *const_group_info_t;
547 static alloc_pool rtx_group_info_pool;
548 
549 /* Index into the rtx_group_vec.  */
550 static int rtx_group_next_id;
551 
552 
553 static vec<group_info_t> rtx_group_vec;
554 
555 
556 /* This structure holds the set of changes that are being deferred
557    when removing a read operation.  See replace_read.  */
558 struct deferred_change
559 {
560 
561   /* The mem that is being replaced.  */
562   rtx *loc;
563 
564   /* The reg it is being replaced with.  */
565   rtx reg;
566 
567   struct deferred_change *next;
568 };
569 
570 typedef struct deferred_change *deferred_change_t;
571 static alloc_pool deferred_change_pool;
572 
573 static deferred_change_t deferred_change_list = NULL;
574 
575 /* These are used to hold the alias sets of spill variables.  Since
576    these are never aliased and there may be a lot of them, it makes
577    sense to treat them specially.  This bitvector is only allocated in
578    calls from dse_record_singleton_alias_set which currently is only
579    made during reload1.  So when dse is called before reload this
580    mechanism does nothing.  */
581 
582 static bitmap clear_alias_sets = NULL;
583 
584 /* The set of clear_alias_sets that have been disqualified because
585    there are loads or stores using a different mode than the alias set
586    was registered with.  */
587 static bitmap disqualified_clear_alias_sets = NULL;
588 
589 /* The group that holds all of the clear_alias_sets.  */
590 static group_info_t clear_alias_group;
591 
592 /* The modes of the clear_alias_sets.  */
593 static htab_t clear_alias_mode_table;
594 
595 /* Hash table element to look up the mode for an alias set.  */
596 struct clear_alias_mode_holder
597 {
598   alias_set_type alias_set;
599   enum machine_mode mode;
600 };
601 
602 static alloc_pool clear_alias_mode_pool;
603 
604 /* This is true except if cfun->stdarg -- i.e. we cannot do
605    this for vararg functions because they play games with the frame.  */
606 static bool stores_off_frame_dead_at_return;
607 
608 /* Counters for stats.  */
609 static int globally_deleted;
610 static int locally_deleted;
611 static int spill_deleted;
612 
613 static bitmap all_blocks;
614 
615 /* Locations that are killed by calls in the global phase.  */
616 static bitmap kill_on_calls;
617 
618 /* The number of bits used in the global bitmaps.  */
619 static unsigned int current_position;
620 
621 
622 static bool gate_dse1 (void);
623 static bool gate_dse2 (void);
624 
625 
626 /*----------------------------------------------------------------------------
627    Zeroth step.
628 
629    Initialization.
630 ----------------------------------------------------------------------------*/
631 
632 
633 /* Find the entry associated with ALIAS_SET.  */
634 
635 static struct clear_alias_mode_holder *
636 clear_alias_set_lookup (alias_set_type alias_set)
637 {
638   struct clear_alias_mode_holder tmp_holder;
639   void **slot;
640 
641   tmp_holder.alias_set = alias_set;
642   slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
643   gcc_assert (*slot);
644 
645   return (struct clear_alias_mode_holder *) *slot;
646 }
647 
648 
649 /* Hashtable callbacks for maintaining the "bases" field of
650    group_info, given that the addresses are function invariants.  */
651 
652 struct invariant_group_base_hasher : typed_noop_remove <group_info>
653 {
654   typedef group_info value_type;
655   typedef group_info compare_type;
656   static inline hashval_t hash (const value_type *);
657   static inline bool equal (const value_type *, const compare_type *);
658 };
659 
660 inline bool
661 invariant_group_base_hasher::equal (const value_type *gi1,
662 				    const compare_type *gi2)
663 {
664   return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
665 }
666 
667 inline hashval_t
668 invariant_group_base_hasher::hash (const value_type *gi)
669 {
670   int do_not_record;
671   return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
672 }
673 
674 /* Tables of group_info structures, hashed by base value.  */
675 static hash_table <invariant_group_base_hasher> rtx_group_table;
676 
677 
678 /* Get the GROUP for BASE.  Add a new group if it is not there.  */
679 
680 static group_info_t
681 get_group_info (rtx base)
682 {
683   struct group_info tmp_gi;
684   group_info_t gi;
685   group_info **slot;
686 
687   if (base)
688     {
689       /* Find the store_base_info structure for BASE, creating a new one
690 	 if necessary.  */
691       tmp_gi.rtx_base = base;
692       slot = rtx_group_table.find_slot (&tmp_gi, INSERT);
693       gi = (group_info_t) *slot;
694     }
695   else
696     {
697       if (!clear_alias_group)
698 	{
699 	  clear_alias_group = gi =
700 	    (group_info_t) pool_alloc (rtx_group_info_pool);
701 	  memset (gi, 0, sizeof (struct group_info));
702 	  gi->id = rtx_group_next_id++;
703 	  gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
704 	  gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
705 	  gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
706 	  gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
707 	  gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
708 	  gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
709 	  gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
710 	  gi->process_globally = false;
711 	  gi->offset_map_size_n = 0;
712 	  gi->offset_map_size_p = 0;
713 	  gi->offset_map_n = NULL;
714 	  gi->offset_map_p = NULL;
715 	  rtx_group_vec.safe_push (gi);
716 	}
717       return clear_alias_group;
718     }
719 
720   if (gi == NULL)
721     {
722       *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
723       gi->rtx_base = base;
724       gi->id = rtx_group_next_id++;
725       gi->base_mem = gen_rtx_MEM (BLKmode, base);
726       gi->canon_base_addr = canon_rtx (base);
727       gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
728       gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
729       gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
730       gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
731       gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
732       gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
733       gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
734       gi->process_globally = false;
735       gi->frame_related =
736 	(base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
737       gi->offset_map_size_n = 0;
738       gi->offset_map_size_p = 0;
739       gi->offset_map_n = NULL;
740       gi->offset_map_p = NULL;
741       rtx_group_vec.safe_push (gi);
742     }
743 
744   return gi;
745 }
746 
747 
748 /* Initialization of data structures.  */
749 
750 static void
751 dse_step0 (void)
752 {
753   locally_deleted = 0;
754   globally_deleted = 0;
755   spill_deleted = 0;
756 
757   bitmap_obstack_initialize (&dse_bitmap_obstack);
758   gcc_obstack_init (&dse_obstack);
759 
760   scratch = BITMAP_ALLOC (&reg_obstack);
761   kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
762 
763   rtx_store_info_pool
764     = create_alloc_pool ("rtx_store_info_pool",
765 			 sizeof (struct store_info), 100);
766   read_info_pool
767     = create_alloc_pool ("read_info_pool",
768 			 sizeof (struct read_info), 100);
769   insn_info_pool
770     = create_alloc_pool ("insn_info_pool",
771 			 sizeof (struct insn_info), 100);
772   bb_info_pool
773     = create_alloc_pool ("bb_info_pool",
774 			 sizeof (struct bb_info), 100);
775   rtx_group_info_pool
776     = create_alloc_pool ("rtx_group_info_pool",
777 			 sizeof (struct group_info), 100);
778   deferred_change_pool
779     = create_alloc_pool ("deferred_change_pool",
780 			 sizeof (struct deferred_change), 10);
781 
782   rtx_group_table.create (11);
783 
784   bb_table = XNEWVEC (bb_info_t, last_basic_block);
785   rtx_group_next_id = 0;
786 
787   stores_off_frame_dead_at_return = !cfun->stdarg;
788 
789   init_alias_analysis ();
790 
791   if (clear_alias_sets)
792     clear_alias_group = get_group_info (NULL);
793   else
794     clear_alias_group = NULL;
795 }
796 
797 
798 
799 /*----------------------------------------------------------------------------
800    First step.
801 
802    Scan all of the insns.  Any random ordering of the blocks is fine.
803    Each block is scanned in forward order to accommodate cselib which
804    is used to remove stores with non-constant bases.
805 ----------------------------------------------------------------------------*/
806 
807 /* Delete all of the store_info recs from INSN_INFO.  */
808 
809 static void
810 free_store_info (insn_info_t insn_info)
811 {
812   store_info_t store_info = insn_info->store_rec;
813   while (store_info)
814     {
815       store_info_t next = store_info->next;
816       if (store_info->is_large)
817 	BITMAP_FREE (store_info->positions_needed.large.bmap);
818       if (store_info->cse_base)
819 	pool_free (cse_store_info_pool, store_info);
820       else
821 	pool_free (rtx_store_info_pool, store_info);
822       store_info = next;
823     }
824 
825   insn_info->cannot_delete = true;
826   insn_info->contains_cselib_groups = false;
827   insn_info->store_rec = NULL;
828 }
829 
830 typedef struct
831 {
832   rtx first, current;
833   regset fixed_regs_live;
834   bool failure;
835 } note_add_store_info;
836 
837 /* Callback for emit_inc_dec_insn_before via note_stores.
838    Check if a register is clobbered which is live afterwards.  */
839 
840 static void
841 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
842 {
843   rtx insn;
844   note_add_store_info *info = (note_add_store_info *) data;
845   int r, n;
846 
847   if (!REG_P (loc))
848     return;
849 
850   /* If this register is referenced by the current or an earlier insn,
851      that's OK.  E.g. this applies to the register that is being incremented
852      with this addition.  */
853   for (insn = info->first;
854        insn != NEXT_INSN (info->current);
855        insn = NEXT_INSN (insn))
856     if (reg_referenced_p (loc, PATTERN (insn)))
857       return;
858 
859   /* If we come here, we have a clobber of a register that's only OK
860      if that register is not live.  If we don't have liveness information
861      available, fail now.  */
862   if (!info->fixed_regs_live)
863     {
864       info->failure = true;
865       return;
866     }
867   /* Now check if this is a live fixed register.  */
868   r = REGNO (loc);
869   n = hard_regno_nregs[r][GET_MODE (loc)];
870   while (--n >= 0)
871     if (REGNO_REG_SET_P (info->fixed_regs_live, r+n))
872       info->failure = true;
873 }
874 
875 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
876    SRC + SRCOFF before insn ARG.  */
877 
878 static int
879 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
880 			  rtx op ATTRIBUTE_UNUSED,
881 			  rtx dest, rtx src, rtx srcoff, void *arg)
882 {
883   insn_info_t insn_info = (insn_info_t) arg;
884   rtx insn = insn_info->insn, new_insn, cur;
885   note_add_store_info info;
886 
887   /* We can reuse all operands without copying, because we are about
888      to delete the insn that contained them.  */
889   if (srcoff)
890     {
891       start_sequence ();
892       emit_insn (gen_add3_insn (dest, src, srcoff));
893       new_insn = get_insns ();
894       end_sequence ();
895     }
896   else
897     new_insn = gen_move_insn (dest, src);
898   info.first = new_insn;
899   info.fixed_regs_live = insn_info->fixed_regs_live;
900   info.failure = false;
901   for (cur = new_insn; cur; cur = NEXT_INSN (cur))
902     {
903       info.current = cur;
904       note_stores (PATTERN (cur), note_add_store, &info);
905     }
906 
907   /* If a failure was flagged above, return 1 so that for_each_inc_dec will
908      return it immediately, communicating the failure to its caller.  */
909   if (info.failure)
910     return 1;
911 
912   emit_insn_before (new_insn, insn);
913 
914   return -1;
915 }
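
/* For example (illustrative RTL), deleting a dead store such as

     (set (mem:SI (post_inc:P (reg:P sp))) (reg:SI r1))

   must not lose the side effect on sp, so the callback above builds a
   compensating insn along the lines of

     (set (reg:P sp) (plus:P (reg:P sp) (const_int 4)))

   and emits it before the doomed store, unless note_add_store detects
   that doing so would clobber a live fixed register.  */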
916 
917 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
918    is there, is split into a separate insn.
919    Return true on success (or if there was nothing to do), false on failure.  */
920 
921 static bool
922 check_for_inc_dec_1 (insn_info_t insn_info)
923 {
924   rtx insn = insn_info->insn;
925   rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
926   if (note)
927     return for_each_inc_dec (&insn, emit_inc_dec_insn_before, insn_info) == 0;
928   return true;
929 }
930 
931 
932 /* Entry point for postreload.  If you work on reload_cse, or you need this
933    anywhere else, consider if you can provide register liveness information
934    and add a parameter to this function so that it can be passed down in
935    insn_info.fixed_regs_live.  */
936 bool
937 check_for_inc_dec (rtx insn)
938 {
939   struct insn_info insn_info;
940   rtx note;
941 
942   insn_info.insn = insn;
943   insn_info.fixed_regs_live = NULL;
944   note = find_reg_note (insn, REG_INC, NULL_RTX);
945   if (note)
946     return for_each_inc_dec (&insn, emit_inc_dec_insn_before, &insn_info) == 0;
947   return true;
948 }
949 
950 /* Delete the insn and free all of the fields inside INSN_INFO.  */
951 
952 static void
953 delete_dead_store_insn (insn_info_t insn_info)
954 {
955   read_info_t read_info;
956 
957   if (!dbg_cnt (dse))
958     return;
959 
960   if (!check_for_inc_dec_1 (insn_info))
961     return;
962   if (dump_file && (dump_flags & TDF_DETAILS))
963     {
964       fprintf (dump_file, "Locally deleting insn %d ",
965 	       INSN_UID (insn_info->insn));
966       if (insn_info->store_rec->alias_set)
967 	fprintf (dump_file, "alias set %d\n",
968 		 (int) insn_info->store_rec->alias_set);
969       else
970 	fprintf (dump_file, "\n");
971     }
972 
973   free_store_info (insn_info);
974   read_info = insn_info->read_rec;
975 
976   while (read_info)
977     {
978       read_info_t next = read_info->next;
979       pool_free (read_info_pool, read_info);
980       read_info = next;
981     }
982   insn_info->read_rec = NULL;
983 
984   delete_insn (insn_info->insn);
985   locally_deleted++;
986   insn_info->insn = NULL;
987 
988   insn_info->wild_read = false;
989 }
990 
991 /* Return whether DECL, a local variable, can possibly escape the current
992    function scope.  */
993 
994 static bool
995 local_variable_can_escape (tree decl)
996 {
997   if (TREE_ADDRESSABLE (decl))
998     return true;
999 
1000   /* If this is a partitioned variable, we need to consider all the variables
1001      in the partition.  This is necessary because a store into one of them can
1002      be replaced with a store into another and this may not change the outcome
1003      of the escape analysis.  */
1004   if (cfun->gimple_df->decls_to_pointers != NULL)
1005     {
1006       void *namep
1007 	= pointer_map_contains (cfun->gimple_df->decls_to_pointers, decl);
1008       if (namep)
1009 	return TREE_ADDRESSABLE (*(tree *)namep);
1010     }
1011 
1012   return false;
1013 }
1014 
1015 /* Return whether EXPR can possibly escape the current function scope.  */
1016 
1017 static bool
1018 can_escape (tree expr)
1019 {
1020   tree base;
1021   if (!expr)
1022     return true;
1023   base = get_base_address (expr);
1024   if (DECL_P (base)
1025       && !may_be_aliased (base)
1026       && !(TREE_CODE (base) == VAR_DECL
1027 	   && !DECL_EXTERNAL (base)
1028 	   && !TREE_STATIC (base)
1029 	   && local_variable_can_escape (base)))
1030     return false;
1031   return true;
1032 }
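
/* For instance, given the hypothetical source

     void f (void) { int a, b; g (&a); a = 1; b = 2; }

   a is TREE_ADDRESSABLE because its address is taken, so can_escape
   returns true for stores to it and their positions are recorded in
   the escaped_n/escaped_p bitmaps, which the global phase consults
   when killing stores across calls; b cannot escape, so stores to it
   survive that kill.  */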
1033 
1034 /* Set the store* bitmaps and offset_map_size* fields in GROUP based on
1035    OFFSET and WIDTH.  */
1036 
1037 static void
1038 set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1039                 tree expr)
1040 {
1041   HOST_WIDE_INT i;
1042   bool expr_escapes = can_escape (expr);
1043   if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
1044     for (i=offset; i<offset+width; i++)
1045       {
1046 	bitmap store1;
1047 	bitmap store2;
1048         bitmap escaped;
1049 	int ai;
1050 	if (i < 0)
1051 	  {
1052 	    store1 = group->store1_n;
1053 	    store2 = group->store2_n;
1054 	    escaped = group->escaped_n;
1055 	    ai = -i;
1056 	  }
1057 	else
1058 	  {
1059 	    store1 = group->store1_p;
1060 	    store2 = group->store2_p;
1061 	    escaped = group->escaped_p;
1062 	    ai = i;
1063 	  }
1064 
1065 	if (!bitmap_set_bit (store1, ai))
1066 	  bitmap_set_bit (store2, ai);
1067 	else
1068 	  {
1069 	    if (i < 0)
1070 	      {
1071 		if (group->offset_map_size_n < ai)
1072 		  group->offset_map_size_n = ai;
1073 	      }
1074 	    else
1075 	      {
1076 		if (group->offset_map_size_p < ai)
1077 		  group->offset_map_size_p = ai;
1078 	      }
1079 	  }
1080         if (expr_escapes)
1081           bitmap_set_bit (escaped, ai);
1082       }
1083 }
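
/* A short illustrative trace: set_usage_bits (group, -2, 4, expr)
   walks bytes -2, -1, 0 and 1, setting bits 2 and 1 in store1_n and
   bits 0 and 1 in store1_p the first time around; a second store over
   the same bytes would move those bits into store2_n/store2_p and so
   make them eligible for the global problem.  */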
1084 
1085 static void
1086 reset_active_stores (void)
1087 {
1088   active_local_stores = NULL;
1089   active_local_stores_len = 0;
1090 }
1091 
1092 /* Free all READ_REC of the LAST_INSN of BB_INFO.  */
1093 
1094 static void
1095 free_read_records (bb_info_t bb_info)
1096 {
1097   insn_info_t insn_info = bb_info->last_insn;
1098   read_info_t *ptr = &insn_info->read_rec;
1099   while (*ptr)
1100     {
1101       read_info_t next = (*ptr)->next;
1102       if ((*ptr)->alias_set == 0)
1103         {
1104           pool_free (read_info_pool, *ptr);
1105           *ptr = next;
1106         }
1107       else
1108         ptr = &(*ptr)->next;
1109     }
1110 }
1111 
1112 /* Set the BB_INFO so that the last insn is marked as a wild read.  */
1113 
1114 static void
1115 add_wild_read (bb_info_t bb_info)
1116 {
1117   insn_info_t insn_info = bb_info->last_insn;
1118   insn_info->wild_read = true;
1119   free_read_records (bb_info);
1120   reset_active_stores ();
1121 }
1122 
1123 /* Set the BB_INFO so that the last insn is marked as a wild read of
1124    non-frame locations.  */
1125 
1126 static void
1127 add_non_frame_wild_read (bb_info_t bb_info)
1128 {
1129   insn_info_t insn_info = bb_info->last_insn;
1130   insn_info->non_frame_wild_read = true;
1131   free_read_records (bb_info);
1132   reset_active_stores ();
1133 }
1134 
1135 /* Return true if X is a constant or one of the registers that behave
1136    as a constant over the life of a function.  This is equivalent to
1137    !rtx_varies_p for memory addresses.  */
1138 
1139 static bool
1140 const_or_frame_p (rtx x)
1141 {
1142   if (CONSTANT_P (x))
1143     return true;
1144 
1145   if (GET_CODE (x) == REG)
1146     {
1147       /* Note that we have to test for the actual rtx used for the frame
1148 	 and arg pointers and not just the register number in case we have
1149 	 eliminated the frame and/or arg pointer and are using it
1150 	 for pseudos.  */
1151       if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1152 	  /* The arg pointer varies if it is not a fixed register.  */
1153 	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1154 	  || x == pic_offset_table_rtx)
1155 	return true;
1156       return false;
1157     }
1158 
1159   return false;
1160 }
1161 
1162 /* Take all reasonable action to put the address of MEM into the form
1163    that we can do analysis on.
1164 
1165    The gold standard is to get the address into the form: address +
1166    OFFSET where address is something that rtx_varies_p considers a
1167    constant.  When we can get the address in this form, we can do
1168    global analysis on it.  Note that for constant bases, address is
1169    not actually returned, only the group_id.  The address can be
1170    obtained from that.
1171 
1172    If that fails, we try cselib to get a value we can at least use
1173    locally.  If that fails we return false.
1174 
1175    The GROUP_ID is set to -1 for cselib bases and the index of the
1176    group for non_varying bases.
1177 
1178    FOR_READ is true if this is a mem read and false if not.  */
1179 
1180 static bool
1181 canon_address (rtx mem,
1182 	       alias_set_type *alias_set_out,
1183 	       int *group_id,
1184 	       HOST_WIDE_INT *offset,
1185 	       cselib_val **base)
1186 {
1187   enum machine_mode address_mode = get_address_mode (mem);
1188   rtx mem_address = XEXP (mem, 0);
1189   rtx expanded_address, address;
1190   int expanded;
1191 
1192   /* Make sure that cselib has initialized all of the operands of
1193      the address before asking it to do the subst.  */
1194 
1195   if (clear_alias_sets)
1196     {
1197       /* If this is a spill, do not do any further processing.  */
1198       alias_set_type alias_set = MEM_ALIAS_SET (mem);
1199       if (dump_file && (dump_flags & TDF_DETAILS))
1200 	fprintf (dump_file, "found alias set %d\n", (int) alias_set);
1201       if (bitmap_bit_p (clear_alias_sets, alias_set))
1202 	{
1203 	  struct clear_alias_mode_holder *entry
1204 	    = clear_alias_set_lookup (alias_set);
1205 
1206 	  /* If the modes do not match, we cannot process this set.  */
1207 	  if (entry->mode != GET_MODE (mem))
1208 	    {
1209 	      if (dump_file && (dump_flags & TDF_DETAILS))
1210 		fprintf (dump_file,
1211 			 "disqualifying alias set %d, (%s) != (%s)\n",
1212 			 (int) alias_set, GET_MODE_NAME (entry->mode),
1213 			 GET_MODE_NAME (GET_MODE (mem)));
1214 
1215 	      bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
1216 	      return false;
1217 	    }
1218 
1219 	  *alias_set_out = alias_set;
1220 	  *group_id = clear_alias_group->id;
1221 	  return true;
1222 	}
1223     }
1224 
1225   *alias_set_out = 0;
1226 
1227   cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1228 
1229   if (dump_file && (dump_flags & TDF_DETAILS))
1230     {
1231       fprintf (dump_file, "  mem: ");
1232       print_inline_rtx (dump_file, mem_address, 0);
1233       fprintf (dump_file, "\n");
1234     }
1235 
1236   /* First see if just canon_rtx (mem_address) is const or frame,
1237      if not, try cselib_expand_value_rtx and call canon_rtx on that.  */
1238   address = NULL_RTX;
1239   for (expanded = 0; expanded < 2; expanded++)
1240     {
1241       if (expanded)
1242 	{
1243 	  /* Use cselib to replace all of the reg references with the full
1244 	     expression.  This will take care of the case where we have
1245 
1246 	     r_x = base + offset;
1247 	     val = *r_x;
1248 
1249 	     by making it into
1250 
1251 	     val = *(base + offset);  */
1252 
1253 	  expanded_address = cselib_expand_value_rtx (mem_address,
1254 						      scratch, 5);
1255 
1256 	  /* If this fails, just go with the address from first
1257 	     iteration.  */
1258 	  if (!expanded_address)
1259 	    break;
1260 	}
1261       else
1262 	expanded_address = mem_address;
1263 
1264       /* Split the address into canonical BASE + OFFSET terms.  */
1265       address = canon_rtx (expanded_address);
1266 
1267       *offset = 0;
1268 
1269       if (dump_file && (dump_flags & TDF_DETAILS))
1270 	{
1271 	  if (expanded)
1272 	    {
1273 	      fprintf (dump_file, "\n   after cselib_expand address: ");
1274 	      print_inline_rtx (dump_file, expanded_address, 0);
1275 	      fprintf (dump_file, "\n");
1276 	    }
1277 
1278 	  fprintf (dump_file, "\n   after canon_rtx address: ");
1279 	  print_inline_rtx (dump_file, address, 0);
1280 	  fprintf (dump_file, "\n");
1281 	}
1282 
1283       if (GET_CODE (address) == CONST)
1284 	address = XEXP (address, 0);
1285 
1286       if (GET_CODE (address) == PLUS
1287 	  && CONST_INT_P (XEXP (address, 1)))
1288 	{
1289 	  *offset = INTVAL (XEXP (address, 1));
1290 	  address = XEXP (address, 0);
1291 	}
1292 
1293       if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1294 	  && const_or_frame_p (address))
1295 	{
1296 	  group_info_t group = get_group_info (address);
1297 
1298 	  if (dump_file && (dump_flags & TDF_DETAILS))
1299 	    fprintf (dump_file, "  gid=%d offset=%d \n",
1300 		     group->id, (int)*offset);
1301 	  *base = NULL;
1302 	  *group_id = group->id;
1303 	  return true;
1304 	}
1305     }
1306 
1307   *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1308   *group_id = -1;
1309 
1310   if (*base == NULL)
1311     {
1312       if (dump_file && (dump_flags & TDF_DETAILS))
1313 	fprintf (dump_file, " no cselib val - should be a wild read.\n");
1314       return false;
1315     }
1316   if (dump_file && (dump_flags & TDF_DETAILS))
1317     fprintf (dump_file, "  varying cselib base=%u:%u offset = %d\n",
1318 	     (*base)->uid, (*base)->hash, (int)*offset);
1319   return true;
1320 }
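
/* Two illustrative outcomes of canon_address: an address such as

     (plus (reg/f:P fp) (const_int -4))

   canonicalizes to the frame pointer's group with *GROUP_ID >= 0 and
   *OFFSET == -4, enabling the global analysis, while an address held
   in a pseudo that cselib cannot expand to an invariant base yields
   *GROUP_ID == -1 and a cselib *BASE, usable only by the local
   technique.  */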
1321 
1322 
1323 /* Clear the rhs field from the active_local_stores array.  */
1324 
1325 static void
1326 clear_rhs_from_active_local_stores (void)
1327 {
1328   insn_info_t ptr = active_local_stores;
1329 
1330   while (ptr)
1331     {
1332       store_info_t store_info = ptr->store_rec;
1333       /* Skip the clobbers.  */
1334       while (!store_info->is_set)
1335 	store_info = store_info->next;
1336 
1337       store_info->rhs = NULL;
1338       store_info->const_rhs = NULL;
1339 
1340       ptr = ptr->next_local_store;
1341     }
1342 }
1343 
1344 
1345 /* Mark the byte POS bytes from the beginning of store S_INFO as unneeded.  */
1346 
1347 static inline void
1348 set_position_unneeded (store_info_t s_info, int pos)
1349 {
1350   if (__builtin_expect (s_info->is_large, false))
1351     {
1352       if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1353 	s_info->positions_needed.large.count++;
1354     }
1355   else
1356     s_info->positions_needed.small_bitmask
1357       &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1358 }
1359 
1360 /* Mark the whole store S_INFO as unneeded.  */
1361 
1362 static inline void
1363 set_all_positions_unneeded (store_info_t s_info)
1364 {
1365   if (__builtin_expect (s_info->is_large, false))
1366     {
1367       int pos, end = s_info->end - s_info->begin;
1368       for (pos = 0; pos < end; pos++)
1369 	bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
1370       s_info->positions_needed.large.count = end;
1371     }
1372   else
1373     s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1374 }
1375 
1376 /* Return TRUE if any bytes from S_INFO store are needed.  */
1377 
1378 static inline bool
1379 any_positions_needed_p (store_info_t s_info)
1380 {
1381   if (__builtin_expect (s_info->is_large, false))
1382     return (s_info->positions_needed.large.count
1383 	    < s_info->end - s_info->begin);
1384   else
1385     return (s_info->positions_needed.small_bitmask
1386 	    != (unsigned HOST_WIDE_INT) 0);
1387 }
1388 
1389 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1390    store are needed.  */
1391 
1392 static inline bool
1393 all_positions_needed_p (store_info_t s_info, int start, int width)
1394 {
1395   if (__builtin_expect (s_info->is_large, false))
1396     {
1397       int end = start + width;
1398       while (start < end)
1399 	if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
1400 	  return false;
1401       return true;
1402     }
1403   else
1404     {
1405       unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1406       return (s_info->positions_needed.small_bitmask & mask) == mask;
1407     }
1408 }
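
/* A worked example of the small-bitmask test above: an 8-byte store
   with BEGIN == 0 is recorded with small_bitmask == lowpart_bitmask (8)
   == 0xff.  all_positions_needed_p (s_info, 2, 4) builds the mask
   lowpart_bitmask (4) << 2 == 0x3c and returns true only while none of
   bytes 2..5 have been marked unneeded.  */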
1409 
1410 
1411 static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT,
1412 			   HOST_WIDE_INT, basic_block, bool);
1413 
1414 
1415 /* BODY is an instruction pattern that belongs to INSN.  Return 1 if
1416    there is a candidate store, after adding it to the appropriate
1417    local store group if so.  */
1418 
1419 static int
1420 record_store (rtx body, bb_info_t bb_info)
1421 {
1422   rtx mem, rhs, const_rhs, mem_addr;
1423   HOST_WIDE_INT offset = 0;
1424   HOST_WIDE_INT width = 0;
1425   alias_set_type spill_alias_set;
1426   insn_info_t insn_info = bb_info->last_insn;
1427   store_info_t store_info = NULL;
1428   int group_id;
1429   cselib_val *base = NULL;
1430   insn_info_t ptr, last, redundant_reason;
1431   bool store_is_unused;
1432 
1433   if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1434     return 0;
1435 
1436   mem = SET_DEST (body);
1437 
1438   /* If this is not used, then this cannot be used to keep the insn
1439      from being deleted.  On the other hand, it does provide something
1440      that can be used to prove that another store is dead.  */
1441   store_is_unused
1442     = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1443 
1444   /* Check whether that value is a suitable memory location.  */
1445   if (!MEM_P (mem))
1446     {
1447       /* If the set or clobber is unused, then it does not affect our
1448 	 ability to get rid of the entire insn.  */
1449       if (!store_is_unused)
1450 	insn_info->cannot_delete = true;
1451       return 0;
1452     }
1453 
1454   /* At this point we know mem is a mem. */
1455   if (GET_MODE (mem) == BLKmode)
1456     {
1457       if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1458 	{
1459 	  if (dump_file && (dump_flags & TDF_DETAILS))
1460 	    fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch)))\n");
1461 	  add_wild_read (bb_info);
1462 	  insn_info->cannot_delete = true;
1463 	  return 0;
1464 	}
1465       /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1466 	 as memset (addr, 0, 36);  */
1467       else if (!MEM_SIZE_KNOWN_P (mem)
1468 	       || MEM_SIZE (mem) <= 0
1469 	       || MEM_SIZE (mem) > MAX_OFFSET
1470 	       || GET_CODE (body) != SET
1471 	       || !CONST_INT_P (SET_SRC (body)))
1472 	{
1473 	  if (!store_is_unused)
1474 	    {
1475 	      /* If the set or clobber is unused, then it does not affect our
1476 		 ability to get rid of the entire insn.  */
1477 	      insn_info->cannot_delete = true;
1478 	      clear_rhs_from_active_local_stores ();
1479 	    }
1480 	  return 0;
1481 	}
1482     }
1483 
1484   /* We can still process a volatile mem, we just cannot delete it.  */
1485   if (MEM_VOLATILE_P (mem))
1486     insn_info->cannot_delete = true;
1487 
1488   if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1489     {
1490       clear_rhs_from_active_local_stores ();
1491       return 0;
1492     }
1493 
1494   if (GET_MODE (mem) == BLKmode)
1495     width = MEM_SIZE (mem);
1496   else
1497     width = GET_MODE_SIZE (GET_MODE (mem));
1498 
1499   if (spill_alias_set)
1500     {
1501       bitmap store1 = clear_alias_group->store1_p;
1502       bitmap store2 = clear_alias_group->store2_p;
1503 
1504       gcc_assert (GET_MODE (mem) != BLKmode);
1505 
1506       if (!bitmap_set_bit (store1, spill_alias_set))
1507 	bitmap_set_bit (store2, spill_alias_set);
1508 
1509       if (clear_alias_group->offset_map_size_p < spill_alias_set)
1510 	clear_alias_group->offset_map_size_p = spill_alias_set;
1511 
1512       store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
1513 
1514       if (dump_file && (dump_flags & TDF_DETAILS))
1515 	fprintf (dump_file, " processing spill store %d(%s)\n",
1516 		 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1517     }
1518   else if (group_id >= 0)
1519     {
1520       /* In the restrictive case where the base is a constant or the
1521 	 frame pointer we can do global analysis.  */
1522 
1523       group_info_t group
1524 	= rtx_group_vec[group_id];
1525       tree expr = MEM_EXPR (mem);
1526 
1527       store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
1528       set_usage_bits (group, offset, width, expr);
1529 
1530       if (dump_file && (dump_flags & TDF_DETAILS))
1531 	fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1532 		 group_id, (int)offset, (int)(offset+width));
1533     }
1534   else
1535     {
1536       if (may_be_sp_based_p (XEXP (mem, 0)))
1537 	insn_info->stack_pointer_based = true;
1538       insn_info->contains_cselib_groups = true;
1539 
1540       store_info = (store_info_t) pool_alloc (cse_store_info_pool);
1541       group_id = -1;
1542 
1543       if (dump_file && (dump_flags & TDF_DETAILS))
1544 	fprintf (dump_file, " processing cselib store [%d..%d)\n",
1545 		 (int)offset, (int)(offset+width));
1546     }
1547 
1548   const_rhs = rhs = NULL_RTX;
1549   if (GET_CODE (body) == SET
1550       /* No place to keep the value after ra.  */
1551       && !reload_completed
1552       && (REG_P (SET_SRC (body))
1553 	  || GET_CODE (SET_SRC (body)) == SUBREG
1554 	  || CONSTANT_P (SET_SRC (body)))
1555       && !MEM_VOLATILE_P (mem)
1556       /* Sometimes the store and reload are used for truncation and
1557 	 rounding.  */
1558       && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1559     {
1560       rhs = SET_SRC (body);
1561       if (CONSTANT_P (rhs))
1562 	const_rhs = rhs;
1563       else if (body == PATTERN (insn_info->insn))
1564 	{
1565 	  rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1566 	  if (tem && CONSTANT_P (XEXP (tem, 0)))
1567 	    const_rhs = XEXP (tem, 0);
1568 	}
1569       if (const_rhs == NULL_RTX && REG_P (rhs))
1570 	{
1571 	  rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1572 
1573 	  if (tem && CONSTANT_P (tem))
1574 	    const_rhs = tem;
1575 	}
1576     }
1577 
1578   /* Check to see if this store causes some other stores to be
1579      dead.  */
1580   ptr = active_local_stores;
1581   last = NULL;
1582   redundant_reason = NULL;
1583   mem = canon_rtx (mem);
1584   /* For alias_set != 0, canon_true_dependence should never be called.  */
1585   if (spill_alias_set)
1586     mem_addr = NULL_RTX;
1587   else
1588     {
1589       if (group_id < 0)
1590 	mem_addr = base->val_rtx;
1591       else
1592 	{
1593 	  group_info_t group
1594 	    = rtx_group_vec[group_id];
1595 	  mem_addr = group->canon_base_addr;
1596 	}
1597       /* get_addr can only handle VALUE, not an expr like
1598 	 VALUE + OFFSET, so call get_addr to get the original addr for
1599 	 mem_addr before plus_constant.  */
1600       mem_addr = get_addr (mem_addr);
1601       if (offset)
1602 	mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1603     }
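  /* Illustrative example: if MEM is (mem:SI (plus (value V) (const_int 4))),
     canon_address returns base V and offset 4.  get_addr (V) recovers the
     address the VALUE stands for, and plus_constant then rebuilds the full
     address, so the canon_true_dependence calls below see a real address
     rather than a bare VALUE + OFFSET expression.  */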
1604 
1605   while (ptr)
1606     {
1607       insn_info_t next = ptr->next_local_store;
1608       store_info_t s_info = ptr->store_rec;
1609       bool del = true;
1610 
1611       /* Skip the clobbers.  We delete the active insn if this insn
1612 	 shadows the set.  To have been put on the active list, it
1613 	 has exactly one set.  */
1614       while (!s_info->is_set)
1615 	s_info = s_info->next;
1616 
1617       if (s_info->alias_set != spill_alias_set)
1618 	del = false;
1619       else if (s_info->alias_set)
1620 	{
1621 	  struct clear_alias_mode_holder *entry
1622 	    = clear_alias_set_lookup (s_info->alias_set);
1623 	  /* Generally, spills cannot be processed if any of the
1624 	     references to the slot have a different mode.  But if
1625 	     we are in the same block and the mode is exactly the same
1626 	     between this store and one before it in the same block,
1627 	     we can still delete it.  */
1628 	  if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1629 	      && (GET_MODE (mem) == entry->mode))
1630 	    {
1631 	      del = true;
1632 	      set_all_positions_unneeded (s_info);
1633 	    }
1634 	  if (dump_file && (dump_flags & TDF_DETAILS))
1635 	    fprintf (dump_file, "    trying spill store in insn=%d alias_set=%d\n",
1636 		     INSN_UID (ptr->insn), (int) s_info->alias_set);
1637 	}
1638       else if ((s_info->group_id == group_id)
1639 	       && (s_info->cse_base == base))
1640 	{
1641 	  HOST_WIDE_INT i;
1642 	  if (dump_file && (dump_flags & TDF_DETAILS))
1643 	    fprintf (dump_file, "    trying store in insn=%d gid=%d[%d..%d)\n",
1644 		     INSN_UID (ptr->insn), s_info->group_id,
1645 		     (int)s_info->begin, (int)s_info->end);
1646 
1647 	  /* Even if PTR won't be eliminated as unneeded, if both
1648 	     PTR and this insn store the same constant value, we might
1649 	     eliminate this insn instead.  */
1650 	  if (s_info->const_rhs
1651 	      && const_rhs
1652 	      && offset >= s_info->begin
1653 	      && offset + width <= s_info->end
1654 	      && all_positions_needed_p (s_info, offset - s_info->begin,
1655 					 width))
1656 	    {
1657 	      if (GET_MODE (mem) == BLKmode)
1658 		{
1659 		  if (GET_MODE (s_info->mem) == BLKmode
1660 		      && s_info->const_rhs == const_rhs)
1661 		    redundant_reason = ptr;
1662 		}
1663 	      else if (s_info->const_rhs == const0_rtx
1664 		       && const_rhs == const0_rtx)
1665 		redundant_reason = ptr;
1666 	      else
1667 		{
1668 		  rtx val;
1669 		  start_sequence ();
1670 		  val = get_stored_val (s_info, GET_MODE (mem),
1671 					offset, offset + width,
1672 					BLOCK_FOR_INSN (insn_info->insn),
1673 					true);
1674 		  if (get_insns () != NULL)
1675 		    val = NULL_RTX;
1676 		  end_sequence ();
1677 		  if (val && rtx_equal_p (val, const_rhs))
1678 		    redundant_reason = ptr;
1679 		}
1680 	    }
1681 
1682 	  for (i = MAX (offset, s_info->begin);
1683 	       i < offset + width && i < s_info->end;
1684 	       i++)
1685 	    set_position_unneeded (s_info, i - s_info->begin);
1686 	}
1687       else if (s_info->rhs)
1688 	/* Need to see if it is possible for this store to overwrite
1689 	   the value of store_info.  If it is, set the rhs to NULL to
1690 	   keep it from being used to remove a load.  */
1691 	{
1692 	  if (canon_true_dependence (s_info->mem,
1693 				     GET_MODE (s_info->mem),
1694 				     s_info->mem_addr,
1695 				     mem, mem_addr))
1696 	    {
1697 	      s_info->rhs = NULL;
1698 	      s_info->const_rhs = NULL;
1699 	    }
1700 	}
1701 
1702       /* An insn can be deleted if every position of every one of
1703 	 its s_infos is unneeded.  */
1704       if (any_positions_needed_p (s_info))
1705 	del = false;
1706 
1707       if (del)
1708 	{
1709 	  insn_info_t insn_to_delete = ptr;
1710 
1711 	  active_local_stores_len--;
1712 	  if (last)
1713 	    last->next_local_store = ptr->next_local_store;
1714 	  else
1715 	    active_local_stores = ptr->next_local_store;
1716 
1717 	  if (!insn_to_delete->cannot_delete)
1718 	    delete_dead_store_insn (insn_to_delete);
1719 	}
1720       else
1721 	last = ptr;
1722 
1723       ptr = next;
1724     }
1725 
1726   /* Finish filling in the store_info.  */
1727   store_info->next = insn_info->store_rec;
1728   insn_info->store_rec = store_info;
1729   store_info->mem = mem;
1730   store_info->alias_set = spill_alias_set;
1731   store_info->mem_addr = mem_addr;
1732   store_info->cse_base = base;
1733   if (width > HOST_BITS_PER_WIDE_INT)
1734     {
1735       store_info->is_large = true;
1736       store_info->positions_needed.large.count = 0;
1737       store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
1738     }
1739   else
1740     {
1741       store_info->is_large = false;
1742       store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1743     }
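  /* Illustrative example: for a 4-byte store, lowpart_bitmask (4) yields
     0xf, one bit per byte position in [begin, end); bits are cleared as
     later stores make individual bytes unneeded.  Only stores wider than
     HOST_BITS_PER_WIDE_INT bytes need the large bitmap above.  */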
1744   store_info->group_id = group_id;
1745   store_info->begin = offset;
1746   store_info->end = offset + width;
1747   store_info->is_set = GET_CODE (body) == SET;
1748   store_info->rhs = rhs;
1749   store_info->const_rhs = const_rhs;
1750   store_info->redundant_reason = redundant_reason;
1751 
1752   /* If this is a clobber, we return 0.  We will only be able to
1753      delete this insn if there is only one used store, but we
1754      can use the clobber to delete other stores earlier.  */
1755   return store_info->is_set ? 1 : 0;
1756 }
1757 
1758 
1759 static void
1760 dump_insn_info (const char * start, insn_info_t insn_info)
1761 {
1762   fprintf (dump_file, "%s insn=%d %s\n", start,
1763 	   INSN_UID (insn_info->insn),
1764 	   insn_info->store_rec ? "has store" : "naked");
1765 }
1766 
1767 
1768 /* If the modes are different and the value's source and target do not
1769    line up, we need to extract the value from the lower part of the rhs
1770    of the store, shift it, and then put it into a form that can be
1771    shoved into the read_insn.  This function generates a right shift by
1772    SHIFT bits of a value that is at least ACCESS_SIZE bytes wide, read
1773    in READ_MODE.  The shift sequence is returned, or NULL if we failed
1774    to find a shift.  */
1775 
1776 static rtx
1777 find_shift_sequence (int access_size,
1778 		     store_info_t store_info,
1779 		     enum machine_mode read_mode,
1780 		     int shift, bool speed, bool require_cst)
1781 {
1782   enum machine_mode store_mode = GET_MODE (store_info->mem);
1783   enum machine_mode new_mode;
1784   rtx read_reg = NULL;
1785 
1786   /* Some machines like the x86 have shift insns for each size of
1787      operand.  Other machines like the ppc or the ia-64 may only have
1788      shift insns that shift values within 32 or 64 bit registers.
1789      This loop tries to find the smallest shift insn that will right
1790      justify the value we want to read but is available in one insn on
1791      the machine.  */
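  /* For instance (a sketch, depending on the target's available modes):
     with ACCESS_SIZE == 2 and BITS_PER_WORD == 64, the loop below would
     consider HImode, then SImode, then DImode, stopping at the first mode
     whose shift expands to a single cheap insn.  */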
1792 
1793   for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1794 					  MODE_INT);
1795        GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1796        new_mode = GET_MODE_WIDER_MODE (new_mode))
1797     {
1798       rtx target, new_reg, shift_seq, insn, new_lhs;
1799       int cost;
1800 
1801       /* If a constant was stored into memory, try to simplify it here,
1802 	 otherwise the cost of the shift might preclude this optimization
1803 	 e.g. at -Os, even when no actual shift will be needed.  */
1804       if (store_info->const_rhs)
1805 	{
1806 	  unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1807 	  rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1808 				     store_mode, byte);
1809 	  if (ret && CONSTANT_P (ret))
1810 	    {
1811 	      ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1812 						     ret, GEN_INT (shift));
1813 	      if (ret && CONSTANT_P (ret))
1814 		{
1815 		  byte = subreg_lowpart_offset (read_mode, new_mode);
1816 		  ret = simplify_subreg (read_mode, ret, new_mode, byte);
1817 		  if (ret && CONSTANT_P (ret)
1818 		      && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
1819 		    return ret;
1820 		}
1821 	    }
1822 	}
1823 
1824       if (require_cst)
1825 	return NULL_RTX;
1826 
1827       /* Try a wider mode if truncating the store mode to NEW_MODE
1828 	 requires a real instruction.  */
1829       if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1830 	  && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1831 	continue;
1832 
1833       /* Also try a wider mode if the necessary punning is either not
1834 	 desirable or not possible.  */
1835       if (!CONSTANT_P (store_info->rhs)
1836 	  && !MODES_TIEABLE_P (new_mode, store_mode))
1837 	continue;
1838 
1839       new_reg = gen_reg_rtx (new_mode);
1840 
1841       start_sequence ();
1842 
1843       /* In theory we could also check for an ashr.  Ian Taylor knows
1844 	 of one dsp where the cost of these two was not the same.  But
1845 	 this really is a rare case anyway.  */
1846       target = expand_binop (new_mode, lshr_optab, new_reg,
1847 			     GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1848 
1849       shift_seq = get_insns ();
1850       end_sequence ();
1851 
1852       if (target != new_reg || shift_seq == NULL)
1853 	continue;
1854 
1855       cost = 0;
1856       for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1857 	if (INSN_P (insn))
1858 	  cost += insn_rtx_cost (PATTERN (insn), speed);
1859 
1860       /* The computation up to here is essentially independent
1861 	 of the arguments and could be precomputed.  It may
1862 	 not be worth doing so.  We could precompute if
1863 	 worthwhile or at least cache the results.  The result
1864 	 technically depends on both SHIFT and ACCESS_SIZE,
1865 	 but in practice the answer will depend only on ACCESS_SIZE.  */
1866 
1867       if (cost > COSTS_N_INSNS (1))
1868 	continue;
1869 
1870       new_lhs = extract_low_bits (new_mode, store_mode,
1871 				  copy_rtx (store_info->rhs));
1872       if (new_lhs == NULL_RTX)
1873 	continue;
1874 
1875       /* We found an acceptable shift.  Generate a move to
1876 	 take the value from the store and put it into the
1877 	 shift pseudo, then shift it, then generate another
1878 	 move to put it into the target of the read.  */
1879       emit_move_insn (new_reg, new_lhs);
1880       emit_insn (shift_seq);
1881       read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1882       break;
1883     }
1884 
1885   return read_reg;
1886 }
1887 
1888 
1889 /* Call back for note_stores to find the hard regs set or clobbered by
1890    insn.  Data is a bitmap of the hardregs set so far.  */
1891 
1892 static void
1893 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1894 {
1895   bitmap regs_set = (bitmap) data;
1896 
1897   if (REG_P (x)
1898       && HARD_REGISTER_P (x))
1899     {
1900       unsigned int regno = REGNO (x);
1901       bitmap_set_range (regs_set, regno,
1902 			hard_regno_nregs[regno][GET_MODE (x)]);
1903     }
1904 }
1905 
1906 /* Helper function for replace_read and record_store.
1907    Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1908    to one before READ_END bytes read in READ_MODE.  Return NULL
1909    if not successful.  If REQUIRE_CST is true, only return a constant.  */
1910 
1911 static rtx
1912 get_stored_val (store_info_t store_info, enum machine_mode read_mode,
1913 		HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1914 		basic_block bb, bool require_cst)
1915 {
1916   enum machine_mode store_mode = GET_MODE (store_info->mem);
1917   int shift;
1918   int access_size; /* In bytes.  */
1919   rtx read_reg;
1920 
1921   /* To get here the read is within the boundaries of the write so
1922      shift will never be negative.  Start out with the shift being in
1923      bytes.  */
1924   if (store_mode == BLKmode)
1925     shift = 0;
1926   else if (BYTES_BIG_ENDIAN)
1927     shift = store_info->end - read_end;
1928   else
1929     shift = read_begin - store_info->begin;
1930 
1931   access_size = shift + GET_MODE_SIZE (read_mode);
1932 
1933   /* From now on it is bits.  */
1934   shift *= BITS_PER_UNIT;
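  /* Worked example (illustrative): for an 8-byte store covering
     [begin, begin + 8) and a 4-byte read of [begin + 4, begin + 8),
     a little-endian target needs shift == 4 bytes (32 bits above),
     while a big-endian target gets shift == end - read_end == 0.  */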
1935 
1936   if (shift)
1937     read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1938     				    optimize_bb_for_speed_p (bb),
1939 				    require_cst);
1940   else if (store_mode == BLKmode)
1941     {
1942       /* The store is a memset (addr, const_val, const_size).  */
1943       gcc_assert (CONST_INT_P (store_info->rhs));
1944       store_mode = int_mode_for_mode (read_mode);
1945       if (store_mode == BLKmode)
1946 	read_reg = NULL_RTX;
1947       else if (store_info->rhs == const0_rtx)
1948 	read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1949       else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1950 	       || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1951 	read_reg = NULL_RTX;
1952       else
1953 	{
1954 	  unsigned HOST_WIDE_INT c
1955 	    = INTVAL (store_info->rhs)
1956 	      & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
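	  /* E.g. with BITS_PER_UNIT == 8 and a stored byte of 0xab, the
	     loop below doubles the pattern each iteration: 0xabab,
	     0xabababab, ... until C fills a whole HOST_WIDE_INT,
	     reproducing the bytes the memset would have written.  */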
1957 	  int shift = BITS_PER_UNIT;
1958 	  while (shift < HOST_BITS_PER_WIDE_INT)
1959 	    {
1960 	      c |= (c << shift);
1961 	      shift <<= 1;
1962 	    }
1963 	  read_reg = gen_int_mode (c, store_mode);
1964 	  read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1965 	}
1966     }
1967   else if (store_info->const_rhs
1968 	   && (require_cst
1969 	       || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1970     read_reg = extract_low_bits (read_mode, store_mode,
1971 				 copy_rtx (store_info->const_rhs));
1972   else
1973     read_reg = extract_low_bits (read_mode, store_mode,
1974 				 copy_rtx (store_info->rhs));
1975   if (require_cst && read_reg && !CONSTANT_P (read_reg))
1976     read_reg = NULL_RTX;
1977   return read_reg;
1978 }
1979 
1980 /* Take a sequence of:
1981      A <- r1
1982      ...
1983      ... <- A
1984 
1985    and change it into
1986    r2 <- r1
1987    A <- r1
1988    ...
1989    ... <- r2
1990 
1991    or
1992 
1993    r3 <- extract (r1)
1994    r3 <- r3 >> shift
1995    r2 <- extract (r3)
1996    ... <- r2
1997 
1998    or
1999 
2000    r2 <- extract (r1)
2001    ... <- r2
2002 
2003    Depending on the alignment and the mode of the store and
2004    subsequent load.
2005 
2006 
2007    The STORE_INFO and STORE_INSN are for the store and READ_INFO
2008    and READ_INSN are for the read.  Return true if the replacement
2009    went ok.  */
2010 
2011 static bool
2012 replace_read (store_info_t store_info, insn_info_t store_insn,
2013 	      read_info_t read_info, insn_info_t read_insn, rtx *loc,
2014 	      bitmap regs_live)
2015 {
2016   enum machine_mode store_mode = GET_MODE (store_info->mem);
2017   enum machine_mode read_mode = GET_MODE (read_info->mem);
2018   rtx insns, this_insn, read_reg;
2019   basic_block bb;
2020 
2021   if (!dbg_cnt (dse))
2022     return false;
2023 
2024   /* Create a sequence of instructions to set up the read register.
2025      This sequence goes immediately before the store and its result
2026      is read by the load.
2027 
2028      We need to keep this in perspective.  We are replacing a read
2029      with a sequence of insns, but the read will almost certainly be
2030      in cache, so it is not going to be an expensive one.  Thus, we
2031      are not willing to do a multi-insn shift or, worse, a subroutine
2032      call to get rid of the read.  */
2033   if (dump_file && (dump_flags & TDF_DETAILS))
2034     fprintf (dump_file, "trying to replace %smode load in insn %d"
2035 	     " from %smode store in insn %d\n",
2036 	     GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2037 	     GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2038   start_sequence ();
2039   bb = BLOCK_FOR_INSN (read_insn->insn);
2040   read_reg = get_stored_val (store_info,
2041 			     read_mode, read_info->begin, read_info->end,
2042 			     bb, false);
2043   if (read_reg == NULL_RTX)
2044     {
2045       end_sequence ();
2046       if (dump_file && (dump_flags & TDF_DETAILS))
2047 	fprintf (dump_file, " -- could not extract bits of stored value\n");
2048       return false;
2049     }
2050   /* Force the value into a new register so that it won't be clobbered
2051      between the store and the load.  */
2052   read_reg = copy_to_mode_reg (read_mode, read_reg);
2053   insns = get_insns ();
2054   end_sequence ();
2055 
2056   if (insns != NULL_RTX)
2057     {
2058       /* Now we have to scan the set of new instructions to see if the
2059 	 sequence contains any sets of hardregs that happened to be
2060 	 live at this point.  For instance, this can happen if one of
2061 	 the insns sets the CC and the CC happened to be live at that
2062 	 point.  This does occasionally happen, see PR 37922.  */
2063       bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2064 
2065       for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2066 	note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2067 
2068       bitmap_and_into (regs_set, regs_live);
2069       if (!bitmap_empty_p (regs_set))
2070 	{
2071 	  if (dump_file && (dump_flags & TDF_DETAILS))
2072 	    {
2073 	      fprintf (dump_file,
2074 		       "abandoning replacement because sequence clobbers live hardregs:");
2075 	      df_print_regset (dump_file, regs_set);
2076 	    }
2077 
2078 	  BITMAP_FREE (regs_set);
2079 	  return false;
2080 	}
2081       BITMAP_FREE (regs_set);
2082     }
2083 
2084   if (validate_change (read_insn->insn, loc, read_reg, 0))
2085     {
2086       deferred_change_t deferred_change =
2087 	(deferred_change_t) pool_alloc (deferred_change_pool);
2088 
2089       /* Insert this right before the store insn where it will be safe
2090 	 from later insns that might change it before the read.  */
2091       emit_insn_before (insns, store_insn->insn);
2092 
2093       /* And now for the kludge part: cselib croaks if you just
2094 	 return at this point.  There are two reasons for this:
2095 
2096 	 1) Cselib has an idea of how many pseudos there are and
2097 	 that does not include the new ones we just added.
2098 
2099 	 2) Cselib does not know about the move insn we added
2100 	 above the store_info, and there is no way to tell it
2101 	 about it, because it has "moved on".
2102 
2103 	 Problem (1) is fixable with a certain amount of engineering.
2104 	 Problem (2) requires starting the bb from scratch.  This
2105 	 could be expensive.
2106 
2107 	 So we are just going to have to lie.  The move/extraction
2108 	 insns are not really an issue, cselib did not see them.  But
2109 	 the use of the new pseudo read_insn is a real problem because
2110 	 cselib has not scanned this insn.  The way that we solve this
2111 	 problem is that we are just going to put the mem back for now
2112 	 and when we are finished with the block, we undo this.  We
2113 	 keep a table of mems to get rid of.  At the end of the basic
2114 	 block we can put them back.  */
2115 
2116       *loc = read_info->mem;
2117       deferred_change->next = deferred_change_list;
2118       deferred_change_list = deferred_change;
2119       deferred_change->loc = loc;
2120       deferred_change->reg = read_reg;
2121 
2122       /* Get rid of the read_info; from the point of view of the
2123 	 rest of dse, pretend this read never happened.  */
2124       read_insn->read_rec = read_info->next;
2125       pool_free (read_info_pool, read_info);
2126       if (dump_file && (dump_flags & TDF_DETAILS))
2127 	{
2128 	  fprintf (dump_file, " -- replaced the loaded MEM with ");
2129 	  print_simple_rtl (dump_file, read_reg);
2130 	  fprintf (dump_file, "\n");
2131 	}
2132       return true;
2133     }
2134   else
2135     {
2136       if (dump_file && (dump_flags & TDF_DETAILS))
2137 	{
2138 	  fprintf (dump_file, " -- replacing the loaded MEM with ");
2139 	  print_simple_rtl (dump_file, read_reg);
2140 	  fprintf (dump_file, " led to an invalid instruction\n");
2141 	}
2142       return false;
2143     }
2144 }
2145 
2146 /* A for_each_rtx callback in which DATA is the bb_info.  Check to see
2147    if LOC is a mem; if it is, look at the address and kill any
2148    appropriate stores that may be active.  */
2149 
2150 static int
2151 check_mem_read_rtx (rtx *loc, void *data)
2152 {
2153   rtx mem = *loc, mem_addr;
2154   bb_info_t bb_info;
2155   insn_info_t insn_info;
2156   HOST_WIDE_INT offset = 0;
2157   HOST_WIDE_INT width = 0;
2158   alias_set_type spill_alias_set = 0;
2159   cselib_val *base = NULL;
2160   int group_id;
2161   read_info_t read_info;
2162 
2163   if (!mem || !MEM_P (mem))
2164     return 0;
2165 
2166   bb_info = (bb_info_t) data;
2167   insn_info = bb_info->last_insn;
2168 
2169   if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2170       || (MEM_VOLATILE_P (mem)))
2171     {
2172       if (dump_file && (dump_flags & TDF_DETAILS))
2173 	fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2174       add_wild_read (bb_info);
2175       insn_info->cannot_delete = true;
2176       return 0;
2177     }
2178 
2179   /* If it is reading readonly mem, then there can be no conflict with
2180      another write. */
2181   if (MEM_READONLY_P (mem))
2182     return 0;
2183 
2184   if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2185     {
2186       if (dump_file && (dump_flags & TDF_DETAILS))
2187 	fprintf (dump_file, " adding wild read, canon_address failure.\n");
2188       add_wild_read (bb_info);
2189       return 0;
2190     }
2191 
2192   if (GET_MODE (mem) == BLKmode)
2193     width = -1;
2194   else
2195     width = GET_MODE_SIZE (GET_MODE (mem));
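  /* Note: a BLKmode read gets width -1, so read_info->end below becomes
     offset - 1, i.e. begin > end.  Later consumers (e.g. the block-mode
     test in scan_reads_nospill) rely on begin > end as the marker for a
     read of unknown extent.  */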
2196 
2197   read_info = (read_info_t) pool_alloc (read_info_pool);
2198   read_info->group_id = group_id;
2199   read_info->mem = mem;
2200   read_info->alias_set = spill_alias_set;
2201   read_info->begin = offset;
2202   read_info->end = offset + width;
2203   read_info->next = insn_info->read_rec;
2204   insn_info->read_rec = read_info;
2205   /* For alias_set != 0, canon_true_dependence should never be called.  */
2206   if (spill_alias_set)
2207     mem_addr = NULL_RTX;
2208   else
2209     {
2210       if (group_id < 0)
2211 	mem_addr = base->val_rtx;
2212       else
2213 	{
2214 	  group_info_t group
2215 	    = rtx_group_vec[group_id];
2216 	  mem_addr = group->canon_base_addr;
2217 	}
2218       /* get_addr can only handle VALUE, not an expr like
2219 	 VALUE + OFFSET, so call get_addr to get the original addr for
2220 	 mem_addr before plus_constant.  */
2221       mem_addr = get_addr (mem_addr);
2222       if (offset)
2223 	mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2224     }
2225 
2226   /* We ignore the clobbers in store_info.  This is mildly aggressive,
2227      but there really should not be a clobber followed by a read.  */
2228 
2229   if (spill_alias_set)
2230     {
2231       insn_info_t i_ptr = active_local_stores;
2232       insn_info_t last = NULL;
2233 
2234       if (dump_file && (dump_flags & TDF_DETAILS))
2235 	fprintf (dump_file, " processing spill load %d\n",
2236 		 (int) spill_alias_set);
2237 
2238       while (i_ptr)
2239 	{
2240 	  store_info_t store_info = i_ptr->store_rec;
2241 
2242 	  /* Skip the clobbers.  */
2243 	  while (!store_info->is_set)
2244 	    store_info = store_info->next;
2245 
2246 	  if (store_info->alias_set == spill_alias_set)
2247 	    {
2248 	      if (dump_file && (dump_flags & TDF_DETAILS))
2249 		dump_insn_info ("removing from active", i_ptr);
2250 
2251 	      active_local_stores_len--;
2252 	      if (last)
2253 		last->next_local_store = i_ptr->next_local_store;
2254 	      else
2255 		active_local_stores = i_ptr->next_local_store;
2256 	    }
2257 	  else
2258 	    last = i_ptr;
2259 	  i_ptr = i_ptr->next_local_store;
2260 	}
2261     }
2262   else if (group_id >= 0)
2263     {
2264       /* This is the restricted case where the base is a constant or
2265 	 the frame pointer and offset is a constant.  */
2266       insn_info_t i_ptr = active_local_stores;
2267       insn_info_t last = NULL;
2268 
2269       if (dump_file && (dump_flags & TDF_DETAILS))
2270 	{
2271 	  if (width == -1)
2272 	    fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2273 		     group_id);
2274 	  else
2275 	    fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2276 		     group_id, (int)offset, (int)(offset+width));
2277 	}
2278 
2279       while (i_ptr)
2280 	{
2281 	  bool remove = false;
2282 	  store_info_t store_info = i_ptr->store_rec;
2283 
2284 	  /* Skip the clobbers.  */
2285 	  while (!store_info->is_set)
2286 	    store_info = store_info->next;
2287 
2288 	  /* There are three cases here.  */
2289 	  if (store_info->group_id < 0)
2290 	    /* We have a cselib store followed by a read from a
2291 	       const base. */
2292 	    remove
2293 	      = canon_true_dependence (store_info->mem,
2294 				       GET_MODE (store_info->mem),
2295 				       store_info->mem_addr,
2296 				       mem, mem_addr);
2297 
2298 	  else if (group_id == store_info->group_id)
2299 	    {
2300 	      /* This is a block mode load.  We may get lucky and
2301 		 canon_true_dependence may save the day.  */
2302 	      if (width == -1)
2303 		remove
2304 		  = canon_true_dependence (store_info->mem,
2305 					   GET_MODE (store_info->mem),
2306 					   store_info->mem_addr,
2307 					   mem, mem_addr);
2308 
2309 	      /* If this read is just reading back something that we just
2310 		 stored, rewrite the read.  */
2311 	      else
2312 		{
2313 		  if (store_info->rhs
2314 		      && offset >= store_info->begin
2315 		      && offset + width <= store_info->end
2316 		      && all_positions_needed_p (store_info,
2317 						 offset - store_info->begin,
2318 						 width)
2319 		      && replace_read (store_info, i_ptr, read_info,
2320 				       insn_info, loc, bb_info->regs_live))
2321 		    return 0;
2322 
2323 		  /* The bases are the same, just see if the offsets
2324 		     overlap.  */
2325 		  if ((offset < store_info->end)
2326 		      && (offset + width > store_info->begin))
2327 		    remove = true;
2328 		}
2329 	    }
2330 
2331 	  /* else
2332 	     The else case that is missing here is that the
2333 	     bases are constant but different.  There is nothing
2334 	     to do here because there is no overlap.  */
2335 
2336 	  if (remove)
2337 	    {
2338 	      if (dump_file && (dump_flags & TDF_DETAILS))
2339 		dump_insn_info ("removing from active", i_ptr);
2340 
2341 	      active_local_stores_len--;
2342 	      if (last)
2343 		last->next_local_store = i_ptr->next_local_store;
2344 	      else
2345 		active_local_stores = i_ptr->next_local_store;
2346 	    }
2347 	  else
2348 	    last = i_ptr;
2349 	  i_ptr = i_ptr->next_local_store;
2350 	}
2351     }
2352   else
2353     {
2354       insn_info_t i_ptr = active_local_stores;
2355       insn_info_t last = NULL;
2356       if (dump_file && (dump_flags & TDF_DETAILS))
2357 	{
2358 	  fprintf (dump_file, " processing cselib load mem:");
2359 	  print_inline_rtx (dump_file, mem, 0);
2360 	  fprintf (dump_file, "\n");
2361 	}
2362 
2363       while (i_ptr)
2364 	{
2365 	  bool remove = false;
2366 	  store_info_t store_info = i_ptr->store_rec;
2367 
2368 	  if (dump_file && (dump_flags & TDF_DETAILS))
2369 	    fprintf (dump_file, " processing cselib load against insn %d\n",
2370 		     INSN_UID (i_ptr->insn));
2371 
2372 	  /* Skip the clobbers.  */
2373 	  while (!store_info->is_set)
2374 	    store_info = store_info->next;
2375 
2376 	  /* If this read is just reading back something that we just
2377 	     stored, rewrite the read.  */
2378 	  if (store_info->rhs
2379 	      && store_info->group_id == -1
2380 	      && store_info->cse_base == base
2381 	      && width != -1
2382 	      && offset >= store_info->begin
2383 	      && offset + width <= store_info->end
2384 	      && all_positions_needed_p (store_info,
2385 					 offset - store_info->begin, width)
2386 	      && replace_read (store_info, i_ptr,  read_info, insn_info, loc,
2387 			       bb_info->regs_live))
2388 	    return 0;
2389 
2390 	  if (!store_info->alias_set)
2391 	    remove = canon_true_dependence (store_info->mem,
2392 					    GET_MODE (store_info->mem),
2393 					    store_info->mem_addr,
2394 					    mem, mem_addr);
2395 
2396 	  if (remove)
2397 	    {
2398 	      if (dump_file && (dump_flags & TDF_DETAILS))
2399 		dump_insn_info ("removing from active", i_ptr);
2400 
2401 	      active_local_stores_len--;
2402 	      if (last)
2403 		last->next_local_store = i_ptr->next_local_store;
2404 	      else
2405 		active_local_stores = i_ptr->next_local_store;
2406 	    }
2407 	  else
2408 	    last = i_ptr;
2409 	  i_ptr = i_ptr->next_local_store;
2410 	}
2411     }
2412   return 0;
2413 }
2414 
2415 /* A note_uses callback in which DATA is the bb_info, as for
2416    check_mem_read_rtx.  Walk every part of *LOC with for_each_rtx,
2417    applying check_mem_read_rtx to each mem found.  */
2418 
2419 static void
2420 check_mem_read_use (rtx *loc, void *data)
2421 {
2422   for_each_rtx (loc, check_mem_read_rtx, data);
2423 }
2424 
2425 
2426 /* Get arguments passed to CALL_INSN.  Return TRUE if successful.
2427    So far it only handles arguments passed in registers.  */
2428 
2429 static bool
2430 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2431 {
2432   CUMULATIVE_ARGS args_so_far_v;
2433   cumulative_args_t args_so_far;
2434   tree arg;
2435   int idx;
2436 
2437   INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2438   args_so_far = pack_cumulative_args (&args_so_far_v);
2439 
2440   arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2441   for (idx = 0;
2442        arg != void_list_node && idx < nargs;
2443        arg = TREE_CHAIN (arg), idx++)
2444     {
2445       enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2446       rtx reg, link, tmp;
2447       reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2448       if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2449 	  || GET_MODE_CLASS (mode) != MODE_INT)
2450 	return false;
2451 
2452       for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2453 	   link;
2454 	   link = XEXP (link, 1))
2455 	if (GET_CODE (XEXP (link, 0)) == USE)
2456 	  {
2457 	    args[idx] = XEXP (XEXP (link, 0), 0);
2458 	    if (REG_P (args[idx])
2459 		&& REGNO (args[idx]) == REGNO (reg)
2460 		&& (GET_MODE (args[idx]) == mode
2461 		    || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2462 			&& (GET_MODE_SIZE (GET_MODE (args[idx]))
2463 			    <= UNITS_PER_WORD)
2464 			&& (GET_MODE_SIZE (GET_MODE (args[idx]))
2465 			    > GET_MODE_SIZE (mode)))))
2466 	      break;
2467 	  }
2468       if (!link)
2469 	return false;
2470 
2471       tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2472       if (GET_MODE (args[idx]) != mode)
2473 	{
2474 	  if (!tmp || !CONST_INT_P (tmp))
2475 	    return false;
2476 	  tmp = gen_int_mode (INTVAL (tmp), mode);
2477 	}
2478       if (tmp)
2479 	args[idx] = tmp;
2480 
2481       targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2482     }
2483   if (arg != void_list_node || idx != nargs)
2484     return false;
2485   return true;
2486 }
2487 
2488 /* Return a bitmap of the fixed registers contained in IN.  */
2489 
2490 static bitmap
2491 copy_fixed_regs (const_bitmap in)
2492 {
2493   bitmap ret;
2494 
2495   ret = ALLOC_REG_SET (NULL);
2496   bitmap_and (ret, in, fixed_reg_set_regset);
2497   return ret;
2498 }
2499 
2500 /* Apply record_store to all candidate stores in INSN.  Mark INSN
2501    if some part of it is not a candidate store and assigns to a
2502    non-register target.  */
2503 
2504 static void
2505 scan_insn (bb_info_t bb_info, rtx insn)
2506 {
2507   rtx body;
2508   insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
2509   int mems_found = 0;
2510   memset (insn_info, 0, sizeof (struct insn_info));
2511 
2512   if (dump_file && (dump_flags & TDF_DETAILS))
2513     fprintf (dump_file, "\n**scanning insn=%d\n",
2514 	     INSN_UID (insn));
2515 
2516   insn_info->prev_insn = bb_info->last_insn;
2517   insn_info->insn = insn;
2518   bb_info->last_insn = insn_info;
2519 
2520   if (DEBUG_INSN_P (insn))
2521     {
2522       insn_info->cannot_delete = true;
2523       return;
2524     }
2525 
2526   /* Cselib clears the table for this case, so we have to essentially
2527      do the same.  */
2528   if (NONJUMP_INSN_P (insn)
2529       && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2530       && MEM_VOLATILE_P (PATTERN (insn)))
2531     {
2532       add_wild_read (bb_info);
2533       insn_info->cannot_delete = true;
2534       return;
2535     }
2536 
2537   /* Look at all of the uses in the insn.  */
2538   note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2539 
2540   if (CALL_P (insn))
2541     {
2542       bool const_call;
2543       tree memset_call = NULL_TREE;
2544 
2545       insn_info->cannot_delete = true;
2546 
2547       /* Const functions cannot do anything bad, i.e. read memory;
2548 	 however, they can read their parameters, which may have
2549 	 been pushed onto the stack.
2550 	 memset and bzero don't read memory either.  */
2551       const_call = RTL_CONST_CALL_P (insn);
2552       if (!const_call)
2553 	{
2554 	  rtx call = get_call_rtx_from (insn);
2555 	  if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2556 	    {
2557 	      rtx symbol = XEXP (XEXP (call, 0), 0);
2558 	      if (SYMBOL_REF_DECL (symbol)
2559 		  && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2560 		{
2561 		  if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2562 		       == BUILT_IN_NORMAL
2563 		       && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2564 			   == BUILT_IN_MEMSET))
2565 		      || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2566 		    memset_call = SYMBOL_REF_DECL (symbol);
2567 		}
2568 	    }
2569 	}
2570       if (const_call || memset_call)
2571 	{
2572 	  insn_info_t i_ptr = active_local_stores;
2573 	  insn_info_t last = NULL;
2574 
2575 	  if (dump_file && (dump_flags & TDF_DETAILS))
2576 	    fprintf (dump_file, "%s call %d\n",
2577 		     const_call ? "const" : "memset", INSN_UID (insn));
2578 
2579 	  /* See the head comment of the frame_read field.  */
2580 	  if (reload_completed)
2581 	    insn_info->frame_read = true;
2582 
2583 	  /* Loop over the active stores and remove those which are
2584 	     killed by the const function call.  */
2585 	  while (i_ptr)
2586 	    {
2587 	      bool remove_store = false;
2588 
2589 	      /* The stack pointer based stores are always killed.  */
2590 	      if (i_ptr->stack_pointer_based)
2591 	        remove_store = true;
2592 
2593 	      /* If the frame is read, the frame related stores are killed.  */
2594 	      else if (insn_info->frame_read)
2595 		{
2596 		  store_info_t store_info = i_ptr->store_rec;
2597 
2598 		  /* Skip the clobbers.  */
2599 		  while (!store_info->is_set)
2600 		    store_info = store_info->next;
2601 
2602 		  if (store_info->group_id >= 0
2603 		      && rtx_group_vec[store_info->group_id]->frame_related)
2604 		    remove_store = true;
2605 		}
2606 
2607 	      if (remove_store)
2608 		{
2609 		  if (dump_file && (dump_flags & TDF_DETAILS))
2610 		    dump_insn_info ("removing from active", i_ptr);
2611 
2612 		  active_local_stores_len--;
2613 		  if (last)
2614 		    last->next_local_store = i_ptr->next_local_store;
2615 		  else
2616 		    active_local_stores = i_ptr->next_local_store;
2617 		}
2618 	      else
2619 		last = i_ptr;
2620 
2621 	      i_ptr = i_ptr->next_local_store;
2622 	    }
2623 
2624 	  if (memset_call)
2625 	    {
2626 	      rtx args[3];
2627 	      if (get_call_args (insn, memset_call, args, 3)
2628 		  && CONST_INT_P (args[1])
2629 		  && CONST_INT_P (args[2])
2630 		  && INTVAL (args[2]) > 0)
2631 		{
2632 		  rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2633 		  set_mem_size (mem, INTVAL (args[2]));
2634 		  body = gen_rtx_SET (VOIDmode, mem, args[1]);
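		  /* E.g. a call to memset (p, 0, 36) is modeled here as
		     (set (mem:BLK (reg p) [... S36 ...]) (const_int 0)),
		     the same shape that record_store already handles;
		     the MEM annotation shown is illustrative.  */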
2635 		  mems_found += record_store (body, bb_info);
2636 		  if (dump_file && (dump_flags & TDF_DETAILS))
2637 		    fprintf (dump_file, "handling memset as BLKmode store\n");
2638 		  if (mems_found == 1)
2639 		    {
2640 		      if (active_local_stores_len++
2641 			  >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2642 			{
2643 			  active_local_stores_len = 1;
2644 			  active_local_stores = NULL;
2645 			}
2646 		      insn_info->fixed_regs_live
2647 			= copy_fixed_regs (bb_info->regs_live);
2648 		      insn_info->next_local_store = active_local_stores;
2649 		      active_local_stores = insn_info;
2650 		    }
2651 		}
2652 	    }
2653 	}
2654 
2655       else
2656 	/* Every other call, including pure functions, may read any memory
2657            that is not relative to the frame.  */
2658         add_non_frame_wild_read (bb_info);
2659 
2660       return;
2661     }
2662 
2663   /* Assuming that there are sets in these insns, we cannot delete
2664      them.  */
2665   if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2666       || volatile_refs_p (PATTERN (insn))
2667       || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2668       || (RTX_FRAME_RELATED_P (insn))
2669       || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2670     insn_info->cannot_delete = true;
2671 
2672   body = PATTERN (insn);
2673   if (GET_CODE (body) == PARALLEL)
2674     {
2675       int i;
2676       for (i = 0; i < XVECLEN (body, 0); i++)
2677 	mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2678     }
2679   else
2680     mems_found += record_store (body, bb_info);
2681 
2682   if (dump_file && (dump_flags & TDF_DETAILS))
2683     fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2684 	     mems_found, insn_info->cannot_delete ? "true" : "false");
2685 
2686   /* If we found some sets of mems, add the insn to active_local_stores
2687      so that it can be locally deleted if found dead, or used for
2688      replace_read and redundant constant store elimination.  Otherwise
2689      mark it as not deletable.  This simplifies the processing later.  */
2690   if (mems_found == 1)
2691     {
2692       if (active_local_stores_len++
2693 	  >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2694 	{
2695 	  active_local_stores_len = 1;
2696 	  active_local_stores = NULL;
2697 	}
2698       insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2699       insn_info->next_local_store = active_local_stores;
2700       active_local_stores = insn_info;
2701     }
2702   else
2703     insn_info->cannot_delete = true;
2704 }
2705 
2706 
2707 /* Remove BASE from the set of active_local_stores.  This is a
2708    callback from cselib that is used to get rid of the stores in
2709    active_local_stores.  */
2710 
2711 static void
2712 remove_useless_values (cselib_val *base)
2713 {
2714   insn_info_t insn_info = active_local_stores;
2715   insn_info_t last = NULL;
2716 
2717   while (insn_info)
2718     {
2719       store_info_t store_info = insn_info->store_rec;
2720       bool del = false;
2721 
2722       /* If ANY of the store_infos match the cselib group that is
2723 	 being deleted, then the insn cannot be deleted.  */
2724       while (store_info)
2725 	{
2726 	  if ((store_info->group_id == -1)
2727 	      && (store_info->cse_base == base))
2728 	    {
2729 	      del = true;
2730 	      break;
2731 	    }
2732 	  store_info = store_info->next;
2733 	}
2734 
2735       if (del)
2736 	{
2737 	  active_local_stores_len--;
2738 	  if (last)
2739 	    last->next_local_store = insn_info->next_local_store;
2740 	  else
2741 	    active_local_stores = insn_info->next_local_store;
2742 	  free_store_info (insn_info);
2743 	}
2744       else
2745 	last = insn_info;
2746 
2747       insn_info = insn_info->next_local_store;
2748     }
2749 }
2750 
2751 
2752 /* Do all of step 1.  */
2753 
2754 static void
2755 dse_step1 (void)
2756 {
2757   basic_block bb;
2758   bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2759 
2760   cselib_init (0);
2761   all_blocks = BITMAP_ALLOC (NULL);
2762   bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2763   bitmap_set_bit (all_blocks, EXIT_BLOCK);
2764 
2765   FOR_ALL_BB (bb)
2766     {
2767       insn_info_t ptr;
2768       bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
2769 
2770       memset (bb_info, 0, sizeof (struct bb_info));
2771       bitmap_set_bit (all_blocks, bb->index);
2772       bb_info->regs_live = regs_live;
2773 
2774       bitmap_copy (regs_live, DF_LR_IN (bb));
2775       df_simulate_initialize_forwards (bb, regs_live);
2776 
2777       bb_table[bb->index] = bb_info;
2778       cselib_discard_hook = remove_useless_values;
2779 
2780       if (bb->index >= NUM_FIXED_BLOCKS)
2781 	{
2782 	  rtx insn;
2783 
2784 	  cse_store_info_pool
2785 	    = create_alloc_pool ("cse_store_info_pool",
2786 				 sizeof (struct store_info), 100);
2787 	  active_local_stores = NULL;
2788 	  active_local_stores_len = 0;
2789 	  cselib_clear_table ();
2790 
2791 	  /* Scan the insns.  */
2792 	  FOR_BB_INSNS (bb, insn)
2793 	    {
2794 	      if (INSN_P (insn))
2795 		scan_insn (bb_info, insn);
2796 	      cselib_process_insn (insn);
2797 	      if (INSN_P (insn))
2798 		df_simulate_one_insn_forwards (bb, insn, regs_live);
2799 	    }
2800 
2801 	  /* This is something of a hack, because the global algorithm
2802 	     is supposed to take care of the case where stores go dead
2803 	     at the end of the function.  However, the global
2804 	     algorithm must take a more conservative view of block
2805 	     mode reads than the local algorithm does.  So to handle the
2806 	     case where you have a store to the frame followed by a non
2807 	     overlapping block mode read, we look at the active local
2808 	     stores at the end of the function and delete all of the
2809 	     frame and spill based ones.  */
2810 	  if (stores_off_frame_dead_at_return
2811 	      && (EDGE_COUNT (bb->succs) == 0
2812 		  || (single_succ_p (bb)
2813 		      && single_succ (bb) == EXIT_BLOCK_PTR
2814 		      && ! crtl->calls_eh_return)))
2815 	    {
2816 	      insn_info_t i_ptr = active_local_stores;
2817 	      while (i_ptr)
2818 		{
2819 		  store_info_t store_info = i_ptr->store_rec;
2820 
2821 		  /* Skip the clobbers.  */
2822 		  while (!store_info->is_set)
2823 		    store_info = store_info->next;
2824 		  if (store_info->alias_set && !i_ptr->cannot_delete)
2825 		    delete_dead_store_insn (i_ptr);
2826 		  else
2827 		    if (store_info->group_id >= 0)
2828 		      {
2829 			group_info_t group
2830 			  = rtx_group_vec[store_info->group_id];
2831 			if (group->frame_related && !i_ptr->cannot_delete)
2832 			  delete_dead_store_insn (i_ptr);
2833 		      }
2834 
2835 		  i_ptr = i_ptr->next_local_store;
2836 		}
2837 	    }
2838 
2839 	  /* Get rid of the loads that were discovered in
2840 	     replace_read.  Cselib is finished with this block.  */
2841 	  while (deferred_change_list)
2842 	    {
2843 	      deferred_change_t next = deferred_change_list->next;
2844 
2845 	      /* There is no reason to validate this change.  That was
2846 		 done earlier.  */
2847 	      *deferred_change_list->loc = deferred_change_list->reg;
2848 	      pool_free (deferred_change_pool, deferred_change_list);
2849 	      deferred_change_list = next;
2850 	    }
2851 
2852 	  /* Get rid of all of the cselib based store_infos in this
2853 	     block and mark the containing insns as not being
2854 	     deletable.  */
2855 	  ptr = bb_info->last_insn;
2856 	  while (ptr)
2857 	    {
2858 	      if (ptr->contains_cselib_groups)
2859 		{
2860 		  store_info_t s_info = ptr->store_rec;
2861 		  while (s_info && !s_info->is_set)
2862 		    s_info = s_info->next;
2863 		  if (s_info
2864 		      && s_info->redundant_reason
2865 		      && s_info->redundant_reason->insn
2866 		      && !ptr->cannot_delete)
2867 		    {
2868 		      if (dump_file && (dump_flags & TDF_DETAILS))
2869 			fprintf (dump_file, "Locally deleting insn %d "
2870 					    "because insn %d stores the "
2871 					    "same value and couldn't be "
2872 					    "eliminated\n",
2873 				 INSN_UID (ptr->insn),
2874 				 INSN_UID (s_info->redundant_reason->insn));
2875 		      delete_dead_store_insn (ptr);
2876 		    }
2877 		  free_store_info (ptr);
2878 		}
2879 	      else
2880 		{
2881 		  store_info_t s_info;
2882 
2883 		  /* Free at least positions_needed bitmaps.  */
2884 		  for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2885 		    if (s_info->is_large)
2886 		      {
2887 			BITMAP_FREE (s_info->positions_needed.large.bmap);
2888 			s_info->is_large = false;
2889 		      }
2890 		}
2891 	      ptr = ptr->prev_insn;
2892 	    }
2893 
2894 	  free_alloc_pool (cse_store_info_pool);
2895 	}
2896       bb_info->regs_live = NULL;
2897     }
2898 
2899   BITMAP_FREE (regs_live);
2900   cselib_finish ();
2901   rtx_group_table.empty ();
2902 }
2903 
2904 
2905 /*----------------------------------------------------------------------------
2906    Second step.
2907 
2908    Assign each byte position in the stores that we are going to
2909    analyze globally to a position in the bitmaps.  Returns true if
2910    there are any bit positions assigned.
2911 ----------------------------------------------------------------------------*/
2912 
2913 static void
2914 dse_step2_init (void)
2915 {
2916   unsigned int i;
2917   group_info_t group;
2918 
2919   FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2920     {
2921       /* For all non stack related bases, we only consider a store to
2922 	 be deletable if there are two or more stores for that
2923 	 position.  This is because it takes one store to make the
2924 	 other store redundant.  However, for the stores that are
2925 	 stack related, we consider them if there is only one store
2926 	 for the position.  We do this because the stack related
2927 	 stores can be deleted if there is no read between them and
2928 	 the end of the function.
2929 
2930 	 To make this work in the current framework, we take the stack
2931 	 related bases and add all of the bits from store1 into store2.
2932 	 This has the effect of making them eligible even if there is
2933 	 only one store.  */
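      /* Illustrative example: if frame offset 8 is written by exactly one
	 insn, its bit is set only in store1_p; the bitmap_ior_into calls
	 below copy it into store2_p, so later steps treat that single
	 frame store as a deletion candidate.  */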
2934 
2935       if (stores_off_frame_dead_at_return && group->frame_related)
2936 	{
2937 	  bitmap_ior_into (group->store2_n, group->store1_n);
2938 	  bitmap_ior_into (group->store2_p, group->store1_p);
2939 	  if (dump_file && (dump_flags & TDF_DETAILS))
2940 	    fprintf (dump_file, "group %d is frame related ", i);
2941 	}
2942 
2943       group->offset_map_size_n++;
2944       group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2945 				       group->offset_map_size_n);
2946       group->offset_map_size_p++;
2947       group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2948 				       group->offset_map_size_p);
2949       group->process_globally = false;
2950       if (dump_file && (dump_flags & TDF_DETAILS))
2951 	{
2952 	  fprintf (dump_file, "group %d(%d+%d): ", i,
2953 		   (int)bitmap_count_bits (group->store2_n),
2954 		   (int)bitmap_count_bits (group->store2_p));
2955 	  bitmap_print (dump_file, group->store2_n, "n ", " ");
2956 	  bitmap_print (dump_file, group->store2_p, "p ", "\n");
2957 	}
2958     }
2959 }
2960 
2961 
2962 /* Init the offset tables for the normal case.  */
2963 
2964 static bool
2965 dse_step2_nospill (void)
2966 {
2967   unsigned int i;
2968   group_info_t group;
2969   /* Position 0 is never assigned, because 0 is used in the offset
2970      maps to mean unused.  */
2971   current_position = 1;
2972   FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2973     {
2974       bitmap_iterator bi;
2975       unsigned int j;
2976 
2977       if (group == clear_alias_group)
2978 	continue;
2979 
2980       memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2981       memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2982       bitmap_clear (group->group_kill);
2983 
2984       EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2985 	{
2986 	  bitmap_set_bit (group->group_kill, current_position);
2987           if (bitmap_bit_p (group->escaped_n, j))
2988 	    bitmap_set_bit (kill_on_calls, current_position);
2989 	  group->offset_map_n[j] = current_position++;
2990 	  group->process_globally = true;
2991 	}
2992       EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2993 	{
2994 	  bitmap_set_bit (group->group_kill, current_position);
2995           if (bitmap_bit_p (group->escaped_p, j))
2996 	    bitmap_set_bit (kill_on_calls, current_position);
2997 	  group->offset_map_p[j] = current_position++;
2998 	  group->process_globally = true;
2999 	}
3000     }
3001   return current_position != 1;
3002 }
3003 
3004 
3005 /* Init the offset tables for the spill case.  */
3006 
3007 static bool
3008 dse_step2_spill (void)
3009 {
3010   unsigned int j;
3011   group_info_t group = clear_alias_group;
3012   bitmap_iterator bi;
3013 
3014   /* Position 0 is never assigned, because 0 is used in the offset
3015      maps to mean unused.  */
3016   current_position = 1;
3017 
3018   if (dump_file && (dump_flags & TDF_DETAILS))
3019     {
3020       bitmap_print (dump_file, clear_alias_sets,
3021 		    "clear alias sets              ", "\n");
3022       bitmap_print (dump_file, disqualified_clear_alias_sets,
3023 		    "disqualified clear alias sets ", "\n");
3024     }
3025 
3026   memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
3027   memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
3028   bitmap_clear (group->group_kill);
3029 
3030   /* Remove the disqualified positions from the store2_p set.  */
3031   bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
3032 
3033   /* We do not need to process the store2_n set because
3034      alias_sets are always positive.  */
3035   EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3036     {
3037       bitmap_set_bit (group->group_kill, current_position);
3038       group->offset_map_p[j] = current_position++;
3039       group->process_globally = true;
3040     }
3041 
3042   return current_position != 1;
3043 }
3044 
3045 
3046 
3047 /*----------------------------------------------------------------------------
3048   Third step.
3049 
3050   Build the bit vectors for the transfer functions.
3051 ----------------------------------------------------------------------------*/
3052 
3053 
3054 /* Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
3055    there, return 0.  */
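/* For example (illustrative): offset -3 is looked up as offset_map_n[3]
   and offset 5 as offset_map_p[5]; both maps hold 0 for positions that
   were never assigned in step 2.  */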
3056 
3057 static int
3058 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
3059 {
3060   if (offset < 0)
3061     {
3062       HOST_WIDE_INT offset_p = -offset;
3063       if (offset_p >= group_info->offset_map_size_n)
3064 	return 0;
3065       return group_info->offset_map_n[offset_p];
3066     }
3067   else
3068     {
3069       if (offset >= group_info->offset_map_size_p)
3070 	return 0;
3071       return group_info->offset_map_p[offset];
3072     }
3073 }
3074 
3075 
3076 /* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
3077    may be NULL.  */
3078 
3079 static void
3080 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3081 {
3082   while (store_info)
3083     {
3084       HOST_WIDE_INT i;
3085       group_info_t group_info
3086 	= rtx_group_vec[store_info->group_id];
3087       if (group_info->process_globally)
3088 	for (i = store_info->begin; i < store_info->end; i++)
3089 	  {
3090 	    int index = get_bitmap_index (group_info, i);
3091 	    if (index != 0)
3092 	      {
3093 		bitmap_set_bit (gen, index);
3094 		if (kill)
3095 		  bitmap_clear_bit (kill, index);
3096 	      }
3097 	  }
3098       store_info = store_info->next;
3099     }
3100 }
3101 
3102 
3103 /* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
3104    may be NULL.  */
3105 
3106 static void
3107 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3108 {
3109   while (store_info)
3110     {
3111       if (store_info->alias_set)
3112 	{
3113 	  int index = get_bitmap_index (clear_alias_group,
3114 					store_info->alias_set);
3115 	  if (index != 0)
3116 	    {
3117 	      bitmap_set_bit (gen, index);
3118 	      if (kill)
3119 		bitmap_clear_bit (kill, index);
3120 	    }
3121 	}
3122       store_info = store_info->next;
3123     }
3124 }
3125 
3126 
3127 /* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
3128    may be NULL.  */
3129 
3130 static void
3131 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3132 {
3133   read_info_t read_info = insn_info->read_rec;
3134   int i;
3135   group_info_t group;
3136 
3137   /* If this insn reads the frame, kill all the frame related stores.  */
3138   if (insn_info->frame_read)
3139     {
3140       FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3141 	if (group->process_globally && group->frame_related)
3142 	  {
3143 	    if (kill)
3144 	      bitmap_ior_into (kill, group->group_kill);
3145 	    bitmap_and_compl_into (gen, group->group_kill);
3146 	  }
3147     }
3148   if (insn_info->non_frame_wild_read)
3149     {
3150       /* Kill all non-frame related stores.  Kill all stores of variables that
3151          escape.  */
3152       if (kill)
3153         bitmap_ior_into (kill, kill_on_calls);
3154       bitmap_and_compl_into (gen, kill_on_calls);
3155       FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3156 	if (group->process_globally && !group->frame_related)
3157 	  {
3158 	    if (kill)
3159 	      bitmap_ior_into (kill, group->group_kill);
3160 	    bitmap_and_compl_into (gen, group->group_kill);
3161 	  }
3162     }
3163   while (read_info)
3164     {
3165       FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3166 	{
3167 	  if (group->process_globally)
3168 	    {
3169 	      if (i == read_info->group_id)
3170 		{
3171 		  if (read_info->begin > read_info->end)
3172 		    {
3173 		      /* Begin > end for block mode reads.  */
3174 		      if (kill)
3175 			bitmap_ior_into (kill, group->group_kill);
3176 		      bitmap_and_compl_into (gen, group->group_kill);
3177 		    }
3178 		  else
3179 		    {
3180 		      /* The groups are the same, just process the
3181 			 offsets.  */
3182 		      HOST_WIDE_INT j;
3183 		      for (j = read_info->begin; j < read_info->end; j++)
3184 			{
3185 			  int index = get_bitmap_index (group, j);
3186 			  if (index != 0)
3187 			    {
3188 			      if (kill)
3189 				bitmap_set_bit (kill, index);
3190 			      bitmap_clear_bit (gen, index);
3191 			    }
3192 			}
3193 		    }
3194 		}
3195 	      else
3196 		{
3197 		  /* The groups are different, if the alias sets
3198 		     conflict, clear the entire group.  We only need
3199 		     to apply this test if the read_info is a cselib
3200 		     read.  Anything with a constant base cannot alias
3201 		     something else with a different constant
3202 		     base.  */
3203 		  if ((read_info->group_id < 0)
3204 		      && canon_true_dependence (group->base_mem,
3205 						GET_MODE (group->base_mem),
3206 						group->canon_base_addr,
3207 						read_info->mem, NULL_RTX))
3208 		    {
3209 		      if (kill)
3210 			bitmap_ior_into (kill, group->group_kill);
3211 		      bitmap_and_compl_into (gen, group->group_kill);
3212 		    }
3213 		}
3214 	    }
3215 	}
3216 
3217       read_info = read_info->next;
3218     }
3219 }
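
/* Sketch of the effect above, again with hypothetical indices: a
   read of bytes [4, 8) in the same group as a pending store reverses
   what scan_stores_nospill did for the overlapping positions -- each
   index is set in KILL and cleared from GEN -- so a store insn can
   only be proved dead if every one of its positions survives,
   unread, to the bottom of the region being scanned.  */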

/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
{
  while (read_info)
    {
      if (read_info->alias_set)
	{
	  int index = get_bitmap_index (clear_alias_group,
					read_info->alias_set);
	  if (index != 0)
	    {
	      if (kill)
		bitmap_set_bit (kill, index);
	      bitmap_clear_bit (gen, index);
	    }
	}

      read_info = read_info->next;
    }
}


/* Return the insn in BB_INFO just before the first wild read, or the
   last insn if there are no wild reads in the block.  */

static insn_info_t
find_insn_before_first_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info_t last_wild_read = NULL;

  while (insn_info)
    {
      if (insn_info->wild_read)
	{
	  last_wild_read = insn_info->prev_insn;
	  /* Block starts with a wild read.  */
	  if (!last_wild_read)
	    return NULL;
	}

      insn_info = insn_info->prev_insn;
    }

  if (last_wild_read)
    return last_wild_read;
  else
    return bb_info->last_insn;
}
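
/* For example, in a hypothetical block containing insns I1 I2 I3 I4
   where only I3 does a wild read, this returns I2: the scan in
   dse_step3_scan can safely begin at I2 because everything from I3
   onward is invalidated by the wild read anyway.  If I1 itself were
   the wild read, NULL is returned and the block contributes nothing
   to the gen set.  */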


/* Scan the insns in BB_INFO, from the chosen starting insn to the
   top of the block, in order to build the gen and kill sets for the
   block.  We start either at the last insn in the block or at the
   insn just before the first wild read.  In the latter case we can
   skip the rest of the block because it just does not matter:
   anything that happens there is hidden by the wild read.  */

static void
dse_step3_scan (bool for_spills, basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];
  insn_info_t insn_info;

  if (for_spills)
    /* There are no wild reads in the spill case.  */
    insn_info = bb_info->last_insn;
  else
    insn_info = find_insn_before_first_wild_read (bb_info);

  /* In the spill case, or in the no-spill case when there is no wild
     read in the block, we will need a kill set.  */
  if (insn_info == bb_info->last_insn)
    {
      if (bb_info->kill)
	bitmap_clear (bb_info->kill);
      else
	bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else
    if (bb_info->kill)
      BITMAP_FREE (bb_info->kill);

  while (insn_info)
    {
      /* There may have been code deleted by the dce pass run before
	 this phase.  */
      if (insn_info->insn && INSN_P (insn_info->insn))
	{
	  /* Process the read(s) last.  */
	  if (for_spills)
	    {
	      scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
	      scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
	    }
	  else
	    {
	      scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
	      scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
	    }
	}

      insn_info = insn_info->prev_insn;
    }
}


/* Set the gen set of the exit block, and also of any block with no
   successors that does not have a wild read.  */

static void
dse_step3_exit_block_scan (bb_info_t bb_info)
{
  /* The gen set is all 0's for the exit block except for the
     frame_pointer_group.  */

  if (stores_off_frame_dead_at_return)
    {
      unsigned int i;
      group_info_t group;

      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
	{
	  if (group->process_globally && group->frame_related)
	    bitmap_ior_into (bb_info->gen, group->group_kill);
	}
    }
}
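
/* Rationale sketch: a store to a frame slot that reaches the exit
   block is dead because the frame ceases to exist on return.
   Seeding the exit block's gen set with every frame-related position
   encodes exactly that, letting such stores be deleted even when no
   explicit overwrite follows them.  */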


/* Clear the bits in UNREACHABLE_BLOCKS for every block that is
   backwards reachable from BB, which is the exit block or a block
   with no successors.  The blocks whose bits remain set afterwards
   are the infinite loops or infinite self loops: they can never
   reach a block without successors.  */

static void
mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (bitmap_bit_p (unreachable_blocks, bb->index))
    {
      bitmap_clear_bit (unreachable_blocks, bb->index);
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  mark_reachable_blocks (unreachable_blocks, e->src);
	}
    }
}

/* Build the transfer functions for the function.  */

static void
dse_step3 (bool for_spills)
{
  basic_block bb;
  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  bitmap_ones (unreachable_blocks);

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
	bitmap_clear (bb_info->gen);
      else
	bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);

      if (bb->index == ENTRY_BLOCK)
	;
      else if (bb->index == EXIT_BLOCK)
	dse_step3_exit_block_scan (bb_info);
      else
	dse_step3_scan (for_spills, bb);
      if (EDGE_COUNT (bb->succs) == 0)
	mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
	 sets.  */
      if (bb_info->in)
	BITMAP_FREE (bb_info->in);
      if (bb_info->out)
	BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out
     set to all ones.  This could be expensive, but such loops almost
     never occur in practice; they are, however, common in regression
     tests.  */
  EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
	{
	  bb_info_t bb_info = bb_table[i];
	  if (!all_ones)
	    {
	      unsigned int j;
	      group_info_t group;

	      all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
	      FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
		bitmap_ior_into (all_ones, group->group_kill);
	    }
	  if (!bb_info->out)
	    {
	      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
	      bitmap_copy (bb_info->out, all_ones);
	    }
	}
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
  sbitmap_free (unreachable_blocks);
}
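
/* Why all ones: a block trapped in an infinite loop has no path to
   the exit, so the backward iteration never supplies it with an out
   set.  The seed just built,

       out (bb) = UNION over all groups g of group_kill (g)

   is the top element of this problem's lattice, and starting there
   means the fixed-point iteration in step 4 can only narrow the set,
   never read an uninitialized one.  */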


/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/


/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}

/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */

static bool
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
	bitmap_and_into (src_info->out, dest_info->in);
      else
	{
	  src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
	  bitmap_copy (src_info->out, dest_info->in);
	}
    }
  return true;
}
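
/* In dataflow terms the meet just computed is

       out (src) = INTERSECT over successors s of  in (s)

   where a missing in (s) stands for the all-ones vector; that is why
   the code simply leaves src_info->out untouched when dest_info->in
   has not been computed yet.  */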


/* Propagate the info from the out set to the in set of BB_INDEX's
   basic block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is; none
   of the info can reach the top.  The only thing that reaches the
   top is the gen set, and we just copy that set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created,
   and it is better to wait than to create a set of all ones.

   3) There are both a kill and an out set.  We apply the obvious
   transfer function.
*/

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
	{
	  /* Case 3 above.  */
	  if (bb_info->in)
	    return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
					 bb_info->out, bb_info->kill);
	  else
	    {
	      bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
	      bitmap_ior_and_compl (bb_info->in, bb_info->gen,
				    bb_info->out, bb_info->kill);
	      return true;
	    }
	}
      else
	/* Case 2 above.  */
	return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
	 happens.  */
      if (bb_info->in)
	return false;
      else
	{
	  bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
	  bitmap_copy (bb_info->in, bb_info->gen);
	  return true;
	}
    }
}
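
/* Case 3 computes the standard backward transfer function

       in (bb) = gen (bb) | (out (bb) & ~kill (bb))

   i.e. a store position reaches the top of the block if it is
   generated locally or flows in from below without being killed.
   bitmap_ior_and_compl returns true iff the destination changed,
   which is what drives the iteration to a fixed point.  */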

/* Solve the dataflow equations.  */

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
		      dse_confluence_n, dse_transfer_function,
		      all_blocks, df_get_postorder (DF_BACKWARD),
		      df_get_n_blocks (DF_BACKWARD));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB (bb)
	{
	  bb_info_t bb_info = bb_table[bb->index];

	  df_print_bb_index (bb, dump_file);
	  if (bb_info->in)
	    bitmap_print (dump_file, bb_info->in, "  in:   ", "\n");
	  else
	    fprintf (dump_file, "  in:   *MISSING*\n");
	  if (bb_info->gen)
	    bitmap_print (dump_file, bb_info->gen, "  gen:  ", "\n");
	  else
	    fprintf (dump_file, "  gen:  *MISSING*\n");
	  if (bb_info->kill)
	    bitmap_print (dump_file, bb_info->kill, "  kill: ", "\n");
	  else
	    fprintf (dump_file, "  kill: *MISSING*\n");
	  if (bb_info->out)
	    bitmap_print (dump_file, bb_info->out, "  out:  ", "\n");
	  else
	    fprintf (dump_file, "  out:  *MISSING*\n\n");
	}
    }
}



/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/


static void
dse_step5_nospill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
	{
	  bool deleted = false;
	  if (dump_file && insn_info->insn)
	    {
	      fprintf (dump_file, "starting to process insn %d\n",
		       INSN_UID (insn_info->insn));
	      bitmap_print (dump_file, v, "  v:  ", "\n");
	    }

	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!insn_info->cannot_delete)
	      && (!bitmap_empty_p (v)))
	    {
	      store_info_t store_info = insn_info->store_rec;

	      /* Try to delete the current insn.  */
	      deleted = true;

	      /* Skip the clobbers.  */
	      while (!store_info->is_set)
		store_info = store_info->next;

	      if (store_info->alias_set)
		deleted = false;
	      else
		{
		  HOST_WIDE_INT i;
		  group_info_t group_info
		    = rtx_group_vec[store_info->group_id];

		  for (i = store_info->begin; i < store_info->end; i++)
		    {
		      int index = get_bitmap_index (group_info, i);

		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
		      if (index == 0 || !bitmap_bit_p (v, index))
			{
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    fprintf (dump_file, "failing at i = %d\n", (int)i);
			  deleted = false;
			  break;
			}
		    }
		}
	      if (deleted)
		{
		  if (dbg_cnt (dse)
		      && check_for_inc_dec_1 (insn_info))
		    {
		      delete_insn (insn_info->insn);
		      insn_info->insn = NULL;
		      globally_deleted++;
		    }
		}
	    }
	  /* We do not want to process the local info if the insn was
	     deleted.  For instance, if the insn did a wild read, we
	     no longer need to trash the info.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!deleted))
	    {
	      scan_stores_nospill (insn_info->store_rec, v, NULL);
	      if (insn_info->wild_read)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "wild read\n");
		  bitmap_clear (v);
		}
	      else if (insn_info->read_rec
		       || insn_info->non_frame_wild_read)
		{
		  if (dump_file && !insn_info->non_frame_wild_read)
		    fprintf (dump_file, "regular read\n");
		  else if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "non-frame wild read\n");
		  scan_reads_nospill (insn_info, v, NULL);
		}
	    }

	  insn_info = insn_info->prev_insn;
	}
    }
}


static void
dse_step5_spill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
	{
	  bool deleted = false;
	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!insn_info->cannot_delete)
	      && (!bitmap_empty_p (v)))
	    {
	      /* Try to delete the current insn.  */
	      store_info_t store_info = insn_info->store_rec;
	      deleted = true;

	      while (store_info)
		{
		  if (store_info->alias_set)
		    {
		      int index = get_bitmap_index (clear_alias_group,
						    store_info->alias_set);
		      if (index == 0 || !bitmap_bit_p (v, index))
			{
			  deleted = false;
			  break;
			}
		    }
		  else
		    deleted = false;
		  store_info = store_info->next;
		}
	      if (deleted && dbg_cnt (dse)
		  && check_for_inc_dec_1 (insn_info))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Spill deleting insn %d\n",
			     INSN_UID (insn_info->insn));
		  delete_insn (insn_info->insn);
		  spill_deleted++;
		  insn_info->insn = NULL;
		}
	    }

	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!deleted))
	    {
	      scan_stores_spill (insn_info->store_rec, v, NULL);
	      scan_reads_spill (insn_info->read_rec, v, NULL);
	    }

	  insn_info = insn_info->prev_insn;
	}
    }
}



/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that could not themselves be eliminated.
----------------------------------------------------------------------------*/

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
	{
	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && !insn_info->cannot_delete)
	    {
	      store_info_t s_info = insn_info->store_rec;

	      while (s_info && !s_info->is_set)
		s_info = s_info->next;
	      if (s_info
		  && s_info->redundant_reason
		  && s_info->redundant_reason->insn
		  && INSN_P (s_info->redundant_reason->insn))
		{
		  rtx rinsn = s_info->redundant_reason->insn;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Locally deleting insn %d "
					"because insn %d stores the "
					"same value and couldn't be "
					"eliminated\n",
					INSN_UID (insn_info->insn),
					INSN_UID (rinsn));
		  delete_dead_store_insn (insn_info);
		}
	    }
	  insn_info = insn_info->prev_insn;
	}
    }
}
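
/* Example of what step 6 catches, in hypothetical RTL: if insn A
   stores the constant 0 into (mem:SI (plus (reg fp) (const_int -4)))
   and a later insn B stores 0 into the same slot, but A could not be
   deleted because an intervening read depends on it, then B is
   redundant -- it rewrites a value the location already holds -- and
   B's store_info carries A as its redundant_reason.  */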

/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  free (bb_table);
  rtx_group_table.dispose ();
  rtx_group_vec.release ();
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}


/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  bool did_global = false;

  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      did_global = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  /* For the instance of dse that runs after reload, we make a special
     pass to process the spills.  These are special in that they are
     totally transparent, i.e., there are no aliasing issues that need
     to be considered.  This means that the wild reads that kill
     everything else do not apply here.  */
  if (clear_alias_sets && dse_step2_spill ())
    {
      if (!did_global)
	{
	  df_set_flags (DF_LR_RUN_DCE);
	  df_analyze ();
	}
      did_global = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "doing global spill processing\n");
      dse_step3 (true);
      dse_step4 ();
      dse_step5_spill ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, "
	     "spill deletions = %d\n",
	     locally_deleted, globally_deleted, spill_deleted);
  return 0;
}

static bool
gate_dse1 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse1);
}

static bool
gate_dse2 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse2);
}

struct rtl_opt_pass pass_rtl_dse1 =
{
 {
  RTL_PASS,
  "dse1",                               /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dse1,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE1,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_rtl_dse2 =
{
 {
  RTL_PASS,
  "dse2",                               /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_dse2,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};