xref: /openbsd/gnu/gcc/gcc/basic-block.h (revision 404b540a)
1 /* Define control and data flow tables, and regsets.
2    Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA.  */
21 
22 #ifndef GCC_BASIC_BLOCK_H
23 #define GCC_BASIC_BLOCK_H
24 
25 #include "bitmap.h"
26 #include "sbitmap.h"
27 #include "varray.h"
28 #include "partition.h"
29 #include "hard-reg-set.h"
30 #include "predict.h"
31 #include "vec.h"
32 #include "function.h"
33 
/* Head of register set linked list.  */
typedef bitmap_head regset_head;

/* A pointer to a regset_head.  */
typedef bitmap regset;

/* Allocate a register set with oballoc.  */
#define ALLOC_REG_SET(OBSTACK) BITMAP_ALLOC (OBSTACK)

/* Do any cleanup needed on a regset when it is no longer used.  */
#define FREE_REG_SET(REGSET) BITMAP_FREE (REGSET)

/* Initialize a new regset.  */
#define INIT_REG_SET(HEAD) bitmap_initialize (HEAD, &reg_obstack)

/* Clear a register set by freeing up the linked list.  */
#define CLEAR_REG_SET(HEAD) bitmap_clear (HEAD)

/* Copy a register set to another register set.  */
#define COPY_REG_SET(TO, FROM) bitmap_copy (TO, FROM)

/* Compare two register sets.  */
#define REG_SET_EQUAL_P(A, B) bitmap_equal_p (A, B)

/* `and' a register set with a second register set.  */
#define AND_REG_SET(TO, FROM) bitmap_and_into (TO, FROM)

/* `and' the complement of a register set with a register set.  */
#define AND_COMPL_REG_SET(TO, FROM) bitmap_and_compl_into (TO, FROM)

/* Inclusive or a register set with a second register set.  */
#define IOR_REG_SET(TO, FROM) bitmap_ior_into (TO, FROM)

/* Exclusive or a register set with a second register set.  */
#define XOR_REG_SET(TO, FROM) bitmap_xor_into (TO, FROM)

/* Or into TO the register set FROM1 `and'ed with the complement of FROM2.  */
#define IOR_AND_COMPL_REG_SET(TO, FROM1, FROM2) \
  bitmap_ior_and_compl_into (TO, FROM1, FROM2)

/* Clear a single register in a register set.  */
#define CLEAR_REGNO_REG_SET(HEAD, REG) bitmap_clear_bit (HEAD, REG)

/* Set a single register in a register set.  */
#define SET_REGNO_REG_SET(HEAD, REG) bitmap_set_bit (HEAD, REG)

/* Return true if a register is set in a register set.  */
#define REGNO_REG_SET_P(TO, REG) bitmap_bit_p (TO, REG)

/* Copy the hard registers in a register set to the hard register set.
   TO is cleared first, then every hard register present in FROM is
   added; note TO is named by the macro, not passed by address.  */
extern void reg_set_to_hard_reg_set (HARD_REG_SET *, bitmap);
#define REG_SET_TO_HARD_REG_SET(TO, FROM)				\
do {									\
  CLEAR_HARD_REG_SET (TO);						\
  reg_set_to_hard_reg_set (&TO, FROM);					\
} while (0)

/* Iterator for enumerating the members of a regset; a thin alias for
   the underlying bitmap iterator.  */
typedef bitmap_iterator reg_set_iterator;
92 
/* Loop over all registers in REGSET, starting with MIN, setting REGNUM to the
   register number and executing CODE for all registers that are set.  */
#define EXECUTE_IF_SET_IN_REG_SET(REGSET, MIN, REGNUM, RSI)	\
  EXECUTE_IF_SET_IN_BITMAP (REGSET, MIN, REGNUM, RSI)

/* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
   REGNUM to the register number and executing CODE for all registers that are
   set in the first regset and not set in the second.  */
#define EXECUTE_IF_AND_COMPL_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
  EXECUTE_IF_AND_COMPL_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)

/* Loop over all registers in REGSET1 and REGSET2, starting with MIN, setting
   REGNUM to the register number and executing CODE for all registers that are
   set in both regsets.  */
#define EXECUTE_IF_AND_IN_REG_SET(REGSET1, REGSET2, MIN, REGNUM, RSI) \
  EXECUTE_IF_AND_IN_BITMAP (REGSET1, REGSET2, MIN, REGNUM, RSI)
109 
/* Type we use to hold basic block counters.  Should be at least
   64bit.  Although a counter cannot be negative, we use a signed
   type, because erroneous negative counts can be generated when the
   flow graph is manipulated by various optimizations.  A signed type
   makes those easy to detect.  */
typedef HOST_WIDEST_INT gcov_type;

/* Control flow edge information.  */
struct edge_def GTY(())
{
  /* The two blocks at the ends of the edge.  */
  struct basic_block_def *src;
  struct basic_block_def *dest;

  /* Instructions queued on the edge.  The union is discriminated by
     ir_type (): tag "0" selects the RTL insn chain, tag "1" the
     tree.  */
  union edge_def_insns {
    rtx GTY ((tag ("0"))) r;
    tree GTY ((tag ("1"))) t;
  } GTY ((desc ("ir_type ()"))) insns;

  /* Auxiliary info specific to a pass.  Skipped by the garbage
     collector.  */
  PTR GTY ((skip (""))) aux;

  /* Location of any goto implicit in the edge, during tree-ssa.  */
  source_locus goto_locus;

  int flags;			/* see EDGE_* below  */
  int probability;		/* biased by REG_BR_PROB_BASE */
  gcov_type count;		/* Expected number of executions calculated
				   in profile.c  */

  /* The index number corresponding to this edge in the edge vector
     dest->preds.  */
  unsigned int dest_idx;
};
145 
typedef struct edge_def *edge;
/* Vectors of edges, GC-allocated.  */
DEF_VEC_P(edge);
DEF_VEC_ALLOC_P(edge,gc);

/* Bits for edge_def.flags.  */
#define EDGE_FALLTHRU		1	/* 'Straight line' flow */
#define EDGE_ABNORMAL		2	/* Strange flow, like computed
					   label, or eh */
#define EDGE_ABNORMAL_CALL	4	/* Call with abnormal exit
					   like an exception, or sibcall */
#define EDGE_EH			8	/* Exception throw */
#define EDGE_FAKE		16	/* Not a real edge (profile.c) */
#define EDGE_DFS_BACK		32	/* A backwards edge */
#define EDGE_CAN_FALLTHRU	64	/* Candidate for straight line
					   flow.  */
#define EDGE_IRREDUCIBLE_LOOP	128	/* Part of irreducible loop.  */
#define EDGE_SIBCALL		256	/* Edge from sibcall to exit.  */
#define EDGE_LOOP_EXIT		512	/* Exit of a loop.  */
#define EDGE_TRUE_VALUE		1024	/* Edge taken when controlling
					   predicate is nonzero.  */
#define EDGE_FALSE_VALUE	2048	/* Edge taken when controlling
					   predicate is zero.  */
#define EDGE_EXECUTABLE		4096	/* Edge is executable.  Only
					   valid during SSA-CCP.  */
#define EDGE_CROSSING		8192    /* Edge crosses between hot
					   and cold sections, when we
					   do partitioning.  */
/* Mask covering all of the flag bits above (2^14 - 1).  */
#define EDGE_ALL_FLAGS	       16383

/* Edges with any of these flags do not represent simple fallthrough
   or jump control transfer.  */
#define EDGE_COMPLEX	(EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_EH)
175 
176 /* Counter summary from the last set of coverage counts read by
177    profile.c.  */
178 extern const struct gcov_ctr_summary *profile_info;
179 
180 /* Declared in cfgloop.h.  */
181 struct loop;
182 struct loops;
183 
184 /* Declared in tree-flow.h.  */
185 struct edge_prediction;
186 struct rtl_bb_info;
187 
188 /* A basic block is a sequence of instructions with only entry and
189    only one exit.  If any one of the instructions are executed, they
190    will all be executed, and in sequence from first to last.
191 
192    There may be COND_EXEC instructions in the basic block.  The
193    COND_EXEC *instructions* will be executed -- but if the condition
194    is false the conditionally executed *expressions* will of course
195    not be executed.  We don't consider the conditionally executed
196    expression (which might have side-effects) to be in a separate
197    basic block because the program counter will always be at the same
198    location after the COND_EXEC instruction, regardless of whether the
199    condition is true or not.
200 
201    Basic blocks need not start with a label nor end with a jump insn.
202    For example, a previous basic block may just "conditionally fall"
203    into the succeeding basic block, and the last basic block need not
204    end with a jump insn.  Block 0 is a descendant of the entry block.
205 
206    A basic block beginning with two labels cannot have notes between
207    the labels.
208 
209    Data for jump tables are stored in jump_insns that occur in no
210    basic block even though these insns can follow or precede insns in
211    basic blocks.  */
212 
/* Basic block information indexed by block number.  Blocks are also
   linked into a doubly-linked chain via prev_bb/next_bb (the GTY
   chain_next/chain_prev options walk that chain).  */
struct basic_block_def GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb")))
{
  /* Pointers to the first and last trees of the block.  */
  tree stmt_list;

  /* The edges into and out of the block.  */
  VEC(edge,gc) *preds;
  VEC(edge,gc) *succs;

  /* Auxiliary info specific to a pass.  Skipped by the garbage
     collector.  */
  PTR GTY ((skip (""))) aux;

  /* Innermost loop containing the block.  */
  struct loop * GTY ((skip (""))) loop_father;

  /* The dominance and postdominance information node.  */
  struct et_node * GTY ((skip (""))) dom[2];

  /* Previous and next blocks in the chain.  */
  struct basic_block_def *prev_bb;
  struct basic_block_def *next_bb;

  /* IR-specific data.  The rtl member is valid only when BB_RTL is
     set in flags, as the GTY desc below encodes.  */
  union basic_block_il_dependent {
      struct rtl_bb_info * GTY ((tag ("1"))) rtl;
    } GTY ((desc ("((%1.flags & BB_RTL) != 0)"))) il;

  /* Chain of PHI nodes for this block.  */
  tree phi_nodes;

  /* A list of predictions.  */
  struct edge_prediction *predictions;

  /* Expected number of executions: calculated in profile.c.  */
  gcov_type count;

  /* The index of this block.  */
  int index;

  /* The loop depth of this block.  */
  int loop_depth;

  /* Expected frequency.  Normalized to be in range 0 to BB_FREQ_MAX.  */
  int frequency;

  /* Various flags.  See BB_* below.  */
  int flags;
};
261 
/* RTL-specific basic block data, reached through bb->il.rtl when the
   block carries the BB_RTL flag.  */
struct rtl_bb_info GTY(())
{
  /* The first and last insns of the block.  */
  rtx head_;
  rtx end_;

  /* The registers that are live on entry to this block.  */
  bitmap GTY ((skip (""))) global_live_at_start;

  /* The registers that are live on exit from this block.  */
  bitmap GTY ((skip (""))) global_live_at_end;

  /* In CFGlayout mode points to insn notes/jumptables to be placed just before
     and after the block.   */
  rtx header;
  rtx footer;

  /* This field is used by the bb-reorder and tracer passes.  */
  int visited;
};

typedef struct basic_block_def *basic_block;

/* Vectors of basic blocks, in both GC and heap flavors.  */
DEF_VEC_P(basic_block);
DEF_VEC_ALLOC_P(basic_block,gc);
DEF_VEC_ALLOC_P(basic_block,heap);

/* Upper bound for the normalized basic_block_def.frequency field.  */
#define BB_FREQ_MAX 10000
290 
/* Masks for basic_block.flags.

   BB_HOT_PARTITION and BB_COLD_PARTITION should be preserved throughout
   the compilation, so they are never cleared.

   All other flags may be cleared by clear_bb_flags().  It is generally
   a bad idea to rely on any flags being up-to-date.  */

enum bb_flags
{

  /* Set if insns in BB are modified.  Used for updating liveness info.  */
  BB_DIRTY = 1,

  /* Only set on blocks that have just been created by create_bb.  */
  BB_NEW = 2,

  /* Set by find_unreachable_blocks.  Do not rely on this being set in any
     pass.  */
  BB_REACHABLE = 4,

  /* Set for blocks in an irreducible loop by loop analysis.  */
  BB_IRREDUCIBLE_LOOP = 8,

  /* Set on blocks that may actually not be single-entry single-exit block.  */
  BB_SUPERBLOCK = 16,

  /* Set on basic blocks that the scheduler should not touch.  This is used
     by SMS to prevent other schedulers from messing with the loop schedule.  */
  BB_DISABLE_SCHEDULE = 32,

  /* Set on blocks that should be put in a hot section.  */
  BB_HOT_PARTITION = 64,

  /* Set on blocks that should be put in a cold section.  */
  BB_COLD_PARTITION = 128,

  /* Set on block that was duplicated.  */
  BB_DUPLICATED = 256,

  /* NOTE(review): bit 512 is unassigned here — presumably a retired
     flag; confirm history before reusing it.  */

  /* Set on blocks that are in RTL format.  */
  BB_RTL = 1024,

  /* Set on blocks that are forwarder blocks.
     Only used in cfgcleanup.c.  */
  BB_FORWARDER_BLOCK = 2048,

  /* Set on blocks that cannot be threaded through.
     Only used in cfgcleanup.c.  */
  BB_NONTHREADABLE_BLOCK = 4096
};
342 
/* Dummy flag for convenience in the hot/cold partitioning code.  */
#define BB_UNPARTITIONED	0

/* Partitions, to be used when partitioning hot and cold basic blocks into
   separate sections.  */
/* Extract BB's partition bits.  */
#define BB_PARTITION(bb) ((bb)->flags & (BB_HOT_PARTITION|BB_COLD_PARTITION))
/* Replace BB's partition bits with PART, leaving all other flags
   untouched.  BB is evaluated only once (via the bb_ local).  */
#define BB_SET_PARTITION(bb, part) do {					\
  basic_block bb_ = (bb);						\
  bb_->flags = ((bb_->flags & ~(BB_HOT_PARTITION|BB_COLD_PARTITION))	\
		| (part));						\
} while (0)

/* Copy SRCBB's partition bits to DSTBB.  */
#define BB_COPY_PARTITION(dstbb, srcbb) \
  BB_SET_PARTITION (dstbb, BB_PARTITION (srcbb))
357 
/* A structure to group all the per-function control flow graph data.
   The x_* prefixing is necessary because otherwise references to the
   fields of this struct are interpreted as the defines for backward
   source compatibility following the definition of this struct.  */
struct control_flow_graph GTY(())
{
  /* Block pointers for the exit and entry of a function.
     These are always the head and tail of the basic block list.  */
  basic_block x_entry_block_ptr;
  basic_block x_exit_block_ptr;

  /* Index by basic block number, get basic block struct info.  */
  VEC(basic_block,gc) *x_basic_block_info;

  /* Number of basic blocks in this flow graph.  */
  int x_n_basic_blocks;

  /* Number of edges in this flow graph.  */
  int x_n_edges;

  /* The first free basic block number.  */
  int x_last_basic_block;

  /* Mapping of labels to their associated blocks.  At present
     only used for the tree CFG.  */
  VEC(basic_block,gc) *x_label_to_block_map;

  /* Whether the profile for this function is absent, guessed by
     heuristics, or read from feedback data.  */
  enum profile_status {
    PROFILE_ABSENT,
    PROFILE_GUESSED,
    PROFILE_READ
  } x_profile_status;
};
391 
/* Defines for accessing the fields of the CFG structure for function FN.  */
#define ENTRY_BLOCK_PTR_FOR_FUNCTION(FN)     ((FN)->cfg->x_entry_block_ptr)
#define EXIT_BLOCK_PTR_FOR_FUNCTION(FN)	     ((FN)->cfg->x_exit_block_ptr)
#define basic_block_info_for_function(FN)    ((FN)->cfg->x_basic_block_info)
#define n_basic_blocks_for_function(FN)	     ((FN)->cfg->x_n_basic_blocks)
#define n_edges_for_function(FN)	     ((FN)->cfg->x_n_edges)
#define last_basic_block_for_function(FN)    ((FN)->cfg->x_last_basic_block)
#define label_to_block_map_for_function(FN)  ((FN)->cfg->x_label_to_block_map)

/* Basic block number N of function FN.  */
#define BASIC_BLOCK_FOR_FUNCTION(FN,N) \
  (VEC_index (basic_block, basic_block_info_for_function(FN), (N)))

/* Defines for textual backward source compatibility.  Each expands to
   the corresponding field of the current function (cfun).  */
#define ENTRY_BLOCK_PTR		(cfun->cfg->x_entry_block_ptr)
#define EXIT_BLOCK_PTR		(cfun->cfg->x_exit_block_ptr)
#define basic_block_info	(cfun->cfg->x_basic_block_info)
#define n_basic_blocks		(cfun->cfg->x_n_basic_blocks)
#define n_edges			(cfun->cfg->x_n_edges)
#define last_basic_block	(cfun->cfg->x_last_basic_block)
#define label_to_block_map	(cfun->cfg->x_label_to_block_map)
#define profile_status		(cfun->cfg->x_profile_status)

/* Get/set basic block number N of the current function.  */
#define BASIC_BLOCK(N)		(VEC_index (basic_block, basic_block_info, (N)))
#define SET_BASIC_BLOCK(N,BB)	(VEC_replace (basic_block, basic_block_info, (N), (BB)))
416 
/* For iterating over basic blocks.  Walks from FROM toward TO
   (exclusive) following the DIR (next_bb or prev_bb) links.  */
#define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
  for (BB = FROM; BB != TO; BB = BB->DIR)

/* Iterate over every real basic block of FN; the entry and exit
   blocks are excluded (iteration starts after the entry block and
   stops at the exit block).  */
#define FOR_EACH_BB_FN(BB, FN) \
  FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, (FN)->cfg->x_exit_block_ptr, next_bb)

#define FOR_EACH_BB(BB) FOR_EACH_BB_FN (BB, cfun)

/* As FOR_EACH_BB_FN, but in reverse chain order.  */
#define FOR_EACH_BB_REVERSE_FN(BB, FN) \
  FOR_BB_BETWEEN (BB, (FN)->cfg->x_exit_block_ptr->prev_bb, (FN)->cfg->x_entry_block_ptr, prev_bb)

#define FOR_EACH_BB_REVERSE(BB) FOR_EACH_BB_REVERSE_FN(BB, cfun)

/* For iterating over insns in basic block, from BB_HEAD through
   BB_END inclusive.  */
#define FOR_BB_INSNS(BB, INSN)			\
  for ((INSN) = BB_HEAD (BB);			\
       (INSN) && (INSN) != NEXT_INSN (BB_END (BB));	\
       (INSN) = NEXT_INSN (INSN))

/* As FOR_BB_INSNS, but from BB_END back to BB_HEAD.  */
#define FOR_BB_INSNS_REVERSE(BB, INSN)		\
  for ((INSN) = BB_END (BB);			\
       (INSN) && (INSN) != PREV_INSN (BB_HEAD (BB));	\
       (INSN) = PREV_INSN (INSN))

/* Cycles through _all_ basic blocks, even the fake ones (entry and
   exit block).  */

#define FOR_ALL_BB(BB) \
  for (BB = ENTRY_BLOCK_PTR; BB; BB = BB->next_bb)

#define FOR_ALL_BB_FN(BB, FN) \
  for (BB = ENTRY_BLOCK_PTR_FOR_FUNCTION (FN); BB; BB = BB->next_bb)
450 
451 extern bitmap_obstack reg_obstack;
452 
/* Indexed by n, gives number of basic block that  (REG n) is used in.
   If the value is REG_BLOCK_GLOBAL (-2),
   it means (REG n) is used in more than one basic block.
   REG_BLOCK_UNKNOWN (-1) means it hasn't been seen yet so we don't know.
   This information remains valid for the rest of the compilation
   of the current function; it is used to control register allocation.  */

#define REG_BLOCK_UNKNOWN -1
#define REG_BLOCK_GLOBAL -2

#define REG_BASIC_BLOCK(N)				\
  (VEC_index (reg_info_p, reg_n_info, N)->basic_block)

/* Stuff for recording basic block info.  */

/* First and last insn of block B.  These go through il.rtl, so they
   are only meaningful for blocks in RTL form (BB_RTL).  */
#define BB_HEAD(B)      (B)->il.rtl->head_
#define BB_END(B)       (B)->il.rtl->end_

/* Special block numbers [markers] for entry and exit.  */
#define ENTRY_BLOCK (0)
#define EXIT_BLOCK (1)

/* The two blocks that are always in the cfg.  */
#define NUM_FIXED_BLOCKS (2)


/* Index of the basic block containing INSN.  */
#define BLOCK_NUM(INSN)	      (BLOCK_FOR_INSN (INSN)->index + 0)
#define set_block_for_insn(INSN, BB)  (BLOCK_FOR_INSN (INSN) = BB)
481 
482 extern void compute_bb_for_insn (void);
483 extern unsigned int free_bb_for_insn (void);
484 extern void update_bb_for_insn (basic_block);
485 
486 extern void free_basic_block_vars (void);
487 
488 extern void insert_insn_on_edge (rtx, edge);
489 
490 extern void commit_edge_insertions (void);
491 extern void commit_edge_insertions_watch_calls (void);
492 
493 extern void remove_fake_edges (void);
494 extern void remove_fake_exit_edges (void);
495 extern void add_noreturn_fake_exit_edges (void);
496 extern void connect_infinite_loops_to_exit (void);
497 extern edge unchecked_make_edge (basic_block, basic_block, int);
498 extern edge cached_make_edge (sbitmap, basic_block, basic_block, int);
499 extern edge make_edge (basic_block, basic_block, int);
500 extern edge make_single_succ_edge (basic_block, basic_block, int);
501 extern void remove_edge (edge);
502 extern void redirect_edge_succ (edge, basic_block);
503 extern edge redirect_edge_succ_nodup (edge, basic_block);
504 extern void redirect_edge_pred (edge, basic_block);
505 extern basic_block create_basic_block_structure (rtx, rtx, rtx, basic_block);
506 extern void clear_bb_flags (void);
507 extern int post_order_compute (int *, bool);
508 extern int pre_and_rev_post_order_compute (int *, int *, bool);
509 extern int dfs_enumerate_from (basic_block, int,
510 			       bool (*)(basic_block, void *),
511 			       basic_block *, int, void *);
512 extern void compute_dominance_frontiers (bitmap *);
513 extern void dump_bb_info (basic_block, bool, bool, int, const char *, FILE *);
514 extern void dump_edge_info (FILE *, edge, int);
515 extern void brief_dump_cfg (FILE *);
516 extern void clear_edges (void);
517 extern rtx first_insn_after_basic_block_note (basic_block);
518 extern void scale_bbs_frequencies_int (basic_block *, int, int, int);
519 extern void scale_bbs_frequencies_gcov_type (basic_block *, int, gcov_type,
520 					     gcov_type);
521 
/* Structure to group all of the information to process IF-THEN and
   IF-THEN-ELSE blocks for the conditional execution support.  This
   needs to be in a public file in case the IFCVT macros call
   functions passing the ce_if_block data structure.  */

typedef struct ce_if_block
{
  basic_block test_bb;			/* First test block.  */
  basic_block then_bb;			/* THEN block.  */
  basic_block else_bb;			/* ELSE block or NULL.  */
  basic_block join_bb;			/* Join THEN/ELSE blocks.  */
  basic_block last_test_bb;		/* Last bb to hold && or || tests.  */
  int num_multiple_test_blocks;		/* # of && and || basic blocks.  */
  int num_and_and_blocks;		/* # of && blocks.  */
  int num_or_or_blocks;			/* # of || blocks.  */
  int num_multiple_test_insns;		/* # of insns in && and || blocks.  */
  int and_and_p;			/* Nonzero if complex test is &&.  */
  int num_then_insns;			/* # of insns in THEN block.  */
  int num_else_insns;			/* # of insns in ELSE block.  */
  int pass;				/* Pass number.  */

#ifdef IFCVT_EXTRA_FIELDS
  IFCVT_EXTRA_FIELDS			/* Any machine dependent fields.  */
#endif

} ce_if_block_t;
548 
/* This structure maintains an edge list vector.  */
struct edge_list
{
  /* Number of basic blocks.  */
  int num_blocks;
  /* Number of entries in the index_to_edge array.  */
  int num_edges;
  /* Array mapping edge indices to edges; see INDEX_EDGE et al.  */
  edge *index_to_edge;
};
556 
/* The base value for branch probability notes and edge probabilities.  */
#define REG_BR_PROB_BASE  10000

/* This is the value which indicates no edge is present.  */
#define EDGE_INDEX_NO_EDGE	-1

/* EDGE_INDEX returns an integer index for an edge, or EDGE_INDEX_NO_EDGE
   if there is no edge between the 2 basic blocks.  */
#define EDGE_INDEX(el, pred, succ) (find_edge_index ((el), (pred), (succ)))

/* INDEX_EDGE_PRED_BB and INDEX_EDGE_SUCC_BB return a pointer to the basic
   block which is either the pred or succ end of the indexed edge.  */
#define INDEX_EDGE_PRED_BB(el, index)	((el)->index_to_edge[(index)]->src)
#define INDEX_EDGE_SUCC_BB(el, index)	((el)->index_to_edge[(index)]->dest)

/* INDEX_EDGE returns a pointer to the edge.  */
#define INDEX_EDGE(el, index)           ((el)->index_to_edge[(index)])

/* Number of edges in the compressed edge list.  */
#define NUM_EDGES(el)			((el)->num_edges)

/* BB is assumed to contain conditional jump.  Return the fallthru edge.
   Note: BB is evaluated more than once.  */
#define FALLTHRU_EDGE(bb)		(EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
					 ? EDGE_SUCC ((bb), 0) : EDGE_SUCC ((bb), 1))

/* BB is assumed to contain conditional jump.  Return the branch edge.
   Note: BB is evaluated more than once.  */
#define BRANCH_EDGE(bb)			(EDGE_SUCC ((bb), 0)->flags & EDGE_FALLTHRU \
					 ? EDGE_SUCC ((bb), 1) : EDGE_SUCC ((bb), 0))

/* Return expected execution frequency of the edge E: the source
   block's frequency scaled by the edge probability, with rounding.
   Note: E is evaluated more than once.  */
#define EDGE_FREQUENCY(e)		(((e)->src->frequency \
					  * (e)->probability \
					  + REG_BR_PROB_BASE / 2) \
					 / REG_BR_PROB_BASE)

/* Return nonzero if edge is critical (source has multiple successors
   and destination has multiple predecessors).  */
#define EDGE_CRITICAL_P(e)		(EDGE_COUNT ((e)->src->succs) >= 2 \
					 && EDGE_COUNT ((e)->dest->preds) >= 2)

/* Accessors for edge vectors and for a block's pred/succ edges.  */
#define EDGE_COUNT(ev)			VEC_length (edge, (ev))
#define EDGE_I(ev,i)			VEC_index  (edge, (ev), (i))
#define EDGE_PRED(bb,i)			VEC_index  (edge, (bb)->preds, (i))
#define EDGE_SUCC(bb,i)			VEC_index  (edge, (bb)->succs, (i))
600 
601 /* Returns true if BB has precisely one successor.  */
602 
603 static inline bool
single_succ_p(basic_block bb)604 single_succ_p (basic_block bb)
605 {
606   return EDGE_COUNT (bb->succs) == 1;
607 }
608 
609 /* Returns true if BB has precisely one predecessor.  */
610 
611 static inline bool
single_pred_p(basic_block bb)612 single_pred_p (basic_block bb)
613 {
614   return EDGE_COUNT (bb->preds) == 1;
615 }
616 
617 /* Returns the single successor edge of basic block BB.  Aborts if
618    BB does not have exactly one successor.  */
619 
620 static inline edge
single_succ_edge(basic_block bb)621 single_succ_edge (basic_block bb)
622 {
623   gcc_assert (single_succ_p (bb));
624   return EDGE_SUCC (bb, 0);
625 }
626 
627 /* Returns the single predecessor edge of basic block BB.  Aborts
628    if BB does not have exactly one predecessor.  */
629 
630 static inline edge
single_pred_edge(basic_block bb)631 single_pred_edge (basic_block bb)
632 {
633   gcc_assert (single_pred_p (bb));
634   return EDGE_PRED (bb, 0);
635 }
636 
637 /* Returns the single successor block of basic block BB.  Aborts
638    if BB does not have exactly one successor.  */
639 
640 static inline basic_block
single_succ(basic_block bb)641 single_succ (basic_block bb)
642 {
643   return single_succ_edge (bb)->dest;
644 }
645 
646 /* Returns the single predecessor block of basic block BB.  Aborts
647    if BB does not have exactly one predecessor.*/
648 
649 static inline basic_block
single_pred(basic_block bb)650 single_pred (basic_block bb)
651 {
652   return single_pred_edge (bb)->src;
653 }
654 
/* Iterator object for edges.  Holds a position plus the address of
   the variable containing the edge vector — presumably indirect so
   the iterator stays valid if the vector is reallocated; see
   ei_container.  */

typedef struct {
  /* Current position in the vector.  */
  unsigned index;
  /* Address of the variable holding the edge vector.  */
  VEC(edge,gc) **container;
} edge_iterator;
661 
/* Return the edge vector that iterator I walks over.  Asserts that
   the iterator has been bound to a container.  */
static inline VEC(edge,gc) *
ei_container (edge_iterator i)
{
  gcc_assert (i.container);
  return *i.container;
}
668 
669 #define ei_start(iter) ei_start_1 (&(iter))
670 #define ei_last(iter) ei_last_1 (&(iter))
671 
672 /* Return an iterator pointing to the start of an edge vector.  */
673 static inline edge_iterator
ei_start_1(VEC (edge,gc)** ev)674 ei_start_1 (VEC(edge,gc) **ev)
675 {
676   edge_iterator i;
677 
678   i.index = 0;
679   i.container = ev;
680 
681   return i;
682 }
683 
/* Return an iterator pointing to the last element of an edge
   vector.  NOTE(review): if the vector is empty, EDGE_COUNT is 0 and
   the unsigned index wraps to UINT_MAX; callers appear to be expected
   not to call this on an empty vector — confirm.  */
static inline edge_iterator
ei_last_1 (VEC(edge,gc) **ev)
{
  edge_iterator i;

  i.index = EDGE_COUNT (*ev) - 1;
  i.container = ev;

  return i;
}
696 
697 /* Is the iterator `i' at the end of the sequence?  */
698 static inline bool
ei_end_p(edge_iterator i)699 ei_end_p (edge_iterator i)
700 {
701   return (i.index == EDGE_COUNT (ei_container (i)));
702 }
703 
704 /* Is the iterator `i' at one position before the end of the
705    sequence?  */
706 static inline bool
ei_one_before_end_p(edge_iterator i)707 ei_one_before_end_p (edge_iterator i)
708 {
709   return (i.index + 1 == EDGE_COUNT (ei_container (i)));
710 }
711 
712 /* Advance the iterator to the next element.  */
713 static inline void
ei_next(edge_iterator * i)714 ei_next (edge_iterator *i)
715 {
716   gcc_assert (i->index < EDGE_COUNT (ei_container (*i)));
717   i->index++;
718 }
719 
720 /* Move the iterator to the previous element.  */
721 static inline void
ei_prev(edge_iterator * i)722 ei_prev (edge_iterator *i)
723 {
724   gcc_assert (i->index > 0);
725   i->index--;
726 }
727 
728 /* Return the edge pointed to by the iterator `i'.  */
729 static inline edge
ei_edge(edge_iterator i)730 ei_edge (edge_iterator i)
731 {
732   return EDGE_I (ei_container (i), i.index);
733 }
734 
735 /* Return an edge pointed to by the iterator.  Do it safely so that
736    NULL is returned when the iterator is pointing at the end of the
737    sequence.  */
738 static inline edge
ei_safe_edge(edge_iterator i)739 ei_safe_edge (edge_iterator i)
740 {
741   return !ei_end_p (i) ? ei_edge (i) : NULL;
742 }
743 
744 /* Return 1 if we should continue to iterate.  Return 0 otherwise.
745    *Edge P is set to the next edge if we are to continue to iterate
746    and NULL otherwise.  */
747 
748 static inline bool
ei_cond(edge_iterator ei,edge * p)749 ei_cond (edge_iterator ei, edge *p)
750 {
751   if (!ei_end_p (ei))
752     {
753       *p = ei_edge (ei);
754       return 1;
755     }
756   else
757     {
758       *p = NULL;
759       return 0;
760     }
761 }
762 
763 /* This macro serves as a convenient way to iterate each edge in a
764    vector of predecessor or successor edges.  It must not be used when
765    an element might be removed during the traversal, otherwise
766    elements will be missed.  Instead, use a for-loop like that shown
767    in the following pseudo-code:
768 
769    FOR (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
770      {
771 	IF (e != taken_edge)
772 	  remove_edge (e);
773 	ELSE
774 	  ei_next (&ei);
775      }
776 */
777 
/* Iterate over the edges in vector EDGE_VEC, binding EDGE to each
   element in turn via iterator ITER.  Do not remove edges inside the
   body; see the caveat and recommended pattern described above.  */
#define FOR_EACH_EDGE(EDGE,ITER,EDGE_VEC)	\
  for ((ITER) = ei_start ((EDGE_VEC));		\
       ei_cond ((ITER), &(EDGE));		\
       ei_next (&(ITER)))
782 
783 struct edge_list * create_edge_list (void);
784 void free_edge_list (struct edge_list *);
785 void print_edge_list (FILE *, struct edge_list *);
786 void verify_edge_list (FILE *, struct edge_list *);
787 int find_edge_index (struct edge_list *, basic_block, basic_block);
788 edge find_edge (basic_block, basic_block);
789 
790 
791 enum update_life_extent
792 {
793   UPDATE_LIFE_LOCAL = 0,
794   UPDATE_LIFE_GLOBAL = 1,
795   UPDATE_LIFE_GLOBAL_RM_NOTES = 2
796 };
797 
/* Flags for life_analysis and update_life_info.  */

#define PROP_DEATH_NOTES	1	/* Create DEAD and UNUSED notes.  */
#define PROP_LOG_LINKS		2	/* Create LOG_LINKS.  */
#define PROP_REG_INFO		4	/* Update regs_ever_live et al.  */
#define PROP_KILL_DEAD_CODE	8	/* Remove dead code.  */
#define PROP_SCAN_DEAD_CODE	16	/* Scan for dead code.  */
#define PROP_ALLOW_CFG_CHANGES	32	/* Allow the CFG to be changed
					   by dead code removal.  */
#define PROP_AUTOINC		64	/* Create autoinc mem references.  */
#define PROP_SCAN_DEAD_STORES	128	/* Scan for dead code.  */
#define PROP_ASM_SCAN		256	/* Internal flag used within flow.c
					   to flag analysis of asms.  */
/* NOTE(review): bit 512 is unassigned here — presumably a retired
   flag; confirm before reusing it.  */
#define PROP_DEAD_INSN		1024	/* Internal flag used within flow.c
					   to flag analysis of dead insn.  */
#define PROP_POST_REGSTACK	2048	/* We run after reg-stack and need
					   to preserve REG_DEAD notes for
					   stack regs.  */
/* Full set of propagation flags used by the main life analysis.  */
#define PROP_FINAL		(PROP_DEATH_NOTES | PROP_LOG_LINKS  \
				 | PROP_REG_INFO | PROP_KILL_DEAD_CODE  \
				 | PROP_SCAN_DEAD_CODE | PROP_AUTOINC \
				 | PROP_ALLOW_CFG_CHANGES \
				 | PROP_SCAN_DEAD_STORES)
/* Reduced set safe to use after register allocation/reload.  */
#define PROP_POSTRELOAD		(PROP_DEATH_NOTES  \
				 | PROP_KILL_DEAD_CODE  \
				 | PROP_SCAN_DEAD_CODE \
				 | PROP_SCAN_DEAD_STORES)

/* CLEANUP_* flags: options for CFG cleanup.  TODO(review): confirm
   the consumer (likely cfgcleanup.c, per the BB_* comments above).  */
#define CLEANUP_EXPENSIVE	1	/* Do relatively expensive optimizations
					   except for edge forwarding */
#define CLEANUP_CROSSJUMP	2	/* Do crossjumping.  */
#define CLEANUP_POST_REGSTACK	4	/* We run after reg-stack and need
					   to care REG_DEAD notes.  */
#define CLEANUP_UPDATE_LIFE	8	/* Keep life information up to date.  */
#define CLEANUP_THREADING	16	/* Do jump threading.  */
#define CLEANUP_NO_INSN_DEL	32	/* Do not try to delete trivially dead
					   insns.  */
#define CLEANUP_CFGLAYOUT	64	/* Do cleanup in cfglayout mode.  */
#define CLEANUP_LOG_LINKS	128	/* Update log links.  */

/* The following are ORed in on top of the CLEANUP* flags in calls to
   struct_equiv_block_eq.  */
#define STRUCT_EQUIV_START	256	 /* Initializes the search range.  */
#define STRUCT_EQUIV_RERUN	512	/* Rerun to find register use in
					   found equivalence.  */
#define STRUCT_EQUIV_FINAL	1024	/* Make any changes necessary to get
					   actual equivalence.  */
#define STRUCT_EQUIV_NEED_FULL_BLOCK 2048 /* struct_equiv_block_eq is required
					     to match only full blocks  */
#define STRUCT_EQUIV_MATCH_JUMPS 4096	/* Also include the jumps at the end of the block in the comparison.  */
848 
/* Liveness analysis entry points.  NOTE(review): presumably implemented in
   flow.c, like the section marked "In flow.c" below -- confirm.  */
extern void life_analysis (int);
extern int update_life_info (sbitmap, enum update_life_extent, int);
extern int update_life_info_in_dirty_blocks (enum update_life_extent, int);
extern int count_or_remove_death_notes (sbitmap, int);
extern int propagate_block (basic_block, regset, regset, regset, int);

/* Opaque per-block propagation state; only an incomplete type here, so
   clients can hold pointers without seeing the representation.  */
struct propagate_block_info;
extern rtx propagate_one_insn (struct propagate_block_info *, rtx);
extern struct propagate_block_info *init_propagate_block_info
 (basic_block, regset, regset, regset, int);
extern void free_propagate_block_info (struct propagate_block_info *);
860 
/* In lcm.c (lazy code motion).  */
extern struct edge_list *pre_edge_lcm (int, sbitmap *, sbitmap *,
				       sbitmap *, sbitmap *, sbitmap **,
				       sbitmap **);
extern struct edge_list *pre_edge_rev_lcm (int, sbitmap *,
					   sbitmap *, sbitmap *,
					   sbitmap *, sbitmap *,
					   sbitmap **);
extern void compute_available (sbitmap *, sbitmap *, sbitmap *, sbitmap *);
/* In predict.c -- branch prediction / profile estimation interfaces
   (see enum br_predictor and enum prediction in predict.h).  */
extern void expected_value_to_br_prob (void);
extern bool maybe_hot_bb_p (basic_block);
extern bool probably_cold_bb_p (basic_block);
extern bool probably_never_executed_bb_p (basic_block);
extern bool tree_predicted_by_p (basic_block, enum br_predictor);
extern bool rtl_predicted_by_p (basic_block, enum br_predictor);
extern void tree_predict_edge (edge, enum br_predictor, int);
extern void rtl_predict_edge (edge, enum br_predictor, int);
extern void predict_edge_def (edge, enum br_predictor, enum prediction);
extern void guess_outgoing_edge_probabilities (basic_block);
extern void remove_predictions_associated_with_edge (edge);
extern bool edge_probability_reliable_p (edge);
extern bool br_prob_note_reliable_p (rtx);
885 
/* In flow.c */
extern void init_flow (void);
extern void debug_bb (basic_block);
extern basic_block debug_bb_n (int);
extern void dump_regset (regset, FILE *);
extern void debug_regset (regset);
extern void allocate_reg_life_data (void);
extern void expunge_block (basic_block);
extern void link_block (basic_block, basic_block);
extern void unlink_block (basic_block);
extern void compact_blocks (void);
extern basic_block alloc_block (void);
extern void find_unreachable_blocks (void);
extern int delete_noop_moves (void);
extern basic_block force_nonfallthru (edge);
extern rtx block_label (basic_block);
extern bool forwarder_block_p (basic_block);
extern bool purge_all_dead_edges (void);
extern bool purge_dead_edges (basic_block);
extern void find_many_sub_basic_blocks (sbitmap);
extern void rtl_make_eh_edge (sbitmap, basic_block, rtx);
extern bool can_fallthru (basic_block, basic_block);
extern bool could_fall_through (basic_block, basic_block);
extern void flow_nodes_print (const char *, const sbitmap, FILE *);
extern void flow_edge_list_print (const char *, const edge *, int, FILE *);
/* Allocation and clearing of the per-block and per-edge AUX fields.  */
extern void alloc_aux_for_block (basic_block, int);
extern void alloc_aux_for_blocks (int);
extern void clear_aux_for_blocks (void);
extern void free_aux_for_blocks (void);
extern void alloc_aux_for_edge (edge, int);
extern void alloc_aux_for_edges (int);
extern void clear_aux_for_edges (void);
extern void free_aux_for_edges (void);
extern void find_basic_blocks (rtx);
extern bool cleanup_cfg (int);
extern bool delete_unreachable_blocks (void);
extern bool merge_seq_blocks (void);
923 
/* A register-conflict graph.  The representation (struct conflict_graph_def)
   is private to the implementation; clients only handle the pointer.  */
typedef struct conflict_graph_def *conflict_graph;

/* Callback function when enumerating conflicts.  The arguments are
   the smaller and larger regno in the conflict.  Returns zero if
   enumeration is to continue, nonzero to halt enumeration.  */
typedef int (*conflict_graph_enum_fn) (int, int, void *);


/* Prototypes of operations on conflict graphs.  */

extern conflict_graph conflict_graph_new
 (int);
extern void conflict_graph_delete (conflict_graph);
extern int conflict_graph_add (conflict_graph, int, int);
extern int conflict_graph_conflict_p (conflict_graph, int, int);
extern void conflict_graph_enum (conflict_graph, int, conflict_graph_enum_fn,
				 void *);
extern void conflict_graph_merge_regs (conflict_graph, int, int);
extern void conflict_graph_print (conflict_graph, FILE*);
extern bool mark_dfs_back_edges (void);
extern void set_edge_can_fallthru_flag (void);
extern void update_br_prob_note (basic_block);
extern void fixup_abnormal_edges (void);
extern bool inside_basic_block_p (rtx);
extern bool control_flow_insn_p (rtx);
extern rtx get_last_bb_insn (basic_block);
950 
/* In bb-reorder.c */
extern void reorder_basic_blocks (unsigned int);

/* In dominance.c */

/* Which dominance relation a query refers to.  */
enum cdi_direction
{
  CDI_DOMINATORS,	/* Dominators.  */
  CDI_POST_DOMINATORS	/* Post-dominators.  */
};
961 
962 enum dom_state
963 {
964   DOM_NONE,		/* Not computed at all.  */
965   DOM_NO_FAST_QUERY,	/* The data is OK, but the fast query data are not usable.  */
966   DOM_OK		/* Everything is ok.  */
967 };
968 
969 extern enum dom_state dom_computed[2];
970 
971 extern bool dom_info_available_p (enum cdi_direction);
972 extern void calculate_dominance_info (enum cdi_direction);
973 extern void free_dominance_info (enum cdi_direction);
974 extern basic_block nearest_common_dominator (enum cdi_direction,
975 					     basic_block, basic_block);
976 extern basic_block nearest_common_dominator_for_set (enum cdi_direction,
977 						     bitmap);
978 extern void set_immediate_dominator (enum cdi_direction, basic_block,
979 				     basic_block);
980 extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
981 extern bool dominated_by_p (enum cdi_direction, basic_block, basic_block);
982 extern int get_dominated_by (enum cdi_direction, basic_block, basic_block **);
983 extern unsigned get_dominated_by_region (enum cdi_direction, basic_block *,
984 					 unsigned, basic_block *);
985 extern void add_to_dominance_info (enum cdi_direction, basic_block);
986 extern void delete_from_dominance_info (enum cdi_direction, basic_block);
987 basic_block recount_dominator (enum cdi_direction, basic_block);
988 extern void redirect_immediate_dominators (enum cdi_direction, basic_block,
989 					   basic_block);
990 extern void iterate_fix_dominators (enum cdi_direction, basic_block *, int);
991 extern void verify_dominators (enum cdi_direction);
992 extern basic_block first_dom_son (enum cdi_direction, basic_block);
993 extern basic_block next_dom_son (enum cdi_direction, basic_block);
994 unsigned bb_dom_dfs_in (enum cdi_direction, basic_block);
995 unsigned bb_dom_dfs_out (enum cdi_direction, basic_block);
996 
extern edge try_redirect_by_replacing_jump (edge, basic_block, bool);
extern void break_superblocks (void);
extern void check_bb_profile (basic_block, FILE *);
extern void update_bb_profile_for_threading (basic_block, int, gcov_type, edge);
extern void init_rtl_bb_info (basic_block);

/* Mapping between blocks and the blocks they were copied from
   (set_bb_original/set_bb_copy record it; the get_* functions query it).  */
extern void initialize_original_copy_tables (void);
extern void free_original_copy_tables (void);
extern void set_bb_original (basic_block, basic_block);
extern basic_block get_bb_original (basic_block);
extern void set_bb_copy (basic_block, basic_block);
extern basic_block get_bb_copy (basic_block);

extern rtx insert_insn_end_bb_new (rtx, basic_block);
1011 
1012 #include "cfghooks.h"
1013 
/* In struct-equiv.c */

/* Constants used to size arrays in struct equiv_info (currently only one).
   When these limits are exceeded, struct_equiv returns zero.
   The maximum number of pseudo registers that are different in the two blocks,
   but appear in equivalent places and are dead at the end (or where one of
   a pair is dead at the end).  */
#define STRUCT_EQUIV_MAX_LOCAL 16
/* The maximum number of references to an input register that struct_equiv
   can handle.
   NOTE(review): no macro follows this comment; the constant it described
   appears to have been removed.  Confirm and delete the comment if so.  */
1025 /* Structure used to track state during struct_equiv that can be rolled
1026    back when we find we can't match an insn, or if we want to match part
1027    of it in a different way.
1028    This information pertains to the pair of partial blocks that has been
1029    matched so far.  Since this pair is structurally equivalent, this is
1030    conceptually just one partial block expressed in two potentially
1031    different ways.  */
struct struct_equiv_checkpoint
{
  int ninsns;       /* Number of insns matched so far.  */
  int local_count;  /* Number of block-local registers.  */
  int input_count;  /* Number of inputs to the block.  */

  /* X_START and Y_START are the first insns (in insn stream order)
     of the partial blocks that have been considered for matching so far.
     Since we are scanning backwards, they are also the instructions that
     are currently considered - or the last ones that have been considered -
     for matching (Unless we tracked back to these because a preceding
     instruction failed to match).  */
  rtx x_start, y_start;

  /*  INPUT_VALID indicates if we have actually set up X_INPUT / Y_INPUT
      during the current pass; we keep X_INPUT / Y_INPUT around between passes
      so that we can match REG_EQUAL / REG_EQUIV notes referring to these.  */
  bool input_valid;

  /* Some information would be expensive to exactly checkpoint, so we
     merely increment VERSION any time information about local
     registers, inputs and/or register liveness changes.  When backtracking,
     it is decremented for changes that can be undone, and if a discrepancy
     remains, NEED_RERUN in the relevant struct equiv_info is set to indicate
     that a new pass should be made over the entire block match to get
     accurate register information.  */
  int version;
};
1060 
1061 /* A struct equiv_info is used to pass information to struct_equiv and
1062    to gather state while two basic blocks are checked for structural
1063    equivalence.  */
1064 
struct equiv_info
{
  /* Fields set up by the caller to struct_equiv_block_eq.  */

  basic_block x_block, y_block;  /* The two blocks being matched.  */

  /* MODE carries the mode bits from cleanup_cfg if we are called from
     try_crossjump_to_edge, and additionally it carries the
     STRUCT_EQUIV_* bits described above.  */
  int mode;

  /* INPUT_COST is the cost that adding an extra input to the matched blocks
     is supposed to have, and is taken into account when considering if the
     matched sequence should be extended backwards.  input_cost < 0 means
     don't accept any inputs at all.  */
  int input_cost;


  /* Fields to track state inside of struct_equiv_block_eq.  Some of these
     are also outputs.  */

  /* X_INPUT and Y_INPUT are used by struct_equiv to record a register that
     is used as an input parameter, i.e. where different registers are used
     as sources.  This is only used for a register that is live at the end
     of the blocks, or in some identical code at the end of the blocks;
     Inputs that are dead at the end go into X_LOCAL / Y_LOCAL.  */
  rtx x_input, y_input;
  /* When a previous pass has identified a valid input, INPUT_REG is set
     by struct_equiv_block_eq, and it is henceforth replaced in X_BLOCK
     for the input.  */
  rtx input_reg;

  /* COMMON_LIVE keeps track of the registers which are currently live
     (as we scan backwards from the end) and have the same numbers in both
     blocks.  N.B. a register that is in common_live is unsuitable to become
     a local reg.  */
  regset common_live;
  /* Likewise, X_LOCAL_LIVE / Y_LOCAL_LIVE keep track of registers that are
     local to one of the blocks; these registers must not be accepted as
     identical when encountered in both blocks.  */
  regset x_local_live, y_local_live;

  /* EQUIV_USED indicates for which insns a REG_EQUAL or REG_EQUIV note is
     being used, to avoid having to backtrack in the next pass, so that we
     get accurate life info for this insn then.  For each such insn,
     the bit with the number corresponding to the CUR.NINSNS value at the
     time of scanning is set.  */
  bitmap equiv_used;

  /* Current state that can be saved & restored easily.  */
  struct struct_equiv_checkpoint cur;
  /* BEST_MATCH is used to store the best match so far, weighing the
     cost of matched insns COSTS_N_INSNS (CUR.NINSNS) against the cost
     CUR.INPUT_COUNT * INPUT_COST of setting up the inputs.  */
  struct struct_equiv_checkpoint best_match;
  /* If a checkpoint restore failed, or an input conflict newly arises,
     NEED_RERUN is set.  This has to be tested by the caller to re-run
     the comparison if the match appears otherwise sound.  The state kept in
     x_start, y_start, equiv_used and check_input_conflict ensures that
     we won't loop indefinitely.  */
  bool need_rerun;
  /* If there is indication of an input conflict at the end,
     CHECK_INPUT_CONFLICT is set so that we'll check for input conflicts
     for each insn in the next pass.  This is needed so that we won't discard
     a partial match if there is a longer match that has to be abandoned due
     to an input conflict.  */
  bool check_input_conflict;
  /* HAD_INPUT_CONFLICT is set if CHECK_INPUT_CONFLICT was already set and we
     have passed a point where there were multiple dying inputs.  This helps
     us decide if we should set check_input_conflict for the next pass.  */
  bool had_input_conflict;

  /* LIVE_UPDATE controls if we want to change any life info at all.  We
     set it to false during REG_EQUAL / REG_EQUIV note comparison of the final
     pass so that we don't introduce new registers just for the note; if we
     can't match the notes without the current register information, we drop
     them.  */
  bool live_update;

  /* X_LOCAL and Y_LOCAL are used to gather register numbers of register pairs
     that are local to X_BLOCK and Y_BLOCK, with CUR.LOCAL_COUNT being the index
     to the next free entry.  */
  rtx x_local[STRUCT_EQUIV_MAX_LOCAL], y_local[STRUCT_EQUIV_MAX_LOCAL];
  /* LOCAL_RVALUE is nonzero if the corresponding X_LOCAL / Y_LOCAL entry
     was a source operand (including STRICT_LOW_PART) for the last invocation
     of struct_equiv mentioning it, zero if it was a destination-only operand.
     Since we are scanning backwards, this means the register is input/local
     for the (partial) block scanned so far.  */
  bool local_rvalue[STRUCT_EQUIV_MAX_LOCAL];


  /* Additional fields that are computed for the convenience of the caller.  */

  /* DYING_INPUTS is set to the number of local registers that turn out
     to be inputs to the (possibly partial) block.  */
  int dying_inputs;
  /* X_END and Y_END are the last insns in X_BLOCK and Y_BLOCK, respectively,
     that are being compared.  A final jump insn will not be included.  */
  rtx x_end, y_end;

  /* If we are matching tablejumps, X_LABEL in X_BLOCK corresponds to
     Y_LABEL in Y_BLOCK.  */
  rtx x_label, y_label;

};
1170 
/* Entry points of struct-equiv.c (see the section comment above).  */
extern bool insns_match_p (rtx, rtx, struct equiv_info *);
extern int struct_equiv_block_eq (int, struct equiv_info *);
extern bool struct_equiv_init (int, struct equiv_info *);
extern bool rtx_equiv_p (rtx *, rtx, int, struct equiv_info *);

/* In cfgrtl.c */
extern bool condjump_equiv_p (struct equiv_info *, bool);
1178 
1179 /* Return true when one of the predecessor edges of BB is marked with EDGE_EH.  */
bb_has_eh_pred(basic_block bb)1180 static inline bool bb_has_eh_pred (basic_block bb)
1181 {
1182   edge e;
1183   edge_iterator ei;
1184 
1185   FOR_EACH_EDGE (e, ei, bb->preds)
1186     {
1187       if (e->flags & EDGE_EH)
1188 	return true;
1189     }
1190   return false;
1191 }
1192 
1193 #endif /* GCC_BASIC_BLOCK_H */
1194