/* Natural loop functions
   Copyright (C) 1987, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
   2005, 2006, 2007, 2008, 2009, 2010  Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_CFGLOOP_H
#define GCC_CFGLOOP_H

#include "basic-block.h"
/* For rtx_code.  */
#include "rtl.h"
#include "vecprim.h"
#include "double-int.h"

#include "bitmap.h"
#include "sbitmap.h"

/* Structure to hold decision about unrolling/peeling.  */
enum lpt_dec
{
  LPT_NONE,
  LPT_PEEL_COMPLETELY,
  LPT_PEEL_SIMPLE,
  LPT_UNROLL_CONSTANT,
  LPT_UNROLL_RUNTIME,
  LPT_UNROLL_STUPID
};

struct GTY (()) lpt_decision {
  enum lpt_dec decision;
  unsigned times;
};

/* The structure describing a bound on number of iterations of a loop.  */

struct GTY ((chain_next ("%h.next"))) nb_iter_bound {
  /* The statement STMT is executed at most ...  */
  gimple stmt;

  /* ... BOUND + 1 times (BOUND must be an unsigned constant).
     The + 1 is added for the following reasons:

     a) 0 would otherwise be unused, while we would need to care more about
        overflows (as MAX + 1 is sometimes produced as the estimate on number
	of executions of STMT).
     b) it is consistent with the result of number_of_iterations_exit.  */
  double_int bound;

  /* True if the statement will cause the loop to be left the (at most)
     BOUND + 1-st time it is executed, that is, all the statements after it
     are executed at most BOUND times.  */
  bool is_exit;

  /* The next bound in the list.  */
  struct nb_iter_bound *next;
};

/* Description of the loop exit.  */

struct GTY (()) loop_exit {
  /* The exit edge.  */
  struct edge_def *e;

  /* Previous and next exit in the list of the exits of the loop.  */
  struct loop_exit *prev;
  struct loop_exit *next;

  /* Next element in the list of loops that E exits from.  */
  struct loop_exit *next_e;
};

typedef struct loop *loop_p;
DEF_VEC_P (loop_p);
DEF_VEC_ALLOC_P (loop_p, heap);
DEF_VEC_ALLOC_P (loop_p, gc);

/* An integer estimation of the number of iterations.  Estimate_state
   describes the state of the estimation.  */
enum loop_estimation
{
  /* Estimate was not computed yet.  */
  EST_NOT_COMPUTED,
  /* Estimate is ready.  */
  EST_AVAILABLE
};

/* Structure to hold information for each natural loop.  */
struct GTY ((chain_next ("%h.next"))) loop {
  /* Index into loops array.  */
  int num;

  /* Number of loop insns.  */
  unsigned ninsns;

  /* Basic block of loop header.  */
  struct basic_block_def *header;

  /* Basic block of loop latch.  */
  struct basic_block_def *latch;

  /* For loop unrolling/peeling decision.  */
  struct lpt_decision lpt_decision;

  /* Average number of executed insns per iteration.  */
  unsigned av_ninsns;

  /* Number of blocks contained within the loop.  */
  unsigned num_nodes;

  /* Superloops of the loop, starting with the outermost loop.  */
  VEC (loop_p, gc) *superloops;

  /* The first inner (child) loop or NULL if innermost loop.  */
  struct loop *inner;

  /* Link to the next (sibling) loop.  */
  struct loop *next;

  /* Auxiliary info specific to a pass.  */
  PTR GTY ((skip (""))) aux;

  /* The number of times the latch of the loop is executed.  This can be an
     INTEGER_CST, or a symbolic expression representing the number of
     iterations like "N - 1", or a COND_EXPR containing the runtime
     conditions under which the number of iterations is nonzero.

     Don't access this field directly: number_of_latch_executions
     computes and caches the computed information in this field.  */
  tree nb_iterations;

  /* An integer guaranteed to be greater than or equal to nb_iterations.  Only
     valid if any_upper_bound is true.  */
  double_int nb_iterations_upper_bound;

  /* An integer giving an estimate on nb_iterations.  Unlike
     nb_iterations_upper_bound, there is no guarantee that it is at least
     nb_iterations.  */
  double_int nb_iterations_estimate;

  bool any_upper_bound;
  bool any_estimate;

  /* True if the loop can be parallel.  */
  bool can_be_parallel;

  /* An integer estimation of the number of iterations.  Estimate_state
     describes the state of the estimation.  */
  enum loop_estimation estimate_state;

  /* Upper bound on number of iterations of a loop.  */
  struct nb_iter_bound *bounds;

  /* Head of the cyclic list of the exits of the loop.  */
  struct loop_exit *exits;
};

/* Flags for state of loop structure.  */
enum
{
  LOOPS_HAVE_PREHEADERS = 1,
  LOOPS_HAVE_SIMPLE_LATCHES = 2,
  LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS = 4,
  LOOPS_HAVE_RECORDED_EXITS = 8,
  LOOPS_MAY_HAVE_MULTIPLE_LATCHES = 16,
  LOOP_CLOSED_SSA = 32,
  LOOPS_NEED_FIXUP = 64,
  LOOPS_HAVE_FALLTHRU_PREHEADERS = 128
};

#define LOOPS_NORMAL (LOOPS_HAVE_PREHEADERS | LOOPS_HAVE_SIMPLE_LATCHES \
		      | LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS)
#define AVOID_CFG_MODIFICATIONS (LOOPS_MAY_HAVE_MULTIPLE_LATCHES)

/* Structure to hold CFG information about natural loops within a function.  */
struct GTY (()) loops {
  /* State of loops.  */
  int state;

  /* Array of the loops.  */
  VEC (loop_p, gc) *larray;

  /* Maps edges to the list of their descriptions as loop exits.  Edges
     whose sources or destinations have loop_father == NULL (which may
     happen during the cfg manipulations) should not appear in EXITS.  */
  htab_t GTY((param_is (struct loop_exit))) exits;

  /* Pointer to root of loop hierarchy tree.  */
  struct loop *tree_root;
};

/* Loop recognition.  */
extern int flow_loops_find (struct loops *);
extern void disambiguate_loops_with_multiple_latches (void);
extern void flow_loops_free (struct loops *);
extern void flow_loops_dump (FILE *,
			     void (*)(const struct loop *, FILE *, int), int);
extern void flow_loop_dump (const struct loop *, FILE *,
			    void (*)(const struct loop *, FILE *, int), int);
struct loop *alloc_loop (void);
extern void flow_loop_free (struct loop *);
int flow_loop_nodes_find (basic_block, struct loop *);
void fix_loop_structure (bitmap changed_bbs);
bool mark_irreducible_loops (void);
void release_recorded_exits (void);
void record_loop_exits (void);
void rescan_loop_exit (edge, bool, bool);

/* Loop data structure manipulation/querying.  */
extern void flow_loop_tree_node_add (struct loop *, struct loop *);
extern void flow_loop_tree_node_remove (struct loop *);
extern void add_loop (struct loop *, struct loop *);
extern bool flow_loop_nested_p	(const struct loop *, const struct loop *);
extern bool flow_bb_inside_loop_p (const struct loop *, const_basic_block);
extern struct loop * find_common_loop (struct loop *, struct loop *);
struct loop *superloop_at_depth (struct loop *, unsigned);
struct eni_weights_d;
extern unsigned tree_num_loop_insns (struct loop *, struct eni_weights_d *);
extern int num_loop_insns (const struct loop *);
extern int average_num_loop_insns (const struct loop *);
extern unsigned get_loop_level (const struct loop *);
extern bool loop_exit_edge_p (const struct loop *, const_edge);
extern bool loop_exits_to_bb_p (struct loop *, basic_block);
extern bool loop_exits_from_bb_p (struct loop *, basic_block);
extern void mark_loop_exit_edges (void);

/* Loops & cfg manipulation.  */
extern basic_block *get_loop_body (const struct loop *);
extern unsigned get_loop_body_with_size (const struct loop *, basic_block *,
					 unsigned);
extern basic_block *get_loop_body_in_dom_order (const struct loop *);
extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
extern basic_block *get_loop_body_in_custom_order (const struct loop *,
			       int (*) (const void *, const void *));
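
/* A minimal usage sketch of the body accessors above.  get_loop_body
   returns an array of LOOP->num_nodes blocks that the caller must free;
   PROCESS_BLOCK stands for arbitrary per-block work and is an assumption,
   not part of this interface:

     basic_block *body = get_loop_body (loop);
     unsigned i;

     for (i = 0; i < loop->num_nodes; i++)
       process_block (body[i]);
     free (body);  */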

extern VEC (edge, heap) *get_loop_exit_edges (const struct loop *);
edge single_exit (const struct loop *);
extern unsigned num_loop_branches (const struct loop *);
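
/* Illustrative sketch only: iterating the exit edges of a loop using the
   heap-allocated vector returned by get_loop_exit_edges, which the caller
   must free (FOR_EACH_VEC_ELT comes from vec.h).  MARK_EDGE stands for
   arbitrary per-edge work and is an assumption:

     VEC (edge, heap) *exits = get_loop_exit_edges (loop);
     edge e;
     unsigned i;

     FOR_EACH_VEC_ELT (edge, exits, i, e)
       mark_edge (e);
     VEC_free (edge, heap, exits);  */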

extern edge loop_preheader_edge (const struct loop *);
extern edge loop_latch_edge (const struct loop *);

extern void add_bb_to_loop (basic_block, struct loop *);
extern void remove_bb_from_loops (basic_block);

extern void cancel_loop_tree (struct loop *);
extern void delete_loop (struct loop *);

enum
{
  CP_SIMPLE_PREHEADERS = 1,
  CP_FALLTHRU_PREHEADERS = 2
};

basic_block create_preheader (struct loop *, int);
extern void create_preheaders (int);
extern void force_single_succ_latches (void);

extern void verify_loop_structure (void);

/* Loop analysis.  */
extern bool just_once_each_iteration_p (const struct loop *, const_basic_block);
gcov_type expected_loop_iterations_unbounded (const struct loop *);
extern unsigned expected_loop_iterations (const struct loop *);
extern rtx doloop_condition_get (rtx);

void estimate_numbers_of_iterations_loop (struct loop *, bool);
HOST_WIDE_INT estimated_loop_iterations_int (struct loop *, bool);
HOST_WIDE_INT max_stmt_executions_int (struct loop *, bool);
bool estimated_loop_iterations (struct loop *, bool, double_int *);
bool max_stmt_executions (struct loop *, bool, double_int *);

/* Loop manipulation.  */
extern bool can_duplicate_loop_p (const struct loop *loop);

#define DLTHE_FLAG_UPDATE_FREQ	1	/* Update frequencies in
					   duplicate_loop_to_header_edge.  */
#define DLTHE_RECORD_COPY_NUMBER 2	/* Record copy number in the aux
					   field of newly created BBs.  */
#define DLTHE_FLAG_COMPLETTE_PEEL 4	/* Update frequencies expecting
					   a complete peeling.  */

extern edge create_empty_if_region_on_edge (edge, tree);
extern struct loop *create_empty_loop_on_edge (edge, tree, tree, tree, tree,
					       tree *, tree *, struct loop *);
extern struct loop * duplicate_loop (struct loop *, struct loop *);
extern void duplicate_subloops (struct loop *, struct loop *);
extern bool duplicate_loop_to_header_edge (struct loop *, edge,
					   unsigned, sbitmap, edge,
					   VEC (edge, heap) **, int);
extern struct loop *loopify (edge, edge,
			     basic_block, edge, edge, bool,
			     unsigned, unsigned);
struct loop * loop_version (struct loop *, void *,
			    basic_block *, unsigned, unsigned, unsigned, bool);
extern bool remove_path (edge);
void scale_loop_frequencies (struct loop *, int, int);

/* Induction variable analysis.  */

/* The description of an induction variable.  Things are a bit complicated
   due to the need to handle subregs and extends.  The value of the object
   described by it can be obtained as follows (all computations are done in
   extend_mode):

   Value in the i-th iteration is
     delta + mult * extend_{extend_mode} (subreg_{mode} (base + i * step)).

   If first_special is true, the value in the first iteration is
     delta + mult * base

   If extend = UNKNOWN, first_special must be false, delta 0, mult 1 and the
   value is
     subreg_{mode} (base + i * step)

   The get_iv_value function can be used to obtain these expressions.

   ??? Add a third mode field that would specify the mode in which the inner
   computation is done, which would enable it to be different from the
   outer one?  */

struct rtx_iv
{
  /* Its base and step (mode of base and step is supposed to be extend_mode,
     see the description above).  */
  rtx base, step;

  /* The type of extend applied to it (SIGN_EXTEND, ZERO_EXTEND or UNKNOWN).  */
  enum rtx_code extend;

  /* Operations applied in the extended mode.  */
  rtx delta, mult;

  /* The mode it is extended to.  */
  enum machine_mode extend_mode;

  /* The mode the variable iterates in.  */
  enum machine_mode mode;

  /* Whether the first iteration needs to be handled specially.  */
  unsigned first_special : 1;
};

/* The description of an exit from the loop and of the number of iterations
   till we take the exit.  */

struct niter_desc
{
  /* The edge out of the loop.  */
  edge out_edge;

  /* The other edge leading from the condition.  */
  edge in_edge;

  /* True if we are able to say anything about the number of iterations of
     the loop.  */
  bool simple_p;

  /* True if the loop iterates a constant number of times.  */
  bool const_iter;

  /* Number of iterations if constant.  */
  unsigned HOST_WIDEST_INT niter;

  /* Upper bound on the number of iterations.  */
  unsigned HOST_WIDEST_INT niter_max;

  /* Assumptions under which the rest of the information is valid.  */
  rtx assumptions;

  /* Assumptions under which the loop ends before reaching the latch,
     even if the value of niter_expr says otherwise.  */
  rtx noloop_assumptions;

  /* Condition under which the loop is infinite.  */
  rtx infinite;

  /* Whether the comparison is signed.  */
  bool signed_p;

  /* The mode in which niter_expr should be computed.  */
  enum machine_mode mode;

  /* The number of iterations of the loop.  */
  rtx niter_expr;
};

extern void iv_analysis_loop_init (struct loop *);
extern bool iv_analyze (rtx, rtx, struct rtx_iv *);
extern bool iv_analyze_result (rtx, rtx, struct rtx_iv *);
extern bool iv_analyze_expr (rtx, rtx, enum machine_mode, struct rtx_iv *);
extern rtx get_iv_value (struct rtx_iv *, rtx);
extern bool biv_p (rtx, rtx);
extern void find_simple_exit (struct loop *, struct niter_desc *);
extern void iv_analysis_done (void);
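
/* A hedged usage sketch of the induction variable analysis above.  INSN and
   REG are assumed to be an insn inside LOOP and a register it uses, and
   PROCESS_IV stands for arbitrary consumer code; none of these are part of
   the interface itself:

     struct rtx_iv iv;

     iv_analysis_loop_init (loop);
     if (iv_analyze (insn, reg, &iv))
       process_iv (&iv);
     iv_analysis_done ();  */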

extern struct niter_desc *get_simple_loop_desc (struct loop *loop);
extern void free_simple_loop_desc (struct loop *loop);

static inline struct niter_desc *
simple_loop_desc (struct loop *loop)
{
  return (struct niter_desc *) loop->aux;
}
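
/* Illustrative sketch only: get_simple_loop_desc caches a niter_desc in
   LOOP->aux, so later simple_loop_desc calls return the same structure.
   The bound check is just one plausible consumer, not a prescribed use:

     struct niter_desc *desc = get_simple_loop_desc (loop);

     if (desc->simple_p && desc->const_iter && desc->niter < 8)
       ... unroll or peel the loop completely ...

     free_simple_loop_desc (loop);  */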

/* Accessors for the loop structures.  */

/* Returns the loop with index NUM from current_loops.  */

static inline struct loop *
get_loop (unsigned num)
{
  return VEC_index (loop_p, current_loops->larray, num);
}

/* Returns the number of superloops of LOOP.  */

static inline unsigned
loop_depth (const struct loop *loop)
{
  return VEC_length (loop_p, loop->superloops);
}

/* Returns the immediate superloop of LOOP, or NULL if LOOP is the outermost
   loop.  */

static inline struct loop *
loop_outer (const struct loop *loop)
{
  unsigned n = VEC_length (loop_p, loop->superloops);

  if (n == 0)
    return NULL;

  return VEC_index (loop_p, loop->superloops, n - 1);
}
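
/* A minimal sketch of walking from a loop towards the root of the loop tree
   with the accessors above; stopping at depth 1 is only an example:

     struct loop *outer;

     for (outer = loop_outer (loop); outer; outer = loop_outer (outer))
       if (loop_depth (outer) == 1)
	 break;  */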

/* Returns true if LOOP has at least one exit edge.  */

static inline bool
loop_has_exit_edges (const struct loop *loop)
{
  return loop->exits->next->e != NULL;
}

/* Returns the list of loops in current_loops.  */

static inline VEC (loop_p, gc) *
get_loops (void)
{
  if (!current_loops)
    return NULL;

  return current_loops->larray;
}

/* Returns the number of loops in current_loops (including the removed
   ones and the fake loop that forms the root of the loop tree).  */

static inline unsigned
number_of_loops (void)
{
  if (!current_loops)
    return 0;

  return VEC_length (loop_p, current_loops->larray);
}

/* Returns true if state of the loops satisfies all properties
   described by FLAGS.  */

static inline bool
loops_state_satisfies_p (unsigned flags)
{
  return (current_loops->state & flags) == flags;
}

/* Sets FLAGS in the loops state.  */

static inline void
loops_state_set (unsigned flags)
{
  current_loops->state |= flags;
}

/* Clears FLAGS from the loops state.  */

static inline void
loops_state_clear (unsigned flags)
{
  if (!current_loops)
    return;
  current_loops->state &= ~flags;
}
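
/* A small illustrative sketch of querying and updating the loop state; the
   flags come from the enum near the top of this file, and the scenario of a
   pass recording exits on demand is only an assumption:

     if (!loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
       {
	 record_loop_exits ();
	 loops_state_set (LOOPS_HAVE_RECORDED_EXITS);
       }  */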

/* Loop iterators.  */

/* Flags for loop iteration.  */

enum li_flags
{
  LI_INCLUDE_ROOT = 1,		/* Include the fake root of the loop tree.  */
  LI_FROM_INNERMOST = 2,	/* Iterate over the loops in the reverse order,
				   starting from innermost ones.  */
  LI_ONLY_INNERMOST = 4		/* Iterate only over innermost loops.  */
};

/* The iterator for loops.  */

typedef struct
{
  /* The list of loops to visit.  */
  VEC(int,heap) *to_visit;

  /* The index of the current loop.  */
  unsigned idx;
} loop_iterator;

static inline void
fel_next (loop_iterator *li, loop_p *loop)
{
  int anum;

  while (VEC_iterate (int, li->to_visit, li->idx, anum))
    {
      li->idx++;
      *loop = get_loop (anum);
      if (*loop)
	return;
    }

  VEC_free (int, heap, li->to_visit);
  *loop = NULL;
}

static inline void
fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
{
  struct loop *aloop;
  unsigned i;
  int mn;

  li->idx = 0;
  if (!current_loops)
    {
      li->to_visit = NULL;
      *loop = NULL;
      return;
    }

  li->to_visit = VEC_alloc (int, heap, number_of_loops ());
  mn = (flags & LI_INCLUDE_ROOT) ? 0 : 1;

  if (flags & LI_ONLY_INNERMOST)
    {
      for (i = 0; VEC_iterate (loop_p, current_loops->larray, i, aloop); i++)
	if (aloop != NULL
	    && aloop->inner == NULL
	    && aloop->num >= mn)
	  VEC_quick_push (int, li->to_visit, aloop->num);
    }
  else if (flags & LI_FROM_INNERMOST)
    {
      /* Push the loops to LI->TO_VISIT in postorder.  */
      for (aloop = current_loops->tree_root;
	   aloop->inner != NULL;
	   aloop = aloop->inner)
	continue;

      while (1)
	{
	  if (aloop->num >= mn)
	    VEC_quick_push (int, li->to_visit, aloop->num);

	  if (aloop->next)
	    {
	      for (aloop = aloop->next;
		   aloop->inner != NULL;
		   aloop = aloop->inner)
		continue;
	    }
	  else if (!loop_outer (aloop))
	    break;
	  else
	    aloop = loop_outer (aloop);
	}
    }
  else
    {
      /* Push the loops to LI->TO_VISIT in preorder.  */
      aloop = current_loops->tree_root;
      while (1)
	{
	  if (aloop->num >= mn)
	    VEC_quick_push (int, li->to_visit, aloop->num);

	  if (aloop->inner != NULL)
	    aloop = aloop->inner;
	  else
	    {
	      while (aloop != NULL && aloop->next == NULL)
		aloop = loop_outer (aloop);
	      if (aloop == NULL)
		break;
	      aloop = aloop->next;
	    }
	}
    }

  fel_next (li, loop);
}

#define FOR_EACH_LOOP(LI, LOOP, FLAGS) \
  for (fel_init (&(LI), &(LOOP), FLAGS); \
       (LOOP); \
       fel_next (&(LI), &(LOOP)))

#define FOR_EACH_LOOP_BREAK(LI) \
  { \
    VEC_free (int, heap, (LI).to_visit); \
    break; \
  }
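
/* A typical usage sketch of the loop iterator above; the per-loop work
   (OPTIMIZE_LOOP) and the early-exit condition are placeholders, not part
   of this interface:

     loop_iterator li;
     struct loop *loop;

     FOR_EACH_LOOP (li, loop, LI_ONLY_INNERMOST)
       {
	 if (!optimize_loop (loop))
	   FOR_EACH_LOOP_BREAK (li);
       }  */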

/* The properties of the target.  */
struct target_cfgloop {
  /* Number of available registers.  */
  unsigned x_target_avail_regs;

  /* Number of available registers that are call-clobbered.  */
  unsigned x_target_clobbered_regs;

  /* Number of registers reserved for temporary expressions.  */
  unsigned x_target_res_regs;

  /* The cost for a register when there still is some reserve, but we are
     approaching the number of available registers.  */
  unsigned x_target_reg_cost[2];

  /* The cost for a register when we need to spill.  */
  unsigned x_target_spill_cost[2];
};

extern struct target_cfgloop default_target_cfgloop;
#if SWITCHABLE_TARGET
extern struct target_cfgloop *this_target_cfgloop;
#else
#define this_target_cfgloop (&default_target_cfgloop)
#endif

#define target_avail_regs \
  (this_target_cfgloop->x_target_avail_regs)
#define target_clobbered_regs \
  (this_target_cfgloop->x_target_clobbered_regs)
#define target_res_regs \
  (this_target_cfgloop->x_target_res_regs)
#define target_reg_cost \
  (this_target_cfgloop->x_target_reg_cost)
#define target_spill_cost \
  (this_target_cfgloop->x_target_spill_cost)

/* Register pressure estimation for induction variable optimizations & loop
   invariant motion.  */
extern unsigned estimate_reg_pressure_cost (unsigned, unsigned, bool, bool);
extern void init_set_costs (void);

/* Loop optimizer initialization.  */
extern void loop_optimizer_init (unsigned);
extern void loop_optimizer_finalize (void);
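
/* An illustrative sketch of how a pass typically brackets its work with the
   loop optimizer; the exact flag combination is just an example:

     loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
     ... transform the loops of the current function ...
     loop_optimizer_finalize ();  */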

/* Optimization passes.  */
extern void unswitch_loops (void);

enum
{
  UAP_PEEL = 1,		/* Enables loop peeling.  */
  UAP_UNROLL = 2,	/* Enables unrolling of loops if it seems profitable.  */
  UAP_UNROLL_ALL = 4	/* Enables unrolling of all loops.  */
};

extern void unroll_and_peel_loops (int);
extern void doloop_optimize_loops (void);
extern void move_loop_invariants (void);
extern bool finite_loop_p (struct loop *);

#endif /* GCC_CFGLOOP_H */