1 /* Control flow functions for trees.
2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
64 #include "asan.h"
65
66 /* This file contains functions for building the Control Flow Graph (CFG)
67 for a function tree. */
68
69 /* Local declarations. */
70
71 /* Initial capacity for the basic block array. */
72 static const int initial_cfg_capacity = 20;
73
74 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
75 which use a particular edge. The CASE_LABEL_EXPRs are chained together
76 via their CASE_CHAIN field, which we clear after we're done with the
77 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
78
79 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
80 update the case vector in response to edge redirections.
81
82 Right now this table is set up and torn down at key points in the
83 compilation process. It would be nice if we could make the table
84 more persistent. The key is getting notification of changes to
85 the CFG (particularly edge removal, creation and redirection). */
86
87 static hash_map<edge, tree> *edge_to_cases;
88
89 /* If we record edge_to_cases, this bitmap will hold indexes
90 of basic blocks that end in a GIMPLE_SWITCH which we touched
91 due to edge manipulations. */
92
93 static bitmap touched_switch_bbs;
94
95 /* CFG statistics. */
96 struct cfg_stats_d
97 {
98 long num_merged_labels;
99 };
100
101 static struct cfg_stats_d cfg_stats;
102
103 /* Data to pass to replace_block_vars_by_duplicates_1. */
104 struct replace_decls_d
105 {
106 hash_map<tree, tree> *vars_map;
107 tree to_context;
108 };
109
110 /* Hash table to store last discriminator assigned for each locus. */
111 struct locus_discrim_map
112 {
113 location_t locus;
114 int discriminator;
115 };
116
117 /* Hashtable helpers. */
118
119 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
120 {
121 static inline hashval_t hash (const locus_discrim_map *);
122 static inline bool equal (const locus_discrim_map *,
123 const locus_discrim_map *);
124 };
125
126 /* Trivial hash function for a location_t. ITEM is a pointer to
127 a hash table entry that maps a location_t to a discriminator. */
128
129 inline hashval_t
130 locus_discrim_hasher::hash (const locus_discrim_map *item)
131 {
132 return LOCATION_LINE (item->locus);
133 }
134
135 /* Equality function for the locus-to-discriminator map. A and B
136 point to the two hash table entries to compare. */
137
138 inline bool
139 locus_discrim_hasher::equal (const locus_discrim_map *a,
140 const locus_discrim_map *b)
141 {
142 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
143 }
144
145 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
146
147 /* Basic blocks and flowgraphs. */
148 static void make_blocks (gimple_seq);
149
150 /* Edges. */
151 static void make_edges (void);
152 static void assign_discriminators (void);
153 static void make_cond_expr_edges (basic_block);
154 static void make_gimple_switch_edges (gswitch *, basic_block);
155 static bool make_goto_expr_edges (basic_block);
156 static void make_gimple_asm_edges (basic_block);
157 static edge gimple_redirect_edge_and_branch (edge, basic_block);
158 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
159
160 /* Various helpers. */
161 static inline bool stmt_starts_bb_p (gimple *, gimple *);
162 static int gimple_verify_flow_info (void);
163 static void gimple_make_forwarder_block (edge);
164 static gimple *first_non_label_stmt (basic_block);
165 static bool verify_gimple_transaction (gtransaction *);
166 static bool call_can_make_abnormal_goto (gimple *);
167
168 /* Flowgraph optimization and cleanup. */
169 static void gimple_merge_blocks (basic_block, basic_block);
170 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
171 static void remove_bb (basic_block);
172 static edge find_taken_edge_computed_goto (basic_block, tree);
173 static edge find_taken_edge_cond_expr (const gcond *, tree);
174 static edge find_taken_edge_switch_expr (const gswitch *, tree);
175 static tree find_case_label_for_value (const gswitch *, tree);
176 static void lower_phi_internal_fn ();
177
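/* Build an empty CFG skeleton for function FN: allocate the basic block
   and label-to-block arrays, set the block counts to NUM_FIXED_BLOCKS,
   and link the ENTRY and EXIT blocks to each other.  */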
178 void
179 init_empty_tree_cfg_for_function (struct function *fn)
180 {
181 /* Initialize the basic block array. */
182 init_flow (fn);
183 profile_status_for_fn (fn) = PROFILE_ABSENT;
184 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
185 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
186 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
187 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
188 initial_cfg_capacity);
189
190 /* Build a mapping of labels to their associated blocks. */
191 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
192 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
193 initial_cfg_capacity);
194
195 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
196 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
197
198 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
199 = EXIT_BLOCK_PTR_FOR_FN (fn);
200 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
201 = ENTRY_BLOCK_PTR_FOR_FN (fn);
202 }
203
204 void
205 init_empty_tree_cfg (void)
206 {
207 init_empty_tree_cfg_for_function (cfun);
208 }
209
210 /*---------------------------------------------------------------------------
211 Create basic blocks
212 ---------------------------------------------------------------------------*/
213
214 /* Entry point to the CFG builder for trees. SEQ is the sequence of
215 statements to be added to the flowgraph. */
216
217 static void
218 build_gimple_cfg (gimple_seq seq)
219 {
220 /* Register specific gimple functions. */
221 gimple_register_cfg_hooks ();
222
223 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
224
225 init_empty_tree_cfg ();
226
227 make_blocks (seq);
228
229 /* Make sure there is always at least one block, even if it's empty. */
230 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
231 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
232
233 /* Adjust the size of the array. */
234 if (basic_block_info_for_fn (cfun)->length ()
235 < (size_t) n_basic_blocks_for_fn (cfun))
236 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
237 n_basic_blocks_for_fn (cfun));
238
239 /* To speed up statement iterator walks, we first purge dead labels. */
240 cleanup_dead_labels ();
241
242 /* Group case nodes to reduce the number of edges.
243 We do this after cleaning up dead labels because otherwise we miss
244 a lot of obvious case merging opportunities. */
245 group_case_labels ();
246
247 /* Create the edges of the flowgraph. */
248 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
249 make_edges ();
250 assign_discriminators ();
251 lower_phi_internal_fn ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
255 }
256
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
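/* As a rough illustration (a sketch, not the only possible shape), a loop
   condition guarded by "#pragma GCC ivdep" reaches this function as
   something like

     _2 = .ANNOTATE (_1, annot_expr_ivdep_kind, 0);
     if (_2 != 0) goto <body>; else goto <exit>;

   in which case we set LOOP->safelen to INT_MAX and replace the call
   with a plain copy of its first argument.  */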
260
261 static void
262 replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
263 {
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
266
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
268 return;
269
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
271 {
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
274 break;
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
277 break;
278
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
280 {
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
283 break;
284 case annot_expr_unroll_kind:
285 loop->unroll
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
288 break;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
291 break;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
295 break;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
299 break;
300 default:
301 gcc_unreachable ();
302 }
303
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
307 }
308 }
309
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
313
314 static void
315 replace_loop_annotate (void)
316 {
317 struct loop *loop;
318 basic_block bb;
319 gimple_stmt_iterator gsi;
320 gimple *stmt;
321
322 FOR_EACH_LOOP (loop, 0)
323 {
324 /* First look into the header. */
325 replace_loop_annotate_in_block (loop->header, loop);
326
327 /* Then look into the latch, if any. */
328 if (loop->latch)
329 replace_loop_annotate_in_block (loop->latch, loop);
330 }
331
332 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
333 FOR_EACH_BB_FN (bb, cfun)
334 {
335 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
336 {
337 stmt = gsi_stmt (gsi);
338 if (gimple_code (stmt) != GIMPLE_CALL)
339 continue;
340 if (!gimple_call_internal_p (stmt)
341 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
342 continue;
343
344 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
345 {
346 case annot_expr_ivdep_kind:
347 case annot_expr_unroll_kind:
348 case annot_expr_no_vector_kind:
349 case annot_expr_vector_kind:
350 case annot_expr_parallel_kind:
351 break;
352 default:
353 gcc_unreachable ();
354 }
355
356 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
357 stmt = gimple_build_assign (gimple_call_lhs (stmt),
358 gimple_call_arg (stmt, 0));
359 gsi_replace (&gsi, stmt, true);
360 }
361 }
362 }
363
364 /* Lower internal PHI function from GIMPLE FE. */
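/* A rough sketch of the transformation: GIMPLE FE input such as

     x_1 = __PHI (__BB2: a_2, __BB4: b_3);

   (arguments alternating between predecessor labels and values) becomes
   a real PHI node for x_1 in the containing block, with a_2 and b_3
   attached to the edges coming from blocks 2 and 4.  */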
365
366 static void
367 lower_phi_internal_fn ()
368 {
369 basic_block bb, pred = NULL;
370 gimple_stmt_iterator gsi;
371 tree lhs;
372 gphi *phi_node;
373 gimple *stmt;
374
375 /* After edge creation, handle __PHI function from GIMPLE FE. */
376 FOR_EACH_BB_FN (bb, cfun)
377 {
378 for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi);)
379 {
380 stmt = gsi_stmt (gsi);
381 if (! gimple_call_internal_p (stmt, IFN_PHI))
382 break;
383
384 lhs = gimple_call_lhs (stmt);
385 phi_node = create_phi_node (lhs, bb);
386
387 /* Add arguments to the PHI node. */
388 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
389 {
390 tree arg = gimple_call_arg (stmt, i);
391 if (TREE_CODE (arg) == LABEL_DECL)
392 pred = label_to_block (arg);
393 else
394 {
395 edge e = find_edge (pred, bb);
396 add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
397 }
398 }
399
400 gsi_remove (&gsi, true);
401 }
402 }
403 }
404
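/* Build the CFG for the current function's GIMPLE body, clean it up,
   initialize the loop structures and propagate loop annotations.  */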
405 static unsigned int
406 execute_build_cfg (void)
407 {
408 gimple_seq body = gimple_body (current_function_decl);
409
410 build_gimple_cfg (body);
411 gimple_set_body (current_function_decl, NULL);
412 if (dump_file && (dump_flags & TDF_DETAILS))
413 {
414 fprintf (dump_file, "Scope blocks:\n");
415 dump_scope_blocks (dump_file, dump_flags);
416 }
417 cleanup_tree_cfg ();
418 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
419 replace_loop_annotate ();
420 return 0;
421 }
422
423 namespace {
424
425 const pass_data pass_data_build_cfg =
426 {
427 GIMPLE_PASS, /* type */
428 "cfg", /* name */
429 OPTGROUP_NONE, /* optinfo_flags */
430 TV_TREE_CFG, /* tv_id */
431 PROP_gimple_leh, /* properties_required */
432 ( PROP_cfg | PROP_loops ), /* properties_provided */
433 0, /* properties_destroyed */
434 0, /* todo_flags_start */
435 0, /* todo_flags_finish */
436 };
437
438 class pass_build_cfg : public gimple_opt_pass
439 {
440 public:
441 pass_build_cfg (gcc::context *ctxt)
442 : gimple_opt_pass (pass_data_build_cfg, ctxt)
443 {}
444
445 /* opt_pass methods: */
446 virtual unsigned int execute (function *) { return execute_build_cfg (); }
447
448 }; // class pass_build_cfg
449
450 } // anon namespace
451
452 gimple_opt_pass *
453 make_pass_build_cfg (gcc::context *ctxt)
454 {
455 return new pass_build_cfg (ctxt);
456 }
457
458
459 /* Return true if T is a computed goto. */
460
461 bool
462 computed_goto_p (gimple *t)
463 {
464 return (gimple_code (t) == GIMPLE_GOTO
465 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
466 }
467
468 /* Returns true if the sequence of statements STMTS only contains
469 a call to __builtin_unreachable (). */
470
471 bool
472 gimple_seq_unreachable_p (gimple_seq stmts)
473 {
474 if (stmts == NULL
475 /* Return false if -fsanitize=unreachable, we don't want to
476 optimize away those calls, but rather turn them into
477 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
478 later. */
479 || sanitize_flags_p (SANITIZE_UNREACHABLE))
480 return false;
481
482 gimple_stmt_iterator gsi = gsi_last (stmts);
483
484 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
485 return false;
486
487 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
488 {
489 gimple *stmt = gsi_stmt (gsi);
490 if (gimple_code (stmt) != GIMPLE_LABEL
491 && !is_gimple_debug (stmt)
492 && !gimple_clobber_p (stmt))
493 return false;
494 }
495 return true;
496 }
497
498 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
499 the other edge points to a bb with just __builtin_unreachable ().
500 I.e. return true for C->M edge in:
501 <bb C>:
502 ...
503 if (something)
504 goto <bb N>;
505 else
506 goto <bb M>;
507 <bb N>:
508 __builtin_unreachable ();
509 <bb M>: */
510
511 bool
512 assert_unreachable_fallthru_edge_p (edge e)
513 {
514 basic_block pred_bb = e->src;
515 gimple *last = last_stmt (pred_bb);
516 if (last && gimple_code (last) == GIMPLE_COND)
517 {
518 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
519 if (other_bb == e->dest)
520 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
521 if (EDGE_COUNT (other_bb->succs) == 0)
522 return gimple_seq_unreachable_p (bb_seq (other_bb));
523 }
524 return false;
525 }
526
527
528 /* Initialize the GF_CALL_CTRL_ALTERING flag, which indicates that the call
529 could alter control flow by means other than EH. We initialize the flag at
530 CFG build time and only ever clear it later. */
531
532 static void
533 gimple_call_initialize_ctrl_altering (gimple *stmt)
534 {
535 int flags = gimple_call_flags (stmt);
536
537 /* A call alters control flow if it can make an abnormal goto. */
538 if (call_can_make_abnormal_goto (stmt)
539 /* A call also alters control flow if it does not return. */
540 || flags & ECF_NORETURN
541 /* TM ending statements have backedges out of the transaction.
542 Return true so we split the basic block containing them.
543 Note that the TM_BUILTIN test is merely an optimization. */
544 || ((flags & ECF_TM_BUILTIN)
545 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
546 /* A BUILT_IN_RETURN call is the same as a return statement. */
547 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
548 /* IFN_UNIQUE should be the last insn, to make checking for it
549 as cheap as possible. */
550 || (gimple_call_internal_p (stmt)
551 && gimple_call_internal_unique_p (stmt)))
552 gimple_call_set_ctrl_altering (stmt, true);
553 else
554 gimple_call_set_ctrl_altering (stmt, false);
555 }
556
557
558 /* Insert SEQ after BB and build a flowgraph. */
559
560 static basic_block
561 make_blocks_1 (gimple_seq seq, basic_block bb)
562 {
563 gimple_stmt_iterator i = gsi_start (seq);
564 gimple *stmt = NULL;
565 gimple *prev_stmt = NULL;
566 bool start_new_block = true;
567 bool first_stmt_of_seq = true;
568
569 while (!gsi_end_p (i))
570 {
571 /* PREV_STMT should only be set to a debug stmt if the debug
572 stmt is before nondebug stmts. Once stmt reaches a nondebug
573 nonlabel, prev_stmt will be set to it, so that
574 stmt_starts_bb_p will know to start a new block if a label is
575 found. However, if stmt was a label after debug stmts only,
576 keep the label in prev_stmt even if we find further debug
577 stmts, for there may be other labels after them, and they
578 should land in the same block. */
579 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
580 prev_stmt = stmt;
581 stmt = gsi_stmt (i);
582
583 if (stmt && is_gimple_call (stmt))
584 gimple_call_initialize_ctrl_altering (stmt);
585
586 /* If the statement starts a new basic block or if we have determined
587 in a previous pass that we need to create a new block for STMT, do
588 so now. */
589 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
590 {
591 if (!first_stmt_of_seq)
592 gsi_split_seq_before (&i, &seq);
593 bb = create_basic_block (seq, bb);
594 start_new_block = false;
595 prev_stmt = NULL;
596 }
597
598 /* Now add STMT to BB and create the subgraphs for special statement
599 codes. */
600 gimple_set_bb (stmt, bb);
601
602 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
603 next iteration. */
604 if (stmt_ends_bb_p (stmt))
605 {
606 /* If the stmt can make abnormal goto use a new temporary
607 for the assignment to the LHS. This makes sure the old value
608 of the LHS is available on the abnormal edge. Otherwise
609 we will end up with overlapping life-ranges for abnormal
610 SSA names. */
611 if (gimple_has_lhs (stmt)
612 && stmt_can_make_abnormal_goto (stmt)
613 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
614 {
615 tree lhs = gimple_get_lhs (stmt);
616 tree tmp = create_tmp_var (TREE_TYPE (lhs));
617 gimple *s = gimple_build_assign (lhs, tmp);
618 gimple_set_location (s, gimple_location (stmt));
619 gimple_set_block (s, gimple_block (stmt));
620 gimple_set_lhs (stmt, tmp);
621 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
622 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
623 DECL_GIMPLE_REG_P (tmp) = 1;
624 gsi_insert_after (&i, s, GSI_SAME_STMT);
625 }
626 start_new_block = true;
627 }
628
629 gsi_next (&i);
630 first_stmt_of_seq = false;
631 }
632 return bb;
633 }
634
635 /* Build a flowgraph for the sequence of stmts SEQ. */
636
637 static void
638 make_blocks (gimple_seq seq)
639 {
640 /* Look for debug markers right before labels, and move the debug
641 stmts after the labels. Accepting labels among debug markers
642 adds no value, just complexity; if we wanted to annotate labels
643 with view numbers (so sequencing among markers would matter) or
644 somesuch, we're probably better off still moving the labels, but
645 adding other debug annotations in their original positions or
646 emitting nonbind or bind markers associated with the labels in
647 the original position of the labels.
648
649 Moving labels would probably be simpler, but we can't do that:
650 moving labels assigns label ids to them, and doing so because of
651 debug markers makes for -fcompare-debug and possibly even codegen
652 differences. So, we have to move the debug stmts instead. To
653 that end, we scan SEQ backwards, marking the position of the
654 latest (earliest we find) label, and moving debug stmts that are
655 not separated from it by nondebug nonlabel stmts after the
656 label. */
657 if (MAY_HAVE_DEBUG_MARKER_STMTS)
658 {
659 gimple_stmt_iterator label = gsi_none ();
660
661 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
662 {
663 gimple *stmt = gsi_stmt (i);
664
665 /* If this is the first label we encounter (latest in SEQ)
666 before nondebug stmts, record its position. */
667 if (is_a <glabel *> (stmt))
668 {
669 if (gsi_end_p (label))
670 label = i;
671 continue;
672 }
673
674 /* Without a recorded label position to move debug stmts to,
675 there's nothing to do. */
676 if (gsi_end_p (label))
677 continue;
678
679 /* Move the debug stmt at I after LABEL. */
680 if (is_gimple_debug (stmt))
681 {
682 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
683 /* As STMT is removed, I advances to the stmt after
684 STMT, so the gsi_prev in the for "increment"
685 expression gets us to the stmt we're to visit after
686 STMT. LABEL, however, would advance to the moved
687 stmt if we passed it to gsi_move_after, so pass it a
688 copy instead, so as to keep LABEL pointing to the
689 LABEL. */
690 gimple_stmt_iterator copy = label;
691 gsi_move_after (&i, &copy);
692 continue;
693 }
694
695 /* There aren't any (more?) debug stmts before label, so
696 there isn't anything else to move after it. */
697 label = gsi_none ();
698 }
699 }
700
701 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
702 }
703
704 /* Create and return a new empty basic block after bb AFTER. */
705
706 static basic_block
707 create_bb (void *h, void *e, basic_block after)
708 {
709 basic_block bb;
710
711 gcc_assert (!e);
712
713 /* Create and initialize a new basic block. Since alloc_block uses
714 GC allocation that clears memory to allocate a basic block, we do
715 not have to clear the newly allocated basic block here. */
716 bb = alloc_block ();
717
718 bb->index = last_basic_block_for_fn (cfun);
719 bb->flags = BB_NEW;
720 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
721
722 /* Add the new block to the linked list of blocks. */
723 link_block (bb, after);
724
725 /* Grow the basic block array if needed. */
726 if ((size_t) last_basic_block_for_fn (cfun)
727 == basic_block_info_for_fn (cfun)->length ())
728 {
729 size_t new_size =
730 (last_basic_block_for_fn (cfun)
731 + (last_basic_block_for_fn (cfun) + 3) / 4);
732 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
733 }
734
735 /* Add the newly created block to the array. */
736 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
737
738 n_basic_blocks_for_fn (cfun)++;
739 last_basic_block_for_fn (cfun)++;
740
741 return bb;
742 }
743
744
745 /*---------------------------------------------------------------------------
746 Edge creation
747 ---------------------------------------------------------------------------*/
748
749 /* If basic block BB has an abnormal edge to a basic block
750 containing IFN_ABNORMAL_DISPATCHER internal call, return
751 the dispatcher's basic block, otherwise return NULL. */
752
753 basic_block
754 get_abnormal_succ_dispatcher (basic_block bb)
755 {
756 edge e;
757 edge_iterator ei;
758
759 FOR_EACH_EDGE (e, ei, bb->succs)
760 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
761 {
762 gimple_stmt_iterator gsi
763 = gsi_start_nondebug_after_labels_bb (e->dest);
764 gimple *g = gsi_stmt (gsi);
765 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
766 return e->dest;
767 }
768 return NULL;
769 }
770
771 /* Helper function for make_edges. Create a basic block with
772 an ABNORMAL_DISPATCHER internal call in it if needed, and
773 create abnormal edges from BBS to it and from it to FOR_BB
774 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
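/* In the computed-goto case each original "goto *x;" ends up rewritten,
   roughly, as "gotovar = x; goto <factored label>;", and the single
   factored block finishes with "goto *gotovar;", so only the dispatcher
   block needs abnormal successor edges to the potential destinations.  */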
775
776 static void
777 handle_abnormal_edges (basic_block *dispatcher_bbs,
778 basic_block for_bb, int *bb_to_omp_idx,
779 auto_vec<basic_block> *bbs, bool computed_goto)
780 {
781 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
782 unsigned int idx = 0;
783 basic_block bb;
784 bool inner = false;
785
786 if (bb_to_omp_idx)
787 {
788 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
789 if (bb_to_omp_idx[for_bb->index] != 0)
790 inner = true;
791 }
792
793 /* If the dispatcher has been created already, then there are basic
794 blocks with abnormal edges to it, so just make a new edge to
795 for_bb. */
796 if (*dispatcher == NULL)
797 {
798 /* Check if there are any basic blocks that need to have
799 abnormal edges to this dispatcher. If there are none, return
800 early. */
801 if (bb_to_omp_idx == NULL)
802 {
803 if (bbs->is_empty ())
804 return;
805 }
806 else
807 {
808 FOR_EACH_VEC_ELT (*bbs, idx, bb)
809 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
810 break;
811 if (bb == NULL)
812 return;
813 }
814
815 /* Create the dispatcher bb. */
816 *dispatcher = create_basic_block (NULL, for_bb);
817 if (computed_goto)
818 {
819 /* Factor computed gotos into a common computed goto site. Also
820 record the location of that site so that we can un-factor the
821 gotos after we have converted back to normal form. */
822 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
823
824 /* Create the destination of the factored goto. Each original
825 computed goto will put its desired destination into this
826 variable and jump to the label we create immediately below. */
827 tree var = create_tmp_var (ptr_type_node, "gotovar");
828
829 /* Build a label for the new block which will contain the
830 factored computed goto. */
831 tree factored_label_decl
832 = create_artificial_label (UNKNOWN_LOCATION);
833 gimple *factored_computed_goto_label
834 = gimple_build_label (factored_label_decl);
835 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
836
837 /* Build our new computed goto. */
838 gimple *factored_computed_goto = gimple_build_goto (var);
839 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
840
841 FOR_EACH_VEC_ELT (*bbs, idx, bb)
842 {
843 if (bb_to_omp_idx
844 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
845 continue;
846
847 gsi = gsi_last_bb (bb);
848 gimple *last = gsi_stmt (gsi);
849
850 gcc_assert (computed_goto_p (last));
851
852 /* Copy the original computed goto's destination into VAR. */
853 gimple *assignment
854 = gimple_build_assign (var, gimple_goto_dest (last));
855 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
856
857 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
858 e->goto_locus = gimple_location (last);
859 gsi_remove (&gsi, true);
860 }
861 }
862 else
863 {
864 tree arg = inner ? boolean_true_node : boolean_false_node;
865 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
866 1, arg);
867 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
868 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
869
870 /* Create predecessor edges of the dispatcher. */
871 FOR_EACH_VEC_ELT (*bbs, idx, bb)
872 {
873 if (bb_to_omp_idx
874 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
875 continue;
876 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
877 }
878 }
879 }
880
881 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
882 }
883
884 /* Creates outgoing edges for BB. Returns 1 when it ends with a
885 computed goto, 2 when it ends with a statement that might return
886 to this function via a nonlocal goto, and 0 otherwise. Updates
887 *PCUR_REGION with the OMP region this BB is in. */
888
889 static int
890 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
891 {
892 gimple *last = last_stmt (bb);
893 bool fallthru = false;
894 int ret = 0;
895
896 if (!last)
897 return ret;
898
899 switch (gimple_code (last))
900 {
901 case GIMPLE_GOTO:
902 if (make_goto_expr_edges (bb))
903 ret = 1;
904 fallthru = false;
905 break;
906 case GIMPLE_RETURN:
907 {
908 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
909 e->goto_locus = gimple_location (last);
910 fallthru = false;
911 }
912 break;
913 case GIMPLE_COND:
914 make_cond_expr_edges (bb);
915 fallthru = false;
916 break;
917 case GIMPLE_SWITCH:
918 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
919 fallthru = false;
920 break;
921 case GIMPLE_RESX:
922 make_eh_edges (last);
923 fallthru = false;
924 break;
925 case GIMPLE_EH_DISPATCH:
926 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
927 break;
928
929 case GIMPLE_CALL:
930 /* If this function receives a nonlocal goto, then we need to
931 make edges from this call site to all the nonlocal goto
932 handlers. */
933 if (stmt_can_make_abnormal_goto (last))
934 ret = 2;
935
936 /* If this statement has reachable exception handlers, then
937 create abnormal edges to them. */
938 make_eh_edges (last);
939
940 /* BUILTIN_RETURN is really a return statement. */
941 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
942 {
943 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
944 fallthru = false;
945 }
946 /* Some calls are known not to return. */
947 else
948 fallthru = !gimple_call_noreturn_p (last);
949 break;
950
951 case GIMPLE_ASSIGN:
952 /* A GIMPLE_ASSIGN may throw internally and thus be considered
953 control-altering. */
954 if (is_ctrl_altering_stmt (last))
955 make_eh_edges (last);
956 fallthru = true;
957 break;
958
959 case GIMPLE_ASM:
960 make_gimple_asm_edges (bb);
961 fallthru = true;
962 break;
963
964 CASE_GIMPLE_OMP:
965 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
966 break;
967
968 case GIMPLE_TRANSACTION:
969 {
970 gtransaction *txn = as_a <gtransaction *> (last);
971 tree label1 = gimple_transaction_label_norm (txn);
972 tree label2 = gimple_transaction_label_uninst (txn);
973
974 if (label1)
975 make_edge (bb, label_to_block (label1), EDGE_FALLTHRU);
976 if (label2)
977 make_edge (bb, label_to_block (label2),
978 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
979
980 tree label3 = gimple_transaction_label_over (txn);
981 if (gimple_transaction_subcode (txn)
982 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
983 make_edge (bb, label_to_block (label3), EDGE_TM_ABORT);
984
985 fallthru = false;
986 }
987 break;
988
989 default:
990 gcc_assert (!stmt_ends_bb_p (last));
991 fallthru = true;
992 break;
993 }
994
995 if (fallthru)
996 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
997
998 return ret;
999 }
1000
1001 /* Join all the blocks in the flowgraph. */
1002
1003 static void
1004 make_edges (void)
1005 {
1006 basic_block bb;
1007 struct omp_region *cur_region = NULL;
1008 auto_vec<basic_block> ab_edge_goto;
1009 auto_vec<basic_block> ab_edge_call;
1010 int *bb_to_omp_idx = NULL;
1011 int cur_omp_region_idx = 0;
1012
1013 /* Create an edge from entry to the first block with executable
1014 statements in it. */
1015 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1016 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
1017 EDGE_FALLTHRU);
1018
1019 /* Traverse the basic block array placing edges. */
1020 FOR_EACH_BB_FN (bb, cfun)
1021 {
1022 int mer;
1023
1024 if (bb_to_omp_idx)
1025 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
1026
1027 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1028 if (mer == 1)
1029 ab_edge_goto.safe_push (bb);
1030 else if (mer == 2)
1031 ab_edge_call.safe_push (bb);
1032
1033 if (cur_region && bb_to_omp_idx == NULL)
1034 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
1035 }
1036
1037 /* Computed gotos are hell to deal with, especially if there are
1038 lots of them with a large number of destinations. So we factor
1039 them to a common computed goto location before we build the
1040 edge list. After we convert back to normal form, we will un-factor
1041 the computed gotos since factoring introduces an unwanted jump.
1042 For non-local gotos and abnormal edges from calls to calls that return
1043 twice or forced labels, factor the abnormal edges too, by having all
1044 abnormal edges from the calls go to a common artificial basic block
1045 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1046 basic block to all forced labels and calls returning twice.
1047 We do this per-OpenMP structured block, because those regions
1048 are guaranteed to be single entry single exit by the standard,
1049 so it is not allowed to enter or exit such regions abnormally this way,
1050 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1051 must not transfer control across SESE region boundaries. */
1052 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1053 {
1054 gimple_stmt_iterator gsi;
1055 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1056 basic_block *dispatcher_bbs = dispatcher_bb_array;
1057 int count = n_basic_blocks_for_fn (cfun);
1058
1059 if (bb_to_omp_idx)
1060 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1061
1062 FOR_EACH_BB_FN (bb, cfun)
1063 {
1064 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1065 {
1066 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1067 tree target;
1068
1069 if (!label_stmt)
1070 break;
1071
1072 target = gimple_label_label (label_stmt);
1073
1074 /* Make an edge to every label block that has been marked as a
1075 potential target for a computed goto or a non-local goto. */
1076 if (FORCED_LABEL (target))
1077 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1078 &ab_edge_goto, true);
1079 if (DECL_NONLOCAL (target))
1080 {
1081 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1082 &ab_edge_call, false);
1083 break;
1084 }
1085 }
1086
1087 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1088 gsi_next_nondebug (&gsi);
1089 if (!gsi_end_p (gsi))
1090 {
1091 /* Make an edge to every setjmp-like call. */
1092 gimple *call_stmt = gsi_stmt (gsi);
1093 if (is_gimple_call (call_stmt)
1094 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1095 || gimple_call_builtin_p (call_stmt,
1096 BUILT_IN_SETJMP_RECEIVER)))
1097 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
1098 &ab_edge_call, false);
1099 }
1100 }
1101
1102 if (bb_to_omp_idx)
1103 XDELETE (dispatcher_bbs);
1104 }
1105
1106 XDELETE (bb_to_omp_idx);
1107
1108 omp_free_regions ();
1109 }
1110
1111 /* Add SEQ after GSI. Start new bb after GSI, and create further bbs as
1112 needed. Returns true if new bbs were created.
1113 Note: This is transitional code, and should not be used for new code. We
1114 should be able to get rid of this by rewriting all target va-arg
1115 gimplification hooks to use an interface gimple_build_cond_value as described
1116 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1117
1118 bool
1119 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1120 {
1121 gimple *stmt = gsi_stmt (*gsi);
1122 basic_block bb = gimple_bb (stmt);
1123 basic_block lastbb, afterbb;
1124 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1125 edge e;
1126 lastbb = make_blocks_1 (seq, bb);
1127 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1128 return false;
1129 e = split_block (bb, stmt);
1130 /* Move e->dest to come after the new basic blocks. */
1131 afterbb = e->dest;
1132 unlink_block (afterbb);
1133 link_block (afterbb, lastbb);
1134 redirect_edge_succ (e, bb->next_bb);
1135 bb = bb->next_bb;
1136 while (bb != afterbb)
1137 {
1138 struct omp_region *cur_region = NULL;
1139 profile_count cnt = profile_count::zero ();
1140 bool all = true;
1141
1142 int cur_omp_region_idx = 0;
1143 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1144 gcc_assert (!mer && !cur_region);
1145 add_bb_to_loop (bb, afterbb->loop_father);
1146
1147 edge e;
1148 edge_iterator ei;
1149 FOR_EACH_EDGE (e, ei, bb->preds)
1150 {
1151 if (e->count ().initialized_p ())
1152 cnt += e->count ();
1153 else
1154 all = false;
1155 }
1156 tree_guess_outgoing_edge_probabilities (bb);
1157 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1158 bb->count = cnt;
1159
1160 bb = bb->next_bb;
1161 }
1162 return true;
1163 }
1164
1165 /* Find the next available discriminator value for LOCUS. The
1166 discriminator distinguishes among several basic blocks that
1167 share a common locus, allowing for more accurate sample-based
1168 profiling. */
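/* For example (a hypothetical case), if both arms of a conditional written
   on one source line land in separate basic blocks, each block receives a
   distinct discriminator so sample-based profiling can attribute counts
   to the right arm.  */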
1169
1170 static int
1171 next_discriminator_for_locus (location_t locus)
1172 {
1173 struct locus_discrim_map item;
1174 struct locus_discrim_map **slot;
1175
1176 item.locus = locus;
1177 item.discriminator = 0;
1178 slot = discriminator_per_locus->find_slot_with_hash (
1179 &item, LOCATION_LINE (locus), INSERT);
1180 gcc_assert (slot);
1181 if (*slot == HTAB_EMPTY_ENTRY)
1182 {
1183 *slot = XNEW (struct locus_discrim_map);
1184 gcc_assert (*slot);
1185 (*slot)->locus = locus;
1186 (*slot)->discriminator = 0;
1187 }
1188 (*slot)->discriminator++;
1189 return (*slot)->discriminator;
1190 }
1191
1192 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1193
1194 static bool
1195 same_line_p (location_t locus1, location_t locus2)
1196 {
1197 expanded_location from, to;
1198
1199 if (locus1 == locus2)
1200 return true;
1201
1202 from = expand_location (locus1);
1203 to = expand_location (locus2);
1204
1205 if (from.line != to.line)
1206 return false;
1207 if (from.file == to.file)
1208 return true;
1209 return (from.file != NULL
1210 && to.file != NULL
1211 && filename_cmp (from.file, to.file) == 0);
1212 }
1213
1214 /* Assign discriminators to each basic block. */
1215
1216 static void
1217 assign_discriminators (void)
1218 {
1219 basic_block bb;
1220
1221 FOR_EACH_BB_FN (bb, cfun)
1222 {
1223 edge e;
1224 edge_iterator ei;
1225 gimple *last = last_stmt (bb);
1226 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1227
1228 if (locus == UNKNOWN_LOCATION)
1229 continue;
1230
1231 FOR_EACH_EDGE (e, ei, bb->succs)
1232 {
1233 gimple *first = first_non_label_stmt (e->dest);
1234 gimple *last = last_stmt (e->dest);
1235 if ((first && same_line_p (locus, gimple_location (first)))
1236 || (last && same_line_p (locus, gimple_location (last))))
1237 {
1238 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1239 bb->discriminator = next_discriminator_for_locus (locus);
1240 else
1241 e->dest->discriminator = next_discriminator_for_locus (locus);
1242 }
1243 }
1244 }
1245 }
1246
1247 /* Create the edges for a GIMPLE_COND starting at block BB. */
1248
1249 static void
1250 make_cond_expr_edges (basic_block bb)
1251 {
1252 gcond *entry = as_a <gcond *> (last_stmt (bb));
1253 gimple *then_stmt, *else_stmt;
1254 basic_block then_bb, else_bb;
1255 tree then_label, else_label;
1256 edge e;
1257
1258 gcc_assert (entry);
1259 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1260
1261 /* Entry basic blocks for each component. */
1262 then_label = gimple_cond_true_label (entry);
1263 else_label = gimple_cond_false_label (entry);
1264 then_bb = label_to_block (then_label);
1265 else_bb = label_to_block (else_label);
1266 then_stmt = first_stmt (then_bb);
1267 else_stmt = first_stmt (else_bb);
1268
1269 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1270 e->goto_locus = gimple_location (then_stmt);
1271 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1272 if (e)
1273 e->goto_locus = gimple_location (else_stmt);
1274
1275 /* We do not need the labels anymore. */
1276 gimple_cond_set_true_label (entry, NULL_TREE);
1277 gimple_cond_set_false_label (entry, NULL_TREE);
1278 }
1279
1280
1281 /* Called for each element in the hash table (P) as we delete the
1282 edge to cases hash table.
1283
1284 Clear all the CASE_CHAINs to prevent problems with copying of
1285 SWITCH_EXPRs and structure sharing rules, then free the hash table
1286 element. */
1287
1288 bool
1289 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1290 {
1291 tree t, next;
1292
1293 for (t = value; t; t = next)
1294 {
1295 next = CASE_CHAIN (t);
1296 CASE_CHAIN (t) = NULL;
1297 }
1298
1299 return true;
1300 }
1301
1302 /* Start recording information mapping edges to case labels. */
1303
1304 void
1305 start_recording_case_labels (void)
1306 {
1307 gcc_assert (edge_to_cases == NULL);
1308 edge_to_cases = new hash_map<edge, tree>;
1309 touched_switch_bbs = BITMAP_ALLOC (NULL);
1310 }
1311
1312 /* Return nonzero if we are recording information for case labels. */
1313
1314 static bool
1315 recording_case_labels_p (void)
1316 {
1317 return (edge_to_cases != NULL);
1318 }
1319
1320 /* Stop recording information mapping edges to case labels and
1321 remove any information we have recorded. */
1322 void
1323 end_recording_case_labels (void)
1324 {
1325 bitmap_iterator bi;
1326 unsigned i;
1327 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1328 delete edge_to_cases;
1329 edge_to_cases = NULL;
1330 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1331 {
1332 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1333 if (bb)
1334 {
1335 gimple *stmt = last_stmt (bb);
1336 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1337 group_case_labels_stmt (as_a <gswitch *> (stmt));
1338 }
1339 }
1340 BITMAP_FREE (touched_switch_bbs);
1341 }
1342
1343 /* If we are inside a {start,end}_recording_cases block, then return
1344 a chain of CASE_LABEL_EXPRs from T which reference E.
1345
1346 Otherwise return NULL. */
1347
1348 static tree
1349 get_cases_for_edge (edge e, gswitch *t)
1350 {
1351 tree *slot;
1352 size_t i, n;
1353
1354 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1355 chains available. Return NULL so the caller can detect this case. */
1356 if (!recording_case_labels_p ())
1357 return NULL;
1358
1359 slot = edge_to_cases->get (e);
1360 if (slot)
1361 return *slot;
1362
1363 /* If we did not find E in the hash table, then this must be the first
1364 time we have been queried for information about E & T. Add all the
1365 elements from T to the hash table then perform the query again. */
1366
1367 n = gimple_switch_num_labels (t);
1368 for (i = 0; i < n; i++)
1369 {
1370 tree elt = gimple_switch_label (t, i);
1371 tree lab = CASE_LABEL (elt);
1372 basic_block label_bb = label_to_block (lab);
1373 edge this_edge = find_edge (e->src, label_bb);
1374
1375 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1376 a new chain. */
1377 tree &s = edge_to_cases->get_or_insert (this_edge);
1378 CASE_CHAIN (elt) = s;
1379 s = elt;
1380 }
1381
1382 return *edge_to_cases->get (e);
1383 }
1384
1385 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1386
1387 static void
1388 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1389 {
1390 size_t i, n;
1391
1392 n = gimple_switch_num_labels (entry);
1393
1394 for (i = 0; i < n; ++i)
1395 {
1396 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1397 basic_block label_bb = label_to_block (lab);
1398 make_edge (bb, label_bb, 0);
1399 }
1400 }
1401
1402
1403 /* Return the basic block holding label DEST. */
1404
1405 basic_block
1406 label_to_block_fn (struct function *ifun, tree dest)
1407 {
1408 int uid = LABEL_DECL_UID (dest);
1409
1410 /* We would die hard when faced with an undefined label. Emit a label to
1411 the very first basic block. This will hopefully make even the dataflow
1412 and undefined variable warnings quite right. */
1413 if (seen_error () && uid < 0)
1414 {
1415 gimple_stmt_iterator gsi =
1416 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1417 gimple *stmt;
1418
1419 stmt = gimple_build_label (dest);
1420 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1421 uid = LABEL_DECL_UID (dest);
1422 }
1423 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1424 return NULL;
1425 return (*ifun->cfg->x_label_to_block_map)[uid];
1426 }
1427
1428 /* Create edges for a goto statement at block BB. Returns true
1429 if abnormal edges should be created. */
1430
1431 static bool
1432 make_goto_expr_edges (basic_block bb)
1433 {
1434 gimple_stmt_iterator last = gsi_last_bb (bb);
1435 gimple *goto_t = gsi_stmt (last);
1436
1437 /* A simple GOTO creates normal edges. */
1438 if (simple_goto_p (goto_t))
1439 {
1440 tree dest = gimple_goto_dest (goto_t);
1441 basic_block label_bb = label_to_block (dest);
1442 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1443 e->goto_locus = gimple_location (goto_t);
1444 gsi_remove (&last, true);
1445 return false;
1446 }
1447
1448 /* A computed GOTO creates abnormal edges. */
1449 return true;
1450 }
1451
1452 /* Create edges for an asm statement with labels at block BB. */
1453
1454 static void
1455 make_gimple_asm_edges (basic_block bb)
1456 {
1457 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1458 int i, n = gimple_asm_nlabels (stmt);
1459
1460 for (i = 0; i < n; ++i)
1461 {
1462 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1463 basic_block label_bb = label_to_block (label);
1464 make_edge (bb, label_bb, 0);
1465 }
1466 }
1467
1468 /*---------------------------------------------------------------------------
1469 Flowgraph analysis
1470 ---------------------------------------------------------------------------*/
1471
1472 /* Cleanup useless labels in basic blocks. This is something we wish
1473 to do early because it allows us to group case labels before creating
1474 the edges for the CFG, and it speeds up block statement iterators in
1475 all passes later on.
1476 We rerun this pass after the CFG is created, to get rid of the labels that
1477 are no longer referenced. After that we do not run it any more, since
1478 (almost) no new labels should be created. */
1479
1480 /* A map from basic block index to the leading label of that block. */
1481 static struct label_record
1482 {
1483 /* The label. */
1484 tree label;
1485
1486 /* True if the label is referenced from somewhere. */
1487 bool used;
1488 } *label_for_bb;
1489
1490 /* Given LABEL return the first label in the same basic block. */
1491
1492 static tree
1493 main_block_label (tree label)
1494 {
1495 basic_block bb = label_to_block (label);
1496 tree main_label = label_for_bb[bb->index].label;
1497
1498 /* label_to_block possibly inserted an undefined label into the chain. */
1499 if (!main_label)
1500 {
1501 label_for_bb[bb->index].label = label;
1502 main_label = label;
1503 }
1504
1505 label_for_bb[bb->index].used = true;
1506 return main_label;
1507 }
1508
1509 /* Clean up redundant labels within the exception tree. */
1510
1511 static void
1512 cleanup_dead_labels_eh (void)
1513 {
1514 eh_landing_pad lp;
1515 eh_region r;
1516 tree lab;
1517 int i;
1518
1519 if (cfun->eh == NULL)
1520 return;
1521
1522 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1523 if (lp && lp->post_landing_pad)
1524 {
1525 lab = main_block_label (lp->post_landing_pad);
1526 if (lab != lp->post_landing_pad)
1527 {
1528 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1529 EH_LANDING_PAD_NR (lab) = lp->index;
1530 }
1531 }
1532
1533 FOR_ALL_EH_REGION (r)
1534 switch (r->type)
1535 {
1536 case ERT_CLEANUP:
1537 case ERT_MUST_NOT_THROW:
1538 break;
1539
1540 case ERT_TRY:
1541 {
1542 eh_catch c;
1543 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1544 {
1545 lab = c->label;
1546 if (lab)
1547 c->label = main_block_label (lab);
1548 }
1549 }
1550 break;
1551
1552 case ERT_ALLOWED_EXCEPTIONS:
1553 lab = r->u.allowed.label;
1554 if (lab)
1555 r->u.allowed.label = main_block_label (lab);
1556 break;
1557 }
1558 }
1559
1560
1561 /* Cleanup redundant labels. This is a three-step process:
1562 1) Find the leading label for each block.
1563 2) Redirect all references to labels to the leading labels.
1564 3) Cleanup all useless labels. */
1565
1566 void
1567 cleanup_dead_labels (void)
1568 {
1569 basic_block bb;
1570 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1571
1572 /* Find a suitable label for each block. We use the first user-defined
1573 label if there is one, or otherwise just the first label we see. */
1574 FOR_EACH_BB_FN (bb, cfun)
1575 {
1576 gimple_stmt_iterator i;
1577
1578 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1579 {
1580 tree label;
1581 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1582
1583 if (!label_stmt)
1584 break;
1585
1586 label = gimple_label_label (label_stmt);
1587
1588 /* If we have not yet seen a label for the current block,
1589 remember this one and see if there are more labels. */
1590 if (!label_for_bb[bb->index].label)
1591 {
1592 label_for_bb[bb->index].label = label;
1593 continue;
1594 }
1595
1596 /* If we did see a label for the current block already, but it
1597 is an artificially created label, replace it if the current
1598 label is a user defined label. */
1599 if (!DECL_ARTIFICIAL (label)
1600 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1601 {
1602 label_for_bb[bb->index].label = label;
1603 break;
1604 }
1605 }
1606 }
1607
1608 /* Now redirect all jumps/branches to the selected label.
1609 First do so for each block ending in a control statement. */
1610 FOR_EACH_BB_FN (bb, cfun)
1611 {
1612 gimple *stmt = last_stmt (bb);
1613 tree label, new_label;
1614
1615 if (!stmt)
1616 continue;
1617
1618 switch (gimple_code (stmt))
1619 {
1620 case GIMPLE_COND:
1621 {
1622 gcond *cond_stmt = as_a <gcond *> (stmt);
1623 label = gimple_cond_true_label (cond_stmt);
1624 if (label)
1625 {
1626 new_label = main_block_label (label);
1627 if (new_label != label)
1628 gimple_cond_set_true_label (cond_stmt, new_label);
1629 }
1630
1631 label = gimple_cond_false_label (cond_stmt);
1632 if (label)
1633 {
1634 new_label = main_block_label (label);
1635 if (new_label != label)
1636 gimple_cond_set_false_label (cond_stmt, new_label);
1637 }
1638 }
1639 break;
1640
1641 case GIMPLE_SWITCH:
1642 {
1643 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1644 size_t i, n = gimple_switch_num_labels (switch_stmt);
1645
1646 /* Replace all destination labels. */
1647 for (i = 0; i < n; ++i)
1648 {
1649 tree case_label = gimple_switch_label (switch_stmt, i);
1650 label = CASE_LABEL (case_label);
1651 new_label = main_block_label (label);
1652 if (new_label != label)
1653 CASE_LABEL (case_label) = new_label;
1654 }
1655 break;
1656 }
1657
1658 case GIMPLE_ASM:
1659 {
1660 gasm *asm_stmt = as_a <gasm *> (stmt);
1661 int i, n = gimple_asm_nlabels (asm_stmt);
1662
1663 for (i = 0; i < n; ++i)
1664 {
1665 tree cons = gimple_asm_label_op (asm_stmt, i);
1666 tree label = main_block_label (TREE_VALUE (cons));
1667 TREE_VALUE (cons) = label;
1668 }
1669 break;
1670 }
1671
1672 /* We have to handle gotos until they're removed, and we don't
1673 remove them until after we've created the CFG edges. */
1674 case GIMPLE_GOTO:
1675 if (!computed_goto_p (stmt))
1676 {
1677 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1678 label = gimple_goto_dest (goto_stmt);
1679 new_label = main_block_label (label);
1680 if (new_label != label)
1681 gimple_goto_set_dest (goto_stmt, new_label);
1682 }
1683 break;
1684
1685 case GIMPLE_TRANSACTION:
1686 {
1687 gtransaction *txn = as_a <gtransaction *> (stmt);
1688
1689 label = gimple_transaction_label_norm (txn);
1690 if (label)
1691 {
1692 new_label = main_block_label (label);
1693 if (new_label != label)
1694 gimple_transaction_set_label_norm (txn, new_label);
1695 }
1696
1697 label = gimple_transaction_label_uninst (txn);
1698 if (label)
1699 {
1700 new_label = main_block_label (label);
1701 if (new_label != label)
1702 gimple_transaction_set_label_uninst (txn, new_label);
1703 }
1704
1705 label = gimple_transaction_label_over (txn);
1706 if (label)
1707 {
1708 new_label = main_block_label (label);
1709 if (new_label != label)
1710 gimple_transaction_set_label_over (txn, new_label);
1711 }
1712 }
1713 break;
1714
1715 default:
1716 break;
1717 }
1718 }
1719
1720 /* Do the same for the exception region tree labels. */
1721 cleanup_dead_labels_eh ();
1722
1723 /* Finally, purge dead labels. All user-defined labels and labels that
1724 can be the target of non-local gotos and labels which have their
1725 address taken are preserved. */
1726 FOR_EACH_BB_FN (bb, cfun)
1727 {
1728 gimple_stmt_iterator i;
1729 tree label_for_this_bb = label_for_bb[bb->index].label;
1730
1731 if (!label_for_this_bb)
1732 continue;
1733
1734 /* If the main label of the block is unused, we may still remove it. */
1735 if (!label_for_bb[bb->index].used)
1736 label_for_this_bb = NULL;
1737
1738 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1739 {
1740 tree label;
1741 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1742
1743 if (!label_stmt)
1744 break;
1745
1746 label = gimple_label_label (label_stmt);
1747
1748 if (label == label_for_this_bb
1749 || !DECL_ARTIFICIAL (label)
1750 || DECL_NONLOCAL (label)
1751 || FORCED_LABEL (label))
1752 gsi_next (&i);
1753 else
1754 gsi_remove (&i, true);
1755 }
1756 }
1757
1758 free (label_for_bb);
1759 }
1760
1761 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1762 the ones jumping to the same label.
1763 E.g. three separate entries 1: 2: 3: become one entry 1..3: */
1764
1765 bool
1766 group_case_labels_stmt (gswitch *stmt)
1767 {
1768 int old_size = gimple_switch_num_labels (stmt);
1769 int i, next_index, new_size;
1770 basic_block default_bb = NULL;
1771 hash_set<tree> *removed_labels = NULL;
1772
1773 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1774
1775 /* Look for possible opportunities to merge cases. */
1776 new_size = i = 1;
1777 while (i < old_size)
1778 {
1779 tree base_case, base_high;
1780 basic_block base_bb;
1781
1782 base_case = gimple_switch_label (stmt, i);
1783
1784 gcc_assert (base_case);
1785 base_bb = label_to_block (CASE_LABEL (base_case));
1786
1787 /* Discard cases that have the same destination as the default case or
1788 whose destination blocks have already been removed as unreachable. */
1789 if (base_bb == NULL
1790 || base_bb == default_bb
1791 || (removed_labels
1792 && removed_labels->contains (CASE_LABEL (base_case))))
1793 {
1794 i++;
1795 continue;
1796 }
1797
1798 base_high = CASE_HIGH (base_case)
1799 ? CASE_HIGH (base_case)
1800 : CASE_LOW (base_case);
1801 next_index = i + 1;
1802
1803 /* Try to merge case labels. Break out when we reach the end
1804 of the label vector or when we cannot merge the next case
1805 label with the current one. */
1806 while (next_index < old_size)
1807 {
1808 tree merge_case = gimple_switch_label (stmt, next_index);
1809 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1810 wide_int bhp1 = wi::to_wide (base_high) + 1;
1811
1812 /* Merge the cases if they jump to the same place,
1813 and their ranges are consecutive. */
1814 if (merge_bb == base_bb
1815 && (removed_labels == NULL
1816 || !removed_labels->contains (CASE_LABEL (merge_case)))
1817 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1818 {
1819 base_high
1820 = (CASE_HIGH (merge_case)
1821 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1822 CASE_HIGH (base_case) = base_high;
1823 next_index++;
1824 }
1825 else
1826 break;
1827 }
1828
1829 /* Discard cases that have an unreachable destination block. */
1830 if (EDGE_COUNT (base_bb->succs) == 0
1831 && gimple_seq_unreachable_p (bb_seq (base_bb))
1832 	  /* Don't optimize this too early if the __builtin_unreachable ()
1833 	     is the one implicitly added by the C++ FE, before
1834 	     -Wreturn-type can be diagnosed.  We'll optimize it later,
1835 	     during the switchconv pass or any other CFG cleanup.  */
1836 && (gimple_in_ssa_p (cfun)
1837 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1838 != BUILTINS_LOCATION)))
1839 {
1840 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1841 if (base_edge != NULL)
1842 {
1843 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1844 !gsi_end_p (gsi); gsi_next (&gsi))
1845 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1846 {
1847 if (FORCED_LABEL (gimple_label_label (stmt))
1848 || DECL_NONLOCAL (gimple_label_label (stmt)))
1849 {
1850 /* Forced/non-local labels aren't going to be removed,
1851 but they will be moved to some neighbouring basic
1852 block. If some later case label refers to one of
1853 those labels, we should throw that case away rather
1854 		       than keeping it around and referring to some random
1855 other basic block without an edge to it. */
1856 if (removed_labels == NULL)
1857 removed_labels = new hash_set<tree>;
1858 removed_labels->add (gimple_label_label (stmt));
1859 }
1860 }
1861 else
1862 break;
1863 remove_edge_and_dominated_blocks (base_edge);
1864 }
1865 i = next_index;
1866 continue;
1867 }
1868
1869 if (new_size < i)
1870 gimple_switch_set_label (stmt, new_size,
1871 gimple_switch_label (stmt, i));
1872 i = next_index;
1873 new_size++;
1874 }
1875
1876 gcc_assert (new_size <= old_size);
1877
1878 if (new_size < old_size)
1879 gimple_switch_set_num_labels (stmt, new_size);
1880
1881 delete removed_labels;
1882 return new_size < old_size;
1883 }
1884
1885 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1886 and scan the sorted vector of cases. Combine the ones jumping to the
1887 same label. */
1888
1889 bool
1890 group_case_labels (void)
1891 {
1892 basic_block bb;
1893 bool changed = false;
1894
1895 FOR_EACH_BB_FN (bb, cfun)
1896 {
1897 gimple *stmt = last_stmt (bb);
1898 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1899 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1900 }
1901
1902 return changed;
1903 }
1904
1905 /* Checks whether we can merge block B into block A. */
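/* Roughly, this requires that B is the single successor of A over a
   non-complex edge, that A is the single predecessor of B, that A does
   not end in a control statement, and that neither block is the entry
   or exit block; the further checks below deal with labels, PHIs,
   loop latches and goto locations.  */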
1906
1907 static bool
1908 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1909 {
1910 gimple *stmt;
1911
1912 if (!single_succ_p (a))
1913 return false;
1914
1915 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1916 return false;
1917
1918 if (single_succ (a) != b)
1919 return false;
1920
1921 if (!single_pred_p (b))
1922 return false;
1923
1924 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1925 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1926 return false;
1927
1928 /* If A ends by a statement causing exceptions or something similar, we
1929 cannot merge the blocks. */
1930 stmt = last_stmt (a);
1931 if (stmt && stmt_ends_bb_p (stmt))
1932 return false;
1933
1934 /* Do not allow a block with only a non-local label to be merged. */
1935 if (stmt)
1936 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1937 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1938 return false;
1939
1940 /* Examine the labels at the beginning of B. */
1941 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1942 gsi_next (&gsi))
1943 {
1944 tree lab;
1945 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1946 if (!label_stmt)
1947 break;
1948 lab = gimple_label_label (label_stmt);
1949
1950 /* Do not remove user forced labels or for -O0 any user labels. */
1951 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1952 return false;
1953 }
1954
1955 /* Protect simple loop latches. We only want to avoid merging
1956 the latch with the loop header or with a block in another
1957 loop in this case. */
1958 if (current_loops
1959 && b->loop_father->latch == b
1960 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1961 && (b->loop_father->header == a
1962 || b->loop_father != a->loop_father))
1963 return false;
1964
1965 /* It must be possible to eliminate all phi nodes in B. If ssa form
1966 is not up-to-date and a name-mapping is registered, we cannot eliminate
1967 any phis. Symbols marked for renaming are never a problem though. */
1968 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1969 gsi_next (&gsi))
1970 {
1971 gphi *phi = gsi.phi ();
1972 /* Technically only new names matter. */
1973 if (name_registered_for_update_p (PHI_RESULT (phi)))
1974 return false;
1975 }
1976
1977 /* When not optimizing, don't merge if we'd lose goto_locus. */
1978 if (!optimize
1979 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1980 {
1981 location_t goto_locus = single_succ_edge (a)->goto_locus;
1982 gimple_stmt_iterator prev, next;
1983 prev = gsi_last_nondebug_bb (a);
1984 next = gsi_after_labels (b);
1985 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1986 gsi_next_nondebug (&next);
1987 if ((gsi_end_p (prev)
1988 || gimple_location (gsi_stmt (prev)) != goto_locus)
1989 && (gsi_end_p (next)
1990 || gimple_location (gsi_stmt (next)) != goto_locus))
1991 return false;
1992 }
1993
1994 return true;
1995 }
1996
1997 /* Replaces all uses of NAME by VAL. */
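/* Each affected statement is re-folded and updated; EH edges that become
   dead because of the replacement are purged, and NAME is also replaced
   in the niter/bound information recorded for the current loops.  */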
1998
1999 void
2000 replace_uses_by (tree name, tree val)
2001 {
2002 imm_use_iterator imm_iter;
2003 use_operand_p use;
2004 gimple *stmt;
2005 edge e;
2006
2007 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2008 {
2009 /* Mark the block if we change the last stmt in it. */
2010 if (cfgcleanup_altered_bbs
2011 && stmt_ends_bb_p (stmt))
2012 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
2013
2014 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2015 {
2016 replace_exp (use, val);
2017
2018 if (gimple_code (stmt) == GIMPLE_PHI)
2019 {
2020 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2021 PHI_ARG_INDEX_FROM_USE (use));
2022 if (e->flags & EDGE_ABNORMAL
2023 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2024 {
2025 /* This can only occur for virtual operands, since
2026 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2027 would prevent replacement. */
2028 gcc_checking_assert (virtual_operand_p (name));
2029 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2030 }
2031 }
2032 }
2033
2034 if (gimple_code (stmt) != GIMPLE_PHI)
2035 {
2036 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2037 gimple *orig_stmt = stmt;
2038 size_t i;
2039
2040 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2041 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2042 	 only change something from non-invariant to invariant, and only
2043 when propagating constants. */
2044 if (is_gimple_min_invariant (val))
2045 for (i = 0; i < gimple_num_ops (stmt); i++)
2046 {
2047 tree op = gimple_op (stmt, i);
2048 /* Operands may be empty here. For example, the labels
2049 of a GIMPLE_COND are nulled out following the creation
2050 of the corresponding CFG edges. */
2051 if (op && TREE_CODE (op) == ADDR_EXPR)
2052 recompute_tree_invariant_for_addr_expr (op);
2053 }
2054
2055 if (fold_stmt (&gsi))
2056 stmt = gsi_stmt (gsi);
2057
2058 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2059 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2060
2061 update_stmt (stmt);
2062 }
2063 }
2064
2065 gcc_checking_assert (has_zero_uses (name));
2066
2067 /* Also update the trees stored in loop structures. */
2068 if (current_loops)
2069 {
2070 struct loop *loop;
2071
2072 FOR_EACH_LOOP (loop, 0)
2073 {
2074 substitute_in_loop_info (loop, name, val);
2075 }
2076 }
2077 }
2078
2079 /* Merge block B into block A. */
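/* Single-argument PHIs in B are turned into copies or propagated away,
   labels in B are removed (forced labels are moved into A instead, and
   other user labels may be kept as debug binds), and the remaining
   statements of B are appended to A.  */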
2080
2081 static void
2082 gimple_merge_blocks (basic_block a, basic_block b)
2083 {
2084 gimple_stmt_iterator last, gsi;
2085 gphi_iterator psi;
2086
2087 if (dump_file)
2088 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2089
2090 /* Remove all single-valued PHI nodes from block B of the form
2091 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2092 gsi = gsi_last_bb (a);
2093 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2094 {
2095 gimple *phi = gsi_stmt (psi);
2096 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2097 gimple *copy;
2098 bool may_replace_uses = (virtual_operand_p (def)
2099 || may_propagate_copy (def, use));
2100
2101 /* In case we maintain loop closed ssa form, do not propagate arguments
2102 of loop exit phi nodes. */
2103 if (current_loops
2104 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2105 && !virtual_operand_p (def)
2106 && TREE_CODE (use) == SSA_NAME
2107 && a->loop_father != b->loop_father)
2108 may_replace_uses = false;
2109
2110 if (!may_replace_uses)
2111 {
2112 gcc_assert (!virtual_operand_p (def));
2113
2114 /* Note that just emitting the copies is fine -- there is no problem
2115 with ordering of phi nodes. This is because A is the single
2116 predecessor of B, therefore results of the phi nodes cannot
2117 appear as arguments of the phi nodes. */
2118 copy = gimple_build_assign (def, use);
2119 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2120 remove_phi_node (&psi, false);
2121 }
2122 else
2123 {
2124 /* If we deal with a PHI for virtual operands, we can simply
2125 propagate these without fussing with folding or updating
2126 the stmt. */
2127 if (virtual_operand_p (def))
2128 {
2129 imm_use_iterator iter;
2130 use_operand_p use_p;
2131 gimple *stmt;
2132
2133 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2134 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2135 SET_USE (use_p, use);
2136
2137 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2138 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2139 }
2140 else
2141 replace_uses_by (def, use);
2142
2143 remove_phi_node (&psi, true);
2144 }
2145 }
2146
2147 /* Ensure that B follows A. */
2148 move_block_after (b, a);
2149
2150 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2151 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2152
2153 /* Remove labels from B and set gimple_bb to A for other statements. */
2154 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2155 {
2156 gimple *stmt = gsi_stmt (gsi);
2157 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2158 {
2159 tree label = gimple_label_label (label_stmt);
2160 int lp_nr;
2161
2162 gsi_remove (&gsi, false);
2163
2164 /* Now that we can thread computed gotos, we might have
2165 	     a situation where we have a forced label in block B.
2166 However, the label at the start of block B might still be
2167 used in other ways (think about the runtime checking for
2168 	     Fortran assigned gotos).  So we cannot just delete the
2169 label. Instead we move the label to the start of block A. */
2170 if (FORCED_LABEL (label))
2171 {
2172 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2173 tree first_label = NULL_TREE;
2174 if (!gsi_end_p (dest_gsi))
2175 if (glabel *first_label_stmt
2176 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2177 first_label = gimple_label_label (first_label_stmt);
2178 if (first_label
2179 && (DECL_NONLOCAL (first_label)
2180 || EH_LANDING_PAD_NR (first_label) != 0))
2181 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2182 else
2183 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2184 }
2185 	  /* Other user labels are kept around in the form of a debug stmt.  */
2186 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2187 {
2188 gimple *dbg = gimple_build_debug_bind (label,
2189 integer_zero_node,
2190 stmt);
2191 gimple_debug_bind_reset_value (dbg);
2192 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2193 }
2194
2195 lp_nr = EH_LANDING_PAD_NR (label);
2196 if (lp_nr)
2197 {
2198 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2199 lp->post_landing_pad = NULL;
2200 }
2201 }
2202 else
2203 {
2204 gimple_set_bb (stmt, a);
2205 gsi_next (&gsi);
2206 }
2207 }
2208
2209 /* When merging two BBs, if their counts are different, the larger count
2210 is selected as the new bb count. This is to handle inconsistent
2211 profiles. */
2212 if (a->loop_father == b->loop_father)
2213 {
2214 a->count = a->count.merge (b->count);
2215 }
2216
2217 /* Merge the sequences. */
2218 last = gsi_last_bb (a);
2219 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2220 set_bb_seq (b, NULL);
2221
2222 if (cfgcleanup_altered_bbs)
2223 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2224 }
2225
2226
2227 /* Return the one of two successors of BB that is not reachable by a
2228 complex edge, if there is one. Else, return BB. We use
2229 this in optimizations that use post-dominators for their heuristics,
2230 to catch the cases in C++ where function calls are involved. */
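/* For instance, a block ending in a call that can throw internally
   typically has a fallthru successor and an EH successor; the EH edge
   is EDGE_COMPLEX, so the fallthru destination is returned here.  */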
2231
2232 basic_block
2233 single_noncomplex_succ (basic_block bb)
2234 {
2235 edge e0, e1;
2236 if (EDGE_COUNT (bb->succs) != 2)
2237 return bb;
2238
2239 e0 = EDGE_SUCC (bb, 0);
2240 e1 = EDGE_SUCC (bb, 1);
2241 if (e0->flags & EDGE_COMPLEX)
2242 return e1->dest;
2243 if (e1->flags & EDGE_COMPLEX)
2244 return e0->dest;
2245
2246 return bb;
2247 }
2248
2249 /* CALL is a GIMPLE_CALL.  Set the current_function_calls_* flags accordingly.  */
2250
2251 void
2252 notice_special_calls (gcall *call)
2253 {
2254 int flags = gimple_call_flags (call);
2255
2256 if (flags & ECF_MAY_BE_ALLOCA)
2257 cfun->calls_alloca = true;
2258 if (flags & ECF_RETURNS_TWICE)
2259 cfun->calls_setjmp = true;
2260 }
2261
2262
2263 /* Clear flags set by notice_special_calls. Used by dead code removal
2264 to update the flags. */
2265
2266 void
2267 clear_special_calls (void)
2268 {
2269 cfun->calls_alloca = false;
2270 cfun->calls_setjmp = false;
2271 }
2272
2273 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2274
2275 static void
2276 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2277 {
2278 /* Since this block is no longer reachable, we can just delete all
2279 of its PHI nodes. */
2280 remove_phi_nodes (bb);
2281
2282 /* Remove edges to BB's successors. */
2283 while (EDGE_COUNT (bb->succs) > 0)
2284 remove_edge (EDGE_SUCC (bb, 0));
2285 }
2286
2287
2288 /* Remove statements of basic block BB. */
2289
2290 static void
2291 remove_bb (basic_block bb)
2292 {
2293 gimple_stmt_iterator i;
2294
2295 if (dump_file)
2296 {
2297 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2298 if (dump_flags & TDF_DETAILS)
2299 {
2300 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2301 fprintf (dump_file, "\n");
2302 }
2303 }
2304
2305 if (current_loops)
2306 {
2307 struct loop *loop = bb->loop_father;
2308
2309 /* If a loop gets removed, clean up the information associated
2310 with it. */
2311 if (loop->latch == bb
2312 || loop->header == bb)
2313 free_numbers_of_iterations_estimates (loop);
2314 }
2315
2316 /* Remove all the instructions in the block. */
2317 if (bb_seq (bb) != NULL)
2318 {
2319 /* Walk backwards so as to get a chance to substitute all
2320 released DEFs into debug stmts. See
2321 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2322 details. */
2323 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2324 {
2325 gimple *stmt = gsi_stmt (i);
2326 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2327 if (label_stmt
2328 && (FORCED_LABEL (gimple_label_label (label_stmt))
2329 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2330 {
2331 basic_block new_bb;
2332 gimple_stmt_iterator new_gsi;
2333
2334 /* A non-reachable non-local label may still be referenced.
2335 But it no longer needs to carry the extra semantics of
2336 non-locality. */
2337 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2338 {
2339 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2340 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2341 }
2342
2343 new_bb = bb->prev_bb;
2344 /* Don't move any labels into ENTRY block. */
2345 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2346 {
2347 new_bb = single_succ (new_bb);
2348 gcc_assert (new_bb != bb);
2349 }
2350 new_gsi = gsi_after_labels (new_bb);
2351 gsi_remove (&i, false);
2352 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2353 }
2354 else
2355 {
2356 /* Release SSA definitions. */
2357 release_defs (stmt);
2358 gsi_remove (&i, true);
2359 }
2360
2361 if (gsi_end_p (i))
2362 i = gsi_last_bb (bb);
2363 else
2364 gsi_prev (&i);
2365 }
2366 }
2367
2368 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2369 bb->il.gimple.seq = NULL;
2370 bb->il.gimple.phi_nodes = NULL;
2371 }
2372
2373
2374 /* Given a basic block BB and a value VAL for use in the final statement
2375 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2376 the edge that will be taken out of the block.
2377 If VAL is NULL_TREE, then the current value of the final statement's
2378 predicate or index is used.
2379 If the value does not match a unique edge, NULL is returned. */
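/* Concretely: for a GIMPLE_COND, VAL is interpreted as the boolean value
   of the predicate; for a GIMPLE_SWITCH it is looked up in the case label
   vector; for a computed goto it must be the address of a label (an
   ADDR_EXPR of a LABEL_DECL).  In all other cases the single successor,
   if any, is returned.  */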
2380
2381 edge
2382 find_taken_edge (basic_block bb, tree val)
2383 {
2384 gimple *stmt;
2385
2386 stmt = last_stmt (bb);
2387
2388 /* Handle ENTRY and EXIT. */
2389 if (!stmt)
2390 return NULL;
2391
2392 if (gimple_code (stmt) == GIMPLE_COND)
2393 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2394
2395 if (gimple_code (stmt) == GIMPLE_SWITCH)
2396 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2397
2398 if (computed_goto_p (stmt))
2399 {
2400       /* Only optimize if the argument is a label; if the argument is
2401 	 not a label, then we cannot construct a proper CFG.
2402
2403 It may be the case that we only need to allow the LABEL_REF to
2404 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2405 appear inside a LABEL_EXPR just to be safe. */
2406 if (val
2407 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2408 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2409 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2410 }
2411
2412 /* Otherwise we only know the taken successor edge if it's unique. */
2413 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2414 }
2415
2416 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2417 statement, determine which of the outgoing edges will be taken out of the
2418 block. Return NULL if either edge may be taken. */
2419
2420 static edge
2421 find_taken_edge_computed_goto (basic_block bb, tree val)
2422 {
2423 basic_block dest;
2424 edge e = NULL;
2425
2426 dest = label_to_block (val);
2427 if (dest)
2428 e = find_edge (bb, dest);
2429
2430 /* It's possible for find_edge to return NULL here on invalid code
2431 that abuses the labels-as-values extension (e.g. code that attempts to
2432 jump *between* functions via stored labels-as-values; PR 84136).
2433 If so, then we simply return that NULL for the edge.
2434 We don't currently have a way of detecting such invalid code, so we
2435 can't assert that it was the case when a NULL edge occurs here. */
2436
2437 return e;
2438 }
2439
2440 /* Given COND_STMT and a constant value VAL for use as the predicate,
2441 determine which of the two edges will be taken out of
2442 the statement's block. Return NULL if either edge may be taken.
2443 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2444 is used. */
2445
2446 static edge
2447 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2448 {
2449 edge true_edge, false_edge;
2450
2451 if (val == NULL_TREE)
2452 {
2453 /* Use the current value of the predicate. */
2454 if (gimple_cond_true_p (cond_stmt))
2455 val = integer_one_node;
2456 else if (gimple_cond_false_p (cond_stmt))
2457 val = integer_zero_node;
2458 else
2459 return NULL;
2460 }
2461 else if (TREE_CODE (val) != INTEGER_CST)
2462 return NULL;
2463
2464 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2465 &true_edge, &false_edge);
2466
2467 return (integer_zerop (val) ? false_edge : true_edge);
2468 }
2469
2470 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2471 which edge will be taken out of the statement's block. Return NULL if any
2472 edge may be taken.
2473 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2474 is used. */
2475
2476 static edge
2477 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2478 {
2479 basic_block dest_bb;
2480 edge e;
2481 tree taken_case;
2482
2483 if (gimple_switch_num_labels (switch_stmt) == 1)
2484 taken_case = gimple_switch_default_label (switch_stmt);
2485 else
2486 {
2487 if (val == NULL_TREE)
2488 val = gimple_switch_index (switch_stmt);
2489 if (TREE_CODE (val) != INTEGER_CST)
2490 return NULL;
2491 else
2492 taken_case = find_case_label_for_value (switch_stmt, val);
2493 }
2494 dest_bb = label_to_block (CASE_LABEL (taken_case));
2495
2496 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2497 gcc_assert (e);
2498 return e;
2499 }
2500
2501
2502 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2503 We can make optimal use here of the fact that the case labels are
2504 sorted: We can do a binary search for a case matching VAL. */
2505
2506 static tree
2507 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2508 {
2509 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2510 tree default_case = gimple_switch_default_label (switch_stmt);
2511
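  /* Case labels 1 .. n-1 are sorted by CASE_LOW; label 0 is the default
     case.  The binary search below maintains the invariant that every
     case label with index in [1, LOW] has CASE_LOW <= VAL, while every
     one with index >= HIGH has CASE_LOW > VAL.  */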
2512 for (low = 0, high = n; high - low > 1; )
2513 {
2514 size_t i = (high + low) / 2;
2515 tree t = gimple_switch_label (switch_stmt, i);
2516 int cmp;
2517
2518 /* Cache the result of comparing CASE_LOW and val. */
2519 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2520
2521 if (cmp > 0)
2522 high = i;
2523 else
2524 low = i;
2525
2526 if (CASE_HIGH (t) == NULL)
2527 {
2528 	  /* A single-valued case label.  */
2529 if (cmp == 0)
2530 return t;
2531 }
2532 else
2533 {
2534 /* A case range. We can only handle integer ranges. */
2535 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2536 return t;
2537 }
2538 }
2539
2540 return default_case;
2541 }
2542
2543
2544 /* Dump a basic block on stderr. */
2545
2546 void
2547 gimple_debug_bb (basic_block bb)
2548 {
2549 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2550 }
2551
2552
2553 /* Dump basic block with index N on stderr. */
2554
2555 basic_block
2556 gimple_debug_bb_n (int n)
2557 {
2558 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2559 return BASIC_BLOCK_FOR_FN (cfun, n);
2560 }
2561
2562
2563 /* Dump the CFG on stderr.
2564
2565 FLAGS are the same used by the tree dumping functions
2566 (see TDF_* in dumpfile.h). */
2567
2568 void
2569 gimple_debug_cfg (dump_flags_t flags)
2570 {
2571 gimple_dump_cfg (stderr, flags);
2572 }
2573
2574
2575 /* Dump the program showing basic block boundaries on the given FILE.
2576
2577 FLAGS are the same used by the tree dumping functions (see TDF_* in
2578 tree.h). */
2579
2580 void
2581 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2582 {
2583 if (flags & TDF_DETAILS)
2584 {
2585 dump_function_header (file, current_function_decl, flags);
2586 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2587 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2588 last_basic_block_for_fn (cfun));
2589
2590 brief_dump_cfg (file, flags);
2591 fprintf (file, "\n");
2592 }
2593
2594 if (flags & TDF_STATS)
2595 dump_cfg_stats (file);
2596
2597 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2598 }
2599
2600
2601 /* Dump CFG statistics on FILE. */
2602
2603 void
2604 dump_cfg_stats (FILE *file)
2605 {
2606 static long max_num_merged_labels = 0;
2607 unsigned long size, total = 0;
2608 long num_edges;
2609 basic_block bb;
2610 const char * const fmt_str = "%-30s%-13s%12s\n";
2611 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2612 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2613 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2614 const char *funcname = current_function_name ();
2615
2616 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2617
2618 fprintf (file, "---------------------------------------------------------\n");
2619 fprintf (file, fmt_str, "", " Number of ", "Memory");
2620 fprintf (file, fmt_str, "", " instances ", "used ");
2621 fprintf (file, "---------------------------------------------------------\n");
2622
2623 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2624 total += size;
2625 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2626 SCALE (size), LABEL (size));
2627
2628 num_edges = 0;
2629 FOR_EACH_BB_FN (bb, cfun)
2630 num_edges += EDGE_COUNT (bb->succs);
2631 size = num_edges * sizeof (struct edge_def);
2632 total += size;
2633 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2634
2635 fprintf (file, "---------------------------------------------------------\n");
2636 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2637 LABEL (total));
2638 fprintf (file, "---------------------------------------------------------\n");
2639 fprintf (file, "\n");
2640
2641 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2642 max_num_merged_labels = cfg_stats.num_merged_labels;
2643
2644 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2645 cfg_stats.num_merged_labels, max_num_merged_labels);
2646
2647 fprintf (file, "\n");
2648 }
2649
2650
2651 /* Dump CFG statistics on stderr. Keep extern so that it's always
2652 linked in the final executable. */
2653
2654 DEBUG_FUNCTION void
2655 debug_cfg_stats (void)
2656 {
2657 dump_cfg_stats (stderr);
2658 }
2659
2660 /*---------------------------------------------------------------------------
2661 Miscellaneous helpers
2662 ---------------------------------------------------------------------------*/
2663
2664 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2665 flow. Transfers of control flow associated with EH are excluded. */
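/* In practice a call is considered able to do so only when the current
   function has non-local labels or calls setjmp, the call itself has
   side effects, and the callee is not declared leaf.  */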
2666
2667 static bool
2668 call_can_make_abnormal_goto (gimple *t)
2669 {
2670 /* If the function has no non-local labels, then a call cannot make an
2671 abnormal transfer of control. */
2672 if (!cfun->has_nonlocal_label
2673 && !cfun->calls_setjmp)
2674 return false;
2675
2676 /* Likewise if the call has no side effects. */
2677 if (!gimple_has_side_effects (t))
2678 return false;
2679
2680 /* Likewise if the called function is leaf. */
2681 if (gimple_call_flags (t) & ECF_LEAF)
2682 return false;
2683
2684 return true;
2685 }
2686
2687
2688 /* Return true if T can make an abnormal transfer of control flow.
2689 Transfers of control flow associated with EH are excluded. */
2690
2691 bool
2692 stmt_can_make_abnormal_goto (gimple *t)
2693 {
2694 if (computed_goto_p (t))
2695 return true;
2696 if (is_gimple_call (t))
2697 return call_can_make_abnormal_goto (t);
2698 return false;
2699 }
2700
2701
2702 /* Return true if T represents a stmt that always transfers control. */
2703
2704 bool
2705 is_ctrl_stmt (gimple *t)
2706 {
2707 switch (gimple_code (t))
2708 {
2709 case GIMPLE_COND:
2710 case GIMPLE_SWITCH:
2711 case GIMPLE_GOTO:
2712 case GIMPLE_RETURN:
2713 case GIMPLE_RESX:
2714 return true;
2715 default:
2716 return false;
2717 }
2718 }
2719
2720
2721 /* Return true if T is a statement that may alter the flow of control
2722 (e.g., a call to a non-returning function). */
2723
2724 bool
2725 is_ctrl_altering_stmt (gimple *t)
2726 {
2727 gcc_assert (t);
2728
2729 switch (gimple_code (t))
2730 {
2731 case GIMPLE_CALL:
2732 /* Per stmt call flag indicates whether the call could alter
2733 	 control flow.  */
2734 if (gimple_call_ctrl_altering_p (t))
2735 return true;
2736 break;
2737
2738 case GIMPLE_EH_DISPATCH:
2739 /* EH_DISPATCH branches to the individual catch handlers at
2740 this level of a try or allowed-exceptions region. It can
2741 fallthru to the next statement as well. */
2742 return true;
2743
2744 case GIMPLE_ASM:
2745 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2746 return true;
2747 break;
2748
2749 CASE_GIMPLE_OMP:
2750 /* OpenMP directives alter control flow. */
2751 return true;
2752
2753 case GIMPLE_TRANSACTION:
2754 /* A transaction start alters control flow. */
2755 return true;
2756
2757 default:
2758 break;
2759 }
2760
2761 /* If a statement can throw, it alters control flow. */
2762 return stmt_can_throw_internal (t);
2763 }
2764
2765
2766 /* Return true if T is a simple local goto. */
2767
2768 bool
2769 simple_goto_p (gimple *t)
2770 {
2771 return (gimple_code (t) == GIMPLE_GOTO
2772 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2773 }
2774
2775
2776 /* Return true if STMT should start a new basic block. PREV_STMT is
2777 the statement preceding STMT. It is used when STMT is a label or a
2778 case label. Labels should only start a new basic block if their
2779    previous statement wasn't a label.  Otherwise, a sequence of labels
2780 would generate unnecessary basic blocks that only contain a single
2781 label. */
2782
2783 static inline bool
2784 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2785 {
2786 if (stmt == NULL)
2787 return false;
2788
2789 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2790 any nondebug stmts in the block. We don't want to start another
2791 block in this case: the debug stmt will already have started the
2792 one STMT would start if we weren't outputting debug stmts. */
2793 if (prev_stmt && is_gimple_debug (prev_stmt))
2794 return false;
2795
2796 /* Labels start a new basic block only if the preceding statement
2797 wasn't a label of the same type. This prevents the creation of
2798 consecutive blocks that have nothing but a single label. */
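  /* For instance, in

       L1:
       L2:
	 x = 1;

     only L1 starts a new basic block; L2 (an ordinary, non-forced user
     label) does not, and is counted in cfg_stats.num_merged_labels.  */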
2799 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2800 {
2801 /* Nonlocal and computed GOTO targets always start a new block. */
2802 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2803 || FORCED_LABEL (gimple_label_label (label_stmt)))
2804 return true;
2805
2806 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2807 {
2808 if (DECL_NONLOCAL (gimple_label_label (
2809 as_a <glabel *> (prev_stmt))))
2810 return true;
2811
2812 cfg_stats.num_merged_labels++;
2813 return false;
2814 }
2815 else
2816 return true;
2817 }
2818 else if (gimple_code (stmt) == GIMPLE_CALL)
2819 {
2820 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2821 	/* setjmp acts similarly to a nonlocal GOTO target and thus should
2822 start a new block. */
2823 return true;
2824 if (gimple_call_internal_p (stmt, IFN_PHI)
2825 && prev_stmt
2826 && gimple_code (prev_stmt) != GIMPLE_LABEL
2827 && (gimple_code (prev_stmt) != GIMPLE_CALL
2828 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2829 	/* PHI nodes start a new block unless preceded by a label
2830 or another PHI. */
2831 return true;
2832 }
2833
2834 return false;
2835 }
2836
2837
2838 /* Return true if T should end a basic block. */
2839
2840 bool
2841 stmt_ends_bb_p (gimple *t)
2842 {
2843 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2844 }
2845
2846 /* Remove block annotations and other data structures. */
2847
2848 void
2849 delete_tree_cfg_annotations (struct function *fn)
2850 {
2851 vec_free (label_to_block_map_for_fn (fn));
2852 }
2853
2854 /* Return the virtual phi in BB. */
2855
2856 gphi *
2857 get_virtual_phi (basic_block bb)
2858 {
2859 for (gphi_iterator gsi = gsi_start_phis (bb);
2860 !gsi_end_p (gsi);
2861 gsi_next (&gsi))
2862 {
2863 gphi *phi = gsi.phi ();
2864
2865 if (virtual_operand_p (PHI_RESULT (phi)))
2866 return phi;
2867 }
2868
2869 return NULL;
2870 }
2871
2872 /* Return the first statement in basic block BB. */
2873
2874 gimple *
2875 first_stmt (basic_block bb)
2876 {
2877 gimple_stmt_iterator i = gsi_start_bb (bb);
2878 gimple *stmt = NULL;
2879
2880 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2881 {
2882 gsi_next (&i);
2883 stmt = NULL;
2884 }
2885 return stmt;
2886 }
2887
2888 /* Return the first non-label statement in basic block BB. */
2889
2890 static gimple *
2891 first_non_label_stmt (basic_block bb)
2892 {
2893 gimple_stmt_iterator i = gsi_start_bb (bb);
2894 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2895 gsi_next (&i);
2896 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2897 }
2898
2899 /* Return the last statement in basic block BB. */
2900
2901 gimple *
2902 last_stmt (basic_block bb)
2903 {
2904 gimple_stmt_iterator i = gsi_last_bb (bb);
2905 gimple *stmt = NULL;
2906
2907 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2908 {
2909 gsi_prev (&i);
2910 stmt = NULL;
2911 }
2912 return stmt;
2913 }
2914
2915 /* Return the last statement of an otherwise empty block. Return NULL
2916 if the block is totally empty, or if it contains more than one
2917 statement. */
2918
2919 gimple *
2920 last_and_only_stmt (basic_block bb)
2921 {
2922 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2923 gimple *last, *prev;
2924
2925 if (gsi_end_p (i))
2926 return NULL;
2927
2928 last = gsi_stmt (i);
2929 gsi_prev_nondebug (&i);
2930 if (gsi_end_p (i))
2931 return last;
2932
2933 /* Empty statements should no longer appear in the instruction stream.
2934 Everything that might have appeared before should be deleted by
2935 remove_useless_stmts, and the optimizers should just gsi_remove
2936 instead of smashing with build_empty_stmt.
2937
2938 Thus the only thing that should appear here in a block containing
2939 one executable statement is a label. */
2940 prev = gsi_stmt (i);
2941 if (gimple_code (prev) == GIMPLE_LABEL)
2942 return last;
2943 else
2944 return NULL;
2945 }
2946
2947 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2948
2949 static void
2950 reinstall_phi_args (edge new_edge, edge old_edge)
2951 {
2952 edge_var_map *vm;
2953 int i;
2954 gphi_iterator phis;
2955
2956 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2957 if (!v)
2958 return;
2959
2960 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2961 v->iterate (i, &vm) && !gsi_end_p (phis);
2962 i++, gsi_next (&phis))
2963 {
2964 gphi *phi = phis.phi ();
2965 tree result = redirect_edge_var_map_result (vm);
2966 tree arg = redirect_edge_var_map_def (vm);
2967
2968 gcc_assert (result == gimple_phi_result (phi));
2969
2970 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2971 }
2972
2973 redirect_edge_var_map_clear (old_edge);
2974 }
2975
2976 /* Returns the basic block after which the new basic block created
2977 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2978 near its "logical" location. This is of most help to humans looking
2979 at debugging dumps. */
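/* Concretely: if the block just before DEST already reaches DEST over a
   non-complex (usually fallthru) edge, the new block is placed after the
   edge's source so that layout is not disturbed; otherwise it is placed
   right before DEST.  */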
2980
2981 basic_block
2982 split_edge_bb_loc (edge edge_in)
2983 {
2984 basic_block dest = edge_in->dest;
2985 basic_block dest_prev = dest->prev_bb;
2986
2987 if (dest_prev)
2988 {
2989 edge e = find_edge (dest_prev, dest);
2990 if (e && !(e->flags & EDGE_COMPLEX))
2991 return edge_in->src;
2992 }
2993 return dest_prev;
2994 }
2995
2996 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2997 Abort on abnormal edges. */
2998
2999 static basic_block
3000 gimple_split_edge (edge edge_in)
3001 {
3002 basic_block new_bb, after_bb, dest;
3003 edge new_edge, e;
3004
3005 /* Abnormal edges cannot be split. */
3006 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3007
3008 dest = edge_in->dest;
3009
3010 after_bb = split_edge_bb_loc (edge_in);
3011
3012 new_bb = create_empty_bb (after_bb);
3013 new_bb->count = edge_in->count ();
3014
3015 e = redirect_edge_and_branch (edge_in, new_bb);
3016 gcc_assert (e == edge_in);
3017
3018 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3019 reinstall_phi_args (new_edge, e);
3020
3021 return new_bb;
3022 }
3023
3024
3025 /* Verify properties of the address expression T with base object BASE. */
3026
3027 static tree
3028 verify_address (tree t, tree base)
3029 {
3030 bool old_constant;
3031 bool old_side_effects;
3032 bool new_constant;
3033 bool new_side_effects;
3034
3035 old_constant = TREE_CONSTANT (t);
3036 old_side_effects = TREE_SIDE_EFFECTS (t);
3037
3038 recompute_tree_invariant_for_addr_expr (t);
3039 new_side_effects = TREE_SIDE_EFFECTS (t);
3040 new_constant = TREE_CONSTANT (t);
3041
3042 if (old_constant != new_constant)
3043 {
3044 error ("constant not recomputed when ADDR_EXPR changed");
3045 return t;
3046 }
3047 if (old_side_effects != new_side_effects)
3048 {
3049 error ("side effects not recomputed when ADDR_EXPR changed");
3050 return t;
3051 }
3052
3053 if (!(VAR_P (base)
3054 || TREE_CODE (base) == PARM_DECL
3055 || TREE_CODE (base) == RESULT_DECL))
3056 return NULL_TREE;
3057
3058 if (DECL_GIMPLE_REG_P (base))
3059 {
3060 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
3061 return base;
3062 }
3063
3064 return NULL_TREE;
3065 }
3066
3067 /* Callback for walk_tree, check that all elements with address taken are
3068 properly noticed as such. The DATA is an int* that is 1 if TP was seen
3069 inside a PHI node. */
3070
3071 static tree
3072 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3073 {
3074 tree t = *tp, x;
3075
3076 if (TYPE_P (t))
3077 *walk_subtrees = 0;
3078
3079 /* Check operand N for being valid GIMPLE and give error MSG if not. */
3080 #define CHECK_OP(N, MSG) \
3081 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
3082 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
3083
3084 switch (TREE_CODE (t))
3085 {
3086 case SSA_NAME:
3087 if (SSA_NAME_IN_FREE_LIST (t))
3088 {
3089 error ("SSA name in freelist but still referenced");
3090 return *tp;
3091 }
3092 break;
3093
3094 case PARM_DECL:
3095 case VAR_DECL:
3096 case RESULT_DECL:
3097 {
3098 tree context = decl_function_context (t);
3099 if (context != cfun->decl
3100 && !SCOPE_FILE_SCOPE_P (context)
3101 && !TREE_STATIC (t)
3102 && !DECL_EXTERNAL (t))
3103 {
3104 error ("Local declaration from a different function");
3105 return t;
3106 }
3107 }
3108 break;
3109
3110 case INDIRECT_REF:
3111 error ("INDIRECT_REF in gimple IL");
3112 return t;
3113
3114 case MEM_REF:
3115 x = TREE_OPERAND (t, 0);
3116 if (!POINTER_TYPE_P (TREE_TYPE (x))
3117 || !is_gimple_mem_ref_addr (x))
3118 {
3119 error ("invalid first operand of MEM_REF");
3120 return x;
3121 }
3122 if (!poly_int_tree_p (TREE_OPERAND (t, 1))
3123 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3124 {
3125 error ("invalid offset operand of MEM_REF");
3126 return TREE_OPERAND (t, 1);
3127 }
3128 if (TREE_CODE (x) == ADDR_EXPR)
3129 {
3130 tree va = verify_address (x, TREE_OPERAND (x, 0));
3131 if (va)
3132 return va;
3133 x = TREE_OPERAND (x, 0);
3134 }
3135 walk_tree (&x, verify_expr, data, NULL);
3136 *walk_subtrees = 0;
3137 break;
3138
3139 case ASSERT_EXPR:
3140 x = fold (ASSERT_EXPR_COND (t));
3141 if (x == boolean_false_node)
3142 {
3143 error ("ASSERT_EXPR with an always-false condition");
3144 return *tp;
3145 }
3146 break;
3147
3148 case MODIFY_EXPR:
3149 error ("MODIFY_EXPR not expected while having tuples");
3150 return *tp;
3151
3152 case ADDR_EXPR:
3153 {
3154 tree tem;
3155
3156 gcc_assert (is_gimple_address (t));
3157
3158 /* Skip any references (they will be checked when we recurse down the
3159 tree) and ensure that any variable used as a prefix is marked
3160 addressable. */
3161 for (x = TREE_OPERAND (t, 0);
3162 handled_component_p (x);
3163 x = TREE_OPERAND (x, 0))
3164 ;
3165
3166 if ((tem = verify_address (t, x)))
3167 return tem;
3168
3169 if (!(VAR_P (x)
3170 || TREE_CODE (x) == PARM_DECL
3171 || TREE_CODE (x) == RESULT_DECL))
3172 return NULL;
3173
3174 if (!TREE_ADDRESSABLE (x))
3175 {
3176 error ("address taken, but ADDRESSABLE bit not set");
3177 return x;
3178 }
3179
3180 break;
3181 }
3182
3183 case COND_EXPR:
3184 x = COND_EXPR_COND (t);
3185 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3186 {
3187 error ("non-integral used in condition");
3188 return x;
3189 }
3190 if (!is_gimple_condexpr (x))
3191 {
3192 error ("invalid conditional operand");
3193 return x;
3194 }
3195 break;
3196
3197 case NON_LVALUE_EXPR:
3198 case TRUTH_NOT_EXPR:
3199 gcc_unreachable ();
3200
3201 CASE_CONVERT:
3202 case FIX_TRUNC_EXPR:
3203 case FLOAT_EXPR:
3204 case NEGATE_EXPR:
3205 case ABS_EXPR:
3206 case BIT_NOT_EXPR:
3207 CHECK_OP (0, "invalid operand to unary operator");
3208 break;
3209
3210 case REALPART_EXPR:
3211 case IMAGPART_EXPR:
3212 case BIT_FIELD_REF:
3213 if (!is_gimple_reg_type (TREE_TYPE (t)))
3214 {
3215 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3216 return t;
3217 }
3218
3219 if (TREE_CODE (t) == BIT_FIELD_REF)
3220 {
3221 tree t0 = TREE_OPERAND (t, 0);
3222 tree t1 = TREE_OPERAND (t, 1);
3223 tree t2 = TREE_OPERAND (t, 2);
3224 poly_uint64 size, bitpos;
3225 if (!poly_int_tree_p (t1, &size)
3226 || !poly_int_tree_p (t2, &bitpos)
3227 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3228 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3229 {
3230 error ("invalid position or size operand to BIT_FIELD_REF");
3231 return t;
3232 }
3233 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
3234 && maybe_ne (TYPE_PRECISION (TREE_TYPE (t)), size))
3235 {
3236 error ("integral result type precision does not match "
3237 "field size of BIT_FIELD_REF");
3238 return t;
3239 }
3240 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
3241 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
3242 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t))),
3243 size))
3244 {
3245 error ("mode size of non-integral result does not "
3246 "match field size of BIT_FIELD_REF");
3247 return t;
3248 }
3249 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
3250 && maybe_gt (size + bitpos,
3251 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (t0)))))
3252 {
3253 error ("position plus size exceeds size of referenced object in "
3254 "BIT_FIELD_REF");
3255 return t;
3256 }
3257 }
3258 t = TREE_OPERAND (t, 0);
3259
3260 /* Fall-through. */
3261 case COMPONENT_REF:
3262 case ARRAY_REF:
3263 case ARRAY_RANGE_REF:
3264 case VIEW_CONVERT_EXPR:
3265 /* We have a nest of references. Verify that each of the operands
3266 that determine where to reference is either a constant or a variable,
3267 verify that the base is valid, and then show we've already checked
3268 the subtrees. */
3269 while (handled_component_p (t))
3270 {
3271 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
3272 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3273 else if (TREE_CODE (t) == ARRAY_REF
3274 || TREE_CODE (t) == ARRAY_RANGE_REF)
3275 {
3276 CHECK_OP (1, "invalid array index");
3277 if (TREE_OPERAND (t, 2))
3278 CHECK_OP (2, "invalid array lower bound");
3279 if (TREE_OPERAND (t, 3))
3280 CHECK_OP (3, "invalid array stride");
3281 }
3282 else if (TREE_CODE (t) == BIT_FIELD_REF
3283 || TREE_CODE (t) == REALPART_EXPR
3284 || TREE_CODE (t) == IMAGPART_EXPR)
3285 {
3286 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3287 "REALPART_EXPR");
3288 return t;
3289 }
3290
3291 t = TREE_OPERAND (t, 0);
3292 }
3293
3294 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3295 {
3296 error ("invalid reference prefix");
3297 return t;
3298 }
3299 walk_tree (&t, verify_expr, data, NULL);
3300 *walk_subtrees = 0;
3301 break;
3302 case PLUS_EXPR:
3303 case MINUS_EXPR:
3304 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3305 POINTER_PLUS_EXPR. */
3306 if (POINTER_TYPE_P (TREE_TYPE (t)))
3307 {
3308 error ("invalid operand to plus/minus, type is a pointer");
3309 return t;
3310 }
3311 CHECK_OP (0, "invalid operand to binary operator");
3312 CHECK_OP (1, "invalid operand to binary operator");
3313 break;
3314
3315 case POINTER_DIFF_EXPR:
3316 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3317 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
3318 {
3319 error ("invalid operand to pointer diff, operand is not a pointer");
3320 return t;
3321 }
3322 if (TREE_CODE (TREE_TYPE (t)) != INTEGER_TYPE
3323 || TYPE_UNSIGNED (TREE_TYPE (t))
3324 || (TYPE_PRECISION (TREE_TYPE (t))
3325 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))))
3326 {
3327 error ("invalid type for pointer diff");
3328 return t;
3329 }
3330 CHECK_OP (0, "invalid operand to pointer diff");
3331 CHECK_OP (1, "invalid operand to pointer diff");
3332 break;
3333
3334 case POINTER_PLUS_EXPR:
3335 /* Check to make sure the first operand is a pointer or reference type. */
3336 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3337 {
3338 error ("invalid operand to pointer plus, first operand is not a pointer");
3339 return t;
3340 }
3341 /* Check to make sure the second operand is a ptrofftype. */
3342 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3343 {
3344 error ("invalid operand to pointer plus, second operand is not an "
3345 "integer type of appropriate width");
3346 return t;
3347 }
3348 /* FALLTHROUGH */
3349 case LT_EXPR:
3350 case LE_EXPR:
3351 case GT_EXPR:
3352 case GE_EXPR:
3353 case EQ_EXPR:
3354 case NE_EXPR:
3355 case UNORDERED_EXPR:
3356 case ORDERED_EXPR:
3357 case UNLT_EXPR:
3358 case UNLE_EXPR:
3359 case UNGT_EXPR:
3360 case UNGE_EXPR:
3361 case UNEQ_EXPR:
3362 case LTGT_EXPR:
3363 case MULT_EXPR:
3364 case TRUNC_DIV_EXPR:
3365 case CEIL_DIV_EXPR:
3366 case FLOOR_DIV_EXPR:
3367 case ROUND_DIV_EXPR:
3368 case TRUNC_MOD_EXPR:
3369 case CEIL_MOD_EXPR:
3370 case FLOOR_MOD_EXPR:
3371 case ROUND_MOD_EXPR:
3372 case RDIV_EXPR:
3373 case EXACT_DIV_EXPR:
3374 case MIN_EXPR:
3375 case MAX_EXPR:
3376 case LSHIFT_EXPR:
3377 case RSHIFT_EXPR:
3378 case LROTATE_EXPR:
3379 case RROTATE_EXPR:
3380 case BIT_IOR_EXPR:
3381 case BIT_XOR_EXPR:
3382 case BIT_AND_EXPR:
3383 CHECK_OP (0, "invalid operand to binary operator");
3384 CHECK_OP (1, "invalid operand to binary operator");
3385 break;
3386
3387 case CONSTRUCTOR:
3388 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3389 *walk_subtrees = 0;
3390 break;
3391
3392 case CASE_LABEL_EXPR:
3393 if (CASE_CHAIN (t))
3394 {
3395 error ("invalid CASE_CHAIN");
3396 return t;
3397 }
3398 break;
3399
3400 default:
3401 break;
3402 }
3403 return NULL;
3404
3405 #undef CHECK_OP
3406 }
3407
3408
3409 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3410 Returns true if there is an error, otherwise false. */
3411
3412 static bool
3413 verify_types_in_gimple_min_lval (tree expr)
3414 {
3415 tree op;
3416
3417 if (is_gimple_id (expr))
3418 return false;
3419
3420 if (TREE_CODE (expr) != TARGET_MEM_REF
3421 && TREE_CODE (expr) != MEM_REF)
3422 {
3423 error ("invalid expression for min lvalue");
3424 return true;
3425 }
3426
3427 /* TARGET_MEM_REFs are strange beasts. */
3428 if (TREE_CODE (expr) == TARGET_MEM_REF)
3429 return false;
3430
3431 op = TREE_OPERAND (expr, 0);
3432 if (!is_gimple_val (op))
3433 {
3434 error ("invalid operand in indirect reference");
3435 debug_generic_stmt (op);
3436 return true;
3437 }
3438 /* Memory references now generally can involve a value conversion. */
3439
3440 return false;
3441 }
3442
3443 /* Verify if EXPR is a valid GIMPLE reference expression. If
3444 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3445 if there is an error, otherwise false. */
3446
3447 static bool
3448 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3449 {
3450 while (handled_component_p (expr))
3451 {
3452 tree op = TREE_OPERAND (expr, 0);
3453
3454 if (TREE_CODE (expr) == ARRAY_REF
3455 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3456 {
3457 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3458 || (TREE_OPERAND (expr, 2)
3459 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3460 || (TREE_OPERAND (expr, 3)
3461 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3462 {
3463 error ("invalid operands to array reference");
3464 debug_generic_stmt (expr);
3465 return true;
3466 }
3467 }
3468
3469 /* Verify if the reference array element types are compatible. */
3470 if (TREE_CODE (expr) == ARRAY_REF
3471 && !useless_type_conversion_p (TREE_TYPE (expr),
3472 TREE_TYPE (TREE_TYPE (op))))
3473 {
3474 error ("type mismatch in array reference");
3475 debug_generic_stmt (TREE_TYPE (expr));
3476 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3477 return true;
3478 }
3479 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3480 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3481 TREE_TYPE (TREE_TYPE (op))))
3482 {
3483 error ("type mismatch in array range reference");
3484 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3485 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3486 return true;
3487 }
3488
3489 if ((TREE_CODE (expr) == REALPART_EXPR
3490 || TREE_CODE (expr) == IMAGPART_EXPR)
3491 && !useless_type_conversion_p (TREE_TYPE (expr),
3492 TREE_TYPE (TREE_TYPE (op))))
3493 {
3494 error ("type mismatch in real/imagpart reference");
3495 debug_generic_stmt (TREE_TYPE (expr));
3496 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3497 return true;
3498 }
3499
3500 if (TREE_CODE (expr) == COMPONENT_REF
3501 && !useless_type_conversion_p (TREE_TYPE (expr),
3502 TREE_TYPE (TREE_OPERAND (expr, 1))))
3503 {
3504 error ("type mismatch in component reference");
3505 debug_generic_stmt (TREE_TYPE (expr));
3506 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3507 return true;
3508 }
3509
3510 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3511 {
3512 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3513 that their operand is not an SSA name or an invariant when
3514 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3515 bug). Otherwise there is nothing to verify, gross mismatches at
3516 most invoke undefined behavior. */
3517 if (require_lvalue
3518 && (TREE_CODE (op) == SSA_NAME
3519 || is_gimple_min_invariant (op)))
3520 {
3521 error ("conversion of an SSA_NAME on the left hand side");
3522 debug_generic_stmt (expr);
3523 return true;
3524 }
3525 else if (TREE_CODE (op) == SSA_NAME
3526 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3527 {
3528 error ("conversion of register to a different size");
3529 debug_generic_stmt (expr);
3530 return true;
3531 }
3532 else if (!handled_component_p (op))
3533 return false;
3534 }
3535
3536 expr = op;
3537 }
3538
3539 if (TREE_CODE (expr) == MEM_REF)
3540 {
3541 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3542 {
3543 error ("invalid address operand in MEM_REF");
3544 debug_generic_stmt (expr);
3545 return true;
3546 }
3547 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3548 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3549 {
3550 error ("invalid offset operand in MEM_REF");
3551 debug_generic_stmt (expr);
3552 return true;
3553 }
3554 }
3555 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3556 {
3557 if (!TMR_BASE (expr)
3558 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3559 {
3560 error ("invalid address operand in TARGET_MEM_REF");
3561 return true;
3562 }
3563 if (!TMR_OFFSET (expr)
3564 || !poly_int_tree_p (TMR_OFFSET (expr))
3565 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3566 {
3567 error ("invalid offset operand in TARGET_MEM_REF");
3568 debug_generic_stmt (expr);
3569 return true;
3570 }
3571 }
3572
3573 return ((require_lvalue || !is_gimple_min_invariant (expr))
3574 && verify_types_in_gimple_min_lval (expr));
3575 }
3576
3577 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3578 list of pointer-to types that is trivially convertible to DEST. */
3579
3580 static bool
3581 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3582 {
3583 tree src;
3584
3585 if (!TYPE_POINTER_TO (src_obj))
3586 return true;
3587
3588 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3589 if (useless_type_conversion_p (dest, src))
3590 return true;
3591
3592 return false;
3593 }
3594
3595 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3596 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3597
3598 static bool
3599 valid_fixed_convert_types_p (tree type1, tree type2)
3600 {
3601 return (FIXED_POINT_TYPE_P (type1)
3602 && (INTEGRAL_TYPE_P (type2)
3603 || SCALAR_FLOAT_TYPE_P (type2)
3604 || FIXED_POINT_TYPE_P (type2)));
3605 }
3606
3607 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3608 is a problem, otherwise false. */
3609
3610 static bool
3611 verify_gimple_call (gcall *stmt)
3612 {
3613 tree fn = gimple_call_fn (stmt);
3614 tree fntype, fndecl;
3615 unsigned i;
3616
3617 if (gimple_call_internal_p (stmt))
3618 {
3619 if (fn)
3620 {
3621 error ("gimple call has two targets");
3622 debug_generic_stmt (fn);
3623 return true;
3624 }
3625       /* FIXME: for passing a label as an arg in internal fn PHI from the GIMPLE FE.  */
3626 else if (gimple_call_internal_fn (stmt) == IFN_PHI)
3627 {
3628 return false;
3629 }
3630 }
3631 else
3632 {
3633 if (!fn)
3634 {
3635 error ("gimple call has no target");
3636 return true;
3637 }
3638 }
3639
3640 if (fn && !is_gimple_call_addr (fn))
3641 {
3642 error ("invalid function in gimple call");
3643 debug_generic_stmt (fn);
3644 return true;
3645 }
3646
3647 if (fn
3648 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3649 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3650 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3651 {
3652 error ("non-function in gimple call");
3653 return true;
3654 }
3655
3656 fndecl = gimple_call_fndecl (stmt);
3657 if (fndecl
3658 && TREE_CODE (fndecl) == FUNCTION_DECL
3659 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3660 && !DECL_PURE_P (fndecl)
3661 && !TREE_READONLY (fndecl))
3662 {
3663 error ("invalid pure const state for function");
3664 return true;
3665 }
3666
3667 tree lhs = gimple_call_lhs (stmt);
3668 if (lhs
3669 && (!is_gimple_lvalue (lhs)
3670 || verify_types_in_gimple_reference (lhs, true)))
3671 {
3672 error ("invalid LHS in gimple call");
3673 return true;
3674 }
3675
3676 if (gimple_call_ctrl_altering_p (stmt)
3677 && gimple_call_noreturn_p (stmt)
3678 && should_remove_lhs_p (lhs))
3679 {
3680 error ("LHS in noreturn call");
3681 return true;
3682 }
3683
3684 fntype = gimple_call_fntype (stmt);
3685 if (fntype
3686 && lhs
3687 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3688 /* ??? At least C++ misses conversions at assignments from
3689 void * call results.
3690 For now simply allow arbitrary pointer type conversions. */
3691 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3692 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3693 {
3694 error ("invalid conversion in gimple call");
3695 debug_generic_stmt (TREE_TYPE (lhs));
3696 debug_generic_stmt (TREE_TYPE (fntype));
3697 return true;
3698 }
3699
3700 if (gimple_call_chain (stmt)
3701 && !is_gimple_val (gimple_call_chain (stmt)))
3702 {
3703 error ("invalid static chain in gimple call");
3704 debug_generic_stmt (gimple_call_chain (stmt));
3705 return true;
3706 }
3707
3708 /* If there is a static chain argument, the call should either be
3709 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3710 if (gimple_call_chain (stmt)
3711 && fndecl
3712 && !DECL_STATIC_CHAIN (fndecl))
3713 {
3714 error ("static chain with function that doesn%'t use one");
3715 return true;
3716 }
3717
3718 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3719 {
3720 switch (DECL_FUNCTION_CODE (fndecl))
3721 {
3722 case BUILT_IN_UNREACHABLE:
3723 case BUILT_IN_TRAP:
3724 if (gimple_call_num_args (stmt) > 0)
3725 {
3726 /* Built-in unreachable with parameters might not be caught by the
3727 undefined behavior sanitizer.  Front ends check that users do not
3728 call these built-ins with arguments, but we also produce calls to
3729 __builtin_unreachable internally, for example when IPA figures
3730 out that a call cannot happen in a legal program.  In such cases
3731 we must make sure the arguments are stripped off.  */
3732 error ("__builtin_unreachable or __builtin_trap call with "
3733 "arguments");
3734 return true;
3735 }
3736 break;
3737 default:
3738 break;
3739 }
3740 }
3741
3742 /* ??? The C frontend passes unpromoted arguments in case it
3743 didn't see a function declaration before the call. So for now
3744 leave the call arguments mostly unverified. Once we gimplify
3745 unit-at-a-time we have a chance to fix this. */
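/* We still require that arguments of register type are GIMPLE values
   and that arguments of aggregate type are valid lvalues; the loop
   below checks exactly that.  */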
3746
3747 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3748 {
3749 tree arg = gimple_call_arg (stmt, i);
3750 if ((is_gimple_reg_type (TREE_TYPE (arg))
3751 && !is_gimple_val (arg))
3752 || (!is_gimple_reg_type (TREE_TYPE (arg))
3753 && !is_gimple_lvalue (arg)))
3754 {
3755 error ("invalid argument to gimple call");
3756 debug_generic_expr (arg);
3757 return true;
3758 }
3759 }
3760
3761 return false;
3762 }
3763
3764 /* Verifies the gimple comparison with the result type TYPE and
3765 the operands OP0 and OP1, comparison code is CODE. */
3766
3767 static bool
3768 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3769 {
3770 tree op0_type = TREE_TYPE (op0);
3771 tree op1_type = TREE_TYPE (op1);
3772
3773 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3774 {
3775 error ("invalid operands in gimple comparison");
3776 return true;
3777 }
3778
3779 /* For comparisons we do not have the operations type as the
3780 effective type the comparison is carried out in. Instead
3781 we require that either the first operand is trivially
3782 convertible into the second, or the other way around.
3783 Because we special-case pointers to void we allow
3784 comparisons of pointers with the same mode as well. */
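/* For example, comparing an 'int *' operand against a 'void *' operand
   is accepted here even though neither type is trivially convertible
   to the other, because both pointer types share the same mode.  */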
3785 if (!useless_type_conversion_p (op0_type, op1_type)
3786 && !useless_type_conversion_p (op1_type, op0_type)
3787 && (!POINTER_TYPE_P (op0_type)
3788 || !POINTER_TYPE_P (op1_type)
3789 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3790 {
3791 error ("mismatching comparison operand types");
3792 debug_generic_expr (op0_type);
3793 debug_generic_expr (op1_type);
3794 return true;
3795 }
3796
3797 /* The resulting type of a comparison may be an effective boolean type. */
3798 if (INTEGRAL_TYPE_P (type)
3799 && (TREE_CODE (type) == BOOLEAN_TYPE
3800 || TYPE_PRECISION (type) == 1))
3801 {
3802 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3803 || TREE_CODE (op1_type) == VECTOR_TYPE)
3804 && code != EQ_EXPR && code != NE_EXPR
3805 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3806 && !VECTOR_INTEGER_TYPE_P (op0_type))
3807 {
3808 error ("unsupported operation or type for vector comparison"
3809 " returning a boolean");
3810 debug_generic_expr (op0_type);
3811 debug_generic_expr (op1_type);
3812 return true;
3813 }
3814 }
3815 /* Or a boolean vector type with the same element count
3816 as the comparison operand types. */
3817 else if (TREE_CODE (type) == VECTOR_TYPE
3818 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3819 {
3820 if (TREE_CODE (op0_type) != VECTOR_TYPE
3821 || TREE_CODE (op1_type) != VECTOR_TYPE)
3822 {
3823 error ("non-vector operands in vector comparison");
3824 debug_generic_expr (op0_type);
3825 debug_generic_expr (op1_type);
3826 return true;
3827 }
3828
3829 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3830 TYPE_VECTOR_SUBPARTS (op0_type)))
3831 {
3832 error ("invalid vector comparison resulting type");
3833 debug_generic_expr (type);
3834 return true;
3835 }
3836 }
3837 else
3838 {
3839 error ("bogus comparison result type");
3840 debug_generic_expr (type);
3841 return true;
3842 }
3843
3844 return false;
3845 }
3846
3847 /* Verify a gimple assignment statement STMT with an unary rhs.
3848 Returns true if anything is wrong. */
3849
3850 static bool
3851 verify_gimple_assign_unary (gassign *stmt)
3852 {
3853 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3854 tree lhs = gimple_assign_lhs (stmt);
3855 tree lhs_type = TREE_TYPE (lhs);
3856 tree rhs1 = gimple_assign_rhs1 (stmt);
3857 tree rhs1_type = TREE_TYPE (rhs1);
3858
3859 if (!is_gimple_reg (lhs))
3860 {
3861 error ("non-register as LHS of unary operation");
3862 return true;
3863 }
3864
3865 if (!is_gimple_val (rhs1))
3866 {
3867 error ("invalid operand in unary operation");
3868 return true;
3869 }
3870
3871 /* First handle conversions. */
3872 switch (rhs_code)
3873 {
3874 CASE_CONVERT:
3875 {
3876 /* Allow conversions from pointer type to integral type only if
3877 there is no sign or zero extension involved.
3878 For targets where the precision of ptrofftype doesn't match that
3879 of pointers we need to allow arbitrary conversions to ptrofftype. */
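/* For instance, truncating a 64-bit pointer to a 32-bit integer is
   accepted below, while widening a 32-bit pointer to a 64-bit integer
   (which would imply a sign or zero extension) is not, unless arbitrary
   conversions to ptrofftype must be allowed.  */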
3880 if ((POINTER_TYPE_P (lhs_type)
3881 && INTEGRAL_TYPE_P (rhs1_type))
3882 || (POINTER_TYPE_P (rhs1_type)
3883 && INTEGRAL_TYPE_P (lhs_type)
3884 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3885 || ptrofftype_p (sizetype))))
3886 return false;
3887
3888 /* Allow conversion from integral to offset type and vice versa. */
3889 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3890 && INTEGRAL_TYPE_P (rhs1_type))
3891 || (INTEGRAL_TYPE_P (lhs_type)
3892 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3893 return false;
3894
3895 /* Otherwise assert we are converting between types of the
3896 same kind. */
3897 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3898 {
3899 error ("invalid types in nop conversion");
3900 debug_generic_expr (lhs_type);
3901 debug_generic_expr (rhs1_type);
3902 return true;
3903 }
3904
3905 return false;
3906 }
3907
3908 case ADDR_SPACE_CONVERT_EXPR:
3909 {
3910 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3911 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3912 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3913 {
3914 error ("invalid types in address space conversion");
3915 debug_generic_expr (lhs_type);
3916 debug_generic_expr (rhs1_type);
3917 return true;
3918 }
3919
3920 return false;
3921 }
3922
3923 case FIXED_CONVERT_EXPR:
3924 {
3925 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3926 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3927 {
3928 error ("invalid types in fixed-point conversion");
3929 debug_generic_expr (lhs_type);
3930 debug_generic_expr (rhs1_type);
3931 return true;
3932 }
3933
3934 return false;
3935 }
3936
3937 case FLOAT_EXPR:
3938 {
3939 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3940 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3941 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3942 {
3943 error ("invalid types in conversion to floating point");
3944 debug_generic_expr (lhs_type);
3945 debug_generic_expr (rhs1_type);
3946 return true;
3947 }
3948
3949 return false;
3950 }
3951
3952 case FIX_TRUNC_EXPR:
3953 {
3954 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3955 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3956 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3957 {
3958 error ("invalid types in conversion to integer");
3959 debug_generic_expr (lhs_type);
3960 debug_generic_expr (rhs1_type);
3961 return true;
3962 }
3963
3964 return false;
3965 }
3966
3967 case VEC_UNPACK_HI_EXPR:
3968 case VEC_UNPACK_LO_EXPR:
3969 case VEC_UNPACK_FLOAT_HI_EXPR:
3970 case VEC_UNPACK_FLOAT_LO_EXPR:
3971 /* FIXME. */
3972 return false;
3973
3974 case NEGATE_EXPR:
3975 case ABS_EXPR:
3976 case BIT_NOT_EXPR:
3977 case PAREN_EXPR:
3978 case CONJ_EXPR:
3979 break;
3980
3981 case VEC_DUPLICATE_EXPR:
3982 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3983 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3984 {
3985 error ("vec_duplicate should be from a scalar to a like vector");
3986 debug_generic_expr (lhs_type);
3987 debug_generic_expr (rhs1_type);
3988 return true;
3989 }
3990 return false;
3991
3992 default:
3993 gcc_unreachable ();
3994 }
3995
3996 /* For the remaining codes assert there is no conversion involved. */
3997 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3998 {
3999 error ("non-trivial conversion in unary operation");
4000 debug_generic_expr (lhs_type);
4001 debug_generic_expr (rhs1_type);
4002 return true;
4003 }
4004
4005 return false;
4006 }
4007
4008 /* Verify a gimple assignment statement STMT with a binary rhs.
4009 Returns true if anything is wrong. */
4010
4011 static bool
4012 verify_gimple_assign_binary (gassign *stmt)
4013 {
4014 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4015 tree lhs = gimple_assign_lhs (stmt);
4016 tree lhs_type = TREE_TYPE (lhs);
4017 tree rhs1 = gimple_assign_rhs1 (stmt);
4018 tree rhs1_type = TREE_TYPE (rhs1);
4019 tree rhs2 = gimple_assign_rhs2 (stmt);
4020 tree rhs2_type = TREE_TYPE (rhs2);
4021
4022 if (!is_gimple_reg (lhs))
4023 {
4024 error ("non-register as LHS of binary operation");
4025 return true;
4026 }
4027
4028 if (!is_gimple_val (rhs1)
4029 || !is_gimple_val (rhs2))
4030 {
4031 error ("invalid operands in binary operation");
4032 return true;
4033 }
4034
4035 /* First handle operations that involve different types. */
4036 switch (rhs_code)
4037 {
4038 case COMPLEX_EXPR:
4039 {
4040 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
4041 || !(INTEGRAL_TYPE_P (rhs1_type)
4042 || SCALAR_FLOAT_TYPE_P (rhs1_type))
4043 || !(INTEGRAL_TYPE_P (rhs2_type)
4044 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
4045 {
4046 error ("type mismatch in complex expression");
4047 debug_generic_expr (lhs_type);
4048 debug_generic_expr (rhs1_type);
4049 debug_generic_expr (rhs2_type);
4050 return true;
4051 }
4052
4053 return false;
4054 }
4055
4056 case LSHIFT_EXPR:
4057 case RSHIFT_EXPR:
4058 case LROTATE_EXPR:
4059 case RROTATE_EXPR:
4060 {
4061 /* Shifts and rotates are ok on integral types, fixed point
4062 types and integer vector types. */
4063 if ((!INTEGRAL_TYPE_P (rhs1_type)
4064 && !FIXED_POINT_TYPE_P (rhs1_type)
4065 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
4066 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
4067 || (!INTEGRAL_TYPE_P (rhs2_type)
4068 /* Vector shifts of vectors are also ok. */
4069 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
4070 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4071 && TREE_CODE (rhs2_type) == VECTOR_TYPE
4072 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
4073 || !useless_type_conversion_p (lhs_type, rhs1_type))
4074 {
4075 error ("type mismatch in shift expression");
4076 debug_generic_expr (lhs_type);
4077 debug_generic_expr (rhs1_type);
4078 debug_generic_expr (rhs2_type);
4079 return true;
4080 }
4081
4082 return false;
4083 }
4084
4085 case WIDEN_LSHIFT_EXPR:
4086 {
4087 if (!INTEGRAL_TYPE_P (lhs_type)
4088 || !INTEGRAL_TYPE_P (rhs1_type)
4089 || TREE_CODE (rhs2) != INTEGER_CST
4090 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
4091 {
4092 error ("type mismatch in widening vector shift expression");
4093 debug_generic_expr (lhs_type);
4094 debug_generic_expr (rhs1_type);
4095 debug_generic_expr (rhs2_type);
4096 return true;
4097 }
4098
4099 return false;
4100 }
4101
4102 case VEC_WIDEN_LSHIFT_HI_EXPR:
4103 case VEC_WIDEN_LSHIFT_LO_EXPR:
4104 {
4105 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4106 || TREE_CODE (lhs_type) != VECTOR_TYPE
4107 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4108 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4109 || TREE_CODE (rhs2) != INTEGER_CST
4110 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4111 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4112 {
4113 error ("type mismatch in widening vector shift expression");
4114 debug_generic_expr (lhs_type);
4115 debug_generic_expr (rhs1_type);
4116 debug_generic_expr (rhs2_type);
4117 return true;
4118 }
4119
4120 return false;
4121 }
4122
4123 case PLUS_EXPR:
4124 case MINUS_EXPR:
4125 {
4126 tree lhs_etype = lhs_type;
4127 tree rhs1_etype = rhs1_type;
4128 tree rhs2_etype = rhs2_type;
4129 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
4130 {
4131 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4132 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4133 {
4134 error ("invalid non-vector operands to vector valued plus");
4135 return true;
4136 }
4137 lhs_etype = TREE_TYPE (lhs_type);
4138 rhs1_etype = TREE_TYPE (rhs1_type);
4139 rhs2_etype = TREE_TYPE (rhs2_type);
4140 }
4141 if (POINTER_TYPE_P (lhs_etype)
4142 || POINTER_TYPE_P (rhs1_etype)
4143 || POINTER_TYPE_P (rhs2_etype))
4144 {
4145 error ("invalid (pointer) operands to plus/minus");
4146 return true;
4147 }
4148
4149 /* Continue with generic binary expression handling. */
4150 break;
4151 }
4152
4153 case POINTER_PLUS_EXPR:
4154 {
4155 if (!POINTER_TYPE_P (rhs1_type)
4156 || !useless_type_conversion_p (lhs_type, rhs1_type)
4157 || !ptrofftype_p (rhs2_type))
4158 {
4159 error ("type mismatch in pointer plus expression");
4160 debug_generic_stmt (lhs_type);
4161 debug_generic_stmt (rhs1_type);
4162 debug_generic_stmt (rhs2_type);
4163 return true;
4164 }
4165
4166 return false;
4167 }
4168
4169 case POINTER_DIFF_EXPR:
4170 {
4171 if (!POINTER_TYPE_P (rhs1_type)
4172 || !POINTER_TYPE_P (rhs2_type)
4173 /* Because we special-case pointers to void we allow difference
4174 of arbitrary pointers with the same mode. */
4175 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4176 || !INTEGRAL_TYPE_P (lhs_type)
4177 || TYPE_UNSIGNED (lhs_type)
4178 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4179 {
4180 error ("type mismatch in pointer diff expression");
4181 debug_generic_stmt (lhs_type);
4182 debug_generic_stmt (rhs1_type);
4183 debug_generic_stmt (rhs2_type);
4184 return true;
4185 }
4186
4187 return false;
4188 }
4189
4190 case TRUTH_ANDIF_EXPR:
4191 case TRUTH_ORIF_EXPR:
4192 case TRUTH_AND_EXPR:
4193 case TRUTH_OR_EXPR:
4194 case TRUTH_XOR_EXPR:
4195
4196 gcc_unreachable ();
4197
4198 case LT_EXPR:
4199 case LE_EXPR:
4200 case GT_EXPR:
4201 case GE_EXPR:
4202 case EQ_EXPR:
4203 case NE_EXPR:
4204 case UNORDERED_EXPR:
4205 case ORDERED_EXPR:
4206 case UNLT_EXPR:
4207 case UNLE_EXPR:
4208 case UNGT_EXPR:
4209 case UNGE_EXPR:
4210 case UNEQ_EXPR:
4211 case LTGT_EXPR:
4212 /* Comparisons are also binary, but the result type is not
4213 connected to the operand types. */
4214 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4215
4216 case WIDEN_MULT_EXPR:
4217 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4218 return true;
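/* The two operands must have equal precision and the result must be
   at least twice as wide as the operands.  */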
4219 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4220 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4221
4222 case WIDEN_SUM_EXPR:
4223 {
4224 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4225 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4226 && ((!INTEGRAL_TYPE_P (rhs1_type)
4227 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4228 || (!INTEGRAL_TYPE_P (lhs_type)
4229 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4230 || !useless_type_conversion_p (lhs_type, rhs2_type)
4231 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4232 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4233 {
4234 error ("type mismatch in widening sum reduction");
4235 debug_generic_expr (lhs_type);
4236 debug_generic_expr (rhs1_type);
4237 debug_generic_expr (rhs2_type);
4238 return true;
4239 }
4240 return false;
4241 }
4242
4243 case VEC_WIDEN_MULT_HI_EXPR:
4244 case VEC_WIDEN_MULT_LO_EXPR:
4245 case VEC_WIDEN_MULT_EVEN_EXPR:
4246 case VEC_WIDEN_MULT_ODD_EXPR:
4247 {
4248 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4249 || TREE_CODE (lhs_type) != VECTOR_TYPE
4250 || !types_compatible_p (rhs1_type, rhs2_type)
4251 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4252 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4253 {
4254 error ("type mismatch in vector widening multiplication");
4255 debug_generic_expr (lhs_type);
4256 debug_generic_expr (rhs1_type);
4257 debug_generic_expr (rhs2_type);
4258 return true;
4259 }
4260 return false;
4261 }
4262
4263 case VEC_PACK_TRUNC_EXPR:
4264 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4265 vector boolean types. */
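/* For instance, two 4-element boolean mask vectors may be
   concatenated into a single 8-element boolean mask vector.  */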
4266 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4267 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4268 && types_compatible_p (rhs1_type, rhs2_type)
4269 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4270 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4271 return false;
4272
4273 /* Fallthru. */
4274 case VEC_PACK_SAT_EXPR:
4275 case VEC_PACK_FIX_TRUNC_EXPR:
4276 {
4277 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4278 || TREE_CODE (lhs_type) != VECTOR_TYPE
4279 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4280 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4281 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4282 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4283 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4284 || !types_compatible_p (rhs1_type, rhs2_type)
4285 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4286 2 * GET_MODE_SIZE (element_mode (lhs_type))))
4287 {
4288 error ("type mismatch in vector pack expression");
4289 debug_generic_expr (lhs_type);
4290 debug_generic_expr (rhs1_type);
4291 debug_generic_expr (rhs2_type);
4292 return true;
4293 }
4294
4295 return false;
4296 }
4297
4298 case MULT_EXPR:
4299 case MULT_HIGHPART_EXPR:
4300 case TRUNC_DIV_EXPR:
4301 case CEIL_DIV_EXPR:
4302 case FLOOR_DIV_EXPR:
4303 case ROUND_DIV_EXPR:
4304 case TRUNC_MOD_EXPR:
4305 case CEIL_MOD_EXPR:
4306 case FLOOR_MOD_EXPR:
4307 case ROUND_MOD_EXPR:
4308 case RDIV_EXPR:
4309 case EXACT_DIV_EXPR:
4310 case MIN_EXPR:
4311 case MAX_EXPR:
4312 case BIT_IOR_EXPR:
4313 case BIT_XOR_EXPR:
4314 case BIT_AND_EXPR:
4315 /* Continue with generic binary expression handling. */
4316 break;
4317
4318 case VEC_SERIES_EXPR:
4319 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4320 {
4321 error ("type mismatch in series expression");
4322 debug_generic_expr (rhs1_type);
4323 debug_generic_expr (rhs2_type);
4324 return true;
4325 }
4326 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4327 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4328 {
4329 error ("vector type expected in series expression");
4330 debug_generic_expr (lhs_type);
4331 return true;
4332 }
4333 return false;
4334
4335 default:
4336 gcc_unreachable ();
4337 }
4338
4339 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4340 || !useless_type_conversion_p (lhs_type, rhs2_type))
4341 {
4342 error ("type mismatch in binary expression");
4343 debug_generic_stmt (lhs_type);
4344 debug_generic_stmt (rhs1_type);
4345 debug_generic_stmt (rhs2_type);
4346 return true;
4347 }
4348
4349 return false;
4350 }
4351
4352 /* Verify a gimple assignment statement STMT with a ternary rhs.
4353 Returns true if anything is wrong. */
4354
4355 static bool
4356 verify_gimple_assign_ternary (gassign *stmt)
4357 {
4358 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4359 tree lhs = gimple_assign_lhs (stmt);
4360 tree lhs_type = TREE_TYPE (lhs);
4361 tree rhs1 = gimple_assign_rhs1 (stmt);
4362 tree rhs1_type = TREE_TYPE (rhs1);
4363 tree rhs2 = gimple_assign_rhs2 (stmt);
4364 tree rhs2_type = TREE_TYPE (rhs2);
4365 tree rhs3 = gimple_assign_rhs3 (stmt);
4366 tree rhs3_type = TREE_TYPE (rhs3);
4367
4368 if (!is_gimple_reg (lhs))
4369 {
4370 error ("non-register as LHS of ternary operation");
4371 return true;
4372 }
4373
4374 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
4375 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4376 || !is_gimple_val (rhs2)
4377 || !is_gimple_val (rhs3))
4378 {
4379 error ("invalid operands in ternary operation");
4380 return true;
4381 }
4382
4383 /* First handle operations that involve different types. */
4384 switch (rhs_code)
4385 {
4386 case WIDEN_MULT_PLUS_EXPR:
4387 case WIDEN_MULT_MINUS_EXPR:
4388 if ((!INTEGRAL_TYPE_P (rhs1_type)
4389 && !FIXED_POINT_TYPE_P (rhs1_type))
4390 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4391 || !useless_type_conversion_p (lhs_type, rhs3_type)
4392 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4393 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4394 {
4395 error ("type mismatch in widening multiply-accumulate expression");
4396 debug_generic_expr (lhs_type);
4397 debug_generic_expr (rhs1_type);
4398 debug_generic_expr (rhs2_type);
4399 debug_generic_expr (rhs3_type);
4400 return true;
4401 }
4402 break;
4403
4404 case FMA_EXPR:
4405 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4406 || !useless_type_conversion_p (lhs_type, rhs2_type)
4407 || !useless_type_conversion_p (lhs_type, rhs3_type))
4408 {
4409 error ("type mismatch in fused multiply-add expression");
4410 debug_generic_expr (lhs_type);
4411 debug_generic_expr (rhs1_type);
4412 debug_generic_expr (rhs2_type);
4413 debug_generic_expr (rhs3_type);
4414 return true;
4415 }
4416 break;
4417
4418 case VEC_COND_EXPR:
4419 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4420 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4421 TYPE_VECTOR_SUBPARTS (lhs_type)))
4422 {
4423 error ("the first argument of a VEC_COND_EXPR must be of a "
4424 "boolean vector type of the same number of elements "
4425 "as the result");
4426 debug_generic_expr (lhs_type);
4427 debug_generic_expr (rhs1_type);
4428 return true;
4429 }
4430 /* Fallthrough. */
4431 case COND_EXPR:
4432 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4433 || !useless_type_conversion_p (lhs_type, rhs3_type))
4434 {
4435 error ("type mismatch in conditional expression");
4436 debug_generic_expr (lhs_type);
4437 debug_generic_expr (rhs2_type);
4438 debug_generic_expr (rhs3_type);
4439 return true;
4440 }
4441 break;
4442
4443 case VEC_PERM_EXPR:
4444 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4445 || !useless_type_conversion_p (lhs_type, rhs2_type))
4446 {
4447 error ("type mismatch in vector permute expression");
4448 debug_generic_expr (lhs_type);
4449 debug_generic_expr (rhs1_type);
4450 debug_generic_expr (rhs2_type);
4451 debug_generic_expr (rhs3_type);
4452 return true;
4453 }
4454
4455 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4456 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4457 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4458 {
4459 error ("vector types expected in vector permute expression");
4460 debug_generic_expr (lhs_type);
4461 debug_generic_expr (rhs1_type);
4462 debug_generic_expr (rhs2_type);
4463 debug_generic_expr (rhs3_type);
4464 return true;
4465 }
4466
4467 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4468 TYPE_VECTOR_SUBPARTS (rhs2_type))
4469 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4470 TYPE_VECTOR_SUBPARTS (rhs3_type))
4471 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4472 TYPE_VECTOR_SUBPARTS (lhs_type)))
4473 {
4474 error ("vectors with different element number found "
4475 "in vector permute expression");
4476 debug_generic_expr (lhs_type);
4477 debug_generic_expr (rhs1_type);
4478 debug_generic_expr (rhs2_type);
4479 debug_generic_expr (rhs3_type);
4480 return true;
4481 }
4482
4483 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4484 || (TREE_CODE (rhs3) != VECTOR_CST
4485 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4486 (TREE_TYPE (rhs3_type)))
4487 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4488 (TREE_TYPE (rhs1_type))))))
4489 {
4490 error ("invalid mask type in vector permute expression");
4491 debug_generic_expr (lhs_type);
4492 debug_generic_expr (rhs1_type);
4493 debug_generic_expr (rhs2_type);
4494 debug_generic_expr (rhs3_type);
4495 return true;
4496 }
4497
4498 return false;
4499
4500 case SAD_EXPR:
4501 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4502 || !useless_type_conversion_p (lhs_type, rhs3_type)
4503 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4504 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4505 {
4506 error ("type mismatch in sad expression");
4507 debug_generic_expr (lhs_type);
4508 debug_generic_expr (rhs1_type);
4509 debug_generic_expr (rhs2_type);
4510 debug_generic_expr (rhs3_type);
4511 return true;
4512 }
4513
4514 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4515 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4516 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4517 {
4518 error ("vector types expected in sad expression");
4519 debug_generic_expr (lhs_type);
4520 debug_generic_expr (rhs1_type);
4521 debug_generic_expr (rhs2_type);
4522 debug_generic_expr (rhs3_type);
4523 return true;
4524 }
4525
4526 return false;
4527
4528 case BIT_INSERT_EXPR:
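/* rhs1 is the container value, rhs2 the bits being inserted and
   rhs3 the constant bit position of the insertion.  */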
4529 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4530 {
4531 error ("type mismatch in BIT_INSERT_EXPR");
4532 debug_generic_expr (lhs_type);
4533 debug_generic_expr (rhs1_type);
4534 return true;
4535 }
4536 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4537 && INTEGRAL_TYPE_P (rhs2_type))
4538 || (VECTOR_TYPE_P (rhs1_type)
4539 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))))
4540 {
4541 error ("not allowed type combination in BIT_INSERT_EXPR");
4542 debug_generic_expr (rhs1_type);
4543 debug_generic_expr (rhs2_type);
4544 return true;
4545 }
4546 if (! tree_fits_uhwi_p (rhs3)
4547 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4548 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4549 {
4550 error ("invalid position or size in BIT_INSERT_EXPR");
4551 return true;
4552 }
4553 if (INTEGRAL_TYPE_P (rhs1_type))
4554 {
4555 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4556 if (bitpos >= TYPE_PRECISION (rhs1_type)
4557 || (bitpos + TYPE_PRECISION (rhs2_type)
4558 > TYPE_PRECISION (rhs1_type)))
4559 {
4560 error ("insertion out of range in BIT_INSERT_EXPR");
4561 return true;
4562 }
4563 }
4564 else if (VECTOR_TYPE_P (rhs1_type))
4565 {
4566 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4567 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4568 if (bitpos % bitsize != 0)
4569 {
4570 error ("vector insertion not at element boundary");
4571 return true;
4572 }
4573 }
4574 return false;
4575
4576 case DOT_PROD_EXPR:
4577 {
4578 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4579 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4580 && ((!INTEGRAL_TYPE_P (rhs1_type)
4581 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4582 || (!INTEGRAL_TYPE_P (lhs_type)
4583 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4584 || !types_compatible_p (rhs1_type, rhs2_type)
4585 || !useless_type_conversion_p (lhs_type, rhs3_type)
4586 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4587 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4588 {
4589 error ("type mismatch in dot product reduction");
4590 debug_generic_expr (lhs_type);
4591 debug_generic_expr (rhs1_type);
4592 debug_generic_expr (rhs2_type);
4593 return true;
4594 }
4595 return false;
4596 }
4597
4598 case REALIGN_LOAD_EXPR:
4599 /* FIXME. */
4600 return false;
4601
4602 default:
4603 gcc_unreachable ();
4604 }
4605 return false;
4606 }
4607
4608 /* Verify a gimple assignment statement STMT with a single rhs.
4609 Returns true if anything is wrong. */
4610
4611 static bool
4612 verify_gimple_assign_single (gassign *stmt)
4613 {
4614 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4615 tree lhs = gimple_assign_lhs (stmt);
4616 tree lhs_type = TREE_TYPE (lhs);
4617 tree rhs1 = gimple_assign_rhs1 (stmt);
4618 tree rhs1_type = TREE_TYPE (rhs1);
4619 bool res = false;
4620
4621 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4622 {
4623 error ("non-trivial conversion at assignment");
4624 debug_generic_expr (lhs_type);
4625 debug_generic_expr (rhs1_type);
4626 return true;
4627 }
4628
4629 if (gimple_clobber_p (stmt)
4630 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4631 {
4632 error ("non-decl/MEM_REF LHS in clobber statement");
4633 debug_generic_expr (lhs);
4634 return true;
4635 }
4636
4637 if (handled_component_p (lhs)
4638 || TREE_CODE (lhs) == MEM_REF
4639 || TREE_CODE (lhs) == TARGET_MEM_REF)
4640 res |= verify_types_in_gimple_reference (lhs, true);
4641
4642 /* Special codes we cannot handle via their class. */
4643 switch (rhs_code)
4644 {
4645 case ADDR_EXPR:
4646 {
4647 tree op = TREE_OPERAND (rhs1, 0);
4648 if (!is_gimple_addressable (op))
4649 {
4650 error ("invalid operand in unary expression");
4651 return true;
4652 }
4653
4654 /* Technically there is no longer a need for matching types, but
4655 gimple hygiene asks for this check. In LTO we can end up
4656 combining incompatible units and thus end up with addresses
4657 of globals that change their type to a common one. */
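/* For example, two units may declare the same global array with
   different element counts; after LTO merging, taking its address can
   yield a pointer whose pointed-to type no longer matches the merged
   declaration's type.  */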
4658 if (!in_lto_p
4659 && !types_compatible_p (TREE_TYPE (op),
4660 TREE_TYPE (TREE_TYPE (rhs1)))
4661 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4662 TREE_TYPE (op)))
4663 {
4664 error ("type mismatch in address expression");
4665 debug_generic_stmt (TREE_TYPE (rhs1));
4666 debug_generic_stmt (TREE_TYPE (op));
4667 return true;
4668 }
4669
4670 return verify_types_in_gimple_reference (op, true);
4671 }
4672
4673 /* tcc_reference */
4674 case INDIRECT_REF:
4675 error ("INDIRECT_REF in gimple IL");
4676 return true;
4677
4678 case COMPONENT_REF:
4679 case BIT_FIELD_REF:
4680 case ARRAY_REF:
4681 case ARRAY_RANGE_REF:
4682 case VIEW_CONVERT_EXPR:
4683 case REALPART_EXPR:
4684 case IMAGPART_EXPR:
4685 case TARGET_MEM_REF:
4686 case MEM_REF:
4687 if (!is_gimple_reg (lhs)
4688 && is_gimple_reg_type (TREE_TYPE (lhs)))
4689 {
4690 error ("invalid rhs for gimple memory store");
4691 debug_generic_stmt (lhs);
4692 debug_generic_stmt (rhs1);
4693 return true;
4694 }
4695 return res || verify_types_in_gimple_reference (rhs1, false);
4696
4697 /* tcc_constant */
4698 case SSA_NAME:
4699 case INTEGER_CST:
4700 case REAL_CST:
4701 case FIXED_CST:
4702 case COMPLEX_CST:
4703 case VECTOR_CST:
4704 case STRING_CST:
4705 return res;
4706
4707 /* tcc_declaration */
4708 case CONST_DECL:
4709 return res;
4710 case VAR_DECL:
4711 case PARM_DECL:
4712 if (!is_gimple_reg (lhs)
4713 && !is_gimple_reg (rhs1)
4714 && is_gimple_reg_type (TREE_TYPE (lhs)))
4715 {
4716 error ("invalid rhs for gimple memory store");
4717 debug_generic_stmt (lhs);
4718 debug_generic_stmt (rhs1);
4719 return true;
4720 }
4721 return res;
4722
4723 case CONSTRUCTOR:
4724 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4725 {
4726 unsigned int i;
4727 tree elt_i, elt_v, elt_t = NULL_TREE;
4728
4729 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4730 return res;
4731 /* For vector CONSTRUCTORs we require that either it is empty
4732 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4733 (then the element count must be correct to cover the whole
4734 outer vector and index must be NULL on all elements), or it is
4735 a CONSTRUCTOR of scalar elements, where we as an exception allow
4736 smaller number of elements (assuming zero filling) and
4737 consecutive indexes as compared to NULL indexes (such
4738 CONSTRUCTORs can appear in the IL from FEs). */
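/* For example, a 4-element integer vector may be built either from
   two 2-element integer vectors, or from up to four scalar integers,
   with missing trailing elements implicitly zero.  */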
4739 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4740 {
4741 if (elt_t == NULL_TREE)
4742 {
4743 elt_t = TREE_TYPE (elt_v);
4744 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4745 {
4746 tree elt_t = TREE_TYPE (elt_v);
4747 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4748 TREE_TYPE (elt_t)))
4749 {
4750 error ("incorrect type of vector CONSTRUCTOR"
4751 " elements");
4752 debug_generic_stmt (rhs1);
4753 return true;
4754 }
4755 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4756 * TYPE_VECTOR_SUBPARTS (elt_t),
4757 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4758 {
4759 error ("incorrect number of vector CONSTRUCTOR"
4760 " elements");
4761 debug_generic_stmt (rhs1);
4762 return true;
4763 }
4764 }
4765 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4766 elt_t))
4767 {
4768 error ("incorrect type of vector CONSTRUCTOR elements");
4769 debug_generic_stmt (rhs1);
4770 return true;
4771 }
4772 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4773 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4774 {
4775 error ("incorrect number of vector CONSTRUCTOR elements");
4776 debug_generic_stmt (rhs1);
4777 return true;
4778 }
4779 }
4780 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4781 {
4782 error ("incorrect type of vector CONSTRUCTOR elements");
4783 debug_generic_stmt (rhs1);
4784 return true;
4785 }
4786 if (elt_i != NULL_TREE
4787 && (TREE_CODE (elt_t) == VECTOR_TYPE
4788 || TREE_CODE (elt_i) != INTEGER_CST
4789 || compare_tree_int (elt_i, i) != 0))
4790 {
4791 error ("vector CONSTRUCTOR with non-NULL element index");
4792 debug_generic_stmt (rhs1);
4793 return true;
4794 }
4795 if (!is_gimple_val (elt_v))
4796 {
4797 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4798 debug_generic_stmt (rhs1);
4799 return true;
4800 }
4801 }
4802 }
4803 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4804 {
4805 error ("non-vector CONSTRUCTOR with elements");
4806 debug_generic_stmt (rhs1);
4807 return true;
4808 }
4809 return res;
4810 case OBJ_TYPE_REF:
4811 case ASSERT_EXPR:
4812 case WITH_SIZE_EXPR:
4813 /* FIXME. */
4814 return res;
4815
4816 default:;
4817 }
4818
4819 return res;
4820 }
4821
4822 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4823 is a problem, otherwise false. */
4824
4825 static bool
4826 verify_gimple_assign (gassign *stmt)
4827 {
4828 switch (gimple_assign_rhs_class (stmt))
4829 {
4830 case GIMPLE_SINGLE_RHS:
4831 return verify_gimple_assign_single (stmt);
4832
4833 case GIMPLE_UNARY_RHS:
4834 return verify_gimple_assign_unary (stmt);
4835
4836 case GIMPLE_BINARY_RHS:
4837 return verify_gimple_assign_binary (stmt);
4838
4839 case GIMPLE_TERNARY_RHS:
4840 return verify_gimple_assign_ternary (stmt);
4841
4842 default:
4843 gcc_unreachable ();
4844 }
4845 }
4846
4847 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4848 is a problem, otherwise false. */
4849
4850 static bool
4851 verify_gimple_return (greturn *stmt)
4852 {
4853 tree op = gimple_return_retval (stmt);
4854 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4855
4856 /* We cannot test for present return values as we do not fix up missing
4857 return values from the original source. */
4858 if (op == NULL)
4859 return false;
4860
4861 if (!is_gimple_val (op)
4862 && TREE_CODE (op) != RESULT_DECL)
4863 {
4864 error ("invalid operand in return statement");
4865 debug_generic_stmt (op);
4866 return true;
4867 }
4868
4869 if ((TREE_CODE (op) == RESULT_DECL
4870 && DECL_BY_REFERENCE (op))
4871 || (TREE_CODE (op) == SSA_NAME
4872 && SSA_NAME_VAR (op)
4873 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4874 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4875 op = TREE_TYPE (op);
4876
4877 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4878 {
4879 error ("invalid conversion in return statement");
4880 debug_generic_stmt (restype);
4881 debug_generic_stmt (TREE_TYPE (op));
4882 return true;
4883 }
4884
4885 return false;
4886 }
4887
4888
4889 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4890 is a problem, otherwise false. */
4891
4892 static bool
4893 verify_gimple_goto (ggoto *stmt)
4894 {
4895 tree dest = gimple_goto_dest (stmt);
4896
4897 /* ??? We have two canonical forms of direct goto destinations, a
4898 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
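/* A computed 'goto *p;', by contrast, uses a GIMPLE value of pointer
   type as its destination, which is what the check below accepts.  */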
4899 if (TREE_CODE (dest) != LABEL_DECL
4900 && (!is_gimple_val (dest)
4901 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4902 {
4903 error ("goto destination is neither a label nor a pointer");
4904 return true;
4905 }
4906
4907 return false;
4908 }
4909
4910 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4911 is a problem, otherwise false. */
4912
4913 static bool
4914 verify_gimple_switch (gswitch *stmt)
4915 {
4916 unsigned int i, n;
4917 tree elt, prev_upper_bound = NULL_TREE;
4918 tree index_type, elt_type = NULL_TREE;
4919
4920 if (!is_gimple_val (gimple_switch_index (stmt)))
4921 {
4922 error ("invalid operand to switch statement");
4923 debug_generic_stmt (gimple_switch_index (stmt));
4924 return true;
4925 }
4926
4927 index_type = TREE_TYPE (gimple_switch_index (stmt));
4928 if (! INTEGRAL_TYPE_P (index_type))
4929 {
4930 error ("non-integral type switch statement");
4931 debug_generic_expr (index_type);
4932 return true;
4933 }
4934
4935 elt = gimple_switch_label (stmt, 0);
4936 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4937 {
4938 error ("invalid default case label in switch statement");
4939 debug_generic_expr (elt);
4940 return true;
4941 }
4942
4943 n = gimple_switch_num_labels (stmt);
4944 for (i = 1; i < n; i++)
4945 {
4946 elt = gimple_switch_label (stmt, i);
4947
4948 if (! CASE_LOW (elt))
4949 {
4950 error ("invalid case label in switch statement");
4951 debug_generic_expr (elt);
4952 return true;
4953 }
4954 if (CASE_HIGH (elt)
4955 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4956 {
4957 error ("invalid case range in switch statement");
4958 debug_generic_expr (elt);
4959 return true;
4960 }
4961
4962 if (elt_type)
4963 {
4964 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4965 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4966 {
4967 error ("type mismatch for case label in switch statement");
4968 debug_generic_expr (elt);
4969 return true;
4970 }
4971 }
4972 else
4973 {
4974 elt_type = TREE_TYPE (CASE_LOW (elt));
4975 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4976 {
4977 error ("type precision mismatch in switch statement");
4978 return true;
4979 }
4980 }
4981
4982 if (prev_upper_bound)
4983 {
4984 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4985 {
4986 error ("case labels not sorted in switch statement");
4987 return true;
4988 }
4989 }
4990
4991 prev_upper_bound = CASE_HIGH (elt);
4992 if (! prev_upper_bound)
4993 prev_upper_bound = CASE_LOW (elt);
4994 }
4995
4996 return false;
4997 }
4998
4999 /* Verify a gimple debug statement STMT.
5000 Returns true if anything is wrong. */
5001
5002 static bool
5003 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5004 {
5005 /* There isn't much that could be wrong in a gimple debug stmt. A
5006 gimple debug bind stmt, for example, maps a tree (usually
5007 a VAR_DECL or a PARM_DECL, but possibly a scalarized
5008 component or member of an aggregate type) to another tree that
5009 can be an arbitrary expression.  These stmts expand into debug
5010 insns, and are converted to debug notes by var-tracking.c. */
5011 return false;
5012 }
5013
5014 /* Verify a gimple label statement STMT.
5015 Returns true if anything is wrong. */
5016
5017 static bool
5018 verify_gimple_label (glabel *stmt)
5019 {
5020 tree decl = gimple_label_label (stmt);
5021 int uid;
5022 bool err = false;
5023
5024 if (TREE_CODE (decl) != LABEL_DECL)
5025 return true;
5026 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5027 && DECL_CONTEXT (decl) != current_function_decl)
5028 {
5029 error ("label's context is not the current function decl");
5030 err |= true;
5031 }
5032
5033 uid = LABEL_DECL_UID (decl);
5034 if (cfun->cfg
5035 && (uid == -1
5036 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5037 {
5038 error ("incorrect entry in label_to_block_map");
5039 err |= true;
5040 }
5041
5042 uid = EH_LANDING_PAD_NR (decl);
5043 if (uid)
5044 {
5045 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5046 if (decl != lp->post_landing_pad)
5047 {
5048 error ("incorrect setting of landing pad number");
5049 err |= true;
5050 }
5051 }
5052
5053 return err;
5054 }
5055
5056 /* Verify a gimple cond statement STMT.
5057 Returns true if anything is wrong. */
5058
5059 static bool
5060 verify_gimple_cond (gcond *stmt)
5061 {
5062 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5063 {
5064 error ("invalid comparison code in gimple cond");
5065 return true;
5066 }
5067 if (!(!gimple_cond_true_label (stmt)
5068 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5069 || !(!gimple_cond_false_label (stmt)
5070 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5071 {
5072 error ("invalid labels in gimple cond");
5073 return true;
5074 }
5075
5076 return verify_gimple_comparison (boolean_type_node,
5077 gimple_cond_lhs (stmt),
5078 gimple_cond_rhs (stmt),
5079 gimple_cond_code (stmt));
5080 }
5081
5082 /* Verify the GIMPLE statement STMT. Returns true if there is an
5083 error, otherwise false. */
5084
5085 static bool
5086 verify_gimple_stmt (gimple *stmt)
5087 {
5088 switch (gimple_code (stmt))
5089 {
5090 case GIMPLE_ASSIGN:
5091 return verify_gimple_assign (as_a <gassign *> (stmt));
5092
5093 case GIMPLE_LABEL:
5094 return verify_gimple_label (as_a <glabel *> (stmt));
5095
5096 case GIMPLE_CALL:
5097 return verify_gimple_call (as_a <gcall *> (stmt));
5098
5099 case GIMPLE_COND:
5100 return verify_gimple_cond (as_a <gcond *> (stmt));
5101
5102 case GIMPLE_GOTO:
5103 return verify_gimple_goto (as_a <ggoto *> (stmt));
5104
5105 case GIMPLE_SWITCH:
5106 return verify_gimple_switch (as_a <gswitch *> (stmt));
5107
5108 case GIMPLE_RETURN:
5109 return verify_gimple_return (as_a <greturn *> (stmt));
5110
5111 case GIMPLE_ASM:
5112 return false;
5113
5114 case GIMPLE_TRANSACTION:
5115 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5116
5117 /* Tuples that do not have tree operands. */
5118 case GIMPLE_NOP:
5119 case GIMPLE_PREDICT:
5120 case GIMPLE_RESX:
5121 case GIMPLE_EH_DISPATCH:
5122 case GIMPLE_EH_MUST_NOT_THROW:
5123 return false;
5124
5125 CASE_GIMPLE_OMP:
5126 /* OpenMP directives are validated by the FE and never operated
5127 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5128 non-gimple expressions when the main index variable has had
5129 its address taken. This does not affect the loop itself
5130 because the header of a GIMPLE_OMP_FOR is merely used to determine
5131 how to set up the parallel iteration.  */
5132 return false;
5133
5134 case GIMPLE_DEBUG:
5135 return verify_gimple_debug (stmt);
5136
5137 default:
5138 gcc_unreachable ();
5139 }
5140 }
5141
5142 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5143 and false otherwise. */
5144
5145 static bool
5146 verify_gimple_phi (gimple *phi)
5147 {
5148 bool err = false;
5149 unsigned i;
5150 tree phi_result = gimple_phi_result (phi);
5151 bool virtual_p;
5152
5153 if (!phi_result)
5154 {
5155 error ("invalid PHI result");
5156 return true;
5157 }
5158
5159 virtual_p = virtual_operand_p (phi_result);
5160 if (TREE_CODE (phi_result) != SSA_NAME
5161 || (virtual_p
5162 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5163 {
5164 error ("invalid PHI result");
5165 err = true;
5166 }
5167
5168 for (i = 0; i < gimple_phi_num_args (phi); i++)
5169 {
5170 tree t = gimple_phi_arg_def (phi, i);
5171
5172 if (!t)
5173 {
5174 error ("missing PHI def");
5175 err |= true;
5176 continue;
5177 }
5178 /* Addressable variables do have SSA_NAMEs but they
5179 are not considered gimple values. */
5180 else if ((TREE_CODE (t) == SSA_NAME
5181 && virtual_p != virtual_operand_p (t))
5182 || (virtual_p
5183 && (TREE_CODE (t) != SSA_NAME
5184 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5185 || (!virtual_p
5186 && !is_gimple_val (t)))
5187 {
5188 error ("invalid PHI argument");
5189 debug_generic_expr (t);
5190 err |= true;
5191 }
5192 #ifdef ENABLE_TYPES_CHECKING
5193 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5194 {
5195 error ("incompatible types in PHI argument %u", i);
5196 debug_generic_stmt (TREE_TYPE (phi_result));
5197 debug_generic_stmt (TREE_TYPE (t));
5198 err |= true;
5199 }
5200 #endif
5201 }
5202
5203 return err;
5204 }
5205
5206 /* Verify the GIMPLE statements inside the sequence STMTS. */
5207
5208 static bool
5209 verify_gimple_in_seq_2 (gimple_seq stmts)
5210 {
5211 gimple_stmt_iterator ittr;
5212 bool err = false;
5213
5214 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5215 {
5216 gimple *stmt = gsi_stmt (ittr);
5217
5218 switch (gimple_code (stmt))
5219 {
5220 case GIMPLE_BIND:
5221 err |= verify_gimple_in_seq_2 (
5222 gimple_bind_body (as_a <gbind *> (stmt)));
5223 break;
5224
5225 case GIMPLE_TRY:
5226 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5227 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5228 break;
5229
5230 case GIMPLE_EH_FILTER:
5231 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5232 break;
5233
5234 case GIMPLE_EH_ELSE:
5235 {
5236 geh_else *eh_else = as_a <geh_else *> (stmt);
5237 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5238 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5239 }
5240 break;
5241
5242 case GIMPLE_CATCH:
5243 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5244 as_a <gcatch *> (stmt)));
5245 break;
5246
5247 case GIMPLE_TRANSACTION:
5248 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5249 break;
5250
5251 default:
5252 {
5253 bool err2 = verify_gimple_stmt (stmt);
5254 if (err2)
5255 debug_gimple_stmt (stmt);
5256 err |= err2;
5257 }
5258 }
5259 }
5260
5261 return err;
5262 }
5263
5264 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5265 is a problem, otherwise false. */
5266
5267 static bool
5268 verify_gimple_transaction (gtransaction *stmt)
5269 {
5270 tree lab;
5271
5272 lab = gimple_transaction_label_norm (stmt);
5273 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5274 return true;
5275 lab = gimple_transaction_label_uninst (stmt);
5276 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5277 return true;
5278 lab = gimple_transaction_label_over (stmt);
5279 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5280 return true;
5281
5282 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5283 }
5284
5285
5286 /* Verify the GIMPLE statements inside the statement list STMTS. */
5287
5288 DEBUG_FUNCTION void
5289 verify_gimple_in_seq (gimple_seq stmts)
5290 {
5291 timevar_push (TV_TREE_STMT_VERIFY);
5292 if (verify_gimple_in_seq_2 (stmts))
5293 internal_error ("verify_gimple failed");
5294 timevar_pop (TV_TREE_STMT_VERIFY);
5295 }
5296
5297 /* Return true when T can be shared.  */
5298
5299 static bool
5300 tree_node_can_be_shared (tree t)
5301 {
5302 if (IS_TYPE_OR_DECL_P (t)
5303 || is_gimple_min_invariant (t)
5304 || TREE_CODE (t) == SSA_NAME
5305 || t == error_mark_node
5306 || TREE_CODE (t) == IDENTIFIER_NODE)
5307 return true;
5308
5309 if (TREE_CODE (t) == CASE_LABEL_EXPR)
5310 return true;
5311
5312 if (DECL_P (t))
5313 return true;
5314
5315 return false;
5316 }
5317
5318 /* Called via walk_tree. Verify tree sharing. */
5319
5320 static tree
5321 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5322 {
5323 hash_set<void *> *visited = (hash_set<void *> *) data;
5324
5325 if (tree_node_can_be_shared (*tp))
5326 {
5327 *walk_subtrees = false;
5328 return NULL;
5329 }
5330
5331 if (visited->add (*tp))
5332 return *tp;
5333
5334 return NULL;
5335 }
5336
5337 /* Called via walk_gimple_stmt. Verify tree sharing. */
5338
5339 static tree
5340 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5341 {
5342 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5343 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5344 }
5345
5346 static bool eh_error_found;
5347 bool
5348 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5349 hash_set<gimple *> *visited)
5350 {
5351 if (!visited->contains (stmt))
5352 {
5353 error ("dead STMT in EH table");
5354 debug_gimple_stmt (stmt);
5355 eh_error_found = true;
5356 }
5357 return true;
5358 }
5359
5360 /* Verify that LOC's block is in BLOCKS.  */
5361
5362 static bool
5363 verify_location (hash_set<tree> *blocks, location_t loc)
5364 {
5365 tree block = LOCATION_BLOCK (loc);
5366 if (block != NULL_TREE
5367 && !blocks->contains (block))
5368 {
5369 error ("location references block not in block tree");
5370 return true;
5371 }
5372 if (block != NULL_TREE)
5373 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5374 return false;
5375 }
5376
5377 /* Called via walk_tree. Verify that expressions have no blocks. */
5378
5379 static tree
5380 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5381 {
5382 if (!EXPR_P (*tp))
5383 {
5384 *walk_subtrees = false;
5385 return NULL;
5386 }
5387
5388 location_t loc = EXPR_LOCATION (*tp);
5389 if (LOCATION_BLOCK (loc) != NULL)
5390 return *tp;
5391
5392 return NULL;
5393 }
5394
5395 /* Called via walk_tree. Verify locations of expressions. */
5396
5397 static tree
5398 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5399 {
5400 hash_set<tree> *blocks = (hash_set<tree> *) data;
5401
5402 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp))
5403 {
5404 tree t = DECL_DEBUG_EXPR (*tp);
5405 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5406 if (addr)
5407 return addr;
5408 }
5409 if ((VAR_P (*tp)
5410 || TREE_CODE (*tp) == PARM_DECL
5411 || TREE_CODE (*tp) == RESULT_DECL)
5412 && DECL_HAS_VALUE_EXPR_P (*tp))
5413 {
5414 tree t = DECL_VALUE_EXPR (*tp);
5415 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
5416 if (addr)
5417 return addr;
5418 }
5419
5420 if (!EXPR_P (*tp))
5421 {
5422 *walk_subtrees = false;
5423 return NULL;
5424 }
5425
5426 location_t loc = EXPR_LOCATION (*tp);
5427 if (verify_location (blocks, loc))
5428 return *tp;
5429
5430 return NULL;
5431 }
5432
5433 /* Called via walk_gimple_op. Verify locations of expressions. */
5434
5435 static tree
5436 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5437 {
5438 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5439 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5440 }
5441
5442 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5443
5444 static void
5445 collect_subblocks (hash_set<tree> *blocks, tree block)
5446 {
5447 tree t;
5448 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5449 {
5450 blocks->add (t);
5451 collect_subblocks (blocks, t);
5452 }
5453 }
5454
5455 /* Verify the GIMPLE statements in the CFG of FN. */
5456
5457 DEBUG_FUNCTION void
5458 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5459 {
5460 basic_block bb;
5461 bool err = false;
5462
5463 timevar_push (TV_TREE_STMT_VERIFY);
5464 hash_set<void *> visited;
5465 hash_set<gimple *> visited_stmts;
5466
5467 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5468 hash_set<tree> blocks;
5469 if (DECL_INITIAL (fn->decl))
5470 {
5471 blocks.add (DECL_INITIAL (fn->decl));
5472 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5473 }
5474
5475 FOR_EACH_BB_FN (bb, fn)
5476 {
5477 gimple_stmt_iterator gsi;
5478
5479 for (gphi_iterator gpi = gsi_start_phis (bb);
5480 !gsi_end_p (gpi);
5481 gsi_next (&gpi))
5482 {
5483 gphi *phi = gpi.phi ();
5484 bool err2 = false;
5485 unsigned i;
5486
5487 visited_stmts.add (phi);
5488
5489 if (gimple_bb (phi) != bb)
5490 {
5491 error ("gimple_bb (phi) is set to a wrong basic block");
5492 err2 = true;
5493 }
5494
5495 err2 |= verify_gimple_phi (phi);
5496
5497 /* Only PHI arguments have locations. */
5498 if (gimple_location (phi) != UNKNOWN_LOCATION)
5499 {
5500 error ("PHI node with location");
5501 err2 = true;
5502 }
5503
5504 for (i = 0; i < gimple_phi_num_args (phi); i++)
5505 {
5506 tree arg = gimple_phi_arg_def (phi, i);
5507 tree addr = walk_tree (&arg, verify_node_sharing_1,
5508 &visited, NULL);
5509 if (addr)
5510 {
5511 error ("incorrect sharing of tree nodes");
5512 debug_generic_expr (addr);
5513 err2 |= true;
5514 }
5515 location_t loc = gimple_phi_arg_location (phi, i);
5516 if (virtual_operand_p (gimple_phi_result (phi))
5517 && loc != UNKNOWN_LOCATION)
5518 {
5519 error ("virtual PHI with argument locations");
5520 err2 = true;
5521 }
5522 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5523 if (addr)
5524 {
5525 debug_generic_expr (addr);
5526 err2 = true;
5527 }
5528 err2 |= verify_location (&blocks, loc);
5529 }
5530
5531 if (err2)
5532 debug_gimple_stmt (phi);
5533 err |= err2;
5534 }
5535
5536 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5537 {
5538 gimple *stmt = gsi_stmt (gsi);
5539 bool err2 = false;
5540 struct walk_stmt_info wi;
5541 tree addr;
5542 int lp_nr;
5543
5544 visited_stmts.add (stmt);
5545
5546 if (gimple_bb (stmt) != bb)
5547 {
5548 error ("gimple_bb (stmt) is set to a wrong basic block");
5549 err2 = true;
5550 }
5551
5552 err2 |= verify_gimple_stmt (stmt);
5553 err2 |= verify_location (&blocks, gimple_location (stmt));
5554
5555 memset (&wi, 0, sizeof (wi));
5556 wi.info = (void *) &visited;
5557 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5558 if (addr)
5559 {
5560 error ("incorrect sharing of tree nodes");
5561 debug_generic_expr (addr);
5562 err2 |= true;
5563 }
5564
5565 memset (&wi, 0, sizeof (wi));
5566 wi.info = (void *) &blocks;
5567 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5568 if (addr)
5569 {
5570 debug_generic_expr (addr);
5571 err2 |= true;
5572 }
5573
5574 /* ??? Instead of not checking these stmts at all the walker
5575 should know its context via wi. */
5576 if (!is_gimple_debug (stmt)
5577 && !is_gimple_omp (stmt))
5578 {
5579 memset (&wi, 0, sizeof (wi));
5580 addr = walk_gimple_op (stmt, verify_expr, &wi);
5581 if (addr)
5582 {
5583 debug_generic_expr (addr);
5584 inform (gimple_location (stmt), "in statement");
5585 err2 |= true;
5586 }
5587 }
5588
5589 /* If the statement is marked as part of an EH region, then it is
5590 expected that the statement could throw.  Verify that when
5591 optimizations simplify a statement so that we can prove it
5592 cannot throw, the other relevant data structures are updated
5593 to match.  */
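/* For instance, a pass that proves a call cannot throw is expected
   to have dropped the statement from the EH table as well, e.g. via
   remove_stmt_from_eh_lp.  */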
5594 lp_nr = lookup_stmt_eh_lp (stmt);
5595 if (lp_nr > 0)
5596 {
5597 if (!stmt_could_throw_p (stmt))
5598 {
5599 if (verify_nothrow)
5600 {
5601 error ("statement marked for throw, but doesn%'t");
5602 err2 |= true;
5603 }
5604 }
5605 else if (!gsi_one_before_end_p (gsi))
5606 {
5607 error ("statement marked for throw in middle of block");
5608 err2 |= true;
5609 }
5610 }
5611
5612 if (err2)
5613 debug_gimple_stmt (stmt);
5614 err |= err2;
5615 }
5616 }
5617
5618 eh_error_found = false;
5619 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5620 if (eh_table)
5621 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5622 (&visited_stmts);
5623
5624 if (err || eh_error_found)
5625 internal_error ("verify_gimple failed");
5626
5627 verify_histograms ();
5628 timevar_pop (TV_TREE_STMT_VERIFY);
5629 }
5630
5631
5632 /* Verifies that the flow information is OK. */
5633
5634 static int
5635 gimple_verify_flow_info (void)
5636 {
5637 int err = 0;
5638 basic_block bb;
5639 gimple_stmt_iterator gsi;
5640 gimple *stmt;
5641 edge e;
5642 edge_iterator ei;
5643
5644 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5645 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5646 {
5647 error ("ENTRY_BLOCK has IL associated with it");
5648 err = 1;
5649 }
5650
5651 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5652 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5653 {
5654 error ("EXIT_BLOCK has IL associated with it");
5655 err = 1;
5656 }
5657
5658 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5659 if (e->flags & EDGE_FALLTHRU)
5660 {
5661 error ("fallthru to exit from bb %d", e->src->index);
5662 err = 1;
5663 }
5664
5665 FOR_EACH_BB_FN (bb, cfun)
5666 {
5667 bool found_ctrl_stmt = false;
5668
5669 stmt = NULL;
5670
5671 /* Skip labels at the start of the basic block.  */
5672 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5673 {
5674 tree label;
5675 gimple *prev_stmt = stmt;
5676
5677 stmt = gsi_stmt (gsi);
5678
5679 if (gimple_code (stmt) != GIMPLE_LABEL)
5680 break;
5681
5682 label = gimple_label_label (as_a <glabel *> (stmt));
5683 if (prev_stmt && DECL_NONLOCAL (label))
5684 {
5685 error ("nonlocal label ");
5686 print_generic_expr (stderr, label);
5687 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5688 bb->index);
5689 err = 1;
5690 }
5691
5692 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5693 {
5694 error ("EH landing pad label ");
5695 print_generic_expr (stderr, label);
5696 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5697 bb->index);
5698 err = 1;
5699 }
5700
5701 if (label_to_block (label) != bb)
5702 {
5703 error ("label ");
5704 print_generic_expr (stderr, label);
5705 fprintf (stderr, " to block does not match in bb %d",
5706 bb->index);
5707 err = 1;
5708 }
5709
5710 if (decl_function_context (label) != current_function_decl)
5711 {
5712 error ("label ");
5713 print_generic_expr (stderr, label);
5714 fprintf (stderr, " has incorrect context in bb %d",
5715 bb->index);
5716 err = 1;
5717 }
5718 }
5719
5720 /* Verify that body of basic block BB is free of control flow. */
5721 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5722 {
5723 gimple *stmt = gsi_stmt (gsi);
5724
5725 if (found_ctrl_stmt)
5726 {
5727 error ("control flow in the middle of basic block %d",
5728 bb->index);
5729 err = 1;
5730 }
5731
5732 if (stmt_ends_bb_p (stmt))
5733 found_ctrl_stmt = true;
5734
5735 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5736 {
5737 error ("label ");
5738 print_generic_expr (stderr, gimple_label_label (label_stmt));
5739 fprintf (stderr, " in the middle of basic block %d", bb->index);
5740 err = 1;
5741 }
5742 }
5743
5744 gsi = gsi_last_nondebug_bb (bb);
5745 if (gsi_end_p (gsi))
5746 continue;
5747
5748 stmt = gsi_stmt (gsi);
5749
5750 if (gimple_code (stmt) == GIMPLE_LABEL)
5751 continue;
5752
5753 err |= verify_eh_edges (stmt);
5754
5755 if (is_ctrl_stmt (stmt))
5756 {
5757 FOR_EACH_EDGE (e, ei, bb->succs)
5758 if (e->flags & EDGE_FALLTHRU)
5759 {
5760 error ("fallthru edge after a control statement in bb %d",
5761 bb->index);
5762 err = 1;
5763 }
5764 }
5765
5766 if (gimple_code (stmt) != GIMPLE_COND)
5767 {
5768 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5769 after anything else but an if statement.  */
5770 FOR_EACH_EDGE (e, ei, bb->succs)
5771 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5772 {
5773 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5774 bb->index);
5775 err = 1;
5776 }
5777 }
5778
5779 switch (gimple_code (stmt))
5780 {
5781 case GIMPLE_COND:
5782 {
5783 edge true_edge;
5784 edge false_edge;
5785
5786 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5787
5788 if (!true_edge
5789 || !false_edge
5790 || !(true_edge->flags & EDGE_TRUE_VALUE)
5791 || !(false_edge->flags & EDGE_FALSE_VALUE)
5792 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5793 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5794 || EDGE_COUNT (bb->succs) >= 3)
5795 {
5796 error ("wrong outgoing edge flags at end of bb %d",
5797 bb->index);
5798 err = 1;
5799 }
5800 }
5801 break;
5802
5803 case GIMPLE_GOTO:
5804 if (simple_goto_p (stmt))
5805 {
5806 error ("explicit goto at end of bb %d", bb->index);
5807 err = 1;
5808 }
5809 else
5810 {
5811 /* FIXME. We should double check that the labels in the
5812 destination blocks have their address taken. */
5813 FOR_EACH_EDGE (e, ei, bb->succs)
5814 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5815 | EDGE_FALSE_VALUE))
5816 || !(e->flags & EDGE_ABNORMAL))
5817 {
5818 error ("wrong outgoing edge flags at end of bb %d",
5819 bb->index);
5820 err = 1;
5821 }
5822 }
5823 break;
5824
5825 case GIMPLE_CALL:
5826 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5827 break;
5828 /* fallthru */
5829 case GIMPLE_RETURN:
5830 if (!single_succ_p (bb)
5831 || (single_succ_edge (bb)->flags
5832 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5833 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5834 {
5835 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5836 err = 1;
5837 }
5838 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5839 {
5840 error ("return edge does not point to exit in bb %d",
5841 bb->index);
5842 err = 1;
5843 }
5844 break;
5845
5846 case GIMPLE_SWITCH:
5847 {
5848 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5849 tree prev;
5850 edge e;
5851 size_t i, n;
5852
5853 n = gimple_switch_num_labels (switch_stmt);
5854
5855 /* Mark all the destination basic blocks. */
5856 for (i = 0; i < n; ++i)
5857 {
5858 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5859 basic_block label_bb = label_to_block (lab);
5860 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5861 label_bb->aux = (void *)1;
5862 }
5863
5864 /* Verify that the case labels are sorted. */
5865 prev = gimple_switch_label (switch_stmt, 0);
5866 for (i = 1; i < n; ++i)
5867 {
5868 tree c = gimple_switch_label (switch_stmt, i);
5869 if (!CASE_LOW (c))
5870 {
5871 error ("found default case not at the start of "
5872 "case vector");
5873 err = 1;
5874 continue;
5875 }
5876 if (CASE_LOW (prev)
5877 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5878 {
5879 error ("case labels not sorted: ");
5880 print_generic_expr (stderr, prev);
5881 fprintf (stderr," is greater than ");
5882 print_generic_expr (stderr, c);
5883 fprintf (stderr," but comes before it.\n");
5884 err = 1;
5885 }
5886 prev = c;
5887 }
5888 /* VRP will remove the default case if it can prove it will
5889 never be executed. So do not verify there always exists
5890 a default case here. */
5891
5892 FOR_EACH_EDGE (e, ei, bb->succs)
5893 {
5894 if (!e->dest->aux)
5895 {
5896 error ("extra outgoing edge %d->%d",
5897 bb->index, e->dest->index);
5898 err = 1;
5899 }
5900
5901 e->dest->aux = (void *)2;
5902 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5903 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5904 {
5905 error ("wrong outgoing edge flags at end of bb %d",
5906 bb->index);
5907 err = 1;
5908 }
5909 }
5910
5911 /* Check that we have all of them. */
5912 for (i = 0; i < n; ++i)
5913 {
5914 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
5915 basic_block label_bb = label_to_block (lab);
5916
5917 if (label_bb->aux != (void *)2)
5918 {
5919 error ("missing edge %i->%i", bb->index, label_bb->index);
5920 err = 1;
5921 }
5922 }
5923
5924 FOR_EACH_EDGE (e, ei, bb->succs)
5925 e->dest->aux = (void *)0;
5926 }
5927 break;
5928
5929 case GIMPLE_EH_DISPATCH:
5930 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5931 break;
5932
5933 default:
5934 break;
5935 }
5936 }
5937
5938 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5939 verify_dominators (CDI_DOMINATORS);
5940
5941 return err;
5942 }
5943
5944
5945 /* Updates phi nodes after creating a forwarder block joined
5946 by edge FALLTHRU. */
5947
5948 static void
5949 gimple_make_forwarder_block (edge fallthru)
5950 {
5951 edge e;
5952 edge_iterator ei;
5953 basic_block dummy, bb;
5954 tree var;
5955 gphi_iterator gsi;
5956
5957 dummy = fallthru->src;
5958 bb = fallthru->dest;
5959
5960 if (single_pred_p (bb))
5961 return;
5962
5963 /* If we redirected a branch we must create new PHI nodes at the
5964 start of BB. */
5965 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5966 {
5967 gphi *phi, *new_phi;
5968
5969 phi = gsi.phi ();
5970 var = gimple_phi_result (phi);
5971 new_phi = create_phi_node (var, bb);
5972 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5973 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5974 UNKNOWN_LOCATION);
5975 }
5976
5977 /* Add the arguments we have stored on edges. */
5978 FOR_EACH_EDGE (e, ei, bb->preds)
5979 {
5980 if (e == fallthru)
5981 continue;
5982
5983 flush_pending_stmts (e);
5984 }
5985 }
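/* Added note (a hedged sketch, not from the original sources): after
   make_forwarder_block splits off DUMMY, the PHIs that used to merge BB's
   predecessors remain in DUMMY.  The loop above therefore gives each DUMMY
   PHI a fresh result name, creates a new PHI for the old result in BB whose
   argument along FALLTHRU is that fresh name, and flush_pending_stmts then
   fills in the arguments queued on the edges that were redirected to BB.  */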
5986
5987
5988 /* Return a non-special label in the head of basic block BB.
5989 Create one if it doesn't exist. */
5990
5991 tree
5992 gimple_block_label (basic_block bb)
5993 {
5994 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5995 bool first = true;
5996 tree label;
5997 glabel *stmt;
5998
5999 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6000 {
6001 stmt = dyn_cast <glabel *> (gsi_stmt (i));
6002 if (!stmt)
6003 break;
6004 label = gimple_label_label (stmt);
6005 if (!DECL_NONLOCAL (label))
6006 {
6007 if (!first)
6008 gsi_move_before (&i, &s);
6009 return label;
6010 }
6011 }
6012
6013 label = create_artificial_label (UNKNOWN_LOCATION);
6014 stmt = gimple_build_label (label);
6015 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6016 return label;
6017 }
6018
6019
6020 /* Attempt to perform edge redirection by replacing a possibly complex
6021 jump instruction by a goto or by removing the jump completely.
6022 This can apply only if all edges now point to the same block. The
6023 parameters and return values are equivalent to
6024 redirect_edge_and_branch. */
6025
6026 static edge
6027 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6028 {
6029 basic_block src = e->src;
6030 gimple_stmt_iterator i;
6031 gimple *stmt;
6032
6033 /* We can replace or remove a complex jump only when we have exactly
6034 two edges. */
6035 if (EDGE_COUNT (src->succs) != 2
6036 /* Verify that all targets will be TARGET. Specifically, the
6037 edge that is not E must also go to TARGET. */
6038 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6039 return NULL;
6040
6041 i = gsi_last_bb (src);
6042 if (gsi_end_p (i))
6043 return NULL;
6044
6045 stmt = gsi_stmt (i);
6046
6047 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6048 {
6049 gsi_remove (&i, true);
6050 e = ssa_redirect_edge (e, target);
6051 e->flags = EDGE_FALLTHRU;
6052 return e;
6053 }
6054
6055 return NULL;
6056 }
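/* Added illustration (hedged): the case handled above is a block with
   exactly two successors whose last statement is a GIMPLE_COND or
   GIMPLE_SWITCH, where the edge other than E already reaches TARGET.
   Redirecting E there as well makes the jump pointless, so it is deleted
   and the redirected edge becomes a plain EDGE_FALLTHRU.  */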
6057
6058
6059 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6060 edge representing the redirected branch. */
6061
6062 static edge
6063 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6064 {
6065 basic_block bb = e->src;
6066 gimple_stmt_iterator gsi;
6067 edge ret;
6068 gimple *stmt;
6069
6070 if (e->flags & EDGE_ABNORMAL)
6071 return NULL;
6072
6073 if (e->dest == dest)
6074 return NULL;
6075
6076 if (e->flags & EDGE_EH)
6077 return redirect_eh_edge (e, dest);
6078
6079 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6080 {
6081 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6082 if (ret)
6083 return ret;
6084 }
6085
6086 gsi = gsi_last_nondebug_bb (bb);
6087 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6088
6089 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6090 {
6091 case GIMPLE_COND:
6092 /* For COND_EXPR, we only need to redirect the edge. */
6093 break;
6094
6095 case GIMPLE_GOTO:
6096 /* No non-abnormal edges should lead from a non-simple goto, and
6097 simple ones should be represented implicitly. */
6098 gcc_unreachable ();
6099
6100 case GIMPLE_SWITCH:
6101 {
6102 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6103 tree label = gimple_block_label (dest);
6104 tree cases = get_cases_for_edge (e, switch_stmt);
6105
6106 /* If we have a list of cases associated with E, then use it
6107 as it's a lot faster than walking the entire case vector. */
6108 if (cases)
6109 {
6110 edge e2 = find_edge (e->src, dest);
6111 tree last, first;
6112
6113 first = cases;
6114 while (cases)
6115 {
6116 last = cases;
6117 CASE_LABEL (cases) = label;
6118 cases = CASE_CHAIN (cases);
6119 }
6120
6121 /* If there was already an edge in the CFG, then we need
6122 to move all the cases associated with E to E2. */
6123 if (e2)
6124 {
6125 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6126
6127 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6128 CASE_CHAIN (cases2) = first;
6129 }
6130 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6131 }
6132 else
6133 {
6134 size_t i, n = gimple_switch_num_labels (switch_stmt);
6135
6136 for (i = 0; i < n; i++)
6137 {
6138 tree elt = gimple_switch_label (switch_stmt, i);
6139 if (label_to_block (CASE_LABEL (elt)) == e->dest)
6140 CASE_LABEL (elt) = label;
6141 }
6142 }
6143 }
6144 break;
6145
6146 case GIMPLE_ASM:
6147 {
6148 gasm *asm_stmt = as_a <gasm *> (stmt);
6149 int i, n = gimple_asm_nlabels (asm_stmt);
6150 tree label = NULL;
6151
6152 for (i = 0; i < n; ++i)
6153 {
6154 tree cons = gimple_asm_label_op (asm_stmt, i);
6155 if (label_to_block (TREE_VALUE (cons)) == e->dest)
6156 {
6157 if (!label)
6158 label = gimple_block_label (dest);
6159 TREE_VALUE (cons) = label;
6160 }
6161 }
6162
6163 /* If we didn't find any label matching the former edge in the
6164 asm labels, we must be redirecting the fallthrough
6165 edge. */
6166 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6167 }
6168 break;
6169
6170 case GIMPLE_RETURN:
6171 gsi_remove (&gsi, true);
6172 e->flags |= EDGE_FALLTHRU;
6173 break;
6174
6175 case GIMPLE_OMP_RETURN:
6176 case GIMPLE_OMP_CONTINUE:
6177 case GIMPLE_OMP_SECTIONS_SWITCH:
6178 case GIMPLE_OMP_FOR:
6179 /* The edges from OMP constructs can be simply redirected. */
6180 break;
6181
6182 case GIMPLE_EH_DISPATCH:
6183 if (!(e->flags & EDGE_FALLTHRU))
6184 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6185 break;
6186
6187 case GIMPLE_TRANSACTION:
6188 if (e->flags & EDGE_TM_ABORT)
6189 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6190 gimple_block_label (dest));
6191 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6192 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6193 gimple_block_label (dest));
6194 else
6195 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6196 gimple_block_label (dest));
6197 break;
6198
6199 default:
6200 /* Otherwise it must be a fallthru edge, and we don't need to
6201 do anything besides redirecting it. */
6202 gcc_assert (e->flags & EDGE_FALLTHRU);
6203 break;
6204 }
6205
6206 /* Update/insert PHI nodes as necessary. */
6207
6208 /* Now update the edges in the CFG. */
6209 e = ssa_redirect_edge (e, dest);
6210
6211 return e;
6212 }
6213
6214 /* Returns true if it is possible to remove edge E by redirecting
6215 it to the destination of the other edge from E->src. */
6216
6217 static bool
6218 gimple_can_remove_branch_p (const_edge e)
6219 {
6220 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6221 return false;
6222
6223 return true;
6224 }
6225
6226 /* Simple wrapper, as we can always redirect fallthru edges. */
6227
6228 static basic_block
6229 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6230 {
6231 e = gimple_redirect_edge_and_branch (e, dest);
6232 gcc_assert (e);
6233
6234 return NULL;
6235 }
6236
6237
6238 /* Splits basic block BB after statement STMT (but at least after the
6239 labels). If STMT is NULL, BB is split just after the labels. */
6240
6241 static basic_block
6242 gimple_split_block (basic_block bb, void *stmt)
6243 {
6244 gimple_stmt_iterator gsi;
6245 gimple_stmt_iterator gsi_tgt;
6246 gimple_seq list;
6247 basic_block new_bb;
6248 edge e;
6249 edge_iterator ei;
6250
6251 new_bb = create_empty_bb (bb);
6252
6253 /* Redirect the outgoing edges. */
6254 new_bb->succs = bb->succs;
6255 bb->succs = NULL;
6256 FOR_EACH_EDGE (e, ei, new_bb->succs)
6257 e->src = new_bb;
6258
6259 /* Get a stmt iterator pointing to the first stmt to move. */
6260 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6261 gsi = gsi_after_labels (bb);
6262 else
6263 {
6264 gsi = gsi_for_stmt ((gimple *) stmt);
6265 gsi_next (&gsi);
6266 }
6267
6268 /* Move everything from GSI to the new basic block. */
6269 if (gsi_end_p (gsi))
6270 return new_bb;
6271
6272 /* Split the statement list - avoid re-creating new containers as this
6273 brings ugly quadratic memory consumption in the inliner.
6274 (We are still quadratic since we need to update stmt BB pointers,
6275 sadly.) */
6276 gsi_split_seq_before (&gsi, &list);
6277 set_bb_seq (new_bb, list);
6278 for (gsi_tgt = gsi_start (list);
6279 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6280 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6281
6282 return new_bb;
6283 }
6284
6285
6286 /* Moves basic block BB after block AFTER. */
6287
6288 static bool
6289 gimple_move_block_after (basic_block bb, basic_block after)
6290 {
6291 if (bb->prev_bb == after)
6292 return true;
6293
6294 unlink_block (bb);
6295 link_block (bb, after);
6296
6297 return true;
6298 }
6299
6300
6301 /* Return TRUE if block BB has no executable statements, otherwise return
6302 FALSE. */
6303
6304 static bool
6305 gimple_empty_block_p (basic_block bb)
6306 {
6307 /* BB must have no executable statements. */
6308 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6309 if (phi_nodes (bb))
6310 return false;
6311 if (gsi_end_p (gsi))
6312 return true;
6313 if (is_gimple_debug (gsi_stmt (gsi)))
6314 gsi_next_nondebug (&gsi);
6315 return gsi_end_p (gsi);
6316 }
6317
6318
6319 /* Split a basic block if it ends with a conditional branch and if the
6320 other part of the block is not empty. */
6321
6322 static basic_block
6323 gimple_split_block_before_cond_jump (basic_block bb)
6324 {
6325 gimple *last, *split_point;
6326 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6327 if (gsi_end_p (gsi))
6328 return NULL;
6329 last = gsi_stmt (gsi);
6330 if (gimple_code (last) != GIMPLE_COND
6331 && gimple_code (last) != GIMPLE_SWITCH)
6332 return NULL;
6333 gsi_prev (&gsi);
6334 split_point = gsi_stmt (gsi);
6335 return split_block (bb, split_point)->dest;
6336 }
6337
6338
6339 /* Return true if basic_block can be duplicated. */
6340
6341 static bool
6342 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6343 {
6344 return true;
6345 }
6346
6347 /* Create a duplicate of the basic block BB. NOTE: This does not
6348 preserve SSA form. */
6349
6350 static basic_block
6351 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6352 {
6353 basic_block new_bb;
6354 gimple_stmt_iterator gsi_tgt;
6355
6356 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6357
6358 /* Copy the PHI nodes. We ignore PHI node arguments here because
6359 the incoming edges have not been set up yet.  */
6360 for (gphi_iterator gpi = gsi_start_phis (bb);
6361 !gsi_end_p (gpi);
6362 gsi_next (&gpi))
6363 {
6364 gphi *phi, *copy;
6365 phi = gpi.phi ();
6366 copy = create_phi_node (NULL_TREE, new_bb);
6367 create_new_def_for (gimple_phi_result (phi), copy,
6368 gimple_phi_result_ptr (copy));
6369 gimple_set_uid (copy, gimple_uid (phi));
6370 }
6371
6372 gsi_tgt = gsi_start_bb (new_bb);
6373 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6374 !gsi_end_p (gsi);
6375 gsi_next (&gsi))
6376 {
6377 def_operand_p def_p;
6378 ssa_op_iter op_iter;
6379 tree lhs;
6380 gimple *stmt, *copy;
6381
6382 stmt = gsi_stmt (gsi);
6383 if (gimple_code (stmt) == GIMPLE_LABEL)
6384 continue;
6385
6386 /* Don't duplicate label debug stmts. */
6387 if (gimple_debug_bind_p (stmt)
6388 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6389 == LABEL_DECL)
6390 continue;
6391
6392 /* Create a new copy of STMT and duplicate STMT's virtual
6393 operands. */
6394 copy = gimple_copy (stmt);
6395 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6396
6397 maybe_duplicate_eh_stmt (copy, stmt);
6398 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6399
6400 /* When copying around a stmt writing into a local non-user
6401 aggregate, make sure it won't share stack slot with other
6402 vars. */
6403 lhs = gimple_get_lhs (stmt);
6404 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6405 {
6406 tree base = get_base_address (lhs);
6407 if (base
6408 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6409 && DECL_IGNORED_P (base)
6410 && !TREE_STATIC (base)
6411 && !DECL_EXTERNAL (base)
6412 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6413 DECL_NONSHAREABLE (base) = 1;
6414 }
6415
6416 /* If requested remap dependence info of cliques brought in
6417 via inlining. */
6418 if (id)
6419 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6420 {
6421 tree op = gimple_op (copy, i);
6422 if (!op)
6423 continue;
6424 if (TREE_CODE (op) == ADDR_EXPR
6425 || TREE_CODE (op) == WITH_SIZE_EXPR)
6426 op = TREE_OPERAND (op, 0);
6427 while (handled_component_p (op))
6428 op = TREE_OPERAND (op, 0);
6429 if ((TREE_CODE (op) == MEM_REF
6430 || TREE_CODE (op) == TARGET_MEM_REF)
6431 && MR_DEPENDENCE_CLIQUE (op) > 1
6432 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6433 {
6434 if (!id->dependence_map)
6435 id->dependence_map = new hash_map<dependence_hash,
6436 unsigned short>;
6437 bool existed;
6438 unsigned short &newc = id->dependence_map->get_or_insert
6439 (MR_DEPENDENCE_CLIQUE (op), &existed);
6440 if (!existed)
6441 {
6442 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6443 newc = ++cfun->last_clique;
6444 }
6445 MR_DEPENDENCE_CLIQUE (op) = newc;
6446 }
6447 }
6448
6449 /* Create new names for all the definitions created by COPY and
6450 add replacement mappings for each new name. */
6451 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6452 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6453 }
6454
6455 return new_bb;
6456 }
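/* Added summary (hedged): besides gimple_copy, the loop above re-registers
   EH and value-profile histograms for each copy, marks ignored local
   aggregates DECL_NONSHAREABLE so a copy cannot share a stack slot with the
   original, remaps inlining-introduced MR_DEPENDENCE_CLIQUE numbers to
   fresh cliques (consistently per original clique via ID->dependence_map),
   and creates new SSA names for every definition in the copy.  */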
6457
6458 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6459
6460 static void
6461 add_phi_args_after_copy_edge (edge e_copy)
6462 {
6463 basic_block bb, bb_copy = e_copy->src, dest;
6464 edge e;
6465 edge_iterator ei;
6466 gphi *phi, *phi_copy;
6467 tree def;
6468 gphi_iterator psi, psi_copy;
6469
6470 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6471 return;
6472
6473 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6474
6475 if (e_copy->dest->flags & BB_DUPLICATED)
6476 dest = get_bb_original (e_copy->dest);
6477 else
6478 dest = e_copy->dest;
6479
6480 e = find_edge (bb, dest);
6481 if (!e)
6482 {
6483 /* During loop unrolling the target of the latch edge is copied.
6484 In this case we are not looking for an edge to dest, but for an
6485 edge to the duplicated block whose original was dest.  */
6486 FOR_EACH_EDGE (e, ei, bb->succs)
6487 {
6488 if ((e->dest->flags & BB_DUPLICATED)
6489 && get_bb_original (e->dest) == dest)
6490 break;
6491 }
6492
6493 gcc_assert (e != NULL);
6494 }
6495
6496 for (psi = gsi_start_phis (e->dest),
6497 psi_copy = gsi_start_phis (e_copy->dest);
6498 !gsi_end_p (psi);
6499 gsi_next (&psi), gsi_next (&psi_copy))
6500 {
6501 phi = psi.phi ();
6502 phi_copy = psi_copy.phi ();
6503 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6504 add_phi_arg (phi_copy, def, e_copy,
6505 gimple_phi_arg_location_from_edge (phi, e));
6506 }
6507 }
6508
6509
6510 /* Basic block BB_COPY was created by code duplication. Add phi node
6511 arguments for edges going out of BB_COPY. The blocks that were
6512 duplicated have BB_DUPLICATED set. */
6513
6514 void
6515 add_phi_args_after_copy_bb (basic_block bb_copy)
6516 {
6517 edge e_copy;
6518 edge_iterator ei;
6519
6520 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6521 {
6522 add_phi_args_after_copy_edge (e_copy);
6523 }
6524 }
6525
6526 /* Blocks in REGION_COPY array of length N_REGION were created by
6527 duplication of basic blocks. Add phi node arguments for edges
6528 going from these blocks. If E_COPY is not NULL, also add
6529 phi node arguments for its destination.  */
6530
6531 void
6532 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6533 edge e_copy)
6534 {
6535 unsigned i;
6536
6537 for (i = 0; i < n_region; i++)
6538 region_copy[i]->flags |= BB_DUPLICATED;
6539
6540 for (i = 0; i < n_region; i++)
6541 add_phi_args_after_copy_bb (region_copy[i]);
6542 if (e_copy)
6543 add_phi_args_after_copy_edge (e_copy);
6544
6545 for (i = 0; i < n_region; i++)
6546 region_copy[i]->flags &= ~BB_DUPLICATED;
6547 }
6548
6549 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6550 important exit edge EXIT. By important we mean that no SSA name defined
6551 inside region is live over the other exit edges of the region. All entry
6552 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6553 to the duplicate of the region. Dominance and loop information is
6554 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6555 UPDATE_DOMINANCE is false then we assume that the caller will update the
6556 dominance information after calling this function. The new basic
6557 blocks are stored to REGION_COPY in the same order as they had in REGION,
6558 provided that REGION_COPY is not NULL.
6559 The function returns false if it is unable to copy the region,
6560 true otherwise. */
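/* Added illustration (hedged): for the primary use, loop header copying,
   duplicating the header region on the preheader path effectively rewrites

     while (cond) { body; }

   into

     if (cond) { do { body; } while (cond); }

   so the duplicated blocks act as an initial guard and the original region
   supplies the exit test of the rotated loop, which is why EXIT and its
   copy become the new latch and entry edges.  */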
6561
6562 bool
6563 gimple_duplicate_sese_region (edge entry, edge exit,
6564 basic_block *region, unsigned n_region,
6565 basic_block *region_copy,
6566 bool update_dominance)
6567 {
6568 unsigned i;
6569 bool free_region_copy = false, copying_header = false;
6570 struct loop *loop = entry->dest->loop_father;
6571 edge exit_copy;
6572 vec<basic_block> doms = vNULL;
6573 edge redirected;
6574 profile_count total_count = profile_count::uninitialized ();
6575 profile_count entry_count = profile_count::uninitialized ();
6576
6577 if (!can_copy_bbs_p (region, n_region))
6578 return false;
6579
6580 /* Some sanity checking. Note that we do not check for all possible
6581 misuses of the functions.  I.e. if you ask to copy something weird,
6582 it will work, but the state of structures probably will not be
6583 correct. */
6584 for (i = 0; i < n_region; i++)
6585 {
6586 /* We do not handle subloops, i.e. all the blocks must belong to the
6587 same loop. */
6588 if (region[i]->loop_father != loop)
6589 return false;
6590
6591 if (region[i] != entry->dest
6592 && region[i] == loop->header)
6593 return false;
6594 }
6595
6596 /* In case the function is used for loop header copying (which is the primary
6597 use), ensure that EXIT and its copy will be new latch and entry edges. */
6598 if (loop->header == entry->dest)
6599 {
6600 copying_header = true;
6601
6602 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6603 return false;
6604
6605 for (i = 0; i < n_region; i++)
6606 if (region[i] != exit->src
6607 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6608 return false;
6609 }
6610
6611 initialize_original_copy_tables ();
6612
6613 if (copying_header)
6614 set_loop_copy (loop, loop_outer (loop));
6615 else
6616 set_loop_copy (loop, loop);
6617
6618 if (!region_copy)
6619 {
6620 region_copy = XNEWVEC (basic_block, n_region);
6621 free_region_copy = true;
6622 }
6623
6624 /* Record blocks outside the region that are dominated by something
6625 inside. */
6626 if (update_dominance)
6627 {
6628 doms.create (0);
6629 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6630 }
6631
6632 if (entry->dest->count.initialized_p ())
6633 {
6634 total_count = entry->dest->count;
6635 entry_count = entry->count ();
6636 /* Fix up corner cases, to avoid division by zero or creation of negative
6637 frequencies. */
6638 if (entry_count > total_count)
6639 entry_count = total_count;
6640 }
6641
6642 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6643 split_edge_bb_loc (entry), update_dominance);
6644 if (total_count.initialized_p () && entry_count.initialized_p ())
6645 {
6646 scale_bbs_frequencies_profile_count (region, n_region,
6647 total_count - entry_count,
6648 total_count);
6649 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6650 total_count);
6651 }
6652
6653 if (copying_header)
6654 {
6655 loop->header = exit->dest;
6656 loop->latch = exit->src;
6657 }
6658
6659 /* Redirect the entry and add the phi node arguments. */
6660 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6661 gcc_assert (redirected != NULL);
6662 flush_pending_stmts (entry);
6663
6664 /* Concerning updating of dominators: We must recount dominators
6665 for entry block and its copy. Anything that is outside of the
6666 region, but was dominated by something inside needs recounting as
6667 well. */
6668 if (update_dominance)
6669 {
6670 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6671 doms.safe_push (get_bb_original (entry->dest));
6672 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6673 doms.release ();
6674 }
6675
6676 /* Add the other PHI node arguments. */
6677 add_phi_args_after_copy (region_copy, n_region, NULL);
6678
6679 if (free_region_copy)
6680 free (region_copy);
6681
6682 free_original_copy_tables ();
6683 return true;
6684 }
6685
6686 /* Checks if BB is part of the region defined by N_REGION BBS. */
6687 static bool
6688 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6689 {
6690 unsigned int n;
6691
6692 for (n = 0; n < n_region; n++)
6693 {
6694 if (bb == bbs[n])
6695 return true;
6696 }
6697 return false;
6698 }
6699
6700 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6701 are stored to REGION_COPY in the same order in which they appear
6702 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6703 the region, EXIT an exit from it. The condition guarding EXIT
6704 is moved to ENTRY. Returns true if duplication succeeds, false
6705 otherwise.
6706
6707 For example,
6708
6709 some_code;
6710 if (cond)
6711 A;
6712 else
6713 B;
6714
6715 is transformed to
6716
6717 if (cond)
6718 {
6719 some_code;
6720 A;
6721 }
6722 else
6723 {
6724 some_code;
6725 B;
6726 }
6727 */
6728
6729 bool
6730 gimple_duplicate_sese_tail (edge entry, edge exit,
6731 basic_block *region, unsigned n_region,
6732 basic_block *region_copy)
6733 {
6734 unsigned i;
6735 bool free_region_copy = false;
6736 struct loop *loop = exit->dest->loop_father;
6737 struct loop *orig_loop = entry->dest->loop_father;
6738 basic_block switch_bb, entry_bb, nentry_bb;
6739 vec<basic_block> doms;
6740 profile_count total_count = profile_count::uninitialized (),
6741 exit_count = profile_count::uninitialized ();
6742 edge exits[2], nexits[2], e;
6743 gimple_stmt_iterator gsi;
6744 gimple *cond_stmt;
6745 edge sorig, snew;
6746 basic_block exit_bb;
6747 gphi_iterator psi;
6748 gphi *phi;
6749 tree def;
6750 struct loop *target, *aloop, *cloop;
6751
6752 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6753 exits[0] = exit;
6754 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6755
6756 if (!can_copy_bbs_p (region, n_region))
6757 return false;
6758
6759 initialize_original_copy_tables ();
6760 set_loop_copy (orig_loop, loop);
6761
6762 target = loop;
6763 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6764 {
6765 if (bb_part_of_region_p (aloop->header, region, n_region))
6766 {
6767 cloop = duplicate_loop (aloop, target);
6768 duplicate_subloops (aloop, cloop);
6769 }
6770 }
6771
6772 if (!region_copy)
6773 {
6774 region_copy = XNEWVEC (basic_block, n_region);
6775 free_region_copy = true;
6776 }
6777
6778 gcc_assert (!need_ssa_update_p (cfun));
6779
6780 /* Record blocks outside the region that are dominated by something
6781 inside. */
6782 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6783
6784 total_count = exit->src->count;
6785 exit_count = exit->count ();
6786 /* Fix up corner cases, to avoid division by zero or creation of negative
6787 frequencies. */
6788 if (exit_count > total_count)
6789 exit_count = total_count;
6790
6791 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6792 split_edge_bb_loc (exit), true);
6793 if (total_count.initialized_p () && exit_count.initialized_p ())
6794 {
6795 scale_bbs_frequencies_profile_count (region, n_region,
6796 total_count - exit_count,
6797 total_count);
6798 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6799 total_count);
6800 }
6801
6802 /* Create the switch block, and put the exit condition to it. */
6803 entry_bb = entry->dest;
6804 nentry_bb = get_bb_copy (entry_bb);
6805 if (!last_stmt (entry->src)
6806 || !stmt_ends_bb_p (last_stmt (entry->src)))
6807 switch_bb = entry->src;
6808 else
6809 switch_bb = split_edge (entry);
6810 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6811
6812 gsi = gsi_last_bb (switch_bb);
6813 cond_stmt = last_stmt (exit->src);
6814 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6815 cond_stmt = gimple_copy (cond_stmt);
6816
6817 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6818
6819 sorig = single_succ_edge (switch_bb);
6820 sorig->flags = exits[1]->flags;
6821 sorig->probability = exits[1]->probability;
6822 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6823 snew->probability = exits[0]->probability;
6824
6825
6826 /* Register the new edge from SWITCH_BB in loop exit lists. */
6827 rescan_loop_exit (snew, true, false);
6828
6829 /* Add the PHI node arguments. */
6830 add_phi_args_after_copy (region_copy, n_region, snew);
6831
6832 /* Get rid of now superfluous conditions and associated edges (and phi node
6833 arguments). */
6834 exit_bb = exit->dest;
6835
6836 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6837 PENDING_STMT (e) = NULL;
6838
6839 /* The latch of ORIG_LOOP was copied, and so was the backedge
6840 to the original header. We redirect this backedge to EXIT_BB. */
6841 for (i = 0; i < n_region; i++)
6842 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6843 {
6844 gcc_assert (single_succ_edge (region_copy[i]));
6845 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6846 PENDING_STMT (e) = NULL;
6847 for (psi = gsi_start_phis (exit_bb);
6848 !gsi_end_p (psi);
6849 gsi_next (&psi))
6850 {
6851 phi = psi.phi ();
6852 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6853 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6854 }
6855 }
6856 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6857 PENDING_STMT (e) = NULL;
6858
6859 /* Anything that is outside of the region, but was dominated by something
6860 inside needs to update dominance info. */
6861 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6862 doms.release ();
6863 /* Update the SSA web. */
6864 update_ssa (TODO_update_ssa);
6865
6866 if (free_region_copy)
6867 free (region_copy);
6868
6869 free_original_copy_tables ();
6870 return true;
6871 }
6872
6873 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6874 adding blocks when the dominator traversal reaches EXIT. This
6875 function silently assumes that ENTRY strictly dominates EXIT. */
6876
6877 void
6878 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6879 vec<basic_block> *bbs_p)
6880 {
6881 basic_block son;
6882
6883 for (son = first_dom_son (CDI_DOMINATORS, entry);
6884 son;
6885 son = next_dom_son (CDI_DOMINATORS, son))
6886 {
6887 bbs_p->safe_push (son);
6888 if (son != exit)
6889 gather_blocks_in_sese_region (son, exit, bbs_p);
6890 }
6891 }
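/* Illustrative usage (an added sketch; mirrors how move_sese_region_to_fn
   collects its region further below; ENTRY itself must be pushed by the
   caller because the dominator walk only visits its children):

     vec<basic_block> bbs;
     bbs.create (0);
     bbs.safe_push (entry_bb);
     gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
*/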
6892
6893 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6894 The duplicates are recorded in VARS_MAP. */
6895
6896 static void
6897 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6898 tree to_context)
6899 {
6900 tree t = *tp, new_t;
6901 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6902
6903 if (DECL_CONTEXT (t) == to_context)
6904 return;
6905
6906 bool existed;
6907 tree &loc = vars_map->get_or_insert (t, &existed);
6908
6909 if (!existed)
6910 {
6911 if (SSA_VAR_P (t))
6912 {
6913 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6914 add_local_decl (f, new_t);
6915 }
6916 else
6917 {
6918 gcc_assert (TREE_CODE (t) == CONST_DECL);
6919 new_t = copy_node (t);
6920 }
6921 DECL_CONTEXT (new_t) = to_context;
6922
6923 loc = new_t;
6924 }
6925 else
6926 new_t = loc;
6927
6928 *tp = new_t;
6929 }
6930
6931
6932 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6933 VARS_MAP maps old ssa names and var_decls to the new ones. */
6934
6935 static tree
6936 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6937 tree to_context)
6938 {
6939 tree new_name;
6940
6941 gcc_assert (!virtual_operand_p (name));
6942
6943 tree *loc = vars_map->get (name);
6944
6945 if (!loc)
6946 {
6947 tree decl = SSA_NAME_VAR (name);
6948 if (decl)
6949 {
6950 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6951 replace_by_duplicate_decl (&decl, vars_map, to_context);
6952 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6953 decl, SSA_NAME_DEF_STMT (name));
6954 }
6955 else
6956 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6957 name, SSA_NAME_DEF_STMT (name));
6958
6959 /* Now that we've used the def stmt to define new_name, make sure it
6960 doesn't define name anymore. */
6961 SSA_NAME_DEF_STMT (name) = NULL;
6962
6963 vars_map->put (name, new_name);
6964 }
6965 else
6966 new_name = *loc;
6967
6968 return new_name;
6969 }
6970
6971 struct move_stmt_d
6972 {
6973 tree orig_block;
6974 tree new_block;
6975 tree from_context;
6976 tree to_context;
6977 hash_map<tree, tree> *vars_map;
6978 htab_t new_label_map;
6979 hash_map<void *, void *> *eh_map;
6980 bool remap_decls_p;
6981 };
6982
6983 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6984 contained in *TP if it has been ORIG_BLOCK previously and change the
6985 DECL_CONTEXT of every local variable referenced in *TP. */
6986
6987 static tree
6988 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6989 {
6990 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6991 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6992 tree t = *tp;
6993
6994 if (EXPR_P (t))
6995 {
6996 tree block = TREE_BLOCK (t);
6997 if (block == NULL_TREE)
6998 ;
6999 else if (block == p->orig_block
7000 || p->orig_block == NULL_TREE)
7001 {
7002 /* tree_node_can_be_shared says we can share invariant
7003 addresses but unshare_expr copies them anyway.  Make sure
7004 to unshare before adjusting the block in place - we do not
7005 always see a copy here. */
7006 if (TREE_CODE (t) == ADDR_EXPR
7007 && is_gimple_min_invariant (t))
7008 *tp = t = unshare_expr (t);
7009 TREE_SET_BLOCK (t, p->new_block);
7010 }
7011 else if (flag_checking)
7012 {
7013 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7014 block = BLOCK_SUPERCONTEXT (block);
7015 gcc_assert (block == p->orig_block);
7016 }
7017 }
7018 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7019 {
7020 if (TREE_CODE (t) == SSA_NAME)
7021 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7022 else if (TREE_CODE (t) == PARM_DECL
7023 && gimple_in_ssa_p (cfun))
7024 *tp = *(p->vars_map->get (t));
7025 else if (TREE_CODE (t) == LABEL_DECL)
7026 {
7027 if (p->new_label_map)
7028 {
7029 struct tree_map in, *out;
7030 in.base.from = t;
7031 out = (struct tree_map *)
7032 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7033 if (out)
7034 *tp = t = out->to;
7035 }
7036
7037 /* For FORCED_LABELs we can end up with references from other
7038 functions if some SESE regions are outlined. It is UB to
7039 jump in between them, but they could be used just for printing
7040 addresses etc. In that case, DECL_CONTEXT on the label should
7041 be the function containing the glabel stmt with that LABEL_DECL,
7042 rather than whatever function a reference to the label was seen
7043 last time. */
7044 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7045 DECL_CONTEXT (t) = p->to_context;
7046 }
7047 else if (p->remap_decls_p)
7048 {
7049 /* Replace T with its duplicate. T should no longer appear in the
7050 parent function, so this looks wasteful; however, it may appear
7051 in referenced_vars, and more importantly, as virtual operands of
7052 statements, and in alias lists of other variables. It would be
7053 quite difficult to expunge it from all those places. ??? It might
7054 suffice to do this for addressable variables. */
7055 if ((VAR_P (t) && !is_global_var (t))
7056 || TREE_CODE (t) == CONST_DECL)
7057 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7058 }
7059 *walk_subtrees = 0;
7060 }
7061 else if (TYPE_P (t))
7062 *walk_subtrees = 0;
7063
7064 return NULL_TREE;
7065 }
7066
7067 /* Helper for move_stmt_r. Given an EH region number for the source
7068 function, map that to the duplicate EH region number in the dest.  */
7069
7070 static int
7071 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7072 {
7073 eh_region old_r, new_r;
7074
7075 old_r = get_eh_region_from_number (old_nr);
7076 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7077
7078 return new_r->index;
7079 }
7080
7081 /* Similar, but operate on INTEGER_CSTs. */
7082
7083 static tree
7084 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7085 {
7086 int old_nr, new_nr;
7087
7088 old_nr = tree_to_shwi (old_t_nr);
7089 new_nr = move_stmt_eh_region_nr (old_nr, p);
7090
7091 return build_int_cst (integer_type_node, new_nr);
7092 }
7093
7094 /* Like move_stmt_op, but for gimple statements.
7095
7096 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7097 contained in the current statement in *GSI_P and change the
7098 DECL_CONTEXT of every local variable referenced in the current
7099 statement. */
7100
7101 static tree
7102 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7103 struct walk_stmt_info *wi)
7104 {
7105 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7106 gimple *stmt = gsi_stmt (*gsi_p);
7107 tree block = gimple_block (stmt);
7108
7109 if (block == p->orig_block
7110 || (p->orig_block == NULL_TREE
7111 && block != NULL_TREE))
7112 gimple_set_block (stmt, p->new_block);
7113
7114 switch (gimple_code (stmt))
7115 {
7116 case GIMPLE_CALL:
7117 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7118 {
7119 tree r, fndecl = gimple_call_fndecl (stmt);
7120 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
7121 switch (DECL_FUNCTION_CODE (fndecl))
7122 {
7123 case BUILT_IN_EH_COPY_VALUES:
7124 r = gimple_call_arg (stmt, 1);
7125 r = move_stmt_eh_region_tree_nr (r, p);
7126 gimple_call_set_arg (stmt, 1, r);
7127 /* FALLTHRU */
7128
7129 case BUILT_IN_EH_POINTER:
7130 case BUILT_IN_EH_FILTER:
7131 r = gimple_call_arg (stmt, 0);
7132 r = move_stmt_eh_region_tree_nr (r, p);
7133 gimple_call_set_arg (stmt, 0, r);
7134 break;
7135
7136 default:
7137 break;
7138 }
7139 }
7140 break;
7141
7142 case GIMPLE_RESX:
7143 {
7144 gresx *resx_stmt = as_a <gresx *> (stmt);
7145 int r = gimple_resx_region (resx_stmt);
7146 r = move_stmt_eh_region_nr (r, p);
7147 gimple_resx_set_region (resx_stmt, r);
7148 }
7149 break;
7150
7151 case GIMPLE_EH_DISPATCH:
7152 {
7153 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7154 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7155 r = move_stmt_eh_region_nr (r, p);
7156 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7157 }
7158 break;
7159
7160 case GIMPLE_OMP_RETURN:
7161 case GIMPLE_OMP_CONTINUE:
7162 break;
7163
7164 case GIMPLE_LABEL:
7165 {
7166 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7167 so that such labels can be referenced from other regions.
7168 Make sure to update it when seeing a GIMPLE_LABEL though,
7169 that is the owner of the label. */
7170 walk_gimple_op (stmt, move_stmt_op, wi);
7171 *handled_ops_p = true;
7172 tree label = gimple_label_label (as_a <glabel *> (stmt));
7173 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7174 DECL_CONTEXT (label) = p->to_context;
7175 }
7176 break;
7177
7178 default:
7179 if (is_gimple_omp (stmt))
7180 {
7181 /* Do not remap variables inside OMP directives. Variables
7182 referenced in clauses and directive header belong to the
7183 parent function and should not be moved into the child
7184 function. */
7185 bool save_remap_decls_p = p->remap_decls_p;
7186 p->remap_decls_p = false;
7187 *handled_ops_p = true;
7188
7189 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7190 move_stmt_op, wi);
7191
7192 p->remap_decls_p = save_remap_decls_p;
7193 }
7194 break;
7195 }
7196
7197 return NULL_TREE;
7198 }
7199
7200 /* Move basic block BB from function CFUN to function DEST_FN. The
7201 block is moved out of the original linked list and placed after
7202 block AFTER in the new list. Also, the block is removed from the
7203 original array of blocks and placed in DEST_FN's array of blocks.
7204 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7205 updated to reflect the moved edges.
7206
7207 The local variables are remapped to new instances, VARS_MAP is used
7208 to record the mapping. */
7209
7210 static void
7211 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7212 basic_block after, bool update_edge_count_p,
7213 struct move_stmt_d *d)
7214 {
7215 struct control_flow_graph *cfg;
7216 edge_iterator ei;
7217 edge e;
7218 gimple_stmt_iterator si;
7219 unsigned old_len, new_len;
7220
7221 /* Remove BB from dominance structures. */
7222 delete_from_dominance_info (CDI_DOMINATORS, bb);
7223
7224 /* Move BB from its current loop to the copy in the new function. */
7225 if (current_loops)
7226 {
7227 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
7228 if (new_loop)
7229 bb->loop_father = new_loop;
7230 }
7231
7232 /* Link BB to the new linked list. */
7233 move_block_after (bb, after);
7234
7235 /* Update the edge count in the corresponding flowgraphs. */
7236 if (update_edge_count_p)
7237 FOR_EACH_EDGE (e, ei, bb->succs)
7238 {
7239 cfun->cfg->x_n_edges--;
7240 dest_cfun->cfg->x_n_edges++;
7241 }
7242
7243 /* Remove BB from the original basic block array. */
7244 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7245 cfun->cfg->x_n_basic_blocks--;
7246
7247 /* Grow DEST_CFUN's basic block array if needed. */
7248 cfg = dest_cfun->cfg;
7249 cfg->x_n_basic_blocks++;
7250 if (bb->index >= cfg->x_last_basic_block)
7251 cfg->x_last_basic_block = bb->index + 1;
7252
7253 old_len = vec_safe_length (cfg->x_basic_block_info);
7254 if ((unsigned) cfg->x_last_basic_block >= old_len)
7255 {
7256 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
7257 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
7258 }
7259
7260 (*cfg->x_basic_block_info)[bb->index] = bb;
7261
7262 /* Remap the variables in phi nodes. */
7263 for (gphi_iterator psi = gsi_start_phis (bb);
7264 !gsi_end_p (psi); )
7265 {
7266 gphi *phi = psi.phi ();
7267 use_operand_p use;
7268 tree op = PHI_RESULT (phi);
7269 ssa_op_iter oi;
7270 unsigned i;
7271
7272 if (virtual_operand_p (op))
7273 {
7274 /* Remove the phi nodes for virtual operands (alias analysis will be
7275 run for the new function, anyway). But replace all uses that
7276 might be outside of the region we move. */
7277 use_operand_p use_p;
7278 imm_use_iterator iter;
7279 gimple *use_stmt;
7280 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7281 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7282 SET_USE (use_p, SSA_NAME_VAR (op));
7283 remove_phi_node (&psi, true);
7284 continue;
7285 }
7286
7287 SET_PHI_RESULT (phi,
7288 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7289 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7290 {
7291 op = USE_FROM_PTR (use);
7292 if (TREE_CODE (op) == SSA_NAME)
7293 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7294 }
7295
7296 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7297 {
7298 location_t locus = gimple_phi_arg_location (phi, i);
7299 tree block = LOCATION_BLOCK (locus);
7300
7301 if (locus == UNKNOWN_LOCATION)
7302 continue;
7303 if (d->orig_block == NULL_TREE || block == d->orig_block)
7304 {
7305 locus = set_block (locus, d->new_block);
7306 gimple_phi_arg_set_location (phi, i, locus);
7307 }
7308 }
7309
7310 gsi_next (&psi);
7311 }
7312
7313 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7314 {
7315 gimple *stmt = gsi_stmt (si);
7316 struct walk_stmt_info wi;
7317
7318 memset (&wi, 0, sizeof (wi));
7319 wi.info = d;
7320 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7321
7322 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7323 {
7324 tree label = gimple_label_label (label_stmt);
7325 int uid = LABEL_DECL_UID (label);
7326
7327 gcc_assert (uid > -1);
7328
7329 old_len = vec_safe_length (cfg->x_label_to_block_map);
7330 if (old_len <= (unsigned) uid)
7331 {
7332 new_len = 3 * uid / 2 + 1;
7333 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
7334 }
7335
7336 (*cfg->x_label_to_block_map)[uid] = bb;
7337 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7338
7339 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7340
7341 if (uid >= dest_cfun->cfg->last_label_uid)
7342 dest_cfun->cfg->last_label_uid = uid + 1;
7343 }
7344
7345 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7346 remove_stmt_from_eh_lp_fn (cfun, stmt);
7347
7348 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7349 gimple_remove_stmt_histograms (cfun, stmt);
7350
7351 /* We cannot leave any operands allocated from the operand caches of
7352 the current function. */
7353 free_stmt_operands (cfun, stmt);
7354 push_cfun (dest_cfun);
7355 update_stmt (stmt);
7356 if (is_gimple_call (stmt))
7357 notice_special_calls (as_a <gcall *> (stmt));
7358 pop_cfun ();
7359 }
7360
7361 FOR_EACH_EDGE (e, ei, bb->succs)
7362 if (e->goto_locus != UNKNOWN_LOCATION)
7363 {
7364 tree block = LOCATION_BLOCK (e->goto_locus);
7365 if (d->orig_block == NULL_TREE
7366 || block == d->orig_block)
7367 e->goto_locus = set_block (e->goto_locus, d->new_block);
7368 }
7369 }
7370
7371 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7372 the outermost EH region. Use REGION as the incoming base EH region.
7373 If there is no single outermost region, return NULL and set *ALL to
7374 true. */
7375
7376 static eh_region
7377 find_outermost_region_in_block (struct function *src_cfun,
7378 basic_block bb, eh_region region,
7379 bool *all)
7380 {
7381 gimple_stmt_iterator si;
7382
7383 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7384 {
7385 gimple *stmt = gsi_stmt (si);
7386 eh_region stmt_region;
7387 int lp_nr;
7388
7389 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7390 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7391 if (stmt_region)
7392 {
7393 if (region == NULL)
7394 region = stmt_region;
7395 else if (stmt_region != region)
7396 {
7397 region = eh_region_outermost (src_cfun, stmt_region, region);
7398 if (region == NULL)
7399 {
7400 *all = true;
7401 return NULL;
7402 }
7403 }
7404 }
7405 }
7406
7407 return region;
7408 }
7409
7410 static tree
7411 new_label_mapper (tree decl, void *data)
7412 {
7413 htab_t hash = (htab_t) data;
7414 struct tree_map *m;
7415 void **slot;
7416
7417 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7418
7419 m = XNEW (struct tree_map);
7420 m->hash = DECL_UID (decl);
7421 m->base.from = decl;
7422 m->to = create_artificial_label (UNKNOWN_LOCATION);
7423 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7424 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7425 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7426
7427 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7428 gcc_assert (*slot == NULL);
7429
7430 *slot = m;
7431
7432 return m->to;
7433 }
7434
7435 /* Tree walker to replace the decls used inside value expressions by
7436 duplicates. */
7437
7438 static tree
7439 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7440 {
7441 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7442
7443 switch (TREE_CODE (*tp))
7444 {
7445 case VAR_DECL:
7446 case PARM_DECL:
7447 case RESULT_DECL:
7448 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7449 break;
7450 default:
7451 break;
7452 }
7453
7454 if (IS_TYPE_OR_DECL_P (*tp))
7455 *walk_subtrees = false;
7456
7457 return NULL;
7458 }
7459
7460 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7461 subblocks. */
7462
7463 static void
7464 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7465 tree to_context)
7466 {
7467 tree *tp, t;
7468
7469 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7470 {
7471 t = *tp;
7472 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7473 continue;
7474 replace_by_duplicate_decl (&t, vars_map, to_context);
7475 if (t != *tp)
7476 {
7477 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7478 {
7479 tree x = DECL_VALUE_EXPR (*tp);
7480 struct replace_decls_d rd = { vars_map, to_context };
7481 unshare_expr (x);
7482 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7483 SET_DECL_VALUE_EXPR (t, x);
7484 DECL_HAS_VALUE_EXPR_P (t) = 1;
7485 }
7486 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7487 *tp = t;
7488 }
7489 }
7490
7491 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7492 replace_block_vars_by_duplicates (block, vars_map, to_context);
7493 }
7494
7495 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7496 from FN1 to FN2. */
7497
7498 static void
7499 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7500 struct loop *loop)
7501 {
7502 /* Discard it from the old loop array. */
7503 (*get_loops (fn1))[loop->num] = NULL;
7504
7505 /* Place it in the new loop array, assigning it a new number. */
7506 loop->num = number_of_loops (fn2);
7507 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7508
7509 /* Recurse to children. */
7510 for (loop = loop->inner; loop; loop = loop->next)
7511 fixup_loop_arrays_after_move (fn1, fn2, loop);
7512 }
7513
7514 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7515 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7516
7517 DEBUG_FUNCTION void
7518 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7519 {
7520 basic_block bb;
7521 edge_iterator ei;
7522 edge e;
7523 bitmap bbs = BITMAP_ALLOC (NULL);
7524 int i;
7525
7526 gcc_assert (entry != NULL);
7527 gcc_assert (entry != exit);
7528 gcc_assert (bbs_p != NULL);
7529
7530 gcc_assert (bbs_p->length () > 0);
7531
7532 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7533 bitmap_set_bit (bbs, bb->index);
7534
7535 gcc_assert (bitmap_bit_p (bbs, entry->index));
7536 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7537
7538 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7539 {
7540 if (bb == entry)
7541 {
7542 gcc_assert (single_pred_p (entry));
7543 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7544 }
7545 else
7546 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7547 {
7548 e = ei_edge (ei);
7549 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7550 }
7551
7552 if (bb == exit)
7553 {
7554 gcc_assert (single_succ_p (exit));
7555 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7556 }
7557 else
7558 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7559 {
7560 e = ei_edge (ei);
7561 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7562 }
7563 }
7564
7565 BITMAP_FREE (bbs);
7566 }
7567
7568 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7569
7570 bool
7571 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7572 {
7573 bitmap release_names = (bitmap)data;
7574
7575 if (TREE_CODE (from) != SSA_NAME)
7576 return true;
7577
7578 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7579 return true;
7580 }
7581
7582 /* Return the IFN_LOOP_DIST_ALIAS call just before BB's final GIMPLE_COND, if any. */
7583
7584 static gimple *
7585 find_loop_dist_alias (basic_block bb)
7586 {
7587 gimple *g = last_stmt (bb);
7588 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7589 return NULL;
7590
7591 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7592 gsi_prev (&gsi);
7593 if (gsi_end_p (gsi))
7594 return NULL;
7595
7596 g = gsi_stmt (gsi);
7597 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7598 return g;
7599 return NULL;
7600 }
7601
7602 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7603 to VALUE and update any immediate uses of its LHS. */
7604
7605 void
7606 fold_loop_internal_call (gimple *g, tree value)
7607 {
7608 tree lhs = gimple_call_lhs (g);
7609 use_operand_p use_p;
7610 imm_use_iterator iter;
7611 gimple *use_stmt;
7612 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7613
7614 update_call_from_tree (&gsi, value);
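/* Also propagate VALUE directly to all immediate uses of the LHS. */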
7615 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7616 {
7617 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7618 SET_USE (use_p, value);
7619 update_stmt (use_stmt);
7620 }
7621 }
7622
7623 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7624 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7625 single basic block in the original CFG and the new basic block is
7626 returned. DEST_CFUN must not have a CFG yet.
7627
7628 Note that the region need not be a pure SESE region. Blocks inside
7629 the region may contain calls to abort/exit. The only restriction
7630 is that ENTRY_BB should be the only entry point and it must
7631 dominate EXIT_BB.
7632
7633 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7634 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7635 to the new function.
7636
7637 All local variables referenced in the region are assumed to be in
7638 the corresponding BLOCK_VARS and unexpanded variable lists
7639 associated with DEST_CFUN.
7640
7641 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7642 reimplement move_sese_region_to_fn by duplicating the region rather than
7643 moving it. */
7644
7645 basic_block
7646 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7647 basic_block exit_bb, tree orig_block)
7648 {
7649 vec<basic_block> bbs, dom_bbs;
7650 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7651 basic_block after, bb, *entry_pred, *exit_succ, abb;
7652 struct function *saved_cfun = cfun;
7653 int *entry_flag, *exit_flag;
7654 profile_probability *entry_prob, *exit_prob;
7655 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7656 edge e;
7657 edge_iterator ei;
7658 htab_t new_label_map;
7659 hash_map<void *, void *> *eh_map;
7660 struct loop *loop = entry_bb->loop_father;
7661 struct loop *loop0 = get_loop (saved_cfun, 0);
7662 struct move_stmt_d d;
7663
7664 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7665 region. */
7666 gcc_assert (entry_bb != exit_bb
7667 && (!exit_bb
7668 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7669
7670 /* Collect all the blocks in the region. Manually add ENTRY_BB
7671 because it won't be added by dfs_enumerate_from. */
7672 bbs.create (0);
7673 bbs.safe_push (entry_bb);
7674 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7675
7676 if (flag_checking)
7677 verify_sese (entry_bb, exit_bb, &bbs);
7678
7679 /* The blocks that used to be dominated by something in BBS will now be
7680 dominated by the new block. */
7681 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7682 bbs.address (),
7683 bbs.length ());
7684
7685 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7686 the predecessor edges to ENTRY_BB and the successor edges to
7687 EXIT_BB so that we can re-attach them to the new basic block that
7688 will replace the region. */
7689 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7690 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7691 entry_flag = XNEWVEC (int, num_entry_edges);
7692 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7693 i = 0;
7694 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7695 {
7696 entry_prob[i] = e->probability;
7697 entry_flag[i] = e->flags;
7698 entry_pred[i++] = e->src;
7699 remove_edge (e);
7700 }
7701
7702 if (exit_bb)
7703 {
7704 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7705 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7706 exit_flag = XNEWVEC (int, num_exit_edges);
7707 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7708 i = 0;
7709 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7710 {
7711 exit_prob[i] = e->probability;
7712 exit_flag[i] = e->flags;
7713 exit_succ[i++] = e->dest;
7714 remove_edge (e);
7715 }
7716 }
7717 else
7718 {
7719 num_exit_edges = 0;
7720 exit_succ = NULL;
7721 exit_flag = NULL;
7722 exit_prob = NULL;
7723 }
7724
7725 /* Switch context to the child function to initialize DEST_FN's CFG. */
7726 gcc_assert (dest_cfun->cfg == NULL);
7727 push_cfun (dest_cfun);
7728
7729 init_empty_tree_cfg ();
7730
7731 /* Initialize EH information for the new function. */
7732 eh_map = NULL;
7733 new_label_map = NULL;
7734 if (saved_cfun->eh)
7735 {
7736 eh_region region = NULL;
7737 bool all = false;
7738
7739 FOR_EACH_VEC_ELT (bbs, i, bb)
7740 {
7741 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7742 if (all)
7743 break;
7744 }
7745
7746 init_eh_for_function ();
7747 if (region != NULL || all)
7748 {
7749 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7750 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7751 new_label_mapper, new_label_map);
7752 }
7753 }
7754
7755 /* Initialize an empty loop tree. */
7756 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7757 init_loops_structure (dest_cfun, loops, 1);
7758 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7759 set_loops_for_fn (dest_cfun, loops);
7760
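/* Keep a snapshot of SAVED_CFUN's loop array; it is used below to remap
   orig_loop_num references after the moved loops have been renumbered. */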
7761 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7762
7763 /* Move the outlined loop tree part. */
7764 num_nodes = bbs.length ();
7765 FOR_EACH_VEC_ELT (bbs, i, bb)
7766 {
7767 if (bb->loop_father->header == bb)
7768 {
7769 struct loop *this_loop = bb->loop_father;
7770 struct loop *outer = loop_outer (this_loop);
7771 if (outer == loop
7772 /* If the SESE region contains some bbs ending with
7773 a noreturn call, those are considered to belong
7774 to the outermost loop in saved_cfun, rather than
7775 the entry_bb's loop_father. */
7776 || outer == loop0)
7777 {
7778 if (outer != loop)
7779 num_nodes -= this_loop->num_nodes;
7780 flow_loop_tree_node_remove (bb->loop_father);
7781 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7782 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7783 }
7784 }
7785 else if (bb->loop_father == loop0 && loop0 != loop)
7786 num_nodes--;
7787
7788 /* Remove loop exits from the outlined region. */
7789 if (loops_for_fn (saved_cfun)->exits)
7790 FOR_EACH_EDGE (e, ei, bb->succs)
7791 {
7792 struct loops *l = loops_for_fn (saved_cfun);
7793 loop_exit **slot
7794 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7795 NO_INSERT);
7796 if (slot)
7797 l->exits->clear_slot (slot);
7798 }
7799 }
7800
7801 /* Adjust the number of blocks in the tree root of the outlined part. */
7802 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7803
7804 /* Setup a mapping to be used by move_block_to_fn. */
7805 loop->aux = current_loops->tree_root;
7806 loop0->aux = current_loops->tree_root;
7807
7808 /* Fix up orig_loop_num. If the block referenced in it has been moved
7809 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7810 struct loop *dloop;
7811 signed char *moved_orig_loop_num = NULL;
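/* moved_orig_loop_num[N] counts how many loops referring to original loop
   number N were remapped because loop N itself moved to DEST_CFUN
   (saturating at 2); -1 means loop N stayed in SAVED_CFUN or is gone. */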
7812 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7813 if (dloop->orig_loop_num)
7814 {
7815 if (moved_orig_loop_num == NULL)
7816 moved_orig_loop_num
7817 = XCNEWVEC (signed char, vec_safe_length (larray));
7818 if ((*larray)[dloop->orig_loop_num] != NULL
7819 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7820 {
7821 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7822 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7823 moved_orig_loop_num[dloop->orig_loop_num]++;
7824 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7825 }
7826 else
7827 {
7828 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7829 dloop->orig_loop_num = 0;
7830 }
7831 }
7832 pop_cfun ();
7833
7834 if (moved_orig_loop_num)
7835 {
7836 FOR_EACH_VEC_ELT (bbs, i, bb)
7837 {
7838 gimple *g = find_loop_dist_alias (bb);
7839 if (g == NULL)
7840 continue;
7841
7842 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7843 gcc_assert (orig_loop_num
7844 && (unsigned) orig_loop_num < vec_safe_length (larray));
7845 if (moved_orig_loop_num[orig_loop_num] == 2)
7846 {
7847 /* If we have moved both loops with this orig_loop_num into
7848 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7849 too, update the first argument. */
7850 gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
7851 && (get_loop (saved_cfun, dloop->orig_loop_num)
7852 == NULL));
7853 tree t = build_int_cst (integer_type_node,
7854 (*larray)[dloop->orig_loop_num]->num);
7855 gimple_call_set_arg (g, 0, t);
7856 update_stmt (g);
7857 /* Make sure the following loop will not update it. */
7858 moved_orig_loop_num[orig_loop_num] = 0;
7859 }
7860 else
7861 /* Otherwise at least one of the loops stayed in saved_cfun.
7862 Remove the LOOP_DIST_ALIAS call. */
7863 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7864 }
7865 FOR_EACH_BB_FN (bb, saved_cfun)
7866 {
7867 gimple *g = find_loop_dist_alias (bb);
7868 if (g == NULL)
7869 continue;
7870 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7871 gcc_assert (orig_loop_num
7872 && (unsigned) orig_loop_num < vec_safe_length (larray));
7873 if (moved_orig_loop_num[orig_loop_num])
7874 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7875 of the corresponding loops was moved, remove it. */
7876 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7877 }
7878 XDELETEVEC (moved_orig_loop_num);
7879 }
7880 ggc_free (larray);
7881
7882 /* Move blocks from BBS into DEST_CFUN. */
7883 gcc_assert (bbs.length () >= 2);
7884 after = dest_cfun->cfg->x_entry_block_ptr;
7885 hash_map<tree, tree> vars_map;
7886
7887 memset (&d, 0, sizeof (d));
7888 d.orig_block = orig_block;
7889 d.new_block = DECL_INITIAL (dest_cfun->decl);
7890 d.from_context = cfun->decl;
7891 d.to_context = dest_cfun->decl;
7892 d.vars_map = &vars_map;
7893 d.new_label_map = new_label_map;
7894 d.eh_map = eh_map;
7895 d.remap_decls_p = true;
7896
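/* In SSA form, create default definitions for DEST_CFUN's arguments and
   record them in VARS_MAP. */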
7897 if (gimple_in_ssa_p (cfun))
7898 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7899 {
7900 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7901 set_ssa_default_def (dest_cfun, arg, narg);
7902 vars_map.put (arg, narg);
7903 }
7904
7905 FOR_EACH_VEC_ELT (bbs, i, bb)
7906 {
7907 /* No need to update edge counts on the last block. It has
7908 already been updated earlier when we detached the region from
7909 the original CFG. */
7910 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7911 after = bb;
7912 }
7913
7914 loop->aux = NULL;
7915 loop0->aux = NULL;
7916 /* Loop sizes are no longer correct, fix them up. */
7917 loop->num_nodes -= num_nodes;
7918 for (struct loop *outer = loop_outer (loop);
7919 outer; outer = loop_outer (outer))
7920 outer->num_nodes -= num_nodes;
7921 loop0->num_nodes -= bbs.length () - num_nodes;
7922
7923 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7924 {
7925 struct loop *aloop;
7926 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7927 if (aloop != NULL)
7928 {
7929 if (aloop->simduid)
7930 {
7931 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7932 d.to_context);
7933 dest_cfun->has_simduid_loops = true;
7934 }
7935 if (aloop->force_vectorize)
7936 dest_cfun->has_force_vectorize_loops = true;
7937 }
7938 }
7939
7940 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7941 if (orig_block)
7942 {
7943 tree block;
7944 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7945 == NULL_TREE);
7946 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7947 = BLOCK_SUBBLOCKS (orig_block);
7948 for (block = BLOCK_SUBBLOCKS (orig_block);
7949 block; block = BLOCK_CHAIN (block))
7950 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7951 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7952 }
7953
7954 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7955 &vars_map, dest_cfun->decl);
7956
7957 if (new_label_map)
7958 htab_delete (new_label_map);
7959 if (eh_map)
7960 delete eh_map;
7961
7962 if (gimple_in_ssa_p (cfun))
7963 {
7964 /* We need to release ssa-names in a defined order, so first find them,
7965 and then iterate in ascending version order. */
7966 bitmap release_names = BITMAP_ALLOC (NULL);
7967 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7968 bitmap_iterator bi;
7969 unsigned i;
7970 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7971 release_ssa_name (ssa_name (i));
7972 BITMAP_FREE (release_names);
7973 }
7974
7975 /* Rewire the entry and exit blocks. The successor to the entry
7976 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7977 the child function. Similarly, the predecessor of DEST_FN's
7978 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7979 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7980 various CFG manipulation function get to the right CFG.
7981
7982 FIXME, this is silly. The CFG ought to become a parameter to
7983 these helpers. */
7984 push_cfun (dest_cfun);
7985 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7986 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7987 if (exit_bb)
7988 {
7989 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7990 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7991 }
7992 else
7993 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7994 pop_cfun ();
7995
7996 /* Back in the original function, the SESE region has disappeared,
7997 create a new basic block in its place. */
7998 bb = create_empty_bb (entry_pred[0]);
7999 if (current_loops)
8000 add_bb_to_loop (bb, loop);
8001 for (i = 0; i < num_entry_edges; i++)
8002 {
8003 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8004 e->probability = entry_prob[i];
8005 }
8006
8007 for (i = 0; i < num_exit_edges; i++)
8008 {
8009 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8010 e->probability = exit_prob[i];
8011 }
8012
8013 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8014 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8015 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8016 dom_bbs.release ();
8017
8018 if (exit_bb)
8019 {
8020 free (exit_prob);
8021 free (exit_flag);
8022 free (exit_succ);
8023 }
8024 free (entry_prob);
8025 free (entry_flag);
8026 free (entry_pred);
8027 bbs.release ();
8028
8029 return bb;
8030 }
8031
8032 /* Dump default def DEF to file FILE using FLAGS and indentation
8033 SPC. */
8034
8035 static void
8036 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8037 {
8038 for (int i = 0; i < spc; ++i)
8039 fprintf (file, " ");
8040 dump_ssaname_info_to_file (file, def, spc);
8041
8042 print_generic_expr (file, TREE_TYPE (def), flags);
8043 fprintf (file, " ");
8044 print_generic_expr (file, def, flags);
8045 fprintf (file, " = ");
8046 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8047 fprintf (file, ";\n");
8048 }
8049
8050 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8051
8052 static void
8053 print_no_sanitize_attr_value (FILE *file, tree value)
8054 {
8055 unsigned int flags = tree_to_uhwi (value);
8056 bool first = true;
8057 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8058 {
8059 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8060 {
8061 if (!first)
8062 fprintf (file, " | ");
8063 fprintf (file, "%s", sanitizer_opts[i].name);
8064 first = false;
8065 }
8066 }
8067 }
8068
8069 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
8070 */
8071
8072 void
8073 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8074 {
8075 tree arg, var, old_current_fndecl = current_function_decl;
8076 struct function *dsf;
8077 bool ignore_topmost_bind = false, any_var = false;
8078 basic_block bb;
8079 tree chain;
8080 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8081 && decl_is_tm_clone (fndecl));
8082 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8083
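/* Print the function's attributes, if any. */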
8084 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
8085 {
8086 fprintf (file, "__attribute__((");
8087
8088 bool first = true;
8089 tree chain;
8090 for (chain = DECL_ATTRIBUTES (fndecl); chain;
8091 first = false, chain = TREE_CHAIN (chain))
8092 {
8093 if (!first)
8094 fprintf (file, ", ");
8095
8096 tree name = get_attribute_name (chain);
8097 print_generic_expr (file, name, dump_flags);
8098 if (TREE_VALUE (chain) != NULL_TREE)
8099 {
8100 fprintf (file, " (");
8101
8102 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8103 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8104 else
8105 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8106 fprintf (file, ")");
8107 }
8108 }
8109
8110 fprintf (file, "))\n");
8111 }
8112
8113 current_function_decl = fndecl;
8114 if (flags & TDF_GIMPLE)
8115 {
8116 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8117 dump_flags | TDF_SLIM);
8118 fprintf (file, " __GIMPLE ()\n%s (", function_name (fun));
8119 }
8120 else
8121 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
8122
8123 arg = DECL_ARGUMENTS (fndecl);
8124 while (arg)
8125 {
8126 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8127 fprintf (file, " ");
8128 print_generic_expr (file, arg, dump_flags);
8129 if (DECL_CHAIN (arg))
8130 fprintf (file, ", ");
8131 arg = DECL_CHAIN (arg);
8132 }
8133 fprintf (file, ")\n");
8134
8135 dsf = DECL_STRUCT_FUNCTION (fndecl);
8136 if (dsf && (flags & TDF_EH))
8137 dump_eh_tree (file, dsf);
8138
8139 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8140 {
8141 dump_node (fndecl, TDF_SLIM | flags, file);
8142 current_function_decl = old_current_fndecl;
8143 return;
8144 }
8145
8146 /* When GIMPLE is lowered, the variables are no longer available in
8147 BIND_EXPRs, so display them separately. */
8148 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8149 {
8150 unsigned ix;
8151 ignore_topmost_bind = true;
8152
8153 fprintf (file, "{\n");
8154 if (gimple_in_ssa_p (fun)
8155 && (flags & TDF_ALIAS))
8156 {
8157 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8158 arg = DECL_CHAIN (arg))
8159 {
8160 tree def = ssa_default_def (fun, arg);
8161 if (def)
8162 dump_default_def (file, def, 2, flags);
8163 }
8164
8165 tree res = DECL_RESULT (fun->decl);
8166 if (res != NULL_TREE
8167 && DECL_BY_REFERENCE (res))
8168 {
8169 tree def = ssa_default_def (fun, res);
8170 if (def)
8171 dump_default_def (file, def, 2, flags);
8172 }
8173
8174 tree static_chain = fun->static_chain_decl;
8175 if (static_chain != NULL_TREE)
8176 {
8177 tree def = ssa_default_def (fun, static_chain);
8178 if (def)
8179 dump_default_def (file, def, 2, flags);
8180 }
8181 }
8182
8183 if (!vec_safe_is_empty (fun->local_decls))
8184 FOR_EACH_LOCAL_DECL (fun, ix, var)
8185 {
8186 print_generic_decl (file, var, flags);
8187 fprintf (file, "\n");
8188
8189 any_var = true;
8190 }
8191
8192 tree name;
8193
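/* Dump SSA names that have no associated variable. */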
8194 if (gimple_in_ssa_p (cfun))
8195 FOR_EACH_SSA_NAME (ix, name, cfun)
8196 {
8197 if (!SSA_NAME_VAR (name))
8198 {
8199 fprintf (file, " ");
8200 print_generic_expr (file, TREE_TYPE (name), flags);
8201 fprintf (file, " ");
8202 print_generic_expr (file, name, flags);
8203 fprintf (file, ";\n");
8204
8205 any_var = true;
8206 }
8207 }
8208 }
8209
8210 if (fun && fun->decl == fndecl
8211 && fun->cfg
8212 && basic_block_info_for_fn (fun))
8213 {
8214 /* If the CFG has been built, emit a CFG-based dump. */
8215 if (!ignore_topmost_bind)
8216 fprintf (file, "{\n");
8217
8218 if (any_var && n_basic_blocks_for_fn (fun))
8219 fprintf (file, "\n");
8220
8221 FOR_EACH_BB_FN (bb, fun)
8222 dump_bb (file, bb, 2, flags);
8223
8224 fprintf (file, "}\n");
8225 }
8226 else if (fun->curr_properties & PROP_gimple_any)
8227 {
8228 /* The function is now in GIMPLE form but the CFG has not been
8229 built yet. Emit the single sequence of GIMPLE statements
8230 that make up its body. */
8231 gimple_seq body = gimple_body (fndecl);
8232
8233 if (gimple_seq_first_stmt (body)
8234 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8235 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8236 print_gimple_seq (file, body, 0, flags);
8237 else
8238 {
8239 if (!ignore_topmost_bind)
8240 fprintf (file, "{\n");
8241
8242 if (any_var)
8243 fprintf (file, "\n");
8244
8245 print_gimple_seq (file, body, 2, flags);
8246 fprintf (file, "}\n");
8247 }
8248 }
8249 else
8250 {
8251 int indent;
8252
8253 /* Make a tree based dump. */
8254 chain = DECL_SAVED_TREE (fndecl);
8255 if (chain && TREE_CODE (chain) == BIND_EXPR)
8256 {
8257 if (ignore_topmost_bind)
8258 {
8259 chain = BIND_EXPR_BODY (chain);
8260 indent = 2;
8261 }
8262 else
8263 indent = 0;
8264 }
8265 else
8266 {
8267 if (!ignore_topmost_bind)
8268 {
8269 fprintf (file, "{\n");
8270 /* No topmost bind, pretend it's ignored for later. */
8271 ignore_topmost_bind = true;
8272 }
8273 indent = 2;
8274 }
8275
8276 if (any_var)
8277 fprintf (file, "\n");
8278
8279 print_generic_stmt_indented (file, chain, flags, indent);
8280 if (ignore_topmost_bind)
8281 fprintf (file, "}\n");
8282 }
8283
8284 if (flags & TDF_ENUMERATE_LOCALS)
8285 dump_enumerated_decls (file, flags);
8286 fprintf (file, "\n\n");
8287
8288 current_function_decl = old_current_fndecl;
8289 }
8290
8291 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
8292
8293 DEBUG_FUNCTION void
8294 debug_function (tree fn, dump_flags_t flags)
8295 {
8296 dump_function_to_file (fn, stderr, flags);
8297 }
8298
8299
8300 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8301
8302 static void
8303 print_pred_bbs (FILE *file, basic_block bb)
8304 {
8305 edge e;
8306 edge_iterator ei;
8307
8308 FOR_EACH_EDGE (e, ei, bb->preds)
8309 fprintf (file, "bb_%d ", e->src->index);
8310 }
8311
8312
8313 /* Print on FILE the indexes for the successors of basic_block BB. */
8314
8315 static void
8316 print_succ_bbs (FILE *file, basic_block bb)
8317 {
8318 edge e;
8319 edge_iterator ei;
8320
8321 FOR_EACH_EDGE (e, ei, bb->succs)
8322 fprintf (file, "bb_%d ", e->dest->index);
8323 }
8324
8325 /* Print to FILE the basic block BB, according to the VERBOSITY level. */
8326
8327 void
8328 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8329 {
8330 char *s_indent = (char *) alloca ((size_t) indent + 1);
8331 memset ((void *) s_indent, ' ', (size_t) indent);
8332 s_indent[indent] = '\0';
8333
8334 /* Print basic_block's header. */
8335 if (verbosity >= 2)
8336 {
8337 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8338 print_pred_bbs (file, bb);
8339 fprintf (file, "}, succs = {");
8340 print_succ_bbs (file, bb);
8341 fprintf (file, "})\n");
8342 }
8343
8344 /* Print basic_block's body. */
8345 if (verbosity >= 3)
8346 {
8347 fprintf (file, "%s {\n", s_indent);
8348 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8349 fprintf (file, "%s }\n", s_indent);
8350 }
8351 }
8352
8353 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
8354
8355 /* Pretty print LOOP on FILE, indented INDENT spaces. Depending on the
8356 VERBOSITY level, this outputs either the contents of the loop or just
8357 its structure. */
8358
8359 static void
8360 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
8361 {
8362 char *s_indent;
8363 basic_block bb;
8364
8365 if (loop == NULL)
8366 return;
8367
8368 s_indent = (char *) alloca ((size_t) indent + 1);
8369 memset ((void *) s_indent, ' ', (size_t) indent);
8370 s_indent[indent] = '\0';
8371
8372 /* Print loop's header. */
8373 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8374 if (loop->header)
8375 fprintf (file, "header = %d", loop->header->index);
8376 else
8377 {
8378 fprintf (file, "deleted)\n");
8379 return;
8380 }
8381 if (loop->latch)
8382 fprintf (file, ", latch = %d", loop->latch->index);
8383 else
8384 fprintf (file, ", multiple latches");
8385 fprintf (file, ", niter = ");
8386 print_generic_expr (file, loop->nb_iterations);
8387
8388 if (loop->any_upper_bound)
8389 {
8390 fprintf (file, ", upper_bound = ");
8391 print_decu (loop->nb_iterations_upper_bound, file);
8392 }
8393 if (loop->any_likely_upper_bound)
8394 {
8395 fprintf (file, ", likely_upper_bound = ");
8396 print_decu (loop->nb_iterations_likely_upper_bound, file);
8397 }
8398
8399 if (loop->any_estimate)
8400 {
8401 fprintf (file, ", estimate = ");
8402 print_decu (loop->nb_iterations_estimate, file);
8403 }
8404 if (loop->unroll)
8405 fprintf (file, ", unroll = %d", loop->unroll);
8406 fprintf (file, ")\n");
8407
8408 /* Print loop's body. */
8409 if (verbosity >= 1)
8410 {
8411 fprintf (file, "%s{\n", s_indent);
8412 FOR_EACH_BB_FN (bb, cfun)
8413 if (bb->loop_father == loop)
8414 print_loops_bb (file, bb, indent, verbosity);
8415
8416 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8417 fprintf (file, "%s}\n", s_indent);
8418 }
8419 }
8420
8421 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8422 spaces. Depending on the VERBOSITY level, this outputs either the
8423 contents of each loop or just its structure. */
8424
8425 static void
8426 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
8427 int verbosity)
8428 {
8429 if (loop == NULL)
8430 return;
8431
8432 print_loop (file, loop, indent, verbosity);
8433 print_loop_and_siblings (file, loop->next, indent, verbosity);
8434 }
8435
8436 /* Follow a CFG edge from the entry point of the program, and on entry
8437 of a loop, pretty print the loop structure on FILE. */
8438
8439 void
8440 print_loops (FILE *file, int verbosity)
8441 {
8442 basic_block bb;
8443
8444 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8445 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8446 if (bb && bb->loop_father)
8447 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8448 }
8449
8450 /* Dump a loop. */
8451
8452 DEBUG_FUNCTION void
8453 debug (struct loop &ref)
8454 {
8455 print_loop (stderr, &ref, 0, /*verbosity*/0);
8456 }
8457
8458 DEBUG_FUNCTION void
8459 debug (struct loop *ptr)
8460 {
8461 if (ptr)
8462 debug (*ptr);
8463 else
8464 fprintf (stderr, "<nil>\n");
8465 }
8466
8467 /* Dump a loop verbosely. */
8468
8469 DEBUG_FUNCTION void
8470 debug_verbose (struct loop &ref)
8471 {
8472 print_loop (stderr, &ref, 0, /*verbosity*/3);
8473 }
8474
8475 DEBUG_FUNCTION void
8476 debug_verbose (struct loop *ptr)
8477 {
8478 if (ptr)
8479 debug_verbose (*ptr);
8480 else
8481 fprintf (stderr, "<nil>\n");
8482 }
8483
8484
8485 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8486
8487 DEBUG_FUNCTION void
8488 debug_loops (int verbosity)
8489 {
8490 print_loops (stderr, verbosity);
8491 }
8492
8493 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8494
8495 DEBUG_FUNCTION void
8496 debug_loop (struct loop *loop, int verbosity)
8497 {
8498 print_loop (stderr, loop, 0, verbosity);
8499 }
8500
8501 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8502 level. */
8503
8504 DEBUG_FUNCTION void
8505 debug_loop_num (unsigned num, int verbosity)
8506 {
8507 debug_loop (get_loop (cfun, num), verbosity);
8508 }
8509
8510 /* Return true if BB ends with a call, possibly followed by some
8511 instructions that must stay with the call. Return false,
8512 otherwise. */
8513
8514 static bool
8515 gimple_block_ends_with_call_p (basic_block bb)
8516 {
8517 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8518 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8519 }
8520
8521
8522 /* Return true if BB ends with a conditional branch. Return false,
8523 otherwise. */
8524
8525 static bool
8526 gimple_block_ends_with_condjump_p (const_basic_block bb)
8527 {
8528 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8529 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8530 }
8531
8532
8533 /* Return true if statement T may terminate execution of BB in ways not
8534 explicitly represented in the CFG. */
8535
8536 bool
8537 stmt_can_terminate_bb_p (gimple *t)
8538 {
8539 tree fndecl = NULL_TREE;
8540 int call_flags = 0;
8541
8542 /* An EH exception not handled internally terminates execution of the whole
8543 function. */
8544 if (stmt_can_throw_external (t))
8545 return true;
8546
8547 /* NORETURN and LONGJMP calls already have an edge to exit.
8548 CONST and PURE calls do not need one.
8549 We don't currently check for CONST and PURE here, although
8550 it would be a good idea, because those attributes are
8551 figured out from the RTL in mark_constant_function, and
8552 the counter incrementation code from -fprofile-arcs
8553 leads to different results from -fbranch-probabilities. */
8554 if (is_gimple_call (t))
8555 {
8556 fndecl = gimple_call_fndecl (t);
8557 call_flags = gimple_call_flags (t);
8558 }
8559
8560 if (is_gimple_call (t)
8561 && fndecl
8562 && DECL_BUILT_IN (fndecl)
8563 && (call_flags & ECF_NOTHROW)
8564 && !(call_flags & ECF_RETURNS_TWICE)
8565 /* fork() doesn't really return twice, but the effect of
8566 wrapping it in __gcov_fork() which calls __gcov_flush()
8567 and clears the counters before forking has the same
8568 effect as returning twice. Force a fake edge. */
8569 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8570 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8571 return false;
8572
8573 if (is_gimple_call (t))
8574 {
8575 edge_iterator ei;
8576 edge e;
8577 basic_block bb;
8578
8579 if (call_flags & (ECF_PURE | ECF_CONST)
8580 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8581 return false;
8582
8583 /* A function call may do a longjmp, terminate the program or do other
8584 things. Special-case noreturn calls that have non-abnormal edges out, as
8585 in this case the fact is sufficiently represented by the lack of edges out of T. */
8586 if (!(call_flags & ECF_NORETURN))
8587 return true;
8588
8589 bb = gimple_bb (t);
8590 FOR_EACH_EDGE (e, ei, bb->succs)
8591 if ((e->flags & EDGE_FAKE) == 0)
8592 return true;
8593 }
8594
8595 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8596 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8597 return true;
8598
8599 return false;
8600 }
8601
8602
8603 /* Add fake edges to the function exit for any non constant and non
8604 noreturn calls (or noreturn calls with EH/abnormal edges),
8605 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8606 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8607 that were split.
8608
8609 The goal is to expose cases in which entering a basic block does
8610 not imply that all subsequent instructions must be executed. */
8611
8612 static int
8613 gimple_flow_call_edges_add (sbitmap blocks)
8614 {
8615 int i;
8616 int blocks_split = 0;
8617 int last_bb = last_basic_block_for_fn (cfun);
8618 bool check_last_block = false;
8619
8620 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8621 return 0;
8622
8623 if (! blocks)
8624 check_last_block = true;
8625 else
8626 check_last_block = bitmap_bit_p (blocks,
8627 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8628
8629 /* In the last basic block, before epilogue generation, there will be
8630 a fallthru edge to EXIT. Special care is required if the last insn
8631 of the last basic block is a call because make_edge folds duplicate
8632 edges, which would result in the fallthru edge also being marked
8633 fake, which would result in the fallthru edge being removed by
8634 remove_fake_edges, which would result in an invalid CFG.
8635
8636 Moreover, we can't elide the outgoing fake edge, since the block
8637 profiler needs to take this into account in order to solve the minimal
8638 spanning tree in the case that the call doesn't return.
8639
8640 Handle this by adding a dummy instruction in a new last basic block. */
8641 if (check_last_block)
8642 {
8643 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8644 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8645 gimple *t = NULL;
8646
8647 if (!gsi_end_p (gsi))
8648 t = gsi_stmt (gsi);
8649
8650 if (t && stmt_can_terminate_bb_p (t))
8651 {
8652 edge e;
8653
8654 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8655 if (e)
8656 {
8657 gsi_insert_on_edge (e, gimple_build_nop ());
8658 gsi_commit_edge_inserts ();
8659 }
8660 }
8661 }
8662
8663 /* Now add fake edges to the function exit for any non constant
8664 calls since there is no way that we can determine if they will
8665 return or not... */
8666 for (i = 0; i < last_bb; i++)
8667 {
8668 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8669 gimple_stmt_iterator gsi;
8670 gimple *stmt, *last_stmt;
8671
8672 if (!bb)
8673 continue;
8674
8675 if (blocks && !bitmap_bit_p (blocks, i))
8676 continue;
8677
8678 gsi = gsi_last_nondebug_bb (bb);
8679 if (!gsi_end_p (gsi))
8680 {
8681 last_stmt = gsi_stmt (gsi);
8682 do
8683 {
8684 stmt = gsi_stmt (gsi);
8685 if (stmt_can_terminate_bb_p (stmt))
8686 {
8687 edge e;
8688
8689 /* The handling above of the final block before the
8690 epilogue should be enough to verify that there is
8691 no edge to the exit block in CFG already.
8692 Calling make_edge in such case would cause us to
8693 mark that edge as fake and remove it later. */
8694 if (flag_checking && stmt == last_stmt)
8695 {
8696 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8697 gcc_assert (e == NULL);
8698 }
8699
8700 /* Note that the following may create a new basic block
8701 and renumber the existing basic blocks. */
8702 if (stmt != last_stmt)
8703 {
8704 e = split_block (bb, stmt);
8705 if (e)
8706 blocks_split++;
8707 }
8708 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8709 e->probability = profile_probability::guessed_never ();
8710 }
8711 gsi_prev (&gsi);
8712 }
8713 while (!gsi_end_p (gsi));
8714 }
8715 }
8716
8717 if (blocks_split)
8718 checking_verify_flow_info ();
8719
8720 return blocks_split;
8721 }
8722
8723 /* Removes edge E and all the blocks dominated by it, and updates dominance
8724 information. The IL in E->src needs to be updated separately.
8725 If dominance info is not available, only the edge E is removed. */
8726
8727 void
8728 remove_edge_and_dominated_blocks (edge e)
8729 {
8730 vec<basic_block> bbs_to_remove = vNULL;
8731 vec<basic_block> bbs_to_fix_dom = vNULL;
8732 edge f;
8733 edge_iterator ei;
8734 bool none_removed = false;
8735 unsigned i;
8736 basic_block bb, dbb;
8737 bitmap_iterator bi;
8738
8739 /* If we are removing a path inside a non-root loop, that may change the
8740 loop ownership of blocks or remove loops. Mark loops for fixup. */
8741 if (current_loops
8742 && loop_outer (e->src->loop_father) != NULL
8743 && e->src->loop_father == e->dest->loop_father)
8744 loops_state_set (LOOPS_NEED_FIXUP);
8745
8746 if (!dom_info_available_p (CDI_DOMINATORS))
8747 {
8748 remove_edge (e);
8749 return;
8750 }
8751
8752 /* No updating is needed for edges to exit. */
8753 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8754 {
8755 if (cfgcleanup_altered_bbs)
8756 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8757 remove_edge (e);
8758 return;
8759 }
8760
8761 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8762 that is not dominated by E->dest, then this set is empty. Otherwise,
8763 all the basic blocks dominated by E->dest are removed.
8764
8765 Also, to DF_IDOM we store the immediate dominators of the blocks in
8766 the dominance frontier of E (i.e., of the successors of the
8767 removed blocks, if there are any, and of E->dest otherwise). */
8768 FOR_EACH_EDGE (f, ei, e->dest->preds)
8769 {
8770 if (f == e)
8771 continue;
8772
8773 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8774 {
8775 none_removed = true;
8776 break;
8777 }
8778 }
8779
8780 auto_bitmap df, df_idom;
8781 if (none_removed)
8782 bitmap_set_bit (df_idom,
8783 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8784 else
8785 {
8786 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8787 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8788 {
8789 FOR_EACH_EDGE (f, ei, bb->succs)
8790 {
8791 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8792 bitmap_set_bit (df, f->dest->index);
8793 }
8794 }
8795 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8796 bitmap_clear_bit (df, bb->index);
8797
8798 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8799 {
8800 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8801 bitmap_set_bit (df_idom,
8802 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8803 }
8804 }
8805
8806 if (cfgcleanup_altered_bbs)
8807 {
8808 /* Record the set of the altered basic blocks. */
8809 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8810 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8811 }
8812
8813 /* Remove E and the cancelled blocks. */
8814 if (none_removed)
8815 remove_edge (e);
8816 else
8817 {
8818 /* Walk backwards so as to get a chance to substitute all
8819 released DEFs into debug stmts. See
8820 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8821 details. */
8822 for (i = bbs_to_remove.length (); i-- > 0; )
8823 delete_basic_block (bbs_to_remove[i]);
8824 }
8825
8826 /* Update the dominance information. The immediate dominator may change only
8827 for blocks whose immediate dominator belongs to DF_IDOM:
8828
8829 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8830 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8831 Z dominates X after the removal. Before removal, there exists a path P
8832 from Y to X that avoids Z. Let F be the last edge on P that is
8833 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8834 dominates W, and because of P, Z does not dominate W), and W belongs to
8835 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8836 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8837 {
8838 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8839 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8840 dbb;
8841 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8842 bbs_to_fix_dom.safe_push (dbb);
8843 }
8844
8845 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8846
8847 bbs_to_remove.release ();
8848 bbs_to_fix_dom.release ();
8849 }
8850
8851 /* Purge dead EH edges from basic block BB. */
8852
8853 bool
8854 gimple_purge_dead_eh_edges (basic_block bb)
8855 {
8856 bool changed = false;
8857 edge e;
8858 edge_iterator ei;
8859 gimple *stmt = last_stmt (bb);
8860
8861 if (stmt && stmt_can_throw_internal (stmt))
8862 return false;
8863
8864 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8865 {
8866 if (e->flags & EDGE_EH)
8867 {
8868 remove_edge_and_dominated_blocks (e);
8869 changed = true;
8870 }
8871 else
8872 ei_next (&ei);
8873 }
8874
8875 return changed;
8876 }
8877
8878 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8879
8880 bool
8881 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8882 {
8883 bool changed = false;
8884 unsigned i;
8885 bitmap_iterator bi;
8886
8887 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8888 {
8889 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8890
8891 /* Earlier gimple_purge_dead_eh_edges could have removed
8892 this basic block already. */
8893 gcc_assert (bb || changed);
8894 if (bb != NULL)
8895 changed |= gimple_purge_dead_eh_edges (bb);
8896 }
8897
8898 return changed;
8899 }
8900
8901 /* Purge dead abnormal call edges from basic block BB. */
8902
8903 bool
8904 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8905 {
8906 bool changed = false;
8907 edge e;
8908 edge_iterator ei;
8909 gimple *stmt = last_stmt (bb);
8910
8911 if (!cfun->has_nonlocal_label
8912 && !cfun->calls_setjmp)
8913 return false;
8914
8915 if (stmt && stmt_can_make_abnormal_goto (stmt))
8916 return false;
8917
8918 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8919 {
8920 if (e->flags & EDGE_ABNORMAL)
8921 {
8922 if (e->flags & EDGE_FALLTHRU)
8923 e->flags &= ~EDGE_ABNORMAL;
8924 else
8925 remove_edge_and_dominated_blocks (e);
8926 changed = true;
8927 }
8928 else
8929 ei_next (&ei);
8930 }
8931
8932 return changed;
8933 }
8934
8935 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8936
8937 bool
8938 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8939 {
8940 bool changed = false;
8941 unsigned i;
8942 bitmap_iterator bi;
8943
8944 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8945 {
8946 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8947
8948 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8949 this basic block already. */
8950 gcc_assert (bb || changed);
8951 if (bb != NULL)
8952 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8953 }
8954
8955 return changed;
8956 }
8957
8958 /* This function is called whenever a new edge is created or
8959 redirected. */
8960
8961 static void
8962 gimple_execute_on_growing_pred (edge e)
8963 {
8964 basic_block bb = e->dest;
8965
8966 if (!gimple_seq_empty_p (phi_nodes (bb)))
8967 reserve_phi_args_for_new_edge (bb);
8968 }
8969
8970 /* This function is called immediately before edge E is removed from
8971 the edge vector E->dest->preds. */
8972
8973 static void
8974 gimple_execute_on_shrinking_pred (edge e)
8975 {
8976 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8977 remove_phi_args (e);
8978 }
8979
8980 /*---------------------------------------------------------------------------
8981 Helper functions for Loop versioning
8982 ---------------------------------------------------------------------------*/
8983
8984 /* Adjust phi nodes for the 'first' basic block. The 'second' basic block is
8985 a copy of 'first', and both are dominated by the 'new_head' basic block.
8986 When 'new_head' was created by splitting 'second's incoming edge, the edge
8987 from 'new_head' to 'second' received phi arguments from split_edge ().
8988 Later, an additional edge 'e' was created to connect 'new_head' and 'first'.
8989 This routine adds, on that additional edge 'e', the phi args that the
8990 'new_head' to 'second' edge received as part of the edge splitting. */
8991
8992 static void
8993 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8994 basic_block new_head, edge e)
8995 {
8996 gphi *phi1, *phi2;
8997 gphi_iterator psi1, psi2;
8998 tree def;
8999 edge e2 = find_edge (new_head, second);
9000
9001 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9002 edge, we should always have an edge from NEW_HEAD to SECOND. */
9003 gcc_assert (e2 != NULL);
9004
9005 /* Browse all 'second' basic block phi nodes and add phi args to
9006 edge 'e' for 'first' head. PHI args are always in correct order. */
9007
9008 for (psi2 = gsi_start_phis (second),
9009 psi1 = gsi_start_phis (first);
9010 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9011 gsi_next (&psi2), gsi_next (&psi1))
9012 {
9013 phi1 = psi1.phi ();
9014 phi2 = psi2.phi ();
9015 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9016 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9017 }
9018 }
9019
9020
9021 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
9022 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
9023 the destination of the ELSE part. */
9024
9025 static void
9026 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9027 basic_block second_head ATTRIBUTE_UNUSED,
9028 basic_block cond_bb, void *cond_e)
9029 {
9030 gimple_stmt_iterator gsi;
9031 gimple *new_cond_expr;
9032 tree cond_expr = (tree) cond_e;
9033 edge e0;
9034
9035 /* Build new conditional expr */
9036 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9037 NULL_TREE, NULL_TREE);
9038
9039 /* Add new cond in cond_bb. */
9040 gsi = gsi_last_bb (cond_bb);
9041 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9042
9043 /* Adjust edges appropriately to connect new head with first head
9044 as well as second head. */
9045 e0 = single_succ_edge (cond_bb);
9046 e0->flags &= ~EDGE_FALLTHRU;
9047 e0->flags |= EDGE_FALSE_VALUE;
9048 }
9049
9050
9051 /* Do book-keeping of basic block BB for the profile consistency checker.
9052 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
9053 then do post-pass accounting. Store the counts in RECORD. */
9054 static void
9055 gimple_account_profile_record (basic_block bb, int after_pass,
9056 struct profile_record *record)
9057 {
9058 gimple_stmt_iterator i;
9059 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
9060 {
9061 record->size[after_pass]
9062 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9063 if (bb->count.initialized_p ())
9064 record->time[after_pass]
9065 += estimate_num_insns (gsi_stmt (i),
9066 &eni_time_weights) * bb->count.to_gcov_type ();
9067 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
9068 record->time[after_pass]
9069 += estimate_num_insns (gsi_stmt (i),
9070 &eni_time_weights) * bb->count.to_frequency (cfun);
9071 }
9072 }
9073
9074 struct cfg_hooks gimple_cfg_hooks = {
9075 "gimple",
9076 gimple_verify_flow_info,
9077 gimple_dump_bb, /* dump_bb */
9078 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9079 create_bb, /* create_basic_block */
9080 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9081 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9082 gimple_can_remove_branch_p, /* can_remove_branch_p */
9083 remove_bb, /* delete_basic_block */
9084 gimple_split_block, /* split_block */
9085 gimple_move_block_after, /* move_block_after */
9086 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9087 gimple_merge_blocks, /* merge_blocks */
9088 gimple_predict_edge, /* predict_edge */
9089 gimple_predicted_by_p, /* predicted_by_p */
9090 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9091 gimple_duplicate_bb, /* duplicate_block */
9092 gimple_split_edge, /* split_edge */
9093 gimple_make_forwarder_block, /* make_forwarder_block */
9094 NULL, /* tidy_fallthru_edge */
9095 NULL, /* force_nonfallthru */
9096 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9097 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9098 gimple_flow_call_edges_add, /* flow_call_edges_add */
9099 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9100 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9101 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
9102 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9103 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9104 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9105 flush_pending_stmts, /* flush_pending_stmts */
9106 gimple_empty_block_p, /* block_empty_p */
9107 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9108 gimple_account_profile_record,
9109 };
9110
9111
9112 /* Split all critical edges. */
9113
9114 unsigned int
9115 split_critical_edges (void)
9116 {
9117 basic_block bb;
9118 edge e;
9119 edge_iterator ei;
9120
9121 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9122 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9123 mappings around the calls to split_edge. */
9124 start_recording_case_labels ();
9125 FOR_ALL_BB_FN (bb, cfun)
9126 {
9127 FOR_EACH_EDGE (e, ei, bb->succs)
9128 {
9129 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9130 split_edge (e);
9131 /* PRE inserts statements to edges and expects that
9132 since split_critical_edges was done beforehand, committing edge
9133 insertions will not split more edges. In addition to critical
9134 edges we must split edges that have multiple successors and
9135 end by control flow statements, such as RESX.
9136 Go ahead and split them too. This matches the logic in
9137 gimple_find_edge_insert_loc. */
9138 else if ((!single_pred_p (e->dest)
9139 || !gimple_seq_empty_p (phi_nodes (e->dest))
9140 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9141 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9142 && !(e->flags & EDGE_ABNORMAL))
9143 {
9144 gimple_stmt_iterator gsi;
9145
9146 gsi = gsi_last_bb (e->src);
9147 if (!gsi_end_p (gsi)
9148 && stmt_ends_bb_p (gsi_stmt (gsi))
9149 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9150 && !gimple_call_builtin_p (gsi_stmt (gsi),
9151 BUILT_IN_RETURN)))
9152 split_edge (e);
9153 }
9154 }
9155 }
9156 end_recording_case_labels ();
9157 return 0;
9158 }
9159
9160 namespace {
9161
9162 const pass_data pass_data_split_crit_edges =
9163 {
9164 GIMPLE_PASS, /* type */
9165 "crited", /* name */
9166 OPTGROUP_NONE, /* optinfo_flags */
9167 TV_TREE_SPLIT_EDGES, /* tv_id */
9168 PROP_cfg, /* properties_required */
9169 PROP_no_crit_edges, /* properties_provided */
9170 0, /* properties_destroyed */
9171 0, /* todo_flags_start */
9172 0, /* todo_flags_finish */
9173 };
9174
9175 class pass_split_crit_edges : public gimple_opt_pass
9176 {
9177 public:
9178 pass_split_crit_edges (gcc::context *ctxt)
9179 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9180 {}
9181
9182 /* opt_pass methods: */
9183 virtual unsigned int execute (function *) { return split_critical_edges (); }
9184
9185 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9186 }; // class pass_split_crit_edges
9187
9188 } // anon namespace
9189
9190 gimple_opt_pass *
9191 make_pass_split_crit_edges (gcc::context *ctxt)
9192 {
9193 return new pass_split_crit_edges (ctxt);
9194 }
9195
9196
9197 /* Insert COND expression which is GIMPLE_COND after STMT
9198 in basic block BB with appropriate basic block split
9199 and creation of a new conditionally executed basic block.
9200 Update profile so the new bb is visited with probability PROB.
9201 Return created basic block. */
9202 basic_block
9203 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9204 profile_probability prob)
9205 {
9206 edge fall = split_block (bb, stmt);
9207 gimple_stmt_iterator iter = gsi_last_bb (bb);
9208 basic_block new_bb;
9209
9210 /* Insert cond statement. */
9211 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9212 if (gsi_end_p (iter))
9213 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9214 else
9215 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9216
9217 /* Create conditionally executed block. */
9218 new_bb = create_empty_bb (bb);
9219 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9220 e->probability = prob;
9221 new_bb->count = e->count ();
9222 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9223
9224 /* Fix edge for split bb. */
9225 fall->flags = EDGE_FALSE_VALUE;
9226 fall->probability -= e->probability;
9227
9228 /* Update dominance info. */
9229 if (dom_info_available_p (CDI_DOMINATORS))
9230 {
9231 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9232 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9233 }
9234
9235 /* Update loop info. */
9236 if (current_loops)
9237 add_bb_to_loop (new_bb, bb->loop_father);
9238
9239 return new_bb;
9240 }
9241
9242 /* Build a ternary operation and gimplify it. Emit code before GSI.
9243 Return the gimple_val holding the result. */
9244
9245 tree
9246 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9247 tree type, tree a, tree b, tree c)
9248 {
9249 tree ret;
9250 location_t loc = gimple_location (gsi_stmt (*gsi));
9251
9252 ret = fold_build3_loc (loc, code, type, a, b, c);
9253 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9254 GSI_SAME_STMT);
9255 }
9256
9257 /* Build a binary operation and gimplify it. Emit code before GSI.
9258 Return the gimple_val holding the result. */
9259
9260 tree
9261 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9262 tree type, tree a, tree b)
9263 {
9264 tree ret;
9265
9266 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9267 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9268 GSI_SAME_STMT);
9269 }
9270
9271 /* Build a unary operation and gimplify it. Emit code before GSI.
9272 Return the gimple_val holding the result. */
9273
9274 tree
9275 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9276 tree a)
9277 {
9278 tree ret;
9279
9280 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9281 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9282 GSI_SAME_STMT);
9283 }
9284
9285
9286
9287 /* Given a basic block B which ends with a conditional and has
9288 precisely two successors, determine which of the edges is taken if
9289 the conditional is true and which is taken if the conditional is
9290 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9291
9292 void
9293 extract_true_false_edges_from_block (basic_block b,
9294 edge *true_edge,
9295 edge *false_edge)
9296 {
9297 edge e = EDGE_SUCC (b, 0);
9298
9299 if (e->flags & EDGE_TRUE_VALUE)
9300 {
9301 *true_edge = e;
9302 *false_edge = EDGE_SUCC (b, 1);
9303 }
9304 else
9305 {
9306 *false_edge = e;
9307 *true_edge = EDGE_SUCC (b, 1);
9308 }
9309 }
9310
9311
9312 /* From a controlling predicate in the immediate dominator DOM of
9313 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9314 predicate evaluates to true and false and store them to
9315 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9316 they are non-NULL. Returns true if the edges can be determined,
9317 else return false. */
9318
9319 bool
9320 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9321 edge *true_controlled_edge,
9322 edge *false_controlled_edge)
9323 {
9324 basic_block bb = phiblock;
9325 edge true_edge, false_edge, tem;
9326 edge e0 = NULL, e1 = NULL;
9327
9328 /* We have to verify that one edge into the PHI node is dominated
9329 by the true edge of the predicate block and the other edge
9330 dominated by the false edge. This ensures that the PHI argument
9331 we are going to take is completely determined by the path we
9332 take from the predicate block.
9333 We can only use BB dominance checks below if the destination of
9334 the true/false edges are dominated by their edge, thus only
9335 have a single predecessor. */
9336 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9337 tem = EDGE_PRED (bb, 0);
9338 if (tem == true_edge
9339 || (single_pred_p (true_edge->dest)
9340 && (tem->src == true_edge->dest
9341 || dominated_by_p (CDI_DOMINATORS,
9342 tem->src, true_edge->dest))))
9343 e0 = tem;
9344 else if (tem == false_edge
9345 || (single_pred_p (false_edge->dest)
9346 && (tem->src == false_edge->dest
9347 || dominated_by_p (CDI_DOMINATORS,
9348 tem->src, false_edge->dest))))
9349 e1 = tem;
9350 else
9351 return false;
9352 tem = EDGE_PRED (bb, 1);
9353 if (tem == true_edge
9354 || (single_pred_p (true_edge->dest)
9355 && (tem->src == true_edge->dest
9356 || dominated_by_p (CDI_DOMINATORS,
9357 tem->src, true_edge->dest))))
9358 e0 = tem;
9359 else if (tem == false_edge
9360 || (single_pred_p (false_edge->dest)
9361 && (tem->src == false_edge->dest
9362 || dominated_by_p (CDI_DOMINATORS,
9363 tem->src, false_edge->dest))))
9364 e1 = tem;
9365 else
9366 return false;
9367 if (!e0 || !e1)
9368 return false;
9369
9370 if (true_controlled_edge)
9371 *true_controlled_edge = e0;
9372 if (false_controlled_edge)
9373 *false_controlled_edge = e1;
9374
9375 return true;
9376 }
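
/* For illustration, in a CFG of the shape

	  DOM
	 /t   \f
	B      C
	 \    /
       PHIBLOCK

   where B and C each have a single predecessor, the function above
   returns true with *TRUE_CONTROLLED_EDGE set to the edge B->PHIBLOCK
   and *FALSE_CONTROLLED_EDGE set to the edge C->PHIBLOCK.  If one arm
   is empty and DOM branches to PHIBLOCK directly, that edge itself is
   returned for the corresponding side.  */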

/* Generate a range test LHS CODE RHS that determines whether INDEX is in the
   range [low, high].  Place associated stmts before the last statement of BB,
   and return the operands of the comparison through *LHS and *RHS.  */

void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  tree utype = unsigned_type_for (type);

  low = fold_convert (utype, low);
  high = fold_convert (utype, high);

  gimple_seq seq = NULL;
  index = gimple_convert (&seq, utype, index);
  *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
  *rhs = const_binop (MINUS_EXPR, utype, high, low);

  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
}
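
/* The helper above implements the usual unsigned range-check trick:
   INDEX is in [LOW, HIGH] iff (utype) (INDEX - LOW) <= (utype) (HIGH - LOW).
   A sketch of how a caller might consume the two outputs (BB, INDEX, LOW
   and HIGH are assumed context; the comparison code is up to the caller):

     tree lhs, rhs;
     generate_range_test (bb, index, low, high, &lhs, &rhs);
     gcond *cond = gimple_build_cond (LE_EXPR, lhs, rhs,
				      NULL_TREE, NULL_TREE);
     gimple_stmt_iterator gsi = gsi_last_bb (bb);
     gsi_insert_before (&gsi, cond, GSI_SAME_STMT);  */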

/* Emit return warnings.  */

namespace {

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return

unsigned int
pass_warn_function_return::execute (function *fun)
{
  source_location location;
  gimple *last;
  edge e;
  edge_iterator ei;

  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
	   (e = ei_safe_edge (ei)); )
	{
	  last = last_stmt (e->src);
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && location == UNKNOWN_LOCATION
	      && ((location = LOCATION_LOCUS (gimple_location (last)))
		  != UNKNOWN_LOCATION)
	      && !optimize)
	    break;
	  /* When optimizing, replace return stmts in noreturn functions
	     with a __builtin_unreachable () call.  */
	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
	    {
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      gimple *new_stmt = gimple_build_call (fndecl, 0);
	      gimple_set_location (new_stmt, gimple_location (last));
	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
	      gsi_replace (&gsi, new_stmt, true);
	      remove_edge (e);
	    }
	  else
	    ei_next (&ei);
	}
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;
      warning_at (location, 0, "%<noreturn%> function does return");
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
	   && !TREE_NO_WARNING (fun->decl)
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple *last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      warning_at (location, OPT_Wreturn_type,
			  "control reaches end of non-void function");
	      TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
      /* The C++ FE turns fallthrough from the end of a non-void function
	 into a __builtin_unreachable () call with BUILTINS_LOCATION.
	 Recognize those too.  */
      basic_block bb;
      if (!TREE_NO_WARNING (fun->decl))
	FOR_EACH_BB_FN (bb, fun)
	  if (EDGE_COUNT (bb->succs) == 0)
	    {
	      gimple *last = last_stmt (bb);
	      const enum built_in_function ubsan_missing_ret
		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
	      if (last
		  && ((LOCATION_LOCUS (gimple_location (last))
		       == BUILTINS_LOCATION
		       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
		{
		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
		  gsi_prev_nondebug (&gsi);
		  gimple *prev = gsi_stmt (gsi);
		  if (prev == NULL)
		    location = UNKNOWN_LOCATION;
		  else
		    location = gimple_location (prev);
		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		    location = fun->function_end_locus;
		  warning_at (location, OPT_Wreturn_type,
			      "control reaches end of non-void function");
		  TREE_NO_WARNING (fun->decl) = 1;
		  break;
		}
	    }
    }
  return 0;
}
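
/* For illustration, source code along these lines triggers the two
   warnings issued above (a sketch, not part of the compiler itself):

     __attribute__ ((noreturn)) void fatal (int c)
     {
       if (c)
	 abort ();
       // Falls off the end: "'noreturn' function does return".
     }

     int sign (int v)
     {
       if (v > 0)
	 return 1;
       // No value returned here: "control reaches end of non-void function".
     }
*/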

} // anon namespace

gimple_opt_pass *
make_pass_warn_function_return (gcc::context *ctxt)
{
  return new pass_warn_function_return (ctxt);
}

/* Walk a gimplified function and warn about calls whose return value is
   ignored although the called function's type is marked with
   attribute ((warn_unused_result)).  This is done before inlining, so
   we don't have to worry about that.  */

static void
do_warn_unused_result (gimple_seq seq)
{
  tree fdecl, ftype;
  gimple_stmt_iterator i;

  for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
    {
      gimple *g = gsi_stmt (i);

      switch (gimple_code (g))
	{
	case GIMPLE_BIND:
	  do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
	  break;
	case GIMPLE_TRY:
	  do_warn_unused_result (gimple_try_eval (g));
	  do_warn_unused_result (gimple_try_cleanup (g));
	  break;
	case GIMPLE_CATCH:
	  do_warn_unused_result (gimple_catch_handler (
				   as_a <gcatch *> (g)));
	  break;
	case GIMPLE_EH_FILTER:
	  do_warn_unused_result (gimple_eh_filter_failure (g));
	  break;

	case GIMPLE_CALL:
	  if (gimple_call_lhs (g))
	    break;
	  if (gimple_call_internal_p (g))
	    break;

	  /* This is a naked call, as opposed to a GIMPLE_CALL with an
	     LHS.  All calls whose value is ignored should be
	     represented like this.  Look for the attribute.  */
	  fdecl = gimple_call_fndecl (g);
	  ftype = gimple_call_fntype (g);

	  if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
	    {
	      location_t loc = gimple_location (g);

	      if (fdecl)
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of %qD, "
			    "declared with attribute warn_unused_result",
			    fdecl);
	      else
		warning_at (loc, OPT_Wunused_result,
			    "ignoring return value of function "
			    "declared with attribute warn_unused_result");
	    }
	  break;

	default:
	  /* Not a container, not a call, or a call whose value is used.  */
	  break;
	}
    }
}
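
/* For illustration, the walk above warns for code such as (a sketch):

     __attribute__ ((warn_unused_result)) int reserve (int n);

     void f (void)
     {
       reserve (16);		// Result ignored: -Wunused-result fires.
       if (reserve (16) < 0)	// OK: the gimplified call has an LHS.
	 return;
     }
*/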

namespace {

const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result

} // anon namespace

gimple_opt_pass *
make_pass_warn_unused_result (gcc::context *ctxt)
{
  return new pass_warn_unused_result (ctxt);
}

/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marking functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as a standalone pass from the pass manager,
   because in between inlining and this fixup verify_flow_info would fail.  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep the access if the store has a side effect, i.e. when the
	     source is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  continue;
		}
	    }
	  /* For calls we can simply remove the LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call, end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count);
		}
	    }
	}
    }
  if (scale)
    compute_function_frequency ();

  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
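
/* A small worked example of the profile scaling above: if IPA cloning
   has left the cgraph node with a count of 300 while the function body
   still records an entry count of 1000, then NUM/DEN is 300/1000 and a
   basic block whose count is 500 is rescaled by apply_scale to roughly
   500 * 300 / 1000 == 150, keeping the body consistent with the node.  */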

namespace {

const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg

} // anon namespace

gimple_opt_pass *
make_pass_fixup_cfg (gcc::context *ctxt)
{
  return new pass_fixup_cfg (ctxt);
}

/* Garbage collection support for edge_def.  */

extern void gt_ggc_mx (tree&);
extern void gt_ggc_mx (gimple *&);
extern void gt_ggc_mx (rtx&);
extern void gt_ggc_mx (basic_block&);

static void
gt_ggc_mx (rtx_insn *& x)
{
  if (x)
    gt_ggc_mx_rtx_def ((void *) x);
}

void
gt_ggc_mx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_ggc_mx (e->src);
  gt_ggc_mx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_ggc_mx (e->insns.g);
  else
    gt_ggc_mx (e->insns.r);
  gt_ggc_mx (block);
}

/* PCH support for edge_def.  */

extern void gt_pch_nx (tree&);
extern void gt_pch_nx (gimple *&);
extern void gt_pch_nx (rtx&);
extern void gt_pch_nx (basic_block&);

static void
gt_pch_nx (rtx_insn *& x)
{
  if (x)
    gt_pch_nx_rtx_def ((void *) x);
}

void
gt_pch_nx (edge_def *e)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  gt_pch_nx (e->src);
  gt_pch_nx (e->dest);
  if (current_ir_type () == IR_GIMPLE)
    gt_pch_nx (e->insns.g);
  else
    gt_pch_nx (e->insns.r);
  gt_pch_nx (block);
}

void
gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
{
  tree block = LOCATION_BLOCK (e->goto_locus);
  op (&(e->src), cookie);
  op (&(e->dest), cookie);
  if (current_ir_type () == IR_GIMPLE)
    op (&(e->insns.g), cookie);
  else
    op (&(e->insns.r), cookie);
  op (&(block), cookie);
}

#if CHECKING_P

namespace selftest {

/* Helper function for CFG selftests: create a dummy function decl
   and push it as cfun.  */

static tree
push_fndecl (const char *name)
{
  tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
  /* FIXME: this uses input_location: */
  tree fndecl = build_fn_decl (name, fn_type);
  tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
			    NULL_TREE, integer_type_node);
  DECL_RESULT (fndecl) = retval;
  push_struct_function (fndecl);
  function *fun = DECL_STRUCT_FUNCTION (fndecl);
  ASSERT_TRUE (fun != NULL);
  init_empty_tree_cfg_for_function (fun);
  ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));
  return fndecl;
}

/* These tests directly create CFGs.
   Compare with the static fns within tree-cfg.c:
   - build_gimple_cfg
   - make_blocks: calls create_basic_block (seq, bb);
   - make_edges.  */

/* Verify a simple cfg of the form:
     ENTRY -> A -> B -> C -> EXIT.  */

static void
test_linear_chain ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_linear_chain");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_b);

  ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create some edges: a simple linear chain of BBs.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, 0);
  make_edge (bb_b, bb_c, 0);
  make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (4, n_edges_for_fn (fun));
  ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
  ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (1, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
  ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);

  /* Verify the dominance information.
     Each BB in our simple chain should be dominated by the one before
     it.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (1, dom_by_b.length ());
  ASSERT_EQ (bb_c, dom_by_b[0]);
  free_dominance_info (CDI_DOMINATORS);
  dom_by_b.release ();

  /* Similarly for post-dominance: each BB in our chain is post-dominated
     by the one after it.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (1, postdom_by_b.length ());
  ASSERT_EQ (bb_a, postdom_by_b[0]);
  free_dominance_info (CDI_POST_DOMINATORS);
  postdom_by_b.release ();

  pop_cfun ();
}

/* Verify a simple CFG of the form:
     ENTRY
       |
       A
      / \
     /t  \f
    B     C
     \   /
      \ /
       D
       |
      EXIT.  */

static void
test_diamond ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_test_diamond");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  /* Create some empty blocks.  */
  basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
  basic_block bb_b = create_empty_bb (bb_a);
  basic_block bb_c = create_empty_bb (bb_a);
  basic_block bb_d = create_empty_bb (bb_b);

  ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
  make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
  make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
  make_edge (bb_b, bb_d, 0);
  make_edge (bb_c, bb_d, 0);
  make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

  /* Verify the edges.  */
  ASSERT_EQ (6, n_edges_for_fn (fun));
  ASSERT_EQ (1, bb_a->preds->length ());
  ASSERT_EQ (2, bb_a->succs->length ());
  ASSERT_EQ (1, bb_b->preds->length ());
  ASSERT_EQ (1, bb_b->succs->length ());
  ASSERT_EQ (1, bb_c->preds->length ());
  ASSERT_EQ (1, bb_c->succs->length ());
  ASSERT_EQ (2, bb_d->preds->length ());
  ASSERT_EQ (1, bb_d->succs->length ());

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
  ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
  vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
  ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order.  */
  dom_by_a.release ();
  vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
  ASSERT_EQ (0, dom_by_b.length ());
  dom_by_b.release ();
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
  ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
  vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
  ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order.  */
  postdom_by_d.release ();
  vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
  ASSERT_EQ (0, postdom_by_b.length ());
  postdom_by_b.release ();
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Verify that we can handle a CFG containing a "complete" aka
   fully-connected subgraph (where A B C D below all have edges
   pointing to each other node, also to themselves).
   e.g.:
     ENTRY  EXIT
       |    ^
       |   /
       |  /
       | /
       V/
       A<--->B
       ^^   ^^
       | \ / |
       |  X  |
       | / \ |
       VV   VV
       C<--->D
*/

static void
test_fully_connected ()
{
  gimple_register_cfg_hooks ();

  tree fndecl = push_fndecl ("cfg_fully_connected");
  function *fun = DECL_STRUCT_FUNCTION (fndecl);

  const int n = 4;

  /* Create some empty blocks.  */
  auto_vec <basic_block> subgraph_nodes;
  for (int i = 0; i < n; i++)
    subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));

  ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
  ASSERT_EQ (0, n_edges_for_fn (fun));

  /* Create the edges.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
  make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
  for (int i = 0; i < n; i++)
    for (int j = 0; j < n; j++)
      make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);

  /* Verify the edges.  */
  ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
  /* The first one is linked to ENTRY/EXIT as well as itself and
     everything else.  */
  ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
  ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
  /* The other ones in the subgraph are linked to everything in
     the subgraph (including themselves).  */
  for (int i = 1; i < n; i++)
    {
      ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
      ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
    }

  /* Verify the dominance information.  */
  calculate_dominance_info (CDI_DOMINATORS);
  /* The initial block in the subgraph should be dominated by ENTRY.  */
  ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be dominated by the
     initial block.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_DOMINATORS);

  /* Similarly for post-dominance.  */
  calculate_dominance_info (CDI_POST_DOMINATORS);
  /* The initial block in the subgraph should be postdominated by EXIT.  */
  ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
	     get_immediate_dominator (CDI_POST_DOMINATORS,
				      subgraph_nodes[0]));
  /* Every other block in the subgraph should be postdominated by the
     initial block, since that leads to EXIT.  */
  for (int i = 1; i < n; i++)
    ASSERT_EQ (subgraph_nodes[0],
	       get_immediate_dominator (CDI_POST_DOMINATORS,
					subgraph_nodes[i]));
  free_dominance_info (CDI_POST_DOMINATORS);

  pop_cfun ();
}

/* Run all of the selftests within this file.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}

} // namespace selftest

/* TODO: test the dominator/postdominator logic with various graphs/nodes:
   - loop
   - nested loops
   - switch statement (a block with many out-edges)
   - something that jumps to itself (sketched below)
   - etc  */
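
/* One possible shape for the "jumps to itself" case above (a sketch only,
   not wired into tree_cfg_c_tests):

     static void
     test_self_loop ()
     {
       gimple_register_cfg_hooks ();
       tree fndecl = push_fndecl ("cfg_test_self_loop");
       function *fun = DECL_STRUCT_FUNCTION (fndecl);

       basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
       make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
       make_edge (bb_a, bb_a, 0);
       make_edge (bb_a, EXIT_BLOCK_PTR_FOR_FN (fun), 0);

       calculate_dominance_info (CDI_DOMINATORS);
       ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
		  get_immediate_dominator (CDI_DOMINATORS, bb_a));
       free_dominance_info (CDI_DOMINATORS);

       pop_cfun ();
     }
*/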

#endif /* CHECKING_P */