1 /* Control flow functions for trees.
2 Copyright (C) 2001-2021 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
61 #include "gimplify.h"
62 #include "attribs.h"
63 #include "selftest.h"
64 #include "opts.h"
65 #include "asan.h"
66 #include "profile.h"
67 #include "sreal.h"
68
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
71
72 /* Local declarations. */
73
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
76
77 /* This hash table allows us to efficiently look up all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
81
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
84
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
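/* Illustrative sketch (not from the original sources): for a switch such as

     switch (x) { case 1: case 2: goto shared; default: break; }

   the CASE_LABEL_EXPRs for 1 and 2 target the same outgoing edge, so the
   map entry for that edge points at one of them and its CASE_CHAIN links
   to the other, giving a per-edge list that edge redirection can rewrite
   in one pass.  */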
89
90 static hash_map<edge, tree> *edge_to_cases;
91
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
95
96 static bitmap touched_switch_bbs;
97
98 /* OpenMP region indexes for blocks during the CFG pass. */
99 static vec<int> bb_to_omp_idx;
100
101 /* CFG statistics. */
102 struct cfg_stats_d
103 {
104 long num_merged_labels;
105 };
106
107 static struct cfg_stats_d cfg_stats;
108
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
111 {
112 hash_map<tree, tree> *vars_map;
113 tree to_context;
114 };
115
116 /* Hash table to store last discriminator assigned for each locus. */
117 struct locus_discrim_map
118 {
119 int location_line;
120 int discriminator;
121 };
122
123 /* Hashtable helpers. */
124
125 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
126 {
127 static inline hashval_t hash (const locus_discrim_map *);
128 static inline bool equal (const locus_discrim_map *,
129 const locus_discrim_map *);
130 };
131
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
134
135 inline hashval_t
136 locus_discrim_hasher::hash (const locus_discrim_map *item)
137 {
138 return item->location_line;
139 }
140
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
143
144 inline bool
145 locus_discrim_hasher::equal (const locus_discrim_map *a,
146 const locus_discrim_map *b)
147 {
148 return a->location_line == b->location_line;
149 }
150
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
152
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
155
156 /* Edges. */
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
165
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
173
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
180
181 void
182 init_empty_tree_cfg_for_function (struct function *fn)
183 {
184 /* Initialize the basic block array. */
185 init_flow (fn);
186 profile_status_for_fn (fn) = PROFILE_ABSENT;
187 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
188 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
190 initial_cfg_capacity, true);
191
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
194 initial_cfg_capacity, true);
195
196 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
197 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
198
199 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn);
201 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn);
203 }
204
205 void
206 init_empty_tree_cfg (void)
207 {
208 init_empty_tree_cfg_for_function (cfun);
209 }
210
211 /*---------------------------------------------------------------------------
212 Create basic blocks
213 ---------------------------------------------------------------------------*/
214
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
217
218 static void
219 build_gimple_cfg (gimple_seq seq)
220 {
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
223
224 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
225
226 init_empty_tree_cfg ();
227
228 make_blocks (seq);
229
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
233
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
238 n_basic_blocks_for_fn (cfun));
239
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
242
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
247
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
250 make_edges ();
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
255 }
256
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
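/* Illustrative example (assumed front-end output, not taken from this
   file): a loop preceded by "#pragma GCC ivdep" reaches the CFG pass
   with an internal call along the lines of

     x = .ANNOTATE (cond, annot_expr_ivdep_kind, ...);

   placed just before the exit test; the loop below consumes such calls,
   records the hint on LOOP (here safelen = INT_MAX) and replaces the
   call with a plain copy of its first argument.  */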
260
261 static void
262 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
263 {
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
266
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
268 return;
269
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
271 {
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
274 break;
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
277 break;
278
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
280 {
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
283 break;
284 case annot_expr_unroll_kind:
285 loop->unroll
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
288 break;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
291 break;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
295 break;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
299 break;
300 default:
301 gcc_unreachable ();
302 }
303
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
307 }
308 }
309
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
313
314 static void
315 replace_loop_annotate (void)
316 {
317 basic_block bb;
318 gimple_stmt_iterator gsi;
319 gimple *stmt;
320
321 for (auto loop : loops_list (cfun, 0))
322 {
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
325
326 /* Then look into the latch, if any. */
327 if (loop->latch)
328 replace_loop_annotate_in_block (loop->latch, loop);
329
330 /* Push the global flag_finite_loops state down to individual loops. */
331 loop->finite_p = flag_finite_loops;
332 }
333
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
336 {
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
338 {
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
341 continue;
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
344 continue;
345
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
347 {
348 case annot_expr_ivdep_kind:
349 case annot_expr_unroll_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 case annot_expr_parallel_kind:
353 break;
354 default:
355 gcc_unreachable ();
356 }
357
358 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
359 stmt = gimple_build_assign (gimple_call_lhs (stmt),
360 gimple_call_arg (stmt, 0));
361 gsi_replace (&gsi, stmt, true);
362 }
363 }
364 }
365
366 static unsigned int
367 execute_build_cfg (void)
368 {
369 gimple_seq body = gimple_body (current_function_decl);
370
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
374 {
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
377 }
378 cleanup_tree_cfg ();
379
380 bb_to_omp_idx.release ();
381
382 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
383 replace_loop_annotate ();
384 return 0;
385 }
386
387 namespace {
388
389 const pass_data pass_data_build_cfg =
390 {
391 GIMPLE_PASS, /* type */
392 "cfg", /* name */
393 OPTGROUP_NONE, /* optinfo_flags */
394 TV_TREE_CFG, /* tv_id */
395 PROP_gimple_leh, /* properties_required */
396 ( PROP_cfg | PROP_loops ), /* properties_provided */
397 0, /* properties_destroyed */
398 0, /* todo_flags_start */
399 0, /* todo_flags_finish */
400 };
401
402 class pass_build_cfg : public gimple_opt_pass
403 {
404 public:
405 pass_build_cfg (gcc::context *ctxt)
406 : gimple_opt_pass (pass_data_build_cfg, ctxt)
407 {}
408
409 /* opt_pass methods: */
410 virtual unsigned int execute (function *) { return execute_build_cfg (); }
411
412 }; // class pass_build_cfg
413
414 } // anon namespace
415
416 gimple_opt_pass *
417 make_pass_build_cfg (gcc::context *ctxt)
418 {
419 return new pass_build_cfg (ctxt);
420 }
421
422
423 /* Return true if T is a computed goto. */
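/* Illustrative note: with the GNU labels-as-values extension,

     void *p = &&lab;
     goto *p;

   the goto's destination is a pointer value rather than a LABEL_DECL, so
   this predicate returns true for it; a plain "goto lab;" keeps a
   LABEL_DECL destination and yields false.  */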
424
425 bool
426 computed_goto_p (gimple *t)
427 {
428 return (gimple_code (t) == GIMPLE_GOTO
429 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
430 }
431
432 /* Returns true if the sequence of statements STMTS only contains
433 a call to __builtin_unreachable (). */
434
435 bool
436 gimple_seq_unreachable_p (gimple_seq stmts)
437 {
438 if (stmts == NULL
439 /* Return false if -fsanitize=unreachable, we don't want to
440 optimize away those calls, but rather turn them into
441 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
442 later. */
443 || sanitize_flags_p (SANITIZE_UNREACHABLE))
444 return false;
445
446 gimple_stmt_iterator gsi = gsi_last (stmts);
447
448 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
449 return false;
450
451 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
452 {
453 gimple *stmt = gsi_stmt (gsi);
454 if (gimple_code (stmt) != GIMPLE_LABEL
455 && !is_gimple_debug (stmt)
456 && !gimple_clobber_p (stmt))
457 return false;
458 }
459 return true;
460 }
461
462 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
463 the other edge points to a bb with just __builtin_unreachable ().
464 I.e. return true for C->M edge in:
465 <bb C>:
466 ...
467 if (something)
468 goto <bb N>;
469 else
470 goto <bb M>;
471 <bb N>:
472 __builtin_unreachable ();
473 <bb M>: */
474
475 bool
476 assert_unreachable_fallthru_edge_p (edge e)
477 {
478 basic_block pred_bb = e->src;
479 gimple *last = last_stmt (pred_bb);
480 if (last && gimple_code (last) == GIMPLE_COND)
481 {
482 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
483 if (other_bb == e->dest)
484 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
485 if (EDGE_COUNT (other_bb->succs) == 0)
486 return gimple_seq_unreachable_p (bb_seq (other_bb));
487 }
488 return false;
489 }
490
491
492 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
493 could alter control flow except via eh. We initialize the flag at
494 CFG build time and only ever clear it later. */
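/* For instance (illustrative), a call to a noreturn function such as
   abort () has ECF_NORETURN set, so the flag is turned on here and
   stmt_ends_bb_p later splits the block right after that call.  */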
495
496 static void
497 gimple_call_initialize_ctrl_altering (gimple *stmt)
498 {
499 int flags = gimple_call_flags (stmt);
500
501 /* A call alters control flow if it can make an abnormal goto. */
502 if (call_can_make_abnormal_goto (stmt)
503 /* A call also alters control flow if it does not return. */
504 || flags & ECF_NORETURN
505 /* TM ending statements have backedges out of the transaction.
506 Return true so we split the basic block containing them.
507 Note that the TM_BUILTIN test is merely an optimization. */
508 || ((flags & ECF_TM_BUILTIN)
509 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
510 /* BUILT_IN_RETURN call is same as return statement. */
511 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
512 /* IFN_UNIQUE should be the last insn, to make checking for it
513 as cheap as possible. */
514 || (gimple_call_internal_p (stmt)
515 && gimple_call_internal_unique_p (stmt)))
516 gimple_call_set_ctrl_altering (stmt, true);
517 else
518 gimple_call_set_ctrl_altering (stmt, false);
519 }
520
521
522 /* Insert SEQ after BB and build a flowgraph. */
523
524 static basic_block
525 make_blocks_1 (gimple_seq seq, basic_block bb)
526 {
527 gimple_stmt_iterator i = gsi_start (seq);
528 gimple *stmt = NULL;
529 gimple *prev_stmt = NULL;
530 bool start_new_block = true;
531 bool first_stmt_of_seq = true;
532
533 while (!gsi_end_p (i))
534 {
535 /* PREV_STMT should only be set to a debug stmt if the debug
536 stmt is before nondebug stmts. Once stmt reaches a nondebug
537 nonlabel, prev_stmt will be set to it, so that
538 stmt_starts_bb_p will know to start a new block if a label is
539 found. However, if stmt was a label after debug stmts only,
540 keep the label in prev_stmt even if we find further debug
541 stmts, for there may be other labels after them, and they
542 should land in the same block. */
543 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
544 prev_stmt = stmt;
545 stmt = gsi_stmt (i);
546
547 if (stmt && is_gimple_call (stmt))
548 gimple_call_initialize_ctrl_altering (stmt);
549
550 /* If the statement starts a new basic block or if we have determined
551 in a previous pass that we need to create a new block for STMT, do
552 so now. */
553 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
554 {
555 if (!first_stmt_of_seq)
556 gsi_split_seq_before (&i, &seq);
557 bb = create_basic_block (seq, bb);
558 start_new_block = false;
559 prev_stmt = NULL;
560 }
561
562 /* Now add STMT to BB and create the subgraphs for special statement
563 codes. */
564 gimple_set_bb (stmt, bb);
565
566 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
567 next iteration. */
568 if (stmt_ends_bb_p (stmt))
569 {
570 /* If the stmt can make abnormal goto use a new temporary
571 for the assignment to the LHS. This makes sure the old value
572 of the LHS is available on the abnormal edge. Otherwise
573 we will end up with overlapping life-ranges for abnormal
574 SSA names. */
575 if (gimple_has_lhs (stmt)
576 && stmt_can_make_abnormal_goto (stmt)
577 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
578 {
579 tree lhs = gimple_get_lhs (stmt);
580 tree tmp = create_tmp_var (TREE_TYPE (lhs));
581 gimple *s = gimple_build_assign (lhs, tmp);
582 gimple_set_location (s, gimple_location (stmt));
583 gimple_set_block (s, gimple_block (stmt));
584 gimple_set_lhs (stmt, tmp);
585 gsi_insert_after (&i, s, GSI_SAME_STMT);
586 }
587 start_new_block = true;
588 }
589
590 gsi_next (&i);
591 first_stmt_of_seq = false;
592 }
593 return bb;
594 }
595
596 /* Build a flowgraph for the sequence of stmts SEQ. */
597
598 static void
599 make_blocks (gimple_seq seq)
600 {
601 /* Look for debug markers right before labels, and move the debug
602 stmts after the labels. Accepting labels among debug markers
603 adds no value, just complexity; if we wanted to annotate labels
604 with view numbers (so sequencing among markers would matter) or
605 somesuch, we're probably better off still moving the labels, but
606 adding other debug annotations in their original positions or
607 emitting nonbind or bind markers associated with the labels in
608 the original position of the labels.
609
610 Moving labels would probably be simpler, but we can't do that:
611 moving labels assigns label ids to them, and doing so because of
612 debug markers makes for -fcompare-debug and possibly even codegen
613 differences. So, we have to move the debug stmts instead. To
614 that end, we scan SEQ backwards, marking the position of the
615 latest (earliest we find) label, and moving debug stmts that are
616 not separated from it by nondebug nonlabel stmts after the
617 label. */
618 if (MAY_HAVE_DEBUG_MARKER_STMTS)
619 {
620 gimple_stmt_iterator label = gsi_none ();
621
622 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
623 {
624 gimple *stmt = gsi_stmt (i);
625
626 /* If this is the first label we encounter (latest in SEQ)
627 before nondebug stmts, record its position. */
628 if (is_a <glabel *> (stmt))
629 {
630 if (gsi_end_p (label))
631 label = i;
632 continue;
633 }
634
635 /* Without a recorded label position to move debug stmts to,
636 there's nothing to do. */
637 if (gsi_end_p (label))
638 continue;
639
640 /* Move the debug stmt at I after LABEL. */
641 if (is_gimple_debug (stmt))
642 {
643 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
644 /* As STMT is removed, I advances to the stmt after
645 STMT, so the gsi_prev in the for "increment"
646 expression gets us to the stmt we're to visit after
647 STMT. LABEL, however, would advance to the moved
648 stmt if we passed it to gsi_move_after, so pass it a
649 copy instead, so as to keep LABEL pointing to the
650 LABEL. */
651 gimple_stmt_iterator copy = label;
652 gsi_move_after (&i, &copy);
653 continue;
654 }
655
656 /* There aren't any (more?) debug stmts before label, so
657 there isn't anything else to move after it. */
658 label = gsi_none ();
659 }
660 }
661
662 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
663 }
664
665 /* Create and return a new empty basic block after bb AFTER. */
666
667 static basic_block
668 create_bb (void *h, void *e, basic_block after)
669 {
670 basic_block bb;
671
672 gcc_assert (!e);
673
674 /* Create and initialize a new basic block. Since alloc_block uses
675 GC allocation that clears memory to allocate a basic block, we do
676 not have to clear the newly allocated basic block here. */
677 bb = alloc_block ();
678
679 bb->index = last_basic_block_for_fn (cfun);
680 bb->flags = BB_NEW;
681 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
682
683 /* Add the new block to the linked list of blocks. */
684 link_block (bb, after);
685
686 /* Grow the basic block array if needed. */
687 if ((size_t) last_basic_block_for_fn (cfun)
688 == basic_block_info_for_fn (cfun)->length ())
689 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
690 last_basic_block_for_fn (cfun) + 1);
691
692 /* Add the newly created block to the array. */
693 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
694
695 n_basic_blocks_for_fn (cfun)++;
696 last_basic_block_for_fn (cfun)++;
697
698 return bb;
699 }
700
701
702 /*---------------------------------------------------------------------------
703 Edge creation
704 ---------------------------------------------------------------------------*/
705
706 /* If basic block BB has an abnormal edge to a basic block
707 containing an IFN_ABNORMAL_DISPATCHER internal call, return
708 the dispatcher's basic block, otherwise return NULL. */
709
710 basic_block
711 get_abnormal_succ_dispatcher (basic_block bb)
712 {
713 edge e;
714 edge_iterator ei;
715
716 FOR_EACH_EDGE (e, ei, bb->succs)
717 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
718 {
719 gimple_stmt_iterator gsi
720 = gsi_start_nondebug_after_labels_bb (e->dest);
721 gimple *g = gsi_stmt (gsi);
722 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
723 return e->dest;
724 }
725 return NULL;
726 }
727
728 /* Helper function for make_edges. Create a basic block with an
729 ABNORMAL_DISPATCHER internal call in it if needed, and
730 create abnormal edges from BBS to it and from it to FOR_BB
731 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
732
733 static void
734 handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
735 auto_vec<basic_block> *bbs, bool computed_goto)
736 {
737 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
738 unsigned int idx = 0;
739 basic_block bb;
740 bool inner = false;
741
742 if (!bb_to_omp_idx.is_empty ())
743 {
744 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
745 if (bb_to_omp_idx[for_bb->index] != 0)
746 inner = true;
747 }
748
749 /* If the dispatcher has been created already, then there are basic
750 blocks with abnormal edges to it, so just make a new edge to
751 for_bb. */
752 if (*dispatcher == NULL)
753 {
754 /* Check if there are any basic blocks that need to have
755 abnormal edges to this dispatcher. If there are none, return
756 early. */
757 if (bb_to_omp_idx.is_empty ())
758 {
759 if (bbs->is_empty ())
760 return;
761 }
762 else
763 {
764 FOR_EACH_VEC_ELT (*bbs, idx, bb)
765 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
766 break;
767 if (bb == NULL)
768 return;
769 }
770
771 /* Create the dispatcher bb. */
772 *dispatcher = create_basic_block (NULL, for_bb);
773 if (computed_goto)
774 {
775 /* Factor computed gotos into a common computed goto site. Also
776 record the location of that site so that we can un-factor the
777 gotos after we have converted back to normal form. */
778 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
779
780 /* Create the destination of the factored goto. Each original
781 computed goto will put its desired destination into this
782 variable and jump to the label we create immediately below. */
783 tree var = create_tmp_var (ptr_type_node, "gotovar");
784
785 /* Build a label for the new block which will contain the
786 factored computed goto. */
787 tree factored_label_decl
788 = create_artificial_label (UNKNOWN_LOCATION);
789 gimple *factored_computed_goto_label
790 = gimple_build_label (factored_label_decl);
791 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
792
793 /* Build our new computed goto. */
794 gimple *factored_computed_goto = gimple_build_goto (var);
795 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
796
797 FOR_EACH_VEC_ELT (*bbs, idx, bb)
798 {
799 if (!bb_to_omp_idx.is_empty ()
800 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
801 continue;
802
803 gsi = gsi_last_bb (bb);
804 gimple *last = gsi_stmt (gsi);
805
806 gcc_assert (computed_goto_p (last));
807
808 /* Copy the original computed goto's destination into VAR. */
809 gimple *assignment
810 = gimple_build_assign (var, gimple_goto_dest (last));
811 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
812
813 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
814 e->goto_locus = gimple_location (last);
815 gsi_remove (&gsi, true);
816 }
817 }
818 else
819 {
820 tree arg = inner ? boolean_true_node : boolean_false_node;
821 gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
822 1, arg);
823 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
824 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
825
826 /* Create predecessor edges of the dispatcher. */
827 FOR_EACH_VEC_ELT (*bbs, idx, bb)
828 {
829 if (!bb_to_omp_idx.is_empty ()
830 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
831 continue;
832 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
833 }
834 }
835 }
836
837 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
838 }
839
840 /* Creates outgoing edges for BB. Returns 1 when it ends with a
841 computed goto, returns 2 when it ends with a statement that
842 might return to this function via a nonlocal goto, and otherwise
843 returns 0. Updates *PCUR_REGION with the OMP region this BB is in. */
844
845 static int
846 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
847 {
848 gimple *last = last_stmt (bb);
849 bool fallthru = false;
850 int ret = 0;
851
852 if (!last)
853 return ret;
854
855 switch (gimple_code (last))
856 {
857 case GIMPLE_GOTO:
858 if (make_goto_expr_edges (bb))
859 ret = 1;
860 fallthru = false;
861 break;
862 case GIMPLE_RETURN:
863 {
864 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
865 e->goto_locus = gimple_location (last);
866 fallthru = false;
867 }
868 break;
869 case GIMPLE_COND:
870 make_cond_expr_edges (bb);
871 fallthru = false;
872 break;
873 case GIMPLE_SWITCH:
874 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
875 fallthru = false;
876 break;
877 case GIMPLE_RESX:
878 make_eh_edges (last);
879 fallthru = false;
880 break;
881 case GIMPLE_EH_DISPATCH:
882 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
883 break;
884
885 case GIMPLE_CALL:
886 /* If this function receives a nonlocal goto, then we need to
887 make edges from this call site to all the nonlocal goto
888 handlers. */
889 if (stmt_can_make_abnormal_goto (last))
890 ret = 2;
891
892 /* If this statement has reachable exception handlers, then
893 create abnormal edges to them. */
894 make_eh_edges (last);
895
896 /* BUILTIN_RETURN is really a return statement. */
897 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
898 {
899 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
900 fallthru = false;
901 }
902 /* Some calls are known not to return. */
903 else
904 fallthru = !gimple_call_noreturn_p (last);
905 break;
906
907 case GIMPLE_ASSIGN:
908 /* A GIMPLE_ASSIGN may throw internally and thus be considered
909 control-altering. */
910 if (is_ctrl_altering_stmt (last))
911 make_eh_edges (last);
912 fallthru = true;
913 break;
914
915 case GIMPLE_ASM:
916 make_gimple_asm_edges (bb);
917 fallthru = true;
918 break;
919
920 CASE_GIMPLE_OMP:
921 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
922 break;
923
924 case GIMPLE_TRANSACTION:
925 {
926 gtransaction *txn = as_a <gtransaction *> (last);
927 tree label1 = gimple_transaction_label_norm (txn);
928 tree label2 = gimple_transaction_label_uninst (txn);
929
930 if (label1)
931 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
932 if (label2)
933 make_edge (bb, label_to_block (cfun, label2),
934 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
935
936 tree label3 = gimple_transaction_label_over (txn);
937 if (gimple_transaction_subcode (txn)
938 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
939 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
940
941 fallthru = false;
942 }
943 break;
944
945 default:
946 gcc_assert (!stmt_ends_bb_p (last));
947 fallthru = true;
948 break;
949 }
950
951 if (fallthru)
952 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
953
954 return ret;
955 }
956
957 /* Join all the blocks in the flowgraph. */
958
959 static void
960 make_edges (void)
961 {
962 basic_block bb;
963 struct omp_region *cur_region = NULL;
964 auto_vec<basic_block> ab_edge_goto;
965 auto_vec<basic_block> ab_edge_call;
966 int cur_omp_region_idx = 0;
967
968 /* Create an edge from entry to the first block with executable
969 statements in it. */
970 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
971 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
972 EDGE_FALLTHRU);
973
974 /* Traverse the basic block array placing edges. */
975 FOR_EACH_BB_FN (bb, cfun)
976 {
977 int mer;
978
979 if (!bb_to_omp_idx.is_empty ())
980 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
981
982 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
983 if (mer == 1)
984 ab_edge_goto.safe_push (bb);
985 else if (mer == 2)
986 ab_edge_call.safe_push (bb);
987
988 if (cur_region && bb_to_omp_idx.is_empty ())
989 bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
990 }
991
992 /* Computed gotos are hell to deal with, especially if there are
993 lots of them with a large number of destinations. So we factor
994 them to a common computed goto location before we build the
995 edge list. After we convert back to normal form, we will un-factor
996 the computed gotos since factoring introduces an unwanted jump.
997 For non-local gotos and abnormal edges from calls to calls that return
998 twice or forced labels, factor the abnormal edges too, by having all
999 abnormal edges from the calls go to a common artificial basic block
1000 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1001 basic block to all forced labels and calls returning twice.
1002 We do this per-OpenMP structured block, because those regions
1003 are guaranteed to be single entry single exit by the standard,
1004 so it is not allowed to enter or exit such regions abnormally this way,
1005 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1006 must not transfer control across SESE region boundaries. */
1007 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1008 {
1009 gimple_stmt_iterator gsi;
1010 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1011 basic_block *dispatcher_bbs = dispatcher_bb_array;
1012 int count = n_basic_blocks_for_fn (cfun);
1013
1014 if (!bb_to_omp_idx.is_empty ())
1015 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1016
1017 FOR_EACH_BB_FN (bb, cfun)
1018 {
1019 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1020 {
1021 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1022 tree target;
1023
1024 if (!label_stmt)
1025 break;
1026
1027 target = gimple_label_label (label_stmt);
1028
1029 /* Make an edge to every label block that has been marked as a
1030 potential target for a computed goto or a non-local goto. */
1031 if (FORCED_LABEL (target))
1032 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
1033 true);
1034 if (DECL_NONLOCAL (target))
1035 {
1036 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1037 false);
1038 break;
1039 }
1040 }
1041
1042 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1043 gsi_next_nondebug (&gsi);
1044 if (!gsi_end_p (gsi))
1045 {
1046 /* Make an edge to every setjmp-like call. */
1047 gimple *call_stmt = gsi_stmt (gsi);
1048 if (is_gimple_call (call_stmt)
1049 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1050 || gimple_call_builtin_p (call_stmt,
1051 BUILT_IN_SETJMP_RECEIVER)))
1052 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1053 false);
1054 }
1055 }
1056
1057 if (!bb_to_omp_idx.is_empty ())
1058 XDELETE (dispatcher_bbs);
1059 }
1060
1061 omp_free_regions ();
1062 }
1063
1064 /* Add SEQ after GSI. Start a new bb after GSI, and create further bbs as
1065 needed. Returns true if new bbs were created.
1066 Note: This is transitional code, and should not be used for new code. We
1067 should be able to get rid of this by rewriting all target va-arg
1068 gimplification hooks to use an interface gimple_build_cond_value as described
1069 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1070
1071 bool
1072 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1073 {
1074 gimple *stmt = gsi_stmt (*gsi);
1075 basic_block bb = gimple_bb (stmt);
1076 basic_block lastbb, afterbb;
1077 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1078 edge e;
1079 lastbb = make_blocks_1 (seq, bb);
1080 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1081 return false;
1082 e = split_block (bb, stmt);
1083 /* Move e->dest to come after the new basic blocks. */
1084 afterbb = e->dest;
1085 unlink_block (afterbb);
1086 link_block (afterbb, lastbb);
1087 redirect_edge_succ (e, bb->next_bb);
1088 bb = bb->next_bb;
1089 while (bb != afterbb)
1090 {
1091 struct omp_region *cur_region = NULL;
1092 profile_count cnt = profile_count::zero ();
1093 bool all = true;
1094
1095 int cur_omp_region_idx = 0;
1096 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1097 gcc_assert (!mer && !cur_region);
1098 add_bb_to_loop (bb, afterbb->loop_father);
1099
1100 edge e;
1101 edge_iterator ei;
1102 FOR_EACH_EDGE (e, ei, bb->preds)
1103 {
1104 if (e->count ().initialized_p ())
1105 cnt += e->count ();
1106 else
1107 all = false;
1108 }
1109 tree_guess_outgoing_edge_probabilities (bb);
1110 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1111 bb->count = cnt;
1112
1113 bb = bb->next_bb;
1114 }
1115 return true;
1116 }
1117
1118 /* Find the next available discriminator value for source line LINE. The
1119 discriminator distinguishes among several basic blocks that
1120 share a common locus, allowing for more accurate sample-based
1121 profiling. */
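/* Illustrative example (not from the sources): for

     if (a) b = 1; else b = 2;

   written on a single source line, the two arms end up in different basic
   blocks that share the same line, so one of them is handed discriminator 1
   by this counter while the other keeps 0, letting a sample-based profiler
   attribute its counts to the right block.  */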
1122
1123 static int
1124 next_discriminator_for_locus (int line)
1125 {
1126 struct locus_discrim_map item;
1127 struct locus_discrim_map **slot;
1128
1129 item.location_line = line;
1130 item.discriminator = 0;
1131 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1132 gcc_assert (slot);
1133 if (*slot == HTAB_EMPTY_ENTRY)
1134 {
1135 *slot = XNEW (struct locus_discrim_map);
1136 gcc_assert (*slot);
1137 (*slot)->location_line = line;
1138 (*slot)->discriminator = 0;
1139 }
1140 (*slot)->discriminator++;
1141 return (*slot)->discriminator;
1142 }
1143
1144 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1145
1146 static bool
1147 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1148 {
1149 expanded_location to;
1150
1151 if (locus1 == locus2)
1152 return true;
1153
1154 to = expand_location (locus2);
1155
1156 if (from->line != to.line)
1157 return false;
1158 if (from->file == to.file)
1159 return true;
1160 return (from->file != NULL
1161 && to.file != NULL
1162 && filename_cmp (from->file, to.file) == 0);
1163 }
1164
1165 /* Assign discriminators to each basic block. */
1166
1167 static void
1168 assign_discriminators (void)
1169 {
1170 basic_block bb;
1171
1172 FOR_EACH_BB_FN (bb, cfun)
1173 {
1174 edge e;
1175 edge_iterator ei;
1176 gimple *last = last_stmt (bb);
1177 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1178
1179 if (locus == UNKNOWN_LOCATION)
1180 continue;
1181
1182 expanded_location locus_e = expand_location (locus);
1183
1184 FOR_EACH_EDGE (e, ei, bb->succs)
1185 {
1186 gimple *first = first_non_label_stmt (e->dest);
1187 gimple *last = last_stmt (e->dest);
1188 if ((first && same_line_p (locus, &locus_e,
1189 gimple_location (first)))
1190 || (last && same_line_p (locus, &locus_e,
1191 gimple_location (last))))
1192 {
1193 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1194 bb->discriminator
1195 = next_discriminator_for_locus (locus_e.line);
1196 else
1197 e->dest->discriminator
1198 = next_discriminator_for_locus (locus_e.line);
1199 }
1200 }
1201 }
1202 }
1203
1204 /* Create the edges for a GIMPLE_COND starting at block BB. */
1205
1206 static void
1207 make_cond_expr_edges (basic_block bb)
1208 {
1209 gcond *entry = as_a <gcond *> (last_stmt (bb));
1210 gimple *then_stmt, *else_stmt;
1211 basic_block then_bb, else_bb;
1212 tree then_label, else_label;
1213 edge e;
1214
1215 gcc_assert (entry);
1216 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1217
1218 /* Entry basic blocks for each component. */
1219 then_label = gimple_cond_true_label (entry);
1220 else_label = gimple_cond_false_label (entry);
1221 then_bb = label_to_block (cfun, then_label);
1222 else_bb = label_to_block (cfun, else_label);
1223 then_stmt = first_stmt (then_bb);
1224 else_stmt = first_stmt (else_bb);
1225
1226 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1227 e->goto_locus = gimple_location (then_stmt);
1228 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1229 if (e)
1230 e->goto_locus = gimple_location (else_stmt);
1231
1232 /* We do not need the labels anymore. */
1233 gimple_cond_set_true_label (entry, NULL_TREE);
1234 gimple_cond_set_false_label (entry, NULL_TREE);
1235 }
1236
1237
1238 /* Called for each element in the hash table as we delete the
1239 edge to cases hash table.
1240
1241 Clear all the CASE_CHAINs to prevent problems with copying of
1242 SWITCH_EXPRs and structure sharing rules, then free the hash table
1243 element. */
1244
1245 bool
1246 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1247 {
1248 tree t, next;
1249
1250 for (t = value; t; t = next)
1251 {
1252 next = CASE_CHAIN (t);
1253 CASE_CHAIN (t) = NULL;
1254 }
1255
1256 return true;
1257 }
1258
1259 /* Start recording information mapping edges to case labels. */
1260
1261 void
1262 start_recording_case_labels (void)
1263 {
1264 gcc_assert (edge_to_cases == NULL);
1265 edge_to_cases = new hash_map<edge, tree>;
1266 touched_switch_bbs = BITMAP_ALLOC (NULL);
1267 }
1268
1269 /* Return nonzero if we are recording information for case labels. */
1270
1271 static bool
1272 recording_case_labels_p (void)
1273 {
1274 return (edge_to_cases != NULL);
1275 }
1276
1277 /* Stop recording information mapping edges to case labels and
1278 remove any information we have recorded. */
1279 void
1280 end_recording_case_labels (void)
1281 {
1282 bitmap_iterator bi;
1283 unsigned i;
1284 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1285 delete edge_to_cases;
1286 edge_to_cases = NULL;
1287 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1288 {
1289 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1290 if (bb)
1291 {
1292 gimple *stmt = last_stmt (bb);
1293 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1294 group_case_labels_stmt (as_a <gswitch *> (stmt));
1295 }
1296 }
1297 BITMAP_FREE (touched_switch_bbs);
1298 }
1299
1300 /* If we are inside a {start,end}_recording_cases block, then return
1301 a chain of CASE_LABEL_EXPRs from T which reference E.
1302
1303 Otherwise return NULL. */
1304
1305 static tree
1306 get_cases_for_edge (edge e, gswitch *t)
1307 {
1308 tree *slot;
1309 size_t i, n;
1310
1311 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1312 chains available. Return NULL so the caller can detect this case. */
1313 if (!recording_case_labels_p ())
1314 return NULL;
1315
1316 slot = edge_to_cases->get (e);
1317 if (slot)
1318 return *slot;
1319
1320 /* If we did not find E in the hash table, then this must be the first
1321 time we have been queried for information about E & T. Add all the
1322 elements from T to the hash table then perform the query again. */
1323
1324 n = gimple_switch_num_labels (t);
1325 for (i = 0; i < n; i++)
1326 {
1327 tree elt = gimple_switch_label (t, i);
1328 tree lab = CASE_LABEL (elt);
1329 basic_block label_bb = label_to_block (cfun, lab);
1330 edge this_edge = find_edge (e->src, label_bb);
1331
1332 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1333 a new chain. */
1334 tree &s = edge_to_cases->get_or_insert (this_edge);
1335 CASE_CHAIN (elt) = s;
1336 s = elt;
1337 }
1338
1339 return *edge_to_cases->get (e);
1340 }
1341
1342 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1343
1344 static void
1345 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1346 {
1347 size_t i, n;
1348
1349 n = gimple_switch_num_labels (entry);
1350
1351 for (i = 0; i < n; ++i)
1352 {
1353 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1354 make_edge (bb, label_bb, 0);
1355 }
1356 }
1357
1358
1359 /* Return the basic block holding label DEST. */
1360
1361 basic_block
1362 label_to_block (struct function *ifun, tree dest)
1363 {
1364 int uid = LABEL_DECL_UID (dest);
1365
1366 /* We would die hard when faced with an undefined label. Emit a label to
1367 the very first basic block. This will hopefully make even the dataflow
1368 and undefined variable warnings quite right. */
1369 if (seen_error () && uid < 0)
1370 {
1371 gimple_stmt_iterator gsi =
1372 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1373 gimple *stmt;
1374
1375 stmt = gimple_build_label (dest);
1376 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1377 uid = LABEL_DECL_UID (dest);
1378 }
1379 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1380 return NULL;
1381 return (*ifun->cfg->x_label_to_block_map)[uid];
1382 }
1383
1384 /* Create edges for a goto statement at block BB. Returns true
1385 if abnormal edges should be created. */
1386
1387 static bool
1388 make_goto_expr_edges (basic_block bb)
1389 {
1390 gimple_stmt_iterator last = gsi_last_bb (bb);
1391 gimple *goto_t = gsi_stmt (last);
1392
1393 /* A simple GOTO creates normal edges. */
1394 if (simple_goto_p (goto_t))
1395 {
1396 tree dest = gimple_goto_dest (goto_t);
1397 basic_block label_bb = label_to_block (cfun, dest);
1398 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1399 e->goto_locus = gimple_location (goto_t);
1400 gsi_remove (&last, true);
1401 return false;
1402 }
1403
1404 /* A computed GOTO creates abnormal edges. */
1405 return true;
1406 }
1407
1408 /* Create edges for an asm statement with labels at block BB. */
1409
1410 static void
1411 make_gimple_asm_edges (basic_block bb)
1412 {
1413 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1414 int i, n = gimple_asm_nlabels (stmt);
1415
1416 for (i = 0; i < n; ++i)
1417 {
1418 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1419 basic_block label_bb = label_to_block (cfun, label);
1420 make_edge (bb, label_bb, 0);
1421 }
1422 }
1423
1424 /*---------------------------------------------------------------------------
1425 Flowgraph analysis
1426 ---------------------------------------------------------------------------*/
1427
1428 /* Clean up useless labels in basic blocks. This is something we wish
1429 to do early because it allows us to group case labels before creating
1430 the edges for the CFG, and it speeds up block statement iterators in
1431 all passes later on.
1432 We rerun this pass after the CFG is created, to get rid of the labels that
1433 are no longer referenced. After that we do not run it any more, since
1434 (almost) no new labels should be created. */
1435
1436 /* A map from basic block index to the leading label of that block. */
1437 struct label_record
1438 {
1439 /* The label. */
1440 tree label;
1441
1442 /* True if the label is referenced from somewhere. */
1443 bool used;
1444 };
1445
1446 /* Given LABEL return the first label in the same basic block. */
1447
1448 static tree
1449 main_block_label (tree label, label_record *label_for_bb)
1450 {
1451 basic_block bb = label_to_block (cfun, label);
1452 tree main_label = label_for_bb[bb->index].label;
1453
1454 /* label_to_block possibly inserted undefined label into the chain. */
1455 if (!main_label)
1456 {
1457 label_for_bb[bb->index].label = label;
1458 main_label = label;
1459 }
1460
1461 label_for_bb[bb->index].used = true;
1462 return main_label;
1463 }
1464
1465 /* Clean up redundant labels within the exception tree. */
1466
1467 static void
1468 cleanup_dead_labels_eh (label_record *label_for_bb)
1469 {
1470 eh_landing_pad lp;
1471 eh_region r;
1472 tree lab;
1473 int i;
1474
1475 if (cfun->eh == NULL)
1476 return;
1477
1478 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1479 if (lp && lp->post_landing_pad)
1480 {
1481 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1482 if (lab != lp->post_landing_pad)
1483 {
1484 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1485 lp->post_landing_pad = lab;
1486 EH_LANDING_PAD_NR (lab) = lp->index;
1487 }
1488 }
1489
1490 FOR_ALL_EH_REGION (r)
1491 switch (r->type)
1492 {
1493 case ERT_CLEANUP:
1494 case ERT_MUST_NOT_THROW:
1495 break;
1496
1497 case ERT_TRY:
1498 {
1499 eh_catch c;
1500 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1501 {
1502 lab = c->label;
1503 if (lab)
1504 c->label = main_block_label (lab, label_for_bb);
1505 }
1506 }
1507 break;
1508
1509 case ERT_ALLOWED_EXCEPTIONS:
1510 lab = r->u.allowed.label;
1511 if (lab)
1512 r->u.allowed.label = main_block_label (lab, label_for_bb);
1513 break;
1514 }
1515 }
1516
1517
1518 /* Clean up redundant labels. This is a three-step process:
1519 1) Find the leading label for each block.
1520 2) Redirect all references to labels to the leading labels.
1521 3) Cleanup all useless labels. */
1522
1523 void
1524 cleanup_dead_labels (void)
1525 {
1526 basic_block bb;
1527 label_record *label_for_bb = XCNEWVEC (struct label_record,
1528 last_basic_block_for_fn (cfun));
1529
1530 /* Find a suitable label for each block. We use the first user-defined
1531 label if there is one, or otherwise just the first label we see. */
1532 FOR_EACH_BB_FN (bb, cfun)
1533 {
1534 gimple_stmt_iterator i;
1535
1536 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1537 {
1538 tree label;
1539 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1540
1541 if (!label_stmt)
1542 break;
1543
1544 label = gimple_label_label (label_stmt);
1545
1546 /* If we have not yet seen a label for the current block,
1547 remember this one and see if there are more labels. */
1548 if (!label_for_bb[bb->index].label)
1549 {
1550 label_for_bb[bb->index].label = label;
1551 continue;
1552 }
1553
1554 /* If we did see a label for the current block already, but it
1555 is an artificially created label, replace it if the current
1556 label is a user defined label. */
1557 if (!DECL_ARTIFICIAL (label)
1558 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1559 {
1560 label_for_bb[bb->index].label = label;
1561 break;
1562 }
1563 }
1564 }
1565
1566 /* Now redirect all jumps/branches to the selected label.
1567 First do so for each block ending in a control statement. */
1568 FOR_EACH_BB_FN (bb, cfun)
1569 {
1570 gimple *stmt = last_stmt (bb);
1571 tree label, new_label;
1572
1573 if (!stmt)
1574 continue;
1575
1576 switch (gimple_code (stmt))
1577 {
1578 case GIMPLE_COND:
1579 {
1580 gcond *cond_stmt = as_a <gcond *> (stmt);
1581 label = gimple_cond_true_label (cond_stmt);
1582 if (label)
1583 {
1584 new_label = main_block_label (label, label_for_bb);
1585 if (new_label != label)
1586 gimple_cond_set_true_label (cond_stmt, new_label);
1587 }
1588
1589 label = gimple_cond_false_label (cond_stmt);
1590 if (label)
1591 {
1592 new_label = main_block_label (label, label_for_bb);
1593 if (new_label != label)
1594 gimple_cond_set_false_label (cond_stmt, new_label);
1595 }
1596 }
1597 break;
1598
1599 case GIMPLE_SWITCH:
1600 {
1601 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1602 size_t i, n = gimple_switch_num_labels (switch_stmt);
1603
1604 /* Replace all destination labels. */
1605 for (i = 0; i < n; ++i)
1606 {
1607 tree case_label = gimple_switch_label (switch_stmt, i);
1608 label = CASE_LABEL (case_label);
1609 new_label = main_block_label (label, label_for_bb);
1610 if (new_label != label)
1611 CASE_LABEL (case_label) = new_label;
1612 }
1613 break;
1614 }
1615
1616 case GIMPLE_ASM:
1617 {
1618 gasm *asm_stmt = as_a <gasm *> (stmt);
1619 int i, n = gimple_asm_nlabels (asm_stmt);
1620
1621 for (i = 0; i < n; ++i)
1622 {
1623 tree cons = gimple_asm_label_op (asm_stmt, i);
1624 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1625 TREE_VALUE (cons) = label;
1626 }
1627 break;
1628 }
1629
1630 /* We have to handle gotos until they're removed, and we don't
1631 remove them until after we've created the CFG edges. */
1632 case GIMPLE_GOTO:
1633 if (!computed_goto_p (stmt))
1634 {
1635 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1636 label = gimple_goto_dest (goto_stmt);
1637 new_label = main_block_label (label, label_for_bb);
1638 if (new_label != label)
1639 gimple_goto_set_dest (goto_stmt, new_label);
1640 }
1641 break;
1642
1643 case GIMPLE_TRANSACTION:
1644 {
1645 gtransaction *txn = as_a <gtransaction *> (stmt);
1646
1647 label = gimple_transaction_label_norm (txn);
1648 if (label)
1649 {
1650 new_label = main_block_label (label, label_for_bb);
1651 if (new_label != label)
1652 gimple_transaction_set_label_norm (txn, new_label);
1653 }
1654
1655 label = gimple_transaction_label_uninst (txn);
1656 if (label)
1657 {
1658 new_label = main_block_label (label, label_for_bb);
1659 if (new_label != label)
1660 gimple_transaction_set_label_uninst (txn, new_label);
1661 }
1662
1663 label = gimple_transaction_label_over (txn);
1664 if (label)
1665 {
1666 new_label = main_block_label (label, label_for_bb);
1667 if (new_label != label)
1668 gimple_transaction_set_label_over (txn, new_label);
1669 }
1670 }
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 }
1677
1678 /* Do the same for the exception region tree labels. */
1679 cleanup_dead_labels_eh (label_for_bb);
1680
1681 /* Finally, purge dead labels. All user-defined labels and labels that
1682 can be the target of non-local gotos and labels which have their
1683 address taken are preserved. */
1684 FOR_EACH_BB_FN (bb, cfun)
1685 {
1686 gimple_stmt_iterator i;
1687 tree label_for_this_bb = label_for_bb[bb->index].label;
1688
1689 if (!label_for_this_bb)
1690 continue;
1691
1692 /* If the main label of the block is unused, we may still remove it. */
1693 if (!label_for_bb[bb->index].used)
1694 label_for_this_bb = NULL;
1695
1696 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1697 {
1698 tree label;
1699 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1700
1701 if (!label_stmt)
1702 break;
1703
1704 label = gimple_label_label (label_stmt);
1705
1706 if (label == label_for_this_bb
1707 || !DECL_ARTIFICIAL (label)
1708 || DECL_NONLOCAL (label)
1709 || FORCED_LABEL (label))
1710 gsi_next (&i);
1711 else
1712 {
1713 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1714 gsi_remove (&i, true);
1715 }
1716 }
1717 }
1718
1719 free (label_for_bb);
1720 }
1721
1722 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1723 the ones jumping to the same label.
1724 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1725
1726 bool
1727 group_case_labels_stmt (gswitch *stmt)
1728 {
1729 int old_size = gimple_switch_num_labels (stmt);
1730 int i, next_index, new_size;
1731 basic_block default_bb = NULL;
1732 hash_set<tree> *removed_labels = NULL;
1733
1734 default_bb = gimple_switch_default_bb (cfun, stmt);
1735
1736 /* Look for possible opportunities to merge cases. */
1737 new_size = i = 1;
1738 while (i < old_size)
1739 {
1740 tree base_case, base_high;
1741 basic_block base_bb;
1742
1743 base_case = gimple_switch_label (stmt, i);
1744
1745 gcc_assert (base_case);
1746 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1747
1748 /* Discard cases that have the same destination as the default case or
1749 whose destination blocks have already been removed as unreachable. */
1750 if (base_bb == NULL
1751 || base_bb == default_bb
1752 || (removed_labels
1753 && removed_labels->contains (CASE_LABEL (base_case))))
1754 {
1755 i++;
1756 continue;
1757 }
1758
1759 base_high = CASE_HIGH (base_case)
1760 ? CASE_HIGH (base_case)
1761 : CASE_LOW (base_case);
1762 next_index = i + 1;
1763
1764 /* Try to merge case labels. Break out when we reach the end
1765 of the label vector or when we cannot merge the next case
1766 label with the current one. */
1767 while (next_index < old_size)
1768 {
1769 tree merge_case = gimple_switch_label (stmt, next_index);
1770 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1771 wide_int bhp1 = wi::to_wide (base_high) + 1;
1772
1773 /* Merge the cases if they jump to the same place,
1774 and their ranges are consecutive. */
1775 if (merge_bb == base_bb
1776 && (removed_labels == NULL
1777 || !removed_labels->contains (CASE_LABEL (merge_case)))
1778 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1779 {
1780 base_high
1781 = (CASE_HIGH (merge_case)
1782 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1783 CASE_HIGH (base_case) = base_high;
1784 next_index++;
1785 }
1786 else
1787 break;
1788 }
1789
1790 /* Discard cases that have an unreachable destination block. */
1791 if (EDGE_COUNT (base_bb->succs) == 0
1792 && gimple_seq_unreachable_p (bb_seq (base_bb))
1793 /* Don't optimize this if __builtin_unreachable () is the
1794 implicitly added one by the C++ FE too early, before
1795 -Wreturn-type can be diagnosed. We'll optimize it later
1796 during switchconv pass or any other cfg cleanup. */
1797 && (gimple_in_ssa_p (cfun)
1798 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1799 != BUILTINS_LOCATION)))
1800 {
1801 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1802 if (base_edge != NULL)
1803 {
1804 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1805 !gsi_end_p (gsi); gsi_next (&gsi))
1806 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1807 {
1808 if (FORCED_LABEL (gimple_label_label (stmt))
1809 || DECL_NONLOCAL (gimple_label_label (stmt)))
1810 {
1811 /* Forced/non-local labels aren't going to be removed,
1812 but they will be moved to some neighbouring basic
1813 block. If some later case label refers to one of
1814 those labels, we should throw that case away rather
1815 than keeping it around and referring to some random
1816 other basic block without an edge to it. */
1817 if (removed_labels == NULL)
1818 removed_labels = new hash_set<tree>;
1819 removed_labels->add (gimple_label_label (stmt));
1820 }
1821 }
1822 else
1823 break;
1824 remove_edge_and_dominated_blocks (base_edge);
1825 }
1826 i = next_index;
1827 continue;
1828 }
1829
1830 if (new_size < i)
1831 gimple_switch_set_label (stmt, new_size,
1832 gimple_switch_label (stmt, i));
1833 i = next_index;
1834 new_size++;
1835 }
1836
1837 gcc_assert (new_size <= old_size);
1838
1839 if (new_size < old_size)
1840 gimple_switch_set_num_labels (stmt, new_size);
1841
1842 delete removed_labels;
1843 return new_size < old_size;
1844 }
1845
1846 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1847 and scan the sorted vector of cases. Combine the ones jumping to the
1848 same label. */
1849
1850 bool
1851 group_case_labels (void)
1852 {
1853 basic_block bb;
1854 bool changed = false;
1855
1856 FOR_EACH_BB_FN (bb, cfun)
1857 {
1858 gimple *stmt = last_stmt (bb);
1859 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1860 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1861 }
1862
1863 return changed;
1864 }
1865
1866 /* Checks whether we can merge block B into block A. */
1867
1868 static bool
1869 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1870 {
1871 gimple *stmt;
1872
1873 if (!single_succ_p (a))
1874 return false;
1875
1876 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1877 return false;
1878
1879 if (single_succ (a) != b)
1880 return false;
1881
1882 if (!single_pred_p (b))
1883 return false;
1884
1885 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1886 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1887 return false;
1888
1889 /* If A ends by a statement causing exceptions or something similar, we
1890 cannot merge the blocks. */
1891 stmt = last_stmt (a);
1892 if (stmt && stmt_ends_bb_p (stmt))
1893 return false;
1894
1895 /* Do not allow a block with only a non-local label to be merged. */
1896 if (stmt)
1897 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1898 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1899 return false;
1900
1901 /* Examine the labels at the beginning of B. */
1902 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1903 gsi_next (&gsi))
1904 {
1905 tree lab;
1906 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1907 if (!label_stmt)
1908 break;
1909 lab = gimple_label_label (label_stmt);
1910
1911 /* Do not remove user forced labels or for -O0 any user labels. */
1912 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1913 return false;
1914 }
1915
1916 /* Protect simple loop latches. We only want to avoid merging
1917 the latch with the loop header or with a block in another
1918 loop in this case. */
1919 if (current_loops
1920 && b->loop_father->latch == b
1921 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1922 && (b->loop_father->header == a
1923 || b->loop_father != a->loop_father))
1924 return false;
1925
1926 /* It must be possible to eliminate all phi nodes in B. If ssa form
1927 is not up-to-date and a name-mapping is registered, we cannot eliminate
1928 any phis. Symbols marked for renaming are never a problem though. */
1929 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
1930 gsi_next (&gsi))
1931 {
1932 gphi *phi = gsi.phi ();
1933 /* Technically only new names matter. */
1934 if (name_registered_for_update_p (PHI_RESULT (phi)))
1935 return false;
1936 }
1937
1938 /* When not optimizing, don't merge if we'd lose goto_locus. */
1939 if (!optimize
1940 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1941 {
1942 location_t goto_locus = single_succ_edge (a)->goto_locus;
1943 gimple_stmt_iterator prev, next;
1944 prev = gsi_last_nondebug_bb (a);
1945 next = gsi_after_labels (b);
1946 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1947 gsi_next_nondebug (&next);
1948 if ((gsi_end_p (prev)
1949 || gimple_location (gsi_stmt (prev)) != goto_locus)
1950 && (gsi_end_p (next)
1951 || gimple_location (gsi_stmt (next)) != goto_locus))
1952 return false;
1953 }
1954
1955 return true;
1956 }
1957
1958 /* Replaces all uses of NAME by VAL. */
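/* A minimal usage sketch (hypothetical caller): a pass that has proven
   x_3 is always 42 could do

     replace_uses_by (x_3, build_int_cst (TREE_TYPE (x_3), 42));

   after which x_3 has zero uses and can be released.  */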
1959
1960 void
1961 replace_uses_by (tree name, tree val)
1962 {
1963 imm_use_iterator imm_iter;
1964 use_operand_p use;
1965 gimple *stmt;
1966 edge e;
1967
1968 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1969 {
1970 /* Mark the block if we change the last stmt in it. */
1971 if (cfgcleanup_altered_bbs
1972 && stmt_ends_bb_p (stmt))
1973 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1974
1975 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1976 {
1977 replace_exp (use, val);
1978
1979 if (gimple_code (stmt) == GIMPLE_PHI)
1980 {
1981 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
1982 PHI_ARG_INDEX_FROM_USE (use));
1983 if (e->flags & EDGE_ABNORMAL
1984 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
1985 {
1986 /* This can only occur for virtual operands, since
1987 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1988 would prevent replacement. */
1989 gcc_checking_assert (virtual_operand_p (name));
1990 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1991 }
1992 }
1993 }
1994
1995 if (gimple_code (stmt) != GIMPLE_PHI)
1996 {
1997 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1998 gimple *orig_stmt = stmt;
1999 size_t i;
2000
2001 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2002 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2003 only change sth from non-invariant to invariant, and only
2004 when propagating constants. */
2005 if (is_gimple_min_invariant (val))
2006 for (i = 0; i < gimple_num_ops (stmt); i++)
2007 {
2008 tree op = gimple_op (stmt, i);
2009 /* Operands may be empty here. For example, the labels
2010 of a GIMPLE_COND are nulled out following the creation
2011 of the corresponding CFG edges. */
2012 if (op && TREE_CODE (op) == ADDR_EXPR)
2013 recompute_tree_invariant_for_addr_expr (op);
2014 }
2015
2016 if (fold_stmt (&gsi))
2017 stmt = gsi_stmt (gsi);
2018
2019 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2020 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2021
2022 update_stmt (stmt);
2023 }
2024 }
2025
2026 gcc_checking_assert (has_zero_uses (name));
2027
2028 /* Also update the trees stored in loop structures. */
2029 if (current_loops)
2030 {
2031 for (auto loop : loops_list (cfun, 0))
2032 substitute_in_loop_info (loop, name, val);
2033 }
2034 }
2035
2036 /* Merge block B into block A. */
2037
2038 static void
2039 gimple_merge_blocks (basic_block a, basic_block b)
2040 {
2041 gimple_stmt_iterator last, gsi;
2042 gphi_iterator psi;
2043
2044 if (dump_file)
2045 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2046
2047 /* Remove all single-valued PHI nodes from block B of the form
2048 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
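  /* For instance (illustrative), if B starts with

       x_2 = PHI <x_1(A)>

     then all uses of x_2 are replaced by x_1 (or a copy x_2 = x_1 is
     emitted at the end of A when direct propagation is not possible)
     and the PHI node is removed.  */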
2049 gsi = gsi_last_bb (a);
2050 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2051 {
2052 gimple *phi = gsi_stmt (psi);
2053 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2054 gimple *copy;
2055 bool may_replace_uses = (virtual_operand_p (def)
2056 || may_propagate_copy (def, use));
2057
2058 /* In case we maintain loop closed ssa form, do not propagate arguments
2059 of loop exit phi nodes. */
2060 if (current_loops
2061 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2062 && !virtual_operand_p (def)
2063 && TREE_CODE (use) == SSA_NAME
2064 && a->loop_father != b->loop_father)
2065 may_replace_uses = false;
2066
2067 if (!may_replace_uses)
2068 {
2069 gcc_assert (!virtual_operand_p (def));
2070
2071 /* Note that just emitting the copies is fine -- there is no problem
2072 with ordering of phi nodes. This is because A is the single
2073 predecessor of B, therefore results of the phi nodes cannot
2074 appear as arguments of the phi nodes. */
2075 copy = gimple_build_assign (def, use);
2076 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2077 remove_phi_node (&psi, false);
2078 }
2079 else
2080 {
2081 /* If we deal with a PHI for virtual operands, we can simply
2082 propagate these without fussing with folding or updating
2083 the stmt. */
2084 if (virtual_operand_p (def))
2085 {
2086 imm_use_iterator iter;
2087 use_operand_p use_p;
2088 gimple *stmt;
2089
2090 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2091 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2092 SET_USE (use_p, use);
2093
2094 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2095 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2096 }
2097 else
2098 replace_uses_by (def, use);
2099
2100 remove_phi_node (&psi, true);
2101 }
2102 }
2103
2104 /* Ensure that B follows A. */
2105 move_block_after (b, a);
2106
2107 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2108 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2109
2110 /* Remove labels from B and set gimple_bb to A for other statements. */
2111 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2112 {
2113 gimple *stmt = gsi_stmt (gsi);
2114 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2115 {
2116 tree label = gimple_label_label (label_stmt);
2117 int lp_nr;
2118
2119 gsi_remove (&gsi, false);
2120
2121 /* Now that we can thread computed gotos, we might have
2122 a situation where we have a forced label in block B.
2123 However, the label at the start of block B might still be
2124 used in other ways (think about the runtime checking for
2125 Fortran assigned gotos). So we cannot just delete the
2126 label. Instead we move the label to the start of block A. */
2127 if (FORCED_LABEL (label))
2128 {
2129 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2130 tree first_label = NULL_TREE;
2131 if (!gsi_end_p (dest_gsi))
2132 if (glabel *first_label_stmt
2133 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2134 first_label = gimple_label_label (first_label_stmt);
2135 if (first_label
2136 && (DECL_NONLOCAL (first_label)
2137 || EH_LANDING_PAD_NR (first_label) != 0))
2138 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2139 else
2140 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2141 }
2142 /* Other user labels are kept around in the form of a debug stmt. */
2143 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2144 {
2145 gimple *dbg = gimple_build_debug_bind (label,
2146 integer_zero_node,
2147 stmt);
2148 gimple_debug_bind_reset_value (dbg);
2149 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2150 }
2151
2152 lp_nr = EH_LANDING_PAD_NR (label);
2153 if (lp_nr)
2154 {
2155 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2156 lp->post_landing_pad = NULL;
2157 }
2158 }
2159 else
2160 {
2161 gimple_set_bb (stmt, a);
2162 gsi_next (&gsi);
2163 }
2164 }
2165
2166 /* When merging two BBs, if their counts are different, the larger count
2167 is selected as the new bb count. This is to handle inconsistent
2168 profiles. */
2169 if (a->loop_father == b->loop_father)
2170 {
2171 a->count = a->count.merge (b->count);
2172 }
2173
2174 /* Merge the sequences. */
2175 last = gsi_last_bb (a);
2176 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2177 set_bb_seq (b, NULL);
2178
2179 if (cfgcleanup_altered_bbs)
2180 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2181 }
2182
2183
2184 /* Return the one of two successors of BB that is not reachable by a
2185 complex edge, if there is one. Else, return BB. We use
2186 this in optimizations that use post-dominators for their heuristics,
2187 to catch the cases in C++ where function calls are involved. */
2188
2189 basic_block
2190 single_noncomplex_succ (basic_block bb)
2191 {
2192 edge e0, e1;
2193 if (EDGE_COUNT (bb->succs) != 2)
2194 return bb;
2195
2196 e0 = EDGE_SUCC (bb, 0);
2197 e1 = EDGE_SUCC (bb, 1);
2198 if (e0->flags & EDGE_COMPLEX)
2199 return e1->dest;
2200 if (e1->flags & EDGE_COMPLEX)
2201 return e0->dest;
2202
2203 return bb;
2204 }
2205
2206 /* CALL is a GIMPLE_CALL. Set current_function_calls_* flags. */
2207
2208 void
2209 notice_special_calls (gcall *call)
2210 {
2211 int flags = gimple_call_flags (call);
2212
2213 if (flags & ECF_MAY_BE_ALLOCA)
2214 cfun->calls_alloca = true;
2215 if (flags & ECF_RETURNS_TWICE)
2216 cfun->calls_setjmp = true;
2217 }
2218
2219
2220 /* Clear flags set by notice_special_calls. Used by dead code removal
2221 to update the flags. */
2222
2223 void
2224 clear_special_calls (void)
2225 {
2226 cfun->calls_alloca = false;
2227 cfun->calls_setjmp = false;
2228 }
2229
2230 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2231
2232 static void
2233 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2234 {
2235 /* Since this block is no longer reachable, we can just delete all
2236 of its PHI nodes. */
2237 remove_phi_nodes (bb);
2238
2239 /* Remove edges to BB's successors. */
2240 while (EDGE_COUNT (bb->succs) > 0)
2241 remove_edge (EDGE_SUCC (bb, 0));
2242 }
2243
2244
2245 /* Remove statements of basic block BB. */
2246
2247 static void
2248 remove_bb (basic_block bb)
2249 {
2250 gimple_stmt_iterator i;
2251
2252 if (dump_file)
2253 {
2254 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2255 if (dump_flags & TDF_DETAILS)
2256 {
2257 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2258 fprintf (dump_file, "\n");
2259 }
2260 }
2261
2262 if (current_loops)
2263 {
2264 class loop *loop = bb->loop_father;
2265
2266 /* If a loop gets removed, clean up the information associated
2267 with it. */
2268 if (loop->latch == bb
2269 || loop->header == bb)
2270 free_numbers_of_iterations_estimates (loop);
2271 }
2272
2273 /* Remove all the instructions in the block. */
2274 if (bb_seq (bb) != NULL)
2275 {
2276 /* Walk backwards so as to get a chance to substitute all
2277 released DEFs into debug stmts. See
2278 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2279 details. */
2280 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2281 {
2282 gimple *stmt = gsi_stmt (i);
2283 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2284 if (label_stmt
2285 && (FORCED_LABEL (gimple_label_label (label_stmt))
2286 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2287 {
2288 basic_block new_bb;
2289 gimple_stmt_iterator new_gsi;
2290
2291 /* A non-reachable non-local label may still be referenced.
2292 But it no longer needs to carry the extra semantics of
2293 non-locality. */
2294 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2295 {
2296 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2297 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2298 }
2299
2300 new_bb = bb->prev_bb;
2301 /* Don't move any labels into ENTRY block. */
2302 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2303 {
2304 new_bb = single_succ (new_bb);
2305 gcc_assert (new_bb != bb);
2306 }
2307 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2308 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2309 || (bb_to_omp_idx[bb->index]
2310 != bb_to_omp_idx[new_bb->index])))
2311 {
2312 /* During cfg pass make sure to put orphaned labels
2313 into the right OMP region. */
2314 unsigned int i;
2315 int idx;
2316 new_bb = NULL;
2317 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2318 if (i >= NUM_FIXED_BLOCKS
2319 && idx == bb_to_omp_idx[bb->index]
2320 && i != (unsigned) bb->index)
2321 {
2322 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2323 break;
2324 }
2325 if (new_bb == NULL)
2326 {
2327 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2328 gcc_assert (new_bb != bb);
2329 }
2330 }
2331 new_gsi = gsi_after_labels (new_bb);
2332 gsi_remove (&i, false);
2333 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2334 }
2335 else
2336 {
2337 /* Release SSA definitions. */
2338 release_defs (stmt);
2339 gsi_remove (&i, true);
2340 }
2341
2342 if (gsi_end_p (i))
2343 i = gsi_last_bb (bb);
2344 else
2345 gsi_prev (&i);
2346 }
2347 }
2348
2349 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2350 bb_to_omp_idx[bb->index] = -1;
2351 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2352 bb->il.gimple.seq = NULL;
2353 bb->il.gimple.phi_nodes = NULL;
2354 }
2355
2356
2357 /* Given a basic block BB and a value VAL for use in the final statement
2358 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2359 the edge that will be taken out of the block.
2360 If VAL is NULL_TREE, then the current value of the final statement's
2361 predicate or index is used.
2362 If the value does not match a unique edge, NULL is returned. */
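/* Illustrative use (hypothetical caller): a propagation pass that has
   determined the controlling predicate of BB is constant true could call

     edge e = find_taken_edge (bb, integer_one_node);

   and, when E is non-NULL, treat BB's other outgoing edges as not
   executable.  */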
2363
2364 edge
2365 find_taken_edge (basic_block bb, tree val)
2366 {
2367 gimple *stmt;
2368
2369 stmt = last_stmt (bb);
2370
2371 /* Handle ENTRY and EXIT. */
2372 if (!stmt)
2373 return NULL;
2374
2375 if (gimple_code (stmt) == GIMPLE_COND)
2376 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2377
2378 if (gimple_code (stmt) == GIMPLE_SWITCH)
2379 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2380
2381 if (computed_goto_p (stmt))
2382 {
2383 /* Only optimize if the argument is a label; if the argument is
2384 not a label then we cannot construct a proper CFG.
2385
2386 It may be the case that we only need to allow the LABEL_REF to
2387 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2388 appear inside a LABEL_EXPR just to be safe. */
2389 if (val
2390 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2391 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2392 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2393 }
2394
2395 /* Otherwise we only know the taken successor edge if it's unique. */
2396 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2397 }
2398
2399 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2400 statement, determine which of the outgoing edges will be taken out of the
2401 block. Return NULL if either edge may be taken. */
2402
2403 static edge
2404 find_taken_edge_computed_goto (basic_block bb, tree val)
2405 {
2406 basic_block dest;
2407 edge e = NULL;
2408
2409 dest = label_to_block (cfun, val);
2410 if (dest)
2411 e = find_edge (bb, dest);
2412
2413 /* It's possible for find_edge to return NULL here on invalid code
2414 that abuses the labels-as-values extension (e.g. code that attempts to
2415 jump *between* functions via stored labels-as-values; PR 84136).
2416 If so, then we simply return that NULL for the edge.
2417 We don't currently have a way of detecting such invalid code, so we
2418 can't assert that it was the case when a NULL edge occurs here. */
2419
2420 return e;
2421 }
2422
2423 /* Given COND_STMT and a constant value VAL for use as the predicate,
2424 determine which of the two edges will be taken out of
2425 the statement's block. Return NULL if either edge may be taken.
2426 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2427 is used. */
2428
2429 static edge
2430 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2431 {
2432 edge true_edge, false_edge;
2433
2434 if (val == NULL_TREE)
2435 {
2436 /* Use the current value of the predicate. */
2437 if (gimple_cond_true_p (cond_stmt))
2438 val = integer_one_node;
2439 else if (gimple_cond_false_p (cond_stmt))
2440 val = integer_zero_node;
2441 else
2442 return NULL;
2443 }
2444 else if (TREE_CODE (val) != INTEGER_CST)
2445 return NULL;
2446
2447 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2448 &true_edge, &false_edge);
2449
2450 return (integer_zerop (val) ? false_edge : true_edge);
2451 }
2452
2453 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2454 which edge will be taken out of the statement's block. Return NULL if any
2455 edge may be taken.
2456 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2457 is used. */
2458
2459 edge
2460 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2461 {
2462 basic_block dest_bb;
2463 edge e;
2464 tree taken_case;
2465
2466 if (gimple_switch_num_labels (switch_stmt) == 1)
2467 taken_case = gimple_switch_default_label (switch_stmt);
2468 else
2469 {
2470 if (val == NULL_TREE)
2471 val = gimple_switch_index (switch_stmt);
2472 if (TREE_CODE (val) != INTEGER_CST)
2473 return NULL;
2474 else
2475 taken_case = find_case_label_for_value (switch_stmt, val);
2476 }
2477 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2478
2479 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2480 gcc_assert (e);
2481 return e;
2482 }
2483
2484
2485 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2486 We can make optimal use here of the fact that the case labels are
2487 sorted: We can do a binary search for a case matching VAL. */
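/* Worked example (illustrative): with labels <default, 1, 5 ... 7, 10>
   and VAL == 6, the first probe lands on the 5 ... 7 entry; CASE_LOW (5)
   compares <= 6 and CASE_HIGH (7) compares >= 6, so that range label is
   returned without inspecting the remaining entries.  */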
2488
2489 tree
2490 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2491 {
2492 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2493 tree default_case = gimple_switch_default_label (switch_stmt);
2494
2495 for (low = 0, high = n; high - low > 1; )
2496 {
2497 size_t i = (high + low) / 2;
2498 tree t = gimple_switch_label (switch_stmt, i);
2499 int cmp;
2500
2501 /* Cache the result of comparing CASE_LOW and val. */
2502 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2503
2504 if (cmp > 0)
2505 high = i;
2506 else
2507 low = i;
2508
2509 if (CASE_HIGH (t) == NULL)
2510 {
2511 /* A single-valued case label. */
2512 if (cmp == 0)
2513 return t;
2514 }
2515 else
2516 {
2517 /* A case range. We can only handle integer ranges. */
2518 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2519 return t;
2520 }
2521 }
2522
2523 return default_case;
2524 }
2525
2526
2527 /* Dump a basic block on stderr. */
2528
2529 void
2530 gimple_debug_bb (basic_block bb)
2531 {
2532 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2533 }
2534
2535
2536 /* Dump basic block with index N on stderr. */
2537
2538 basic_block
2539 gimple_debug_bb_n (int n)
2540 {
2541 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2542 return BASIC_BLOCK_FOR_FN (cfun, n);
2543 }
2544
2545
2546 /* Dump the CFG on stderr.
2547
2548 FLAGS are the same used by the tree dumping functions
2549 (see TDF_* in dumpfile.h). */
2550
2551 void
2552 gimple_debug_cfg (dump_flags_t flags)
2553 {
2554 gimple_dump_cfg (stderr, flags);
2555 }
2556
2557
2558 /* Dump the program showing basic block boundaries on the given FILE.
2559
2560 FLAGS are the same used by the tree dumping functions (see TDF_* in
2561 tree.h). */
2562
2563 void
2564 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2565 {
2566 if (flags & TDF_DETAILS)
2567 {
2568 dump_function_header (file, current_function_decl, flags);
2569 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2570 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2571 last_basic_block_for_fn (cfun));
2572
2573 brief_dump_cfg (file, flags);
2574 fprintf (file, "\n");
2575 }
2576
2577 if (flags & TDF_STATS)
2578 dump_cfg_stats (file);
2579
2580 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2581 }
2582
2583
2584 /* Dump CFG statistics on FILE. */
2585
2586 void
2587 dump_cfg_stats (FILE *file)
2588 {
2589 static long max_num_merged_labels = 0;
2590 unsigned long size, total = 0;
2591 long num_edges;
2592 basic_block bb;
2593 const char * const fmt_str = "%-30s%-13s%12s\n";
2594 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2595 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2596 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2597 const char *funcname = current_function_name ();
2598
2599 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2600
2601 fprintf (file, "---------------------------------------------------------\n");
2602 fprintf (file, fmt_str, "", " Number of ", "Memory");
2603 fprintf (file, fmt_str, "", " instances ", "used ");
2604 fprintf (file, "---------------------------------------------------------\n");
2605
2606 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2607 total += size;
2608 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2609 SIZE_AMOUNT (size));
2610
2611 num_edges = 0;
2612 FOR_EACH_BB_FN (bb, cfun)
2613 num_edges += EDGE_COUNT (bb->succs);
2614 size = num_edges * sizeof (class edge_def);
2615 total += size;
2616 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2617
2618 fprintf (file, "---------------------------------------------------------\n");
2619 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2620 SIZE_AMOUNT (total));
2621 fprintf (file, "---------------------------------------------------------\n");
2622 fprintf (file, "\n");
2623
2624 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2625 max_num_merged_labels = cfg_stats.num_merged_labels;
2626
2627 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2628 cfg_stats.num_merged_labels, max_num_merged_labels);
2629
2630 fprintf (file, "\n");
2631 }
2632
2633
2634 /* Dump CFG statistics on stderr. Keep extern so that it's always
2635 linked in the final executable. */
2636
2637 DEBUG_FUNCTION void
2638 debug_cfg_stats (void)
2639 {
2640 dump_cfg_stats (stderr);
2641 }
2642
2643 /*---------------------------------------------------------------------------
2644 Miscellaneous helpers
2645 ---------------------------------------------------------------------------*/
2646
2647 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2648 flow. Transfers of control flow associated with EH are excluded. */
2649
2650 static bool
2651 call_can_make_abnormal_goto (gimple *t)
2652 {
2653 /* If the function has no non-local labels, then a call cannot make an
2654 abnormal transfer of control. */
2655 if (!cfun->has_nonlocal_label
2656 && !cfun->calls_setjmp)
2657 return false;
2658
2659 /* Likewise if the call has no side effects. */
2660 if (!gimple_has_side_effects (t))
2661 return false;
2662
2663 /* Likewise if the called function is leaf. */
2664 if (gimple_call_flags (t) & ECF_LEAF)
2665 return false;
2666
2667 return true;
2668 }
2669
2670
2671 /* Return true if T can make an abnormal transfer of control flow.
2672 Transfers of control flow associated with EH are excluded. */
2673
2674 bool
2675 stmt_can_make_abnormal_goto (gimple *t)
2676 {
2677 if (computed_goto_p (t))
2678 return true;
2679 if (is_gimple_call (t))
2680 return call_can_make_abnormal_goto (t);
2681 return false;
2682 }
2683
2684
2685 /* Return true if T represents a stmt that always transfers control. */
2686
2687 bool
2688 is_ctrl_stmt (gimple *t)
2689 {
2690 switch (gimple_code (t))
2691 {
2692 case GIMPLE_COND:
2693 case GIMPLE_SWITCH:
2694 case GIMPLE_GOTO:
2695 case GIMPLE_RETURN:
2696 case GIMPLE_RESX:
2697 return true;
2698 default:
2699 return false;
2700 }
2701 }
2702
2703
2704 /* Return true if T is a statement that may alter the flow of control
2705 (e.g., a call to a non-returning function). */
2706
2707 bool
2708 is_ctrl_altering_stmt (gimple *t)
2709 {
2710 gcc_assert (t);
2711
2712 switch (gimple_code (t))
2713 {
2714 case GIMPLE_CALL:
2715 /* Per stmt call flag indicates whether the call could alter
2716 control flow. */
2717 if (gimple_call_ctrl_altering_p (t))
2718 return true;
2719 break;
2720
2721 case GIMPLE_EH_DISPATCH:
2722 /* EH_DISPATCH branches to the individual catch handlers at
2723 this level of a try or allowed-exceptions region. It can
2724 fallthru to the next statement as well. */
2725 return true;
2726
2727 case GIMPLE_ASM:
2728 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2729 return true;
2730 break;
2731
2732 CASE_GIMPLE_OMP:
2733 /* OpenMP directives alter control flow. */
2734 return true;
2735
2736 case GIMPLE_TRANSACTION:
2737 /* A transaction start alters control flow. */
2738 return true;
2739
2740 default:
2741 break;
2742 }
2743
2744 /* If a statement can throw, it alters control flow. */
2745 return stmt_can_throw_internal (cfun, t);
2746 }
2747
2748
2749 /* Return true if T is a simple local goto. */
2750
2751 bool
2752 simple_goto_p (gimple *t)
2753 {
2754 return (gimple_code (t) == GIMPLE_GOTO
2755 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2756 }
2757
2758
2759 /* Return true if STMT should start a new basic block. PREV_STMT is
2760 the statement preceding STMT. It is used when STMT is a label or a
2761 case label. Labels should only start a new basic block if their
2762 previous statement wasn't a label. Otherwise, sequence of labels
2763 would generate unnecessary basic blocks that only contain a single
2764 label. */
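/* For example (illustrative), in

     L1:
     L2:
     x_1 = 1;

   only L1 opens a new basic block; L2 is counted as a merged label and
   folded into the same block, provided the preceding label is artificial
   and not non-local (see the checks below).  */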
2765
2766 static inline bool
2767 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2768 {
2769 if (stmt == NULL)
2770 return false;
2771
2772 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2773 any nondebug stmts in the block. We don't want to start another
2774 block in this case: the debug stmt will already have started the
2775 one STMT would start if we weren't outputting debug stmts. */
2776 if (prev_stmt && is_gimple_debug (prev_stmt))
2777 return false;
2778
2779 /* Labels start a new basic block only if the preceding statement
2780 wasn't a label of the same type. This prevents the creation of
2781 consecutive blocks that have nothing but a single label. */
2782 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2783 {
2784 /* Nonlocal and computed GOTO targets always start a new block. */
2785 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2786 || FORCED_LABEL (gimple_label_label (label_stmt)))
2787 return true;
2788
2789 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2790 {
2791 if (DECL_NONLOCAL (gimple_label_label (plabel))
2792 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2793 return true;
2794
2795 cfg_stats.num_merged_labels++;
2796 return false;
2797 }
2798 else
2799 return true;
2800 }
2801 else if (gimple_code (stmt) == GIMPLE_CALL)
2802 {
2803 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2804 /* setjmp acts similar to a nonlocal GOTO target and thus should
2805 start a new block. */
2806 return true;
2807 if (gimple_call_internal_p (stmt, IFN_PHI)
2808 && prev_stmt
2809 && gimple_code (prev_stmt) != GIMPLE_LABEL
2810 && (gimple_code (prev_stmt) != GIMPLE_CALL
2811 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2812 /* PHI nodes start a new block unless preceded by a label
2813 or another PHI. */
2814 return true;
2815 }
2816
2817 return false;
2818 }
2819
2820
2821 /* Return true if T should end a basic block. */
2822
2823 bool
2824 stmt_ends_bb_p (gimple *t)
2825 {
2826 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2827 }
2828
2829 /* Remove block annotations and other data structures. */
2830
2831 void
2832 delete_tree_cfg_annotations (struct function *fn)
2833 {
2834 vec_free (label_to_block_map_for_fn (fn));
2835 }
2836
2837 /* Return the virtual phi in BB. */
2838
2839 gphi *
2840 get_virtual_phi (basic_block bb)
2841 {
2842 for (gphi_iterator gsi = gsi_start_phis (bb);
2843 !gsi_end_p (gsi);
2844 gsi_next (&gsi))
2845 {
2846 gphi *phi = gsi.phi ();
2847
2848 if (virtual_operand_p (PHI_RESULT (phi)))
2849 return phi;
2850 }
2851
2852 return NULL;
2853 }
2854
2855 /* Return the first statement in basic block BB. */
2856
2857 gimple *
2858 first_stmt (basic_block bb)
2859 {
2860 gimple_stmt_iterator i = gsi_start_bb (bb);
2861 gimple *stmt = NULL;
2862
2863 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2864 {
2865 gsi_next (&i);
2866 stmt = NULL;
2867 }
2868 return stmt;
2869 }
2870
2871 /* Return the first non-label statement in basic block BB. */
2872
2873 static gimple *
2874 first_non_label_stmt (basic_block bb)
2875 {
2876 gimple_stmt_iterator i = gsi_start_bb (bb);
2877 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2878 gsi_next (&i);
2879 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2880 }
2881
2882 /* Return the last statement in basic block BB. */
2883
2884 gimple *
2885 last_stmt (basic_block bb)
2886 {
2887 gimple_stmt_iterator i = gsi_last_bb (bb);
2888 gimple *stmt = NULL;
2889
2890 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2891 {
2892 gsi_prev (&i);
2893 stmt = NULL;
2894 }
2895 return stmt;
2896 }
2897
2898 /* Return the last statement of an otherwise empty block. Return NULL
2899 if the block is totally empty, or if it contains more than one
2900 statement. */
2901
2902 gimple *
2903 last_and_only_stmt (basic_block bb)
2904 {
2905 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2906 gimple *last, *prev;
2907
2908 if (gsi_end_p (i))
2909 return NULL;
2910
2911 last = gsi_stmt (i);
2912 gsi_prev_nondebug (&i);
2913 if (gsi_end_p (i))
2914 return last;
2915
2916 /* Empty statements should no longer appear in the instruction stream.
2917 Everything that might have appeared before should be deleted by
2918 remove_useless_stmts, and the optimizers should just gsi_remove
2919 instead of smashing with build_empty_stmt.
2920
2921 Thus the only thing that should appear here in a block containing
2922 one executable statement is a label. */
2923 prev = gsi_stmt (i);
2924 if (gimple_code (prev) == GIMPLE_LABEL)
2925 return last;
2926 else
2927 return NULL;
2928 }
2929
2930 /* Returns the basic block after which the new basic block created
2931 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2932 near its "logical" location. This is of most help to humans looking
2933 at debugging dumps. */
2934
2935 basic_block
2936 split_edge_bb_loc (edge edge_in)
2937 {
2938 basic_block dest = edge_in->dest;
2939 basic_block dest_prev = dest->prev_bb;
2940
2941 if (dest_prev)
2942 {
2943 edge e = find_edge (dest_prev, dest);
2944 if (e && !(e->flags & EDGE_COMPLEX))
2945 return edge_in->src;
2946 }
2947 return dest_prev;
2948 }
2949
2950 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2951 Abort on abnormal edges. */
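/* Schematically (illustrative), splitting the edge A -> B yields

     A -> N -> B

   where N is the new empty block that is returned; the PHI arguments in B
   that were associated with the split edge now flow in through the
   fallthru edge N -> B.  */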
2952
2953 static basic_block
2954 gimple_split_edge (edge edge_in)
2955 {
2956 basic_block new_bb, after_bb, dest;
2957 edge new_edge, e;
2958
2959 /* Abnormal edges cannot be split. */
2960 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2961
2962 dest = edge_in->dest;
2963
2964 after_bb = split_edge_bb_loc (edge_in);
2965
2966 new_bb = create_empty_bb (after_bb);
2967 new_bb->count = edge_in->count ();
2968
2969 /* We want to avoid re-allocating PHIs when we first
2970 add the fallthru edge from new_bb to dest but we also
2971 want to avoid changing PHI argument order when
2972 first redirecting edge_in away from dest. The former
2973 avoids changing PHI argument order by adding them
2974 last and then the redirection swapping it back into
2975 place by means of unordered remove.
2976 So hack around things by temporarily removing all PHIs
2977 from the destination during the edge redirection and then
2978 making sure the edges stay in order. */
2979 gimple_seq saved_phis = phi_nodes (dest);
2980 unsigned old_dest_idx = edge_in->dest_idx;
2981 set_phi_nodes (dest, NULL);
2982 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
2983 e = redirect_edge_and_branch (edge_in, new_bb);
2984 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
2985 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
2986 dest->il.gimple.phi_nodes = saved_phis;
2987
2988 return new_bb;
2989 }
2990
2991
2992 /* Verify properties of the address expression T whose base should be
2993 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2994
2995 static bool
2996 verify_address (tree t, bool verify_addressable)
2997 {
2998 bool old_constant;
2999 bool old_side_effects;
3000 bool new_constant;
3001 bool new_side_effects;
3002
3003 old_constant = TREE_CONSTANT (t);
3004 old_side_effects = TREE_SIDE_EFFECTS (t);
3005
3006 recompute_tree_invariant_for_addr_expr (t);
3007 new_side_effects = TREE_SIDE_EFFECTS (t);
3008 new_constant = TREE_CONSTANT (t);
3009
3010 if (old_constant != new_constant)
3011 {
3012 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3013 return true;
3014 }
3015 if (old_side_effects != new_side_effects)
3016 {
3017 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3018 return true;
3019 }
3020
3021 tree base = TREE_OPERAND (t, 0);
3022 while (handled_component_p (base))
3023 base = TREE_OPERAND (base, 0);
3024
3025 if (!(VAR_P (base)
3026 || TREE_CODE (base) == PARM_DECL
3027 || TREE_CODE (base) == RESULT_DECL))
3028 return false;
3029
3030 if (verify_addressable && !TREE_ADDRESSABLE (base))
3031 {
3032 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3033 return true;
3034 }
3035
3036 return false;
3037 }
3038
3039
3040 /* Verify if EXPR is a valid GIMPLE reference expression. If
3041 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3042 if there is an error, otherwise false. */
3043
3044 static bool
3045 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3046 {
3047 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3048
3049 if (TREE_CODE (expr) == REALPART_EXPR
3050 || TREE_CODE (expr) == IMAGPART_EXPR
3051 || TREE_CODE (expr) == BIT_FIELD_REF)
3052 {
3053 tree op = TREE_OPERAND (expr, 0);
3054 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3055 {
3056 error ("non-scalar %qs", code_name);
3057 return true;
3058 }
3059
3060 if (TREE_CODE (expr) == BIT_FIELD_REF)
3061 {
3062 tree t1 = TREE_OPERAND (expr, 1);
3063 tree t2 = TREE_OPERAND (expr, 2);
3064 poly_uint64 size, bitpos;
3065 if (!poly_int_tree_p (t1, &size)
3066 || !poly_int_tree_p (t2, &bitpos)
3067 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3068 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3069 {
3070 error ("invalid position or size operand to %qs", code_name);
3071 return true;
3072 }
3073 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3074 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3075 {
3076 error ("integral result type precision does not match "
3077 "field size of %qs", code_name);
3078 return true;
3079 }
3080 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3081 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3082 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3083 size))
3084 {
3085 error ("mode size of non-integral result does not "
3086 "match field size of %qs",
3087 code_name);
3088 return true;
3089 }
3090 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3091 && !type_has_mode_precision_p (TREE_TYPE (op)))
3092 {
3093 error ("%qs of non-mode-precision operand", code_name);
3094 return true;
3095 }
3096 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3097 && maybe_gt (size + bitpos,
3098 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3099 {
3100 error ("position plus size exceeds size of referenced object in "
3101 "%qs", code_name);
3102 return true;
3103 }
3104 }
3105
3106 if ((TREE_CODE (expr) == REALPART_EXPR
3107 || TREE_CODE (expr) == IMAGPART_EXPR)
3108 && !useless_type_conversion_p (TREE_TYPE (expr),
3109 TREE_TYPE (TREE_TYPE (op))))
3110 {
3111 error ("type mismatch in %qs reference", code_name);
3112 debug_generic_stmt (TREE_TYPE (expr));
3113 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3114 return true;
3115 }
3116 expr = op;
3117 }
3118
3119 while (handled_component_p (expr))
3120 {
3121 code_name = get_tree_code_name (TREE_CODE (expr));
3122
3123 if (TREE_CODE (expr) == REALPART_EXPR
3124 || TREE_CODE (expr) == IMAGPART_EXPR
3125 || TREE_CODE (expr) == BIT_FIELD_REF)
3126 {
3127 error ("non-top-level %qs", code_name);
3128 return true;
3129 }
3130
3131 tree op = TREE_OPERAND (expr, 0);
3132
3133 if (TREE_CODE (expr) == ARRAY_REF
3134 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3135 {
3136 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3137 || (TREE_OPERAND (expr, 2)
3138 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3139 || (TREE_OPERAND (expr, 3)
3140 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3141 {
3142 error ("invalid operands to %qs", code_name);
3143 debug_generic_stmt (expr);
3144 return true;
3145 }
3146 }
3147
3148 /* Verify if the reference array element types are compatible. */
3149 if (TREE_CODE (expr) == ARRAY_REF
3150 && !useless_type_conversion_p (TREE_TYPE (expr),
3151 TREE_TYPE (TREE_TYPE (op))))
3152 {
3153 error ("type mismatch in %qs", code_name);
3154 debug_generic_stmt (TREE_TYPE (expr));
3155 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3156 return true;
3157 }
3158 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3159 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3160 TREE_TYPE (TREE_TYPE (op))))
3161 {
3162 error ("type mismatch in %qs", code_name);
3163 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3164 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3165 return true;
3166 }
3167
3168 if (TREE_CODE (expr) == COMPONENT_REF)
3169 {
3170 if (TREE_OPERAND (expr, 2)
3171 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3172 {
3173 error ("invalid %qs offset operator", code_name);
3174 return true;
3175 }
3176 if (!useless_type_conversion_p (TREE_TYPE (expr),
3177 TREE_TYPE (TREE_OPERAND (expr, 1))))
3178 {
3179 error ("type mismatch in %qs", code_name);
3180 debug_generic_stmt (TREE_TYPE (expr));
3181 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3182 return true;
3183 }
3184 }
3185
3186 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3187 {
3188 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3189 that their operand is not an SSA name or an invariant when
3190 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3191 bug). Otherwise there is nothing to verify, gross mismatches at
3192 most invoke undefined behavior. */
3193 if (require_lvalue
3194 && (TREE_CODE (op) == SSA_NAME
3195 || is_gimple_min_invariant (op)))
3196 {
3197 error ("conversion of %qs on the left hand side of %qs",
3198 get_tree_code_name (TREE_CODE (op)), code_name);
3199 debug_generic_stmt (expr);
3200 return true;
3201 }
3202 else if (TREE_CODE (op) == SSA_NAME
3203 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3204 {
3205 error ("conversion of register to a different size in %qs",
3206 code_name);
3207 debug_generic_stmt (expr);
3208 return true;
3209 }
3210 else if (!handled_component_p (op))
3211 return false;
3212 }
3213
3214 expr = op;
3215 }
3216
3217 code_name = get_tree_code_name (TREE_CODE (expr));
3218
3219 if (TREE_CODE (expr) == MEM_REF)
3220 {
3221 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3222 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3223 && verify_address (TREE_OPERAND (expr, 0), false)))
3224 {
3225 error ("invalid address operand in %qs", code_name);
3226 debug_generic_stmt (expr);
3227 return true;
3228 }
3229 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3230 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3231 {
3232 error ("invalid offset operand in %qs", code_name);
3233 debug_generic_stmt (expr);
3234 return true;
3235 }
3236 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3237 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3238 {
3239 error ("invalid clique in %qs", code_name);
3240 debug_generic_stmt (expr);
3241 return true;
3242 }
3243 }
3244 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3245 {
3246 if (!TMR_BASE (expr)
3247 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3248 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3249 && verify_address (TMR_BASE (expr), false)))
3250 {
3251 error ("invalid address operand in %qs", code_name);
3252 return true;
3253 }
3254 if (!TMR_OFFSET (expr)
3255 || !poly_int_tree_p (TMR_OFFSET (expr))
3256 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3257 {
3258 error ("invalid offset operand in %qs", code_name);
3259 debug_generic_stmt (expr);
3260 return true;
3261 }
3262 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3263 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3264 {
3265 error ("invalid clique in %qs", code_name);
3266 debug_generic_stmt (expr);
3267 return true;
3268 }
3269 }
3270 else if (TREE_CODE (expr) == INDIRECT_REF)
3271 {
3272 error ("%qs in gimple IL", code_name);
3273 debug_generic_stmt (expr);
3274 return true;
3275 }
3276
3277 if (!require_lvalue
3278 && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
3279 return false;
3280
3281 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3282 return false;
3283
3284 if (TREE_CODE (expr) != TARGET_MEM_REF
3285 && TREE_CODE (expr) != MEM_REF)
3286 {
3287 error ("invalid expression for min lvalue");
3288 return true;
3289 }
3290
3291 return false;
3292 }
3293
3294 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3295 list of pointer-to types that is trivially convertible to DEST. */
3296
3297 static bool
3298 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3299 {
3300 tree src;
3301
3302 if (!TYPE_POINTER_TO (src_obj))
3303 return true;
3304
3305 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3306 if (useless_type_conversion_p (dest, src))
3307 return true;
3308
3309 return false;
3310 }
3311
3312 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3313 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3314
3315 static bool
3316 valid_fixed_convert_types_p (tree type1, tree type2)
3317 {
3318 return (FIXED_POINT_TYPE_P (type1)
3319 && (INTEGRAL_TYPE_P (type2)
3320 || SCALAR_FLOAT_TYPE_P (type2)
3321 || FIXED_POINT_TYPE_P (type2)));
3322 }
3323
3324 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3325 is a problem, otherwise false. */
3326
3327 static bool
3328 verify_gimple_call (gcall *stmt)
3329 {
3330 tree fn = gimple_call_fn (stmt);
3331 tree fntype, fndecl;
3332 unsigned i;
3333
3334 if (gimple_call_internal_p (stmt))
3335 {
3336 if (fn)
3337 {
3338 error ("gimple call has two targets");
3339 debug_generic_stmt (fn);
3340 return true;
3341 }
3342 }
3343 else
3344 {
3345 if (!fn)
3346 {
3347 error ("gimple call has no target");
3348 return true;
3349 }
3350 }
3351
3352 if (fn && !is_gimple_call_addr (fn))
3353 {
3354 error ("invalid function in gimple call");
3355 debug_generic_stmt (fn);
3356 return true;
3357 }
3358
3359 if (fn
3360 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3361 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3362 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3363 {
3364 error ("non-function in gimple call");
3365 return true;
3366 }
3367
3368 fndecl = gimple_call_fndecl (stmt);
3369 if (fndecl
3370 && TREE_CODE (fndecl) == FUNCTION_DECL
3371 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3372 && !DECL_PURE_P (fndecl)
3373 && !TREE_READONLY (fndecl))
3374 {
3375 error ("invalid pure const state for function");
3376 return true;
3377 }
3378
3379 tree lhs = gimple_call_lhs (stmt);
3380 if (lhs
3381 && (!is_gimple_reg (lhs)
3382 && (!is_gimple_lvalue (lhs)
3383 || verify_types_in_gimple_reference
3384 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3385 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3386 {
3387 error ("invalid LHS in gimple call");
3388 return true;
3389 }
3390
3391 if (gimple_call_ctrl_altering_p (stmt)
3392 && gimple_call_noreturn_p (stmt)
3393 && should_remove_lhs_p (lhs))
3394 {
3395 error ("LHS in %<noreturn%> call");
3396 return true;
3397 }
3398
3399 fntype = gimple_call_fntype (stmt);
3400 if (fntype
3401 && lhs
3402 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3403 /* ??? At least C++ misses conversions at assignments from
3404 void * call results.
3405 For now simply allow arbitrary pointer type conversions. */
3406 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3407 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3408 {
3409 error ("invalid conversion in gimple call");
3410 debug_generic_stmt (TREE_TYPE (lhs));
3411 debug_generic_stmt (TREE_TYPE (fntype));
3412 return true;
3413 }
3414
3415 if (gimple_call_chain (stmt)
3416 && !is_gimple_val (gimple_call_chain (stmt)))
3417 {
3418 error ("invalid static chain in gimple call");
3419 debug_generic_stmt (gimple_call_chain (stmt));
3420 return true;
3421 }
3422
3423 /* If there is a static chain argument, the call should either be
3424 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3425 if (gimple_call_chain (stmt)
3426 && fndecl
3427 && !DECL_STATIC_CHAIN (fndecl))
3428 {
3429 error ("static chain with function that doesn%'t use one");
3430 return true;
3431 }
3432
3433 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3434 {
3435 switch (DECL_FUNCTION_CODE (fndecl))
3436 {
3437 case BUILT_IN_UNREACHABLE:
3438 case BUILT_IN_TRAP:
3439 if (gimple_call_num_args (stmt) > 0)
3440 {
3441 /* Built-in unreachable with parameters might not be caught by
3442 undefined behavior sanitizer. Front-ends do check that users do not
3443 call them that way but we also produce calls to
3444 __builtin_unreachable internally, for example when IPA figures
3445 out a call cannot happen in a legal program. In such cases,
3446 we must make sure arguments are stripped off. */
3447 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3448 "with arguments");
3449 return true;
3450 }
3451 break;
3452 default:
3453 break;
3454 }
3455 }
3456
3457 /* For a call to .DEFERRED_INIT,
3458 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, IS_VLA)
3459 we should guarantee that the 1st and the 3rd arguments are consistent:
3460 1st argument: SIZE of the DECL;
3461 3rd argument: IS_VLA, 0 NO, 1 YES;
3462
3463 if IS_VLA is false, the 1st argument should be a constant and the same as
3464 the size of the LHS. */
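/* For instance (illustrative, hypothetical dump syntax), a non-VLA
   'int x[4]' would be initialized via a call like

     x = .DEFERRED_INIT (16, INIT_TYPE, 0);

   where the first argument (16) must be a constant equal to
   TYPE_SIZE_UNIT of the LHS and the third argument (0) marks it as
   not a VLA.  */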
3465 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3466 {
3467 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3468 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3469 tree is_vla_node = gimple_call_arg (stmt, 2);
3470 bool is_vla = (bool) TREE_INT_CST_LOW (is_vla_node);
3471
3472 if (TREE_CODE (lhs) == SSA_NAME)
3473 lhs = SSA_NAME_VAR (lhs);
3474
3475 poly_uint64 size_from_arg0, size_from_lhs;
3476 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3477 &size_from_arg0);
3478 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3479 &size_from_lhs);
3480 if (!is_vla)
3481 {
3482 if (!is_constant_size_arg0)
3483 {
3484 error ("%<DEFFERED_INIT%> calls for non-VLA should have "
3485 "constant size for the first argument");
3486 return true;
3487 }
3488 else if (!is_constant_size_lhs)
3489 {
3490 error ("%<DEFFERED_INIT%> calls for non-VLA should have "
3491 "constant size for the LHS");
3492 return true;
3493 }
3494 else if (maybe_ne (size_from_arg0, size_from_lhs))
3495 {
3496 error ("%<DEFFERED_INIT%> calls for non-VLA should have same "
3497 "constant size for the first argument and LHS");
3498 return true;
3499 }
3500 }
3501 }
3502
3503 /* ??? The C frontend passes unpromoted arguments in case it
3504 didn't see a function declaration before the call. So for now
3505 leave the call arguments mostly unverified. Once we gimplify
3506 unit-at-a-time we have a chance to fix this. */
3507 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3508 {
3509 tree arg = gimple_call_arg (stmt, i);
3510 if ((is_gimple_reg_type (TREE_TYPE (arg))
3511 && !is_gimple_val (arg))
3512 || (!is_gimple_reg_type (TREE_TYPE (arg))
3513 && !is_gimple_lvalue (arg)))
3514 {
3515 error ("invalid argument to gimple call");
3516 debug_generic_expr (arg);
3517 return true;
3518 }
3519 if (!is_gimple_reg (arg))
3520 {
3521 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3522 arg = TREE_OPERAND (arg, 0);
3523 if (verify_types_in_gimple_reference (arg, false))
3524 return true;
3525 }
3526 }
3527
3528 return false;
3529 }
3530
3531 /* Verifies the gimple comparison with the result type TYPE and
3532 the operands OP0 and OP1, comparison code is CODE. */
3533
3534 static bool
3535 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3536 {
3537 tree op0_type = TREE_TYPE (op0);
3538 tree op1_type = TREE_TYPE (op1);
3539
3540 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3541 {
3542 error ("invalid operands in gimple comparison");
3543 return true;
3544 }
3545
3546 /* For comparisons we do not have the operations type as the
3547 effective type the comparison is carried out in. Instead
3548 we require that either the first operand is trivially
3549 convertible into the second, or the other way around. */
3550 if (!useless_type_conversion_p (op0_type, op1_type)
3551 && !useless_type_conversion_p (op1_type, op0_type))
3552 {
3553 error ("mismatching comparison operand types");
3554 debug_generic_expr (op0_type);
3555 debug_generic_expr (op1_type);
3556 return true;
3557 }
3558
3559 /* The resulting type of a comparison may be an effective boolean type. */
3560 if (INTEGRAL_TYPE_P (type)
3561 && (TREE_CODE (type) == BOOLEAN_TYPE
3562 || TYPE_PRECISION (type) == 1))
3563 {
3564 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3565 || TREE_CODE (op1_type) == VECTOR_TYPE)
3566 && code != EQ_EXPR && code != NE_EXPR
3567 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3568 && !VECTOR_INTEGER_TYPE_P (op0_type))
3569 {
3570 error ("unsupported operation or type for vector comparison"
3571 " returning a boolean");
3572 debug_generic_expr (op0_type);
3573 debug_generic_expr (op1_type);
3574 return true;
3575 }
3576 }
3577 /* Or a boolean vector type with the same element count
3578 as the comparison operand types. */
3579 else if (TREE_CODE (type) == VECTOR_TYPE
3580 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3581 {
3582 if (TREE_CODE (op0_type) != VECTOR_TYPE
3583 || TREE_CODE (op1_type) != VECTOR_TYPE)
3584 {
3585 error ("non-vector operands in vector comparison");
3586 debug_generic_expr (op0_type);
3587 debug_generic_expr (op1_type);
3588 return true;
3589 }
3590
3591 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3592 TYPE_VECTOR_SUBPARTS (op0_type)))
3593 {
3594 error ("invalid vector comparison resulting type");
3595 debug_generic_expr (type);
3596 return true;
3597 }
3598 }
3599 else
3600 {
3601 error ("bogus comparison result type");
3602 debug_generic_expr (type);
3603 return true;
3604 }
3605
3606 return false;
3607 }
3608
3609 /* Verify a gimple assignment statement STMT with an unary rhs.
3610 Returns true if anything is wrong. */
3611
3612 static bool
3613 verify_gimple_assign_unary (gassign *stmt)
3614 {
3615 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3616 tree lhs = gimple_assign_lhs (stmt);
3617 tree lhs_type = TREE_TYPE (lhs);
3618 tree rhs1 = gimple_assign_rhs1 (stmt);
3619 tree rhs1_type = TREE_TYPE (rhs1);
3620
3621 if (!is_gimple_reg (lhs))
3622 {
3623 error ("non-register as LHS of unary operation");
3624 return true;
3625 }
3626
3627 if (!is_gimple_val (rhs1))
3628 {
3629 error ("invalid operand in unary operation");
3630 return true;
3631 }
3632
3633 const char* const code_name = get_tree_code_name (rhs_code);
3634
3635 /* First handle conversions. */
3636 switch (rhs_code)
3637 {
3638 CASE_CONVERT:
3639 {
3640 /* Allow conversions between vectors with the same number of elements,
3641 provided that the conversion is OK for the element types too. */
3642 if (VECTOR_TYPE_P (lhs_type)
3643 && VECTOR_TYPE_P (rhs1_type)
3644 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3645 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3646 {
3647 lhs_type = TREE_TYPE (lhs_type);
3648 rhs1_type = TREE_TYPE (rhs1_type);
3649 }
3650 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3651 {
3652 error ("invalid vector types in nop conversion");
3653 debug_generic_expr (lhs_type);
3654 debug_generic_expr (rhs1_type);
3655 return true;
3656 }
3657
3658 /* Allow conversions from pointer type to integral type only if
3659 there is no sign or zero extension involved.
3660 For targets where the precision of ptrofftype doesn't match that
3661 of pointers we allow conversions to types where
3662 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3663 if ((POINTER_TYPE_P (lhs_type)
3664 && INTEGRAL_TYPE_P (rhs1_type))
3665 || (POINTER_TYPE_P (rhs1_type)
3666 && INTEGRAL_TYPE_P (lhs_type)
3667 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3668 #if defined(POINTERS_EXTEND_UNSIGNED)
3669 || (TYPE_MODE (rhs1_type) == ptr_mode
3670 && (TYPE_PRECISION (lhs_type)
3671 == BITS_PER_WORD /* word_mode */
3672 || (TYPE_PRECISION (lhs_type)
3673 == GET_MODE_PRECISION (Pmode))))
3674 #endif
3675 )))
3676 return false;
3677
3678 /* Allow conversion from integral to offset type and vice versa. */
3679 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3680 && INTEGRAL_TYPE_P (rhs1_type))
3681 || (INTEGRAL_TYPE_P (lhs_type)
3682 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3683 return false;
3684
3685 /* Otherwise assert we are converting between types of the
3686 same kind. */
3687 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3688 {
3689 error ("invalid types in nop conversion");
3690 debug_generic_expr (lhs_type);
3691 debug_generic_expr (rhs1_type);
3692 return true;
3693 }
3694
3695 return false;
3696 }
3697
3698 case ADDR_SPACE_CONVERT_EXPR:
3699 {
3700 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3701 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3702 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3703 {
3704 error ("invalid types in address space conversion");
3705 debug_generic_expr (lhs_type);
3706 debug_generic_expr (rhs1_type);
3707 return true;
3708 }
3709
3710 return false;
3711 }
3712
3713 case FIXED_CONVERT_EXPR:
3714 {
3715 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3716 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3717 {
3718 error ("invalid types in fixed-point conversion");
3719 debug_generic_expr (lhs_type);
3720 debug_generic_expr (rhs1_type);
3721 return true;
3722 }
3723
3724 return false;
3725 }
3726
3727 case FLOAT_EXPR:
3728 {
3729 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3730 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3731 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3732 {
3733 error ("invalid types in conversion to floating-point");
3734 debug_generic_expr (lhs_type);
3735 debug_generic_expr (rhs1_type);
3736 return true;
3737 }
3738
3739 return false;
3740 }
3741
3742 case FIX_TRUNC_EXPR:
3743 {
3744 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3745 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3746 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3747 {
3748 error ("invalid types in conversion to integer");
3749 debug_generic_expr (lhs_type);
3750 debug_generic_expr (rhs1_type);
3751 return true;
3752 }
3753
3754 return false;
3755 }
3756
3757 case VEC_UNPACK_HI_EXPR:
3758 case VEC_UNPACK_LO_EXPR:
3759 case VEC_UNPACK_FLOAT_HI_EXPR:
3760 case VEC_UNPACK_FLOAT_LO_EXPR:
3761 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3762 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
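      /* All VEC_UNPACK variants take a vector operand and produce a vector
         whose elements are twice as wide and half as many; the FLOAT and
         FIX_TRUNC variants additionally convert from integer to
         floating-point elements and vice versa.  */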
3763 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3764 || TREE_CODE (lhs_type) != VECTOR_TYPE
3765 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3766 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3767 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3768 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3769 || ((rhs_code == VEC_UNPACK_HI_EXPR
3770 || rhs_code == VEC_UNPACK_LO_EXPR)
3771 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3772 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3773 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3774 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3775 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3776 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3777 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3778 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3779 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3780 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3781 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3782 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3783 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3784 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3785 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3786 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3787 {
3788 error ("type mismatch in %qs expression", code_name);
3789 debug_generic_expr (lhs_type);
3790 debug_generic_expr (rhs1_type);
3791 return true;
3792 }
3793
3794 return false;
3795
3796 case NEGATE_EXPR:
3797 case ABS_EXPR:
3798 case BIT_NOT_EXPR:
3799 case PAREN_EXPR:
3800 case CONJ_EXPR:
3801 /* Disallow pointer and offset types for these unary GIMPLE operations. */
3802 if (POINTER_TYPE_P (lhs_type)
3803 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3804 {
3805 error ("invalid types for %qs", code_name);
3806 debug_generic_expr (lhs_type);
3807 debug_generic_expr (rhs1_type);
3808 return true;
3809 }
3810 break;
3811
3812 case ABSU_EXPR:
3813 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3814 || !TYPE_UNSIGNED (lhs_type)
3815 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3816 || TYPE_UNSIGNED (rhs1_type)
3817 || element_precision (lhs_type) != element_precision (rhs1_type))
3818 {
3819 error ("invalid types for %qs", code_name);
3820 debug_generic_expr (lhs_type);
3821 debug_generic_expr (rhs1_type);
3822 return true;
3823 }
3824 return false;
3825
3826 case VEC_DUPLICATE_EXPR:
3827 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3828 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3829 {
3830 error ("%qs should be from a scalar to a like vector", code_name);
3831 debug_generic_expr (lhs_type);
3832 debug_generic_expr (rhs1_type);
3833 return true;
3834 }
3835 return false;
3836
3837 default:
3838 gcc_unreachable ();
3839 }
3840
3841 /* For the remaining codes assert there is no conversion involved. */
3842 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3843 {
3844 error ("non-trivial conversion in unary operation");
3845 debug_generic_expr (lhs_type);
3846 debug_generic_expr (rhs1_type);
3847 return true;
3848 }
3849
3850 return false;
3851 }
3852
3853 /* Verify a gimple assignment statement STMT with a binary rhs.
3854 Returns true if anything is wrong. */
3855
3856 static bool
3857 verify_gimple_assign_binary (gassign *stmt)
3858 {
3859 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3860 tree lhs = gimple_assign_lhs (stmt);
3861 tree lhs_type = TREE_TYPE (lhs);
3862 tree rhs1 = gimple_assign_rhs1 (stmt);
3863 tree rhs1_type = TREE_TYPE (rhs1);
3864 tree rhs2 = gimple_assign_rhs2 (stmt);
3865 tree rhs2_type = TREE_TYPE (rhs2);
3866
3867 if (!is_gimple_reg (lhs))
3868 {
3869 error ("non-register as LHS of binary operation");
3870 return true;
3871 }
3872
3873 if (!is_gimple_val (rhs1)
3874 || !is_gimple_val (rhs2))
3875 {
3876 error ("invalid operands in binary operation");
3877 return true;
3878 }
3879
3880 const char* const code_name = get_tree_code_name (rhs_code);
3881
3882 /* First handle operations that involve different types. */
3883 switch (rhs_code)
3884 {
3885 case COMPLEX_EXPR:
3886 {
3887 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3888 || !(INTEGRAL_TYPE_P (rhs1_type)
3889 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3890 || !(INTEGRAL_TYPE_P (rhs2_type)
3891 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3892 {
3893 error ("type mismatch in %qs", code_name);
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3896 debug_generic_expr (rhs2_type);
3897 return true;
3898 }
3899
3900 return false;
3901 }
3902
3903 case LSHIFT_EXPR:
3904 case RSHIFT_EXPR:
3905 case LROTATE_EXPR:
3906 case RROTATE_EXPR:
3907 {
3908 /* Shifts and rotates are ok on integral types, fixed point
3909 types and integer vector types. */
3910 if ((!INTEGRAL_TYPE_P (rhs1_type)
3911 && !FIXED_POINT_TYPE_P (rhs1_type)
3912 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3913 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3914 || (!INTEGRAL_TYPE_P (rhs2_type)
3915 /* Vector shifts of vectors are also ok. */
3916 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3917 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3918 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3919 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3920 || !useless_type_conversion_p (lhs_type, rhs1_type))
3921 {
3922 error ("type mismatch in %qs", code_name);
3923 debug_generic_expr (lhs_type);
3924 debug_generic_expr (rhs1_type);
3925 debug_generic_expr (rhs2_type);
3926 return true;
3927 }
3928
3929 return false;
3930 }
3931
3932 case WIDEN_LSHIFT_EXPR:
3933 {
3934 if (!INTEGRAL_TYPE_P (lhs_type)
3935 || !INTEGRAL_TYPE_P (rhs1_type)
3936 || TREE_CODE (rhs2) != INTEGER_CST
3937 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3938 {
3939 error ("type mismatch in %qs", code_name);
3940 debug_generic_expr (lhs_type);
3941 debug_generic_expr (rhs1_type);
3942 debug_generic_expr (rhs2_type);
3943 return true;
3944 }
3945
3946 return false;
3947 }
3948
3949 case VEC_WIDEN_LSHIFT_HI_EXPR:
3950 case VEC_WIDEN_LSHIFT_LO_EXPR:
3951 {
3952 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3953 || TREE_CODE (lhs_type) != VECTOR_TYPE
3954 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3955 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3956 || TREE_CODE (rhs2) != INTEGER_CST
3957 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3958 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3959 {
3960 error ("type mismatch in %qs", code_name);
3961 debug_generic_expr (lhs_type);
3962 debug_generic_expr (rhs1_type);
3963 debug_generic_expr (rhs2_type);
3964 return true;
3965 }
3966
3967 return false;
3968 }
3969
3970 case WIDEN_PLUS_EXPR:
3971 case WIDEN_MINUS_EXPR:
3972 case PLUS_EXPR:
3973 case MINUS_EXPR:
3974 {
3975 tree lhs_etype = lhs_type;
3976 tree rhs1_etype = rhs1_type;
3977 tree rhs2_etype = rhs2_type;
3978 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3979 {
3980 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3981 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3982 {
3983 error ("invalid non-vector operands to %qs", code_name);
3984 return true;
3985 }
3986 lhs_etype = TREE_TYPE (lhs_type);
3987 rhs1_etype = TREE_TYPE (rhs1_type);
3988 rhs2_etype = TREE_TYPE (rhs2_type);
3989 }
3990 if (POINTER_TYPE_P (lhs_etype)
3991 || POINTER_TYPE_P (rhs1_etype)
3992 || POINTER_TYPE_P (rhs2_etype))
3993 {
3994 error ("invalid (pointer) operands %qs", code_name);
3995 return true;
3996 }
3997
3998 /* Continue with generic binary expression handling. */
3999 break;
4000 }
4001
4002 case POINTER_PLUS_EXPR:
4003 {
4004 if (!POINTER_TYPE_P (rhs1_type)
4005 || !useless_type_conversion_p (lhs_type, rhs1_type)
4006 || !ptrofftype_p (rhs2_type))
4007 {
4008 error ("type mismatch in %qs", code_name);
4009 debug_generic_stmt (lhs_type);
4010 debug_generic_stmt (rhs1_type);
4011 debug_generic_stmt (rhs2_type);
4012 return true;
4013 }
4014
4015 return false;
4016 }
4017
4018 case POINTER_DIFF_EXPR:
4019 {
4020 if (!POINTER_TYPE_P (rhs1_type)
4021 || !POINTER_TYPE_P (rhs2_type)
4022 /* Because we special-case pointers to void we allow difference
4023 of arbitrary pointers with the same mode. */
4024 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4025 || !INTEGRAL_TYPE_P (lhs_type)
4026 || TYPE_UNSIGNED (lhs_type)
4027 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4028 {
4029 error ("type mismatch in %qs", code_name);
4030 debug_generic_stmt (lhs_type);
4031 debug_generic_stmt (rhs1_type);
4032 debug_generic_stmt (rhs2_type);
4033 return true;
4034 }
4035
4036 return false;
4037 }
4038
4039 case TRUTH_ANDIF_EXPR:
4040 case TRUTH_ORIF_EXPR:
4041 case TRUTH_AND_EXPR:
4042 case TRUTH_OR_EXPR:
4043 case TRUTH_XOR_EXPR:
4044
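      /* Truth opcodes are lowered by the gimplifier and must never appear
         in a GIMPLE assignment.  */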
4045 gcc_unreachable ();
4046
4047 case LT_EXPR:
4048 case LE_EXPR:
4049 case GT_EXPR:
4050 case GE_EXPR:
4051 case EQ_EXPR:
4052 case NE_EXPR:
4053 case UNORDERED_EXPR:
4054 case ORDERED_EXPR:
4055 case UNLT_EXPR:
4056 case UNLE_EXPR:
4057 case UNGT_EXPR:
4058 case UNGE_EXPR:
4059 case UNEQ_EXPR:
4060 case LTGT_EXPR:
4061 /* Comparisons are also binary, but the result type is not
4062 connected to the operand types. */
4063 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4064
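      /* A WIDEN_MULT_EXPR requires an INTEGER_TYPE result at least twice as
         wide as its equal-precision operands; note that a mismatch here is
         reported without a diagnostic.  */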
4065 case WIDEN_MULT_EXPR:
4066 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4067 return true;
4068 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4069 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4070
4071 case WIDEN_SUM_EXPR:
4072 {
4073 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4074 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4075 && ((!INTEGRAL_TYPE_P (rhs1_type)
4076 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4077 || (!INTEGRAL_TYPE_P (lhs_type)
4078 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4079 || !useless_type_conversion_p (lhs_type, rhs2_type)
4080 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4081 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4082 {
4083 error ("type mismatch in %qs", code_name);
4084 debug_generic_expr (lhs_type);
4085 debug_generic_expr (rhs1_type);
4086 debug_generic_expr (rhs2_type);
4087 return true;
4088 }
4089 return false;
4090 }
4091
4092 case VEC_WIDEN_MINUS_HI_EXPR:
4093 case VEC_WIDEN_MINUS_LO_EXPR:
4094 case VEC_WIDEN_PLUS_HI_EXPR:
4095 case VEC_WIDEN_PLUS_LO_EXPR:
4096 case VEC_WIDEN_MULT_HI_EXPR:
4097 case VEC_WIDEN_MULT_LO_EXPR:
4098 case VEC_WIDEN_MULT_EVEN_EXPR:
4099 case VEC_WIDEN_MULT_ODD_EXPR:
4100 {
4101 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4102 || TREE_CODE (lhs_type) != VECTOR_TYPE
4103 || !types_compatible_p (rhs1_type, rhs2_type)
4104 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4105 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4106 {
4107 error ("type mismatch in %qs", code_name);
4108 debug_generic_expr (lhs_type);
4109 debug_generic_expr (rhs1_type);
4110 debug_generic_expr (rhs2_type);
4111 return true;
4112 }
4113 return false;
4114 }
4115
4116 case VEC_PACK_TRUNC_EXPR:
4117 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4118 vector boolean types. */
4119 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4120 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4121 && types_compatible_p (rhs1_type, rhs2_type)
4122 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4123 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4124 return false;
4125
4126 /* Fallthru. */
4127 case VEC_PACK_SAT_EXPR:
4128 case VEC_PACK_FIX_TRUNC_EXPR:
4129 {
4130 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4131 || TREE_CODE (lhs_type) != VECTOR_TYPE
4132 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4133 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4134 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4135 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4136 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4137 || !types_compatible_p (rhs1_type, rhs2_type)
4138 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4139 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4140 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4141 TYPE_VECTOR_SUBPARTS (lhs_type)))
4142 {
4143 error ("type mismatch in %qs", code_name);
4144 debug_generic_expr (lhs_type);
4145 debug_generic_expr (rhs1_type);
4146 debug_generic_expr (rhs2_type);
4147 return true;
4148 }
4149
4150 return false;
4151 }
4152
4153 case VEC_PACK_FLOAT_EXPR:
4154 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4155 || TREE_CODE (lhs_type) != VECTOR_TYPE
4156 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4157 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4158 || !types_compatible_p (rhs1_type, rhs2_type)
4159 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4160 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4161 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4162 TYPE_VECTOR_SUBPARTS (lhs_type)))
4163 {
4164 error ("type mismatch in %qs", code_name);
4165 debug_generic_expr (lhs_type);
4166 debug_generic_expr (rhs1_type);
4167 debug_generic_expr (rhs2_type);
4168 return true;
4169 }
4170
4171 return false;
4172
4173 case MULT_EXPR:
4174 case MULT_HIGHPART_EXPR:
4175 case TRUNC_DIV_EXPR:
4176 case CEIL_DIV_EXPR:
4177 case FLOOR_DIV_EXPR:
4178 case ROUND_DIV_EXPR:
4179 case TRUNC_MOD_EXPR:
4180 case CEIL_MOD_EXPR:
4181 case FLOOR_MOD_EXPR:
4182 case ROUND_MOD_EXPR:
4183 case RDIV_EXPR:
4184 case EXACT_DIV_EXPR:
4185 /* Disallow pointer and offset types for these binary GIMPLE operations. */
4186 if (POINTER_TYPE_P (lhs_type)
4187 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4188 {
4189 error ("invalid types for %qs", code_name);
4190 debug_generic_expr (lhs_type);
4191 debug_generic_expr (rhs1_type);
4192 debug_generic_expr (rhs2_type);
4193 return true;
4194 }
4195 /* Continue with generic binary expression handling. */
4196 break;
4197
4198 case MIN_EXPR:
4199 case MAX_EXPR:
4200 case BIT_IOR_EXPR:
4201 case BIT_XOR_EXPR:
4202 case BIT_AND_EXPR:
4203 /* Continue with generic binary expression handling. */
4204 break;
4205
4206 case VEC_SERIES_EXPR:
4207 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4208 {
4209 error ("type mismatch in %qs", code_name);
4210 debug_generic_expr (rhs1_type);
4211 debug_generic_expr (rhs2_type);
4212 return true;
4213 }
4214 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4215 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4216 {
4217 error ("vector type expected in %qs", code_name);
4218 debug_generic_expr (lhs_type);
4219 return true;
4220 }
4221 return false;
4222
4223 default:
4224 gcc_unreachable ();
4225 }
4226
4227 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4228 || !useless_type_conversion_p (lhs_type, rhs2_type))
4229 {
4230 error ("type mismatch in binary expression");
4231 debug_generic_stmt (lhs_type);
4232 debug_generic_stmt (rhs1_type);
4233 debug_generic_stmt (rhs2_type);
4234 return true;
4235 }
4236
4237 return false;
4238 }
4239
4240 /* Verify a gimple assignment statement STMT with a ternary rhs.
4241 Returns true if anything is wrong. */
4242
4243 static bool
4244 verify_gimple_assign_ternary (gassign *stmt)
4245 {
4246 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4247 tree lhs = gimple_assign_lhs (stmt);
4248 tree lhs_type = TREE_TYPE (lhs);
4249 tree rhs1 = gimple_assign_rhs1 (stmt);
4250 tree rhs1_type = TREE_TYPE (rhs1);
4251 tree rhs2 = gimple_assign_rhs2 (stmt);
4252 tree rhs2_type = TREE_TYPE (rhs2);
4253 tree rhs3 = gimple_assign_rhs3 (stmt);
4254 tree rhs3_type = TREE_TYPE (rhs3);
4255
4256 if (!is_gimple_reg (lhs))
4257 {
4258 error ("non-register as LHS of ternary operation");
4259 return true;
4260 }
4261
4262 if ((rhs_code == COND_EXPR
4263 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
4264 || !is_gimple_val (rhs2)
4265 || !is_gimple_val (rhs3))
4266 {
4267 error ("invalid operands in ternary operation");
4268 return true;
4269 }
4270
4271 const char* const code_name = get_tree_code_name (rhs_code);
4272
4273 /* First handle operations that involve different types. */
4274 switch (rhs_code)
4275 {
4276 case WIDEN_MULT_PLUS_EXPR:
4277 case WIDEN_MULT_MINUS_EXPR:
4278 if ((!INTEGRAL_TYPE_P (rhs1_type)
4279 && !FIXED_POINT_TYPE_P (rhs1_type))
4280 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4281 || !useless_type_conversion_p (lhs_type, rhs3_type)
4282 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4283 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4284 {
4285 error ("type mismatch in %qs", code_name);
4286 debug_generic_expr (lhs_type);
4287 debug_generic_expr (rhs1_type);
4288 debug_generic_expr (rhs2_type);
4289 debug_generic_expr (rhs3_type);
4290 return true;
4291 }
4292 break;
4293
4294 case VEC_COND_EXPR:
4295 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4296 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4297 TYPE_VECTOR_SUBPARTS (lhs_type)))
4298 {
4299 error ("the first argument of a %qs must be of a "
4300 "boolean vector type of the same number of elements "
4301 "as the result", code_name);
4302 debug_generic_expr (lhs_type);
4303 debug_generic_expr (rhs1_type);
4304 return true;
4305 }
4306 if (!is_gimple_val (rhs1))
4307 return true;
4308 /* Fallthrough. */
4309 case COND_EXPR:
4310 if (!is_gimple_val (rhs1)
4311 && verify_gimple_comparison (TREE_TYPE (rhs1),
4312 TREE_OPERAND (rhs1, 0),
4313 TREE_OPERAND (rhs1, 1),
4314 TREE_CODE (rhs1)))
4315 return true;
4316 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4317 || !useless_type_conversion_p (lhs_type, rhs3_type))
4318 {
4319 error ("type mismatch in %qs", code_name);
4320 debug_generic_expr (lhs_type);
4321 debug_generic_expr (rhs2_type);
4322 debug_generic_expr (rhs3_type);
4323 return true;
4324 }
4325 break;
4326
4327 case VEC_PERM_EXPR:
4328 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4329 || !useless_type_conversion_p (lhs_type, rhs2_type))
4330 {
4331 error ("type mismatch in %qs", code_name);
4332 debug_generic_expr (lhs_type);
4333 debug_generic_expr (rhs1_type);
4334 debug_generic_expr (rhs2_type);
4335 debug_generic_expr (rhs3_type);
4336 return true;
4337 }
4338
4339 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4340 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4341 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4342 {
4343 error ("vector types expected in %qs", code_name);
4344 debug_generic_expr (lhs_type);
4345 debug_generic_expr (rhs1_type);
4346 debug_generic_expr (rhs2_type);
4347 debug_generic_expr (rhs3_type);
4348 return true;
4349 }
4350
4351 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4352 TYPE_VECTOR_SUBPARTS (rhs2_type))
4353 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4354 TYPE_VECTOR_SUBPARTS (rhs3_type))
4355 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4356 TYPE_VECTOR_SUBPARTS (lhs_type)))
4357 {
4358 error ("vectors with different element number found in %qs",
4359 code_name);
4360 debug_generic_expr (lhs_type);
4361 debug_generic_expr (rhs1_type);
4362 debug_generic_expr (rhs2_type);
4363 debug_generic_expr (rhs3_type);
4364 return true;
4365 }
4366
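      /* The permutation mask must be a vector of integers; unless it is a
         VECTOR_CST, its element width must match that of the data
         vectors.  */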
4367 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4368 || (TREE_CODE (rhs3) != VECTOR_CST
4369 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4370 (TREE_TYPE (rhs3_type)))
4371 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4372 (TREE_TYPE (rhs1_type))))))
4373 {
4374 error ("invalid mask type in %qs", code_name);
4375 debug_generic_expr (lhs_type);
4376 debug_generic_expr (rhs1_type);
4377 debug_generic_expr (rhs2_type);
4378 debug_generic_expr (rhs3_type);
4379 return true;
4380 }
4381
4382 return false;
4383
4384 case SAD_EXPR:
4385 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4386 || !useless_type_conversion_p (lhs_type, rhs3_type)
4387 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4388 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4389 {
4390 error ("type mismatch in %qs", code_name);
4391 debug_generic_expr (lhs_type);
4392 debug_generic_expr (rhs1_type);
4393 debug_generic_expr (rhs2_type);
4394 debug_generic_expr (rhs3_type);
4395 return true;
4396 }
4397
4398 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4399 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4400 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4401 {
4402 error ("vector types expected in %qs", code_name);
4403 debug_generic_expr (lhs_type);
4404 debug_generic_expr (rhs1_type);
4405 debug_generic_expr (rhs2_type);
4406 debug_generic_expr (rhs3_type);
4407 return true;
4408 }
4409
4410 return false;
4411
4412 case BIT_INSERT_EXPR:
4413 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4414 {
4415 error ("type mismatch in %qs", code_name);
4416 debug_generic_expr (lhs_type);
4417 debug_generic_expr (rhs1_type);
4418 return true;
4419 }
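      /* BIT_INSERT_EXPR must insert either an integral value into an
         integral, a single element into a vector, or an aligned sub-vector
         into a vector.  */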
4420 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4421 && INTEGRAL_TYPE_P (rhs2_type))
4422 /* Vector element insert. */
4423 || (VECTOR_TYPE_P (rhs1_type)
4424 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4425 /* Aligned sub-vector insert. */
4426 || (VECTOR_TYPE_P (rhs1_type)
4427 && VECTOR_TYPE_P (rhs2_type)
4428 && types_compatible_p (TREE_TYPE (rhs1_type),
4429 TREE_TYPE (rhs2_type))
4430 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4431 TYPE_VECTOR_SUBPARTS (rhs2_type))
4432 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
4433 {
4434 error ("not allowed type combination in %qs", code_name);
4435 debug_generic_expr (rhs1_type);
4436 debug_generic_expr (rhs2_type);
4437 return true;
4438 }
4439 if (! tree_fits_uhwi_p (rhs3)
4440 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4441 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4442 {
4443 error ("invalid position or size in %qs", code_name);
4444 return true;
4445 }
4446 if (INTEGRAL_TYPE_P (rhs1_type)
4447 && !type_has_mode_precision_p (rhs1_type))
4448 {
4449 error ("%qs into non-mode-precision operand", code_name);
4450 return true;
4451 }
4452 if (INTEGRAL_TYPE_P (rhs1_type))
4453 {
4454 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4455 if (bitpos >= TYPE_PRECISION (rhs1_type)
4456 || (bitpos + TYPE_PRECISION (rhs2_type)
4457 > TYPE_PRECISION (rhs1_type)))
4458 {
4459 error ("insertion out of range in %qs", code_name);
4460 return true;
4461 }
4462 }
4463 else if (VECTOR_TYPE_P (rhs1_type))
4464 {
4465 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4466 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4467 if (bitpos % bitsize != 0)
4468 {
4469 error ("%qs not at element boundary", code_name);
4470 return true;
4471 }
4472 }
4473 return false;
4474
4475 case DOT_PROD_EXPR:
4476 {
4477 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4478 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4479 && ((!INTEGRAL_TYPE_P (rhs1_type)
4480 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4481 || (!INTEGRAL_TYPE_P (lhs_type)
4482 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4483 /* rhs1_type and rhs2_type may differ in sign. */
4484 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4485 || !useless_type_conversion_p (lhs_type, rhs3_type)
4486 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4487 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4488 {
4489 error ("type mismatch in %qs", code_name);
4490 debug_generic_expr (lhs_type);
4491 debug_generic_expr (rhs1_type);
4492 debug_generic_expr (rhs2_type);
4493 return true;
4494 }
4495 return false;
4496 }
4497
4498 case REALIGN_LOAD_EXPR:
4499 /* FIXME. */
4500 return false;
4501
4502 default:
4503 gcc_unreachable ();
4504 }
4505 return false;
4506 }
4507
4508 /* Verify a gimple assignment statement STMT with a single rhs.
4509 Returns true if anything is wrong. */
4510
4511 static bool
4512 verify_gimple_assign_single (gassign *stmt)
4513 {
4514 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4515 tree lhs = gimple_assign_lhs (stmt);
4516 tree lhs_type = TREE_TYPE (lhs);
4517 tree rhs1 = gimple_assign_rhs1 (stmt);
4518 tree rhs1_type = TREE_TYPE (rhs1);
4519 bool res = false;
4520
4521 const char* const code_name = get_tree_code_name (rhs_code);
4522
4523 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4524 {
4525 error ("non-trivial conversion in %qs", code_name);
4526 debug_generic_expr (lhs_type);
4527 debug_generic_expr (rhs1_type);
4528 return true;
4529 }
4530
4531 if (gimple_clobber_p (stmt)
4532 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4533 {
4534 error ("%qs LHS in clobber statement",
4535 get_tree_code_name (TREE_CODE (lhs)));
4536 debug_generic_expr (lhs);
4537 return true;
4538 }
4539
4540 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4541 {
4542 error ("%qs LHS in assignment statement",
4543 get_tree_code_name (TREE_CODE (lhs)));
4544 debug_generic_expr (lhs);
4545 return true;
4546 }
4547
4548 if (handled_component_p (lhs)
4549 || TREE_CODE (lhs) == MEM_REF
4550 || TREE_CODE (lhs) == TARGET_MEM_REF)
4551 res |= verify_types_in_gimple_reference (lhs, true);
4552
4553 /* Special codes we cannot handle via their class. */
4554 switch (rhs_code)
4555 {
4556 case ADDR_EXPR:
4557 {
4558 tree op = TREE_OPERAND (rhs1, 0);
4559 if (!is_gimple_addressable (op))
4560 {
4561 error ("invalid operand in %qs", code_name);
4562 return true;
4563 }
4564
4565 /* Technically there is no longer a need for matching types, but
4566 gimple hygiene asks for this check. In LTO we can end up
4567 combining incompatible units and thus end up with addresses
4568 of globals that change their type to a common one. */
4569 if (!in_lto_p
4570 && !types_compatible_p (TREE_TYPE (op),
4571 TREE_TYPE (TREE_TYPE (rhs1)))
4572 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4573 TREE_TYPE (op)))
4574 {
4575 error ("type mismatch in %qs", code_name);
4576 debug_generic_stmt (TREE_TYPE (rhs1));
4577 debug_generic_stmt (TREE_TYPE (op));
4578 return true;
4579 }
4580
4581 return (verify_address (rhs1, true)
4582 || verify_types_in_gimple_reference (op, true));
4583 }
4584
4585 /* tcc_reference */
4586 case INDIRECT_REF:
4587 error ("%qs in gimple IL", code_name);
4588 return true;
4589
4590 case COMPONENT_REF:
4591 case BIT_FIELD_REF:
4592 case ARRAY_REF:
4593 case ARRAY_RANGE_REF:
4594 case VIEW_CONVERT_EXPR:
4595 case REALPART_EXPR:
4596 case IMAGPART_EXPR:
4597 case TARGET_MEM_REF:
4598 case MEM_REF:
4599 if (!is_gimple_reg (lhs)
4600 && is_gimple_reg_type (TREE_TYPE (lhs)))
4601 {
4602 error ("invalid RHS for gimple memory store: %qs", code_name);
4603 debug_generic_stmt (lhs);
4604 debug_generic_stmt (rhs1);
4605 return true;
4606 }
4607 return res || verify_types_in_gimple_reference (rhs1, false);
4608
4609 /* tcc_constant */
4610 case SSA_NAME:
4611 case INTEGER_CST:
4612 case REAL_CST:
4613 case FIXED_CST:
4614 case COMPLEX_CST:
4615 case VECTOR_CST:
4616 case STRING_CST:
4617 return res;
4618
4619 /* tcc_declaration */
4620 case CONST_DECL:
4621 return res;
4622 case VAR_DECL:
4623 case PARM_DECL:
4624 if (!is_gimple_reg (lhs)
4625 && !is_gimple_reg (rhs1)
4626 && is_gimple_reg_type (TREE_TYPE (lhs)))
4627 {
4628 error ("invalid RHS for gimple memory store: %qs", code_name);
4629 debug_generic_stmt (lhs);
4630 debug_generic_stmt (rhs1);
4631 return true;
4632 }
4633 return res;
4634
4635 case CONSTRUCTOR:
4636 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4637 {
4638 unsigned int i;
4639 tree elt_i, elt_v, elt_t = NULL_TREE;
4640
4641 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4642 return res;
4643 /* For vector CONSTRUCTORs we require that either it is an empty
4644 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4645 (then the element count must be correct to cover the whole
4646 outer vector and the index must be NULL on all elements), or it is
4647 a CONSTRUCTOR of scalar elements, where as an exception we allow
4648 a smaller number of elements (assuming zero filling) and
4649 consecutive indexes as compared to NULL indexes (such
4650 CONSTRUCTORs can appear in the IL from FEs). */
4651 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4652 {
4653 if (elt_t == NULL_TREE)
4654 {
4655 elt_t = TREE_TYPE (elt_v);
4656 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4657 {
4658 tree elt_t = TREE_TYPE (elt_v);
4659 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4660 TREE_TYPE (elt_t)))
4661 {
4662 error ("incorrect type of vector %qs elements",
4663 code_name);
4664 debug_generic_stmt (rhs1);
4665 return true;
4666 }
4667 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4668 * TYPE_VECTOR_SUBPARTS (elt_t),
4669 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4670 {
4671 error ("incorrect number of vector %qs elements",
4672 code_name);
4673 debug_generic_stmt (rhs1);
4674 return true;
4675 }
4676 }
4677 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4678 elt_t))
4679 {
4680 error ("incorrect type of vector %qs elements",
4681 code_name);
4682 debug_generic_stmt (rhs1);
4683 return true;
4684 }
4685 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4686 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4687 {
4688 error ("incorrect number of vector %qs elements",
4689 code_name);
4690 debug_generic_stmt (rhs1);
4691 return true;
4692 }
4693 }
4694 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4695 {
4696 error ("incorrect type of vector CONSTRUCTOR elements");
4697 debug_generic_stmt (rhs1);
4698 return true;
4699 }
4700 if (elt_i != NULL_TREE
4701 && (TREE_CODE (elt_t) == VECTOR_TYPE
4702 || TREE_CODE (elt_i) != INTEGER_CST
4703 || compare_tree_int (elt_i, i) != 0))
4704 {
4705 error ("vector %qs with non-NULL element index",
4706 code_name);
4707 debug_generic_stmt (rhs1);
4708 return true;
4709 }
4710 if (!is_gimple_val (elt_v))
4711 {
4712 error ("vector %qs element is not a GIMPLE value",
4713 code_name);
4714 debug_generic_stmt (rhs1);
4715 return true;
4716 }
4717 }
4718 }
4719 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4720 {
4721 error ("non-vector %qs with elements", code_name);
4722 debug_generic_stmt (rhs1);
4723 return true;
4724 }
4725 return res;
4726
4727 case ASSERT_EXPR:
4728 /* FIXME. */
4729 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4730 if (rhs1 == boolean_false_node)
4731 {
4732 error ("%qs with an always-false condition", code_name);
4733 debug_generic_stmt (rhs1);
4734 return true;
4735 }
4736 break;
4737
4738 case WITH_SIZE_EXPR:
4739 error ("%qs RHS in assignment statement",
4740 get_tree_code_name (rhs_code));
4741 debug_generic_expr (rhs1);
4742 return true;
4743
4744 case OBJ_TYPE_REF:
4745 /* FIXME. */
4746 return res;
4747
4748 default:;
4749 }
4750
4751 return res;
4752 }
4753
4754 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4755 is a problem, otherwise false. */
4756
4757 static bool
4758 verify_gimple_assign (gassign *stmt)
4759 {
4760 switch (gimple_assign_rhs_class (stmt))
4761 {
4762 case GIMPLE_SINGLE_RHS:
4763 return verify_gimple_assign_single (stmt);
4764
4765 case GIMPLE_UNARY_RHS:
4766 return verify_gimple_assign_unary (stmt);
4767
4768 case GIMPLE_BINARY_RHS:
4769 return verify_gimple_assign_binary (stmt);
4770
4771 case GIMPLE_TERNARY_RHS:
4772 return verify_gimple_assign_ternary (stmt);
4773
4774 default:
4775 gcc_unreachable ();
4776 }
4777 }
4778
4779 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4780 is a problem, otherwise false. */
4781
4782 static bool
4783 verify_gimple_return (greturn *stmt)
4784 {
4785 tree op = gimple_return_retval (stmt);
4786 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4787
4788 /* We cannot test for present return values as we do not fix up missing
4789 return values from the original source. */
4790 if (op == NULL)
4791 return false;
4792
4793 if (!is_gimple_val (op)
4794 && TREE_CODE (op) != RESULT_DECL)
4795 {
4796 error ("invalid operand in return statement");
4797 debug_generic_stmt (op);
4798 return true;
4799 }
4800
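  /* For a DECL_BY_REFERENCE result the decl (or its SSA name) has pointer
     type; replace OP by its type so that TREE_TYPE (op) below yields the
     pointed-to type, which is what must match the function's return
     type.  */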
4801 if ((TREE_CODE (op) == RESULT_DECL
4802 && DECL_BY_REFERENCE (op))
4803 || (TREE_CODE (op) == SSA_NAME
4804 && SSA_NAME_VAR (op)
4805 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4806 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4807 op = TREE_TYPE (op);
4808
4809 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4810 {
4811 error ("invalid conversion in return statement");
4812 debug_generic_stmt (restype);
4813 debug_generic_stmt (TREE_TYPE (op));
4814 return true;
4815 }
4816
4817 return false;
4818 }
4819
4820
4821 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4822 is a problem, otherwise false. */
4823
4824 static bool
4825 verify_gimple_goto (ggoto *stmt)
4826 {
4827 tree dest = gimple_goto_dest (stmt);
4828
4829 /* ??? We have two canonical forms of direct goto destinations, a
4830 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4831 if (TREE_CODE (dest) != LABEL_DECL
4832 && (!is_gimple_val (dest)
4833 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4834 {
4835 error ("goto destination is neither a label nor a pointer");
4836 return true;
4837 }
4838
4839 return false;
4840 }
4841
4842 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4843 is a problem, otherwise false. */
4844
4845 static bool
4846 verify_gimple_switch (gswitch *stmt)
4847 {
4848 unsigned int i, n;
4849 tree elt, prev_upper_bound = NULL_TREE;
4850 tree index_type, elt_type = NULL_TREE;
4851
4852 if (!is_gimple_val (gimple_switch_index (stmt)))
4853 {
4854 error ("invalid operand to switch statement");
4855 debug_generic_stmt (gimple_switch_index (stmt));
4856 return true;
4857 }
4858
4859 index_type = TREE_TYPE (gimple_switch_index (stmt));
4860 if (! INTEGRAL_TYPE_P (index_type))
4861 {
4862 error ("non-integral type switch statement");
4863 debug_generic_expr (index_type);
4864 return true;
4865 }
4866
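  /* The label at index 0 is the default case; it must carry neither a
     value nor a range.  */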
4867 elt = gimple_switch_label (stmt, 0);
4868 if (CASE_LOW (elt) != NULL_TREE
4869 || CASE_HIGH (elt) != NULL_TREE
4870 || CASE_CHAIN (elt) != NULL_TREE)
4871 {
4872 error ("invalid default case label in switch statement");
4873 debug_generic_expr (elt);
4874 return true;
4875 }
4876
4877 n = gimple_switch_num_labels (stmt);
4878 for (i = 1; i < n; i++)
4879 {
4880 elt = gimple_switch_label (stmt, i);
4881
4882 if (CASE_CHAIN (elt))
4883 {
4884 error ("invalid %<CASE_CHAIN%>");
4885 debug_generic_expr (elt);
4886 return true;
4887 }
4888 if (! CASE_LOW (elt))
4889 {
4890 error ("invalid case label in switch statement");
4891 debug_generic_expr (elt);
4892 return true;
4893 }
4894 if (CASE_HIGH (elt)
4895 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4896 {
4897 error ("invalid case range in switch statement");
4898 debug_generic_expr (elt);
4899 return true;
4900 }
4901
4902 if (! elt_type)
4903 {
4904 elt_type = TREE_TYPE (CASE_LOW (elt));
4905 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4906 {
4907 error ("type precision mismatch in switch statement");
4908 return true;
4909 }
4910 }
4911 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4912 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4913 {
4914 error ("type mismatch for case label in switch statement");
4915 debug_generic_expr (elt);
4916 return true;
4917 }
4918
4919 if (prev_upper_bound)
4920 {
4921 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4922 {
4923 error ("case labels not sorted in switch statement");
4924 return true;
4925 }
4926 }
4927
4928 prev_upper_bound = CASE_HIGH (elt);
4929 if (! prev_upper_bound)
4930 prev_upper_bound = CASE_LOW (elt);
4931 }
4932
4933 return false;
4934 }
4935
4936 /* Verify a gimple debug statement STMT.
4937 Returns true if anything is wrong. */
4938
4939 static bool
4940 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4941 {
4942 /* There isn't much that could be wrong in a gimple debug stmt. A
4943 gimple debug bind stmt, for example, maps a tree (usually a
4944 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4945 or member of an aggregate type) to another tree that can be an
4946 arbitrary expression. These stmts expand into debug insns, and
4947 are converted to debug notes by var-tracking.c. */
4948 return false;
4949 }
4950
4951 /* Verify a gimple label statement STMT.
4952 Returns true if anything is wrong. */
4953
4954 static bool
4955 verify_gimple_label (glabel *stmt)
4956 {
4957 tree decl = gimple_label_label (stmt);
4958 int uid;
4959 bool err = false;
4960
4961 if (TREE_CODE (decl) != LABEL_DECL)
4962 return true;
4963 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4964 && DECL_CONTEXT (decl) != current_function_decl)
4965 {
4966 error ("label context is not the current function declaration");
4967 err |= true;
4968 }
4969
4970 uid = LABEL_DECL_UID (decl);
4971 if (cfun->cfg
4972 && (uid == -1
4973 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4974 {
4975 error ("incorrect entry in %<label_to_block_map%>");
4976 err |= true;
4977 }
4978
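  /* If the label is an EH landing pad, the EH region must record this
     very decl as its post-landing-pad label.  */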
4979 uid = EH_LANDING_PAD_NR (decl);
4980 if (uid)
4981 {
4982 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4983 if (decl != lp->post_landing_pad)
4984 {
4985 error ("incorrect setting of landing pad number");
4986 err |= true;
4987 }
4988 }
4989
4990 return err;
4991 }
4992
4993 /* Verify a gimple cond statement STMT.
4994 Returns true if anything is wrong. */
4995
4996 static bool
4997 verify_gimple_cond (gcond *stmt)
4998 {
4999 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5000 {
5001 error ("invalid comparison code in gimple cond");
5002 return true;
5003 }
5004 if (!(!gimple_cond_true_label (stmt)
5005 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5006 || !(!gimple_cond_false_label (stmt)
5007 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5008 {
5009 error ("invalid labels in gimple cond");
5010 return true;
5011 }
5012
5013 return verify_gimple_comparison (boolean_type_node,
5014 gimple_cond_lhs (stmt),
5015 gimple_cond_rhs (stmt),
5016 gimple_cond_code (stmt));
5017 }
5018
5019 /* Verify the GIMPLE statement STMT. Returns true if there is an
5020 error, otherwise false. */
5021
5022 static bool
5023 verify_gimple_stmt (gimple *stmt)
5024 {
5025 switch (gimple_code (stmt))
5026 {
5027 case GIMPLE_ASSIGN:
5028 return verify_gimple_assign (as_a <gassign *> (stmt));
5029
5030 case GIMPLE_LABEL:
5031 return verify_gimple_label (as_a <glabel *> (stmt));
5032
5033 case GIMPLE_CALL:
5034 return verify_gimple_call (as_a <gcall *> (stmt));
5035
5036 case GIMPLE_COND:
5037 return verify_gimple_cond (as_a <gcond *> (stmt));
5038
5039 case GIMPLE_GOTO:
5040 return verify_gimple_goto (as_a <ggoto *> (stmt));
5041
5042 case GIMPLE_SWITCH:
5043 return verify_gimple_switch (as_a <gswitch *> (stmt));
5044
5045 case GIMPLE_RETURN:
5046 return verify_gimple_return (as_a <greturn *> (stmt));
5047
5048 case GIMPLE_ASM:
5049 return false;
5050
5051 case GIMPLE_TRANSACTION:
5052 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5053
5054 /* Tuples that do not have tree operands. */
5055 case GIMPLE_NOP:
5056 case GIMPLE_PREDICT:
5057 case GIMPLE_RESX:
5058 case GIMPLE_EH_DISPATCH:
5059 case GIMPLE_EH_MUST_NOT_THROW:
5060 return false;
5061
5062 CASE_GIMPLE_OMP:
5063 /* OpenMP directives are validated by the FE and never operated
5064 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5065 non-gimple expressions when the main index variable has had
5066 its address taken. This does not affect the loop itself
5067 because the header of a GIMPLE_OMP_FOR is merely used to determine
5068 how to set up the parallel iteration. */
5069 return false;
5070
5071 case GIMPLE_DEBUG:
5072 return verify_gimple_debug (stmt);
5073
5074 default:
5075 gcc_unreachable ();
5076 }
5077 }
5078
5079 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5080 and false otherwise. */
5081
5082 static bool
5083 verify_gimple_phi (gphi *phi)
5084 {
5085 bool err = false;
5086 unsigned i;
5087 tree phi_result = gimple_phi_result (phi);
5088 bool virtual_p;
5089
5090 if (!phi_result)
5091 {
5092 error ("invalid %<PHI%> result");
5093 return true;
5094 }
5095
5096 virtual_p = virtual_operand_p (phi_result);
5097 if (TREE_CODE (phi_result) != SSA_NAME
5098 || (virtual_p
5099 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5100 {
5101 error ("invalid %<PHI%> result");
5102 err = true;
5103 }
5104
5105 for (i = 0; i < gimple_phi_num_args (phi); i++)
5106 {
5107 tree t = gimple_phi_arg_def (phi, i);
5108
5109 if (!t)
5110 {
5111 error ("missing %<PHI%> def");
5112 err |= true;
5113 continue;
5114 }
5115 /* Addressable variables do have SSA_NAMEs but they
5116 are not considered gimple values. */
5117 else if ((TREE_CODE (t) == SSA_NAME
5118 && virtual_p != virtual_operand_p (t))
5119 || (virtual_p
5120 && (TREE_CODE (t) != SSA_NAME
5121 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5122 || (!virtual_p
5123 && !is_gimple_val (t)))
5124 {
5125 error ("invalid %<PHI%> argument");
5126 debug_generic_expr (t);
5127 err |= true;
5128 }
5129 #ifdef ENABLE_TYPES_CHECKING
5130 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5131 {
5132 error ("incompatible types in %<PHI%> argument %u", i);
5133 debug_generic_stmt (TREE_TYPE (phi_result));
5134 debug_generic_stmt (TREE_TYPE (t));
5135 err |= true;
5136 }
5137 #endif
5138 }
5139
5140 return err;
5141 }
5142
5143 /* Verify the GIMPLE statements inside the sequence STMTS. */
5144
5145 static bool
5146 verify_gimple_in_seq_2 (gimple_seq stmts)
5147 {
5148 gimple_stmt_iterator ittr;
5149 bool err = false;
5150
5151 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5152 {
5153 gimple *stmt = gsi_stmt (ittr);
5154
5155 switch (gimple_code (stmt))
5156 {
5157 case GIMPLE_BIND:
5158 err |= verify_gimple_in_seq_2 (
5159 gimple_bind_body (as_a <gbind *> (stmt)));
5160 break;
5161
5162 case GIMPLE_TRY:
5163 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5164 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5165 break;
5166
5167 case GIMPLE_EH_FILTER:
5168 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5169 break;
5170
5171 case GIMPLE_EH_ELSE:
5172 {
5173 geh_else *eh_else = as_a <geh_else *> (stmt);
5174 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5175 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5176 }
5177 break;
5178
5179 case GIMPLE_CATCH:
5180 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5181 as_a <gcatch *> (stmt)));
5182 break;
5183
5184 case GIMPLE_TRANSACTION:
5185 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5186 break;
5187
5188 default:
5189 {
5190 bool err2 = verify_gimple_stmt (stmt);
5191 if (err2)
5192 debug_gimple_stmt (stmt);
5193 err |= err2;
5194 }
5195 }
5196 }
5197
5198 return err;
5199 }
5200
5201 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5202 is a problem, otherwise false. */
5203
5204 static bool
5205 verify_gimple_transaction (gtransaction *stmt)
5206 {
5207 tree lab;
5208
5209 lab = gimple_transaction_label_norm (stmt);
5210 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5211 return true;
5212 lab = gimple_transaction_label_uninst (stmt);
5213 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5214 return true;
5215 lab = gimple_transaction_label_over (stmt);
5216 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5217 return true;
5218
5219 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5220 }
5221
5222
5223 /* Verify the GIMPLE statements inside the statement list STMTS. */
5224
5225 DEBUG_FUNCTION void
5226 verify_gimple_in_seq (gimple_seq stmts)
5227 {
5228 timevar_push (TV_TREE_STMT_VERIFY);
5229 if (verify_gimple_in_seq_2 (stmts))
5230 internal_error ("%<verify_gimple%> failed");
5231 timevar_pop (TV_TREE_STMT_VERIFY);
5232 }
5233
5234 /* Return true when T can be shared. */
5235
5236 static bool
5237 tree_node_can_be_shared (tree t)
5238 {
5239 if (IS_TYPE_OR_DECL_P (t)
5240 || TREE_CODE (t) == SSA_NAME
5241 || TREE_CODE (t) == IDENTIFIER_NODE
5242 || TREE_CODE (t) == CASE_LABEL_EXPR
5243 || is_gimple_min_invariant (t))
5244 return true;
5245
5246 if (t == error_mark_node)
5247 return true;
5248
5249 return false;
5250 }
5251
5252 /* Called via walk_tree. Verify tree sharing. */
5253
5254 static tree
5255 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5256 {
5257 hash_set<void *> *visited = (hash_set<void *> *) data;
5258
5259 if (tree_node_can_be_shared (*tp))
5260 {
5261 *walk_subtrees = false;
5262 return NULL;
5263 }
5264
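  /* Seeing a node for a second time means it is shared, which is not
     allowed; return it so the caller reports the error.  */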
5265 if (visited->add (*tp))
5266 return *tp;
5267
5268 return NULL;
5269 }
5270
5271 /* Called via walk_gimple_stmt. Verify tree sharing. */
5272
5273 static tree
5274 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5275 {
5276 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5277 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5278 }
5279
5280 static bool eh_error_found;
5281 bool
5282 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5283 hash_set<gimple *> *visited)
5284 {
5285 if (!visited->contains (stmt))
5286 {
5287 error ("dead statement in EH table");
5288 debug_gimple_stmt (stmt);
5289 eh_error_found = true;
5290 }
5291 return true;
5292 }
5293
5294 /* Verify that location LOC's block is in BLOCKS. */
5295
5296 static bool
5297 verify_location (hash_set<tree> *blocks, location_t loc)
5298 {
5299 tree block = LOCATION_BLOCK (loc);
5300 if (block != NULL_TREE
5301 && !blocks->contains (block))
5302 {
5303 error ("location references block not in block tree");
5304 return true;
5305 }
5306 if (block != NULL_TREE)
5307 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5308 return false;
5309 }
5310
5311 /* Called via walk_tree. Verify that expressions have no blocks. */
5312
5313 static tree
5314 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5315 {
5316 if (!EXPR_P (*tp))
5317 {
5318 *walk_subtrees = false;
5319 return NULL;
5320 }
5321
5322 location_t loc = EXPR_LOCATION (*tp);
5323 if (LOCATION_BLOCK (loc) != NULL)
5324 return *tp;
5325
5326 return NULL;
5327 }
5328
5329 /* Called via walk_tree. Verify locations of expressions. */
5330
5331 static tree
5332 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5333 {
5334 hash_set<tree> *blocks = (hash_set<tree> *) data;
5335 tree t = *tp;
5336
5337 /* ??? This doesn't really belong here but there's no good place to
5338 stick this remainder of old verify_expr. */
5339 /* ??? This barfs on debug stmts which contain binds to vars with
5340 different function context. */
5341 #if 0
5342 if (VAR_P (t)
5343 || TREE_CODE (t) == PARM_DECL
5344 || TREE_CODE (t) == RESULT_DECL)
5345 {
5346 tree context = decl_function_context (t);
5347 if (context != cfun->decl
5348 && !SCOPE_FILE_SCOPE_P (context)
5349 && !TREE_STATIC (t)
5350 && !DECL_EXTERNAL (t))
5351 {
5352 error ("local declaration from a different function");
5353 return t;
5354 }
5355 }
5356 #endif
5357
5358 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5359 {
5360 tree x = DECL_DEBUG_EXPR (t);
5361 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5362 if (addr)
5363 return addr;
5364 }
5365 if ((VAR_P (t)
5366 || TREE_CODE (t) == PARM_DECL
5367 || TREE_CODE (t) == RESULT_DECL)
5368 && DECL_HAS_VALUE_EXPR_P (t))
5369 {
5370 tree x = DECL_VALUE_EXPR (t);
5371 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5372 if (addr)
5373 return addr;
5374 }
5375
5376 if (!EXPR_P (t))
5377 {
5378 *walk_subtrees = false;
5379 return NULL;
5380 }
5381
5382 location_t loc = EXPR_LOCATION (t);
5383 if (verify_location (blocks, loc))
5384 return t;
5385
5386 return NULL;
5387 }
5388
5389 /* Called via walk_gimple_op. Verify locations of expressions. */
5390
5391 static tree
5392 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5393 {
5394 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5395 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5396 }
5397
5398 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5399
5400 static void
5401 collect_subblocks (hash_set<tree> *blocks, tree block)
5402 {
5403 tree t;
5404 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5405 {
5406 blocks->add (t);
5407 collect_subblocks (blocks, t);
5408 }
5409 }
5410
5411 /* Disable warnings about missing quoting in GCC diagnostics for
5412 the verification errors. Their format strings don't follow
5413 GCC diagnostic conventions and trigger an ICE in the end. */
5414 #if __GNUC__ >= 10
5415 # pragma GCC diagnostic push
5416 # pragma GCC diagnostic ignored "-Wformat-diag"
5417 #endif
5418
5419 /* Verify the GIMPLE statements in the CFG of FN. */
5420
5421 DEBUG_FUNCTION void
5422 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
5423 {
5424 basic_block bb;
5425 bool err = false;
5426
5427 timevar_push (TV_TREE_STMT_VERIFY);
5428 hash_set<void *> visited;
5429 hash_set<gimple *> visited_throwing_stmts;
5430
5431 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5432 hash_set<tree> blocks;
5433 if (DECL_INITIAL (fn->decl))
5434 {
5435 blocks.add (DECL_INITIAL (fn->decl));
5436 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5437 }
5438
5439 FOR_EACH_BB_FN (bb, fn)
5440 {
5441 gimple_stmt_iterator gsi;
5442 edge_iterator ei;
5443 edge e;
5444
5445 for (gphi_iterator gpi = gsi_start_phis (bb);
5446 !gsi_end_p (gpi);
5447 gsi_next (&gpi))
5448 {
5449 gphi *phi = gpi.phi ();
5450 bool err2 = false;
5451 unsigned i;
5452
5453 if (gimple_bb (phi) != bb)
5454 {
5455 error ("gimple_bb (phi) is set to a wrong basic block");
5456 err2 = true;
5457 }
5458
5459 err2 |= verify_gimple_phi (phi);
5460
5461 /* Only PHI arguments have locations. */
5462 if (gimple_location (phi) != UNKNOWN_LOCATION)
5463 {
5464 error ("PHI node with location");
5465 err2 = true;
5466 }
5467
5468 for (i = 0; i < gimple_phi_num_args (phi); i++)
5469 {
5470 tree arg = gimple_phi_arg_def (phi, i);
5471 tree addr = walk_tree (&arg, verify_node_sharing_1,
5472 &visited, NULL);
5473 if (addr)
5474 {
5475 error ("incorrect sharing of tree nodes");
5476 debug_generic_expr (addr);
5477 err2 |= true;
5478 }
5479 location_t loc = gimple_phi_arg_location (phi, i);
5480 if (virtual_operand_p (gimple_phi_result (phi))
5481 && loc != UNKNOWN_LOCATION)
5482 {
5483 error ("virtual PHI with argument locations");
5484 err2 = true;
5485 }
5486 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5487 if (addr)
5488 {
5489 debug_generic_expr (addr);
5490 err2 = true;
5491 }
5492 err2 |= verify_location (&blocks, loc);
5493 }
5494
5495 if (err2)
5496 debug_gimple_stmt (phi);
5497 err |= err2;
5498 }
5499
5500 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5501 {
5502 gimple *stmt = gsi_stmt (gsi);
5503 bool err2 = false;
5504 struct walk_stmt_info wi;
5505 tree addr;
5506 int lp_nr;
5507
5508 if (gimple_bb (stmt) != bb)
5509 {
5510 error ("gimple_bb (stmt) is set to a wrong basic block");
5511 err2 = true;
5512 }
5513
5514 err2 |= verify_gimple_stmt (stmt);
5515 err2 |= verify_location (&blocks, gimple_location (stmt));
5516
5517 memset (&wi, 0, sizeof (wi));
5518 wi.info = (void *) &visited;
5519 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5520 if (addr)
5521 {
5522 error ("incorrect sharing of tree nodes");
5523 debug_generic_expr (addr);
5524 err2 |= true;
5525 }
5526
5527 memset (&wi, 0, sizeof (wi));
5528 wi.info = (void *) &blocks;
5529 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5530 if (addr)
5531 {
5532 debug_generic_expr (addr);
5533 err2 |= true;
5534 }
5535
5536 /* If the statement is marked as part of an EH region, then it is
5537 expected that the statement could throw. Verify that when we
5538 have optimizations that simplify statements such that we prove
5539 that they cannot throw, that we update other data structures
5540 to match. */
5541 lp_nr = lookup_stmt_eh_lp (stmt);
5542 if (lp_nr != 0)
5543 visited_throwing_stmts.add (stmt);
5544 if (lp_nr > 0)
5545 {
5546 if (!stmt_could_throw_p (cfun, stmt))
5547 {
5548 if (verify_nothrow)
5549 {
5550 error ("statement marked for throw, but doesn%'t");
5551 err2 |= true;
5552 }
5553 }
5554 else if (!gsi_one_before_end_p (gsi))
5555 {
5556 error ("statement marked for throw in middle of block");
5557 err2 |= true;
5558 }
5559 }
5560
5561 if (err2)
5562 debug_gimple_stmt (stmt);
5563 err |= err2;
5564 }
5565
5566 FOR_EACH_EDGE (e, ei, bb->succs)
5567 if (e->goto_locus != UNKNOWN_LOCATION)
5568 err |= verify_location (&blocks, e->goto_locus);
5569 }
5570
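  /* Every statement recorded in the EH throw table must have been visited
     above; anything left over is a dead entry in the table.  */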
5571 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5572 eh_error_found = false;
5573 if (eh_table)
5574 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5575 (&visited_throwing_stmts);
5576
5577 if (err || eh_error_found)
5578 internal_error ("verify_gimple failed");
5579
5580 verify_histograms ();
5581 timevar_pop (TV_TREE_STMT_VERIFY);
5582 }
5583
5584
5585 /* Verifies that the flow information is OK. */
5586
5587 static int
5588 gimple_verify_flow_info (void)
5589 {
5590 int err = 0;
5591 basic_block bb;
5592 gimple_stmt_iterator gsi;
5593 gimple *stmt;
5594 edge e;
5595 edge_iterator ei;
5596
5597 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5598 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5599 {
5600 error ("ENTRY_BLOCK has IL associated with it");
5601 err = 1;
5602 }
5603
5604 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5605 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5606 {
5607 error ("EXIT_BLOCK has IL associated with it");
5608 err = 1;
5609 }
5610
5611 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5612 if (e->flags & EDGE_FALLTHRU)
5613 {
5614 error ("fallthru to exit from bb %d", e->src->index);
5615 err = 1;
5616 }
5617
5618 FOR_EACH_BB_FN (bb, cfun)
5619 {
5620 bool found_ctrl_stmt = false;
5621
5622 stmt = NULL;
5623
5624 /* Skip labels at the start of the basic block. */
5625 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5626 {
5627 tree label;
5628 gimple *prev_stmt = stmt;
5629
5630 stmt = gsi_stmt (gsi);
5631
5632 if (gimple_code (stmt) != GIMPLE_LABEL)
5633 break;
5634
5635 label = gimple_label_label (as_a <glabel *> (stmt));
5636 if (prev_stmt && DECL_NONLOCAL (label))
5637 {
5638 error ("nonlocal label %qD is not first in a sequence "
5639 "of labels in bb %d", label, bb->index);
5640 err = 1;
5641 }
5642
5643 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5644 {
5645 error ("EH landing pad label %qD is not first in a sequence "
5646 "of labels in bb %d", label, bb->index);
5647 err = 1;
5648 }
5649
5650 if (label_to_block (cfun, label) != bb)
5651 {
5652 error ("label %qD to block does not match in bb %d",
5653 label, bb->index);
5654 err = 1;
5655 }
5656
5657 if (decl_function_context (label) != current_function_decl)
5658 {
5659 error ("label %qD has incorrect context in bb %d",
5660 label, bb->index);
5661 err = 1;
5662 }
5663 }
5664
5665 /* Verify that body of basic block BB is free of control flow. */
5666 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5667 {
5668 gimple *stmt = gsi_stmt (gsi);
5669
5670 if (found_ctrl_stmt)
5671 {
5672 error ("control flow in the middle of basic block %d",
5673 bb->index);
5674 err = 1;
5675 }
5676
5677 if (stmt_ends_bb_p (stmt))
5678 found_ctrl_stmt = true;
5679
5680 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5681 {
5682 error ("label %qD in the middle of basic block %d",
5683 gimple_label_label (label_stmt), bb->index);
5684 err = 1;
5685 }
5686 }
5687
5688 gsi = gsi_last_nondebug_bb (bb);
5689 if (gsi_end_p (gsi))
5690 continue;
5691
5692 stmt = gsi_stmt (gsi);
5693
5694 if (gimple_code (stmt) == GIMPLE_LABEL)
5695 continue;
5696
5697 err |= verify_eh_edges (stmt);
5698
5699 if (is_ctrl_stmt (stmt))
5700 {
5701 FOR_EACH_EDGE (e, ei, bb->succs)
5702 if (e->flags & EDGE_FALLTHRU)
5703 {
5704 error ("fallthru edge after a control statement in bb %d",
5705 bb->index);
5706 err = 1;
5707 }
5708 }
5709
5710 if (gimple_code (stmt) != GIMPLE_COND)
5711 {
5712 /* Verify that there are no edges with EDGE_TRUE/FALSE_VALUE set
5713 after anything other than a GIMPLE_COND. */
5714 FOR_EACH_EDGE (e, ei, bb->succs)
5715 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5716 {
5717 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5718 bb->index);
5719 err = 1;
5720 }
5721 }
5722
5723 switch (gimple_code (stmt))
5724 {
5725 case GIMPLE_COND:
5726 {
5727 edge true_edge;
5728 edge false_edge;
5729
5730 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5731
5732 if (!true_edge
5733 || !false_edge
5734 || !(true_edge->flags & EDGE_TRUE_VALUE)
5735 || !(false_edge->flags & EDGE_FALSE_VALUE)
5736 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5737 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5738 || EDGE_COUNT (bb->succs) >= 3)
5739 {
5740 error ("wrong outgoing edge flags at end of bb %d",
5741 bb->index);
5742 err = 1;
5743 }
5744 }
5745 break;
5746
5747 case GIMPLE_GOTO:
5748 if (simple_goto_p (stmt))
5749 {
5750 error ("explicit goto at end of bb %d", bb->index);
5751 err = 1;
5752 }
5753 else
5754 {
5755 /* FIXME. We should double check that the labels in the
5756 destination blocks have their address taken. */
5757 FOR_EACH_EDGE (e, ei, bb->succs)
5758 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5759 | EDGE_FALSE_VALUE))
5760 || !(e->flags & EDGE_ABNORMAL))
5761 {
5762 error ("wrong outgoing edge flags at end of bb %d",
5763 bb->index);
5764 err = 1;
5765 }
5766 }
5767 break;
5768
5769 case GIMPLE_CALL:
5770 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5771 break;
5772 /* fallthru */
5773 case GIMPLE_RETURN:
5774 if (!single_succ_p (bb)
5775 || (single_succ_edge (bb)->flags
5776 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5777 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5778 {
5779 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5780 err = 1;
5781 }
5782 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5783 {
5784 error ("return edge does not point to exit in bb %d",
5785 bb->index);
5786 err = 1;
5787 }
5788 break;
5789
5790 case GIMPLE_SWITCH:
5791 {
5792 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5793 tree prev;
5794 edge e;
5795 size_t i, n;
5796
5797 n = gimple_switch_num_labels (switch_stmt);
5798
5799 /* Mark all the destination basic blocks. */
5800 for (i = 0; i < n; ++i)
5801 {
5802 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5803 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5804 label_bb->aux = (void *)1;
5805 }
5806
5807 /* Verify that the case labels are sorted. */
5808 prev = gimple_switch_label (switch_stmt, 0);
5809 for (i = 1; i < n; ++i)
5810 {
5811 tree c = gimple_switch_label (switch_stmt, i);
5812 if (!CASE_LOW (c))
5813 {
5814 error ("found default case not at the start of "
5815 "case vector");
5816 err = 1;
5817 continue;
5818 }
5819 if (CASE_LOW (prev)
5820 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5821 {
5822 error ("case labels not sorted: ");
5823 print_generic_expr (stderr, prev);
5824 fprintf (stderr," is greater than ");
5825 print_generic_expr (stderr, c);
5826 fprintf (stderr," but comes before it.\n");
5827 err = 1;
5828 }
5829 prev = c;
5830 }
5831 /* VRP will remove the default case if it can prove it will
5832 never be executed. So do not verify there always exists
5833 a default case here. */
5834
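	  /* Walk the outgoing edges: each destination must have been
	     marked above (aux == 1); remark it with 2 so the loop
	     further below can detect case labels whose edge is
	     missing.  */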
5835 FOR_EACH_EDGE (e, ei, bb->succs)
5836 {
5837 if (!e->dest->aux)
5838 {
5839 error ("extra outgoing edge %d->%d",
5840 bb->index, e->dest->index);
5841 err = 1;
5842 }
5843
5844 e->dest->aux = (void *)2;
5845 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5846 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5847 {
5848 error ("wrong outgoing edge flags at end of bb %d",
5849 bb->index);
5850 err = 1;
5851 }
5852 }
5853
5854 /* Check that we have all of them. */
5855 for (i = 0; i < n; ++i)
5856 {
5857 basic_block label_bb = gimple_switch_label_bb (cfun,
5858 switch_stmt, i);
5859
5860 if (label_bb->aux != (void *)2)
5861 {
5862 error ("missing edge %i->%i", bb->index, label_bb->index);
5863 err = 1;
5864 }
5865 }
5866
5867 FOR_EACH_EDGE (e, ei, bb->succs)
5868 e->dest->aux = (void *)0;
5869 }
5870 break;
5871
5872 case GIMPLE_EH_DISPATCH:
5873 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5874 break;
5875
5876 default:
5877 break;
5878 }
5879 }
5880
5881 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5882 verify_dominators (CDI_DOMINATORS);
5883
5884 return err;
5885 }
5886
5887 #if __GNUC__ >= 10
5888 # pragma GCC diagnostic pop
5889 #endif
5890
5891 /* Updates phi nodes after creating a forwarder block joined
5892 by edge FALLTHRU. */
5893
5894 static void
5895 gimple_make_forwarder_block (edge fallthru)
5896 {
5897 edge e;
5898 edge_iterator ei;
5899 basic_block dummy, bb;
5900 tree var;
5901 gphi_iterator gsi;
5902 bool forward_location_p;
5903
5904 dummy = fallthru->src;
5905 bb = fallthru->dest;
5906
5907 if (single_pred_p (bb))
5908 return;
5909
5910 /* We can forward location info if we have only one predecessor. */
5911 forward_location_p = single_pred_p (dummy);
5912
5913 /* If we redirected a branch we must create new PHI nodes at the
5914 start of BB. */
5915 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5916 {
5917 gphi *phi, *new_phi;
5918
5919 phi = gsi.phi ();
5920 var = gimple_phi_result (phi);
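      /* The new PHI in BB takes over VAR as its result; the PHI in
	 DUMMY is given a fresh SSA name, which then feeds the new PHI
	 as its argument along FALLTHRU.  */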
5921 new_phi = create_phi_node (var, bb);
5922 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5923 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5924 forward_location_p
5925 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
5926 }
5927
5928 /* Add the arguments we have stored on edges. */
5929 FOR_EACH_EDGE (e, ei, bb->preds)
5930 {
5931 if (e == fallthru)
5932 continue;
5933
5934 flush_pending_stmts (e);
5935 }
5936 }
5937
5938
5939 /* Return a non-special label at the head of basic block BB.
5940 Create one if it doesn't exist. */
5941
5942 tree
5943 gimple_block_label (basic_block bb)
5944 {
5945 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5946 bool first = true;
5947 tree label;
5948 glabel *stmt;
5949
5950 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5951 {
5952 stmt = dyn_cast <glabel *> (gsi_stmt (i));
5953 if (!stmt)
5954 break;
5955 label = gimple_label_label (stmt);
5956 if (!DECL_NONLOCAL (label))
5957 {
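	  /* Reuse this label.  If it is not already the first statement,
	     move it to the head of the block so later lookups find it
	     right away.  */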
5958 if (!first)
5959 gsi_move_before (&i, &s);
5960 return label;
5961 }
5962 }
5963
5964 label = create_artificial_label (UNKNOWN_LOCATION);
5965 stmt = gimple_build_label (label);
5966 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5967 return label;
5968 }
5969
5970
5971 /* Attempt to perform edge redirection by replacing a possibly complex
5972 jump instruction by a goto or by removing the jump completely.
5973 This can apply only if all edges now point to the same block. The
5974 parameters and return values are equivalent to
5975 redirect_edge_and_branch. */
5976
5977 static edge
5978 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5979 {
5980 basic_block src = e->src;
5981 gimple_stmt_iterator i;
5982 gimple *stmt;
5983
5984 /* We can replace or remove a complex jump only when we have exactly
5985 two edges. */
5986 if (EDGE_COUNT (src->succs) != 2
5987 /* Verify that all targets will be TARGET. Specifically, the
5988 edge that is not E must also go to TARGET. */
5989 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5990 return NULL;
5991
5992 i = gsi_last_bb (src);
5993 if (gsi_end_p (i))
5994 return NULL;
5995
5996 stmt = gsi_stmt (i);
5997
5998 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5999 {
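      /* The condition or switch is redundant because both successors
	 lead to TARGET; delete it and turn E into a plain fallthru
	 edge.  */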
6000 gsi_remove (&i, true);
6001 e = ssa_redirect_edge (e, target);
6002 e->flags = EDGE_FALLTHRU;
6003 return e;
6004 }
6005
6006 return NULL;
6007 }
6008
6009
6010 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6011 edge representing the redirected branch. */
6012
6013 static edge
6014 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6015 {
6016 basic_block bb = e->src;
6017 gimple_stmt_iterator gsi;
6018 edge ret;
6019 gimple *stmt;
6020
6021 if (e->flags & EDGE_ABNORMAL)
6022 return NULL;
6023
6024 if (e->dest == dest)
6025 return NULL;
6026
6027 if (e->flags & EDGE_EH)
6028 return redirect_eh_edge (e, dest);
6029
6030 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6031 {
6032 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6033 if (ret)
6034 return ret;
6035 }
6036
6037 gsi = gsi_last_nondebug_bb (bb);
6038 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6039
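  /* When the block has no statement, fake the code as GIMPLE_ERROR_MARK
     so the switch below falls through to the default, plain-fallthru
     case.  */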
6040 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6041 {
6042 case GIMPLE_COND:
6043 /* For COND_EXPR, we only need to redirect the edge. */
6044 break;
6045
6046 case GIMPLE_GOTO:
6047 /* No non-abnormal edges should lead from a non-simple goto, and
6048 simple ones should be represented implicitly. */
6049 gcc_unreachable ();
6050
6051 case GIMPLE_SWITCH:
6052 {
6053 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6054 tree label = gimple_block_label (dest);
6055 tree cases = get_cases_for_edge (e, switch_stmt);
6056
6057 /* If we have a list of cases associated with E, then use it
6058 as it's a lot faster than walking the entire case vector. */
6059 if (cases)
6060 {
6061 edge e2 = find_edge (e->src, dest);
6062 tree last, first;
6063
6064 first = cases;
6065 while (cases)
6066 {
6067 last = cases;
6068 CASE_LABEL (cases) = label;
6069 cases = CASE_CHAIN (cases);
6070 }
6071
6072 /* If there was already an edge in the CFG, then we need
6073 to move all the cases associated with E to E2. */
6074 if (e2)
6075 {
6076 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6077
6078 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6079 CASE_CHAIN (cases2) = first;
6080 }
6081 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6082 }
6083 else
6084 {
6085 size_t i, n = gimple_switch_num_labels (switch_stmt);
6086
6087 for (i = 0; i < n; i++)
6088 {
6089 tree elt = gimple_switch_label (switch_stmt, i);
6090 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6091 CASE_LABEL (elt) = label;
6092 }
6093 }
6094 }
6095 break;
6096
6097 case GIMPLE_ASM:
6098 {
6099 gasm *asm_stmt = as_a <gasm *> (stmt);
6100 int i, n = gimple_asm_nlabels (asm_stmt);
6101 tree label = NULL;
6102
6103 for (i = 0; i < n; ++i)
6104 {
6105 tree cons = gimple_asm_label_op (asm_stmt, i);
6106 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6107 {
6108 if (!label)
6109 label = gimple_block_label (dest);
6110 TREE_VALUE (cons) = label;
6111 }
6112 }
6113
6114 /* If we didn't find any label matching the former edge in the
6115 asm labels, we must be redirecting the fallthrough
6116 edge. */
6117 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6118 }
6119 break;
6120
6121 case GIMPLE_RETURN:
6122 gsi_remove (&gsi, true);
6123 e->flags |= EDGE_FALLTHRU;
6124 break;
6125
6126 case GIMPLE_OMP_RETURN:
6127 case GIMPLE_OMP_CONTINUE:
6128 case GIMPLE_OMP_SECTIONS_SWITCH:
6129 case GIMPLE_OMP_FOR:
6130 /* The edges from OMP constructs can be simply redirected. */
6131 break;
6132
6133 case GIMPLE_EH_DISPATCH:
6134 if (!(e->flags & EDGE_FALLTHRU))
6135 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6136 break;
6137
6138 case GIMPLE_TRANSACTION:
6139 if (e->flags & EDGE_TM_ABORT)
6140 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6141 gimple_block_label (dest));
6142 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6143 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6144 gimple_block_label (dest));
6145 else
6146 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6147 gimple_block_label (dest));
6148 break;
6149
6150 default:
6151 /* Otherwise it must be a fallthru edge, and we don't need to
6152 do anything besides redirecting it. */
6153 gcc_assert (e->flags & EDGE_FALLTHRU);
6154 break;
6155 }
6156
6157 /* Update/insert PHI nodes as necessary. */
6158
6159 /* Now update the edges in the CFG. */
6160 e = ssa_redirect_edge (e, dest);
6161
6162 return e;
6163 }
6164
6165 /* Returns true if it is possible to remove edge E by redirecting
6166 it to the destination of the other edge from E->src. */
6167
6168 static bool
6169 gimple_can_remove_branch_p (const_edge e)
6170 {
6171 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6172 return false;
6173
6174 return true;
6175 }
6176
6177 /* Simple wrapper, as we can always redirect fallthru edges. */
6178
6179 static basic_block
6180 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6181 {
6182 e = gimple_redirect_edge_and_branch (e, dest);
6183 gcc_assert (e);
6184
6185 return NULL;
6186 }
6187
6188
6189 /* Splits basic block BB after statement STMT (but at least after the
6190 labels). If STMT is NULL, BB is split just after the labels. */
6191
6192 static basic_block
6193 gimple_split_block (basic_block bb, void *stmt)
6194 {
6195 gimple_stmt_iterator gsi;
6196 gimple_stmt_iterator gsi_tgt;
6197 gimple_seq list;
6198 basic_block new_bb;
6199 edge e;
6200 edge_iterator ei;
6201
6202 new_bb = create_empty_bb (bb);
6203
6204 /* Redirect the outgoing edges. */
6205 new_bb->succs = bb->succs;
6206 bb->succs = NULL;
6207 FOR_EACH_EDGE (e, ei, new_bb->succs)
6208 e->src = new_bb;
6209
6210 /* Get a stmt iterator pointing to the first stmt to move. */
6211 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6212 gsi = gsi_after_labels (bb);
6213 else
6214 {
6215 gsi = gsi_for_stmt ((gimple *) stmt);
6216 gsi_next (&gsi);
6217 }
6218
6219 /* Move everything from GSI to the new basic block. */
6220 if (gsi_end_p (gsi))
6221 return new_bb;
6222
6223 /* Split the statement list - avoid re-creating new containers as this
6224 brings ugly quadratic memory consumption in the inliner.
6225 (We are still quadratic since we need to update stmt BB pointers,
6226 sadly.) */
6227 gsi_split_seq_before (&gsi, &list);
6228 set_bb_seq (new_bb, list);
6229 for (gsi_tgt = gsi_start (list);
6230 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6231 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6232
6233 return new_bb;
6234 }
6235
6236
6237 /* Moves basic block BB after block AFTER. */
6238
6239 static bool
6240 gimple_move_block_after (basic_block bb, basic_block after)
6241 {
6242 if (bb->prev_bb == after)
6243 return true;
6244
6245 unlink_block (bb);
6246 link_block (bb, after);
6247
6248 return true;
6249 }
6250
6251
6252 /* Return TRUE if block BB has no executable statements, otherwise return
6253 FALSE. */
6254
6255 static bool
6256 gimple_empty_block_p (basic_block bb)
6257 {
6258 /* BB must have no executable statements. */
6259 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6260 if (phi_nodes (bb))
6261 return false;
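  /* Debug statements, GIMPLE_NOPs and GIMPLE_PREDICTs do not count
     as executable.  */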
6262 while (!gsi_end_p (gsi))
6263 {
6264 gimple *stmt = gsi_stmt (gsi);
6265 if (is_gimple_debug (stmt))
6266 ;
6267 else if (gimple_code (stmt) == GIMPLE_NOP
6268 || gimple_code (stmt) == GIMPLE_PREDICT)
6269 ;
6270 else
6271 return false;
6272 gsi_next (&gsi);
6273 }
6274 return true;
6275 }
6276
6277
6278 /* Split a basic block if it ends with a conditional branch and if the
6279 other part of the block is not empty. */
6280
6281 static basic_block
6282 gimple_split_block_before_cond_jump (basic_block bb)
6283 {
6284 gimple *last, *split_point;
6285 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6286 if (gsi_end_p (gsi))
6287 return NULL;
6288 last = gsi_stmt (gsi);
6289 if (gimple_code (last) != GIMPLE_COND
6290 && gimple_code (last) != GIMPLE_SWITCH)
6291 return NULL;
6292 gsi_prev (&gsi);
6293 split_point = gsi_stmt (gsi);
6294 return split_block (bb, split_point)->dest;
6295 }
6296
6297
6298 /* Return true if basic_block can be duplicated. */
6299
6300 static bool
6301 gimple_can_duplicate_bb_p (const_basic_block bb)
6302 {
6303 gimple *last = last_stmt (CONST_CAST_BB (bb));
6304
6305 /* Do checks that can only fail for the last stmt, to minimize the work in the
6306 stmt loop. */
6307 if (last) {
6308 /* A transaction is a single entry multiple exit region. It
6309 must be duplicated in its entirety or not at all. */
6310 if (gimple_code (last) == GIMPLE_TRANSACTION)
6311 return false;
6312
6313 /* An IFN_UNIQUE call must be duplicated as part of its group,
6314 or not at all. */
6315 if (is_gimple_call (last)
6316 && gimple_call_internal_p (last)
6317 && gimple_call_internal_unique_p (last))
6318 return false;
6319 }
6320
6321 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6322 !gsi_end_p (gsi); gsi_next (&gsi))
6323 {
6324 gimple *g = gsi_stmt (gsi);
6325
6326 /* An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6327 duplicated as part of its group, or not at all.
6328 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6329 group, so the same holds there. */
6330 if (is_gimple_call (g)
6331 && (gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6332 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6333 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6334 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6335 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6336 return false;
6337 }
6338
6339 return true;
6340 }
6341
6342 /* Create a duplicate of the basic block BB. NOTE: This does not
6343 preserve SSA form. */
6344
6345 static basic_block
6346 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6347 {
6348 basic_block new_bb;
6349 gimple_stmt_iterator gsi_tgt;
6350
6351 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6352
6353 /* Copy the PHI nodes. We ignore PHI node arguments here because
6354 the incoming edges have not been setup yet. */
6355 for (gphi_iterator gpi = gsi_start_phis (bb);
6356 !gsi_end_p (gpi);
6357 gsi_next (&gpi))
6358 {
6359 gphi *phi, *copy;
6360 phi = gpi.phi ();
6361 copy = create_phi_node (NULL_TREE, new_bb);
6362 create_new_def_for (gimple_phi_result (phi), copy,
6363 gimple_phi_result_ptr (copy));
6364 gimple_set_uid (copy, gimple_uid (phi));
6365 }
6366
6367 gsi_tgt = gsi_start_bb (new_bb);
6368 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6369 !gsi_end_p (gsi);
6370 gsi_next (&gsi))
6371 {
6372 def_operand_p def_p;
6373 ssa_op_iter op_iter;
6374 tree lhs;
6375 gimple *stmt, *copy;
6376
6377 stmt = gsi_stmt (gsi);
6378 if (gimple_code (stmt) == GIMPLE_LABEL)
6379 continue;
6380
6381 /* Don't duplicate label debug stmts. */
6382 if (gimple_debug_bind_p (stmt)
6383 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6384 == LABEL_DECL)
6385 continue;
6386
6387 /* Create a new copy of STMT and duplicate STMT's virtual
6388 operands. */
6389 copy = gimple_copy (stmt);
6390 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6391
6392 maybe_duplicate_eh_stmt (copy, stmt);
6393 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6394
6395 /* When copying around a stmt writing into a local non-user
6396 aggregate, make sure it won't share stack slot with other
6397 vars. */
6398 lhs = gimple_get_lhs (stmt);
6399 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6400 {
6401 tree base = get_base_address (lhs);
6402 if (base
6403 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6404 && DECL_IGNORED_P (base)
6405 && !TREE_STATIC (base)
6406 && !DECL_EXTERNAL (base)
6407 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6408 DECL_NONSHAREABLE (base) = 1;
6409 }
6410
6411 /* If requested remap dependence info of cliques brought in
6412 via inlining. */
6413 if (id)
6414 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6415 {
6416 tree op = gimple_op (copy, i);
6417 if (!op)
6418 continue;
6419 if (TREE_CODE (op) == ADDR_EXPR
6420 || TREE_CODE (op) == WITH_SIZE_EXPR)
6421 op = TREE_OPERAND (op, 0);
6422 while (handled_component_p (op))
6423 op = TREE_OPERAND (op, 0);
6424 if ((TREE_CODE (op) == MEM_REF
6425 || TREE_CODE (op) == TARGET_MEM_REF)
6426 && MR_DEPENDENCE_CLIQUE (op) > 1
6427 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6428 {
6429 if (!id->dependence_map)
6430 id->dependence_map = new hash_map<dependence_hash,
6431 unsigned short>;
6432 bool existed;
6433 unsigned short &newc = id->dependence_map->get_or_insert
6434 (MR_DEPENDENCE_CLIQUE (op), &existed);
6435 if (!existed)
6436 {
6437 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6438 newc = ++cfun->last_clique;
6439 }
6440 MR_DEPENDENCE_CLIQUE (op) = newc;
6441 }
6442 }
6443
6444 /* Create new names for all the definitions created by COPY and
6445 add replacement mappings for each new name. */
6446 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6447 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6448 }
6449
6450 return new_bb;
6451 }
6452
6453 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6454
6455 static void
6456 add_phi_args_after_copy_edge (edge e_copy)
6457 {
6458 basic_block bb, bb_copy = e_copy->src, dest;
6459 edge e;
6460 edge_iterator ei;
6461 gphi *phi, *phi_copy;
6462 tree def;
6463 gphi_iterator psi, psi_copy;
6464
6465 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6466 return;
6467
6468 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6469
6470 if (e_copy->dest->flags & BB_DUPLICATED)
6471 dest = get_bb_original (e_copy->dest);
6472 else
6473 dest = e_copy->dest;
6474
6475 e = find_edge (bb, dest);
6476 if (!e)
6477 {
6478 /* During loop unrolling the target of the latch edge is copied.
6479 In this case we are not looking for the edge to DEST, but for the
6480 edge to the duplicated block whose original was DEST. */
6481 FOR_EACH_EDGE (e, ei, bb->succs)
6482 {
6483 if ((e->dest->flags & BB_DUPLICATED)
6484 && get_bb_original (e->dest) == dest)
6485 break;
6486 }
6487
6488 gcc_assert (e != NULL);
6489 }
6490
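  /* The PHI nodes of E_COPY->dest were created in the same order as
     those of E->dest, so walk both sequences in lockstep and copy the
     argument coming in through E to the corresponding copy.  */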
6491 for (psi = gsi_start_phis (e->dest),
6492 psi_copy = gsi_start_phis (e_copy->dest);
6493 !gsi_end_p (psi);
6494 gsi_next (&psi), gsi_next (&psi_copy))
6495 {
6496 phi = psi.phi ();
6497 phi_copy = psi_copy.phi ();
6498 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6499 add_phi_arg (phi_copy, def, e_copy,
6500 gimple_phi_arg_location_from_edge (phi, e));
6501 }
6502 }
6503
6504
6505 /* Basic block BB_COPY was created by code duplication. Add phi node
6506 arguments for edges going out of BB_COPY. The blocks that were
6507 duplicated have BB_DUPLICATED set. */
6508
6509 void
6510 add_phi_args_after_copy_bb (basic_block bb_copy)
6511 {
6512 edge e_copy;
6513 edge_iterator ei;
6514
6515 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6516 {
6517 add_phi_args_after_copy_edge (e_copy);
6518 }
6519 }
6520
6521 /* Blocks in REGION_COPY array of length N_REGION were created by
6522 duplication of basic blocks. Add phi node arguments for edges
6523 going from these blocks. If E_COPY is not NULL, also add
6524 phi node arguments for its destination. */
6525
6526 void
6527 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6528 edge e_copy)
6529 {
6530 unsigned i;
6531
6532 for (i = 0; i < n_region; i++)
6533 region_copy[i]->flags |= BB_DUPLICATED;
6534
6535 for (i = 0; i < n_region; i++)
6536 add_phi_args_after_copy_bb (region_copy[i]);
6537 if (e_copy)
6538 add_phi_args_after_copy_edge (e_copy);
6539
6540 for (i = 0; i < n_region; i++)
6541 region_copy[i]->flags &= ~BB_DUPLICATED;
6542 }
6543
6544 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6545 important exit edge EXIT. By important we mean that no SSA name defined
6546 inside region is live over the other exit edges of the region. All entry
6547 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6548 to the duplicate of the region. Dominance and loop information is
6549 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6550 UPDATE_DOMINANCE is false then we assume that the caller will update the
6551 dominance information after calling this function. The new basic
6552 blocks are stored to REGION_COPY in the same order as they had in REGION,
6553 provided that REGION_COPY is not NULL.
6554 The function returns false if it is unable to copy the region,
6555 true otherwise. */
6556
6557 bool
6558 gimple_duplicate_sese_region (edge entry, edge exit,
6559 basic_block *region, unsigned n_region,
6560 basic_block *region_copy,
6561 bool update_dominance)
6562 {
6563 unsigned i;
6564 bool free_region_copy = false, copying_header = false;
6565 class loop *loop = entry->dest->loop_father;
6566 edge exit_copy;
6567 edge redirected;
6568 profile_count total_count = profile_count::uninitialized ();
6569 profile_count entry_count = profile_count::uninitialized ();
6570
6571 if (!can_copy_bbs_p (region, n_region))
6572 return false;
6573
6574 /* Some sanity checking. Note that we do not check for all possible
6575 misuses of the function, i.e. if you ask to copy something weird,
6576 it will work, but the state of structures probably will not be
6577 correct. */
6578 for (i = 0; i < n_region; i++)
6579 {
6580 /* We do not handle subloops, i.e. all the blocks must belong to the
6581 same loop. */
6582 if (region[i]->loop_father != loop)
6583 return false;
6584
6585 if (region[i] != entry->dest
6586 && region[i] == loop->header)
6587 return false;
6588 }
6589
6590 /* In case the function is used for loop header copying (which is the primary
6591 use), ensure that EXIT and its copy will be new latch and entry edges. */
6592 if (loop->header == entry->dest)
6593 {
6594 copying_header = true;
6595
6596 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6597 return false;
6598
6599 for (i = 0; i < n_region; i++)
6600 if (region[i] != exit->src
6601 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6602 return false;
6603 }
6604
6605 initialize_original_copy_tables ();
6606
6607 if (copying_header)
6608 set_loop_copy (loop, loop_outer (loop));
6609 else
6610 set_loop_copy (loop, loop);
6611
6612 if (!region_copy)
6613 {
6614 region_copy = XNEWVEC (basic_block, n_region);
6615 free_region_copy = true;
6616 }
6617
6618 /* Record blocks outside the region that are dominated by something
6619 inside. */
6620 auto_vec<basic_block> doms;
6621 if (update_dominance)
6622 {
6623 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6624 }
6625
6626 if (entry->dest->count.initialized_p ())
6627 {
6628 total_count = entry->dest->count;
6629 entry_count = entry->count ();
6630 /* Fix up corner cases, to avoid division by zero or creation of negative
6631 frequencies. */
6632 if (entry_count > total_count)
6633 entry_count = total_count;
6634 }
6635
6636 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6637 split_edge_bb_loc (entry), update_dominance);
6638 if (total_count.initialized_p () && entry_count.initialized_p ())
6639 {
6640 scale_bbs_frequencies_profile_count (region, n_region,
6641 total_count - entry_count,
6642 total_count);
6643 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6644 total_count);
6645 }
6646
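  /* When the loop header was copied, the copy now guards the loop
     entry; inside the loop, EXIT->dest becomes the new header and
     EXIT->src the new latch.  */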
6647 if (copying_header)
6648 {
6649 loop->header = exit->dest;
6650 loop->latch = exit->src;
6651 }
6652
6653 /* Redirect the entry and add the phi node arguments. */
6654 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6655 gcc_assert (redirected != NULL);
6656 flush_pending_stmts (entry);
6657
6658 /* Concerning updating of dominators: We must recount dominators
6659 for entry block and its copy. Anything that is outside of the
6660 region, but was dominated by something inside needs recounting as
6661 well. */
6662 if (update_dominance)
6663 {
6664 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6665 doms.safe_push (get_bb_original (entry->dest));
6666 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6667 }
6668
6669 /* Add the other PHI node arguments. */
6670 add_phi_args_after_copy (region_copy, n_region, NULL);
6671
6672 if (free_region_copy)
6673 free (region_copy);
6674
6675 free_original_copy_tables ();
6676 return true;
6677 }
6678
6679 /* Checks if BB is part of the region defined by N_REGION BBS. */
6680 static bool
6681 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6682 {
6683 unsigned int n;
6684
6685 for (n = 0; n < n_region; n++)
6686 {
6687 if (bb == bbs[n])
6688 return true;
6689 }
6690 return false;
6691 }
6692
6693 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6694 are stored to REGION_COPY in the same order in which they appear
6695 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6696 the region, EXIT an exit from it. The condition guarding EXIT
6697 is moved to ENTRY. Returns true if duplication succeeds, false
6698 otherwise.
6699
6700 For example,
6701
6702 some_code;
6703 if (cond)
6704 A;
6705 else
6706 B;
6707
6708 is transformed to
6709
6710 if (cond)
6711 {
6712 some_code;
6713 A;
6714 }
6715 else
6716 {
6717 some_code;
6718 B;
6719 }
6720 */
6721
6722 bool
6723 gimple_duplicate_sese_tail (edge entry, edge exit,
6724 basic_block *region, unsigned n_region,
6725 basic_block *region_copy)
6726 {
6727 unsigned i;
6728 bool free_region_copy = false;
6729 class loop *loop = exit->dest->loop_father;
6730 class loop *orig_loop = entry->dest->loop_father;
6731 basic_block switch_bb, entry_bb, nentry_bb;
6732 profile_count total_count = profile_count::uninitialized (),
6733 exit_count = profile_count::uninitialized ();
6734 edge exits[2], nexits[2], e;
6735 gimple_stmt_iterator gsi;
6736 gimple *cond_stmt;
6737 edge sorig, snew;
6738 basic_block exit_bb;
6739 gphi_iterator psi;
6740 gphi *phi;
6741 tree def;
6742 class loop *target, *aloop, *cloop;
6743
6744 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6745 exits[0] = exit;
6746 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6747
6748 if (!can_copy_bbs_p (region, n_region))
6749 return false;
6750
6751 initialize_original_copy_tables ();
6752 set_loop_copy (orig_loop, loop);
6753
6754 target = loop;
6755 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6756 {
6757 if (bb_part_of_region_p (aloop->header, region, n_region))
6758 {
6759 cloop = duplicate_loop (aloop, target);
6760 duplicate_subloops (aloop, cloop);
6761 }
6762 }
6763
6764 if (!region_copy)
6765 {
6766 region_copy = XNEWVEC (basic_block, n_region);
6767 free_region_copy = true;
6768 }
6769
6770 gcc_assert (!need_ssa_update_p (cfun));
6771
6772 /* Record blocks outside the region that are dominated by something
6773 inside. */
6774 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6775 n_region);
6776
6777 total_count = exit->src->count;
6778 exit_count = exit->count ();
6779 /* Fix up corner cases, to avoid division by zero or creation of negative
6780 frequencies. */
6781 if (exit_count > total_count)
6782 exit_count = total_count;
6783
6784 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6785 split_edge_bb_loc (exit), true);
6786 if (total_count.initialized_p () && exit_count.initialized_p ())
6787 {
6788 scale_bbs_frequencies_profile_count (region, n_region,
6789 total_count - exit_count,
6790 total_count);
6791 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6792 total_count);
6793 }
6794
6795 /* Create the switch block, and put the exit condition to it. */
6796 entry_bb = entry->dest;
6797 nentry_bb = get_bb_copy (entry_bb);
6798 if (!last_stmt (entry->src)
6799 || !stmt_ends_bb_p (last_stmt (entry->src)))
6800 switch_bb = entry->src;
6801 else
6802 switch_bb = split_edge (entry);
6803 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6804
6805 gsi = gsi_last_bb (switch_bb);
6806 cond_stmt = last_stmt (exit->src);
6807 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6808 cond_stmt = gimple_copy (cond_stmt);
6809
6810 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6811
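  /* SWITCH_BB now ends in the copied exit condition.  Its pre-existing
     successor edge keeps the flags and probability of the non-exit
     edge EXITS[1], while the new edge into the copied region takes
     those of the exit edge EXITS[0].  */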
6812 sorig = single_succ_edge (switch_bb);
6813 sorig->flags = exits[1]->flags;
6814 sorig->probability = exits[1]->probability;
6815 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6816 snew->probability = exits[0]->probability;
6817
6818
6819 /* Register the new edge from SWITCH_BB in loop exit lists. */
6820 rescan_loop_exit (snew, true, false);
6821
6822 /* Add the PHI node arguments. */
6823 add_phi_args_after_copy (region_copy, n_region, snew);
6824
6825 /* Get rid of now superfluous conditions and associated edges (and phi node
6826 arguments). */
6827 exit_bb = exit->dest;
6828
6829 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6830 PENDING_STMT (e) = NULL;
6831
6832 /* The latch of ORIG_LOOP was copied, and so was the backedge
6833 to the original header. We redirect this backedge to EXIT_BB. */
6834 for (i = 0; i < n_region; i++)
6835 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6836 {
6837 gcc_assert (single_succ_edge (region_copy[i]));
6838 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6839 PENDING_STMT (e) = NULL;
6840 for (psi = gsi_start_phis (exit_bb);
6841 !gsi_end_p (psi);
6842 gsi_next (&psi))
6843 {
6844 phi = psi.phi ();
6845 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6846 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6847 }
6848 }
6849 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6850 PENDING_STMT (e) = NULL;
6851
6852 /* Anything that is outside of the region, but was dominated by something
6853 inside needs to update dominance info. */
6854 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6855 /* Update the SSA web. */
6856 update_ssa (TODO_update_ssa);
6857
6858 if (free_region_copy)
6859 free (region_copy);
6860
6861 free_original_copy_tables ();
6862 return true;
6863 }
6864
6865 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6866 adding blocks when the dominator traversal reaches EXIT. This
6867 function silently assumes that ENTRY strictly dominates EXIT. */
6868
6869 void
6870 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6871 vec<basic_block> *bbs_p)
6872 {
6873 basic_block son;
6874
6875 for (son = first_dom_son (CDI_DOMINATORS, entry);
6876 son;
6877 son = next_dom_son (CDI_DOMINATORS, son))
6878 {
6879 bbs_p->safe_push (son);
6880 if (son != exit)
6881 gather_blocks_in_sese_region (son, exit, bbs_p);
6882 }
6883 }
6884
6885 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6886 The duplicates are recorded in VARS_MAP. */
6887
6888 static void
6889 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
6890 tree to_context)
6891 {
6892 tree t = *tp, new_t;
6893 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6894
6895 if (DECL_CONTEXT (t) == to_context)
6896 return;
6897
6898 bool existed;
6899 tree &loc = vars_map->get_or_insert (t, &existed);
6900
6901 if (!existed)
6902 {
6903 if (SSA_VAR_P (t))
6904 {
6905 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6906 add_local_decl (f, new_t);
6907 }
6908 else
6909 {
6910 gcc_assert (TREE_CODE (t) == CONST_DECL);
6911 new_t = copy_node (t);
6912 }
6913 DECL_CONTEXT (new_t) = to_context;
6914
6915 loc = new_t;
6916 }
6917 else
6918 new_t = loc;
6919
6920 *tp = new_t;
6921 }
6922
6923
6924 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6925 VARS_MAP maps old ssa names and var_decls to the new ones. */
6926
6927 static tree
6928 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
6929 tree to_context)
6930 {
6931 tree new_name;
6932
6933 gcc_assert (!virtual_operand_p (name));
6934
6935 tree *loc = vars_map->get (name);
6936
6937 if (!loc)
6938 {
6939 tree decl = SSA_NAME_VAR (name);
6940 if (decl)
6941 {
6942 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
6943 replace_by_duplicate_decl (&decl, vars_map, to_context);
6944 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6945 decl, SSA_NAME_DEF_STMT (name));
6946 }
6947 else
6948 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6949 name, SSA_NAME_DEF_STMT (name));
6950
6951 /* Now that we've used the def stmt to define new_name, make sure it
6952 doesn't define name anymore. */
6953 SSA_NAME_DEF_STMT (name) = NULL;
6954
6955 vars_map->put (name, new_name);
6956 }
6957 else
6958 new_name = *loc;
6959
6960 return new_name;
6961 }
6962
6963 struct move_stmt_d
6964 {
6965 tree orig_block;
6966 tree new_block;
6967 tree from_context;
6968 tree to_context;
6969 hash_map<tree, tree> *vars_map;
6970 htab_t new_label_map;
6971 hash_map<void *, void *> *eh_map;
6972 bool remap_decls_p;
6973 };
6974
6975 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6976 contained in *TP if it has been ORIG_BLOCK previously and change the
6977 DECL_CONTEXT of every local variable referenced in *TP. */
6978
6979 static tree
6980 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6981 {
6982 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6983 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6984 tree t = *tp;
6985
6986 if (EXPR_P (t))
6987 {
6988 tree block = TREE_BLOCK (t);
6989 if (block == NULL_TREE)
6990 ;
6991 else if (block == p->orig_block
6992 || p->orig_block == NULL_TREE)
6993 {
6994 /* tree_node_can_be_shared says we can share invariant
6995 addresses but unshare_expr copies them anyways. Make sure
6996 to unshare before adjusting the block in place - we do not
6997 always see a copy here. */
6998 if (TREE_CODE (t) == ADDR_EXPR
6999 && is_gimple_min_invariant (t))
7000 *tp = t = unshare_expr (t);
7001 TREE_SET_BLOCK (t, p->new_block);
7002 }
7003 else if (flag_checking)
7004 {
7005 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7006 block = BLOCK_SUPERCONTEXT (block);
7007 gcc_assert (block == p->orig_block);
7008 }
7009 }
7010 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7011 {
7012 if (TREE_CODE (t) == SSA_NAME)
7013 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7014 else if (TREE_CODE (t) == PARM_DECL
7015 && gimple_in_ssa_p (cfun))
7016 *tp = *(p->vars_map->get (t));
7017 else if (TREE_CODE (t) == LABEL_DECL)
7018 {
7019 if (p->new_label_map)
7020 {
7021 struct tree_map in, *out;
7022 in.base.from = t;
7023 out = (struct tree_map *)
7024 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7025 if (out)
7026 *tp = t = out->to;
7027 }
7028
7029 /* For FORCED_LABELs we can end up with references from other
7030 functions if some SESE regions are outlined. It is UB to
7031 jump in between them, but they could be used just for printing
7032 addresses etc. In that case, DECL_CONTEXT on the label should
7033 be the function containing the glabel stmt with that LABEL_DECL,
7034 rather than whatever function a reference to the label was seen
7035 last time. */
7036 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7037 DECL_CONTEXT (t) = p->to_context;
7038 }
7039 else if (p->remap_decls_p)
7040 {
7041 /* Replace T with its duplicate. T should no longer appear in the
7042 parent function, so this looks wasteful; however, it may appear
7043 in referenced_vars, and more importantly, as virtual operands of
7044 statements, and in alias lists of other variables. It would be
7045 quite difficult to expunge it from all those places. ??? It might
7046 suffice to do this for addressable variables. */
7047 if ((VAR_P (t) && !is_global_var (t))
7048 || TREE_CODE (t) == CONST_DECL)
7049 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7050 }
7051 *walk_subtrees = 0;
7052 }
7053 else if (TYPE_P (t))
7054 *walk_subtrees = 0;
7055
7056 return NULL_TREE;
7057 }
7058
7059 /* Helper for move_stmt_r. Given an EH region number for the source
7060 function, map that to the duplicate EH region number in the dest. */
7061
7062 static int
7063 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7064 {
7065 eh_region old_r, new_r;
7066
7067 old_r = get_eh_region_from_number (old_nr);
7068 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7069
7070 return new_r->index;
7071 }
7072
7073 /* Similar, but operate on INTEGER_CSTs. */
7074
7075 static tree
7076 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7077 {
7078 int old_nr, new_nr;
7079
7080 old_nr = tree_to_shwi (old_t_nr);
7081 new_nr = move_stmt_eh_region_nr (old_nr, p);
7082
7083 return build_int_cst (integer_type_node, new_nr);
7084 }
7085
7086 /* Like move_stmt_op, but for gimple statements.
7087
7088 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7089 contained in the current statement in *GSI_P and change the
7090 DECL_CONTEXT of every local variable referenced in the current
7091 statement. */
7092
7093 static tree
7094 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7095 struct walk_stmt_info *wi)
7096 {
7097 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7098 gimple *stmt = gsi_stmt (*gsi_p);
7099 tree block = gimple_block (stmt);
7100
7101 if (block == p->orig_block
7102 || (p->orig_block == NULL_TREE
7103 && block != NULL_TREE))
7104 gimple_set_block (stmt, p->new_block);
7105
7106 switch (gimple_code (stmt))
7107 {
7108 case GIMPLE_CALL:
7109 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7110 {
7111 tree r, fndecl = gimple_call_fndecl (stmt);
7112 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7113 switch (DECL_FUNCTION_CODE (fndecl))
7114 {
7115 case BUILT_IN_EH_COPY_VALUES:
7116 r = gimple_call_arg (stmt, 1);
7117 r = move_stmt_eh_region_tree_nr (r, p);
7118 gimple_call_set_arg (stmt, 1, r);
7119 /* FALLTHRU */
7120
7121 case BUILT_IN_EH_POINTER:
7122 case BUILT_IN_EH_FILTER:
7123 r = gimple_call_arg (stmt, 0);
7124 r = move_stmt_eh_region_tree_nr (r, p);
7125 gimple_call_set_arg (stmt, 0, r);
7126 break;
7127
7128 default:
7129 break;
7130 }
7131 }
7132 break;
7133
7134 case GIMPLE_RESX:
7135 {
7136 gresx *resx_stmt = as_a <gresx *> (stmt);
7137 int r = gimple_resx_region (resx_stmt);
7138 r = move_stmt_eh_region_nr (r, p);
7139 gimple_resx_set_region (resx_stmt, r);
7140 }
7141 break;
7142
7143 case GIMPLE_EH_DISPATCH:
7144 {
7145 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7146 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7147 r = move_stmt_eh_region_nr (r, p);
7148 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7149 }
7150 break;
7151
7152 case GIMPLE_OMP_RETURN:
7153 case GIMPLE_OMP_CONTINUE:
7154 break;
7155
7156 case GIMPLE_LABEL:
7157 {
7158 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7159 so that such labels can be referenced from other regions.
7160 Make sure to update it when seeing a GIMPLE_LABEL though,
7161 that is the owner of the label. */
7162 walk_gimple_op (stmt, move_stmt_op, wi);
7163 *handled_ops_p = true;
7164 tree label = gimple_label_label (as_a <glabel *> (stmt));
7165 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7166 DECL_CONTEXT (label) = p->to_context;
7167 }
7168 break;
7169
7170 default:
7171 if (is_gimple_omp (stmt))
7172 {
7173 /* Do not remap variables inside OMP directives. Variables
7174 referenced in clauses and directive header belong to the
7175 parent function and should not be moved into the child
7176 function. */
7177 bool save_remap_decls_p = p->remap_decls_p;
7178 p->remap_decls_p = false;
7179 *handled_ops_p = true;
7180
7181 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7182 move_stmt_op, wi);
7183
7184 p->remap_decls_p = save_remap_decls_p;
7185 }
7186 break;
7187 }
7188
7189 return NULL_TREE;
7190 }
7191
7192 /* Move basic block BB from function CFUN to function DEST_FN. The
7193 block is moved out of the original linked list and placed after
7194 block AFTER in the new list. Also, the block is removed from the
7195 original array of blocks and placed in DEST_FN's array of blocks.
7196 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
7197 updated to reflect the moved edges.
7198
7199 The local variables are remapped to new instances, VARS_MAP is used
7200 to record the mapping. */
7201
7202 static void
7203 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7204 basic_block after, bool update_edge_count_p,
7205 struct move_stmt_d *d)
7206 {
7207 struct control_flow_graph *cfg;
7208 edge_iterator ei;
7209 edge e;
7210 gimple_stmt_iterator si;
7211 unsigned old_len;
7212
7213 /* Remove BB from dominance structures. */
7214 delete_from_dominance_info (CDI_DOMINATORS, bb);
7215
7216 /* Move BB from its current loop to the copy in the new function. */
7217 if (current_loops)
7218 {
7219 class loop *new_loop = (class loop *)bb->loop_father->aux;
7220 if (new_loop)
7221 bb->loop_father = new_loop;
7222 }
7223
7224 /* Link BB to the new linked list. */
7225 move_block_after (bb, after);
7226
7227 /* Update the edge count in the corresponding flowgraphs. */
7228 if (update_edge_count_p)
7229 FOR_EACH_EDGE (e, ei, bb->succs)
7230 {
7231 cfun->cfg->x_n_edges--;
7232 dest_cfun->cfg->x_n_edges++;
7233 }
7234
7235 /* Remove BB from the original basic block array. */
7236 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7237 cfun->cfg->x_n_basic_blocks--;
7238
7239 /* Grow DEST_CFUN's basic block array if needed. */
7240 cfg = dest_cfun->cfg;
7241 cfg->x_n_basic_blocks++;
7242 if (bb->index >= cfg->x_last_basic_block)
7243 cfg->x_last_basic_block = bb->index + 1;
7244
7245 old_len = vec_safe_length (cfg->x_basic_block_info);
7246 if ((unsigned) cfg->x_last_basic_block >= old_len)
7247 vec_safe_grow_cleared (cfg->x_basic_block_info,
7248 cfg->x_last_basic_block + 1);
7249
7250 (*cfg->x_basic_block_info)[bb->index] = bb;
7251
7252 /* Remap the variables in phi nodes. */
7253 for (gphi_iterator psi = gsi_start_phis (bb);
7254 !gsi_end_p (psi); )
7255 {
7256 gphi *phi = psi.phi ();
7257 use_operand_p use;
7258 tree op = PHI_RESULT (phi);
7259 ssa_op_iter oi;
7260 unsigned i;
7261
7262 if (virtual_operand_p (op))
7263 {
7264 /* Remove the phi nodes for virtual operands (alias analysis will be
7265 run for the new function, anyway). But replace all uses that
7266 might be outside of the region we move. */
7267 use_operand_p use_p;
7268 imm_use_iterator iter;
7269 gimple *use_stmt;
7270 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7271 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7272 SET_USE (use_p, SSA_NAME_VAR (op));
7273 remove_phi_node (&psi, true);
7274 continue;
7275 }
7276
7277 SET_PHI_RESULT (phi,
7278 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7279 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7280 {
7281 op = USE_FROM_PTR (use);
7282 if (TREE_CODE (op) == SSA_NAME)
7283 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7284 }
7285
7286 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7287 {
7288 location_t locus = gimple_phi_arg_location (phi, i);
7289 tree block = LOCATION_BLOCK (locus);
7290
7291 if (locus == UNKNOWN_LOCATION)
7292 continue;
7293 if (d->orig_block == NULL_TREE || block == d->orig_block)
7294 {
7295 locus = set_block (locus, d->new_block);
7296 gimple_phi_arg_set_location (phi, i, locus);
7297 }
7298 }
7299
7300 gsi_next (&psi);
7301 }
7302
7303 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7304 {
7305 gimple *stmt = gsi_stmt (si);
7306 struct walk_stmt_info wi;
7307
7308 memset (&wi, 0, sizeof (wi));
7309 wi.info = d;
7310 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7311
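      /* Keep both label-to-block maps consistent: record the label in
	 DEST_CFUN's map and clear the stale entry in the source
	 function's map.  */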
7312 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7313 {
7314 tree label = gimple_label_label (label_stmt);
7315 int uid = LABEL_DECL_UID (label);
7316
7317 gcc_assert (uid > -1);
7318
7319 old_len = vec_safe_length (cfg->x_label_to_block_map);
7320 if (old_len <= (unsigned) uid)
7321 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7322
7323 (*cfg->x_label_to_block_map)[uid] = bb;
7324 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7325
7326 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7327
7328 if (uid >= dest_cfun->cfg->last_label_uid)
7329 dest_cfun->cfg->last_label_uid = uid + 1;
7330 }
7331
7332 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7333 remove_stmt_from_eh_lp_fn (cfun, stmt);
7334
7335 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7336 gimple_remove_stmt_histograms (cfun, stmt);
7337
7338 /* We cannot leave any operands allocated from the operand caches of
7339 the current function. */
7340 free_stmt_operands (cfun, stmt);
7341 push_cfun (dest_cfun);
7342 update_stmt (stmt);
7343 if (is_gimple_call (stmt))
7344 notice_special_calls (as_a <gcall *> (stmt));
7345 pop_cfun ();
7346 }
7347
7348 FOR_EACH_EDGE (e, ei, bb->succs)
7349 if (e->goto_locus != UNKNOWN_LOCATION)
7350 {
7351 tree block = LOCATION_BLOCK (e->goto_locus);
7352 if (d->orig_block == NULL_TREE
7353 || block == d->orig_block)
7354 e->goto_locus = set_block (e->goto_locus, d->new_block);
7355 }
7356 }
7357
7358 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7359 the outermost EH region. Use REGION as the incoming base EH region.
7360 If there is no single outermost region, return NULL and set *ALL to
7361 true. */
7362
7363 static eh_region
7364 find_outermost_region_in_block (struct function *src_cfun,
7365 basic_block bb, eh_region region,
7366 bool *all)
7367 {
7368 gimple_stmt_iterator si;
7369
7370 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7371 {
7372 gimple *stmt = gsi_stmt (si);
7373 eh_region stmt_region;
7374 int lp_nr;
7375
7376 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7377 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7378 if (stmt_region)
7379 {
7380 if (region == NULL)
7381 region = stmt_region;
7382 else if (stmt_region != region)
7383 {
7384 region = eh_region_outermost (src_cfun, stmt_region, region);
7385 if (region == NULL)
7386 {
7387 *all = true;
7388 return NULL;
7389 }
7390 }
7391 }
7392 }
7393
7394 return region;
7395 }
7396
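/* Create an artificial replacement label for DECL and record the
   mapping from DECL to the new label in the hash table passed in
   DATA.  Return the new label.  */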
7397 static tree
7398 new_label_mapper (tree decl, void *data)
7399 {
7400 htab_t hash = (htab_t) data;
7401 struct tree_map *m;
7402 void **slot;
7403
7404 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7405
7406 m = XNEW (struct tree_map);
7407 m->hash = DECL_UID (decl);
7408 m->base.from = decl;
7409 m->to = create_artificial_label (UNKNOWN_LOCATION);
7410 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7411 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7412 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7413
7414 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7415 gcc_assert (*slot == NULL);
7416
7417 *slot = m;
7418
7419 return m->to;
7420 }
7421
7422 /* Tree walker to replace the decls used inside value expressions by
7423 duplicates. */
7424
7425 static tree
7426 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7427 {
7428 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7429
7430 switch (TREE_CODE (*tp))
7431 {
7432 case VAR_DECL:
7433 case PARM_DECL:
7434 case RESULT_DECL:
7435 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7436 break;
7437 default:
7438 break;
7439 }
7440
7441 if (IS_TYPE_OR_DECL_P (*tp))
7442 *walk_subtrees = false;
7443
7444 return NULL;
7445 }
7446
7447 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7448 subblocks. */
7449
7450 static void
7451 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7452 tree to_context)
7453 {
7454 tree *tp, t;
7455
7456 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7457 {
7458 t = *tp;
7459 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7460 continue;
7461 replace_by_duplicate_decl (&t, vars_map, to_context);
7462 if (t != *tp)
7463 {
7464 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7465 {
7466 tree x = DECL_VALUE_EXPR (*tp);
7467 struct replace_decls_d rd = { vars_map, to_context };
7468 unshare_expr (x);
7469 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7470 SET_DECL_VALUE_EXPR (t, x);
7471 DECL_HAS_VALUE_EXPR_P (t) = 1;
7472 }
7473 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7474 *tp = t;
7475 }
7476 }
7477
7478 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7479 replace_block_vars_by_duplicates (block, vars_map, to_context);
7480 }
7481
7482 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7483 from FN1 to FN2. */
7484
7485 static void
7486 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7487 class loop *loop)
7488 {
7489 /* Discard it from the old loop array. */
7490 (*get_loops (fn1))[loop->num] = NULL;
7491
7492 /* Place it in the new loop array, assigning it a new number. */
7493 loop->num = number_of_loops (fn2);
7494 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7495
7496 /* Recurse to children. */
7497 for (loop = loop->inner; loop; loop = loop->next)
7498 fixup_loop_arrays_after_move (fn1, fn2, loop);
7499 }
7500
7501 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7502 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7503
7504 DEBUG_FUNCTION void
7505 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7506 {
7507 basic_block bb;
7508 edge_iterator ei;
7509 edge e;
7510 bitmap bbs = BITMAP_ALLOC (NULL);
7511 int i;
7512
7513 gcc_assert (entry != NULL);
7514 gcc_assert (entry != exit);
7515 gcc_assert (bbs_p != NULL);
7516
7517 gcc_assert (bbs_p->length () > 0);
7518
7519 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7520 bitmap_set_bit (bbs, bb->index);
7521
7522 gcc_assert (bitmap_bit_p (bbs, entry->index));
7523 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7524
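/* Check that no edge crosses the region boundary except for ENTRY's single
   incoming edge and EXIT's single outgoing edge.  */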
7525 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7526 {
7527 if (bb == entry)
7528 {
7529 gcc_assert (single_pred_p (entry));
7530 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7531 }
7532 else
7533 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7534 {
7535 e = ei_edge (ei);
7536 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7537 }
7538
7539 if (bb == exit)
7540 {
7541 gcc_assert (single_succ_p (exit));
7542 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7543 }
7544 else
7545 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7546 {
7547 e = ei_edge (ei);
7548 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7549 }
7550 }
7551
7552 BITMAP_FREE (bbs);
7553 }
7554
7555 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7556
7557 bool
7558 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7559 {
7560 bitmap release_names = (bitmap)data;
7561
7562 if (TREE_CODE (from) != SSA_NAME)
7563 return true;
7564
7565 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7566 return true;
7567 }
7568
7569 /* Return LOOP_DIST_ALIAS call if present in BB. */
7570
7571 static gimple *
7572 find_loop_dist_alias (basic_block bb)
7573 {
7574 gimple *g = last_stmt (bb);
7575 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7576 return NULL;
7577
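/* The IFN_LOOP_DIST_ALIAS call, if any, is expected to be the statement
   immediately preceding the GIMPLE_COND that tests its result, so only
   look one statement back.  */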
7578 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7579 gsi_prev (&gsi);
7580 if (gsi_end_p (gsi))
7581 return NULL;
7582
7583 g = gsi_stmt (gsi);
7584 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7585 return g;
7586 return NULL;
7587 }
7588
7589 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7590 to VALUE and update any immediate uses of its LHS. */
7591
7592 void
7593 fold_loop_internal_call (gimple *g, tree value)
7594 {
7595 tree lhs = gimple_call_lhs (g);
7596 use_operand_p use_p;
7597 imm_use_iterator iter;
7598 gimple *use_stmt;
7599 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7600
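/* Replace the call with LHS = VALUE and then propagate VALUE directly
   into any remaining uses of LHS (typically the GIMPLE_COND testing it),
   so later cleanups can fold the guarded control flow.  */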
7601 replace_call_with_value (&gsi, value);
7602 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7603 {
7604 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7605 SET_USE (use_p, value);
7606 update_stmt (use_stmt);
7607 }
7608 }
7609
7610 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7611 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7612 single basic block in the original CFG and the new basic block is
7613 returned. DEST_CFUN must not have a CFG yet.
7614
7615 Note that the region need not be a pure SESE region. Blocks inside
7616 the region may contain calls to abort/exit. The only restriction
7617 is that ENTRY_BB should be the only entry point and it must
7618 dominate EXIT_BB.
7619
7620 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7621 function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
7622 to the new function.
7623
7624 All local variables referenced in the region are assumed to be in
7625 the corresponding BLOCK_VARS and unexpanded variable lists
7626 associated with DEST_CFUN.
7627
7628 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7629 reimplement move_sese_region_to_fn by duplicating the region rather than
7630 moving it. */
7631
7632 basic_block
7633 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7634 basic_block exit_bb, tree orig_block)
7635 {
7636 vec<basic_block> bbs;
7637 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7638 basic_block after, bb, *entry_pred, *exit_succ, abb;
7639 struct function *saved_cfun = cfun;
7640 int *entry_flag, *exit_flag;
7641 profile_probability *entry_prob, *exit_prob;
7642 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7643 edge e;
7644 edge_iterator ei;
7645 htab_t new_label_map;
7646 hash_map<void *, void *> *eh_map;
7647 class loop *loop = entry_bb->loop_father;
7648 class loop *loop0 = get_loop (saved_cfun, 0);
7649 struct move_stmt_d d;
7650
7651 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7652 region. */
7653 gcc_assert (entry_bb != exit_bb
7654 && (!exit_bb
7655 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7656
7657 /* Collect all the blocks in the region. Manually add ENTRY_BB
7658 because it won't be added by dfs_enumerate_from. */
7659 bbs.create (0);
7660 bbs.safe_push (entry_bb);
7661 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7662
7663 if (flag_checking)
7664 verify_sese (entry_bb, exit_bb, &bbs);
7665
7666 /* The blocks that used to be dominated by something in BBS will now be
7667 dominated by the new block. */
7668 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7669 bbs.address (),
7670 bbs.length ());
7671
7672 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7673 the predecessor edges to ENTRY_BB and the successor edges to
7674 EXIT_BB so that we can re-attach them to the new basic block that
7675 will replace the region. */
7676 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7677 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7678 entry_flag = XNEWVEC (int, num_entry_edges);
7679 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7680 i = 0;
7681 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7682 {
7683 entry_prob[i] = e->probability;
7684 entry_flag[i] = e->flags;
7685 entry_pred[i++] = e->src;
7686 remove_edge (e);
7687 }
7688
7689 if (exit_bb)
7690 {
7691 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7692 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7693 exit_flag = XNEWVEC (int, num_exit_edges);
7694 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7695 i = 0;
7696 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7697 {
7698 exit_prob[i] = e->probability;
7699 exit_flag[i] = e->flags;
7700 exit_succ[i++] = e->dest;
7701 remove_edge (e);
7702 }
7703 }
7704 else
7705 {
7706 num_exit_edges = 0;
7707 exit_succ = NULL;
7708 exit_flag = NULL;
7709 exit_prob = NULL;
7710 }
7711
7712 /* Switch context to the child function to initialize DEST_FN's CFG. */
7713 gcc_assert (dest_cfun->cfg == NULL);
7714 push_cfun (dest_cfun);
7715
7716 init_empty_tree_cfg ();
7717
7718 /* Initialize EH information for the new function. */
7719 eh_map = NULL;
7720 new_label_map = NULL;
7721 if (saved_cfun->eh)
7722 {
7723 eh_region region = NULL;
7724 bool all = false;
7725
7726 FOR_EACH_VEC_ELT (bbs, i, bb)
7727 {
7728 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7729 if (all)
7730 break;
7731 }
7732
7733 init_eh_for_function ();
7734 if (region != NULL || all)
7735 {
7736 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7737 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7738 new_label_mapper, new_label_map);
7739 }
7740 }
7741
7742 /* Initialize an empty loop tree. */
7743 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7744 init_loops_structure (dest_cfun, loops, 1);
7745 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7746 set_loops_for_fn (dest_cfun, loops);
7747
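/* Keep a copy of SAVED_CFUN's loop array under its original numbering;
   orig_loop_num fields and the first argument of IFN_LOOP_DIST_ALIAS
   calls still refer to these numbers and are fixed up below.  */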
7748 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7749
7750 /* Move the outlined loop tree part. */
7751 num_nodes = bbs.length ();
7752 FOR_EACH_VEC_ELT (bbs, i, bb)
7753 {
7754 if (bb->loop_father->header == bb)
7755 {
7756 class loop *this_loop = bb->loop_father;
7757 class loop *outer = loop_outer (this_loop);
7758 if (outer == loop
7759 /* If the SESE region contains some bbs ending with
7760 a noreturn call, those are considered to belong
7761 to the outermost loop in saved_cfun, rather than
7762 the entry_bb's loop_father. */
7763 || outer == loop0)
7764 {
7765 if (outer != loop)
7766 num_nodes -= this_loop->num_nodes;
7767 flow_loop_tree_node_remove (bb->loop_father);
7768 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7769 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7770 }
7771 }
7772 else if (bb->loop_father == loop0 && loop0 != loop)
7773 num_nodes--;
7774
7775 /* Remove loop exits from the outlined region. */
7776 if (loops_for_fn (saved_cfun)->exits)
7777 FOR_EACH_EDGE (e, ei, bb->succs)
7778 {
7779 struct loops *l = loops_for_fn (saved_cfun);
7780 loop_exit **slot
7781 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7782 NO_INSERT);
7783 if (slot)
7784 l->exits->clear_slot (slot);
7785 }
7786 }
7787
7788 /* Adjust the number of blocks in the tree root of the outlined part. */
7789 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7790
7791 /* Setup a mapping to be used by move_block_to_fn. */
7792 loop->aux = current_loops->tree_root;
7793 loop0->aux = current_loops->tree_root;
7794
7795 /* Fix up orig_loop_num. If the block referenced in it has been moved
7796 to dest_cfun, update orig_loop_num field, otherwise clear it. */
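/* moved_orig_loop_num[N] counts (saturating at 2) how many loops moved to
   DEST_CFUN still refer via orig_loop_num to the loop that had number N in
   SAVED_CFUN, or is -1 if that referenced loop did not move here.  */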
7797 signed char *moved_orig_loop_num = NULL;
7798 for (auto dloop : loops_list (dest_cfun, 0))
7799 if (dloop->orig_loop_num)
7800 {
7801 if (moved_orig_loop_num == NULL)
7802 moved_orig_loop_num
7803 = XCNEWVEC (signed char, vec_safe_length (larray));
7804 if ((*larray)[dloop->orig_loop_num] != NULL
7805 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7806 {
7807 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7808 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7809 moved_orig_loop_num[dloop->orig_loop_num]++;
7810 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7811 }
7812 else
7813 {
7814 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7815 dloop->orig_loop_num = 0;
7816 }
7817 }
7818 pop_cfun ();
7819
7820 if (moved_orig_loop_num)
7821 {
7822 FOR_EACH_VEC_ELT (bbs, i, bb)
7823 {
7824 gimple *g = find_loop_dist_alias (bb);
7825 if (g == NULL)
7826 continue;
7827
7828 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7829 gcc_assert (orig_loop_num
7830 && (unsigned) orig_loop_num < vec_safe_length (larray));
7831 if (moved_orig_loop_num[orig_loop_num] == 2)
7832 {
7833 /* If we have moved both loops with this orig_loop_num into
7834 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7835 too, update the first argument. */
7836 gcc_assert ((*larray)[orig_loop_num] != NULL
7837 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7838 tree t = build_int_cst (integer_type_node,
7839 (*larray)[orig_loop_num]->num);
7840 gimple_call_set_arg (g, 0, t);
7841 update_stmt (g);
7842 /* Make sure the following loop will not update it. */
7843 moved_orig_loop_num[orig_loop_num] = 0;
7844 }
7845 else
7846 /* Otherwise at least one of the loops stayed in saved_cfun.
7847 Remove the LOOP_DIST_ALIAS call. */
7848 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7849 }
7850 FOR_EACH_BB_FN (bb, saved_cfun)
7851 {
7852 gimple *g = find_loop_dist_alias (bb);
7853 if (g == NULL)
7854 continue;
7855 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7856 gcc_assert (orig_loop_num
7857 && (unsigned) orig_loop_num < vec_safe_length (larray));
7858 if (moved_orig_loop_num[orig_loop_num])
7859 /* The LOOP_DIST_ALIAS call remained in saved_cfun; if at least one
7860 of the corresponding loops was moved, remove it. */
7861 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7862 }
7863 XDELETEVEC (moved_orig_loop_num);
7864 }
7865 ggc_free (larray);
7866
7867 /* Move blocks from BBS into DEST_CFUN. */
7868 gcc_assert (bbs.length () >= 2);
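/* The region's blocks are appended right after DEST_CFUN's ENTRY block,
   preserving their order in BBS.  */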
7869 after = dest_cfun->cfg->x_entry_block_ptr;
7870 hash_map<tree, tree> vars_map;
7871
7872 memset (&d, 0, sizeof (d));
7873 d.orig_block = orig_block;
7874 d.new_block = DECL_INITIAL (dest_cfun->decl);
7875 d.from_context = cfun->decl;
7876 d.to_context = dest_cfun->decl;
7877 d.vars_map = &vars_map;
7878 d.new_label_map = new_label_map;
7879 d.eh_map = eh_map;
7880 d.remap_decls_p = true;
7881
7882 if (gimple_in_ssa_p (cfun))
7883 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7884 {
7885 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7886 set_ssa_default_def (dest_cfun, arg, narg);
7887 vars_map.put (arg, narg);
7888 }
7889
7890 FOR_EACH_VEC_ELT (bbs, i, bb)
7891 {
7892 /* No need to update edge counts on the last block. It has
7893 already been updated earlier when we detached the region from
7894 the original CFG. */
7895 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7896 after = bb;
7897 }
7898
7899 /* Adjust the maximum clique used. */
7900 dest_cfun->last_clique = saved_cfun->last_clique;
7901
7902 loop->aux = NULL;
7903 loop0->aux = NULL;
7904 /* Loop sizes are no longer correct, fix them up. */
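/* At this point NUM_NODES counts the moved blocks that were accounted to
   LOOP (and hence to all of its ancestors); the remaining moved blocks
   were accounted to LOOP0 only.  */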
7905 loop->num_nodes -= num_nodes;
7906 for (class loop *outer = loop_outer (loop);
7907 outer; outer = loop_outer (outer))
7908 outer->num_nodes -= num_nodes;
7909 loop0->num_nodes -= bbs.length () - num_nodes;
7910
7911 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7912 {
7913 class loop *aloop;
7914 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7915 if (aloop != NULL)
7916 {
7917 if (aloop->simduid)
7918 {
7919 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7920 d.to_context);
7921 dest_cfun->has_simduid_loops = true;
7922 }
7923 if (aloop->force_vectorize)
7924 dest_cfun->has_force_vectorize_loops = true;
7925 }
7926 }
7927
7928 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7929 if (orig_block)
7930 {
7931 tree block;
7932 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7933 == NULL_TREE);
7934 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7935 = BLOCK_SUBBLOCKS (orig_block);
7936 for (block = BLOCK_SUBBLOCKS (orig_block);
7937 block; block = BLOCK_CHAIN (block))
7938 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7939 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7940 }
7941
7942 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7943 &vars_map, dest_cfun->decl);
7944
7945 if (new_label_map)
7946 htab_delete (new_label_map);
7947 if (eh_map)
7948 delete eh_map;
7949
7950 if (gimple_in_ssa_p (cfun))
7951 {
7952 /* We need to release ssa-names in a defined order, so first find them,
7953 and then iterate in ascending version order. */
7954 bitmap release_names = BITMAP_ALLOC (NULL);
7955 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7956 bitmap_iterator bi;
7957 unsigned i;
7958 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7959 release_ssa_name (ssa_name (i));
7960 BITMAP_FREE (release_names);
7961 }
7962
7963 /* Rewire the entry and exit blocks. The successor to the entry
7964 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7965 the child function. Similarly, the predecessor of DEST_FN's
7966 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7967 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7968 various CFG manipulation functions get to the right CFG.
7969
7970 FIXME, this is silly. The CFG ought to become a parameter to
7971 these helpers. */
7972 push_cfun (dest_cfun);
7973 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7974 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7975 if (exit_bb)
7976 {
7977 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7978 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7979 }
7980 else
7981 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7982 pop_cfun ();
7983
7984 /* Back in the original function, the SESE region has disappeared,
7985 create a new basic block in its place. */
7986 bb = create_empty_bb (entry_pred[0]);
7987 if (current_loops)
7988 add_bb_to_loop (bb, loop);
7989 for (i = 0; i < num_entry_edges; i++)
7990 {
7991 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7992 e->probability = entry_prob[i];
7993 }
7994
7995 for (i = 0; i < num_exit_edges; i++)
7996 {
7997 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7998 e->probability = exit_prob[i];
7999 }
8000
8001 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8002 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8003 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8004
8005 if (exit_bb)
8006 {
8007 free (exit_prob);
8008 free (exit_flag);
8009 free (exit_succ);
8010 }
8011 free (entry_prob);
8012 free (entry_flag);
8013 free (entry_pred);
8014 bbs.release ();
8015
8016 return bb;
8017 }
8018
8019 /* Dump default def DEF to file FILE using FLAGS and indentation
8020 SPC. */
8021
8022 static void
8023 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8024 {
8025 for (int i = 0; i < spc; ++i)
8026 fprintf (file, " ");
8027 dump_ssaname_info_to_file (file, def, spc);
8028
8029 print_generic_expr (file, TREE_TYPE (def), flags);
8030 fprintf (file, " ");
8031 print_generic_expr (file, def, flags);
8032 fprintf (file, " = ");
8033 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8034 fprintf (file, ";\n");
8035 }
8036
8037 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8038
8039 static void
8040 print_no_sanitize_attr_value (FILE *file, tree value)
8041 {
8042 unsigned int flags = tree_to_uhwi (value);
8043 bool first = true;
8044 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8045 {
8046 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8047 {
8048 if (!first)
8049 fprintf (file, " | ");
8050 fprintf (file, "%s", sanitizer_opts[i].name);
8051 first = false;
8052 }
8053 }
8054 }
8055
8056 /* Dump FUNCTION_DECL FNDECL to file FILE using FLAGS (see TDF_* in
8057 dumpfile.h).  */
8058
8059 void
8060 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8061 {
8062 tree arg, var, old_current_fndecl = current_function_decl;
8063 struct function *dsf;
8064 bool ignore_topmost_bind = false, any_var = false;
8065 basic_block bb;
8066 tree chain;
8067 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8068 && decl_is_tm_clone (fndecl));
8069 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8070
8071 tree fntype = TREE_TYPE (fndecl);
8072 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8073
8074 for (int i = 0; i != 2; ++i)
8075 {
8076 if (!attrs[i])
8077 continue;
8078
8079 fprintf (file, "__attribute__((");
8080
8081 bool first = true;
8082 tree chain;
8083 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8084 {
8085 if (!first)
8086 fprintf (file, ", ");
8087
8088 tree name = get_attribute_name (chain);
8089 print_generic_expr (file, name, dump_flags);
8090 if (TREE_VALUE (chain) != NULL_TREE)
8091 {
8092 fprintf (file, " (");
8093
8094 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8095 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8096 else
8097 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8098 fprintf (file, ")");
8099 }
8100 }
8101
8102 fprintf (file, "))\n");
8103 }
8104
8105 current_function_decl = fndecl;
8106 if (flags & TDF_GIMPLE)
8107 {
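/* Emit a function header that the GIMPLE front end (-fgimple) can parse
   back, including the hot-bb threshold and the entry block count.  */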
8108 static bool hotness_bb_param_printed = false;
8109 if (profile_info != NULL
8110 && !hotness_bb_param_printed)
8111 {
8112 hotness_bb_param_printed = true;
8113 fprintf (file,
8114 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8115 " */\n", get_hot_bb_threshold ());
8116 }
8117
8118 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8119 dump_flags | TDF_SLIM);
8120 fprintf (file, " __GIMPLE (%s",
8121 (fun->curr_properties & PROP_ssa) ? "ssa"
8122 : (fun->curr_properties & PROP_cfg) ? "cfg"
8123 : "");
8124
8125 if (fun && fun->cfg)
8126 {
8127 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8128 if (bb->count.initialized_p ())
8129 fprintf (file, ",%s(%" PRIu64 ")",
8130 profile_quality_as_string (bb->count.quality ()),
8131 bb->count.value ());
8132 if (dump_flags & TDF_UID)
8133 fprintf (file, ")\n%sD_%u (", function_name (fun),
8134 DECL_UID (fndecl));
8135 else
8136 fprintf (file, ")\n%s (", function_name (fun));
8137 }
8138 }
8139 else
8140 {
8141 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8142 if (dump_flags & TDF_UID)
8143 fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8144 tmclone ? "[tm-clone] " : "");
8145 else
8146 fprintf (file, " %s %s(", function_name (fun),
8147 tmclone ? "[tm-clone] " : "");
8148 }
8149
8150 arg = DECL_ARGUMENTS (fndecl);
8151 while (arg)
8152 {
8153 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8154 fprintf (file, " ");
8155 print_generic_expr (file, arg, dump_flags);
8156 if (DECL_CHAIN (arg))
8157 fprintf (file, ", ");
8158 arg = DECL_CHAIN (arg);
8159 }
8160 fprintf (file, ")\n");
8161
8162 dsf = DECL_STRUCT_FUNCTION (fndecl);
8163 if (dsf && (flags & TDF_EH))
8164 dump_eh_tree (file, dsf);
8165
8166 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8167 {
8168 dump_node (fndecl, TDF_SLIM | flags, file);
8169 current_function_decl = old_current_fndecl;
8170 return;
8171 }
8172
8173 /* When GIMPLE is lowered, the variables are no longer available in
8174 BIND_EXPRs, so display them separately. */
8175 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8176 {
8177 unsigned ix;
8178 ignore_topmost_bind = true;
8179
8180 fprintf (file, "{\n");
8181 if (gimple_in_ssa_p (fun)
8182 && (flags & TDF_ALIAS))
8183 {
8184 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8185 arg = DECL_CHAIN (arg))
8186 {
8187 tree def = ssa_default_def (fun, arg);
8188 if (def)
8189 dump_default_def (file, def, 2, flags);
8190 }
8191
8192 tree res = DECL_RESULT (fun->decl);
8193 if (res != NULL_TREE
8194 && DECL_BY_REFERENCE (res))
8195 {
8196 tree def = ssa_default_def (fun, res);
8197 if (def)
8198 dump_default_def (file, def, 2, flags);
8199 }
8200
8201 tree static_chain = fun->static_chain_decl;
8202 if (static_chain != NULL_TREE)
8203 {
8204 tree def = ssa_default_def (fun, static_chain);
8205 if (def)
8206 dump_default_def (file, def, 2, flags);
8207 }
8208 }
8209
8210 if (!vec_safe_is_empty (fun->local_decls))
8211 FOR_EACH_LOCAL_DECL (fun, ix, var)
8212 {
8213 print_generic_decl (file, var, flags);
8214 fprintf (file, "\n");
8215
8216 any_var = true;
8217 }
8218
8219 tree name;
8220
8221 if (gimple_in_ssa_p (fun))
8222 FOR_EACH_SSA_NAME (ix, name, fun)
8223 {
8224 if (!SSA_NAME_VAR (name)
8225 /* SSA names with decls without a name still get
8226 dumped as _N; list those explicitly as well even
8227 though we've dumped the decl declaration as D.xxx
8228 above. */
8229 || !SSA_NAME_IDENTIFIER (name))
8230 {
8231 fprintf (file, " ");
8232 print_generic_expr (file, TREE_TYPE (name), flags);
8233 fprintf (file, " ");
8234 print_generic_expr (file, name, flags);
8235 fprintf (file, ";\n");
8236
8237 any_var = true;
8238 }
8239 }
8240 }
8241
8242 if (fun && fun->decl == fndecl
8243 && fun->cfg
8244 && basic_block_info_for_fn (fun))
8245 {
8246 /* If the CFG has been built, emit a CFG-based dump. */
8247 if (!ignore_topmost_bind)
8248 fprintf (file, "{\n");
8249
8250 if (any_var && n_basic_blocks_for_fn (fun))
8251 fprintf (file, "\n");
8252
8253 FOR_EACH_BB_FN (bb, fun)
8254 dump_bb (file, bb, 2, flags);
8255
8256 fprintf (file, "}\n");
8257 }
8258 else if (fun && (fun->curr_properties & PROP_gimple_any))
8259 {
8260 /* The function is now in GIMPLE form but the CFG has not been
8261 built yet. Emit the single sequence of GIMPLE statements
8262 that make up its body. */
8263 gimple_seq body = gimple_body (fndecl);
8264
8265 if (gimple_seq_first_stmt (body)
8266 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8267 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8268 print_gimple_seq (file, body, 0, flags);
8269 else
8270 {
8271 if (!ignore_topmost_bind)
8272 fprintf (file, "{\n");
8273
8274 if (any_var)
8275 fprintf (file, "\n");
8276
8277 print_gimple_seq (file, body, 2, flags);
8278 fprintf (file, "}\n");
8279 }
8280 }
8281 else
8282 {
8283 int indent;
8284
8285 /* Make a tree based dump. */
8286 chain = DECL_SAVED_TREE (fndecl);
8287 if (chain && TREE_CODE (chain) == BIND_EXPR)
8288 {
8289 if (ignore_topmost_bind)
8290 {
8291 chain = BIND_EXPR_BODY (chain);
8292 indent = 2;
8293 }
8294 else
8295 indent = 0;
8296 }
8297 else
8298 {
8299 if (!ignore_topmost_bind)
8300 {
8301 fprintf (file, "{\n");
8302 /* No topmost bind, pretend it's ignored for later. */
8303 ignore_topmost_bind = true;
8304 }
8305 indent = 2;
8306 }
8307
8308 if (any_var)
8309 fprintf (file, "\n");
8310
8311 print_generic_stmt_indented (file, chain, flags, indent);
8312 if (ignore_topmost_bind)
8313 fprintf (file, "}\n");
8314 }
8315
8316 if (flags & TDF_ENUMERATE_LOCALS)
8317 dump_enumerated_decls (file, flags);
8318 fprintf (file, "\n\n");
8319
8320 current_function_decl = old_current_fndecl;
8321 }
8322
8323 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8324
8325 DEBUG_FUNCTION void
8326 debug_function (tree fn, dump_flags_t flags)
8327 {
8328 dump_function_to_file (fn, stderr, flags);
8329 }
8330
8331
8332 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8333
8334 static void
8335 print_pred_bbs (FILE *file, basic_block bb)
8336 {
8337 edge e;
8338 edge_iterator ei;
8339
8340 FOR_EACH_EDGE (e, ei, bb->preds)
8341 fprintf (file, "bb_%d ", e->src->index);
8342 }
8343
8344
8345 /* Print on FILE the indexes for the successors of basic_block BB. */
8346
8347 static void
8348 print_succ_bbs (FILE *file, basic_block bb)
8349 {
8350 edge e;
8351 edge_iterator ei;
8352
8353 FOR_EACH_EDGE (e, ei, bb->succs)
8354 fprintf (file, "bb_%d ", e->dest->index);
8355 }
8356
8357 /* Print to FILE the basic block BB following the VERBOSITY level. */
8358
8359 void
8360 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8361 {
8362 char *s_indent = (char *) alloca ((size_t) indent + 1);
8363 memset ((void *) s_indent, ' ', (size_t) indent);
8364 s_indent[indent] = '\0';
8365
8366 /* Print basic_block's header. */
8367 if (verbosity >= 2)
8368 {
8369 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8370 print_pred_bbs (file, bb);
8371 fprintf (file, "}, succs = {");
8372 print_succ_bbs (file, bb);
8373 fprintf (file, "})\n");
8374 }
8375
8376 /* Print basic_block's body. */
8377 if (verbosity >= 3)
8378 {
8379 fprintf (file, "%s {\n", s_indent);
8380 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8381 fprintf (file, "%s }\n", s_indent);
8382 }
8383 }
8384
8385 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8386
8387 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8388 VERBOSITY level this outputs the contents of the loop, or just its
8389 structure. */
8390
8391 static void
8392 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8393 {
8394 char *s_indent;
8395 basic_block bb;
8396
8397 if (loop == NULL)
8398 return;
8399
8400 s_indent = (char *) alloca ((size_t) indent + 1);
8401 memset ((void *) s_indent, ' ', (size_t) indent);
8402 s_indent[indent] = '\0';
8403
8404 /* Print loop's header. */
8405 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8406 if (loop->header)
8407 fprintf (file, "header = %d", loop->header->index);
8408 else
8409 {
8410 fprintf (file, "deleted)\n");
8411 return;
8412 }
8413 if (loop->latch)
8414 fprintf (file, ", latch = %d", loop->latch->index);
8415 else
8416 fprintf (file, ", multiple latches");
8417 fprintf (file, ", niter = ");
8418 print_generic_expr (file, loop->nb_iterations);
8419
8420 if (loop->any_upper_bound)
8421 {
8422 fprintf (file, ", upper_bound = ");
8423 print_decu (loop->nb_iterations_upper_bound, file);
8424 }
8425 if (loop->any_likely_upper_bound)
8426 {
8427 fprintf (file, ", likely_upper_bound = ");
8428 print_decu (loop->nb_iterations_likely_upper_bound, file);
8429 }
8430
8431 if (loop->any_estimate)
8432 {
8433 fprintf (file, ", estimate = ");
8434 print_decu (loop->nb_iterations_estimate, file);
8435 }
8436 if (loop->unroll)
8437 fprintf (file, ", unroll = %d", loop->unroll);
8438 fprintf (file, ")\n");
8439
8440 /* Print loop's body. */
8441 if (verbosity >= 1)
8442 {
8443 fprintf (file, "%s{\n", s_indent);
8444 FOR_EACH_BB_FN (bb, cfun)
8445 if (bb->loop_father == loop)
8446 print_loops_bb (file, bb, indent, verbosity);
8447
8448 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8449 fprintf (file, "%s}\n", s_indent);
8450 }
8451 }
8452
8453 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8454 spaces. Following VERBOSITY level this outputs the contents of the
8455 loop, or just its structure. */
8456
8457 static void
8458 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8459 int verbosity)
8460 {
8461 if (loop == NULL)
8462 return;
8463
8464 print_loop (file, loop, indent, verbosity);
8465 print_loop_and_siblings (file, loop->next, indent, verbosity);
8466 }
8467
8468 /* Follow a CFG edge from the entry point of the program, and on entry
8469 of a loop, pretty print the loop structure on FILE. */
8470
8471 void
8472 print_loops (FILE *file, int verbosity)
8473 {
8474 basic_block bb;
8475
8476 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8477 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8478 if (bb && bb->loop_father)
8479 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8480 }
8481
8482 /* Dump a loop. */
8483
8484 DEBUG_FUNCTION void
8485 debug (class loop &ref)
8486 {
8487 print_loop (stderr, &ref, 0, /*verbosity*/0);
8488 }
8489
8490 DEBUG_FUNCTION void
8491 debug (class loop *ptr)
8492 {
8493 if (ptr)
8494 debug (*ptr);
8495 else
8496 fprintf (stderr, "<nil>\n");
8497 }
8498
8499 /* Dump a loop verbosely. */
8500
8501 DEBUG_FUNCTION void
8502 debug_verbose (class loop &ref)
8503 {
8504 print_loop (stderr, &ref, 0, /*verbosity*/3);
8505 }
8506
8507 DEBUG_FUNCTION void
8508 debug_verbose (class loop *ptr)
8509 {
8510 if (ptr)
8511 debug (*ptr);
8512 else
8513 fprintf (stderr, "<nil>\n");
8514 }
8515
8516
8517 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8518
8519 DEBUG_FUNCTION void
8520 debug_loops (int verbosity)
8521 {
8522 print_loops (stderr, verbosity);
8523 }
8524
8525 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8526
8527 DEBUG_FUNCTION void
8528 debug_loop (class loop *loop, int verbosity)
8529 {
8530 print_loop (stderr, loop, 0, verbosity);
8531 }
8532
8533 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8534 level. */
8535
8536 DEBUG_FUNCTION void
8537 debug_loop_num (unsigned num, int verbosity)
8538 {
8539 debug_loop (get_loop (cfun, num), verbosity);
8540 }
8541
8542 /* Return true if BB ends with a call, possibly followed by some
8543 instructions that must stay with the call. Return false
8544 otherwise. */
8545
8546 static bool
8547 gimple_block_ends_with_call_p (basic_block bb)
8548 {
8549 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8550 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8551 }
8552
8553
8554 /* Return true if BB ends with a conditional branch. Return false
8555 otherwise. */
8556
8557 static bool
8558 gimple_block_ends_with_condjump_p (const_basic_block bb)
8559 {
8560 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8561 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8562 }
8563
8564
8565 /* Return true if statement T may terminate execution of BB in ways not
8566 explicitly represented in the CFG. */
8567
8568 bool
8569 stmt_can_terminate_bb_p (gimple *t)
8570 {
8571 tree fndecl = NULL_TREE;
8572 int call_flags = 0;
8573
8574 /* An EH exception not handled internally terminates execution of the whole
8575 function. */
8576 if (stmt_can_throw_external (cfun, t))
8577 return true;
8578
8579 /* NORETURN and LONGJMP calls already have an edge to exit.
8580 CONST and PURE calls do not need one.
8581 We don't currently check for CONST and PURE here, although
8582 it would be a good idea, because those attributes are
8583 figured out from the RTL in mark_constant_function, and
8584 the counter incrementation code from -fprofile-arcs
8585 leads to different results from -fbranch-probabilities. */
8586 if (is_gimple_call (t))
8587 {
8588 fndecl = gimple_call_fndecl (t);
8589 call_flags = gimple_call_flags (t);
8590 }
8591
8592 if (is_gimple_call (t)
8593 && fndecl
8594 && fndecl_built_in_p (fndecl)
8595 && (call_flags & ECF_NOTHROW)
8596 && !(call_flags & ECF_RETURNS_TWICE)
8597 /* fork() doesn't really return twice, but the effect of
8598 wrapping it in __gcov_fork() which calls __gcov_dump() and
8599 __gcov_reset() and clears the counters before forking has the same
8600 effect as returning twice. Force a fake edge. */
8601 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8602 return false;
8603
8604 if (is_gimple_call (t))
8605 {
8606 edge_iterator ei;
8607 edge e;
8608 basic_block bb;
8609
8610 if (call_flags & (ECF_PURE | ECF_CONST)
8611 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8612 return false;
8613
8614 /* A function call may do a longjmp, terminate the program or do other
8615 things. Special-case noreturn calls that have non-abnormal edges out,
8616 as then the fact is sufficiently represented by the lack of edges out of T. */
8617 if (!(call_flags & ECF_NORETURN))
8618 return true;
8619
8620 bb = gimple_bb (t);
8621 FOR_EACH_EDGE (e, ei, bb->succs)
8622 if ((e->flags & EDGE_FAKE) == 0)
8623 return true;
8624 }
8625
8626 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8627 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8628 return true;
8629
8630 return false;
8631 }
8632
8633
8634 /* Add fake edges to the function exit for any non-constant and
8635 non-noreturn calls (or noreturn calls with EH/abnormal edges),
8636 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8637 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8638 that were split.
8639
8640 The goal is to expose cases in which entering a basic block does
8641 not imply that all subsequent instructions must be executed. */
8642
8643 static int
8644 gimple_flow_call_edges_add (sbitmap blocks)
8645 {
8646 int i;
8647 int blocks_split = 0;
8648 int last_bb = last_basic_block_for_fn (cfun);
8649 bool check_last_block = false;
8650
8651 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8652 return 0;
8653
8654 if (! blocks)
8655 check_last_block = true;
8656 else
8657 check_last_block = bitmap_bit_p (blocks,
8658 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8659
8660 /* In the last basic block, before epilogue generation, there will be
8661 a fallthru edge to EXIT. Special care is required if the last insn
8662 of the last basic block is a call because make_edge folds duplicate
8663 edges, which would result in the fallthru edge also being marked
8664 fake, which would result in the fallthru edge being removed by
8665 remove_fake_edges, which would result in an invalid CFG.
8666
8667 Moreover, we can't elide the outgoing fake edge, since the block
8668 profiler needs to take this into account in order to solve the minimal
8669 spanning tree in the case that the call doesn't return.
8670
8671 Handle this by adding a dummy instruction in a new last basic block. */
8672 if (check_last_block)
8673 {
8674 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8675 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8676 gimple *t = NULL;
8677
8678 if (!gsi_end_p (gsi))
8679 t = gsi_stmt (gsi);
8680
8681 if (t && stmt_can_terminate_bb_p (t))
8682 {
8683 edge e;
8684
8685 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8686 if (e)
8687 {
8688 gsi_insert_on_edge (e, gimple_build_nop ());
8689 gsi_commit_edge_inserts ();
8690 }
8691 }
8692 }
8693
8694 /* Now add fake edges to the function exit for any non-constant
8695 calls since there is no way that we can determine if they will
8696 return or not... */
8697 for (i = 0; i < last_bb; i++)
8698 {
8699 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8700 gimple_stmt_iterator gsi;
8701 gimple *stmt, *last_stmt;
8702
8703 if (!bb)
8704 continue;
8705
8706 if (blocks && !bitmap_bit_p (blocks, i))
8707 continue;
8708
8709 gsi = gsi_last_nondebug_bb (bb);
8710 if (!gsi_end_p (gsi))
8711 {
8712 last_stmt = gsi_stmt (gsi);
8713 do
8714 {
8715 stmt = gsi_stmt (gsi);
8716 if (stmt_can_terminate_bb_p (stmt))
8717 {
8718 edge e;
8719
8720 /* The handling above of the final block before the
8721 epilogue should be enough to verify that there is
8722 no edge to the exit block in CFG already.
8723 Calling make_edge in such case would cause us to
8724 mark that edge as fake and remove it later. */
8725 if (flag_checking && stmt == last_stmt)
8726 {
8727 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8728 gcc_assert (e == NULL);
8729 }
8730
8731 /* Note that the following may create a new basic block
8732 and renumber the existing basic blocks. */
8733 if (stmt != last_stmt)
8734 {
8735 e = split_block (bb, stmt);
8736 if (e)
8737 blocks_split++;
8738 }
8739 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8740 e->probability = profile_probability::guessed_never ();
8741 }
8742 gsi_prev (&gsi);
8743 }
8744 while (!gsi_end_p (gsi));
8745 }
8746 }
8747
8748 if (blocks_split)
8749 checking_verify_flow_info ();
8750
8751 return blocks_split;
8752 }
8753
8754 /* Removes edge E and all the blocks dominated by it, and updates dominance
8755 information. The IL in E->src needs to be updated separately.
8756 If dominance info is not available, only the edge E is removed. */
8757
8758 void
8759 remove_edge_and_dominated_blocks (edge e)
8760 {
8761 vec<basic_block> bbs_to_fix_dom = vNULL;
8762 edge f;
8763 edge_iterator ei;
8764 bool none_removed = false;
8765 unsigned i;
8766 basic_block bb, dbb;
8767 bitmap_iterator bi;
8768
8769 /* If we are removing a path inside a non-root loop, that may change
8770 loop ownership of blocks or remove loops; mark loops for fixup. */
8771 if (current_loops
8772 && loop_outer (e->src->loop_father) != NULL
8773 && e->src->loop_father == e->dest->loop_father)
8774 loops_state_set (LOOPS_NEED_FIXUP);
8775
8776 if (!dom_info_available_p (CDI_DOMINATORS))
8777 {
8778 remove_edge (e);
8779 return;
8780 }
8781
8782 /* No updating is needed for edges to exit. */
8783 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8784 {
8785 if (cfgcleanup_altered_bbs)
8786 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8787 remove_edge (e);
8788 return;
8789 }
8790
8791 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8792 that is not dominated by E->dest, then this set is empty. Otherwise,
8793 all the basic blocks dominated by E->dest are removed.
8794
8795 Also, to DF_IDOM we store the immediate dominators of the blocks in
8796 the dominance frontier of E (i.e., of the successors of the
8797 removed blocks, if there are any, and of E->dest otherwise). */
8798 FOR_EACH_EDGE (f, ei, e->dest->preds)
8799 {
8800 if (f == e)
8801 continue;
8802
8803 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8804 {
8805 none_removed = true;
8806 break;
8807 }
8808 }
8809
8810 auto_bitmap df, df_idom;
8811 auto_vec<basic_block> bbs_to_remove;
8812 if (none_removed)
8813 bitmap_set_bit (df_idom,
8814 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8815 else
8816 {
8817 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8818 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8819 {
8820 FOR_EACH_EDGE (f, ei, bb->succs)
8821 {
8822 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8823 bitmap_set_bit (df, f->dest->index);
8824 }
8825 }
8826 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8827 bitmap_clear_bit (df, bb->index);
8828
8829 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8830 {
8831 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8832 bitmap_set_bit (df_idom,
8833 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8834 }
8835 }
8836
8837 if (cfgcleanup_altered_bbs)
8838 {
8839 /* Record the set of the altered basic blocks. */
8840 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8841 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8842 }
8843
8844 /* Remove E and the cancelled blocks. */
8845 if (none_removed)
8846 remove_edge (e);
8847 else
8848 {
8849 /* Walk backwards so as to get a chance to substitute all
8850 released DEFs into debug stmts. See
8851 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
8852 details. */
8853 for (i = bbs_to_remove.length (); i-- > 0; )
8854 delete_basic_block (bbs_to_remove[i]);
8855 }
8856
8857 /* Update the dominance information. The immediate dominator may change only
8858 for blocks whose immediate dominator belongs to DF_IDOM:
8859
8860 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8861 removal. Let Z be any block such that idom(Z) = Y and
8862 Z dominates X after the removal. Before removal, there exists a path P
8863 from Y to X that avoids Z. Let F be the last edge on P that is
8864 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8865 dominates W, and because of P, Z does not dominate W), and W belongs to
8866 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8867 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8868 {
8869 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8870 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8871 dbb;
8872 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8873 bbs_to_fix_dom.safe_push (dbb);
8874 }
8875
8876 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8877
8878 bbs_to_fix_dom.release ();
8879 }
8880
8881 /* Purge dead EH edges from basic block BB. */
8882
8883 bool
8884 gimple_purge_dead_eh_edges (basic_block bb)
8885 {
8886 bool changed = false;
8887 edge e;
8888 edge_iterator ei;
8889 gimple *stmt = last_stmt (bb);
8890
8891 if (stmt && stmt_can_throw_internal (cfun, stmt))
8892 return false;
8893
8894 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8895 {
8896 if (e->flags & EDGE_EH)
8897 {
8898 remove_edge_and_dominated_blocks (e);
8899 changed = true;
8900 }
8901 else
8902 ei_next (&ei);
8903 }
8904
8905 return changed;
8906 }
8907
8908 /* Purge dead EH edges from basic block listed in BLOCKS. */
8909
8910 bool
8911 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8912 {
8913 bool changed = false;
8914 unsigned i;
8915 bitmap_iterator bi;
8916
8917 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8918 {
8919 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8920
8921 /* Earlier gimple_purge_dead_eh_edges could have removed
8922 this basic block already. */
8923 gcc_assert (bb || changed);
8924 if (bb != NULL)
8925 changed |= gimple_purge_dead_eh_edges (bb);
8926 }
8927
8928 return changed;
8929 }
8930
8931 /* Purge dead abnormal call edges from basic block BB. */
8932
8933 bool
8934 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8935 {
8936 bool changed = false;
8937 edge e;
8938 edge_iterator ei;
8939 gimple *stmt = last_stmt (bb);
8940
8941 if (!cfun->has_nonlocal_label
8942 && !cfun->calls_setjmp)
8943 return false;
8944
8945 if (stmt && stmt_can_make_abnormal_goto (stmt))
8946 return false;
8947
8948 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8949 {
8950 if (e->flags & EDGE_ABNORMAL)
8951 {
8952 if (e->flags & EDGE_FALLTHRU)
8953 e->flags &= ~EDGE_ABNORMAL;
8954 else
8955 remove_edge_and_dominated_blocks (e);
8956 changed = true;
8957 }
8958 else
8959 ei_next (&ei);
8960 }
8961
8962 return changed;
8963 }
8964
8965 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8966
8967 bool
8968 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8969 {
8970 bool changed = false;
8971 unsigned i;
8972 bitmap_iterator bi;
8973
8974 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8975 {
8976 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8977
8978 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8979 this basic block already. */
8980 gcc_assert (bb || changed);
8981 if (bb != NULL)
8982 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8983 }
8984
8985 return changed;
8986 }
8987
8988 /* This function is called whenever a new edge is created or
8989 redirected. */
8990
8991 static void
8992 gimple_execute_on_growing_pred (edge e)
8993 {
8994 basic_block bb = e->dest;
8995
8996 if (!gimple_seq_empty_p (phi_nodes (bb)))
8997 reserve_phi_args_for_new_edge (bb);
8998 }
8999
9000 /* This function is called immediately before edge E is removed from
9001 the edge vector E->dest->preds. */
9002
9003 static void
9004 gimple_execute_on_shrinking_pred (edge e)
9005 {
9006 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9007 remove_phi_args (e);
9008 }
9009
9010 /*---------------------------------------------------------------------------
9011 Helper functions for Loop versioning
9012 ---------------------------------------------------------------------------*/
9013
9014 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9015 of 'first'. Both of them are dominated by 'new_head' basic block. When
9016 'new_head' was created by splitting 'second's incoming edge, it received
9017 phi arguments on that edge from split_edge(). Later, an additional edge
9018 'e' was created to connect 'new_head' and 'first'. Now this routine adds,
9019 on this additional edge 'e', the phi args that the new_head->second edge
9020 received as part of the edge splitting. */
9021
9022 static void
9023 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9024 basic_block new_head, edge e)
9025 {
9026 gphi *phi1, *phi2;
9027 gphi_iterator psi1, psi2;
9028 tree def;
9029 edge e2 = find_edge (new_head, second);
9030
9031 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9032 edge, we should always have an edge from NEW_HEAD to SECOND. */
9033 gcc_assert (e2 != NULL);
9034
9035 /* Browse all 'second' basic block phi nodes and add phi args to
9036 edge 'e' for 'first' head. PHI args are always in correct order. */
9037
9038 for (psi2 = gsi_start_phis (second),
9039 psi1 = gsi_start_phis (first);
9040 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9041 gsi_next (&psi2), gsi_next (&psi1))
9042 {
9043 phi1 = psi1.phi ();
9044 phi2 = psi2.phi ();
9045 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9046 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9047 }
9048 }
9049
9050
9051 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
9052 SECOND_HEAD is the destination of the THEN part and FIRST_HEAD is
9053 the destination of the ELSE part. */
9054
9055 static void
9056 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9057 basic_block second_head ATTRIBUTE_UNUSED,
9058 basic_block cond_bb, void *cond_e)
9059 {
9060 gimple_stmt_iterator gsi;
9061 gimple *new_cond_expr;
9062 tree cond_expr = (tree) cond_e;
9063 edge e0;
9064
9065 /* Build new conditional expr.  */
9066 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9067 NULL_TREE, NULL_TREE);
9068
9069 /* Add new cond in cond_bb. */
9070 gsi = gsi_last_bb (cond_bb);
9071 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9072
9073 /* Adjust edges appropriately to connect new head with first head
9074 as well as second head. */
9075 e0 = single_succ_edge (cond_bb);
9076 e0->flags &= ~EDGE_FALLTHRU;
9077 e0->flags |= EDGE_FALSE_VALUE;
9078 }
9079
9080
9081 /* Do book-keeping of basic block BB for the profile consistency checker.
9082 Store the counting in RECORD. */
9083 static void
9084 gimple_account_profile_record (basic_block bb,
9085 struct profile_record *record)
9086 {
9087 gimple_stmt_iterator i;
9088 for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9089 gsi_next_nondebug (&i))
9090 {
9091 record->size
9092 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
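/* Weight the time estimate by the block's IPA profile count when feedback
   is available; otherwise scale by the block's frequency relative to the
   entry block, falling back to an unweighted estimate.  */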
9093 if (profile_info)
9094 {
9095 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9096 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9097 && bb->count.ipa ().initialized_p ())
9098 record->time
9099 += estimate_num_insns (gsi_stmt (i),
9100 &eni_time_weights)
9101 * bb->count.ipa ().to_gcov_type ();
9102 }
9103 else if (bb->count.initialized_p ()
9104 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9105 record->time
9106 += estimate_num_insns
9107 (gsi_stmt (i),
9108 &eni_time_weights)
9109 * bb->count.to_sreal_scale
9110 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9111 else
9112 record->time
9113 += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
9114 }
9115 }
9116
9117 struct cfg_hooks gimple_cfg_hooks = {
9118 "gimple",
9119 gimple_verify_flow_info,
9120 gimple_dump_bb, /* dump_bb */
9121 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9122 create_bb, /* create_basic_block */
9123 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9124 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9125 gimple_can_remove_branch_p, /* can_remove_branch_p */
9126 remove_bb, /* delete_basic_block */
9127 gimple_split_block, /* split_block */
9128 gimple_move_block_after, /* move_block_after */
9129 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9130 gimple_merge_blocks, /* merge_blocks */
9131 gimple_predict_edge, /* predict_edge */
9132 gimple_predicted_by_p, /* predicted_by_p */
9133 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9134 gimple_duplicate_bb, /* duplicate_block */
9135 gimple_split_edge, /* split_edge */
9136 gimple_make_forwarder_block, /* make_forward_block */
9137 NULL, /* tidy_fallthru_edge */
9138 NULL, /* force_nonfallthru */
9139 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9140 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9141 gimple_flow_call_edges_add, /* flow_call_edges_add */
9142 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9143 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9144 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9145 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9146 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9147 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9148 flush_pending_stmts, /* flush_pending_stmts */
9149 gimple_empty_block_p, /* block_empty_p */
9150 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9151 gimple_account_profile_record,
9152 };
9153
9154
9155 /* Split all critical edges. Split some extra (not necessarily critical) edges
9156 if FOR_EDGE_INSERTION_P is true. */
9157
9158 unsigned int
9159 split_critical_edges (bool for_edge_insertion_p /* = false */)
9160 {
9161 basic_block bb;
9162 edge e;
9163 edge_iterator ei;
9164
9165 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9166 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9167 mappings around the calls to split_edge. */
9168 start_recording_case_labels ();
9169 FOR_ALL_BB_FN (bb, cfun)
9170 {
9171 FOR_EACH_EDGE (e, ei, bb->succs)
9172 {
9173 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9174 split_edge (e);
9175 /* PRE inserts statements to edges and expects that
9176 since split_critical_edges was done beforehand, committing edge
9177 insertions will not split more edges. In addition to critical
9178 edges we must split edges that have multiple successors and
9179 end by control flow statements, such as RESX.
9180 Go ahead and split them too. This matches the logic in
9181 gimple_find_edge_insert_loc. */
9182 else if (for_edge_insertion_p
9183 && (!single_pred_p (e->dest)
9184 || !gimple_seq_empty_p (phi_nodes (e->dest))
9185 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9186 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9187 && !(e->flags & EDGE_ABNORMAL))
9188 {
9189 gimple_stmt_iterator gsi;
9190
9191 gsi = gsi_last_bb (e->src);
9192 if (!gsi_end_p (gsi)
9193 && stmt_ends_bb_p (gsi_stmt (gsi))
9194 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9195 && !gimple_call_builtin_p (gsi_stmt (gsi),
9196 BUILT_IN_RETURN)))
9197 split_edge (e);
9198 }
9199 }
9200 }
9201 end_recording_case_labels ();
9202 return 0;
9203 }
9204
9205 namespace {
9206
9207 const pass_data pass_data_split_crit_edges =
9208 {
9209 GIMPLE_PASS, /* type */
9210 "crited", /* name */
9211 OPTGROUP_NONE, /* optinfo_flags */
9212 TV_TREE_SPLIT_EDGES, /* tv_id */
9213 PROP_cfg, /* properties_required */
9214 PROP_no_crit_edges, /* properties_provided */
9215 0, /* properties_destroyed */
9216 0, /* todo_flags_start */
9217 0, /* todo_flags_finish */
9218 };
9219
9220 class pass_split_crit_edges : public gimple_opt_pass
9221 {
9222 public:
9223 pass_split_crit_edges (gcc::context *ctxt)
9224 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9225 {}
9226
9227 /* opt_pass methods: */
9228 virtual unsigned int execute (function *) { return split_critical_edges (); }
9229
9230 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
9231 }; // class pass_split_crit_edges
9232
9233 } // anon namespace
9234
9235 gimple_opt_pass *
9236 make_pass_split_crit_edges (gcc::context *ctxt)
9237 {
9238 return new pass_split_crit_edges (ctxt);
9239 }
9240
9241
9242 /* Insert COND expression which is GIMPLE_COND after STMT
9243 in basic block BB with appropriate basic block split
9244 and creation of a new conditionally executed basic block.
9245 Update profile so the new bb is visited with probability PROB.
9246 Return created basic block. */
9247 basic_block
9248 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9249 profile_probability prob)
9250 {
9251 edge fall = split_block (bb, stmt);
9252 gimple_stmt_iterator iter = gsi_last_bb (bb);
9253 basic_block new_bb;
9254
9255 /* Insert cond statement. */
9256 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9257 if (gsi_end_p (iter))
9258 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9259 else
9260 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9261
9262 /* Create conditionally executed block. */
9263 new_bb = create_empty_bb (bb);
9264 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9265 e->probability = prob;
9266 new_bb->count = e->count ();
9267 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9268
9269 /* Fix edge for split bb. */
9270 fall->flags = EDGE_FALSE_VALUE;
9271 fall->probability -= e->probability;
9272
9273 /* Update dominance info. */
9274 if (dom_info_available_p (CDI_DOMINATORS))
9275 {
9276 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9277 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9278 }
9279
9280 /* Update loop info. */
9281 if (current_loops)
9282 add_bb_to_loop (new_bb, bb->loop_father);
9283
9284 return new_bb;
9285 }
9286
9287
9288
9289 /* Given a basic block B which ends with a conditional and has
9290 precisely two successors, determine which of the edges is taken if
9291 the conditional is true and which is taken if the conditional is
9292 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9293
9294 void
9295 extract_true_false_edges_from_block (basic_block b,
9296 edge *true_edge,
9297 edge *false_edge)
9298 {
9299 edge e = EDGE_SUCC (b, 0);
9300
9301 if (e->flags & EDGE_TRUE_VALUE)
9302 {
9303 *true_edge = e;
9304 *false_edge = EDGE_SUCC (b, 1);
9305 }
9306 else
9307 {
9308 *false_edge = e;
9309 *true_edge = EDGE_SUCC (b, 1);
9310 }
9311 }
9312
9313
9314 /* From a controlling predicate in the immediate dominator DOM of
9315 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9316 predicate evaluates to true and false and store them to
9317 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9318 they are non-NULL. Return true if the edges can be determined,
9319 otherwise return false. */
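/* For instance, in the diamond

       DOM:  if (cond)
              /true    \false
             B          C
              \        /
               PHIBLOCK

   the edge B->PHIBLOCK is the true controlled edge and C->PHIBLOCK the
   false controlled edge, provided B and C each have a single predecessor.  */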
9320
9321 bool
9322 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9323 edge *true_controlled_edge,
9324 edge *false_controlled_edge)
9325 {
9326 basic_block bb = phiblock;
9327 edge true_edge, false_edge, tem;
9328 edge e0 = NULL, e1 = NULL;
9329
9330 /* We have to verify that one edge into the PHI node is dominated
9331 by the true edge of the predicate block and the other edge
9332 dominated by the false edge. This ensures that the PHI argument
9333 we are going to take is completely determined by the path we
9334 take from the predicate block.
9335 We can only use BB dominance checks below if the destination of
9336 the true/false edges are dominated by their edge, thus only
9337 have a single predecessor. */
9338 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9339 tem = EDGE_PRED (bb, 0);
9340 if (tem == true_edge
9341 || (single_pred_p (true_edge->dest)
9342 && (tem->src == true_edge->dest
9343 || dominated_by_p (CDI_DOMINATORS,
9344 tem->src, true_edge->dest))))
9345 e0 = tem;
9346 else if (tem == false_edge
9347 || (single_pred_p (false_edge->dest)
9348 && (tem->src == false_edge->dest
9349 || dominated_by_p (CDI_DOMINATORS,
9350 tem->src, false_edge->dest))))
9351 e1 = tem;
9352 else
9353 return false;
9354 tem = EDGE_PRED (bb, 1);
9355 if (tem == true_edge
9356 || (single_pred_p (true_edge->dest)
9357 && (tem->src == true_edge->dest
9358 || dominated_by_p (CDI_DOMINATORS,
9359 tem->src, true_edge->dest))))
9360 e0 = tem;
9361 else if (tem == false_edge
9362 || (single_pred_p (false_edge->dest)
9363 && (tem->src == false_edge->dest
9364 || dominated_by_p (CDI_DOMINATORS,
9365 tem->src, false_edge->dest))))
9366 e1 = tem;
9367 else
9368 return false;
9369 if (!e0 || !e1)
9370 return false;
9371
9372 if (true_controlled_edge)
9373 *true_controlled_edge = e0;
9374 if (false_controlled_edge)
9375 *false_controlled_edge = e1;
9376
9377 return true;
9378 }
9379
9380 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9381 range [LOW, HIGH]. Place the associated stmts before the last stmt of BB. */
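/* This uses the usual unsigned trick: LOW <= INDEX && INDEX <= HIGH becomes
   (utype) (INDEX - LOW) <= (utype) (HIGH - LOW).  E.g. for the range [3, 10],
   INDEX 7 yields 4 <= 7 (in range), while INDEX 2 wraps around to a huge
   unsigned value and the single comparison fails (out of range).  */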
9382
9383 void
9384 generate_range_test (basic_block bb, tree index, tree low, tree high,
9385 tree *lhs, tree *rhs)
9386 {
9387 tree type = TREE_TYPE (index);
9388 tree utype = range_check_type (type);
9389
9390 low = fold_convert (utype, low);
9391 high = fold_convert (utype, high);
9392
9393 gimple_seq seq = NULL;
9394 index = gimple_convert (&seq, utype, index);
9395 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9396 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9397
9398 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9399 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9400 }
9401
9402 /* Return the basic block that belongs to label numbered INDEX
9403 of a switch statement. */
9404
9405 basic_block
9406 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9407 {
9408 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9409 }
9410
9411 /* Return the default basic block of a switch statement. */
9412
9413 basic_block
9414 gimple_switch_default_bb (function *ifun, gswitch *gs)
9415 {
9416 return gimple_switch_label_bb (ifun, gs, 0);
9417 }
9418
9419 /* Return the edge that belongs to label numbered INDEX
9420 of a switch statement. */
9421
9422 edge
9423 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9424 {
9425 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9426 }
9427
9428 /* Return the default edge of a switch statement. */
9429
9430 edge
9431 gimple_switch_default_edge (function *ifun, gswitch *gs)
9432 {
9433 return gimple_switch_edge (ifun, gs, 0);
9434 }
9435
9436
9437 /* Emit return warnings. */
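/* For example (illustrative only), both of the following would be diagnosed
   by the pass below:

     __attribute__ ((noreturn)) void f (void) { return; }
       -> warning: 'noreturn' function does return

     int g (int x) { if (x) return 1; }
       -> warning: control reaches end of non-void function [-Wreturn-type]  */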
9438
9439 namespace {
9440
9441 const pass_data pass_data_warn_function_return =
9442 {
9443 GIMPLE_PASS, /* type */
9444 "*warn_function_return", /* name */
9445 OPTGROUP_NONE, /* optinfo_flags */
9446 TV_NONE, /* tv_id */
9447 PROP_cfg, /* properties_required */
9448 0, /* properties_provided */
9449 0, /* properties_destroyed */
9450 0, /* todo_flags_start */
9451 0, /* todo_flags_finish */
9452 };
9453
9454 class pass_warn_function_return : public gimple_opt_pass
9455 {
9456 public:
9457 pass_warn_function_return (gcc::context *ctxt)
9458 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9459 {}
9460
9461 /* opt_pass methods: */
9462 virtual unsigned int execute (function *);
9463
9464 }; // class pass_warn_function_return
9465
9466 unsigned int
9467 pass_warn_function_return::execute (function *fun)
9468 {
9469 location_t location;
9470 gimple *last;
9471 edge e;
9472 edge_iterator ei;
9473
9474 if (!targetm.warn_func_return (fun->decl))
9475 return 0;
9476
9477 /* If we have a path to EXIT, then we do return. */
9478 if (TREE_THIS_VOLATILE (fun->decl)
9479 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9480 {
9481 location = UNKNOWN_LOCATION;
9482 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9483 (e = ei_safe_edge (ei)); )
9484 {
9485 last = last_stmt (e->src);
9486 if ((gimple_code (last) == GIMPLE_RETURN
9487 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9488 && location == UNKNOWN_LOCATION
9489 && ((location = LOCATION_LOCUS (gimple_location (last)))
9490 != UNKNOWN_LOCATION)
9491 && !optimize)
9492 break;
9493 /* When optimizing, replace return stmts in noreturn functions
9494 with __builtin_unreachable () call. */
9495 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9496 {
9497 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9498 gimple *new_stmt = gimple_build_call (fndecl, 0);
9499 gimple_set_location (new_stmt, gimple_location (last));
9500 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9501 gsi_replace (&gsi, new_stmt, true);
9502 remove_edge (e);
9503 }
9504 else
9505 ei_next (&ei);
9506 }
9507 if (location == UNKNOWN_LOCATION)
9508 location = cfun->function_end_locus;
9509 warning_at (location, 0, "%<noreturn%> function does return");
9510 }
9511
9512 /* If we see "return;" in some basic block, then we do reach the end
9513 without returning a value. */
9514 else if (warn_return_type > 0
9515 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9516 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9517 {
9518 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9519 {
9520 gimple *last = last_stmt (e->src);
9521 greturn *return_stmt = dyn_cast <greturn *> (last);
9522 if (return_stmt
9523 && gimple_return_retval (return_stmt) == NULL
9524 && !warning_suppressed_p (last, OPT_Wreturn_type))
9525 {
9526 location = gimple_location (last);
9527 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9528 location = fun->function_end_locus;
9529 if (warning_at (location, OPT_Wreturn_type,
9530 "control reaches end of non-void function"))
9531 suppress_warning (fun->decl, OPT_Wreturn_type);
9532 break;
9533 }
9534 }
9535 /* The C++ FE turns fallthrough from the end of a non-void function
9536 into a __builtin_unreachable () call with BUILTINS_LOCATION.
9537 Recognize those too. */
9538 basic_block bb;
9539 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9540 FOR_EACH_BB_FN (bb, fun)
9541 if (EDGE_COUNT (bb->succs) == 0)
9542 {
9543 gimple *last = last_stmt (bb);
9544 const enum built_in_function ubsan_missing_ret
9545 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9546 if (last
9547 && ((LOCATION_LOCUS (gimple_location (last))
9548 == BUILTINS_LOCATION
9549 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9550 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9551 {
9552 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9553 gsi_prev_nondebug (&gsi);
9554 gimple *prev = gsi_stmt (gsi);
9555 if (prev == NULL)
9556 location = UNKNOWN_LOCATION;
9557 else
9558 location = gimple_location (prev);
9559 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9560 location = fun->function_end_locus;
9561 if (warning_at (location, OPT_Wreturn_type,
9562 "control reaches end of non-void function"))
9563 suppress_warning (fun->decl, OPT_Wreturn_type);
9564 break;
9565 }
9566 }
9567 }
9568 return 0;
9569 }
9570
9571 } // anon namespace
9572
9573 gimple_opt_pass *
9574 make_pass_warn_function_return (gcc::context *ctxt)
9575 {
9576 return new pass_warn_function_return (ctxt);
9577 }
9578
9579 /* Walk a gimplified function and warn about calls whose return value is
9580 ignored and whose called function carries attribute ((warn_unused_result)).
9581 This is done before inlining, so we don't have to worry about that. */
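/* For example (illustrative only):

     __attribute__ ((warn_unused_result)) int must_check (void);
     void f (void) { must_check (); }

   The ignored call gimplifies to a GIMPLE_CALL with no LHS, which is the
   shape the walk below looks for.  */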
9582
9583 static void
9584 do_warn_unused_result (gimple_seq seq)
9585 {
9586 tree fdecl, ftype;
9587 gimple_stmt_iterator i;
9588
9589 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9590 {
9591 gimple *g = gsi_stmt (i);
9592
9593 switch (gimple_code (g))
9594 {
9595 case GIMPLE_BIND:
9596 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9597 break;
9598 case GIMPLE_TRY:
9599 do_warn_unused_result (gimple_try_eval (g));
9600 do_warn_unused_result (gimple_try_cleanup (g));
9601 break;
9602 case GIMPLE_CATCH:
9603 do_warn_unused_result (gimple_catch_handler (
9604 as_a <gcatch *> (g)));
9605 break;
9606 case GIMPLE_EH_FILTER:
9607 do_warn_unused_result (gimple_eh_filter_failure (g));
9608 break;
9609
9610 case GIMPLE_CALL:
9611 if (gimple_call_lhs (g))
9612 break;
9613 if (gimple_call_internal_p (g))
9614 break;
9615
9616 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9617 LHS. All calls whose value is ignored should be
9618 represented like this. Look for the attribute. */
9619 fdecl = gimple_call_fndecl (g);
9620 ftype = gimple_call_fntype (g);
9621
9622 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9623 {
9624 location_t loc = gimple_location (g);
9625
9626 if (fdecl)
9627 warning_at (loc, OPT_Wunused_result,
9628 "ignoring return value of %qD "
9629 "declared with attribute %<warn_unused_result%>",
9630 fdecl);
9631 else
9632 warning_at (loc, OPT_Wunused_result,
9633 "ignoring return value of function "
9634 "declared with attribute %<warn_unused_result%>");
9635 }
9636 break;
9637
9638 default:
9639 /* Not a container, not a call, or a call whose value is used. */
9640 break;
9641 }
9642 }
9643 }
9644
9645 namespace {
9646
9647 const pass_data pass_data_warn_unused_result =
9648 {
9649 GIMPLE_PASS, /* type */
9650 "*warn_unused_result", /* name */
9651 OPTGROUP_NONE, /* optinfo_flags */
9652 TV_NONE, /* tv_id */
9653 PROP_gimple_any, /* properties_required */
9654 0, /* properties_provided */
9655 0, /* properties_destroyed */
9656 0, /* todo_flags_start */
9657 0, /* todo_flags_finish */
9658 };
9659
9660 class pass_warn_unused_result : public gimple_opt_pass
9661 {
9662 public:
9663 pass_warn_unused_result (gcc::context *ctxt)
9664 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9665 {}
9666
9667 /* opt_pass methods: */
9668 virtual bool gate (function *) { return flag_warn_unused_result; }
9669 virtual unsigned int execute (function *)
9670 {
9671 do_warn_unused_result (gimple_body (current_function_decl));
9672 return 0;
9673 }
9674
9675 }; // class pass_warn_unused_result
9676
9677 } // anon namespace
9678
9679 gimple_opt_pass *
9680 make_pass_warn_unused_result (gcc::context *ctxt)
9681 {
9682 return new pass_warn_unused_result (ctxt);
9683 }
9684
9685 /* Maybe remove stores to variables we marked write-only.
9686 Return true if a store was removed. */
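/* For example, if IPA analysis has determined that a global such as

     static int stats_counter;

   is written but never read, a store to it can be dropped here and the
   SSA name feeding the store queued for simple DCE.  (The variable name
   is illustrative only.)  */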
9687 static bool
9688 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9689 bitmap dce_ssa_names)
9690 {
9691 /* Keep the access when the store has side effects, i.e. when the
9692 source is volatile. */
9693 if (!gimple_store_p (stmt)
9694 || gimple_has_side_effects (stmt)
9695 || optimize_debug)
9696 return false;
9697
9698 tree lhs = get_base_address (gimple_get_lhs (stmt));
9699
9700 if (!VAR_P (lhs)
9701 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9702 || !varpool_node::get (lhs)->writeonly)
9703 return false;
9704
9705 if (dump_file && (dump_flags & TDF_DETAILS))
9706 {
9707 fprintf (dump_file, "Removing statement, writes"
9708 " to write only var:\n");
9709 print_gimple_stmt (dump_file, stmt, 0,
9710 TDF_VOPS|TDF_MEMSYMS);
9711 }
9712
9713 /* Mark the SSA name of the stored value to be checked by simple DCE. */
9714 if (gimple_assign_single_p (stmt))
9715 {
9716 tree rhs = gimple_assign_rhs1 (stmt);
9717 if (TREE_CODE (rhs) == SSA_NAME
9718 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9719 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9720 }
9721 unlink_stmt_vdef (stmt);
9722 gsi_remove (&gsi, true);
9723 release_defs (stmt);
9724 return true;
9725 }
9726
9727 /* IPA passes, compilation of earlier functions or inlining
9728 might have changed some properties, such as having marked functions
9729 nothrow, pure, const or noreturn.
9730 Remove redundant edges and basic blocks, and create new ones if necessary. */
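/* For example, a call newly discovered to be const or pure can lose its
   abnormal call edges, and a call newly known to be noreturn makes any
   code following it in the block unreachable; both require CFG cleanup.  */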
9731
9732 unsigned int
9733 execute_fixup_cfg (void)
9734 {
9735 basic_block bb;
9736 gimple_stmt_iterator gsi;
9737 int todo = 0;
9738 cgraph_node *node = cgraph_node::get (current_function_decl);
9739 /* Same scaling is also done by ipa_merge_profiles. */
9740 profile_count num = node->count;
9741 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9742 bool scale = num.initialized_p () && !(num == den);
9743 auto_bitmap dce_ssa_names;
9744
9745 if (scale)
9746 {
9747 profile_count::adjust_for_ipa_scaling (&num, &den);
9748 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9749 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9750 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9751 }
9752
9753 FOR_EACH_BB_FN (bb, cfun)
9754 {
9755 if (scale)
9756 bb->count = bb->count.apply_scale (num, den);
9757 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9758 {
9759 gimple *stmt = gsi_stmt (gsi);
9760 tree decl = is_gimple_call (stmt)
9761 ? gimple_call_fndecl (stmt)
9762 : NULL;
9763 if (decl)
9764 {
9765 int flags = gimple_call_flags (stmt);
9766 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9767 {
9768 if (gimple_purge_dead_abnormal_call_edges (bb))
9769 todo |= TODO_cleanup_cfg;
9770
9771 if (gimple_in_ssa_p (cfun))
9772 {
9773 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9774 update_stmt (stmt);
9775 }
9776 }
9777
9778 if (flags & ECF_NORETURN
9779 && fixup_noreturn_call (stmt))
9780 todo |= TODO_cleanup_cfg;
9781 }
9782
9783 /* Remove stores to variables we marked write-only. */
9784 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9785 {
9786 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9787 continue;
9788 }
9789
9790 /* For calls we can simply remove the LHS when it is known
9791 to be write-only. */
9792 if (is_gimple_call (stmt)
9793 && gimple_get_lhs (stmt))
9794 {
9795 tree lhs = get_base_address (gimple_get_lhs (stmt));
9796
9797 if (VAR_P (lhs)
9798 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9799 && varpool_node::get (lhs)->writeonly)
9800 {
9801 gimple_call_set_lhs (stmt, NULL);
9802 update_stmt (stmt);
9803 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9804 }
9805 }
9806
9807 if (maybe_clean_eh_stmt (stmt)
9808 && gimple_purge_dead_eh_edges (bb))
9809 todo |= TODO_cleanup_cfg;
9810 gsi_next (&gsi);
9811 }
9812
9813 /* If we have a basic block with no successors that does not
9814 end with a control statement or a noreturn call, end it with
9815 a call to __builtin_unreachable. This situation can occur
9816 when inlining a noreturn call that does in fact return. */
9817 if (EDGE_COUNT (bb->succs) == 0)
9818 {
9819 gimple *stmt = last_stmt (bb);
9820 if (!stmt
9821 || (!is_ctrl_stmt (stmt)
9822 && (!is_gimple_call (stmt)
9823 || !gimple_call_noreturn_p (stmt))))
9824 {
9825 if (stmt && is_gimple_call (stmt))
9826 gimple_call_set_ctrl_altering (stmt, false);
9827 tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
9828 stmt = gimple_build_call (fndecl, 0);
9829 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9830 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9831 if (!cfun->after_inlining)
9832 {
9833 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9834 node->create_edge (cgraph_node::get_create (fndecl),
9835 call_stmt, bb->count);
9836 }
9837 }
9838 }
9839 }
9840 if (scale)
9841 {
9842 update_max_bb_count ();
9843 compute_function_frequency ();
9844 }
9845
9846 if (current_loops
9847 && (todo & TODO_cleanup_cfg))
9848 loops_state_set (LOOPS_NEED_FIXUP);
9849
9850 simple_dce_from_worklist (dce_ssa_names);
9851
9852 return todo;
9853 }
9854
9855 namespace {
9856
9857 const pass_data pass_data_fixup_cfg =
9858 {
9859 GIMPLE_PASS, /* type */
9860 "fixup_cfg", /* name */
9861 OPTGROUP_NONE, /* optinfo_flags */
9862 TV_NONE, /* tv_id */
9863 PROP_cfg, /* properties_required */
9864 0, /* properties_provided */
9865 0, /* properties_destroyed */
9866 0, /* todo_flags_start */
9867 0, /* todo_flags_finish */
9868 };
9869
9870 class pass_fixup_cfg : public gimple_opt_pass
9871 {
9872 public:
9873 pass_fixup_cfg (gcc::context *ctxt)
9874 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
9875 {}
9876
9877 /* opt_pass methods: */
9878 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
9879 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
9880
9881 }; // class pass_fixup_cfg
9882
9883 } // anon namespace
9884
9885 gimple_opt_pass *
9886 make_pass_fixup_cfg (gcc::context *ctxt)
9887 {
9888 return new pass_fixup_cfg (ctxt);
9889 }
9890
9891 /* Garbage collection support for edge_def. */
9892
9893 extern void gt_ggc_mx (tree&);
9894 extern void gt_ggc_mx (gimple *&);
9895 extern void gt_ggc_mx (rtx&);
9896 extern void gt_ggc_mx (basic_block&);
9897
9898 static void
9899 gt_ggc_mx (rtx_insn *& x)
9900 {
9901 if (x)
9902 gt_ggc_mx_rtx_def ((void *) x);
9903 }
9904
9905 void
9906 gt_ggc_mx (edge_def *e)
9907 {
9908 tree block = LOCATION_BLOCK (e->goto_locus);
9909 gt_ggc_mx (e->src);
9910 gt_ggc_mx (e->dest);
9911 if (current_ir_type () == IR_GIMPLE)
9912 gt_ggc_mx (e->insns.g);
9913 else
9914 gt_ggc_mx (e->insns.r);
9915 gt_ggc_mx (block);
9916 }
9917
9918 /* PCH support for edge_def. */
9919
9920 extern void gt_pch_nx (tree&);
9921 extern void gt_pch_nx (gimple *&);
9922 extern void gt_pch_nx (rtx&);
9923 extern void gt_pch_nx (basic_block&);
9924
9925 static void
9926 gt_pch_nx (rtx_insn *& x)
9927 {
9928 if (x)
9929 gt_pch_nx_rtx_def ((void *) x);
9930 }
9931
9932 void
9933 gt_pch_nx (edge_def *e)
9934 {
9935 tree block = LOCATION_BLOCK (e->goto_locus);
9936 gt_pch_nx (e->src);
9937 gt_pch_nx (e->dest);
9938 if (current_ir_type () == IR_GIMPLE)
9939 gt_pch_nx (e->insns.g);
9940 else
9941 gt_pch_nx (e->insns.r);
9942 gt_pch_nx (block);
9943 }
9944
9945 void
9946 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9947 {
9948 tree block = LOCATION_BLOCK (e->goto_locus);
9949 op (&(e->src), cookie);
9950 op (&(e->dest), cookie);
9951 if (current_ir_type () == IR_GIMPLE)
9952 op (&(e->insns.g), cookie);
9953 else
9954 op (&(e->insns.r), cookie);
9955 op (&(block), cookie);
9956 }
9957
9958 #if CHECKING_P
9959
9960 namespace selftest {
9961
9962 /* Helper function for CFG selftests: create a dummy function decl
9963 and push it as cfun. */
9964
9965 static tree
9966 push_fndecl (const char *name)
9967 {
9968 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9969 /* FIXME: this uses input_location: */
9970 tree fndecl = build_fn_decl (name, fn_type);
9971 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9972 NULL_TREE, integer_type_node);
9973 DECL_RESULT (fndecl) = retval;
9974 push_struct_function (fndecl);
9975 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9976 ASSERT_TRUE (fun != NULL);
9977 init_empty_tree_cfg_for_function (fun);
9978 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9979 ASSERT_EQ (0, n_edges_for_fn (fun));
9980 return fndecl;
9981 }
9982
9983 /* These tests directly create CFGs.
9984 Compare with the static fns within tree-cfg.c:
9985 - build_gimple_cfg
9986 - make_blocks: calls create_basic_block (seq, bb);
9987 - make_edges. */
9988
9989 /* Verify a simple cfg of the form:
9990 ENTRY -> A -> B -> C -> EXIT. */
9991
9992 static void
9993 test_linear_chain ()
9994 {
9995 gimple_register_cfg_hooks ();
9996
9997 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9998 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9999
10000 /* Create some empty blocks. */
10001 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10002 basic_block bb_b = create_empty_bb (bb_a);
10003 basic_block bb_c = create_empty_bb (bb_b);
10004
10005 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10006 ASSERT_EQ (0, n_edges_for_fn (fun));
10007
10008 /* Create some edges: a simple linear chain of BBs. */
10009 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10010 make_edge (bb_a, bb_b, 0);
10011 make_edge (bb_b, bb_c, 0);
10012 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10013
10014 /* Verify the edges. */
10015 ASSERT_EQ (4, n_edges_for_fn (fun));
10016 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10017 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10018 ASSERT_EQ (1, bb_a->preds->length ());
10019 ASSERT_EQ (1, bb_a->succs->length ());
10020 ASSERT_EQ (1, bb_b->preds->length ());
10021 ASSERT_EQ (1, bb_b->succs->length ());
10022 ASSERT_EQ (1, bb_c->preds->length ());
10023 ASSERT_EQ (1, bb_c->succs->length ());
10024 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10025 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10026
10027 /* Verify the dominance information.
10028 Each BB in our simple chain should be dominated by the one before
10029 it. */
10030 calculate_dominance_info (CDI_DOMINATORS);
10031 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10032 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10033 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10034 ASSERT_EQ (1, dom_by_b.length ());
10035 ASSERT_EQ (bb_c, dom_by_b[0]);
10036 free_dominance_info (CDI_DOMINATORS);
10037
10038 /* Similarly for post-dominance: each BB in our chain is post-dominated
10039 by the one after it. */
10040 calculate_dominance_info (CDI_POST_DOMINATORS);
10041 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10042 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10043 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10044 ASSERT_EQ (1, postdom_by_b.length ());
10045 ASSERT_EQ (bb_a, postdom_by_b[0]);
10046 free_dominance_info (CDI_POST_DOMINATORS);
10047
10048 pop_cfun ();
10049 }
10050
10051 /* Verify a simple CFG of the form:
10052 ENTRY
10053 |
10054 A
10055 / \
10056 /t \f
10057 B C
10058 \ /
10059 \ /
10060 D
10061 |
10062 EXIT. */
10063
10064 static void
10065 test_diamond ()
10066 {
10067 gimple_register_cfg_hooks ();
10068
10069 tree fndecl = push_fndecl ("cfg_test_diamond");
10070 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10071
10072 /* Create some empty blocks. */
10073 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10074 basic_block bb_b = create_empty_bb (bb_a);
10075 basic_block bb_c = create_empty_bb (bb_a);
10076 basic_block bb_d = create_empty_bb (bb_b);
10077
10078 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10079 ASSERT_EQ (0, n_edges_for_fn (fun));
10080
10081 /* Create the edges. */
10082 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10083 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10084 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10085 make_edge (bb_b, bb_d, 0);
10086 make_edge (bb_c, bb_d, 0);
10087 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10088
10089 /* Verify the edges. */
10090 ASSERT_EQ (6, n_edges_for_fn (fun));
10091 ASSERT_EQ (1, bb_a->preds->length ());
10092 ASSERT_EQ (2, bb_a->succs->length ());
10093 ASSERT_EQ (1, bb_b->preds->length ());
10094 ASSERT_EQ (1, bb_b->succs->length ());
10095 ASSERT_EQ (1, bb_c->preds->length ());
10096 ASSERT_EQ (1, bb_c->succs->length ());
10097 ASSERT_EQ (2, bb_d->preds->length ());
10098 ASSERT_EQ (1, bb_d->succs->length ());
10099
10100 /* Verify the dominance information. */
10101 calculate_dominance_info (CDI_DOMINATORS);
10102 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10103 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10104 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10105 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10106 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10107 dom_by_a.release ();
10108 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10109 ASSERT_EQ (0, dom_by_b.length ());
10110 dom_by_b.release ();
10111 free_dominance_info (CDI_DOMINATORS);
10112
10113 /* Similarly for post-dominance. */
10114 calculate_dominance_info (CDI_POST_DOMINATORS);
10115 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10116 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10117 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10118 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10119 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10120 postdom_by_d.release ();
10121 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10122 ASSERT_EQ (0, postdom_by_b.length ());
10123 postdom_by_b.release ();
10124 free_dominance_info (CDI_POST_DOMINATORS);
10125
10126 pop_cfun ();
10127 }
10128
10129 /* Verify that we can handle a CFG containing a "complete" aka
10130 fully-connected subgraph (where A B C D below all have edges
10131 pointing to every other node, and also to themselves).
10132 e.g.:
10133 ENTRY EXIT
10134 | ^
10135 | /
10136 | /
10137 | /
10138 V/
10139 A<--->B
10140 ^^ ^^
10141 | \ / |
10142 | X |
10143 | / \ |
10144 VV VV
10145 C<--->D
10146 */
10147
10148 static void
10149 test_fully_connected ()
10150 {
10151 gimple_register_cfg_hooks ();
10152
10153 tree fndecl = push_fndecl ("cfg_fully_connected");
10154 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10155
10156 const int n = 4;
10157
10158 /* Create some empty blocks. */
10159 auto_vec <basic_block> subgraph_nodes;
10160 for (int i = 0; i < n; i++)
10161 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10162
10163 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10164 ASSERT_EQ (0, n_edges_for_fn (fun));
10165
10166 /* Create the edges. */
10167 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10168 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10169 for (int i = 0; i < n; i++)
10170 for (int j = 0; j < n; j++)
10171 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10172
10173 /* Verify the edges. */
10174 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10175 /* The first one is linked to ENTRY/EXIT as well as itself and
10176 everything else. */
10177 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10178 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10179 /* The other ones in the subgraph are linked to everything in
10180 the subgraph (including themselves). */
10181 for (int i = 1; i < n; i++)
10182 {
10183 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10184 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10185 }
10186
10187 /* Verify the dominance information. */
10188 calculate_dominance_info (CDI_DOMINATORS);
10189 /* The initial block in the subgraph should be dominated by ENTRY. */
10190 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10191 get_immediate_dominator (CDI_DOMINATORS,
10192 subgraph_nodes[0]));
10193 /* Every other block in the subgraph should be dominated by the
10194 initial block. */
10195 for (int i = 1; i < n; i++)
10196 ASSERT_EQ (subgraph_nodes[0],
10197 get_immediate_dominator (CDI_DOMINATORS,
10198 subgraph_nodes[i]));
10199 free_dominance_info (CDI_DOMINATORS);
10200
10201 /* Similarly for post-dominance. */
10202 calculate_dominance_info (CDI_POST_DOMINATORS);
10203 /* The initial block in the subgraph should be postdominated by EXIT. */
10204 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10205 get_immediate_dominator (CDI_POST_DOMINATORS,
10206 subgraph_nodes[0]));
10207 /* Every other block in the subgraph should be postdominated by the
10208 initial block, since that leads to EXIT. */
10209 for (int i = 1; i < n; i++)
10210 ASSERT_EQ (subgraph_nodes[0],
10211 get_immediate_dominator (CDI_POST_DOMINATORS,
10212 subgraph_nodes[i]));
10213 free_dominance_info (CDI_POST_DOMINATORS);
10214
10215 pop_cfun ();
10216 }
10217
10218 /* Run all of the selftests within this file. */
10219
10220 void
10221 tree_cfg_c_tests ()
10222 {
10223 test_linear_chain ();
10224 test_diamond ();
10225 test_fully_connected ();
10226 }
10227
10228 } // namespace selftest
10229
10230 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10231 - loop
10232 - nested loops
10233 - switch statement (a block with many out-edges)
10234 - something that jumps to itself
10235 - etc */
10236
10237 #endif /* CHECKING_P */
10238