1 /* Control flow functions for trees.
2 Copyright (C) 2001-2020 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "tree-pass.h"
31 #include "ssa.h"
32 #include "cgraph.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "cfganal.h"
40 #include "gimple-fold.h"
41 #include "tree-eh.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-cfg.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
49 #include "tree-dfa.h"
50 #include "tree-ssa.h"
51 #include "except.h"
52 #include "cfgloop.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "omp-general.h"
58 #include "omp-expand.h"
59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h"
61 #include "attribs.h"
62 #include "selftest.h"
63 #include "opts.h"
64 #include "asan.h"
65 #include "profile.h"
66
67 /* This file contains functions for building the Control Flow Graph (CFG)
68 for a function tree. */
69
/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;

/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;

/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;

/* CFG statistics.  */
struct cfg_stats_d
{
  /* Count of merged labels.  */
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;

/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  /* Map from original variables to their duplicates.  */
  hash_map<tree, tree> *vars_map;
  /* Context the duplicates are to be moved into.  */
  tree to_context;
};

/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  /* Source line this entry describes.  */
  int location_line;
  /* Last discriminator value handed out for that line.  */
  int discriminator;
};

/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
126
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  /* Hash on the line number alone; equal () below compares the same
     field, so colliding lines land in the same bucket by design.  */
  return item->location_line;
}
135
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  Two entries are
   considered equal when they describe the same source line.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return a->location_line == b->location_line;
}
145
/* Map from source line to the last discriminator assigned for it;
   live only while the CFG is being built (see build_gimple_cfg).  */
static hash_table<locus_discrim_hasher> *discriminator_per_locus;

/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);

/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);

/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (const gcond *, tree);
175
/* Initialize FN's CFG to the minimal "empty" state: allocate the basic
   block and label arrays with initial_cfg_capacity slots, install the
   fixed ENTRY and EXIT blocks, and chain them to each other.  */

void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  /* The only blocks so far are ENTRY and EXIT; link them directly.  */
  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
201
/* Convenience wrapper: initialize an empty CFG for the current
   function (cfun).  */

void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
207
208 /*---------------------------------------------------------------------------
209 Create basic blocks
210 ---------------------------------------------------------------------------*/
211
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  /* Reset the statistics gathered while building this CFG.  */
  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  The discriminator map only
     needs to live for the duration of edge creation.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
253
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, class loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  /* Only blocks ending in a condition can carry loop annotations.  */
  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  /* Walk backwards from the condition over the run of IFN_ANNOTATE
     calls immediately preceding it.  */
  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      /* Argument 1 carries the annotation kind; transfer it to the
	 corresponding flag on LOOP (or cfun).  */
      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_unroll_kind:
	  loop->unroll
	    = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
	  cfun->has_unroll = true;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	case annot_expr_parallel_kind:
	  loop->can_be_parallel = true;
	  loop->safelen = INT_MAX;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Replace the annotation call by a plain copy of its first
	 argument into the original LHS, keeping the IL valid.  */
      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
306
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  class loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);

      /* Push the global flag_finite_loops state down to individual loops.  */
      loop->finite_p = flag_finite_loops;
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.
     Any annotation left over at this point could not be attached to a
     loop, so warn and strip it.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  /* Validate the annotation kind before discarding it.  */
	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_unroll_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	    case annot_expr_parallel_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  /* Replace the call by a copy of its first argument, as above.  */
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
363
/* Main worker of the CFG construction pass: build the CFG for the
   current function's GIMPLE body, release the body, clean up the new
   CFG, and propagate loop annotations.  Returns 0 (no TODO flags).  */

static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  /* The statements now live in basic blocks; drop the flat body.  */
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
381
382 namespace {
383
/* Pass descriptor for the CFG construction pass.  */
const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
396
/* The CFG construction pass; all the work is done by
   execute_build_cfg.  */
class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg
408
409 } // anon namespace
410
/* Create an instance of the CFG construction pass in context CTXT.
   Caller (the pass manager) owns the returned object.  */

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
416
417
418 /* Return true if T is a computed goto. */
419
420 bool
computed_goto_p(gimple * t)421 computed_goto_p (gimple *t)
422 {
423 return (gimple_code (t) == GIMPLE_GOTO
424 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
425 }
426
/* Returns true if the sequence of statements STMTS only contains
   a call to __builtin_unreachable (), possibly preceded by labels,
   debug statements and clobbers.  */

bool
gimple_seq_unreachable_p (gimple_seq stmts)
{
  if (stmts == NULL
      /* Return false if -fsanitize=unreachable, we don't want to
	 optimize away those calls, but rather turn them into
	 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
	 later.  */
      || sanitize_flags_p (SANITIZE_UNREACHABLE))
    return false;

  gimple_stmt_iterator gsi = gsi_last (stmts);

  /* The last statement must be the __builtin_unreachable () call.  */
  if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
    return false;

  /* Everything before it must be inert: labels, debug stmts or
     clobbers only.  */
  for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL
	  && !is_gimple_debug (stmt)
	  && !gimple_clobber_p (stmt))
	return false;
    }
  return true;
}
456
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);
  if (last && gimple_code (last) == GIMPLE_COND)
    {
      /* Find the successor of PRED_BB that is not E->dest.  */
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      /* The other block must not fall through anywhere and must
	 contain nothing but the __builtin_unreachable () call.  */
      if (EDGE_COUNT (other_bb->succs) == 0)
	return gimple_seq_unreachable_p (bb_seq (other_bb));
    }
  return false;
}
485
486
487 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
488 could alter control flow except via eh. We initialize the flag at
489 CFG build time and only ever clear it later. */
490
491 static void
gimple_call_initialize_ctrl_altering(gimple * stmt)492 gimple_call_initialize_ctrl_altering (gimple *stmt)
493 {
494 int flags = gimple_call_flags (stmt);
495
496 /* A call alters control flow if it can make an abnormal goto. */
497 if (call_can_make_abnormal_goto (stmt)
498 /* A call also alters control flow if it does not return. */
499 || flags & ECF_NORETURN
500 /* TM ending statements have backedges out of the transaction.
501 Return true so we split the basic block containing them.
502 Note that the TM_BUILTIN test is merely an optimization. */
503 || ((flags & ECF_TM_BUILTIN)
504 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
505 /* BUILT_IN_RETURN call is same as return statement. */
506 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
507 /* IFN_UNIQUE should be the last insn, to make checking for it
508 as cheap as possible. */
509 || (gimple_call_internal_p (stmt)
510 && gimple_call_internal_unique_p (stmt)))
511 gimple_call_set_ctrl_altering (stmt, true);
512 else
513 gimple_call_set_ctrl_altering (stmt, false);
514 }
515
516
/* Insert SEQ after BB and build a flowgraph.  Returns the last basic
   block created.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  gimple *prev_stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      /* PREV_STMT should only be set to a debug stmt if the debug
	 stmt is before nondebug stmts.  Once stmt reaches a nondebug
	 nonlabel, prev_stmt will be set to it, so that
	 stmt_starts_bb_p will know to start a new block if a label is
	 found.  However, if stmt was a label after debug stmts only,
	 keep the label in prev_stmt even if we find further debug
	 stmts, for there may be other labels after them, and they
	 should land in the same block.  */
      if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
	prev_stmt = stmt;
      stmt = gsi_stmt (i);

      /* Calls get their GF_CALL_CTRL_ALTERING flag set once, here.  */
      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	  prev_stmt = NULL;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      /* Rewrite "lhs = stmt" as "tmp = stmt; lhs = tmp".  */
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
593
594 /* Build a flowgraph for the sequence of stmts SEQ. */
595
596 static void
make_blocks(gimple_seq seq)597 make_blocks (gimple_seq seq)
598 {
599 /* Look for debug markers right before labels, and move the debug
600 stmts after the labels. Accepting labels among debug markers
601 adds no value, just complexity; if we wanted to annotate labels
602 with view numbers (so sequencing among markers would matter) or
603 somesuch, we're probably better off still moving the labels, but
604 adding other debug annotations in their original positions or
605 emitting nonbind or bind markers associated with the labels in
606 the original position of the labels.
607
608 Moving labels would probably be simpler, but we can't do that:
609 moving labels assigns label ids to them, and doing so because of
610 debug markers makes for -fcompare-debug and possibly even codegen
611 differences. So, we have to move the debug stmts instead. To
612 that end, we scan SEQ backwards, marking the position of the
613 latest (earliest we find) label, and moving debug stmts that are
614 not separated from it by nondebug nonlabel stmts after the
615 label. */
616 if (MAY_HAVE_DEBUG_MARKER_STMTS)
617 {
618 gimple_stmt_iterator label = gsi_none ();
619
620 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
621 {
622 gimple *stmt = gsi_stmt (i);
623
624 /* If this is the first label we encounter (latest in SEQ)
625 before nondebug stmts, record its position. */
626 if (is_a <glabel *> (stmt))
627 {
628 if (gsi_end_p (label))
629 label = i;
630 continue;
631 }
632
633 /* Without a recorded label position to move debug stmts to,
634 there's nothing to do. */
635 if (gsi_end_p (label))
636 continue;
637
638 /* Move the debug stmt at I after LABEL. */
639 if (is_gimple_debug (stmt))
640 {
641 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
642 /* As STMT is removed, I advances to the stmt after
643 STMT, so the gsi_prev in the for "increment"
644 expression gets us to the stmt we're to visit after
645 STMT. LABEL, however, would advance to the moved
646 stmt if we passed it to gsi_move_after, so pass it a
647 copy instead, so as to keep LABEL pointing to the
648 LABEL. */
649 gimple_stmt_iterator copy = label;
650 gsi_move_after (&i, ©);
651 continue;
652 }
653
654 /* There aren't any (more?) debug stmts before label, so
655 there isn't anything else to move after it. */
656 label = gsi_none ();
657 }
658 }
659
660 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
661 }
662
/* Create and return a new empty basic block after bb AFTER.  H is the
   gimple_seq to install as the block's statement list (may be NULL);
   E must be NULL (edge argument is unused by the gimple hooks).  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed, by 25% plus a small slack.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
702
703
704 /*---------------------------------------------------------------------------
705 Edge creation
706 ---------------------------------------------------------------------------*/
707
/* If basic block BB has an abnormal edge to a basic block
   containing an IFN_ABNORMAL_DISPATCHER internal call, return
   the dispatcher's basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  /* Look only at purely abnormal (non-EH) successor edges.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	/* The dispatcher call is the first real stmt after labels.  */
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
	  return e->dest;
      }
  return NULL;
}
729
/* Helper function for make_edges.  Create a basic block with
   an ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.

   DISPATCHER_BBS points at the cached dispatcher blocks (slot 0 for
   abnormal calls, slot 1 for computed gotos; with BB_TO_OMP_IDX a
   pair of slots per OMP region).  BB_TO_OMP_IDX, if non-NULL, maps
   block indexes to their OMP region so dispatchers are kept per
   region.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      /* Select the dispatcher slot pair for FOR_BB's OMP region.  */
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  /* Only blocks in the same OMP region as FOR_BB count.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      /* Replace the goto itself by a fallthru to the dispatcher.  */
	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  /* The dispatcher call's argument records whether it sits in
	     an inner OMP region.  */
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
842
/* Creates outgoing edges for BB.  Returns 1 when it ends with an
   computed goto, returns 2 when it ends with a statement that
   might return to this function via an nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  /* An empty block has no outgoing edges to create here.  */
  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !gimple_call_noreturn_p (last);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	gtransaction *txn = as_a <gtransaction *> (last);
	tree label1 = gimple_transaction_label_norm (txn);
	tree label2 = gimple_transaction_label_uninst (txn);

	/* Edges to the instrumented and uninstrumented code paths;
	   whichever comes first is the fallthru.  */
	if (label1)
	  make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
	if (label2)
	  make_edge (bb, label_to_block (cfun, label2),
		     EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));

	/* An abort edge is needed if the transaction can abort or is
	   an outer transaction.  */
	tree label3 = gimple_transaction_label_over (txn);
	if (gimple_transaction_subcode (txn)
	    & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
	  make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);

	fallthru = false;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
959
/* Join all the blocks in the flowgraph.  Walks every basic block creating
   its outgoing edges via make_edges_bb, then factors computed-goto and
   abnormal-call edges through per-OMP-region dispatcher blocks.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  /* Blocks ending in a computed goto (make_edges_bb returned 1).  */
  auto_vec<basic_block> ab_edge_goto;
  /* Blocks ending in a call needing abnormal edges (returned 2).  */
  auto_vec<basic_block> ab_edge_call;
  /* Map from bb index to OMP region index; allocated lazily, only once
     the walk below actually enters an OMP region.  */
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      /* Allocate the index map the first time we are inside a region.  */
      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }

  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      /* With OMP regions each region needs its own pair of dispatchers
	 (goto / call), hence the 2 * count array.  */
      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  /* Scan the labels at the start of BB.  */
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      /* The dispatcher array was heap-allocated only in the OMP case.  */
      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);

  omp_free_regions ();
}
1069
/* Add SEQ after GSI.  Start new bb after GSI, and created further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;
  lastbb = make_blocks_1 (seq, bb);
  /* If SEQ introduced no new blocks there is nothing to wire up.  */
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  /* Create edges, loop membership and profile counts for each of the
     newly inserted blocks.  */
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      profile_count cnt = profile_count::zero ();
      bool all = true;

      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      /* The inserted sequence must not contain computed gotos, abnormal
	 calls or OMP constructs.  */
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);

      /* Sum the incoming edge counts; ALL tracks whether every
	 predecessor count was initialized.  */
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->count ().initialized_p ())
	    cnt += e->count ();
	  else
	    all = false;
	}
      tree_guess_outgoing_edge_probabilities (bb);
      if (all || profile_status_for_fn (cfun) == PROFILE_READ)
	bb->count = cnt;

      bb = bb->next_bb;
    }
  return true;
}
1123
/* Find the next available discriminator value for LINE.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (int line)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.location_line = line;
  item.discriminator = 0;
  /* Look up (or insert) the counter entry for LINE, hashed by the line
     number itself.  */
  slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      /* First request for this line: allocate a fresh counter.  */
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->location_line = line;
      (*slot)->discriminator = 0;
    }
  /* Returned discriminators start at 1; 0 means "none assigned".  */
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
1149
1150 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1151
1152 static bool
same_line_p(location_t locus1,expanded_location * from,location_t locus2)1153 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1154 {
1155 expanded_location to;
1156
1157 if (locus1 == locus2)
1158 return true;
1159
1160 to = expand_location (locus2);
1161
1162 if (from->line != to.line)
1163 return false;
1164 if (from->file == to.file)
1165 return true;
1166 return (from->file != NULL
1167 && to.file != NULL
1168 && filename_cmp (from->file, to.file) == 0);
1169 }
1170
/* Assign discriminators to each basic block.  A discriminator is given
   whenever a block and one of its successors end up on the same source
   line, so that sample-based profilers can tell them apart.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      /* Blocks without a known locus cannot clash with anything.  */
      if (locus == UNKNOWN_LOCATION)
	continue;

      expanded_location locus_e = expand_location (locus);

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  /* NOTE: shadows the outer LAST; refers to the successor.  */
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, &locus_e,
				     gimple_location (first)))
	      || (last && same_line_p (locus, &locus_e,
				       gimple_location (last))))
	    {
	      /* If the successor already carries a discriminator and BB
		 does not, mark BB instead; otherwise (re)mark the
		 successor.  */
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	      else
		e->dest->discriminator
		  = next_discriminator_for_locus (locus_e.line);
	    }
	}
    }
}
1209
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (cfun, then_label);
  else_bb = label_to_block (cfun, else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  /* make_edge returns NULL when the edge already exists (i.e. both arms
     target the same block); only a newly created edge gets a locus.  */
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
1242
1243
1244 /* Called for each element in the hash table (P) as we delete the
1245 edge to cases hash table.
1246
1247 Clear all the CASE_CHAINs to prevent problems with copying of
1248 SWITCH_EXPRs and structure sharing rules, then free the hash table
1249 element. */
1250
1251 bool
edge_to_cases_cleanup(edge const &,tree const & value,void *)1252 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1253 {
1254 tree t, next;
1255
1256 for (t = value; t; t = next)
1257 {
1258 next = CASE_CHAIN (t);
1259 CASE_CHAIN (t) = NULL;
1260 }
1261
1262 return true;
1263 }
1264
1265 /* Start recording information mapping edges to case labels. */
1266
1267 void
start_recording_case_labels(void)1268 start_recording_case_labels (void)
1269 {
1270 gcc_assert (edge_to_cases == NULL);
1271 edge_to_cases = new hash_map<edge, tree>;
1272 touched_switch_bbs = BITMAP_ALLOC (NULL);
1273 }
1274
1275 /* Return nonzero if we are recording information for case labels. */
1276
1277 static bool
recording_case_labels_p(void)1278 recording_case_labels_p (void)
1279 {
1280 return (edge_to_cases != NULL);
1281 }
1282
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  /* Clear the CASE_CHAINs stored as map values before the map dies.  */
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  /* Re-group the case labels of every switch touched while recording
     was active.  */
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      /* The block may have been removed in the meantime.  */
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
1305
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (cfun, lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  Each label is pushed at the head, threaded through
	 its CASE_CHAIN field.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  /* The loop above inserted an entry for every outgoing edge of the
     switch, including E, so this lookup cannot fail.  */
  return *edge_to_cases->get (e);
}
1347
1348 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1349
1350 static void
make_gimple_switch_edges(gswitch * entry,basic_block bb)1351 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1352 {
1353 size_t i, n;
1354
1355 n = gimple_switch_num_labels (entry);
1356
1357 for (i = 0; i < n; ++i)
1358 {
1359 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1360 make_edge (bb, label_bb, 0);
1361 }
1362 }
1363
1364
/* Return the basic block holding label DEST in function IFUN, or NULL
   if DEST's UID is not present in the label-to-block map.  */

basic_block
label_to_block (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      /* Inserting the label statement assigned DEST a valid UID.  */
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
1389
1390 /* Create edges for a goto statement at block BB. Returns true
1391 if abnormal edges should be created. */
1392
1393 static bool
make_goto_expr_edges(basic_block bb)1394 make_goto_expr_edges (basic_block bb)
1395 {
1396 gimple_stmt_iterator last = gsi_last_bb (bb);
1397 gimple *goto_t = gsi_stmt (last);
1398
1399 /* A simple GOTO creates normal edges. */
1400 if (simple_goto_p (goto_t))
1401 {
1402 tree dest = gimple_goto_dest (goto_t);
1403 basic_block label_bb = label_to_block (cfun, dest);
1404 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1405 e->goto_locus = gimple_location (goto_t);
1406 gsi_remove (&last, true);
1407 return false;
1408 }
1409
1410 /* A computed GOTO creates abnormal edges. */
1411 return true;
1412 }
1413
1414 /* Create edges for an asm statement with labels at block BB. */
1415
1416 static void
make_gimple_asm_edges(basic_block bb)1417 make_gimple_asm_edges (basic_block bb)
1418 {
1419 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1420 int i, n = gimple_asm_nlabels (stmt);
1421
1422 for (i = 0; i < n; ++i)
1423 {
1424 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1425 basic_block label_bb = label_to_block (cfun, label);
1426 make_edge (bb, label_bb, 0);
1427 }
1428 }
1429
1430 /*---------------------------------------------------------------------------
1431 Flowgraph analysis
1432 ---------------------------------------------------------------------------*/
1433
1434 /* Cleanup useless labels in basic blocks. This is something we wish
1435 to do early because it allows us to group case labels before creating
1436 the edges for the CFG, and it speeds up block statement iterators in
1437 all passes later on.
1438 We rerun this pass after CFG is created, to get rid of the labels that
1439 are no longer referenced. After then we do not run it any more, since
1440 (almost) no new labels should be created. */
1441
/* A map from basic block index to the leading label of that block.  */
struct label_record
{
  /* The leading label chosen for the block, or NULL_TREE if none has
     been recorded yet.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
};
1451
1452 /* Given LABEL return the first label in the same basic block. */
1453
1454 static tree
main_block_label(tree label,label_record * label_for_bb)1455 main_block_label (tree label, label_record *label_for_bb)
1456 {
1457 basic_block bb = label_to_block (cfun, label);
1458 tree main_label = label_for_bb[bb->index].label;
1459
1460 /* label_to_block possibly inserted undefined label into the chain. */
1461 if (!main_label)
1462 {
1463 label_for_bb[bb->index].label = label;
1464 main_label = label;
1465 }
1466
1467 label_for_bb[bb->index].used = true;
1468 return main_label;
1469 }
1470
/* Clean up redundant labels within the exception tree, redirecting each
   recorded label to the leading label of its basic block.  */

static void
cleanup_dead_labels_eh (label_record *label_for_bb)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  /* Redirect each landing pad's post-landing-pad label, transferring
     the landing-pad number to the replacement label.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad, label_for_bb);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  /* Redirect the labels referenced from EH regions themselves.  */
  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	/* These region kinds carry no label.  */
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab, label_for_bb);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab, label_for_bb);
	break;
      }
}
1521
1522
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_record *label_for_bb = XCNEWVEC (struct label_record,
					 last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  /* Labels can only appear at the start of a block; stop at the
	     first non-label statement.  */
	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    /* Redirect both arms of the conditional.  */
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    /* Redirect every label operand of the asm goto.  */
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label, label_for_bb);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    /* Redirect all three possible transaction labels.  */
	    gtransaction *txn = as_a <gtransaction *> (stmt);

	    label = gimple_transaction_label_norm (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_norm (txn, new_label);
	      }

	    label = gimple_transaction_label_uninst (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_uninst (txn, new_label);
	      }

	    label = gimple_transaction_label_over (txn);
	    if (label)
	      {
		new_label = main_block_label (label, label_for_bb);
		if (new_label != label)
		  gimple_transaction_set_label_over (txn, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh (label_for_bb);

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* Keep the main label plus any label that must survive:
	     user-defined, non-local target, or address-taken.  */
	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
1723
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:
   Returns true if any labels were merged or discarded.  */

bool
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, next_index, new_size;
  basic_block default_bb = NULL;
  /* Labels of blocks removed as unreachable; later cases referring to
     them must be dropped.  Allocated lazily.  */
  hash_set<tree> *removed_labels = NULL;

  default_bb = gimple_switch_default_bb (cfun, stmt);

  /* Look for possible opportunities to merge cases.
     NEW_SIZE is the write cursor for compacting the label vector in
     place; index 0 (the default case) is always kept.  */
  new_size = i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (cfun, CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the default case or
	 whose destination blocks have already been removed as unreachable.  */
      if (base_bb == NULL
	  || base_bb == default_bb
	  || (removed_labels
	      && removed_labels->contains (CASE_LABEL (base_case))))
	{
	  i++;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      next_index = i + 1;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (next_index < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, next_index);
	  basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::to_wide (base_high) + 1;

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && (removed_labels == NULL
		  || !removed_labels->contains (CASE_LABEL (merge_case)))
	      && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
	    {
	      base_high
		= (CASE_HIGH (merge_case)
		   ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
	      CASE_HIGH (base_case) = base_high;
	      next_index++;
	    }
	  else
	    break;
	}

      /* Discard cases that have an unreachable destination block.  */
      if (EDGE_COUNT (base_bb->succs) == 0
	  && gimple_seq_unreachable_p (bb_seq (base_bb))
	  /* Don't optimize this if __builtin_unreachable () is the
	     implicitly added one by the C++ FE too early, before
	     -Wreturn-type can be diagnosed.  We'll optimize it later
	     during switchconv pass or any other cfg cleanup.  */
	  && (gimple_in_ssa_p (cfun)
	      || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
		  != BUILTINS_LOCATION)))
	{
	  edge base_edge = find_edge (gimple_bb (stmt), base_bb);
	  if (base_edge != NULL)
	    {
	      for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
		   !gsi_end_p (gsi); gsi_next (&gsi))
		if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
		  {
		    if (FORCED_LABEL (gimple_label_label (stmt))
			|| DECL_NONLOCAL (gimple_label_label (stmt)))
		      {
			/* Forced/non-local labels aren't going to be removed,
			   but they will be moved to some neighbouring basic
			   block. If some later case label refers to one of
			   those labels, we should throw that case away rather
			   than keeping it around and refering to some random
			   other basic block without an edge to it.  */
			if (removed_labels == NULL)
			  removed_labels = new hash_set<tree>;
			removed_labels->add (gimple_label_label (stmt));
		      }
		  }
		else
		  break;
	      remove_edge_and_dominated_blocks (base_edge);
	    }
	  /* Do not copy this case to NEW_SIZE: it is dropped.  */
	  i = next_index;
	  continue;
	}

      /* Compact: move the (possibly widened) case down to NEW_SIZE.  */
      if (new_size < i)
	gimple_switch_set_label (stmt, new_size,
				 gimple_switch_label (stmt, i));
      i = next_index;
      new_size++;
    }

  gcc_assert (new_size <= old_size);

  if (new_size < old_size)
    gimple_switch_set_num_labels (stmt, new_size);

  delete removed_labels;
  return new_size < old_size;
}
1847
1848 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1849 and scan the sorted vector of cases. Combine the ones jumping to the
1850 same label. */
1851
1852 bool
group_case_labels(void)1853 group_case_labels (void)
1854 {
1855 basic_block bb;
1856 bool changed = false;
1857
1858 FOR_EACH_BB_FN (bb, cfun)
1859 {
1860 gimple *stmt = last_stmt (bb);
1861 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1862 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1863 }
1864
1865 return changed;
1866 }
1867
/* Checks whether we can merge block B into block A, i.e. whether A's
   single successor edge to B can be collapsed.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  /* A must fall through to B and B must have no other predecessors.  */
  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  The locus
     survives only if some statement adjacent to the merge point already
     carries it.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
1959
/* Replaces all uses of NAME by VAL.  Afterwards NAME has zero uses;
   statements whose operands changed are folded and updated, and dead
   EH edges are purged where folding changed EH behavior.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  /* Folding may replace the statement; keep STMT current.  */
	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  /* All uses must have been rewritten above.  */
  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      class loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
2041
/* Merge block B into block A.  A is the single predecessor of B (relied
   on below): B's PHI nodes are eliminated by propagation or by explicit
   copies appended to A, B's labels are removed or moved into A, and B's
   statement sequence is appended to A's.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we cannot just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      tree first_label = NULL_TREE;
	      if (!gsi_end_p (dest_gsi))
		if (glabel *first_label_stmt
		    = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
		  first_label = gimple_label_label (first_label_stmt);
	      /* A nonlocal or landing-pad label must stay first in A;
		 insert the moved label after it in that case.  */
	      if (first_label
		  && (DECL_NONLOCAL (first_label)
		      || EH_LANDING_PAD_NR (first_label) != 0))
		gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
	      else
		gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  /* The removed label no longer marks a landing-pad entry.  */
	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = a->count.merge (b->count);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
2188
2189
2190 /* Return the one of two successors of BB that is not reachable by a
2191 complex edge, if there is one. Else, return BB. We use
2192 this in optimizations that use post-dominators for their heuristics,
2193 to catch the cases in C++ where function calls are involved. */
2194
2195 basic_block
single_noncomplex_succ(basic_block bb)2196 single_noncomplex_succ (basic_block bb)
2197 {
2198 edge e0, e1;
2199 if (EDGE_COUNT (bb->succs) != 2)
2200 return bb;
2201
2202 e0 = EDGE_SUCC (bb, 0);
2203 e1 = EDGE_SUCC (bb, 1);
2204 if (e0->flags & EDGE_COMPLEX)
2205 return e1->dest;
2206 if (e1->flags & EDGE_COMPLEX)
2207 return e0->dest;
2208
2209 return bb;
2210 }
2211
2212 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2213
2214 void
notice_special_calls(gcall * call)2215 notice_special_calls (gcall *call)
2216 {
2217 int flags = gimple_call_flags (call);
2218
2219 if (flags & ECF_MAY_BE_ALLOCA)
2220 cfun->calls_alloca = true;
2221 if (flags & ECF_RETURNS_TWICE)
2222 cfun->calls_setjmp = true;
2223 }
2224
2225
2226 /* Clear flags set by notice_special_calls. Used by dead code removal
2227 to update the flags. */
2228
2229 void
clear_special_calls(void)2230 clear_special_calls (void)
2231 {
2232 cfun->calls_alloca = false;
2233 cfun->calls_setjmp = false;
2234 }
2235
2236 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2237
2238 static void
remove_phi_nodes_and_edges_for_unreachable_block(basic_block bb)2239 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2240 {
2241 /* Since this block is no longer reachable, we can just delete all
2242 of its PHI nodes. */
2243 remove_phi_nodes (bb);
2244
2245 /* Remove edges to BB's successors. */
2246 while (EDGE_COUNT (bb->succs) > 0)
2247 remove_edge (EDGE_SUCC (bb, 0));
2248 }
2249
2250
/* Remove statements of basic block BB.  Forced and nonlocal labels are
   rescued into a neighboring block; all other statements are removed
   and their SSA definitions released.  Finally BB's PHI nodes and
   outgoing edges are removed and its sequences cleared.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      class loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-locality.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      /* Don't move any labels into ENTRY block.  */
	      if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		{
		  new_bb = single_succ (new_bb);
		  gcc_assert (new_bb != bb);
		}
	      /* Move the rescued label after any labels already at the
		 start of the destination block.  */
	      new_gsi = gsi_after_labels (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions.  */
	      release_defs (stmt);
	      gsi_remove (&i, true);
	    }

	  /* After the removal I points past the deleted statement;
	     restart at the (new) last statement when we fell off the
	     end, otherwise simply step backwards.  */
	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  /* Clear out the statement and PHI sequences of the dead block.  */
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
2335
2336
2337 /* Given a basic block BB and a value VAL for use in the final statement
2338 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2339 the edge that will be taken out of the block.
2340 If VAL is NULL_TREE, then the current value of the final statement's
2341 predicate or index is used.
2342 If the value does not match a unique edge, NULL is returned. */
2343
2344 edge
find_taken_edge(basic_block bb,tree val)2345 find_taken_edge (basic_block bb, tree val)
2346 {
2347 gimple *stmt;
2348
2349 stmt = last_stmt (bb);
2350
2351 /* Handle ENTRY and EXIT. */
2352 if (!stmt)
2353 return NULL;
2354
2355 if (gimple_code (stmt) == GIMPLE_COND)
2356 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2357
2358 if (gimple_code (stmt) == GIMPLE_SWITCH)
2359 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2360
2361 if (computed_goto_p (stmt))
2362 {
2363 /* Only optimize if the argument is a label, if the argument is
2364 not a label then we cannot construct a proper CFG.
2365
2366 It may be the case that we only need to allow the LABEL_REF to
2367 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2368 appear inside a LABEL_EXPR just to be safe. */
2369 if (val
2370 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2371 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2372 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2373 }
2374
2375 /* Otherwise we only know the taken successor edge if it's unique. */
2376 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2377 }
2378
2379 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2380 statement, determine which of the outgoing edges will be taken out of the
2381 block. Return NULL if either edge may be taken. */
2382
2383 static edge
find_taken_edge_computed_goto(basic_block bb,tree val)2384 find_taken_edge_computed_goto (basic_block bb, tree val)
2385 {
2386 basic_block dest;
2387 edge e = NULL;
2388
2389 dest = label_to_block (cfun, val);
2390 if (dest)
2391 e = find_edge (bb, dest);
2392
2393 /* It's possible for find_edge to return NULL here on invalid code
2394 that abuses the labels-as-values extension (e.g. code that attempts to
2395 jump *between* functions via stored labels-as-values; PR 84136).
2396 If so, then we simply return that NULL for the edge.
2397 We don't currently have a way of detecting such invalid code, so we
2398 can't assert that it was the case when a NULL edge occurs here. */
2399
2400 return e;
2401 }
2402
2403 /* Given COND_STMT and a constant value VAL for use as the predicate,
2404 determine which of the two edges will be taken out of
2405 the statement's block. Return NULL if either edge may be taken.
2406 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2407 is used. */
2408
2409 static edge
find_taken_edge_cond_expr(const gcond * cond_stmt,tree val)2410 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2411 {
2412 edge true_edge, false_edge;
2413
2414 if (val == NULL_TREE)
2415 {
2416 /* Use the current value of the predicate. */
2417 if (gimple_cond_true_p (cond_stmt))
2418 val = integer_one_node;
2419 else if (gimple_cond_false_p (cond_stmt))
2420 val = integer_zero_node;
2421 else
2422 return NULL;
2423 }
2424 else if (TREE_CODE (val) != INTEGER_CST)
2425 return NULL;
2426
2427 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2428 &true_edge, &false_edge);
2429
2430 return (integer_zerop (val) ? false_edge : true_edge);
2431 }
2432
2433 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2434 which edge will be taken out of the statement's block. Return NULL if any
2435 edge may be taken.
2436 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2437 is used. */
2438
2439 edge
find_taken_edge_switch_expr(const gswitch * switch_stmt,tree val)2440 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2441 {
2442 basic_block dest_bb;
2443 edge e;
2444 tree taken_case;
2445
2446 if (gimple_switch_num_labels (switch_stmt) == 1)
2447 taken_case = gimple_switch_default_label (switch_stmt);
2448 else
2449 {
2450 if (val == NULL_TREE)
2451 val = gimple_switch_index (switch_stmt);
2452 if (TREE_CODE (val) != INTEGER_CST)
2453 return NULL;
2454 else
2455 taken_case = find_case_label_for_value (switch_stmt, val);
2456 }
2457 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2458
2459 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2460 gcc_assert (e);
2461 return e;
2462 }
2463
2464
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.
   If no case matches, the default label is returned.  */

tree
find_case_label_for_value (const gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  /* Binary search over the sorted case labels; LOW/HIGH bracket the
     candidate range and shrink until it collapses or a match returns.  */
  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
2505
2506
2507 /* Dump a basic block on stderr. */
2508
2509 void
gimple_debug_bb(basic_block bb)2510 gimple_debug_bb (basic_block bb)
2511 {
2512 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2513 }
2514
2515
2516 /* Dump basic block with index N on stderr. */
2517
2518 basic_block
gimple_debug_bb_n(int n)2519 gimple_debug_bb_n (int n)
2520 {
2521 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2522 return BASIC_BLOCK_FOR_FN (cfun, n);
2523 }
2524
2525
2526 /* Dump the CFG on stderr.
2527
2528 FLAGS are the same used by the tree dumping functions
2529 (see TDF_* in dumpfile.h). */
2530
2531 void
gimple_debug_cfg(dump_flags_t flags)2532 gimple_debug_cfg (dump_flags_t flags)
2533 {
2534 gimple_dump_cfg (stderr, flags);
2535 }
2536
2537
2538 /* Dump the program showing basic block boundaries on the given FILE.
2539
2540 FLAGS are the same used by the tree dumping functions (see TDF_* in
2541 tree.h). */
2542
2543 void
gimple_dump_cfg(FILE * file,dump_flags_t flags)2544 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2545 {
2546 if (flags & TDF_DETAILS)
2547 {
2548 dump_function_header (file, current_function_decl, flags);
2549 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2550 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2551 last_basic_block_for_fn (cfun));
2552
2553 brief_dump_cfg (file, flags);
2554 fprintf (file, "\n");
2555 }
2556
2557 if (flags & TDF_STATS)
2558 dump_cfg_stats (file);
2559
2560 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2561 }
2562
2563
2564 /* Dump CFG statistics on FILE. */
2565
2566 void
dump_cfg_stats(FILE * file)2567 dump_cfg_stats (FILE *file)
2568 {
2569 static long max_num_merged_labels = 0;
2570 unsigned long size, total = 0;
2571 long num_edges;
2572 basic_block bb;
2573 const char * const fmt_str = "%-30s%-13s%12s\n";
2574 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2575 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2576 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2577 const char *funcname = current_function_name ();
2578
2579 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2580
2581 fprintf (file, "---------------------------------------------------------\n");
2582 fprintf (file, fmt_str, "", " Number of ", "Memory");
2583 fprintf (file, fmt_str, "", " instances ", "used ");
2584 fprintf (file, "---------------------------------------------------------\n");
2585
2586 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2587 total += size;
2588 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2589 SIZE_AMOUNT (size));
2590
2591 num_edges = 0;
2592 FOR_EACH_BB_FN (bb, cfun)
2593 num_edges += EDGE_COUNT (bb->succs);
2594 size = num_edges * sizeof (class edge_def);
2595 total += size;
2596 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2597
2598 fprintf (file, "---------------------------------------------------------\n");
2599 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2600 SIZE_AMOUNT (total));
2601 fprintf (file, "---------------------------------------------------------\n");
2602 fprintf (file, "\n");
2603
2604 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2605 max_num_merged_labels = cfg_stats.num_merged_labels;
2606
2607 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2608 cfg_stats.num_merged_labels, max_num_merged_labels);
2609
2610 fprintf (file, "\n");
2611 }
2612
2613
/* Dump CFG statistics on stderr.  Keep extern so that it's always
   linked in the final executable.  */

DEBUG_FUNCTION void
debug_cfg_stats (void)
{
  dump_cfg_stats (stderr);
}
2622
2623 /*---------------------------------------------------------------------------
2624 Miscellaneous helpers
2625 ---------------------------------------------------------------------------*/
2626
2627 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2628 flow. Transfers of control flow associated with EH are excluded. */
2629
2630 static bool
call_can_make_abnormal_goto(gimple * t)2631 call_can_make_abnormal_goto (gimple *t)
2632 {
2633 /* If the function has no non-local labels, then a call cannot make an
2634 abnormal transfer of control. */
2635 if (!cfun->has_nonlocal_label
2636 && !cfun->calls_setjmp)
2637 return false;
2638
2639 /* Likewise if the call has no side effects. */
2640 if (!gimple_has_side_effects (t))
2641 return false;
2642
2643 /* Likewise if the called function is leaf. */
2644 if (gimple_call_flags (t) & ECF_LEAF)
2645 return false;
2646
2647 return true;
2648 }
2649
2650
2651 /* Return true if T can make an abnormal transfer of control flow.
2652 Transfers of control flow associated with EH are excluded. */
2653
2654 bool
stmt_can_make_abnormal_goto(gimple * t)2655 stmt_can_make_abnormal_goto (gimple *t)
2656 {
2657 if (computed_goto_p (t))
2658 return true;
2659 if (is_gimple_call (t))
2660 return call_can_make_abnormal_goto (t);
2661 return false;
2662 }
2663
2664
2665 /* Return true if T represents a stmt that always transfers control. */
2666
2667 bool
is_ctrl_stmt(gimple * t)2668 is_ctrl_stmt (gimple *t)
2669 {
2670 switch (gimple_code (t))
2671 {
2672 case GIMPLE_COND:
2673 case GIMPLE_SWITCH:
2674 case GIMPLE_GOTO:
2675 case GIMPLE_RETURN:
2676 case GIMPLE_RESX:
2677 return true;
2678 default:
2679 return false;
2680 }
2681 }
2682
2683
/* Return true if T is a statement that may alter the flow of control
   (e.g., a call to a non-returning function).  */

bool
is_ctrl_altering_stmt (gimple *t)
{
  gcc_assert (t);

  switch (gimple_code (t))
    {
    case GIMPLE_CALL:
      /* Per stmt call flag indicates whether the call could alter
	 controlflow.  */
      if (gimple_call_ctrl_altering_p (t))
	return true;
      break;

    case GIMPLE_EH_DISPATCH:
      /* EH_DISPATCH branches to the individual catch handlers at
	 this level of a try or allowed-exceptions region.  It can
	 fallthru to the next statement as well.  */
      return true;

    case GIMPLE_ASM:
      /* An asm statement with label operands can branch to them.  */
      if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
	return true;
      break;

    CASE_GIMPLE_OMP:
      /* OpenMP directives alter control flow.  */
      return true;

    case GIMPLE_TRANSACTION:
      /* A transaction start alters control flow.  */
      return true;

    default:
      break;
    }

  /* If a statement can throw, it alters control flow.  */
  return stmt_can_throw_internal (cfun, t);
}
2727
2728
2729 /* Return true if T is a simple local goto. */
2730
2731 bool
simple_goto_p(gimple * t)2732 simple_goto_p (gimple *t)
2733 {
2734 return (gimple_code (t) == GIMPLE_GOTO
2735 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2736 }
2737
2738
/* Return true if STMT should start a new basic block.  PREV_STMT is
   the statement preceding STMT.  It is used when STMT is a label or a
   case label.  Labels should only start a new basic block if their
   previous statement wasn't a label.  Otherwise, sequence of labels
   would generate unnecessary basic blocks that only contain a single
   label.  */

static inline bool
stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
{
  if (stmt == NULL)
    return false;

  /* PREV_STMT is only set to a debug stmt if the debug stmt is before
     any nondebug stmts in the block.  We don't want to start another
     block in this case: the debug stmt will already have started the
     one STMT would start if we weren't outputting debug stmts.  */
  if (prev_stmt && is_gimple_debug (prev_stmt))
    return false;

  /* Labels start a new basic block only if the preceding statement
     wasn't a label of the same type.  This prevents the creation of
     consecutive blocks that have nothing but a single label.  */
  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
    {
      /* Nonlocal and computed GOTO targets always start a new block.  */
      if (DECL_NONLOCAL (gimple_label_label (label_stmt))
	  || FORCED_LABEL (gimple_label_label (label_stmt)))
	return true;

      if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
	{
	  /* A preceding nonlocal or user label may not be merged
	     over; start a fresh block.  */
	  if (DECL_NONLOCAL (gimple_label_label (plabel))
	      || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
	    return true;

	  /* Adjacent labels share one block; record the merge for
	     the CFG statistics dump.  */
	  cfg_stats.num_merged_labels++;
	  return false;
	}
      else
	return true;
    }
  else if (gimple_code (stmt) == GIMPLE_CALL)
    {
      if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
	/* setjmp acts similar to a nonlocal GOTO target and thus should
	   start a new block.  */
	return true;
      if (gimple_call_internal_p (stmt, IFN_PHI)
	  && prev_stmt
	  && gimple_code (prev_stmt) != GIMPLE_LABEL
	  && (gimple_code (prev_stmt) != GIMPLE_CALL
	      || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
	/* PHI nodes start a new block unless preceded by a label
	   or another PHI.  */
	return true;
    }

  return false;
}
2799
2800
2801 /* Return true if T should end a basic block. */
2802
2803 bool
stmt_ends_bb_p(gimple * t)2804 stmt_ends_bb_p (gimple *t)
2805 {
2806 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2807 }
2808
/* Remove block annotations and other data structures.  Currently this
   only releases FN's label-to-block map.  */

void
delete_tree_cfg_annotations (struct function *fn)
{
  vec_free (label_to_block_map_for_fn (fn));
}
2816
2817 /* Return the virtual phi in BB. */
2818
2819 gphi *
get_virtual_phi(basic_block bb)2820 get_virtual_phi (basic_block bb)
2821 {
2822 for (gphi_iterator gsi = gsi_start_phis (bb);
2823 !gsi_end_p (gsi);
2824 gsi_next (&gsi))
2825 {
2826 gphi *phi = gsi.phi ();
2827
2828 if (virtual_operand_p (PHI_RESULT (phi)))
2829 return phi;
2830 }
2831
2832 return NULL;
2833 }
2834
2835 /* Return the first statement in basic block BB. */
2836
2837 gimple *
first_stmt(basic_block bb)2838 first_stmt (basic_block bb)
2839 {
2840 gimple_stmt_iterator i = gsi_start_bb (bb);
2841 gimple *stmt = NULL;
2842
2843 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2844 {
2845 gsi_next (&i);
2846 stmt = NULL;
2847 }
2848 return stmt;
2849 }
2850
2851 /* Return the first non-label statement in basic block BB. */
2852
2853 static gimple *
first_non_label_stmt(basic_block bb)2854 first_non_label_stmt (basic_block bb)
2855 {
2856 gimple_stmt_iterator i = gsi_start_bb (bb);
2857 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2858 gsi_next (&i);
2859 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2860 }
2861
2862 /* Return the last statement in basic block BB. */
2863
2864 gimple *
last_stmt(basic_block bb)2865 last_stmt (basic_block bb)
2866 {
2867 gimple_stmt_iterator i = gsi_last_bb (bb);
2868 gimple *stmt = NULL;
2869
2870 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2871 {
2872 gsi_prev (&i);
2873 stmt = NULL;
2874 }
2875 return stmt;
2876 }
2877
2878 /* Return the last statement of an otherwise empty block. Return NULL
2879 if the block is totally empty, or if it contains more than one
2880 statement. */
2881
2882 gimple *
last_and_only_stmt(basic_block bb)2883 last_and_only_stmt (basic_block bb)
2884 {
2885 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2886 gimple *last, *prev;
2887
2888 if (gsi_end_p (i))
2889 return NULL;
2890
2891 last = gsi_stmt (i);
2892 gsi_prev_nondebug (&i);
2893 if (gsi_end_p (i))
2894 return last;
2895
2896 /* Empty statements should no longer appear in the instruction stream.
2897 Everything that might have appeared before should be deleted by
2898 remove_useless_stmts, and the optimizers should just gsi_remove
2899 instead of smashing with build_empty_stmt.
2900
2901 Thus the only thing that should appear here in a block containing
2902 one executable statement is a label. */
2903 prev = gsi_stmt (i);
2904 if (gimple_code (prev) == GIMPLE_LABEL)
2905 return last;
2906 else
2907 return NULL;
2908 }
2909
/* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE.  */

static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
  edge_var_map *vm;
  int i;
  gphi_iterator phis;

  /* Arguments removed when OLD_EDGE was redirected are recorded in its
     var map; nothing to do if none were queued.  */
  vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
  if (!v)
    return;

  /* Walk the queued (result, def) pairs and the destination's PHI
     nodes in lock step -- assumed to be in matching order, which the
     assert below checks.  */
  for (i = 0, phis = gsi_start_phis (new_edge->dest);
       v->iterate (i, &vm) && !gsi_end_p (phis);
       i++, gsi_next (&phis))
    {
      gphi *phi = phis.phi ();
      tree result = redirect_edge_var_map_result (vm);
      tree arg = redirect_edge_var_map_def (vm);

      gcc_assert (result == gimple_phi_result (phi));

      add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
    }

  /* The queued entries have been consumed.  */
  redirect_edge_var_map_clear (old_edge);
}
2938
2939 /* Returns the basic block after which the new basic block created
2940 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2941 near its "logical" location. This is of most help to humans looking
2942 at debugging dumps. */
2943
2944 basic_block
split_edge_bb_loc(edge edge_in)2945 split_edge_bb_loc (edge edge_in)
2946 {
2947 basic_block dest = edge_in->dest;
2948 basic_block dest_prev = dest->prev_bb;
2949
2950 if (dest_prev)
2951 {
2952 edge e = find_edge (dest_prev, dest);
2953 if (e && !(e->flags & EDGE_COMPLEX))
2954 return edge_in->src;
2955 }
2956 return dest_prev;
2957 }
2958
/* Split a (typically critical) edge EDGE_IN.  Return the new block.
   Abort on abnormal edges.  */

static basic_block
gimple_split_edge (edge edge_in)
{
  basic_block new_bb, after_bb, dest;
  edge new_edge, e;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  dest = edge_in->dest;

  /* Place the new block near its "logical" location in the chain.  */
  after_bb = split_edge_bb_loc (edge_in);

  new_bb = create_empty_bb (after_bb);
  /* All of EDGE_IN's count flows through the new block.  */
  new_bb->count = edge_in->count ();

  /* Redirect EDGE_IN to the new block; the PHI arguments DEST had for
     EDGE_IN are queued on the edge's var map by the redirection.  */
  e = redirect_edge_and_branch (edge_in, new_bb);
  gcc_assert (e == edge_in);

  /* Fall through from the new block to DEST and reinstall the queued
     PHI arguments on that new edge.  */
  new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
  reinstall_phi_args (new_edge, e);

  return new_bb;
}
2986
2987
/* Verify properties of the address expression T whose base should be
   TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true.  Returns true if an
   error was found, false otherwise.  */

static bool
verify_address (tree t, bool verify_addressable)
{
  bool old_constant;
  bool old_side_effects;
  bool new_constant;
  bool new_side_effects;

  /* Recompute the invariant and side-effect flags and check that the
     cached values were already up to date.  */
  old_constant = TREE_CONSTANT (t);
  old_side_effects = TREE_SIDE_EFFECTS (t);

  recompute_tree_invariant_for_addr_expr (t);
  new_side_effects = TREE_SIDE_EFFECTS (t);
  new_constant = TREE_CONSTANT (t);

  if (old_constant != new_constant)
    {
      error ("constant not recomputed when %<ADDR_EXPR%> changed");
      return true;
    }
  if (old_side_effects != new_side_effects)
    {
      error ("side effects not recomputed when %<ADDR_EXPR%> changed");
      return true;
    }

  /* Strip handled components to reach the base object.  */
  tree base = TREE_OPERAND (t, 0);
  while (handled_component_p (base))
    base = TREE_OPERAND (base, 0);

  /* Only variable-like declarations are subject to the checks below.  */
  if (!(VAR_P (base)
	|| TREE_CODE (base) == PARM_DECL
	|| TREE_CODE (base) == RESULT_DECL))
    return false;

  /* A decl whose address is taken may not be a GIMPLE register.  */
  if (DECL_GIMPLE_REG_P (base))
    {
      error ("%<DECL_GIMPLE_REG_P%> set on a variable with address taken");
      return true;
    }

  if (verify_addressable && !TREE_ADDRESSABLE (base))
    {
      error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
      return true;
    }

  return false;
}
3040
3041
3042 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3043 Returns true if there is an error, otherwise false. */
3044
3045 static bool
verify_types_in_gimple_min_lval(tree expr)3046 verify_types_in_gimple_min_lval (tree expr)
3047 {
3048 tree op;
3049
3050 if (is_gimple_id (expr))
3051 return false;
3052
3053 if (TREE_CODE (expr) != TARGET_MEM_REF
3054 && TREE_CODE (expr) != MEM_REF)
3055 {
3056 error ("invalid expression for min lvalue");
3057 return true;
3058 }
3059
3060 /* TARGET_MEM_REFs are strange beasts. */
3061 if (TREE_CODE (expr) == TARGET_MEM_REF)
3062 return false;
3063
3064 op = TREE_OPERAND (expr, 0);
3065 if (!is_gimple_val (op))
3066 {
3067 error ("invalid operand in indirect reference");
3068 debug_generic_stmt (op);
3069 return true;
3070 }
3071 /* Memory references now generally can involve a value conversion. */
3072
3073 return false;
3074 }
3075
/* Verify if EXPR is a valid GIMPLE reference expression.  If
   REQUIRE_LVALUE is true verifies it is an lvalue.  Returns true
   if there is an error, otherwise false.

   Walks EXPR from the outermost handled component down to its base,
   checking each level's operands and types, then validates the base
   (MEM_REF / TARGET_MEM_REF / minimal lvalue).  */

static bool
verify_types_in_gimple_reference (tree expr, bool require_lvalue)
{
  const char *code_name = get_tree_code_name (TREE_CODE (expr));

  /* REALPART/IMAGPART/BIT_FIELD_REF are only allowed at the top level
     of a reference tree; peel one off here before the component walk
     below (which rejects them at inner positions).  */
  if (TREE_CODE (expr) == REALPART_EXPR
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!is_gimple_reg_type (TREE_TYPE (expr)))
	{
	  error ("non-scalar %qs", code_name);
	  return true;
	}

      if (TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  /* Operand 1 is the field size, operand 2 the bit position;
	     both must be poly-int constants of bitsizetype.  */
	  tree t1 = TREE_OPERAND (expr, 1);
	  tree t2 = TREE_OPERAND (expr, 2);
	  poly_uint64 size, bitpos;
	  if (!poly_int_tree_p (t1, &size)
	      || !poly_int_tree_p (t2, &bitpos)
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
	      || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
	    {
	      error ("invalid position or size operand to %qs", code_name);
	      return true;
	    }
	  /* The result type's precision (integral) or mode size
	     (non-integral, non-BLKmode) must match the field size.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
	      && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
	    {
	      error ("integral result type precision does not match "
		     "field size of %qs", code_name);
	      return true;
	    }
	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
		   && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
		   && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
				size))
	    {
	      error ("mode size of non-integral result does not "
		     "match field size of %qs",
		     code_name);
	      return true;
	    }
	  /* Integral operands must use their full mode precision.  */
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op))
	      && !type_has_mode_precision_p (TREE_TYPE (op)))
	    {
	      error ("%qs of non-mode-precision operand", code_name);
	      return true;
	    }
	  /* For non-aggregate operands the selected bits must lie
	     inside the referenced object.  */
	  if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
	      && maybe_gt (size + bitpos,
			   tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
	    {
	      error ("position plus size exceeds size of referenced object in "
		     "%qs", code_name);
	      return true;
	    }
	}

      /* REALPART/IMAGPART must yield the complex type's element type.  */
      if ((TREE_CODE (expr) == REALPART_EXPR
	   || TREE_CODE (expr) == IMAGPART_EXPR)
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs reference", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      expr = op;
    }

  /* Walk down the remaining handled components towards the base.  */
  while (handled_component_p (expr))
    {
      code_name = get_tree_code_name (TREE_CODE (expr));

      /* These were handled above and may not appear at inner levels.  */
      if (TREE_CODE (expr) == REALPART_EXPR
	  || TREE_CODE (expr) == IMAGPART_EXPR
	  || TREE_CODE (expr) == BIT_FIELD_REF)
	{
	  error ("non-top-level %qs", code_name);
	  return true;
	}

      tree op = TREE_OPERAND (expr, 0);

      /* Array references: index and the optional bound/element-size
	 operands must be GIMPLE values.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  || TREE_CODE (expr) == ARRAY_RANGE_REF)
	{
	  if (!is_gimple_val (TREE_OPERAND (expr, 1))
	      || (TREE_OPERAND (expr, 2)
		  && !is_gimple_val (TREE_OPERAND (expr, 2)))
	      || (TREE_OPERAND (expr, 3)
		  && !is_gimple_val (TREE_OPERAND (expr, 3))))
	    {
	      error ("invalid operands to %qs", code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	}

      /* Verify if the reference array element types are compatible.  */
      if (TREE_CODE (expr) == ARRAY_REF
	  && !useless_type_conversion_p (TREE_TYPE (expr),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (expr));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}
      if (TREE_CODE (expr) == ARRAY_RANGE_REF
	  && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
					 TREE_TYPE (TREE_TYPE (op))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
	  debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
	  return true;
	}

      /* COMPONENT_REF: optional offset operand must be a GIMPLE value
	 and the reference's type must match the FIELD_DECL's type.  */
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  if (TREE_OPERAND (expr, 2)
	      && !is_gimple_val (TREE_OPERAND (expr, 2)))
	    {
	      error ("invalid %qs offset operator", code_name);
	      return true;
	    }
	  if (!useless_type_conversion_p (TREE_TYPE (expr),
					  TREE_TYPE (TREE_OPERAND (expr, 1))))
	    {
	      error ("type mismatch in %qs", code_name);
	      debug_generic_stmt (TREE_TYPE (expr));
	      debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
	      return true;
	    }
	}

      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	{
	  /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
	     that their operand is not an SSA name or an invariant when
	     requiring an lvalue (this usually means there is a SRA or IPA-SRA
	     bug).  Otherwise there is nothing to verify, gross mismatches at
	     most invoke undefined behavior.  */
	  if (require_lvalue
	      && (TREE_CODE (op) == SSA_NAME
		  || is_gimple_min_invariant (op)))
	    {
	      error ("conversion of %qs on the left hand side of %qs",
		     get_tree_code_name (TREE_CODE (op)), code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (TREE_CODE (op) == SSA_NAME
		   && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
	    {
	      error ("conversion of register to a different size in %qs",
		     code_name);
	      debug_generic_stmt (expr);
	      return true;
	    }
	  else if (!handled_component_p (op))
	    return false;
	}

      expr = op;
    }

  /* EXPR is now the base of the reference; validate it.  */
  code_name = get_tree_code_name (TREE_CODE (expr));

  if (TREE_CODE (expr) == MEM_REF)
    {
      /* Address operand must be valid for a memory reference, and if
	 it is an ADDR_EXPR, verify_address must accept it.  */
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
	  || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
	      && verify_address (TREE_OPERAND (expr, 0), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      /* Offset operand is a constant of pointer type.  */
      if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
	  || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      /* A nonzero dependence clique must have been allocated in CFUN.  */
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == TARGET_MEM_REF)
    {
      /* Same checks as MEM_REF but through the TMR accessors; base and
	 offset are mandatory here.  */
      if (!TMR_BASE (expr)
	  || !is_gimple_mem_ref_addr (TMR_BASE (expr))
	  || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
	      && verify_address (TMR_BASE (expr), false)))
	{
	  error ("invalid address operand in %qs", code_name);
	  return true;
	}
      if (!TMR_OFFSET (expr)
	  || !poly_int_tree_p (TMR_OFFSET (expr))
	  || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
	{
	  error ("invalid offset operand in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
      if (MR_DEPENDENCE_CLIQUE (expr) != 0
	  && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
	{
	  error ("invalid clique in %qs", code_name);
	  debug_generic_stmt (expr);
	  return true;
	}
    }
  else if (TREE_CODE (expr) == INDIRECT_REF)
    {
      /* INDIRECT_REF was replaced by MEM_REF and must not survive
	 into GIMPLE.  */
      error ("%qs in gimple IL", code_name);
      debug_generic_stmt (expr);
      return true;
    }

  /* Invariant bases (e.g. constants) are fine as rvalues; otherwise the
     base must be a valid minimal lvalue.  */
  return ((require_lvalue || !is_gimple_min_invariant (expr))
	  && verify_types_in_gimple_min_lval (expr));
}
3316
3317 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3318 list of pointer-to types that is trivially convertible to DEST. */
3319
3320 static bool
one_pointer_to_useless_type_conversion_p(tree dest,tree src_obj)3321 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3322 {
3323 tree src;
3324
3325 if (!TYPE_POINTER_TO (src_obj))
3326 return true;
3327
3328 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3329 if (useless_type_conversion_p (dest, src))
3330 return true;
3331
3332 return false;
3333 }
3334
3335 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3336 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3337
3338 static bool
valid_fixed_convert_types_p(tree type1,tree type2)3339 valid_fixed_convert_types_p (tree type1, tree type2)
3340 {
3341 return (FIXED_POINT_TYPE_P (type1)
3342 && (INTEGRAL_TYPE_P (type2)
3343 || SCALAR_FLOAT_TYPE_P (type2)
3344 || FIXED_POINT_TYPE_P (type2)));
3345 }
3346
/* Verify the contents of a GIMPLE_CALL STMT.  Returns true when there
   is a problem, otherwise false.

   Checks the call target, LHS, static chain and arguments for GIMPLE
   validity; emits a diagnostic via error () before each true return.  */

static bool
verify_gimple_call (gcall *stmt)
{
  tree fn = gimple_call_fn (stmt);
  tree fntype, fndecl;
  unsigned i;

  /* Internal calls carry no FN; regular calls must have one.  */
  if (gimple_call_internal_p (stmt))
    {
      if (fn)
	{
	  error ("gimple call has two targets");
	  debug_generic_stmt (fn);
	  return true;
	}
    }
  else
    {
      if (!fn)
	{
	  error ("gimple call has no target");
	  return true;
	}
    }

  /* The callee must be a valid GIMPLE call address ...  */
  if (fn && !is_gimple_call_addr (fn))
    {
      error ("invalid function in gimple call");
      debug_generic_stmt (fn);
      return true;
    }

  /* ... of pointer-to-function (or -method) type.  */
  if (fn
      && (!POINTER_TYPE_P (TREE_TYPE (fn))
	  || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
    {
      error ("non-function in gimple call");
      return true;
    }

  /* DECL_LOOPING_CONST_OR_PURE_P only makes sense together with one of
     the pure/const flags.  */
  fndecl = gimple_call_fndecl (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
      && !DECL_PURE_P (fndecl)
      && !TREE_READONLY (fndecl))
    {
      error ("invalid pure const state for function");
      return true;
    }

  /* A present LHS must be a verifiable GIMPLE lvalue.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (!is_gimple_lvalue (lhs)
	  || verify_types_in_gimple_reference (lhs, true)))
    {
      error ("invalid LHS in gimple call");
      return true;
    }

  /* Noreturn calls whose LHS should have been removed must not keep it.  */
  if (gimple_call_ctrl_altering_p (stmt)
      && gimple_call_noreturn_p (stmt)
      && should_remove_lhs_p (lhs))
    {
      error ("LHS in %<noreturn%> call");
      return true;
    }

  /* The LHS type has to be compatible with the declared return type.  */
  fntype = gimple_call_fntype (stmt);
  if (fntype
      && lhs
      && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
      /* ???  At least C++ misses conversions at assignments from
	 void * call results.
	 For now simply allow arbitrary pointer type conversions.  */
      && !(POINTER_TYPE_P (TREE_TYPE (lhs))
	   && POINTER_TYPE_P (TREE_TYPE (fntype))))
    {
      error ("invalid conversion in gimple call");
      debug_generic_stmt (TREE_TYPE (lhs));
      debug_generic_stmt (TREE_TYPE (fntype));
      return true;
    }

  /* A static chain operand must be a GIMPLE value.  */
  if (gimple_call_chain (stmt)
      && !is_gimple_val (gimple_call_chain (stmt)))
    {
      error ("invalid static chain in gimple call");
      debug_generic_stmt (gimple_call_chain (stmt));
      return true;
    }

  /* If there is a static chain argument, the call should either be
     indirect, or the decl should have DECL_STATIC_CHAIN set.  */
  if (gimple_call_chain (stmt)
      && fndecl
      && !DECL_STATIC_CHAIN (fndecl))
    {
      error ("static chain with function that doesn%'t use one");
      return true;
    }

  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_UNREACHABLE:
	case BUILT_IN_TRAP:
	  if (gimple_call_num_args (stmt) > 0)
	    {
	      /* Built-in unreachable with parameters might not be caught by
		 undefined behavior sanitizer.  Front-ends do check users do not
		 call them that way but we also produce calls to
		 __builtin_unreachable internally, for example when IPA figures
		 out a call cannot happen in a legal program.  In such cases,
		 we must make sure arguments are stripped off.  */
	      error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
		     "with arguments");
	      return true;
	    }
	  break;
	default:
	  break;
	}
    }

  /* ???  The C frontend passes unpromoted arguments in case it
     didn't see a function declaration before the call.  So for now
     leave the call arguments mostly unverified.  Once we gimplify
     unit-at-a-time we have a chance to fix this.  */

  /* Register-typed arguments must be GIMPLE values, aggregate-typed
     ones must be GIMPLE lvalues.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      if ((is_gimple_reg_type (TREE_TYPE (arg))
	   && !is_gimple_val (arg))
	  || (!is_gimple_reg_type (TREE_TYPE (arg))
	      && !is_gimple_lvalue (arg)))
	{
	  error ("invalid argument to gimple call");
	  debug_generic_expr (arg);
	  return true;
	}
    }

  return false;
}
3498
3499 /* Verifies the gimple comparison with the result type TYPE and
3500 the operands OP0 and OP1, comparison code is CODE. */
3501
3502 static bool
verify_gimple_comparison(tree type,tree op0,tree op1,enum tree_code code)3503 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3504 {
3505 tree op0_type = TREE_TYPE (op0);
3506 tree op1_type = TREE_TYPE (op1);
3507
3508 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3509 {
3510 error ("invalid operands in gimple comparison");
3511 return true;
3512 }
3513
3514 /* For comparisons we do not have the operations type as the
3515 effective type the comparison is carried out in. Instead
3516 we require that either the first operand is trivially
3517 convertible into the second, or the other way around.
3518 Because we special-case pointers to void we allow
3519 comparisons of pointers with the same mode as well. */
3520 if (!useless_type_conversion_p (op0_type, op1_type)
3521 && !useless_type_conversion_p (op1_type, op0_type)
3522 && (!POINTER_TYPE_P (op0_type)
3523 || !POINTER_TYPE_P (op1_type)
3524 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3525 {
3526 error ("mismatching comparison operand types");
3527 debug_generic_expr (op0_type);
3528 debug_generic_expr (op1_type);
3529 return true;
3530 }
3531
3532 /* The resulting type of a comparison may be an effective boolean type. */
3533 if (INTEGRAL_TYPE_P (type)
3534 && (TREE_CODE (type) == BOOLEAN_TYPE
3535 || TYPE_PRECISION (type) == 1))
3536 {
3537 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3538 || TREE_CODE (op1_type) == VECTOR_TYPE)
3539 && code != EQ_EXPR && code != NE_EXPR
3540 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3541 && !VECTOR_INTEGER_TYPE_P (op0_type))
3542 {
3543 error ("unsupported operation or type for vector comparison"
3544 " returning a boolean");
3545 debug_generic_expr (op0_type);
3546 debug_generic_expr (op1_type);
3547 return true;
3548 }
3549 }
3550 /* Or a boolean vector type with the same element count
3551 as the comparison operand types. */
3552 else if (TREE_CODE (type) == VECTOR_TYPE
3553 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3554 {
3555 if (TREE_CODE (op0_type) != VECTOR_TYPE
3556 || TREE_CODE (op1_type) != VECTOR_TYPE)
3557 {
3558 error ("non-vector operands in vector comparison");
3559 debug_generic_expr (op0_type);
3560 debug_generic_expr (op1_type);
3561 return true;
3562 }
3563
3564 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3565 TYPE_VECTOR_SUBPARTS (op0_type)))
3566 {
3567 error ("invalid vector comparison resulting type");
3568 debug_generic_expr (type);
3569 return true;
3570 }
3571 }
3572 else
3573 {
3574 error ("bogus comparison result type");
3575 debug_generic_expr (type);
3576 return true;
3577 }
3578
3579 return false;
3580 }
3581
/* Verify a gimple assignment statement STMT with an unary rhs.
   Returns true if anything is wrong.

   The LHS must be a register and the operand a GIMPLE value; the rest
   of the checks are per-tree-code type constraints.  */

static bool
verify_gimple_assign_unary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of unary operation");
      return true;
    }

  if (!is_gimple_val (rhs1))
    {
      error ("invalid operand in unary operation");
      return true;
    }

  const char* const code_name = get_tree_code_name (rhs_code);

  /* First handle conversions.  */
  switch (rhs_code)
    {
    CASE_CONVERT:
      {
	/* Allow conversions between vectors with the same number of elements,
	   provided that the conversion is OK for the element types too.  */
	if (VECTOR_TYPE_P (lhs_type)
	    && VECTOR_TYPE_P (rhs1_type)
	    && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
			 TYPE_VECTOR_SUBPARTS (rhs1_type)))
	  {
	    /* Continue checking on the element types below.  */
	    lhs_type = TREE_TYPE (lhs_type);
	    rhs1_type = TREE_TYPE (rhs1_type);
	  }
	else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
	  {
	    error ("invalid vector types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	/* Allow conversions from pointer type to integral type only if
	   there is no sign or zero extension involved.
	   For targets were the precision of ptrofftype doesn't match that
	   of pointers we need to allow arbitrary conversions to ptrofftype.  */
	if ((POINTER_TYPE_P (lhs_type)
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (POINTER_TYPE_P (rhs1_type)
		&& INTEGRAL_TYPE_P (lhs_type)
		&& (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
		    || ptrofftype_p (lhs_type))))
	  return false;

	/* Allow conversion from integral to offset type and vice versa.  */
	if ((TREE_CODE (lhs_type) == OFFSET_TYPE
	     && INTEGRAL_TYPE_P (rhs1_type))
	    || (INTEGRAL_TYPE_P (lhs_type)
		&& TREE_CODE (rhs1_type) == OFFSET_TYPE))
	  return false;

	/* Otherwise assert we are converting between types of the
	   same kind.  */
	if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
	  {
	    error ("invalid types in nop conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case ADDR_SPACE_CONVERT_EXPR:
      {
	/* Both sides must be pointers and their pointed-to address
	   spaces must actually differ.  */
	if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
	    || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
		== TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
	  {
	    error ("invalid types in address space conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIXED_CONVERT_EXPR:
      {
	/* One side must be fixed-point, the other convertible to/from it.  */
	if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
	    && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
	  {
	    error ("invalid types in fixed-point conversion");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FLOAT_EXPR:
      {
	/* Integral (scalar or vector) source, float destination.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
	    && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
		|| !VECTOR_FLOAT_TYPE_P (lhs_type)))
	  {
	    error ("invalid types in conversion to floating-point");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case FIX_TRUNC_EXPR:
      {
	/* Float (scalar or vector) source, integral destination.  */
	if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
	    && (!VECTOR_INTEGER_TYPE_P (lhs_type)
		|| !VECTOR_FLOAT_TYPE_P (rhs1_type)))
	  {
	    error ("invalid types in conversion to integer");
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    return true;
	  }

	return false;
      }

    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
    case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
      /* Vector unpacking: result elements are twice the input element
	 size and half the element count; the int/float kinds of source
	 and destination must match the specific unpack variant.  */
      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
          || TREE_CODE (lhs_type) != VECTOR_TYPE
          || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
          || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	      && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
	  || ((rhs_code == VEC_UNPACK_HI_EXPR
	       || rhs_code == VEC_UNPACK_LO_EXPR)
	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
		  != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	  || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
	       || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
	      && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
	  || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
	       || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
	      && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		  || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
	  || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
			2 * GET_MODE_SIZE (element_mode (rhs1_type)))
	      && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
		  || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
		       TYPE_VECTOR_SUBPARTS (rhs1_type)))
	{
	  error ("type mismatch in %qs expression", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}

      return false;

    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case PAREN_EXPR:
    case CONJ_EXPR:
      /* These require matching types; checked by the common code below.  */
      break;

    case ABSU_EXPR:
      /* ABSU takes a signed integral operand and yields the unsigned
	 type of the same precision.  */
      if (!ANY_INTEGRAL_TYPE_P (lhs_type)
	  || !TYPE_UNSIGNED (lhs_type)
	  || !ANY_INTEGRAL_TYPE_P (rhs1_type)
	  || TYPE_UNSIGNED (rhs1_type)
	  || element_precision (lhs_type) != element_precision (rhs1_type))
	{
	  error ("invalid types for %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      return false;

    case VEC_DUPLICATE_EXPR:
      /* Broadcast of a scalar into a vector of that element type.  */
      if (TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
	{
	  error ("%qs should be from a scalar to a like vector", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      return false;

    default:
      gcc_unreachable ();
    }

  /* For the remaining codes assert there is no conversion involved.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in unary operation");
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  return false;
}
3808
/* Verify a gimple assignment statement STMT with a binary rhs.
   Returns true if anything is wrong.

   The LHS must be a register and both operands GIMPLE values; the
   remaining checks are per-tree-code type constraints, falling through
   to a generic "all three types match" check for plain arithmetic.  */

static bool
verify_gimple_assign_binary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);

  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of binary operation");
      return true;
    }

  if (!is_gimple_val (rhs1)
      || !is_gimple_val (rhs2))
    {
      error ("invalid operands in binary operation");
      return true;
    }

  const char* const code_name = get_tree_code_name (rhs_code);

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case COMPLEX_EXPR:
      {
	/* Builds a complex value from two scalar (int or float) parts.  */
	if (TREE_CODE (lhs_type) != COMPLEX_TYPE
	    || !(INTEGRAL_TYPE_P (rhs1_type)
	         || SCALAR_FLOAT_TYPE_P (rhs1_type))
	    || !(INTEGRAL_TYPE_P (rhs2_type)
	         || SCALAR_FLOAT_TYPE_P (rhs2_type)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      {
	/* Shifts and rotates are ok on integral types, fixed point
	   types and integer vector types.  */
	if ((!INTEGRAL_TYPE_P (rhs1_type)
	     && !FIXED_POINT_TYPE_P (rhs1_type)
	     && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		  && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
	    || (!INTEGRAL_TYPE_P (rhs2_type)
		/* Vector shifts of vectors are also ok.  */
		&& !(TREE_CODE (rhs1_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     && TREE_CODE (rhs2_type) == VECTOR_TYPE
		     && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
	    || !useless_type_conversion_p (lhs_type, rhs1_type))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case WIDEN_LSHIFT_EXPR:
      {
	/* Shift into a result type at least twice as wide; the shift
	   amount must be constant.  */
	if (!INTEGRAL_TYPE_P (lhs_type)
	    || !INTEGRAL_TYPE_P (rhs1_type)
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      {
	/* Vector variant of the widening shift: per-element widening
	   with a constant shift amount.  */
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	    || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
	    || TREE_CODE (rhs2) != INTEGER_CST
	    || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
		> TYPE_PRECISION (TREE_TYPE (lhs_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case PLUS_EXPR:
    case MINUS_EXPR:
      {
	tree lhs_etype = lhs_type;
	tree rhs1_etype = rhs1_type;
	tree rhs2_etype = rhs2_type;
	/* For vector operations check the element types instead.  */
	if (TREE_CODE (lhs_type) == VECTOR_TYPE)
	  {
	    if (TREE_CODE (rhs1_type) != VECTOR_TYPE
		|| TREE_CODE (rhs2_type) != VECTOR_TYPE)
	      {
		error ("invalid non-vector operands to %qs", code_name);
		return true;
	      }
	    lhs_etype = TREE_TYPE (lhs_type);
	    rhs1_etype = TREE_TYPE (rhs1_type);
	    rhs2_etype = TREE_TYPE (rhs2_type);
	  }
	/* Pointer arithmetic must use POINTER_PLUS_EXPR/POINTER_DIFF_EXPR
	   instead of plain PLUS/MINUS.  */
	if (POINTER_TYPE_P (lhs_etype)
	    || POINTER_TYPE_P (rhs1_etype)
	    || POINTER_TYPE_P (rhs2_etype))
	  {
	    error ("invalid (pointer) operands %qs", code_name);
	    return true;
	  }

	/* Continue with generic binary expression handling.  */
	break;
      }

    case POINTER_PLUS_EXPR:
      {
	/* Pointer plus a ptrofftype offset, yielding the same pointer
	   type.  */
	if (!POINTER_TYPE_P (rhs1_type)
	    || !useless_type_conversion_p (lhs_type, rhs1_type)
	    || !ptrofftype_p (rhs2_type))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case POINTER_DIFF_EXPR:
      {
	if (!POINTER_TYPE_P (rhs1_type)
	    || !POINTER_TYPE_P (rhs2_type)
	    /* Because we special-case pointers to void we allow difference
	       of arbitrary pointers with the same mode.  */
	    || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
	    || !INTEGRAL_TYPE_P (lhs_type)
	    || TYPE_UNSIGNED (lhs_type)
	    || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (lhs_type);
	    debug_generic_stmt (rhs1_type);
	    debug_generic_stmt (rhs2_type);
	    return true;
	  }

	return false;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      /* Truth operations are lowered before gimplification and must
	 never appear in GIMPLE.  */

      gcc_unreachable ();

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Comparisons are also binary, but the result type is not
	 connected to the operand types.  */
      return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);

    case WIDEN_MULT_EXPR:
      if (TREE_CODE (lhs_type) != INTEGER_TYPE)
	return true;
      /* Operands must have equal precision and the result must be at
	 least twice as wide.  */
      return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
	      || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));

    case WIDEN_SUM_EXPR:
      {
	/* Accumulation of RHS1 elements into the wider RHS2/LHS type.  */
        if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
	     && ((!INTEGRAL_TYPE_P (rhs1_type)
		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
		 || (!INTEGRAL_TYPE_P (lhs_type)
		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
	    || !useless_type_conversion_p (lhs_type, rhs2_type)
	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	/* Widening vector multiply: equal operand vectors, result
	   elements twice the operand element size.  */
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case VEC_PACK_TRUNC_EXPR:
      /* ???  We currently use VEC_PACK_TRUNC_EXPR to simply concat
	 vector boolean types.  */
      if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
	  && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
	  && types_compatible_p (rhs1_type, rhs2_type)
	  && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
		       2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
	return false;

      /* Fallthru.  */
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	/* Packing: two equal operand vectors, result has twice the
	   element count with elements half the size; the int/float
	   kinds must agree except for the FIX_TRUNC variant.  */
        if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	    || TREE_CODE (lhs_type) != VECTOR_TYPE
	    || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
		  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
		  && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
		 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
		     == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
			 2 * GET_MODE_SIZE (element_mode (lhs_type)))
	    || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
			 TYPE_VECTOR_SUBPARTS (lhs_type)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }

	return false;
      }

    case VEC_PACK_FLOAT_EXPR:
      /* Packing integer vectors into a float vector of half-size
	 elements.  */
      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
	  || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
	  || !types_compatible_p (rhs1_type, rhs2_type)
	  || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
		       2 * GET_MODE_SIZE (element_mode (lhs_type)))
	  || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}

      return false;

    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
      /* Continue with generic binary expression handling.  */
      break;

    case VEC_SERIES_EXPR:
      /* Base and step must have equal types; the result is a vector
	 of that element type.  */
      if (!useless_type_conversion_p (rhs1_type, rhs2_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      if (TREE_CODE (lhs_type) != VECTOR_TYPE
	  || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
	{
	  error ("vector type expected in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  return true;
	}
      return false;

    default:
      gcc_unreachable ();
    }

  /* Generic handling: all three types must trivially agree.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type)
      || !useless_type_conversion_p (lhs_type, rhs2_type))
    {
      error ("type mismatch in binary expression");
      debug_generic_stmt (lhs_type);
      debug_generic_stmt (rhs1_type);
      debug_generic_stmt (rhs2_type);
      return true;
    }

  return false;
}
4176
/* Verify a gimple assignment statement STMT with a ternary rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_ternary (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  tree rhs2 = gimple_assign_rhs2 (stmt);
  tree rhs2_type = TREE_TYPE (rhs2);
  tree rhs3 = gimple_assign_rhs3 (stmt);
  tree rhs3_type = TREE_TYPE (rhs3);

  /* Ternary results always live in registers.  */
  if (!is_gimple_reg (lhs))
    {
      error ("non-register as LHS of ternary operation");
      return true;
    }

  /* For (VEC_)COND_EXPR the first operand may be an embedded comparison,
     so it is checked with is_gimple_condexpr instead of is_gimple_val.  */
  if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
       ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
      || !is_gimple_val (rhs2)
      || !is_gimple_val (rhs3))
    {
      error ("invalid operands in ternary operation");
      return true;
    }

  const char* const code_name = get_tree_code_name (rhs_code);

  /* First handle operations that involve different types.  */
  switch (rhs_code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      /* rhs1 * rhs2 +/- rhs3: the multiplication operands share a narrow
	 type whose precision must fit twice into the accumulator type.  */
      if ((!INTEGRAL_TYPE_P (rhs1_type)
	   && !FIXED_POINT_TYPE_P (rhs1_type))
	  || !useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
	  || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_COND_EXPR:
      /* The mask must be a boolean vector with the same element count
	 as the value operands.  */
      if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("the first argument of a %qs must be of a "
		 "boolean vector type of the same number of elements "
		 "as the result", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      /* Fallthrough.  */
    case COND_EXPR:
      /* rhs1 may be an embedded comparison; verify it like a stand-alone
	 comparison statement in that case.  */
      if (!is_gimple_val (rhs1)
	  && (!is_gimple_condexpr (rhs1)
	      || verify_gimple_comparison (TREE_TYPE (rhs1),
					   TREE_OPERAND (rhs1, 0),
					   TREE_OPERAND (rhs1, 1),
					   TREE_CODE (rhs1))))
	return true;
      if (!useless_type_conversion_p (lhs_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}
      break;

    case VEC_PERM_EXPR:
      /* All four types must be vectors of equal element count; rhs3 is
	 the permutation selector.  */
      if (!useless_type_conversion_p (lhs_type, rhs1_type)
	  || !useless_type_conversion_p (lhs_type, rhs2_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
		    TYPE_VECTOR_SUBPARTS (rhs2_type))
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
		       TYPE_VECTOR_SUBPARTS (rhs3_type))
	  || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
		       TYPE_VECTOR_SUBPARTS (lhs_type)))
	{
	  error ("vectors with different element number found in %qs",
		 code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      /* The selector must be an integer vector; unless it is a constant,
	 its element width must match that of the data vectors.  */
      if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
	  || (TREE_CODE (rhs3) != VECTOR_CST
	      && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
				    (TREE_TYPE (rhs3_type)))
		  != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
				       (TREE_TYPE (rhs1_type))))))
	{
	  error ("invalid mask type in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case SAD_EXPR:
      /* Sum of absolute differences: narrow inputs rhs1/rhs2 accumulated
	 into rhs3/lhs, whose elements must be at least twice as wide.  */
      if (!useless_type_conversion_p (rhs1_type, rhs2_type)
	  || !useless_type_conversion_p (lhs_type, rhs3_type)
	  || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
	       > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      if (TREE_CODE (rhs1_type) != VECTOR_TYPE
	  || TREE_CODE (rhs2_type) != VECTOR_TYPE
	  || TREE_CODE (rhs3_type) != VECTOR_TYPE)
	{
	  error ("vector types expected in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  debug_generic_expr (rhs3_type);
	  return true;
	}

      return false;

    case BIT_INSERT_EXPR:
      /* Insert rhs2 into rhs1 at constant bit position rhs3.  */
      if (! useless_type_conversion_p (lhs_type, rhs1_type))
	{
	  error ("type mismatch in %qs", code_name);
	  debug_generic_expr (lhs_type);
	  debug_generic_expr (rhs1_type);
	  return true;
	}
      if (! ((INTEGRAL_TYPE_P (rhs1_type)
	      && INTEGRAL_TYPE_P (rhs2_type))
	     /* Vector element insert.  */
	     || (VECTOR_TYPE_P (rhs1_type)
		 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
	     /* Aligned sub-vector insert.  */
	     || (VECTOR_TYPE_P (rhs1_type)
		 && VECTOR_TYPE_P (rhs2_type)
		 && types_compatible_p (TREE_TYPE (rhs1_type),
					TREE_TYPE (rhs2_type))
		 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
				TYPE_VECTOR_SUBPARTS (rhs2_type))
		 && multiple_of_p (bitsizetype, rhs3, TYPE_SIZE (rhs2_type)))))
	{
	  error ("not allowed type combination in %qs", code_name);
	  debug_generic_expr (rhs1_type);
	  debug_generic_expr (rhs2_type);
	  return true;
	}
      /* The position must be a host-representable bitsizetype constant.  */
      if (! tree_fits_uhwi_p (rhs3)
	  || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
	  || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
	{
	  error ("invalid position or size in %qs", code_name);
	  return true;
	}
      if (INTEGRAL_TYPE_P (rhs1_type)
	  && !type_has_mode_precision_p (rhs1_type))
	{
	  error ("%qs into non-mode-precision operand", code_name);
	  return true;
	}
      if (INTEGRAL_TYPE_P (rhs1_type))
	{
	  /* The inserted bits must lie fully within rhs1's precision.  */
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
	  if (bitpos >= TYPE_PRECISION (rhs1_type)
	      || (bitpos + TYPE_PRECISION (rhs2_type)
		  > TYPE_PRECISION (rhs1_type)))
	    {
	      error ("insertion out of range in %qs", code_name);
	      return true;
	    }
	}
      else if (VECTOR_TYPE_P (rhs1_type))
	{
	  /* Vector inserts must land on an element (or sub-vector)
	     boundary.  */
	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
	  if (bitpos % bitsize != 0)
	    {
	      error ("%qs not at element boundary", code_name);
	      return true;
	    }
	}
      return false;

    case DOT_PROD_EXPR:
      {
	/* Dot product: rhs1 * rhs2 widened and accumulated into rhs3;
	   accumulator elements must be at least twice as wide.  */
	if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
	      || TREE_CODE (lhs_type) != VECTOR_TYPE)
	     && ((!INTEGRAL_TYPE_P (rhs1_type)
		  && !SCALAR_FLOAT_TYPE_P (rhs1_type))
		 || (!INTEGRAL_TYPE_P (lhs_type)
		     && !SCALAR_FLOAT_TYPE_P (lhs_type))))
	    || !types_compatible_p (rhs1_type, rhs2_type)
	    || !useless_type_conversion_p (lhs_type, rhs3_type)
	    || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
			 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_expr (lhs_type);
	    debug_generic_expr (rhs1_type);
	    debug_generic_expr (rhs2_type);
	    return true;
	  }
	return false;
      }

    case REALIGN_LOAD_EXPR:
      /* FIXME.  */
      return false;

    default:
      gcc_unreachable ();
    }
  return false;
}
4442
/* Verify a gimple assignment statement STMT with a single rhs.
   Returns true if anything is wrong.  */

static bool
verify_gimple_assign_single (gassign *stmt)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  tree lhs = gimple_assign_lhs (stmt);
  tree lhs_type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  tree rhs1_type = TREE_TYPE (rhs1);
  bool res = false;

  const char* const code_name = get_tree_code_name (rhs_code);

  /* A single-rhs assignment is a plain copy; types must agree exactly.  */
  if (!useless_type_conversion_p (lhs_type, rhs1_type))
    {
      error ("non-trivial conversion in %qs", code_name);
      debug_generic_expr (lhs_type);
      debug_generic_expr (rhs1_type);
      return true;
    }

  /* Clobbers may only apply to whole decls or MEM_REFs.  */
  if (gimple_clobber_p (stmt)
      && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
    {
      error ("%qs LHS in clobber statement",
	     get_tree_code_name (TREE_CODE (lhs)));
      debug_generic_expr (lhs);
      return true;
    }

  if (handled_component_p (lhs)
      || TREE_CODE (lhs) == MEM_REF
      || TREE_CODE (lhs) == TARGET_MEM_REF)
    res |= verify_types_in_gimple_reference (lhs, true);

  /* Special codes we cannot handle via their class.  */
  switch (rhs_code)
    {
    case ADDR_EXPR:
      {
	tree op = TREE_OPERAND (rhs1, 0);
	if (!is_gimple_addressable (op))
	  {
	    error ("invalid operand in %qs", code_name);
	    return true;
	  }

	/* Technically there is no longer a need for matching types, but
	   gimple hygiene asks for this check.  In LTO we can end up
	   combining incompatible units and thus end up with addresses
	   of globals that change their type to a common one.  */
	if (!in_lto_p
	    && !types_compatible_p (TREE_TYPE (op),
				    TREE_TYPE (TREE_TYPE (rhs1)))
	    && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
							  TREE_TYPE (op)))
	  {
	    error ("type mismatch in %qs", code_name);
	    debug_generic_stmt (TREE_TYPE (rhs1));
	    debug_generic_stmt (TREE_TYPE (op));
	    return true;
	  }

	return (verify_address (rhs1, true)
		|| verify_types_in_gimple_reference (op, true));
      }

    /* tcc_reference  */
    case INDIRECT_REF:
      /* INDIRECT_REF is lowered to MEM_REF before gimplification.  */
      error ("%qs in gimple IL", code_name);
      return true;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case TARGET_MEM_REF:
    case MEM_REF:
      /* A memory load may only feed a store if the LHS is itself memory
	 of aggregate type; register-typed stores need a register RHS.  */
      if (!is_gimple_reg (lhs)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid RHS for gimple memory store: %qs", code_name);
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res || verify_types_in_gimple_reference (rhs1, false);

    /* tcc_constant  */
    case SSA_NAME:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case COMPLEX_CST:
    case VECTOR_CST:
    case STRING_CST:
      return res;

    /* tcc_declaration  */
    case CONST_DECL:
      return res;
    case VAR_DECL:
    case PARM_DECL:
      /* Same register/memory discipline as for the reference codes.  */
      if (!is_gimple_reg (lhs)
	  && !is_gimple_reg (rhs1)
	  && is_gimple_reg_type (TREE_TYPE (lhs)))
	{
	  error ("invalid RHS for gimple memory store: %qs", code_name);
	  debug_generic_stmt (lhs);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case CONSTRUCTOR:
      if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
	{
	  unsigned int i;
	  tree elt_i, elt_v, elt_t = NULL_TREE;

	  if (CONSTRUCTOR_NELTS (rhs1) == 0)
	    return res;
	  /* For vector CONSTRUCTORs we require that either it is empty
	     CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
	     (then the element count must be correct to cover the whole
	     outer vector and index must be NULL on all elements, or it is
	     a CONSTRUCTOR of scalar elements, where we as an exception allow
	     smaller number of elements (assuming zero filling) and
	     consecutive indexes as compared to NULL indexes (such
	     CONSTRUCTORs can appear in the IL from FEs).  */
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
	    {
	      if (elt_t == NULL_TREE)
		{
		  /* First element determines the expected element type for
		     the rest of the CONSTRUCTOR.  */
		  elt_t = TREE_TYPE (elt_v);
		  if (TREE_CODE (elt_t) == VECTOR_TYPE)
		    {
		      /* NOTE(review): this inner elt_t shadows the outer
			 one with the same value; harmless but redundant.  */
		      tree elt_t = TREE_TYPE (elt_v);
		      if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						      TREE_TYPE (elt_t)))
			{
			  error ("incorrect type of vector %qs elements",
				 code_name);
			  debug_generic_stmt (rhs1);
			  return true;
			}
		      /* Sub-vector elements must exactly tile the outer
			 vector.  */
		      else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
					 * TYPE_VECTOR_SUBPARTS (elt_t),
					 TYPE_VECTOR_SUBPARTS (rhs1_type)))
			{
			  error ("incorrect number of vector %qs elements",
				 code_name);
			  debug_generic_stmt (rhs1);
			  return true;
			}
		    }
		  else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
						       elt_t))
		    {
		      error ("incorrect type of vector %qs elements",
			     code_name);
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		  /* Scalar elements may be fewer than the vector lanes
		     (implying zero-fill) but never more.  */
		  else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
				     TYPE_VECTOR_SUBPARTS (rhs1_type)))
		    {
		      error ("incorrect number of vector %qs elements",
			     code_name);
		      debug_generic_stmt (rhs1);
		      return true;
		    }
		}
	      else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
		{
		  error ("incorrect type of vector CONSTRUCTOR elements");
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      /* Indexes, when present, must be consecutive integer
		 constants matching the position; sub-vector CONSTRUCTORs
		 must use NULL indexes throughout.  */
	      if (elt_i != NULL_TREE
		  && (TREE_CODE (elt_t) == VECTOR_TYPE
		      || TREE_CODE (elt_i) != INTEGER_CST
		      || compare_tree_int (elt_i, i) != 0))
		{
		  error ("vector %qs with non-NULL element index",
			 code_name);
		  debug_generic_stmt (rhs1);
		  return true;
		}
	      if (!is_gimple_val (elt_v))
		{
		  error ("vector %qs element is not a GIMPLE value",
			 code_name);
		  debug_generic_stmt (rhs1);
		  return true;
		}
	    }
	}
      else if (CONSTRUCTOR_NELTS (rhs1) != 0)
	{
	  /* Non-vector CONSTRUCTORs in gimple must be empty.  */
	  error ("non-vector %qs with elements", code_name);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      return res;

    case ASSERT_EXPR:
      /* FIXME.  */
      rhs1 = fold (ASSERT_EXPR_COND (rhs1));
      if (rhs1 == boolean_false_node)
	{
	  error ("%qs with an always-false condition", code_name);
	  debug_generic_stmt (rhs1);
	  return true;
	}
      break;

    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
      /* FIXME.  */
      return res;

    default:;
    }

  return res;
}
4675
4676 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4677 is a problem, otherwise false. */
4678
4679 static bool
verify_gimple_assign(gassign * stmt)4680 verify_gimple_assign (gassign *stmt)
4681 {
4682 switch (gimple_assign_rhs_class (stmt))
4683 {
4684 case GIMPLE_SINGLE_RHS:
4685 return verify_gimple_assign_single (stmt);
4686
4687 case GIMPLE_UNARY_RHS:
4688 return verify_gimple_assign_unary (stmt);
4689
4690 case GIMPLE_BINARY_RHS:
4691 return verify_gimple_assign_binary (stmt);
4692
4693 case GIMPLE_TERNARY_RHS:
4694 return verify_gimple_assign_ternary (stmt);
4695
4696 default:
4697 gcc_unreachable ();
4698 }
4699 }
4700
4701 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4702 is a problem, otherwise false. */
4703
4704 static bool
verify_gimple_return(greturn * stmt)4705 verify_gimple_return (greturn *stmt)
4706 {
4707 tree op = gimple_return_retval (stmt);
4708 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4709
4710 /* We cannot test for present return values as we do not fix up missing
4711 return values from the original source. */
4712 if (op == NULL)
4713 return false;
4714
4715 if (!is_gimple_val (op)
4716 && TREE_CODE (op) != RESULT_DECL)
4717 {
4718 error ("invalid operand in return statement");
4719 debug_generic_stmt (op);
4720 return true;
4721 }
4722
4723 if ((TREE_CODE (op) == RESULT_DECL
4724 && DECL_BY_REFERENCE (op))
4725 || (TREE_CODE (op) == SSA_NAME
4726 && SSA_NAME_VAR (op)
4727 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4728 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4729 op = TREE_TYPE (op);
4730
4731 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4732 {
4733 error ("invalid conversion in return statement");
4734 debug_generic_stmt (restype);
4735 debug_generic_stmt (TREE_TYPE (op));
4736 return true;
4737 }
4738
4739 return false;
4740 }
4741
4742
4743 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4744 is a problem, otherwise false. */
4745
4746 static bool
verify_gimple_goto(ggoto * stmt)4747 verify_gimple_goto (ggoto *stmt)
4748 {
4749 tree dest = gimple_goto_dest (stmt);
4750
4751 /* ??? We have two canonical forms of direct goto destinations, a
4752 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4753 if (TREE_CODE (dest) != LABEL_DECL
4754 && (!is_gimple_val (dest)
4755 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4756 {
4757 error ("goto destination is neither a label nor a pointer");
4758 return true;
4759 }
4760
4761 return false;
4762 }
4763
/* Verify the contents of a GIMPLE_SWITCH STMT.  Returns true when there
   is a problem, otherwise false.  */

static bool
verify_gimple_switch (gswitch *stmt)
{
  unsigned int i, n;
  tree elt, prev_upper_bound = NULL_TREE;
  tree index_type, elt_type = NULL_TREE;

  if (!is_gimple_val (gimple_switch_index (stmt)))
    {
      error ("invalid operand to switch statement");
      debug_generic_stmt (gimple_switch_index (stmt));
      return true;
    }

  index_type = TREE_TYPE (gimple_switch_index (stmt));
  if (! INTEGRAL_TYPE_P (index_type))
    {
      error ("non-integral type switch statement");
      debug_generic_expr (index_type);
      return true;
    }

  /* Label 0 is always the default case and must carry no CASE_LOW,
     CASE_HIGH or CASE_CHAIN.  */
  elt = gimple_switch_label (stmt, 0);
  if (CASE_LOW (elt) != NULL_TREE
      || CASE_HIGH (elt) != NULL_TREE
      || CASE_CHAIN (elt) != NULL_TREE)
    {
      error ("invalid default case label in switch statement");
      debug_generic_expr (elt);
      return true;
    }

  /* Walk the remaining labels checking well-formedness, type
     consistency and that they are sorted with disjoint ranges.  */
  n = gimple_switch_num_labels (stmt);
  for (i = 1; i < n; i++)
    {
      elt = gimple_switch_label (stmt, i);

      /* CASE_CHAIN is only used before switch lowering.  */
      if (CASE_CHAIN (elt))
	{
	  error ("invalid %<CASE_CHAIN%>");
	  debug_generic_expr (elt);
	  return true;
	}
      if (! CASE_LOW (elt))
	{
	  error ("invalid case label in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}
      /* A range label must have CASE_LOW strictly below CASE_HIGH.  */
      if (CASE_HIGH (elt)
	  && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
	{
	  error ("invalid case range in switch statement");
	  debug_generic_expr (elt);
	  return true;
	}

      if (elt_type)
	{
	  /* All case labels must share the type established by the
	     first one.  */
	  if (TREE_TYPE (CASE_LOW (elt)) != elt_type
	      || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
	    {
	      error ("type mismatch for case label in switch statement");
	      debug_generic_expr (elt);
	      return true;
	    }
	}
      else
	{
	  elt_type = TREE_TYPE (CASE_LOW (elt));
	  if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
	    {
	      error ("type precision mismatch in switch statement");
	      return true;
	    }
	}

      /* Labels must be sorted and non-overlapping: this label's low
	 bound must be above the previous label's upper bound.  */
      if (prev_upper_bound)
	{
	  if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
	    {
	      error ("case labels not sorted in switch statement");
	      return true;
	    }
	}

      prev_upper_bound = CASE_HIGH (elt);
      if (! prev_upper_bound)
	prev_upper_bound = CASE_LOW (elt);
    }

  return false;
}
4860
4861 /* Verify a gimple debug statement STMT.
4862 Returns true if anything is wrong. */
4863
4864 static bool
verify_gimple_debug(gimple * stmt ATTRIBUTE_UNUSED)4865 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4866 {
4867 /* There isn't much that could be wrong in a gimple debug stmt. A
4868 gimple debug bind stmt, for example, maps a tree, that's usually
4869 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4870 component or member of an aggregate type, to another tree, that
4871 can be an arbitrary expression. These stmts expand into debug
4872 insns, and are converted to debug notes by var-tracking.c. */
4873 return false;
4874 }
4875
4876 /* Verify a gimple label statement STMT.
4877 Returns true if anything is wrong. */
4878
4879 static bool
verify_gimple_label(glabel * stmt)4880 verify_gimple_label (glabel *stmt)
4881 {
4882 tree decl = gimple_label_label (stmt);
4883 int uid;
4884 bool err = false;
4885
4886 if (TREE_CODE (decl) != LABEL_DECL)
4887 return true;
4888 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4889 && DECL_CONTEXT (decl) != current_function_decl)
4890 {
4891 error ("label context is not the current function declaration");
4892 err |= true;
4893 }
4894
4895 uid = LABEL_DECL_UID (decl);
4896 if (cfun->cfg
4897 && (uid == -1
4898 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4899 {
4900 error ("incorrect entry in %<label_to_block_map%>");
4901 err |= true;
4902 }
4903
4904 uid = EH_LANDING_PAD_NR (decl);
4905 if (uid)
4906 {
4907 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4908 if (decl != lp->post_landing_pad)
4909 {
4910 error ("incorrect setting of landing pad number");
4911 err |= true;
4912 }
4913 }
4914
4915 return err;
4916 }
4917
4918 /* Verify a gimple cond statement STMT.
4919 Returns true if anything is wrong. */
4920
4921 static bool
verify_gimple_cond(gcond * stmt)4922 verify_gimple_cond (gcond *stmt)
4923 {
4924 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4925 {
4926 error ("invalid comparison code in gimple cond");
4927 return true;
4928 }
4929 if (!(!gimple_cond_true_label (stmt)
4930 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4931 || !(!gimple_cond_false_label (stmt)
4932 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4933 {
4934 error ("invalid labels in gimple cond");
4935 return true;
4936 }
4937
4938 return verify_gimple_comparison (boolean_type_node,
4939 gimple_cond_lhs (stmt),
4940 gimple_cond_rhs (stmt),
4941 gimple_cond_code (stmt));
4942 }
4943
/* Verify the GIMPLE statement STMT.  Returns true if there is an
   error, otherwise false.  Dispatches to the per-code checker.  */

static bool
verify_gimple_stmt (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return verify_gimple_assign (as_a <gassign *> (stmt));

    case GIMPLE_LABEL:
      return verify_gimple_label (as_a <glabel *> (stmt));

    case GIMPLE_CALL:
      return verify_gimple_call (as_a <gcall *> (stmt));

    case GIMPLE_COND:
      return verify_gimple_cond (as_a <gcond *> (stmt));

    case GIMPLE_GOTO:
      return verify_gimple_goto (as_a <ggoto *> (stmt));

    case GIMPLE_SWITCH:
      return verify_gimple_switch (as_a <gswitch *> (stmt));

    case GIMPLE_RETURN:
      return verify_gimple_return (as_a <greturn *> (stmt));

    case GIMPLE_ASM:
      /* Inline asm operands are not verified here.  */
      return false;

    case GIMPLE_TRANSACTION:
      return verify_gimple_transaction (as_a <gtransaction *> (stmt));

    /* Tuples that do not have tree operands.  */
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
    case GIMPLE_RESX:
    case GIMPLE_EH_DISPATCH:
    case GIMPLE_EH_MUST_NOT_THROW:
      return false;

    CASE_GIMPLE_OMP:
      /* OpenMP directives are validated by the FE and never operated
	 on by the optimizers.  Furthermore, GIMPLE_OMP_FOR may contain
	 non-gimple expressions when the main index variable has had
	 its address taken.  This does not affect the loop itself
	 because the header of an GIMPLE_OMP_FOR is merely used to determine
	 how to setup the parallel iteration.  */
      return false;

    case GIMPLE_DEBUG:
      return verify_gimple_debug (stmt);

    default:
      gcc_unreachable ();
    }
}
5003
/* Verify the contents of a GIMPLE_PHI.  Returns true if there is a problem,
   and false otherwise.  */

static bool
verify_gimple_phi (gphi *phi)
{
  bool err = false;
  unsigned i;
  tree phi_result = gimple_phi_result (phi);
  bool virtual_p;

  if (!phi_result)
    {
      error ("invalid %<PHI%> result");
      return true;
    }

  /* A virtual PHI result must be an SSA name of the function's single
     virtual operand variable.  */
  virtual_p = virtual_operand_p (phi_result);
  if (TREE_CODE (phi_result) != SSA_NAME
      || (virtual_p
	  && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
    {
      error ("invalid %<PHI%> result");
      err = true;
    }

  /* Each argument must match the result's virtual/real kind.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree t = gimple_phi_arg_def (phi, i);

      if (!t)
	{
	  error ("missing %<PHI%> def");
	  err |= true;
	  continue;
	}
      /* Addressable variables do have SSA_NAMEs but they
	 are not considered gimple values.  */
      else if ((TREE_CODE (t) == SSA_NAME
		&& virtual_p != virtual_operand_p (t))
	       || (virtual_p
		   && (TREE_CODE (t) != SSA_NAME
		       || SSA_NAME_VAR (t) != gimple_vop (cfun)))
	       || (!virtual_p
		   && !is_gimple_val (t)))
	{
	  error ("invalid %<PHI%> argument");
	  debug_generic_expr (t);
	  err |= true;
	}
#ifdef ENABLE_TYPES_CHECKING
      /* With extra checking enabled also require argument types to
	 agree with the result type.  */
      if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
	{
	  error ("incompatible types in %<PHI%> argument %u", i);
	  debug_generic_stmt (TREE_TYPE (phi_result));
	  debug_generic_stmt (TREE_TYPE (t));
	  err |= true;
	}
#endif
    }

  return err;
}
5067
/* Verify the GIMPLE statements inside the sequence STMTS.  Recurses
   into nested sequences of container tuples (binds, try blocks, EH
   regions, transactions); all other statements are handed to
   verify_gimple_stmt.  Returns true if any statement is invalid.  */

static bool
verify_gimple_in_seq_2 (gimple_seq stmts)
{
  gimple_stmt_iterator ittr;
  bool err = false;

  for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
    {
      gimple *stmt = gsi_stmt (ittr);

      switch (gimple_code (stmt))
	{
	case GIMPLE_BIND:
	  err |= verify_gimple_in_seq_2 (
		   gimple_bind_body (as_a <gbind *> (stmt)));
	  break;

	case GIMPLE_TRY:
	  /* Both the protected body and the cleanup/handler sequence
	     must verify.  */
	  err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
	  err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
	  break;

	case GIMPLE_EH_FILTER:
	  err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
	  break;

	case GIMPLE_EH_ELSE:
	  {
	    geh_else *eh_else = as_a <geh_else *> (stmt);
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
	    err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
	  }
	  break;

	case GIMPLE_CATCH:
	  err |= verify_gimple_in_seq_2 (gimple_catch_handler (
					   as_a <gcatch *> (stmt)));
	  break;

	case GIMPLE_TRANSACTION:
	  err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
	  break;

	default:
	  {
	    /* Leaf statement: verify it and dump it when broken so the
	       failure is identifiable in the log.  */
	    bool err2 = verify_gimple_stmt (stmt);
	    if (err2)
	      debug_gimple_stmt (stmt);
	    err |= err2;
	  }
	}
    }

  return err;
}
5125
5126 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5127 is a problem, otherwise false. */
5128
5129 static bool
verify_gimple_transaction(gtransaction * stmt)5130 verify_gimple_transaction (gtransaction *stmt)
5131 {
5132 tree lab;
5133
5134 lab = gimple_transaction_label_norm (stmt);
5135 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5136 return true;
5137 lab = gimple_transaction_label_uninst (stmt);
5138 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5139 return true;
5140 lab = gimple_transaction_label_over (stmt);
5141 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5142 return true;
5143
5144 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5145 }
5146
5147
5148 /* Verify the GIMPLE statements inside the statement list STMTS. */
5149
5150 DEBUG_FUNCTION void
verify_gimple_in_seq(gimple_seq stmts)5151 verify_gimple_in_seq (gimple_seq stmts)
5152 {
5153 timevar_push (TV_TREE_STMT_VERIFY);
5154 if (verify_gimple_in_seq_2 (stmts))
5155 internal_error ("%<verify_gimple%> failed");
5156 timevar_pop (TV_TREE_STMT_VERIFY);
5157 }
5158
5159 /* Return true when the T can be shared. */
5160
5161 static bool
tree_node_can_be_shared(tree t)5162 tree_node_can_be_shared (tree t)
5163 {
5164 if (IS_TYPE_OR_DECL_P (t)
5165 || TREE_CODE (t) == SSA_NAME
5166 || TREE_CODE (t) == IDENTIFIER_NODE
5167 || TREE_CODE (t) == CASE_LABEL_EXPR
5168 || is_gimple_min_invariant (t))
5169 return true;
5170
5171 if (t == error_mark_node)
5172 return true;
5173
5174 return false;
5175 }
5176
5177 /* Called via walk_tree. Verify tree sharing. */
5178
5179 static tree
verify_node_sharing_1(tree * tp,int * walk_subtrees,void * data)5180 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5181 {
5182 hash_set<void *> *visited = (hash_set<void *> *) data;
5183
5184 if (tree_node_can_be_shared (*tp))
5185 {
5186 *walk_subtrees = false;
5187 return NULL;
5188 }
5189
5190 if (visited->add (*tp))
5191 return *tp;
5192
5193 return NULL;
5194 }
5195
5196 /* Called via walk_gimple_stmt. Verify tree sharing. */
5197
5198 static tree
verify_node_sharing(tree * tp,int * walk_subtrees,void * data)5199 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5200 {
5201 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5202 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5203 }
5204
5205 static bool eh_error_found;
5206 bool
verify_eh_throw_stmt_node(gimple * const & stmt,const int &,hash_set<gimple * > * visited)5207 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5208 hash_set<gimple *> *visited)
5209 {
5210 if (!visited->contains (stmt))
5211 {
5212 error ("dead statement in EH table");
5213 debug_gimple_stmt (stmt);
5214 eh_error_found = true;
5215 }
5216 return true;
5217 }
5218
5219 /* Verify if the location LOCs block is in BLOCKS. */
5220
5221 static bool
verify_location(hash_set<tree> * blocks,location_t loc)5222 verify_location (hash_set<tree> *blocks, location_t loc)
5223 {
5224 tree block = LOCATION_BLOCK (loc);
5225 if (block != NULL_TREE
5226 && !blocks->contains (block))
5227 {
5228 error ("location references block not in block tree");
5229 return true;
5230 }
5231 if (block != NULL_TREE)
5232 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5233 return false;
5234 }
5235
5236 /* Called via walk_tree. Verify that expressions have no blocks. */
5237
5238 static tree
verify_expr_no_block(tree * tp,int * walk_subtrees,void *)5239 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5240 {
5241 if (!EXPR_P (*tp))
5242 {
5243 *walk_subtrees = false;
5244 return NULL;
5245 }
5246
5247 location_t loc = EXPR_LOCATION (*tp);
5248 if (LOCATION_BLOCK (loc) != NULL)
5249 return *tp;
5250
5251 return NULL;
5252 }
5253
/* Called via walk_tree.  Verify locations of expressions.  DATA is
   the hash_set of BLOCKs collected from the function's block tree;
   return the offending tree on error, NULL otherwise.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *blocks = (hash_set<tree> *) data;
  tree t = *tp;

  /* ??? This doesn't really belong here but there's no good place to
     stick this remainder of old verify_expr.  */
  /* ??? This barfs on debug stmts which contain binds to vars with
     different function context.  */
#if 0
  if (VAR_P (t)
      || TREE_CODE (t) == PARM_DECL
      || TREE_CODE (t) == RESULT_DECL)
    {
      tree context = decl_function_context (t);
      if (context != cfun->decl
	  && !SCOPE_FILE_SCOPE_P (context)
	  && !TREE_STATIC (t)
	  && !DECL_EXTERNAL (t))
	{
	  error ("local declaration from a different function");
	  return t;
	}
    }
#endif

  /* A variable's DEBUG_EXPR must itself contain no expressions with
     location blocks.  */
  if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
    {
      tree x = DECL_DEBUG_EXPR (t);
      tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  /* Likewise for the VALUE_EXPR of variables, parameters and
     result declarations.  */
  if ((VAR_P (t)
       || TREE_CODE (t) == PARM_DECL
       || TREE_CODE (t) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (t))
    {
      tree x = DECL_VALUE_EXPR (t);
      tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  /* Only expressions carry locations worth checking; don't walk
     into non-expression trees.  */
  if (!EXPR_P (t))
    {
      *walk_subtrees = false;
      return NULL;
    }

  /* The expression's own location block chain must be part of the
     collected block tree.  */
  location_t loc = EXPR_LOCATION (t);
  if (verify_location (blocks, loc))
    return t;

  return NULL;
}
5313
5314 /* Called via walk_gimple_op. Verify locations of expressions. */
5315
5316 static tree
verify_expr_location(tree * tp,int * walk_subtrees,void * data)5317 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5318 {
5319 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5320 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5321 }
5322
5323 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5324
5325 static void
collect_subblocks(hash_set<tree> * blocks,tree block)5326 collect_subblocks (hash_set<tree> *blocks, tree block)
5327 {
5328 tree t;
5329 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5330 {
5331 blocks->add (t);
5332 collect_subblocks (blocks, t);
5333 }
5334 }
5335
5336 /* Disable warnings about missing quoting in GCC diagnostics for
5337 the verification errors. Their format strings don't follow
5338 GCC diagnostic conventions and trigger an ICE in the end. */
5339 #if __GNUC__ >= 10
5340 # pragma GCC diagnostic push
5341 # pragma GCC diagnostic ignored "-Wformat-diag"
5342 #endif
5343
/* Verify the GIMPLE statements in the CFG of FN: statement validity,
   tree-node sharing, location/block consistency, and EH table
   consistency.  If VERIFY_NOTHROW, additionally complain about
   statements marked as throwing that provably cannot throw.  Calls
   internal_error on any failure.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  /* Nodes seen so far, for the tree-sharing check.  */
  hash_set<void *> visited;
  /* Statements found in the IL that are in an EH region; compared
     against the EH table at the end.  */
  hash_set<gimple *> visited_throwing_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;
      edge_iterator ei;
      edge e;

      /* First check the PHI nodes of the block.  */
      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      /* Check tree sharing within the PHI argument.  */
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      /* Virtual PHIs carry no argument locations.  */
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      /* Check expression locations and the argument's own
		 location against the block tree.  */
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      /* Then check each regular statement of the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  /* Check tree sharing over all operands of the statement.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* Check expression locations over all operands.  */
	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr != 0)
	    visited_throwing_stmts.add (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (cfun, stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  /* A throwing statement must end its basic block.  */
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}

      /* Goto locus on outgoing edges must reference known blocks too.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->goto_locus != UNKNOWN_LOCATION)
	  err |= verify_location (&blocks, e->goto_locus);
    }

  /* Finally verify that every entry in the EH throw table refers to a
     statement that still exists in the IL.  */
  hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
  eh_error_found = false;
  if (eh_table)
    eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
      (&visited_throwing_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
5508
5509
/* Verifies that the flow information is OK: entry/exit blocks carry
   no IL, labels appear only at block starts and map back to their
   block, control statements end their block, and outgoing edge flags
   match the terminating statement.  Returns nonzero on error.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge e;
  edge_iterator ei;

  /* The artificial entry and exit blocks must be empty.  */
  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  /* No block may fall through into the exit block.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple *prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  /* Nonlocal labels must come first in the label run.  */
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* Likewise EH landing pad labels.  */
	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  /* The label-to-block map must agree with the label's
	     actual position.  */
	  if (label_to_block (cfun, label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  /* A control statement seen earlier means anything after it
	     is mid-block control flow.  */
	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  /* Labels may only appear in the leading run skipped above.  */
	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (label_stmt));
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      /* Now check the last real statement against the outgoing edges.  */
      gsi = gsi_last_nondebug_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      /* Control statements cannot have a fallthru successor.  */
      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      /* Per-statement-kind checks of the outgoing edges.  */
      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    /* A COND must have exactly one true and one false edge,
	       neither fallthru nor abnormal.  */
	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  /* Simple gotos are represented implicitly by edges; only
	     computed gotos may remain, with abnormal edges.  */
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  /* Only __builtin_return calls get the GIMPLE_RETURN
	     treatment below.  */
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* fallthru */
	case GIMPLE_RETURN:
	  /* A return must have a single plain edge to the exit block.  */
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  Uses bb->aux as
	       scratch: 1 = case target, 2 = also reached by an edge;
	       cleared again before leaving this case.  */
	    for (i = 0; i < n; ++i)
	      {
		basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    /* Every outgoing edge must target some case label's block.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		basic_block label_bb = gimple_switch_label_bb (cfun,
							       switch_stmt, i);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    /* Reset the scratch aux markers.  */
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
5820
5821 #if __GNUC__ >= 10
5822 # pragma GCC diagnostic pop
5823 #endif
5824
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  The forwarder (FALLTHRU->src) keeps the original
   PHIs with fresh results; the old destination gets new PHIs fed by
   FALLTHRU and by the arguments pending on the redirected edges.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;
  bool forward_location_p;

  dummy = fallthru->src;
  bb = fallthru->dest;

  /* Nothing to do when BB has a single predecessor: its PHIs (if any)
     need no splitting.  */
  if (single_pred_p (bb))
    return;

  /* We can forward location info if we have only one predecessor.  */
  forward_location_p = single_pred_p (dummy);

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      /* NEW_PHI in BB takes over VAR; the old PHI in the forwarder
	 gets a fresh SSA name, which then feeds NEW_PHI through the
	 fallthru edge.  */
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   forward_location_p
		   ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
5871
5872
/* Return a non-special label in the head of basic block BB.
   Create one if it doesn't exist.  A usable existing label that is
   not first in the block is moved to the front so callers can rely
   on it heading the label run.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  /* Scan the leading run of labels for one that is not nonlocal.  */
  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  /* Move the label to the start of the block so nonlocal
	     labels don't precede it.  */
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  /* No usable label found; create an artificial one at the start.  */
  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
5903
5904
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple *stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  The boolean
	 (EDGE_SUCC (src, 0) == e) selects index 1 when edge 0 is E,
	 index 0 otherwise — i.e. "the other edge".  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  /* Only conditional jumps and switches can be discarded; both edges
     go to TARGET so the statement is now pointless.  */
  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
5942
5943
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  Adjusts the statement
   ending E->src (cond, switch, asm goto, eh dispatch, transaction,
   ...) so its labels/semantics agree with the new destination.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple *stmt;

  /* Abnormal edges cannot be redirected.  */
  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  /* EH edges have their own redirection machinery.  */
  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      /* Try the cheap route first: drop the jump entirely when all
	 successors would coincide.  */
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_nondebug_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    /* Point every case on E's chain at the new label.  */
	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		/* Splice E's chain onto the front of E2's chain.  */
		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    /* Slow path: rewrite every case label that targeted the
	       old destination.  */
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	/* Rewrite asm-goto label operands that targeted the old
	   destination.  */
	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      /* The return is discarded; the edge becomes a plain fallthru
	 (DEST is not the exit block anymore).  */
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* Update the matching transaction label for the kind of edge
	 being redirected.  */
      if (e->flags & EDGE_TM_ABORT)
	gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      else if (e->flags & EDGE_TM_UNINSTRUMENTED)
	gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
					     gimple_block_label (dest));
      else
	gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
					   gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
6098
6099 /* Returns true if it is possible to remove edge E by redirecting
6100 it to the destination of the other edge from E->src. */
6101
6102 static bool
gimple_can_remove_branch_p(const_edge e)6103 gimple_can_remove_branch_p (const_edge e)
6104 {
6105 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6106 return false;
6107
6108 return true;
6109 }
6110
6111 /* Simple wrapper, as we can always redirect fallthru edges. */
6112
6113 static basic_block
gimple_redirect_edge_and_branch_force(edge e,basic_block dest)6114 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6115 {
6116 e = gimple_redirect_edge_and_branch (e, dest);
6117 gcc_assert (e);
6118
6119 return NULL;
6120 }
6121
6122
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.
   Returns the newly created block holding everything after the split
   point; BB keeps its labels and the statements up to STMT.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  The successor vector is moved
     wholesale to NEW_BB; BB's will be rebuilt by the caller.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  */
  if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple *) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
6169
6170
6171 /* Moves basic block BB after block AFTER. */
6172
6173 static bool
gimple_move_block_after(basic_block bb,basic_block after)6174 gimple_move_block_after (basic_block bb, basic_block after)
6175 {
6176 if (bb->prev_bb == after)
6177 return true;
6178
6179 unlink_block (bb);
6180 link_block (bb, after);
6181
6182 return true;
6183 }
6184
6185
6186 /* Return TRUE if block BB has no executable statements, otherwise return
6187 FALSE. */
6188
6189 static bool
gimple_empty_block_p(basic_block bb)6190 gimple_empty_block_p (basic_block bb)
6191 {
6192 /* BB must have no executable statements. */
6193 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6194 if (phi_nodes (bb))
6195 return false;
6196 while (!gsi_end_p (gsi))
6197 {
6198 gimple *stmt = gsi_stmt (gsi);
6199 if (is_gimple_debug (stmt))
6200 ;
6201 else if (gimple_code (stmt) == GIMPLE_NOP
6202 || gimple_code (stmt) == GIMPLE_PREDICT)
6203 ;
6204 else
6205 return false;
6206 gsi_next (&gsi);
6207 }
6208 return true;
6209 }
6210
6211
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  Returns the block created by
   the split (holding the condition/switch), or NULL when BB does not
   end in a cond/switch.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple *last, *split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  /* Split just before the cond/switch statement.  NOTE(review): when
     the cond/switch is the only non-debug statement, gsi_prev leaves
     the iterator at the end position — presumably gsi_stmt then yields
     a split point that split_block handles; confirm against the
     gsi_prev/split_block contracts.  */
  gsi_prev (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}
6230
6231
6232 /* Return true if basic_block can be duplicated. */
6233
6234 static bool
gimple_can_duplicate_bb_p(const_basic_block bb ATTRIBUTE_UNUSED)6235 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
6236 {
6237 return true;
6238 }
6239
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  The copy is placed just before the exit block.
   ID, when non-NULL, is used to remap MEM_REF dependence cliques
   brought in via inlining.  */

static basic_block
gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi, *copy;
      phi = gpi.phi ();
      copy = create_phi_node (NULL_TREE, new_bb);
      /* Give the copy a fresh SSA name derived from the original
	 PHI result.  */
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      /* Labels are not copied; the duplicate gets its own labels on
	 demand.  */
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* If requested remap dependence info of cliques brought in
	 via inlining.  */
      if (id)
	for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
	  {
	    tree op = gimple_op (copy, i);
	    if (!op)
	      continue;
	    /* Strip down to the base memory reference.  */
	    if (TREE_CODE (op) == ADDR_EXPR
		|| TREE_CODE (op) == WITH_SIZE_EXPR)
	      op = TREE_OPERAND (op, 0);
	    while (handled_component_p (op))
	      op = TREE_OPERAND (op, 0);
	    if ((TREE_CODE (op) == MEM_REF
		 || TREE_CODE (op) == TARGET_MEM_REF)
		&& MR_DEPENDENCE_CLIQUE (op) > 1
		&& MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
	      {
		/* Lazily build the old-clique -> new-clique map and
		   allocate a fresh clique number on first sight.  */
		if (!id->dependence_map)
		  id->dependence_map = new hash_map<dependence_hash,
						    unsigned short>;
		bool existed;
		unsigned short &newc = id->dependence_map->get_or_insert
					 (MR_DEPENDENCE_CLIQUE (op), &existed);
		if (!existed)
		  {
		    gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
		    newc = ++cfun->last_clique;
		  }
		MR_DEPENDENCE_CLIQUE (op) = newc;
	      }
	  }

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
6350
/* Adds phi node arguments for edge E_COPY after basic block
   duplication.  The argument for each PHI in E_COPY->dest's copy is
   taken from the corresponding PHI in the original destination along
   the original edge.  Blocks that are duplicates carry BB_DUPLICATED
   and map back to their originals via get_bb_original.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  /* Nothing to do when the destination has no PHI nodes.  */
  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  /* Map the copied source block back to its original (if it is in
     fact a copy).  */
  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  /* Walk the original and copied PHI sequences in lockstep, copying
     each argument (and its location) from the original edge.  */
  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
6401
6402
6403 /* Basic block BB_COPY was created by code duplication. Add phi node
6404 arguments for edges going out of BB_COPY. The blocks that were
6405 duplicated have BB_DUPLICATED set. */
6406
6407 void
add_phi_args_after_copy_bb(basic_block bb_copy)6408 add_phi_args_after_copy_bb (basic_block bb_copy)
6409 {
6410 edge e_copy;
6411 edge_iterator ei;
6412
6413 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6414 {
6415 add_phi_args_after_copy_edge (e_copy);
6416 }
6417 }
6418
6419 /* Blocks in REGION_COPY array of length N_REGION were created by
6420 duplication of basic blocks. Add phi node arguments for edges
6421 going from these blocks. If E_COPY is not NULL, also add
6422 phi node arguments for its destination.*/
6423
6424 void
add_phi_args_after_copy(basic_block * region_copy,unsigned n_region,edge e_copy)6425 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6426 edge e_copy)
6427 {
6428 unsigned i;
6429
6430 for (i = 0; i < n_region; i++)
6431 region_copy[i]->flags |= BB_DUPLICATED;
6432
6433 for (i = 0; i < n_region; i++)
6434 add_phi_args_after_copy_bb (region_copy[i]);
6435 if (e_copy)
6436 add_phi_args_after_copy_edge (e_copy);
6437
6438 for (i = 0; i < n_region; i++)
6439 region_copy[i]->flags &= ~BB_DUPLICATED;
6440 }
6441
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  class loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms = vNULL;
  edge redirected;
  profile_count total_count = profile_count::uninitialized ();
  profile_count entry_count = profile_count::uninitialized ();

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      /* The loop header may only appear in the region as the entry
	 block; anything else would make the region span iterations.  */
      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      /* EXIT->src must lie on every path through the loop body...  */
      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      /* ... and no other block of the region may be dominated by it.  */
      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  /* When copying the header, the copy is peeled out of the loop and so
     belongs to the enclosing loop; otherwise it stays in LOOP.  */
  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count.initialized_p ())
    {
      total_count = entry->dest->count;
      entry_count = entry->count ();
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  /* Split the profile between original and copy proportionally to the
     probability of entering through ENTRY.  */
  if (total_count.initialized_p () && entry_count.initialized_p ())
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - entry_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
					   total_count);
    }

  /* After header copying, EXIT->dest becomes the new loop header and
     EXIT->src the new latch.  */
  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6578
6579 /* Checks if BB is part of the region defined by N_REGION BBS. */
6580 static bool
bb_part_of_region_p(basic_block bb,basic_block * bbs,unsigned n_region)6581 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6582 {
6583 unsigned int n;
6584
6585 for (n = 0; n < n_region; n++)
6586 {
6587 if (bb == bbs[n])
6588 return true;
6589 }
6590 return false;
6591 }
6592
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in that they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry, edge exit,
			    basic_block *region, unsigned n_region,
			    basic_block *region_copy)
{
  unsigned i;
  bool free_region_copy = false;
  class loop *loop = exit->dest->loop_father;
  class loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  profile_count total_count = profile_count::uninitialized (),
		exit_count = profile_count::uninitialized ();
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  class loop *target, *aloop, *cloop;

  /* EXIT->src must end in a two-way condition; EXITS[1] is its other
     (non-EXIT) successor edge.  */
  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  /* Duplicate the loop structure of every inner loop whose header lies
     inside the region, attaching the copies to TARGET.  */
  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  total_count = exit->src->count;
  exit_count = exit->count ();
  /* Fix up corner cases, to avoid division by zero or creation of negative
     frequencies.  */
  if (exit_count > total_count)
    exit_count = total_count;

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  /* Split the profile between original and copy proportionally to the
     probability of leaving through EXIT.  */
  if (total_count.initialized_p () && exit_count.initialized_p ())
    {
      scale_bbs_frequencies_profile_count (region, n_region,
					   total_count - exit_count,
					   total_count);
      scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
					   total_count);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  /* Reuse ENTRY->src as the switch block if it does not already end in
     a control statement; otherwise split ENTRY to get a fresh block.  */
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  /* The old single successor of SWITCH_BB takes over the role of the
     in-loop edge; the new edge SNEW enters the copied region.  */
  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  sorig->probability = exits[1]->probability;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
  snew->probability = exits[0]->probability;


  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	/* The redirected backedge needs PHI arguments in EXIT_BB; reuse
	   the values flowing in through the copied exit edge.  */
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
6765
6766 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6767 adding blocks when the dominator traversal reaches EXIT. This
6768 function silently assumes that ENTRY strictly dominates EXIT. */
6769
6770 void
gather_blocks_in_sese_region(basic_block entry,basic_block exit,vec<basic_block> * bbs_p)6771 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6772 vec<basic_block> *bbs_p)
6773 {
6774 basic_block son;
6775
6776 for (son = first_dom_son (CDI_DOMINATORS, entry);
6777 son;
6778 son = next_dom_son (CDI_DOMINATORS, son))
6779 {
6780 bbs_p->safe_push (son);
6781 if (son != exit)
6782 gather_blocks_in_sese_region (son, exit, bbs_p);
6783 }
6784 }
6785
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  /* Nothing to do if the decl is already owned by TO_CONTEXT.  */
  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      /* First time we see T: create the duplicate and cache it so every
	 later reference maps to the same copy.  */
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
6823
6824
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  /* Virtual operands are not moved; callers strip them beforehand.  */
  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  /* Duplicate the underlying variable first, then make an SSA
	     name for the duplicate in the destination function.  */
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
6863
/* Context passed (via walk_stmt_info.info) to the move_stmt_r and
   move_stmt_op walkers when moving statements to another function.  */

struct move_stmt_d
{
  /* Only expressions whose TREE_BLOCK equals ORIG_BLOCK (or any block,
     if ORIG_BLOCK is NULL_TREE) get rewritten to NEW_BLOCK.  */
  tree orig_block;
  tree new_block;
  /* Source function context; presumably the function being moved out
     of — not referenced by the walkers visible in this file.  */
  tree from_context;
  /* FUNCTION_DECL the statements are being moved into; used as the new
     DECL_CONTEXT for duplicated decls and labels.  */
  tree to_context;
  /* Maps old SSA names and decls to their duplicates in TO_CONTEXT.  */
  hash_map<tree, tree> *vars_map;
  /* Maps old LABEL_DECLs to fresh labels (tree_map entries).  */
  htab_t new_label_map;
  /* Maps old EH regions to their duplicates in the destination.  */
  hash_map<void *, void *> *eh_map;
  /* Whether local decls should be replaced by duplicates; cleared while
     walking inside OMP directives.  */
  bool remap_decls_p;
};
6875
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP if it has been ORIG_BLOCK previously and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == NULL_TREE)
	;
      else if (block == p->orig_block
	       || p->orig_block == NULL_TREE)
	{
	  /* tree_node_can_be_shared says we can share invariant
	     addresses but unshare_expr copies them anyways.  Make sure
	     to unshare before adjusting the block in place - we do not
	     always see a copy here.  */
	  if (TREE_CODE (t) == ADDR_EXPR
	      && is_gimple_min_invariant (t))
	    *tp = t = unshare_expr (t);
	  TREE_SET_BLOCK (t, p->new_block);
	}
      else if (flag_checking)
	{
	  /* Any other block must lie within ORIG_BLOCK's scope tree.  */
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	/* PARM_DECL replacements must have been recorded by the caller
	   already; a missing map entry here would be a bug.  */
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  /* For FORCED_LABELs we can end up with references from other
	     functions if some SESE regions are outlined.  It is UB to
	     jump in between them, but they could be used just for printing
	     addresses etc.  In that case, DECL_CONTEXT on the label should
	     be the function containing the glabel stmt with that LABEL_DECL,
	     rather than whatever function a reference to the label was seen
	     last time.  */
	  if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
	    DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((VAR_P (t) && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
6959
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number (old_nr);
  /* The mapping must exist; EH regions were duplicated up front.  */
  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));

  return new_r->index;
}
6973
6974 /* Similar, but operate on INTEGER_CSTs. */
6975
6976 static tree
move_stmt_eh_region_tree_nr(tree old_t_nr,struct move_stmt_d * p)6977 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6978 {
6979 int old_nr, new_nr;
6980
6981 old_nr = tree_to_shwi (old_t_nr);
6982 new_nr = move_stmt_eh_region_nr (old_nr, p);
6983
6984 return build_int_cst (integer_type_node, new_nr);
6985 }
6986
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      /* Remap the EH region number carried by the resume statement.  */
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      /* Likewise for EH dispatch statements.  */
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_LABEL:
      {
	/* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
	   so that such labels can be referenced from other regions.
	   Make sure to update it when seeing a GIMPLE_LABEL though,
	   that is the owner of the label.  */
	walk_gimple_op (stmt, move_stmt_op, wi);
	*handled_ops_p = true;
	tree label = gimple_label_label (as_a <glabel *> (stmt));
	if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
	  DECL_CONTEXT (label) = p->to_context;
      }
      break;

    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
7092
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      /* The loop's ->aux is expected to hold its duplicate in DEST_CFUN,
	 set up by the caller.  */
      class loop *new_loop = (class loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  BB keeps its old
     index in the destination array.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  But replace all uses that
	     might be outside of the region we move.  */
	  use_operand_p use_p;
	  imm_use_iterator iter;
	  gimple *use_stmt;
	  FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
	    FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	      SET_USE (use_p, SSA_NAME_VAR (op));
	  remove_phi_node (&psi, true);
	  continue;
	}

      /* Replace the PHI result and every SSA argument by duplicates
	 belonging to DEST_CFUN.  */
      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      /* Rewrite argument locations that refer to the moved block.  */
      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      locus = set_block (locus, d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  /* Transfer the label from the source label map to the
	     destination's, growing the latter if necessary.  */
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      /* Move EH landing pad info and value-profile histograms to the
	 destination function.  */
      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      /* Re-build the operands with DEST_CFUN as the active function.  */
      push_cfun (dest_cfun);
      update_stmt (stmt);
      if (is_gimple_call (stmt))
	notice_special_calls (as_a <gcall *> (stmt));
      pop_cfun ();
    }

  /* Rewrite goto locations referring to the moved block.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = set_block (e->goto_locus, d->new_block);
      }
}
7263
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.
   If there is no single outermost region, return NULL and set *ALL to
   true.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region,
				bool *all)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      /* Widen REGION to the common enclosing region; a NULL
		 result means the regions have no common ancestor.  */
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      if (region == NULL)
		{
		  *all = true;
		  return NULL;
		}
	    }
	}
    }

  return region;
}
7302
/* Create a fresh artificial label standing in for LABEL_DECL DECL and
   record the DECL -> new-label mapping in the hash table passed via
   DATA.  Returns the new label.  Used as a callback when outlining a
   region into another function.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  /* The new label keeps the old label's UID so existing label maps
     still index it correctly.  */
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  /* Each label must be mapped at most once.  */
  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
7327
7328 /* Tree walker to replace the decls used inside value expressions by
7329 duplicates. */
7330
7331 static tree
replace_block_vars_by_duplicates_1(tree * tp,int * walk_subtrees,void * data)7332 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7333 {
7334 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7335
7336 switch (TREE_CODE (*tp))
7337 {
7338 case VAR_DECL:
7339 case PARM_DECL:
7340 case RESULT_DECL:
7341 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7342 break;
7343 default:
7344 break;
7345 }
7346
7347 if (IS_TYPE_OR_DECL_P (*tp))
7348 *walk_subtrees = false;
7349
7350 return NULL;
7351 }
7352
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };
	      /* NOTE(review): the result of unshare_expr is discarded, so
		 X is NOT actually unshared before walk_tree mutates it in
		 place — verify whether `x = unshare_expr (x);` was
		 intended here.  */
	      unshare_expr (x);
	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  /* Splice the duplicate into the chain in place of the
	     original decl.  */
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  /* Recurse into lexical subblocks.  */
  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
7387
7388 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7389 from FN1 to FN2. */
7390
7391 static void
fixup_loop_arrays_after_move(struct function * fn1,struct function * fn2,class loop * loop)7392 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7393 class loop *loop)
7394 {
7395 /* Discard it from the old loop array. */
7396 (*get_loops (fn1))[loop->num] = NULL;
7397
7398 /* Place it in the new loop array, assigning it a new number. */
7399 loop->num = number_of_loops (fn2);
7400 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7401
7402 /* Recurse to children. */
7403 for (loop = loop->inner; loop; loop = loop->next)
7404 fixup_loop_arrays_after_move (fn1, fn2, loop);
7405 }
7406
7407 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7408 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7409
7410 DEBUG_FUNCTION void
verify_sese(basic_block entry,basic_block exit,vec<basic_block> * bbs_p)7411 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7412 {
7413 basic_block bb;
7414 edge_iterator ei;
7415 edge e;
7416 bitmap bbs = BITMAP_ALLOC (NULL);
7417 int i;
7418
7419 gcc_assert (entry != NULL);
7420 gcc_assert (entry != exit);
7421 gcc_assert (bbs_p != NULL);
7422
7423 gcc_assert (bbs_p->length () > 0);
7424
7425 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7426 bitmap_set_bit (bbs, bb->index);
7427
7428 gcc_assert (bitmap_bit_p (bbs, entry->index));
7429 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7430
7431 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7432 {
7433 if (bb == entry)
7434 {
7435 gcc_assert (single_pred_p (entry));
7436 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7437 }
7438 else
7439 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7440 {
7441 e = ei_edge (ei);
7442 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7443 }
7444
7445 if (bb == exit)
7446 {
7447 gcc_assert (single_succ_p (exit));
7448 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7449 }
7450 else
7451 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7452 {
7453 e = ei_edge (ei);
7454 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7455 }
7456 }
7457
7458 BITMAP_FREE (bbs);
7459 }
7460
7461 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7462
7463 bool
gather_ssa_name_hash_map_from(tree const & from,tree const &,void * data)7464 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7465 {
7466 bitmap release_names = (bitmap)data;
7467
7468 if (TREE_CODE (from) != SSA_NAME)
7469 return true;
7470
7471 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7472 return true;
7473 }
7474
7475 /* Return LOOP_DIST_ALIAS call if present in BB. */
7476
7477 static gimple *
find_loop_dist_alias(basic_block bb)7478 find_loop_dist_alias (basic_block bb)
7479 {
7480 gimple *g = last_stmt (bb);
7481 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7482 return NULL;
7483
7484 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7485 gsi_prev (&gsi);
7486 if (gsi_end_p (gsi))
7487 return NULL;
7488
7489 g = gsi_stmt (gsi);
7490 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7491 return g;
7492 return NULL;
7493 }
7494
/* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
   to VALUE and update any immediate uses of its LHS.  */
7497
7498 void
fold_loop_internal_call(gimple * g,tree value)7499 fold_loop_internal_call (gimple *g, tree value)
7500 {
7501 tree lhs = gimple_call_lhs (g);
7502 use_operand_p use_p;
7503 imm_use_iterator iter;
7504 gimple *use_stmt;
7505 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7506
7507 update_call_from_tree (&gsi, value);
7508 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7509 {
7510 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7511 SET_USE (use_p, value);
7512 update_stmt (use_stmt);
7513 }
7514 }
7515
7516 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7517 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7518 single basic block in the original CFG and the new basic block is
7519 returned. DEST_CFUN must not have a CFG yet.
7520
7521 Note that the region need not be a pure SESE region. Blocks inside
7522 the region may contain calls to abort/exit. The only restriction
7523 is that ENTRY_BB should be the only entry point and it must
7524 dominate EXIT_BB.
7525
   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, and move all subblocks of ORIG_BLOCK
   to the new function.
7529
7530 All local variables referenced in the region are assumed to be in
7531 the corresponding BLOCK_VARS and unexpanded variable lists
7532 associated with DEST_CFUN.
7533
7534 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7535 reimplement move_sese_region_to_fn by duplicating the region rather than
7536 moving it. */
7537
7538 basic_block
move_sese_region_to_fn(struct function * dest_cfun,basic_block entry_bb,basic_block exit_bb,tree orig_block)7539 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7540 basic_block exit_bb, tree orig_block)
7541 {
7542 vec<basic_block> bbs, dom_bbs;
7543 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7544 basic_block after, bb, *entry_pred, *exit_succ, abb;
7545 struct function *saved_cfun = cfun;
7546 int *entry_flag, *exit_flag;
7547 profile_probability *entry_prob, *exit_prob;
7548 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7549 edge e;
7550 edge_iterator ei;
7551 htab_t new_label_map;
7552 hash_map<void *, void *> *eh_map;
7553 class loop *loop = entry_bb->loop_father;
7554 class loop *loop0 = get_loop (saved_cfun, 0);
7555 struct move_stmt_d d;
7556
7557 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7558 region. */
7559 gcc_assert (entry_bb != exit_bb
7560 && (!exit_bb
7561 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7562
7563 /* Collect all the blocks in the region. Manually add ENTRY_BB
7564 because it won't be added by dfs_enumerate_from. */
7565 bbs.create (0);
7566 bbs.safe_push (entry_bb);
7567 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7568
7569 if (flag_checking)
7570 verify_sese (entry_bb, exit_bb, &bbs);
7571
7572 /* The blocks that used to be dominated by something in BBS will now be
7573 dominated by the new block. */
7574 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7575 bbs.address (),
7576 bbs.length ());
7577
7578 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7579 the predecessor edges to ENTRY_BB and the successor edges to
7580 EXIT_BB so that we can re-attach them to the new basic block that
7581 will replace the region. */
7582 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7583 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7584 entry_flag = XNEWVEC (int, num_entry_edges);
7585 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7586 i = 0;
7587 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7588 {
7589 entry_prob[i] = e->probability;
7590 entry_flag[i] = e->flags;
7591 entry_pred[i++] = e->src;
7592 remove_edge (e);
7593 }
7594
7595 if (exit_bb)
7596 {
7597 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7598 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7599 exit_flag = XNEWVEC (int, num_exit_edges);
7600 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7601 i = 0;
7602 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7603 {
7604 exit_prob[i] = e->probability;
7605 exit_flag[i] = e->flags;
7606 exit_succ[i++] = e->dest;
7607 remove_edge (e);
7608 }
7609 }
7610 else
7611 {
7612 num_exit_edges = 0;
7613 exit_succ = NULL;
7614 exit_flag = NULL;
7615 exit_prob = NULL;
7616 }
7617
7618 /* Switch context to the child function to initialize DEST_FN's CFG. */
7619 gcc_assert (dest_cfun->cfg == NULL);
7620 push_cfun (dest_cfun);
7621
7622 init_empty_tree_cfg ();
7623
7624 /* Initialize EH information for the new function. */
7625 eh_map = NULL;
7626 new_label_map = NULL;
7627 if (saved_cfun->eh)
7628 {
7629 eh_region region = NULL;
7630 bool all = false;
7631
7632 FOR_EACH_VEC_ELT (bbs, i, bb)
7633 {
7634 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7635 if (all)
7636 break;
7637 }
7638
7639 init_eh_for_function ();
7640 if (region != NULL || all)
7641 {
7642 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7643 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7644 new_label_mapper, new_label_map);
7645 }
7646 }
7647
7648 /* Initialize an empty loop tree. */
7649 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7650 init_loops_structure (dest_cfun, loops, 1);
7651 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7652 set_loops_for_fn (dest_cfun, loops);
7653
7654 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7655
7656 /* Move the outlined loop tree part. */
7657 num_nodes = bbs.length ();
7658 FOR_EACH_VEC_ELT (bbs, i, bb)
7659 {
7660 if (bb->loop_father->header == bb)
7661 {
7662 class loop *this_loop = bb->loop_father;
7663 class loop *outer = loop_outer (this_loop);
7664 if (outer == loop
7665 /* If the SESE region contains some bbs ending with
7666 a noreturn call, those are considered to belong
7667 to the outermost loop in saved_cfun, rather than
7668 the entry_bb's loop_father. */
7669 || outer == loop0)
7670 {
7671 if (outer != loop)
7672 num_nodes -= this_loop->num_nodes;
7673 flow_loop_tree_node_remove (bb->loop_father);
7674 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7675 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7676 }
7677 }
7678 else if (bb->loop_father == loop0 && loop0 != loop)
7679 num_nodes--;
7680
7681 /* Remove loop exits from the outlined region. */
7682 if (loops_for_fn (saved_cfun)->exits)
7683 FOR_EACH_EDGE (e, ei, bb->succs)
7684 {
7685 struct loops *l = loops_for_fn (saved_cfun);
7686 loop_exit **slot
7687 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7688 NO_INSERT);
7689 if (slot)
7690 l->exits->clear_slot (slot);
7691 }
7692 }
7693
7694 /* Adjust the number of blocks in the tree root of the outlined part. */
7695 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7696
7697 /* Setup a mapping to be used by move_block_to_fn. */
7698 loop->aux = current_loops->tree_root;
7699 loop0->aux = current_loops->tree_root;
7700
7701 /* Fix up orig_loop_num. If the block referenced in it has been moved
7702 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7703 class loop *dloop;
7704 signed char *moved_orig_loop_num = NULL;
7705 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7706 if (dloop->orig_loop_num)
7707 {
7708 if (moved_orig_loop_num == NULL)
7709 moved_orig_loop_num
7710 = XCNEWVEC (signed char, vec_safe_length (larray));
7711 if ((*larray)[dloop->orig_loop_num] != NULL
7712 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7713 {
7714 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7715 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7716 moved_orig_loop_num[dloop->orig_loop_num]++;
7717 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7718 }
7719 else
7720 {
7721 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7722 dloop->orig_loop_num = 0;
7723 }
7724 }
7725 pop_cfun ();
7726
7727 if (moved_orig_loop_num)
7728 {
7729 FOR_EACH_VEC_ELT (bbs, i, bb)
7730 {
7731 gimple *g = find_loop_dist_alias (bb);
7732 if (g == NULL)
7733 continue;
7734
7735 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7736 gcc_assert (orig_loop_num
7737 && (unsigned) orig_loop_num < vec_safe_length (larray));
7738 if (moved_orig_loop_num[orig_loop_num] == 2)
7739 {
7740 /* If we have moved both loops with this orig_loop_num into
7741 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7742 too, update the first argument. */
7743 gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
7744 && (get_loop (saved_cfun, dloop->orig_loop_num)
7745 == NULL));
7746 tree t = build_int_cst (integer_type_node,
7747 (*larray)[dloop->orig_loop_num]->num);
7748 gimple_call_set_arg (g, 0, t);
7749 update_stmt (g);
7750 /* Make sure the following loop will not update it. */
7751 moved_orig_loop_num[orig_loop_num] = 0;
7752 }
7753 else
7754 /* Otherwise at least one of the loops stayed in saved_cfun.
7755 Remove the LOOP_DIST_ALIAS call. */
7756 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7757 }
7758 FOR_EACH_BB_FN (bb, saved_cfun)
7759 {
7760 gimple *g = find_loop_dist_alias (bb);
7761 if (g == NULL)
7762 continue;
7763 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7764 gcc_assert (orig_loop_num
7765 && (unsigned) orig_loop_num < vec_safe_length (larray));
7766 if (moved_orig_loop_num[orig_loop_num])
7767 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7768 of the corresponding loops was moved, remove it. */
7769 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7770 }
7771 XDELETEVEC (moved_orig_loop_num);
7772 }
7773 ggc_free (larray);
7774
7775 /* Move blocks from BBS into DEST_CFUN. */
7776 gcc_assert (bbs.length () >= 2);
7777 after = dest_cfun->cfg->x_entry_block_ptr;
7778 hash_map<tree, tree> vars_map;
7779
7780 memset (&d, 0, sizeof (d));
7781 d.orig_block = orig_block;
7782 d.new_block = DECL_INITIAL (dest_cfun->decl);
7783 d.from_context = cfun->decl;
7784 d.to_context = dest_cfun->decl;
7785 d.vars_map = &vars_map;
7786 d.new_label_map = new_label_map;
7787 d.eh_map = eh_map;
7788 d.remap_decls_p = true;
7789
7790 if (gimple_in_ssa_p (cfun))
7791 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
7792 {
7793 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
7794 set_ssa_default_def (dest_cfun, arg, narg);
7795 vars_map.put (arg, narg);
7796 }
7797
7798 FOR_EACH_VEC_ELT (bbs, i, bb)
7799 {
7800 /* No need to update edge counts on the last block. It has
7801 already been updated earlier when we detached the region from
7802 the original CFG. */
7803 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7804 after = bb;
7805 }
7806
7807 /* Adjust the maximum clique used. */
7808 dest_cfun->last_clique = saved_cfun->last_clique;
7809
7810 loop->aux = NULL;
7811 loop0->aux = NULL;
7812 /* Loop sizes are no longer correct, fix them up. */
7813 loop->num_nodes -= num_nodes;
7814 for (class loop *outer = loop_outer (loop);
7815 outer; outer = loop_outer (outer))
7816 outer->num_nodes -= num_nodes;
7817 loop0->num_nodes -= bbs.length () - num_nodes;
7818
7819 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
7820 {
7821 class loop *aloop;
7822 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7823 if (aloop != NULL)
7824 {
7825 if (aloop->simduid)
7826 {
7827 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7828 d.to_context);
7829 dest_cfun->has_simduid_loops = true;
7830 }
7831 if (aloop->force_vectorize)
7832 dest_cfun->has_force_vectorize_loops = true;
7833 }
7834 }
7835
7836 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7837 if (orig_block)
7838 {
7839 tree block;
7840 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7841 == NULL_TREE);
7842 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7843 = BLOCK_SUBBLOCKS (orig_block);
7844 for (block = BLOCK_SUBBLOCKS (orig_block);
7845 block; block = BLOCK_CHAIN (block))
7846 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7847 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7848 }
7849
7850 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7851 &vars_map, dest_cfun->decl);
7852
7853 if (new_label_map)
7854 htab_delete (new_label_map);
7855 if (eh_map)
7856 delete eh_map;
7857
7858 if (gimple_in_ssa_p (cfun))
7859 {
7860 /* We need to release ssa-names in a defined order, so first find them,
7861 and then iterate in ascending version order. */
7862 bitmap release_names = BITMAP_ALLOC (NULL);
7863 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
7864 bitmap_iterator bi;
7865 unsigned i;
7866 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
7867 release_ssa_name (ssa_name (i));
7868 BITMAP_FREE (release_names);
7869 }
7870
7871 /* Rewire the entry and exit blocks. The successor to the entry
7872 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7873 the child function. Similarly, the predecessor of DEST_FN's
7874 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7875 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7876 various CFG manipulation function get to the right CFG.
7877
7878 FIXME, this is silly. The CFG ought to become a parameter to
7879 these helpers. */
7880 push_cfun (dest_cfun);
7881 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7882 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7883 if (exit_bb)
7884 {
7885 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7886 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7887 }
7888 else
7889 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7890 pop_cfun ();
7891
7892 /* Back in the original function, the SESE region has disappeared,
7893 create a new basic block in its place. */
7894 bb = create_empty_bb (entry_pred[0]);
7895 if (current_loops)
7896 add_bb_to_loop (bb, loop);
7897 for (i = 0; i < num_entry_edges; i++)
7898 {
7899 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7900 e->probability = entry_prob[i];
7901 }
7902
7903 for (i = 0; i < num_exit_edges; i++)
7904 {
7905 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7906 e->probability = exit_prob[i];
7907 }
7908
7909 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7910 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7911 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7912 dom_bbs.release ();
7913
7914 if (exit_bb)
7915 {
7916 free (exit_prob);
7917 free (exit_flag);
7918 free (exit_succ);
7919 }
7920 free (entry_prob);
7921 free (entry_flag);
7922 free (entry_pred);
7923 bbs.release ();
7924
7925 return bb;
7926 }
7927
7928 /* Dump default def DEF to file FILE using FLAGS and indentation
7929 SPC. */
7930
7931 static void
dump_default_def(FILE * file,tree def,int spc,dump_flags_t flags)7932 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
7933 {
7934 for (int i = 0; i < spc; ++i)
7935 fprintf (file, " ");
7936 dump_ssaname_info_to_file (file, def, spc);
7937
7938 print_generic_expr (file, TREE_TYPE (def), flags);
7939 fprintf (file, " ");
7940 print_generic_expr (file, def, flags);
7941 fprintf (file, " = ");
7942 print_generic_expr (file, SSA_NAME_VAR (def), flags);
7943 fprintf (file, ";\n");
7944 }
7945
7946 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
7947
7948 static void
print_no_sanitize_attr_value(FILE * file,tree value)7949 print_no_sanitize_attr_value (FILE *file, tree value)
7950 {
7951 unsigned int flags = tree_to_uhwi (value);
7952 bool first = true;
7953 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
7954 {
7955 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
7956 {
7957 if (!first)
7958 fprintf (file, " | ");
7959 fprintf (file, "%s", sanitizer_opts[i].name);
7960 first = false;
7961 }
7962 }
7963 }
7964
7965 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7966 */
7967
7968 void
dump_function_to_file(tree fndecl,FILE * file,dump_flags_t flags)7969 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
7970 {
7971 tree arg, var, old_current_fndecl = current_function_decl;
7972 struct function *dsf;
7973 bool ignore_topmost_bind = false, any_var = false;
7974 basic_block bb;
7975 tree chain;
7976 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7977 && decl_is_tm_clone (fndecl));
7978 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7979
7980 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7981 {
7982 fprintf (file, "__attribute__((");
7983
7984 bool first = true;
7985 tree chain;
7986 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7987 first = false, chain = TREE_CHAIN (chain))
7988 {
7989 if (!first)
7990 fprintf (file, ", ");
7991
7992 tree name = get_attribute_name (chain);
7993 print_generic_expr (file, name, dump_flags);
7994 if (TREE_VALUE (chain) != NULL_TREE)
7995 {
7996 fprintf (file, " (");
7997
7998 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
7999 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8000 else
8001 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8002 fprintf (file, ")");
8003 }
8004 }
8005
8006 fprintf (file, "))\n");
8007 }
8008
8009 current_function_decl = fndecl;
8010 if (flags & TDF_GIMPLE)
8011 {
8012 static bool hotness_bb_param_printed = false;
8013 if (profile_info != NULL
8014 && !hotness_bb_param_printed)
8015 {
8016 hotness_bb_param_printed = true;
8017 fprintf (file,
8018 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8019 " */\n", get_hot_bb_threshold ());
8020 }
8021
8022 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8023 dump_flags | TDF_SLIM);
8024 fprintf (file, " __GIMPLE (%s",
8025 (fun->curr_properties & PROP_ssa) ? "ssa"
8026 : (fun->curr_properties & PROP_cfg) ? "cfg"
8027 : "");
8028
8029 if (cfun->cfg)
8030 {
8031 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8032 if (bb->count.initialized_p ())
8033 fprintf (file, ",%s(%d)",
8034 profile_quality_as_string (bb->count.quality ()),
8035 bb->count.value ());
8036 fprintf (file, ")\n%s (", function_name (fun));
8037 }
8038 }
8039 else
8040 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
8041
8042 arg = DECL_ARGUMENTS (fndecl);
8043 while (arg)
8044 {
8045 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8046 fprintf (file, " ");
8047 print_generic_expr (file, arg, dump_flags);
8048 if (DECL_CHAIN (arg))
8049 fprintf (file, ", ");
8050 arg = DECL_CHAIN (arg);
8051 }
8052 fprintf (file, ")\n");
8053
8054 dsf = DECL_STRUCT_FUNCTION (fndecl);
8055 if (dsf && (flags & TDF_EH))
8056 dump_eh_tree (file, dsf);
8057
8058 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8059 {
8060 dump_node (fndecl, TDF_SLIM | flags, file);
8061 current_function_decl = old_current_fndecl;
8062 return;
8063 }
8064
8065 /* When GIMPLE is lowered, the variables are no longer available in
8066 BIND_EXPRs, so display them separately. */
8067 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8068 {
8069 unsigned ix;
8070 ignore_topmost_bind = true;
8071
8072 fprintf (file, "{\n");
8073 if (gimple_in_ssa_p (fun)
8074 && (flags & TDF_ALIAS))
8075 {
8076 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8077 arg = DECL_CHAIN (arg))
8078 {
8079 tree def = ssa_default_def (fun, arg);
8080 if (def)
8081 dump_default_def (file, def, 2, flags);
8082 }
8083
8084 tree res = DECL_RESULT (fun->decl);
8085 if (res != NULL_TREE
8086 && DECL_BY_REFERENCE (res))
8087 {
8088 tree def = ssa_default_def (fun, res);
8089 if (def)
8090 dump_default_def (file, def, 2, flags);
8091 }
8092
8093 tree static_chain = fun->static_chain_decl;
8094 if (static_chain != NULL_TREE)
8095 {
8096 tree def = ssa_default_def (fun, static_chain);
8097 if (def)
8098 dump_default_def (file, def, 2, flags);
8099 }
8100 }
8101
8102 if (!vec_safe_is_empty (fun->local_decls))
8103 FOR_EACH_LOCAL_DECL (fun, ix, var)
8104 {
8105 print_generic_decl (file, var, flags);
8106 fprintf (file, "\n");
8107
8108 any_var = true;
8109 }
8110
8111 tree name;
8112
8113 if (gimple_in_ssa_p (cfun))
8114 FOR_EACH_SSA_NAME (ix, name, cfun)
8115 {
8116 if (!SSA_NAME_VAR (name))
8117 {
8118 fprintf (file, " ");
8119 print_generic_expr (file, TREE_TYPE (name), flags);
8120 fprintf (file, " ");
8121 print_generic_expr (file, name, flags);
8122 fprintf (file, ";\n");
8123
8124 any_var = true;
8125 }
8126 }
8127 }
8128
8129 if (fun && fun->decl == fndecl
8130 && fun->cfg
8131 && basic_block_info_for_fn (fun))
8132 {
8133 /* If the CFG has been built, emit a CFG-based dump. */
8134 if (!ignore_topmost_bind)
8135 fprintf (file, "{\n");
8136
8137 if (any_var && n_basic_blocks_for_fn (fun))
8138 fprintf (file, "\n");
8139
8140 FOR_EACH_BB_FN (bb, fun)
8141 dump_bb (file, bb, 2, flags);
8142
8143 fprintf (file, "}\n");
8144 }
8145 else if (fun->curr_properties & PROP_gimple_any)
8146 {
8147 /* The function is now in GIMPLE form but the CFG has not been
8148 built yet. Emit the single sequence of GIMPLE statements
8149 that make up its body. */
8150 gimple_seq body = gimple_body (fndecl);
8151
8152 if (gimple_seq_first_stmt (body)
8153 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8154 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8155 print_gimple_seq (file, body, 0, flags);
8156 else
8157 {
8158 if (!ignore_topmost_bind)
8159 fprintf (file, "{\n");
8160
8161 if (any_var)
8162 fprintf (file, "\n");
8163
8164 print_gimple_seq (file, body, 2, flags);
8165 fprintf (file, "}\n");
8166 }
8167 }
8168 else
8169 {
8170 int indent;
8171
8172 /* Make a tree based dump. */
8173 chain = DECL_SAVED_TREE (fndecl);
8174 if (chain && TREE_CODE (chain) == BIND_EXPR)
8175 {
8176 if (ignore_topmost_bind)
8177 {
8178 chain = BIND_EXPR_BODY (chain);
8179 indent = 2;
8180 }
8181 else
8182 indent = 0;
8183 }
8184 else
8185 {
8186 if (!ignore_topmost_bind)
8187 {
8188 fprintf (file, "{\n");
8189 /* No topmost bind, pretend it's ignored for later. */
8190 ignore_topmost_bind = true;
8191 }
8192 indent = 2;
8193 }
8194
8195 if (any_var)
8196 fprintf (file, "\n");
8197
8198 print_generic_stmt_indented (file, chain, flags, indent);
8199 if (ignore_topmost_bind)
8200 fprintf (file, "}\n");
8201 }
8202
8203 if (flags & TDF_ENUMERATE_LOCALS)
8204 dump_enumerated_decls (file, flags);
8205 fprintf (file, "\n\n");
8206
8207 current_function_decl = old_current_fndecl;
8208 }
8209
8210 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8211
DEBUG_FUNCTION void
debug_function (tree fn, dump_flags_t flags)
{
  /* Convenience wrapper for use from the debugger: dump to stderr.  */
  dump_function_to_file (fn, stderr, flags);
}
8217
8218
8219 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8220
8221 static void
print_pred_bbs(FILE * file,basic_block bb)8222 print_pred_bbs (FILE *file, basic_block bb)
8223 {
8224 edge e;
8225 edge_iterator ei;
8226
8227 FOR_EACH_EDGE (e, ei, bb->preds)
8228 fprintf (file, "bb_%d ", e->src->index);
8229 }
8230
8231
8232 /* Print on FILE the indexes for the successors of basic_block BB. */
8233
8234 static void
print_succ_bbs(FILE * file,basic_block bb)8235 print_succ_bbs (FILE *file, basic_block bb)
8236 {
8237 edge e;
8238 edge_iterator ei;
8239
8240 FOR_EACH_EDGE (e, ei, bb->succs)
8241 fprintf (file, "bb_%d ", e->dest->index);
8242 }
8243
8244 /* Print to FILE the basic block BB following the VERBOSITY level. */
8245
8246 void
print_loops_bb(FILE * file,basic_block bb,int indent,int verbosity)8247 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8248 {
8249 char *s_indent = (char *) alloca ((size_t) indent + 1);
8250 memset ((void *) s_indent, ' ', (size_t) indent);
8251 s_indent[indent] = '\0';
8252
8253 /* Print basic_block's header. */
8254 if (verbosity >= 2)
8255 {
8256 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8257 print_pred_bbs (file, bb);
8258 fprintf (file, "}, succs = {");
8259 print_succ_bbs (file, bb);
8260 fprintf (file, "})\n");
8261 }
8262
8263 /* Print basic_block's body. */
8264 if (verbosity >= 3)
8265 {
8266 fprintf (file, "%s {\n", s_indent);
8267 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8268 fprintf (file, "%s }\n", s_indent);
8269 }
8270 }
8271
8272 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8273
8274 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8275 VERBOSITY level this outputs the contents of the loop, or just its
8276 structure. */
8277
8278 static void
print_loop(FILE * file,class loop * loop,int indent,int verbosity)8279 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8280 {
8281 char *s_indent;
8282 basic_block bb;
8283
8284 if (loop == NULL)
8285 return;
8286
8287 s_indent = (char *) alloca ((size_t) indent + 1);
8288 memset ((void *) s_indent, ' ', (size_t) indent);
8289 s_indent[indent] = '\0';
8290
8291 /* Print loop's header. */
8292 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8293 if (loop->header)
8294 fprintf (file, "header = %d", loop->header->index);
8295 else
8296 {
8297 fprintf (file, "deleted)\n");
8298 return;
8299 }
8300 if (loop->latch)
8301 fprintf (file, ", latch = %d", loop->latch->index);
8302 else
8303 fprintf (file, ", multiple latches");
8304 fprintf (file, ", niter = ");
8305 print_generic_expr (file, loop->nb_iterations);
8306
8307 if (loop->any_upper_bound)
8308 {
8309 fprintf (file, ", upper_bound = ");
8310 print_decu (loop->nb_iterations_upper_bound, file);
8311 }
8312 if (loop->any_likely_upper_bound)
8313 {
8314 fprintf (file, ", likely_upper_bound = ");
8315 print_decu (loop->nb_iterations_likely_upper_bound, file);
8316 }
8317
8318 if (loop->any_estimate)
8319 {
8320 fprintf (file, ", estimate = ");
8321 print_decu (loop->nb_iterations_estimate, file);
8322 }
8323 if (loop->unroll)
8324 fprintf (file, ", unroll = %d", loop->unroll);
8325 fprintf (file, ")\n");
8326
8327 /* Print loop's body. */
8328 if (verbosity >= 1)
8329 {
8330 fprintf (file, "%s{\n", s_indent);
8331 FOR_EACH_BB_FN (bb, cfun)
8332 if (bb->loop_father == loop)
8333 print_loops_bb (file, bb, indent, verbosity);
8334
8335 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8336 fprintf (file, "%s}\n", s_indent);
8337 }
8338 }
8339
8340 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8341 spaces. Following VERBOSITY level this outputs the contents of the
8342 loop, or just its structure. */
8343
8344 static void
print_loop_and_siblings(FILE * file,class loop * loop,int indent,int verbosity)8345 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8346 int verbosity)
8347 {
8348 if (loop == NULL)
8349 return;
8350
8351 print_loop (file, loop, indent, verbosity);
8352 print_loop_and_siblings (file, loop->next, indent, verbosity);
8353 }
8354
8355 /* Follow a CFG edge from the entry point of the program, and on entry
8356 of a loop, pretty print the loop structure on FILE. */
8357
8358 void
print_loops(FILE * file,int verbosity)8359 print_loops (FILE *file, int verbosity)
8360 {
8361 basic_block bb;
8362
8363 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8364 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8365 if (bb && bb->loop_father)
8366 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8367 }
8368
8369 /* Dump a loop. */
8370
DEBUG_FUNCTION void
debug (class loop &ref)
{
  /* Verbosity 0: structure only, no basic-block contents.  */
  print_loop (stderr, &ref, 0, /*verbosity*/0);
}
8376
8377 DEBUG_FUNCTION void
debug(class loop * ptr)8378 debug (class loop *ptr)
8379 {
8380 if (ptr)
8381 debug (*ptr);
8382 else
8383 fprintf (stderr, "<nil>\n");
8384 }
8385
8386 /* Dump a loop verbosely. */
8387
DEBUG_FUNCTION void
debug_verbose (class loop &ref)
{
  /* Verbosity 3: also dumps member blocks and their statements.  */
  print_loop (stderr, &ref, 0, /*verbosity*/3);
}
8393
8394 DEBUG_FUNCTION void
debug_verbose(class loop * ptr)8395 debug_verbose (class loop *ptr)
8396 {
8397 if (ptr)
8398 debug (*ptr);
8399 else
8400 fprintf (stderr, "<nil>\n");
8401 }
8402
8403
8404 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8405
DEBUG_FUNCTION void
debug_loops (int verbosity)
{
  /* Dump the whole loop tree of CFUN to stderr.  */
  print_loops (stderr, verbosity);
}
8411
8412 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8413
DEBUG_FUNCTION void
debug_loop (class loop *loop, int verbosity)
{
  /* print_loop handles a NULL LOOP gracefully.  */
  print_loop (stderr, loop, 0, verbosity);
}
8419
8420 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8421 level. */
8422
DEBUG_FUNCTION void
debug_loop_num (unsigned num, int verbosity)
{
  /* Look the loop up by number in CFUN's loop array.  */
  debug_loop (get_loop (cfun, num), verbosity);
}
8428
8429 /* Return true if BB ends with a call, possibly followed by some
8430 instructions that must stay with the call. Return false,
8431 otherwise. */
8432
8433 static bool
gimple_block_ends_with_call_p(basic_block bb)8434 gimple_block_ends_with_call_p (basic_block bb)
8435 {
8436 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8437 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8438 }
8439
8440
8441 /* Return true if BB ends with a conditional branch. Return false,
8442 otherwise. */
8443
8444 static bool
gimple_block_ends_with_condjump_p(const_basic_block bb)8445 gimple_block_ends_with_condjump_p (const_basic_block bb)
8446 {
8447 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8448 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8449 }
8450
8451
8452 /* Return true if statement T may terminate execution of BB in ways not
   explicitly represented in the CFG.  */
8454
bool
stmt_can_terminate_bb_p (gimple *t)
{
  tree fndecl = NULL_TREE;
  int call_flags = 0;

  /* Eh exception not handled internally terminates execution of the whole
     function.  */
  if (stmt_can_throw_external (cfun, t))
    return true;

  /* NORETURN and LONGJMP calls already have an edge to exit.
     CONST and PURE calls do not need one.
     We don't currently check for CONST and PURE here, although
     it would be a good idea, because those attributes are
     figured out from the RTL in mark_constant_function, and
     the counter incrementation code from -fprofile-arcs
     leads to different results from -fbranch-probabilities.  */
  if (is_gimple_call (t))
    {
      fndecl = gimple_call_fndecl (t);
      call_flags = gimple_call_flags (t);
    }

  /* A nothrow builtin that cannot return twice never terminates the
     block abnormally -- except fork, handled below.  */
  if (is_gimple_call (t)
      && fndecl
      && fndecl_built_in_p (fndecl)
      && (call_flags & ECF_NOTHROW)
      && !(call_flags & ECF_RETURNS_TWICE)
      /* fork() doesn't really return twice, but the effect of
	 wrapping it in __gcov_fork() which calls __gcov_flush()
	 and clears the counters before forking has the same
	 effect as returning twice.  Force a fake edge.  */
      && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
    return false;

  if (is_gimple_call (t))
    {
      edge_iterator ei;
      edge e;
      basic_block bb;

      /* Non-looping const/pure calls always return normally.  */
      if (call_flags & (ECF_PURE | ECF_CONST)
	  && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
	return false;

      /* Function call may do longjmp, terminate program or do other things.
	 Special case noreturn that have non-abnormal edges out as in this case
	 the fact is sufficiently represented by lack of edges out of T.  */
      if (!(call_flags & ECF_NORETURN))
	return true;

      /* For a noreturn call, only a pre-existing non-fake outgoing edge
	 makes T count as terminating the block here.  */
      bb = gimple_bb (t);
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_FAKE) == 0)
	  return true;
    }

  /* Conservatively treat volatile asms and asms for which
     gimple_asm_input_p holds as able to terminate the block.  */
  if (gasm *asm_stmt = dyn_cast <gasm *> (t))
    if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
      return true;

  return false;
}
8519
8520
8521 /* Add fake edges to the function exit for any non constant and non
8522 noreturn calls (or noreturn calls with EH/abnormal edges),
8523 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8524 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8525 that were split.
8526
8527 The goal is to expose cases in which entering a basic block does
8528 not imply that all subsequent instructions must be executed. */
8529
static int
gimple_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  /* Nothing to do when the function has no real blocks.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  /* The last real block needs special handling if it is in the set we
     were asked to process (or if the whole CFG is processed).  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
      gimple *t = NULL;

      if (!gsi_end_p (gsi))
	t = gsi_stmt (gsi);

      if (t && stmt_can_terminate_bb_p (t))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      /* Commit a nop on the edge to EXIT so the terminating
		 statement is no longer in the last basic block.  */
	      gsi_insert_on_edge (e, gimple_build_nop ());
	      gsi_commit_edge_inserts ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */
  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      gimple_stmt_iterator gsi;
      gimple *stmt, *last_stmt;

      /* Skip unused block slots.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      gsi = gsi_last_nondebug_bb (bb);
      if (!gsi_end_p (gsi))
	{
	  last_stmt = gsi_stmt (gsi);
	  /* Walk the statements backwards; every terminating statement
	     gets a fake edge to EXIT, splitting the block first when the
	     statement is not the last one.  */
	  do
	    {
	      stmt = gsi_stmt (gsi);
	      if (stmt_can_terminate_bb_p (stmt))
		{
		  edge e;

		  /* The handling above of the final block before the
		     epilogue should be enough to verify that there is
		     no edge to the exit block in CFG already.
		     Calling make_edge in such case would cause us to
		     mark that edge as fake and remove it later.  */
		  if (flag_checking && stmt == last_stmt)
		    {
		      e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		      gcc_assert (e == NULL);
		    }

		  /* Note that the following may create a new basic block
		     and renumber the existing basic blocks.  */
		  if (stmt != last_stmt)
		    {
		      e = split_block (bb, stmt);
		      if (e)
			blocks_split++;
		    }
		  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
		  e->probability = profile_probability::guessed_never ();
		}
	      gsi_prev (&gsi);
	    }
	  while (!gsi_end_p (gsi));
	}
    }

  if (blocks_split)
    checking_verify_flow_info ();

  return blocks_split;
}
8640
8641 /* Removes edge E and all the blocks dominated by it, and updates dominance
8642 information. The IL in E->src needs to be updated separately.
8643 If dominance info is not available, only the edge E is removed.*/
8644
void
remove_edge_and_dominated_blocks (edge e)
{
  vec<basic_block> bbs_to_remove = vNULL;
  vec<basic_block> bbs_to_fix_dom = vNULL;
  edge f;
  edge_iterator ei;
  bool none_removed = false;
  unsigned i;
  basic_block bb, dbb;
  bitmap_iterator bi;

  /* If we are removing a path inside a non-root loop that may change
     loop ownership of blocks or remove loops.  Mark loops for fixup.  */
  if (current_loops
      && loop_outer (e->src->loop_father) != NULL
      && e->src->loop_father == e->dest->loop_father)
    loops_state_set (LOOPS_NEED_FIXUP);

  /* Without dominator information we cannot compute which blocks die,
     so only the edge itself is removed.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      remove_edge (e);
      return;
    }

  /* No updating is needed for edges to exit.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      if (cfgcleanup_altered_bbs)
	bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      remove_edge (e);
      return;
    }

  /* First, we find the basic blocks to remove.  If E->dest has a predecessor
     that is not dominated by E->dest, then this set is empty.  Otherwise,
     all the basic blocks dominated by E->dest are removed.

     Also, to DF_IDOM we store the immediate dominators of the blocks in
     the dominance frontier of E (i.e., of the successors of the
     removed blocks, if there are any, and of E->dest otherwise).  */
  FOR_EACH_EDGE (f, ei, e->dest->preds)
    {
      if (f == e)
	continue;

      if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
	{
	  none_removed = true;
	  break;
	}
    }

  /* DF: surviving successors of the removed region (its dominance
     frontier).  DF_IDOM: immediate dominators of the blocks whose
     idom may need fixing.  */
  auto_bitmap df, df_idom;
  if (none_removed)
    bitmap_set_bit (df_idom,
		    get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
  else
    {
      bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	{
	  FOR_EACH_EDGE (f, ei, bb->succs)
	    {
	      if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (df, f->dest->index);
	    }
	}
      /* Successors that are themselves removed are not in the
	 frontier.  */
      FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
	bitmap_clear_bit (df, bb->index);

      EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
	{
	  bb = BASIC_BLOCK_FOR_FN (cfun, i);
	  bitmap_set_bit (df_idom,
			  get_immediate_dominator (CDI_DOMINATORS, bb)->index);
	}
    }

  if (cfgcleanup_altered_bbs)
    {
      /* Record the set of the altered basic blocks.  */
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
      bitmap_ior_into (cfgcleanup_altered_bbs, df);
    }

  /* Remove E and the cancelled blocks.  */
  if (none_removed)
    remove_edge (e);
  else
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = bbs_to_remove.length (); i-- > 0; )
	delete_basic_block (bbs_to_remove[i]);
    }

  /* Update the dominance information.  The immediate dominator may change only
     for blocks whose immediate dominator belongs to DF_IDOM:

     Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
     removal.  Let Z the arbitrary block such that idom(Z) = Y and
     Z dominates X after the removal.  Before removal, there exists a path P
     from Y to X that avoids Z.  Let F be the last edge on P that is
     removed, and let W = F->dest.  Before removal, idom(W) = Y (since Y
     dominates W, and because of P, Z does not dominate W), and W belongs to
     the dominance frontier of E.  Therefore, Y belongs to DF_IDOM.  */
  EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      for (dbb = first_dom_son (CDI_DOMINATORS, bb);
	   dbb;
	   dbb = next_dom_son (CDI_DOMINATORS, dbb))
	bbs_to_fix_dom.safe_push (dbb);
    }

  iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);

  bbs_to_remove.release ();
  bbs_to_fix_dom.release ();
}
8768
8769 /* Purge dead EH edges from basic block BB. */
8770
8771 bool
gimple_purge_dead_eh_edges(basic_block bb)8772 gimple_purge_dead_eh_edges (basic_block bb)
8773 {
8774 bool changed = false;
8775 edge e;
8776 edge_iterator ei;
8777 gimple *stmt = last_stmt (bb);
8778
8779 if (stmt && stmt_can_throw_internal (cfun, stmt))
8780 return false;
8781
8782 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8783 {
8784 if (e->flags & EDGE_EH)
8785 {
8786 remove_edge_and_dominated_blocks (e);
8787 changed = true;
8788 }
8789 else
8790 ei_next (&ei);
8791 }
8792
8793 return changed;
8794 }
8795
8796 /* Purge dead EH edges from basic block listed in BLOCKS. */
8797
8798 bool
gimple_purge_all_dead_eh_edges(const_bitmap blocks)8799 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8800 {
8801 bool changed = false;
8802 unsigned i;
8803 bitmap_iterator bi;
8804
8805 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8806 {
8807 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8808
8809 /* Earlier gimple_purge_dead_eh_edges could have removed
8810 this basic block already. */
8811 gcc_assert (bb || changed);
8812 if (bb != NULL)
8813 changed |= gimple_purge_dead_eh_edges (bb);
8814 }
8815
8816 return changed;
8817 }
8818
8819 /* Purge dead abnormal call edges from basic block BB. */
8820
8821 bool
gimple_purge_dead_abnormal_call_edges(basic_block bb)8822 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8823 {
8824 bool changed = false;
8825 edge e;
8826 edge_iterator ei;
8827 gimple *stmt = last_stmt (bb);
8828
8829 if (!cfun->has_nonlocal_label
8830 && !cfun->calls_setjmp)
8831 return false;
8832
8833 if (stmt && stmt_can_make_abnormal_goto (stmt))
8834 return false;
8835
8836 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8837 {
8838 if (e->flags & EDGE_ABNORMAL)
8839 {
8840 if (e->flags & EDGE_FALLTHRU)
8841 e->flags &= ~EDGE_ABNORMAL;
8842 else
8843 remove_edge_and_dominated_blocks (e);
8844 changed = true;
8845 }
8846 else
8847 ei_next (&ei);
8848 }
8849
8850 return changed;
8851 }
8852
8853 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
8854
8855 bool
gimple_purge_all_dead_abnormal_call_edges(const_bitmap blocks)8856 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8857 {
8858 bool changed = false;
8859 unsigned i;
8860 bitmap_iterator bi;
8861
8862 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8863 {
8864 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8865
8866 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8867 this basic block already. */
8868 gcc_assert (bb || changed);
8869 if (bb != NULL)
8870 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8871 }
8872
8873 return changed;
8874 }
8875
8876 /* This function is called whenever a new edge is created or
8877 redirected. */
8878
8879 static void
gimple_execute_on_growing_pred(edge e)8880 gimple_execute_on_growing_pred (edge e)
8881 {
8882 basic_block bb = e->dest;
8883
8884 if (!gimple_seq_empty_p (phi_nodes (bb)))
8885 reserve_phi_args_for_new_edge (bb);
8886 }
8887
8888 /* This function is called immediately before edge E is removed from
8889 the edge vector E->dest->preds. */
8890
8891 static void
gimple_execute_on_shrinking_pred(edge e)8892 gimple_execute_on_shrinking_pred (edge e)
8893 {
8894 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8895 remove_phi_args (e);
8896 }
8897
8898 /*---------------------------------------------------------------------------
8899 Helper functions for Loop versioning
8900 ---------------------------------------------------------------------------*/
8901
8902 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8903 of 'first'. Both of them are dominated by 'new_head' basic block. When
8904 'new_head' was created by 'second's incoming edge it received phi arguments
8905 on the edge by split_edge(). Later, additional edge 'e' was created to
   connect 'new_head' and 'first'.  Now this routine adds to this additional
   edge 'e' the phi args that the 'new_head' to 'second' edge received as
   part of the edge splitting.  */
8909
static void
gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
				  basic_block new_head, edge e)
{
  gphi *phi1, *phi2;
  gphi_iterator psi1, psi2;
  tree def;
  edge e2 = find_edge (new_head, second);

  /* Because NEW_HEAD has been created by splitting SECOND's incoming
     edge, we should always have an edge from NEW_HEAD to SECOND.  */
  gcc_assert (e2 != NULL);

  /* Browse all 'second' basic block phi nodes and add phi args to
     edge 'e' for 'first' head.  PHI args are always in correct order.  */

  for (psi2 = gsi_start_phis (second),
       psi1 = gsi_start_phis (first);
       !gsi_end_p (psi2) && !gsi_end_p (psi1);
       gsi_next (&psi2), gsi_next (&psi1))
    {
      phi1 = psi1.phi ();
      phi2 = psi2.phi ();
      /* Copy SECOND's PHI argument on the NEW_HEAD->SECOND edge onto
	 FIRST's corresponding PHI for edge E, together with its source
	 location.  */
      def = PHI_ARG_DEF (phi2, e2->dest_idx);
      add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
    }
}
8937
8938
/* Adds an if-else statement to COND_BB with condition COND_EXPR.
8940 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8941 the destination of the ELSE part. */
8942
8943 static void
gimple_lv_add_condition_to_bb(basic_block first_head ATTRIBUTE_UNUSED,basic_block second_head ATTRIBUTE_UNUSED,basic_block cond_bb,void * cond_e)8944 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8945 basic_block second_head ATTRIBUTE_UNUSED,
8946 basic_block cond_bb, void *cond_e)
8947 {
8948 gimple_stmt_iterator gsi;
8949 gimple *new_cond_expr;
8950 tree cond_expr = (tree) cond_e;
8951 edge e0;
8952
8953 /* Build new conditional expr */
8954 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8955 NULL_TREE, NULL_TREE);
8956
8957 /* Add new cond in cond_bb. */
8958 gsi = gsi_last_bb (cond_bb);
8959 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8960
8961 /* Adjust edges appropriately to connect new head with first head
8962 as well as second head. */
8963 e0 = single_succ_edge (cond_bb);
8964 e0->flags &= ~EDGE_FALLTHRU;
8965 e0->flags |= EDGE_FALSE_VALUE;
8966 }
8967
8968
8969 /* Do book-keeping of basic block BB for the profile consistency checker.
8970 Store the counting in RECORD. */
8971 static void
gimple_account_profile_record(basic_block bb,struct profile_record * record)8972 gimple_account_profile_record (basic_block bb,
8973 struct profile_record *record)
8974 {
8975 gimple_stmt_iterator i;
8976 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8977 {
8978 record->size
8979 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8980 if (bb->count.initialized_p ())
8981 record->time
8982 += estimate_num_insns (gsi_stmt (i),
8983 &eni_time_weights) * bb->count.to_gcov_type ();
8984 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8985 record->time
8986 += estimate_num_insns (gsi_stmt (i),
8987 &eni_time_weights) * bb->count.to_frequency (cfun);
8988 }
8989 }
8990
/* Table of CFG manipulation hooks for the GIMPLE IL, consumed by the
   generic cfghooks machinery.  NULL entries are hooks this IL does not
   provide.  */
struct cfg_hooks gimple_cfg_hooks = {
  "gimple",
  gimple_verify_flow_info,	/* verify_flow_info */
  gimple_dump_bb,		/* dump_bb */
  gimple_dump_bb_for_graph,	/* dump_bb_for_graph */
  create_bb,			/* create_basic_block */
  gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
  gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  gimple_can_remove_branch_p,	/* can_remove_branch_p */
  remove_bb,			/* delete_basic_block */
  gimple_split_block,		/* split_block */
  gimple_move_block_after,	/* move_block_after */
  gimple_can_merge_blocks_p,	/* can_merge_blocks_p */
  gimple_merge_blocks,		/* merge_blocks */
  gimple_predict_edge,		/* predict_edge */
  gimple_predicted_by_p,	/* predicted_by_p */
  gimple_can_duplicate_bb_p,	/* can_duplicate_block_p */
  gimple_duplicate_bb,		/* duplicate_block */
  gimple_split_edge,		/* split_edge */
  gimple_make_forwarder_block,	/* make_forward_block */
  NULL,				/* tidy_fallthru_edge */
  NULL,				/* force_nonfallthru */
  gimple_block_ends_with_call_p,/* block_ends_with_call_p */
  gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  gimple_flow_call_edges_add,	/* flow_call_edges_add */
  gimple_execute_on_growing_pred,	/* execute_on_growing_pred */
  gimple_execute_on_shrinking_pred,	/* execute_on_shrinking_pred */
  gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
  gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
  extract_true_false_edges_from_block, /* extract_cond_bb_edges */
  flush_pending_stmts,		/* flush_pending_stmts */
  gimple_empty_block_p,		/* block_empty_p */
  gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
  gimple_account_profile_record, /* account_profile_record */
};
9027
9028
9029 /* Split all critical edges. Split some extra (not necessarily critical) edges
9030 if FOR_EDGE_INSERTION_P is true. */
9031
unsigned int
split_critical_edges (bool for_edge_insertion_p /* = false */)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
     expensive.  So we want to enable recording of edge to CASE_LABEL_EXPR
     mappings around the calls to split_edge.  */
  start_recording_case_labels ();
  FOR_ALL_BB_FN (bb, cfun)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Abnormal edges are never split.  */
	  if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
	    split_edge (e);
	  /* PRE inserts statements to edges and expects that
	     since split_critical_edges was done beforehand, committing edge
	     insertions will not split more edges.  In addition to critical
	     edges we must split edges that have multiple successors and
	     end by control flow statements, such as RESX.
	     Go ahead and split them too.  This matches the logic in
	     gimple_find_edge_insert_loc.  */
	  else if (for_edge_insertion_p
		   && (!single_pred_p (e->dest)
		       || !gimple_seq_empty_p (phi_nodes (e->dest))
		       || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
		   && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
		   && !(e->flags & EDGE_ABNORMAL))
	    {
	      gimple_stmt_iterator gsi;

	      /* Only split when the source block really ends in a
		 control-flow statement other than a return.  */
	      gsi = gsi_last_bb (e->src);
	      if (!gsi_end_p (gsi)
		  && stmt_ends_bb_p (gsi_stmt (gsi))
		  && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
		      && !gimple_call_builtin_p (gsi_stmt (gsi),
						 BUILT_IN_RETURN)))
		split_edge (e);
	    }
	}
    }
  end_recording_case_labels ();
  return 0;
}
9078
9079 namespace {
9080
/* Pass metadata for the critical-edge-splitting pass ("crited").  */

const pass_data pass_data_split_crit_edges =
{
  GIMPLE_PASS, /* type */
  "crited", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SPLIT_EDGES, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_no_crit_edges, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
9093
/* Gimple pass wrapping split_critical_edges; see
   pass_data_split_crit_edges for its metadata.  */

class pass_split_crit_edges : public gimple_opt_pass
{
public:
  pass_split_crit_edges (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
  {}

  /* opt_pass methods: */
  /* Split the critical edges of the current function; always returns 0
     (no TODO flags).  */
  virtual unsigned int execute (function *) { return split_critical_edges (); }

  /* Cloning allows the pass to be scheduled multiple times.  */
  opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
}; // class pass_split_crit_edges
9106
9107 } // anon namespace
9108
/* Factory returning a fresh instance of the crited pass.  */

gimple_opt_pass *
make_pass_split_crit_edges (gcc::context *ctxt)
{
  return new pass_split_crit_edges (ctxt);
}
9114
9115
9116 /* Insert COND expression which is GIMPLE_COND after STMT
9117 in basic block BB with appropriate basic block split
9118 and creation of a new conditionally executed basic block.
9119 Update profile so the new bb is visited with probability PROB.
9120 Return created basic block. */
basic_block
insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
		profile_probability prob)
{
  /* Split BB after STMT; FALL is the edge from BB to the remainder.  */
  edge fall = split_block (bb, stmt);
  gimple_stmt_iterator iter = gsi_last_bb (bb);
  basic_block new_bb;

  /* Insert cond statement.  */
  gcc_assert (gimple_code (cond) == GIMPLE_COND);
  if (gsi_end_p (iter))
    gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
  else
    gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);

  /* Create conditionally executed block.  */
  new_bb = create_empty_bb (bb);
  edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
  e->probability = prob;
  /* NEW_BB is entered exactly when the condition is true.  */
  new_bb->count = e->count ();
  make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);

  /* Fix edge for split bb.  */
  fall->flags = EDGE_FALSE_VALUE;
  /* Whatever probability does not flow to NEW_BB stays on the false
     edge.  */
  fall->probability -= e->probability;

  /* Update dominance info.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
      set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
    }

  /* Update loop info.  */
  if (current_loops)
    add_bb_to_loop (new_bb, bb->loop_father);

  return new_bb;
}
9160
9161 /* Build a ternary operation and gimplify it. Emit code before GSI.
9162 Return the gimple_val holding the result. */
9163
9164 tree
gimplify_build3(gimple_stmt_iterator * gsi,enum tree_code code,tree type,tree a,tree b,tree c)9165 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
9166 tree type, tree a, tree b, tree c)
9167 {
9168 tree ret;
9169 location_t loc = gimple_location (gsi_stmt (*gsi));
9170
9171 ret = fold_build3_loc (loc, code, type, a, b, c);
9172 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9173 GSI_SAME_STMT);
9174 }
9175
9176 /* Build a binary operation and gimplify it. Emit code before GSI.
9177 Return the gimple_val holding the result. */
9178
9179 tree
gimplify_build2(gimple_stmt_iterator * gsi,enum tree_code code,tree type,tree a,tree b)9180 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
9181 tree type, tree a, tree b)
9182 {
9183 tree ret;
9184
9185 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
9186 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9187 GSI_SAME_STMT);
9188 }
9189
9190 /* Build a unary operation and gimplify it. Emit code before GSI.
9191 Return the gimple_val holding the result. */
9192
9193 tree
gimplify_build1(gimple_stmt_iterator * gsi,enum tree_code code,tree type,tree a)9194 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
9195 tree a)
9196 {
9197 tree ret;
9198
9199 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
9200 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
9201 GSI_SAME_STMT);
9202 }
9203
9204
9205
9206 /* Given a basic block B which ends with a conditional and has
9207 precisely two successors, determine which of the edges is taken if
9208 the conditional is true and which is taken if the conditional is
9209 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9210
9211 void
extract_true_false_edges_from_block(basic_block b,edge * true_edge,edge * false_edge)9212 extract_true_false_edges_from_block (basic_block b,
9213 edge *true_edge,
9214 edge *false_edge)
9215 {
9216 edge e = EDGE_SUCC (b, 0);
9217
9218 if (e->flags & EDGE_TRUE_VALUE)
9219 {
9220 *true_edge = e;
9221 *false_edge = EDGE_SUCC (b, 1);
9222 }
9223 else
9224 {
9225 *false_edge = e;
9226 *true_edge = EDGE_SUCC (b, 1);
9227 }
9228 }
9229
9230
9231 /* From a controlling predicate in the immediate dominator DOM of
9232 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9233 predicate evaluates to true and false and store them to
9234 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9235 they are non-NULL. Returns true if the edges can be determined,
9236 else return false. */
9237
9238 bool
extract_true_false_controlled_edges(basic_block dom,basic_block phiblock,edge * true_controlled_edge,edge * false_controlled_edge)9239 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9240 edge *true_controlled_edge,
9241 edge *false_controlled_edge)
9242 {
9243 basic_block bb = phiblock;
9244 edge true_edge, false_edge, tem;
9245 edge e0 = NULL, e1 = NULL;
9246
9247 /* We have to verify that one edge into the PHI node is dominated
9248 by the true edge of the predicate block and the other edge
9249 dominated by the false edge. This ensures that the PHI argument
9250 we are going to take is completely determined by the path we
9251 take from the predicate block.
9252 We can only use BB dominance checks below if the destination of
9253 the true/false edges are dominated by their edge, thus only
9254 have a single predecessor. */
9255 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9256 tem = EDGE_PRED (bb, 0);
9257 if (tem == true_edge
9258 || (single_pred_p (true_edge->dest)
9259 && (tem->src == true_edge->dest
9260 || dominated_by_p (CDI_DOMINATORS,
9261 tem->src, true_edge->dest))))
9262 e0 = tem;
9263 else if (tem == false_edge
9264 || (single_pred_p (false_edge->dest)
9265 && (tem->src == false_edge->dest
9266 || dominated_by_p (CDI_DOMINATORS,
9267 tem->src, false_edge->dest))))
9268 e1 = tem;
9269 else
9270 return false;
9271 tem = EDGE_PRED (bb, 1);
9272 if (tem == true_edge
9273 || (single_pred_p (true_edge->dest)
9274 && (tem->src == true_edge->dest
9275 || dominated_by_p (CDI_DOMINATORS,
9276 tem->src, true_edge->dest))))
9277 e0 = tem;
9278 else if (tem == false_edge
9279 || (single_pred_p (false_edge->dest)
9280 && (tem->src == false_edge->dest
9281 || dominated_by_p (CDI_DOMINATORS,
9282 tem->src, false_edge->dest))))
9283 e1 = tem;
9284 else
9285 return false;
9286 if (!e0 || !e1)
9287 return false;
9288
9289 if (true_controlled_edge)
9290 *true_controlled_edge = e0;
9291 if (false_controlled_edge)
9292 *false_controlled_edge = e1;
9293
9294 return true;
9295 }
9296
/* Generate the operands *LHS and *RHS of an unsigned comparison that
   determines whether INDEX is in the range [LOW, HIGH].  Place the
   associated stmts before the last statement of BB.  */
9299
void
generate_range_test (basic_block bb, tree index, tree low, tree high,
		     tree *lhs, tree *rhs)
{
  tree type = TREE_TYPE (index);
  /* Unsigned type in which (INDEX - LOW) <= (HIGH - LOW) is equivalent
     to LOW <= INDEX && INDEX <= HIGH.  */
  tree utype = range_check_type (type);

  low = fold_convert (utype, low);
  high = fold_convert (utype, high);

  gimple_seq seq = NULL;
  index = gimple_convert (&seq, utype, index);
  /* *LHS = (utype) INDEX - LOW, built as gimple statements.  */
  *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
  /* *RHS = HIGH - LOW, folded at compile time.  */
  *rhs = const_binop (MINUS_EXPR, utype, high, low);

  /* Emit the supporting statements before the last statement of BB.  */
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
}
9318
/* Return the basic block that belongs to label numbered INDEX
   of a switch statement.  */

basic_block
gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
{
  /* Map the case's LABEL_DECL to its basic block in IFUN.  */
  return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
}
9327
/* Return the default basic block of a switch statement.  */

basic_block
gimple_switch_default_bb (function *ifun, gswitch *gs)
{
  /* The default case is always at label index 0.  */
  return gimple_switch_label_bb (ifun, gs, 0);
}
9335
/* Return the edge that belongs to label numbered INDEX
   of a switch statement.  */

edge
gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
{
  /* The edge runs from the switch's block to the case label's block.  */
  return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
}
9344
/* Return the default edge of a switch statement.  */

edge
gimple_switch_default_edge (function *ifun, gswitch *gs)
{
  /* The default case is always at label index 0.  */
  return gimple_switch_edge (ifun, gs, 0);
}
9352
9353
9354 /* Emit return warnings. */
9355
9356 namespace {
9357
/* Pass metadata for the *warn_function_return pass.  */

const pass_data pass_data_warn_function_return =
{
  GIMPLE_PASS, /* type */
  "*warn_function_return", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
9370
/* Gimple pass emitting the function-return warnings; the work is done
   in the out-of-line execute method.  */

class pass_warn_function_return : public gimple_opt_pass
{
public:
  pass_warn_function_return (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_function_return, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_warn_function_return
9382
/* Emit diagnostics about problematic returns in FUN: note when a
   "noreturn" function can in fact return (and, when optimizing, replace
   such returns with __builtin_unreachable ()), and warn with
   -Wreturn-type when control can reach the end of a non-void function
   without returning a value.  Always returns 0 (no TODO flags).  */

unsigned int
pass_warn_function_return::execute (function *fun)
{
  location_t location;
  gimple *last;
  edge e;
  edge_iterator ei;

  /* Let the target suppress return warnings for this function.  */
  if (!targetm.warn_func_return (fun->decl))
    return 0;

  /* If we have a path to EXIT, then we do return.
     TREE_THIS_VOLATILE on a FUNCTION_DECL means "noreturn", so any
     predecessor of the exit block contradicts that promise.  */
  if (TREE_THIS_VOLATILE (fun->decl)
      && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
    {
      location = UNKNOWN_LOCATION;
      /* Note: no ei_next in the loop header — advancing is done either
	 by ei_safe_edge after remove_edge below, or explicitly in the
	 else branch.  */
      for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
	   (e = ei_safe_edge (ei)); )
	{
	  last = last_stmt (e->src);
	  /* Remember the location of the first actual return.  The
	     assignment to LOCATION is deliberately inside the condition;
	     when not optimizing we can stop at the first hit since no
	     rewriting will take place.  */
	  if ((gimple_code (last) == GIMPLE_RETURN
	       || gimple_call_builtin_p (last, BUILT_IN_RETURN))
	      && location == UNKNOWN_LOCATION
	      && ((location = LOCATION_LOCUS (gimple_location (last)))
		  != UNKNOWN_LOCATION)
	      && !optimize)
	    break;
	  /* When optimizing, replace return stmts in noreturn functions
	     with __builtin_unreachable () call.  */
	  if (optimize && gimple_code (last) == GIMPLE_RETURN)
	    {
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      gimple *new_stmt = gimple_build_call (fndecl, 0);
	      gimple_set_location (new_stmt, gimple_location (last));
	      gimple_stmt_iterator gsi = gsi_for_stmt (last);
	      gsi_replace (&gsi, new_stmt, true);
	      /* Removing the edge implicitly advances the iterator.  */
	      remove_edge (e);
	    }
	  else
	    ei_next (&ei);
	}
      /* Fall back to the closing brace of the function.  */
      if (location == UNKNOWN_LOCATION)
	location = cfun->function_end_locus;

#ifdef notyet
      if (warn_missing_noreturn)
	warning_at (location, 0, "%<noreturn%> function does return");
#endif
    }

  /* If we see "return;" in some basic block, then we do reach the end
     without returning a value.  */
  else if (warn_return_type > 0
	   && !TREE_NO_WARNING (fun->decl)
	   && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
    {
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
	{
	  gimple *last = last_stmt (e->src);
	  greturn *return_stmt = dyn_cast <greturn *> (last);
	  if (return_stmt
	      && gimple_return_retval (return_stmt) == NULL
	      && !gimple_no_warning_p (last))
	    {
	      location = gimple_location (last);
	      if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		location = fun->function_end_locus;
	      /* Set TREE_NO_WARNING only if the warning actually fired,
		 so later passes don't repeat it.  */
	      if (warning_at (location, OPT_Wreturn_type,
			      "control reaches end of non-void function"))
		TREE_NO_WARNING (fun->decl) = 1;
	      break;
	    }
	}
      /* The C++ FE turns fallthrough from the end of non-void function
	 into __builtin_unreachable () call with BUILTINS_LOCATION.
	 Recognize those too.  */
      basic_block bb;
      if (!TREE_NO_WARNING (fun->decl))
	FOR_EACH_BB_FN (bb, fun)
	  if (EDGE_COUNT (bb->succs) == 0)
	    {
	      gimple *last = last_stmt (bb);
	      const enum built_in_function ubsan_missing_ret
		= BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
	      if (last
		  && ((LOCATION_LOCUS (gimple_location (last))
		       == BUILTINS_LOCATION
		       && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
		      || gimple_call_builtin_p (last, ubsan_missing_ret)))
		{
		  /* Point the warning at the statement just before the
		     synthesized trap, when it has a usable location.  */
		  gimple_stmt_iterator gsi = gsi_for_stmt (last);
		  gsi_prev_nondebug (&gsi);
		  gimple *prev = gsi_stmt (gsi);
		  if (prev == NULL)
		    location = UNKNOWN_LOCATION;
		  else
		    location = gimple_location (prev);
		  if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
		    location = fun->function_end_locus;
		  if (warning_at (location, OPT_Wreturn_type,
				  "control reaches end of non-void function"))
		    TREE_NO_WARNING (fun->decl) = 1;
		  break;
		}
	    }
    }
  return 0;
}
9491
9492 } // anon namespace
9493
9494 gimple_opt_pass *
make_pass_warn_function_return(gcc::context * ctxt)9495 make_pass_warn_function_return (gcc::context *ctxt)
9496 {
9497 return new pass_warn_function_return (ctxt);
9498 }
9499
9500 /* Walk a gimplified function and warn for functions whose return value is
9501 ignored and attribute((warn_unused_result)) is set. This is done before
9502 inlining, so we don't have to worry about that. */
9503
9504 static void
do_warn_unused_result(gimple_seq seq)9505 do_warn_unused_result (gimple_seq seq)
9506 {
9507 tree fdecl, ftype;
9508 gimple_stmt_iterator i;
9509
9510 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9511 {
9512 gimple *g = gsi_stmt (i);
9513
9514 switch (gimple_code (g))
9515 {
9516 case GIMPLE_BIND:
9517 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9518 break;
9519 case GIMPLE_TRY:
9520 do_warn_unused_result (gimple_try_eval (g));
9521 do_warn_unused_result (gimple_try_cleanup (g));
9522 break;
9523 case GIMPLE_CATCH:
9524 do_warn_unused_result (gimple_catch_handler (
9525 as_a <gcatch *> (g)));
9526 break;
9527 case GIMPLE_EH_FILTER:
9528 do_warn_unused_result (gimple_eh_filter_failure (g));
9529 break;
9530
9531 case GIMPLE_CALL:
9532 if (gimple_call_lhs (g))
9533 break;
9534 if (gimple_call_internal_p (g))
9535 break;
9536
9537 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9538 LHS. All calls whose value is ignored should be
9539 represented like this. Look for the attribute. */
9540 fdecl = gimple_call_fndecl (g);
9541 ftype = gimple_call_fntype (g);
9542
9543 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9544 {
9545 location_t loc = gimple_location (g);
9546
9547 if (fdecl)
9548 warning_at (loc, OPT_Wunused_result,
9549 "ignoring return value of %qD "
9550 "declared with attribute %<warn_unused_result%>",
9551 fdecl);
9552 else
9553 warning_at (loc, OPT_Wunused_result,
9554 "ignoring return value of function "
9555 "declared with attribute %<warn_unused_result%>");
9556 }
9557 break;
9558
9559 default:
9560 /* Not a container, not a call, or a call whose value is used. */
9561 break;
9562 }
9563 }
9564 }
9565
9566 namespace {
9567
/* Descriptor for the -Wunused-result pass.  It runs on the gimplified
   body before the CFG exists (only PROP_gimple_any is required).  */
const pass_data pass_data_warn_unused_result =
{
  GIMPLE_PASS, /* type */
  "*warn_unused_result", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
9580
/* Pass object for -Wunused-result; see pass_data_warn_unused_result for
   the descriptor.  The real work is done by do_warn_unused_result.  */

class pass_warn_unused_result : public gimple_opt_pass
{
public:
  pass_warn_unused_result (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
  {}

  /* opt_pass methods: */
  /* Only run when -Wunused-result is enabled.  */
  virtual bool gate (function *) { return flag_warn_unused_result; }
  /* Walk the (pre-CFG) gimple body of the current function.  */
  virtual unsigned int execute (function *)
    {
      do_warn_unused_result (gimple_body (current_function_decl));
      return 0;
    }

}; // class pass_warn_unused_result
9597
9598 } // anon namespace
9599
9600 gimple_opt_pass *
make_pass_warn_unused_result(gcc::context * ctxt)9601 make_pass_warn_unused_result (gcc::context *ctxt)
9602 {
9603 return new pass_warn_unused_result (ctxt);
9604 }
9605
/* IPA passes, compilation of earlier functions or inlining
   might have changed some properties, such as marked functions nothrow,
   pure, const or noreturn.
   Remove redundant edges and basic blocks, and create new ones if necessary.

   This pass can't be executed as stand alone pass from pass manager, because
   in between inlining and this fixup the verify_flow_info would fail.

   Returns a TODO_* mask for the pass manager (cleanup-cfg / update-ssa
   as needed).  */

unsigned int
execute_fixup_cfg (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  int todo = 0;
  cgraph_node *node = cgraph_node::get (current_function_decl);
  /* Same scaling is also done by ipa_merge_profiles.  */
  profile_count num = node->count;
  profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
  /* Rescale profile counts when the cgraph count diverged from the
     entry block count (e.g. after IPA cloning).  */
  bool scale = num.initialized_p () && !(num == den);

  if (scale)
    {
      profile_count::adjust_for_ipa_scaling (&num, &den);
      ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
      EXIT_BLOCK_PTR_FOR_FN (cfun)->count
	= EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
    }

  FOR_EACH_BB_FN (bb, cfun)
    {
      if (scale)
	bb->count = bb->count.apply_scale (num, den);
      /* No gsi_next in the loop header: removal of a statement below
	 advances the iterator implicitly, every other path calls
	 gsi_next at the bottom.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);
	  tree decl = is_gimple_call (stmt)
		      ? gimple_call_fndecl (stmt)
		      : NULL;
	  if (decl)
	    {
	      int flags = gimple_call_flags (stmt);
	      /* Calls discovered to be const/pure can no longer take
		 abnormal edges; drop any that became dead.  */
	      if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
		{
		  if (gimple_purge_dead_abnormal_call_edges (bb))
		    todo |= TODO_cleanup_cfg;

		  if (gimple_in_ssa_p (cfun))
		    {
		      todo |= TODO_update_ssa | TODO_cleanup_cfg;
		      update_stmt (stmt);
		    }
		}

	      /* Calls discovered to be noreturn need the code after
		 them removed.  */
	      if (flags & ECF_NORETURN
		  && fixup_noreturn_call (stmt))
		todo |= TODO_cleanup_cfg;
	    }

	  /* Remove stores to variables we marked write-only.
	     Keep access when store has side effect, i.e. in case when source
	     is volatile.  */
	  if (gimple_store_p (stmt)
	      && !gimple_has_side_effects (stmt)
	      && !optimize_debug)
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  unlink_stmt_vdef (stmt);
		  gsi_remove (&gsi, true);
		  release_defs (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		  /* gsi_remove advanced the iterator; skip gsi_next.  */
		  continue;
		}
	    }
	  /* For calls we can simply remove LHS when it is known
	     to be write-only.  */
	  if (is_gimple_call (stmt)
	      && gimple_get_lhs (stmt))
	    {
	      tree lhs = get_base_address (gimple_get_lhs (stmt));

	      if (VAR_P (lhs)
		  && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
		  && varpool_node::get (lhs)->writeonly)
		{
		  gimple_call_set_lhs (stmt, NULL);
		  update_stmt (stmt);
		  todo |= TODO_update_ssa | TODO_cleanup_cfg;
		}
	    }

	  /* Drop EH edges that became dead when the statement was
	     discovered not to throw.  */
	  if (maybe_clean_eh_stmt (stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    todo |= TODO_cleanup_cfg;
	  gsi_next (&gsi);
	}

      /* If we have a basic block with no successors that does not
	 end with a control statement or a noreturn call end it with
	 a call to __builtin_unreachable.  This situation can occur
	 when inlining a noreturn call that does in fact return.  */
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  gimple *stmt = last_stmt (bb);
	  if (!stmt
	      || (!is_ctrl_stmt (stmt)
		  && (!is_gimple_call (stmt)
		      || !gimple_call_noreturn_p (stmt))))
	    {
	      if (stmt && is_gimple_call (stmt))
		gimple_call_set_ctrl_altering (stmt, false);
	      tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      stmt = gimple_build_call (fndecl, 0);
	      gimple_stmt_iterator gsi = gsi_last_bb (bb);
	      gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
	      /* Before inlining the cgraph must track the new call edge;
		 afterwards the cgraph edges are not maintained here.  */
	      if (!cfun->after_inlining)
		{
		  gcall *call_stmt = dyn_cast <gcall *> (stmt);
		  node->create_edge (cgraph_node::get_create (fndecl),
				     call_stmt, bb->count);
		}
	    }
	}
    }
  if (scale)
    {
      update_max_bb_count ();
      compute_function_frequency ();
    }

  /* CFG cleanups may delete blocks/edges the loop structures refer to.  */
  if (current_loops
      && (todo & TODO_cleanup_cfg))
    loops_state_set (LOOPS_NEED_FIXUP);

  return todo;
}
9746
9747 namespace {
9748
/* Descriptor for the fixup_cfg pass; the pass is cloned at several
   points of the pipeline (see pass_fixup_cfg::clone).  */
const pass_data pass_data_fixup_cfg =
{
  GIMPLE_PASS, /* type */
  "fixup_cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
9761
/* Pass object wrapping execute_fixup_cfg; see pass_data_fixup_cfg for
   the descriptor.  */

class pass_fixup_cfg : public gimple_opt_pass
{
public:
  pass_fixup_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
  {}

  /* opt_pass methods: */
  /* clone allows the pass manager to instantiate this pass more than
     once in the pipeline.  */
  opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
  virtual unsigned int execute (function *) { return execute_fixup_cfg (); }

}; // class pass_fixup_cfg
9774
9775 } // anon namespace
9776
9777 gimple_opt_pass *
make_pass_fixup_cfg(gcc::context * ctxt)9778 make_pass_fixup_cfg (gcc::context *ctxt)
9779 {
9780 return new pass_fixup_cfg (ctxt);
9781 }
9782
9783 /* Garbage collection support for edge_def. */
9784
9785 extern void gt_ggc_mx (tree&);
9786 extern void gt_ggc_mx (gimple *&);
9787 extern void gt_ggc_mx (rtx&);
9788 extern void gt_ggc_mx (basic_block&);
9789
9790 static void
gt_ggc_mx(rtx_insn * & x)9791 gt_ggc_mx (rtx_insn *& x)
9792 {
9793 if (x)
9794 gt_ggc_mx_rtx_def ((void *) x);
9795 }
9796
9797 void
gt_ggc_mx(edge_def * e)9798 gt_ggc_mx (edge_def *e)
9799 {
9800 tree block = LOCATION_BLOCK (e->goto_locus);
9801 gt_ggc_mx (e->src);
9802 gt_ggc_mx (e->dest);
9803 if (current_ir_type () == IR_GIMPLE)
9804 gt_ggc_mx (e->insns.g);
9805 else
9806 gt_ggc_mx (e->insns.r);
9807 gt_ggc_mx (block);
9808 }
9809
9810 /* PCH support for edge_def. */
9811
9812 extern void gt_pch_nx (tree&);
9813 extern void gt_pch_nx (gimple *&);
9814 extern void gt_pch_nx (rtx&);
9815 extern void gt_pch_nx (basic_block&);
9816
9817 static void
gt_pch_nx(rtx_insn * & x)9818 gt_pch_nx (rtx_insn *& x)
9819 {
9820 if (x)
9821 gt_pch_nx_rtx_def ((void *) x);
9822 }
9823
9824 void
gt_pch_nx(edge_def * e)9825 gt_pch_nx (edge_def *e)
9826 {
9827 tree block = LOCATION_BLOCK (e->goto_locus);
9828 gt_pch_nx (e->src);
9829 gt_pch_nx (e->dest);
9830 if (current_ir_type () == IR_GIMPLE)
9831 gt_pch_nx (e->insns.g);
9832 else
9833 gt_pch_nx (e->insns.r);
9834 gt_pch_nx (block);
9835 }
9836
9837 void
gt_pch_nx(edge_def * e,gt_pointer_operator op,void * cookie)9838 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9839 {
9840 tree block = LOCATION_BLOCK (e->goto_locus);
9841 op (&(e->src), cookie);
9842 op (&(e->dest), cookie);
9843 if (current_ir_type () == IR_GIMPLE)
9844 op (&(e->insns.g), cookie);
9845 else
9846 op (&(e->insns.r), cookie);
9847 op (&(block), cookie);
9848 }
9849
9850 #if CHECKING_P
9851
9852 namespace selftest {
9853
9854 /* Helper function for CFG selftests: create a dummy function decl
9855 and push it as cfun. */
9856
9857 static tree
push_fndecl(const char * name)9858 push_fndecl (const char *name)
9859 {
9860 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
9861 /* FIXME: this uses input_location: */
9862 tree fndecl = build_fn_decl (name, fn_type);
9863 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
9864 NULL_TREE, integer_type_node);
9865 DECL_RESULT (fndecl) = retval;
9866 push_struct_function (fndecl);
9867 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9868 ASSERT_TRUE (fun != NULL);
9869 init_empty_tree_cfg_for_function (fun);
9870 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
9871 ASSERT_EQ (0, n_edges_for_fn (fun));
9872 return fndecl;
9873 }
9874
9875 /* These tests directly create CFGs.
9876 Compare with the static fns within tree-cfg.c:
9877 - build_gimple_cfg
9878 - make_blocks: calls create_basic_block (seq, bb);
9879 - make_edges. */
9880
9881 /* Verify a simple cfg of the form:
9882 ENTRY -> A -> B -> C -> EXIT. */
9883
9884 static void
test_linear_chain()9885 test_linear_chain ()
9886 {
9887 gimple_register_cfg_hooks ();
9888
9889 tree fndecl = push_fndecl ("cfg_test_linear_chain");
9890 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9891
9892 /* Create some empty blocks. */
9893 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9894 basic_block bb_b = create_empty_bb (bb_a);
9895 basic_block bb_c = create_empty_bb (bb_b);
9896
9897 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
9898 ASSERT_EQ (0, n_edges_for_fn (fun));
9899
9900 /* Create some edges: a simple linear chain of BBs. */
9901 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9902 make_edge (bb_a, bb_b, 0);
9903 make_edge (bb_b, bb_c, 0);
9904 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9905
9906 /* Verify the edges. */
9907 ASSERT_EQ (4, n_edges_for_fn (fun));
9908 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
9909 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
9910 ASSERT_EQ (1, bb_a->preds->length ());
9911 ASSERT_EQ (1, bb_a->succs->length ());
9912 ASSERT_EQ (1, bb_b->preds->length ());
9913 ASSERT_EQ (1, bb_b->succs->length ());
9914 ASSERT_EQ (1, bb_c->preds->length ());
9915 ASSERT_EQ (1, bb_c->succs->length ());
9916 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
9917 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
9918
9919 /* Verify the dominance information
9920 Each BB in our simple chain should be dominated by the one before
9921 it. */
9922 calculate_dominance_info (CDI_DOMINATORS);
9923 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9924 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9925 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
9926 ASSERT_EQ (1, dom_by_b.length ());
9927 ASSERT_EQ (bb_c, dom_by_b[0]);
9928 free_dominance_info (CDI_DOMINATORS);
9929 dom_by_b.release ();
9930
9931 /* Similarly for post-dominance: each BB in our chain is post-dominated
9932 by the one after it. */
9933 calculate_dominance_info (CDI_POST_DOMINATORS);
9934 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
9935 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
9936 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
9937 ASSERT_EQ (1, postdom_by_b.length ());
9938 ASSERT_EQ (bb_a, postdom_by_b[0]);
9939 free_dominance_info (CDI_POST_DOMINATORS);
9940 postdom_by_b.release ();
9941
9942 pop_cfun ();
9943 }
9944
9945 /* Verify a simple CFG of the form:
9946 ENTRY
9947 |
9948 A
9949 / \
9950 /t \f
9951 B C
9952 \ /
9953 \ /
9954 D
9955 |
9956 EXIT. */
9957
9958 static void
test_diamond()9959 test_diamond ()
9960 {
9961 gimple_register_cfg_hooks ();
9962
9963 tree fndecl = push_fndecl ("cfg_test_diamond");
9964 function *fun = DECL_STRUCT_FUNCTION (fndecl);
9965
9966 /* Create some empty blocks. */
9967 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
9968 basic_block bb_b = create_empty_bb (bb_a);
9969 basic_block bb_c = create_empty_bb (bb_a);
9970 basic_block bb_d = create_empty_bb (bb_b);
9971
9972 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
9973 ASSERT_EQ (0, n_edges_for_fn (fun));
9974
9975 /* Create the edges. */
9976 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
9977 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
9978 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
9979 make_edge (bb_b, bb_d, 0);
9980 make_edge (bb_c, bb_d, 0);
9981 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
9982
9983 /* Verify the edges. */
9984 ASSERT_EQ (6, n_edges_for_fn (fun));
9985 ASSERT_EQ (1, bb_a->preds->length ());
9986 ASSERT_EQ (2, bb_a->succs->length ());
9987 ASSERT_EQ (1, bb_b->preds->length ());
9988 ASSERT_EQ (1, bb_b->succs->length ());
9989 ASSERT_EQ (1, bb_c->preds->length ());
9990 ASSERT_EQ (1, bb_c->succs->length ());
9991 ASSERT_EQ (2, bb_d->preds->length ());
9992 ASSERT_EQ (1, bb_d->succs->length ());
9993
9994 /* Verify the dominance information. */
9995 calculate_dominance_info (CDI_DOMINATORS);
9996 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
9997 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
9998 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
9999 vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10000 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10001 dom_by_a.release ();
10002 vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10003 ASSERT_EQ (0, dom_by_b.length ());
10004 dom_by_b.release ();
10005 free_dominance_info (CDI_DOMINATORS);
10006
10007 /* Similarly for post-dominance. */
10008 calculate_dominance_info (CDI_POST_DOMINATORS);
10009 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10010 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10011 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10012 vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10013 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10014 postdom_by_d.release ();
10015 vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10016 ASSERT_EQ (0, postdom_by_b.length ());
10017 postdom_by_b.release ();
10018 free_dominance_info (CDI_POST_DOMINATORS);
10019
10020 pop_cfun ();
10021 }
10022
10023 /* Verify that we can handle a CFG containing a "complete" aka
10024 fully-connected subgraph (where A B C D below all have edges
10025 pointing to each other node, also to themselves).
10026 e.g.:
10027 ENTRY EXIT
10028 | ^
10029 | /
10030 | /
10031 | /
10032 V/
10033 A<--->B
10034 ^^ ^^
10035 | \ / |
10036 | X |
10037 | / \ |
10038 VV VV
10039 C<--->D
10040 */
10041
10042 static void
test_fully_connected()10043 test_fully_connected ()
10044 {
10045 gimple_register_cfg_hooks ();
10046
10047 tree fndecl = push_fndecl ("cfg_fully_connected");
10048 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10049
10050 const int n = 4;
10051
10052 /* Create some empty blocks. */
10053 auto_vec <basic_block> subgraph_nodes;
10054 for (int i = 0; i < n; i++)
10055 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10056
10057 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10058 ASSERT_EQ (0, n_edges_for_fn (fun));
10059
10060 /* Create the edges. */
10061 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10062 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10063 for (int i = 0; i < n; i++)
10064 for (int j = 0; j < n; j++)
10065 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10066
10067 /* Verify the edges. */
10068 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10069 /* The first one is linked to ENTRY/EXIT as well as itself and
10070 everything else. */
10071 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10072 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10073 /* The other ones in the subgraph are linked to everything in
10074 the subgraph (including themselves). */
10075 for (int i = 1; i < n; i++)
10076 {
10077 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10078 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10079 }
10080
10081 /* Verify the dominance information. */
10082 calculate_dominance_info (CDI_DOMINATORS);
10083 /* The initial block in the subgraph should be dominated by ENTRY. */
10084 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10085 get_immediate_dominator (CDI_DOMINATORS,
10086 subgraph_nodes[0]));
10087 /* Every other block in the subgraph should be dominated by the
10088 initial block. */
10089 for (int i = 1; i < n; i++)
10090 ASSERT_EQ (subgraph_nodes[0],
10091 get_immediate_dominator (CDI_DOMINATORS,
10092 subgraph_nodes[i]));
10093 free_dominance_info (CDI_DOMINATORS);
10094
10095 /* Similarly for post-dominance. */
10096 calculate_dominance_info (CDI_POST_DOMINATORS);
10097 /* The initial block in the subgraph should be postdominated by EXIT. */
10098 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10099 get_immediate_dominator (CDI_POST_DOMINATORS,
10100 subgraph_nodes[0]));
10101 /* Every other block in the subgraph should be postdominated by the
10102 initial block, since that leads to EXIT. */
10103 for (int i = 1; i < n; i++)
10104 ASSERT_EQ (subgraph_nodes[0],
10105 get_immediate_dominator (CDI_POST_DOMINATORS,
10106 subgraph_nodes[i]));
10107 free_dominance_info (CDI_POST_DOMINATORS);
10108
10109 pop_cfun ();
10110 }
10111
/* Run all of the selftests within this file.  Each test pushes and pops
   its own dummy cfun, so they are independent of one another.  */

void
tree_cfg_c_tests ()
{
  test_linear_chain ();
  test_diamond ();
  test_fully_connected ();
}
10121
10122 } // namespace selftest
10123
10124 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10125 - loop
10126 - nested loops
10127 - switch statement (a block with many out-edges)
10128 - something that jumps to itself
10129 - etc */
10130
10131 #endif /* CHECKING_P */
10132