/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "fold-const.h"
#include "tree-nested.h"
#include "calls.h"
#include "gimple-iterator.h"
#include "gimple-low.h"
#include "predict.h"
#include "gimple-predict.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement, and duplicates are replaced with gotos to the
      unique return site.  */
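
/* As a schematic example (the labels and names below are illustrative,
   not what the gimplifier actually generates), a High GIMPLE body such
   as

     {
       int i;

       if (a) goto <T1>; else goto <T2>;
       <T1>:
       return x;
       <T2>:
       return x;
     }

   is lowered into the flat sequence

     if (a) goto <T1>; else goto <T2>;
     <T1>:
     goto <RET>;
     <T2>:
     goto <RET>;
     <RET>:
     return x;

   where the GIMPLE_BIND is gone, its variables have been recorded in
   the function, and the two identical returns share a single return
   site.  */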

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;


struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
              && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If we had begin stmt markers from e.g. PCH, but this compilation
     doesn't want them, lower_stmt will have cleaned them up; we can
     now clear the flag that indicates we had them.  */
  if (!MAY_HAVE_DEBUG_MARKER_STMTS && cfun->debug_nonbind_markers)
    {
      /* This counter need not be exact, but before lowering it most
         certainly will be.  */
      gcc_assert (cfun->debug_marker_count == 0);
      cfun->debug_nonbind_markers = false;
    }

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
          || (gimple_return_retval (data.return_statements.last().stmt)
              != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
        {
          /* Remove the line number from the representative return statement.
             It now fills in for the fallthru too.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (t.stmt, UNKNOWN_LOCATION);
          may_fallthru = false;
        }
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq *seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

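  /* lower_stmt is responsible for advancing the iterator itself,
     either directly or by removing the current statement, so the
     loop deliberately has no gsi_next.  */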
  for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt;

  stmt = gsi_stmt (*gsi);

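  /* Lower the directive's body in place, splice the lowered statements
     into the enclosing sequence immediately after the directive itself,
     and clear the directive's body.  */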
  lower_sequence (gimple_omp_body_ptr (stmt), data);
  gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
  gimple_omp_set_body (stmt, NULL);
  gsi_next (gsi);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
        {
          gsi_remove (gsi, false);
          /* Propagate fallthruness.  */
        }
      else
        {
          lower_gimple_return (gsi, data);
          data->cannot_fallthru = true;
        }
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        lower_try_catch (gsi, data);
      else
        {
          /* It must be a GIMPLE_TRY_FINALLY.  */
          bool cannot_fallthru;
          lower_sequence (gimple_try_eval_ptr (stmt), data);
          cannot_fallthru = data->cannot_fallthru;

          /* The finally clause is always executed after the try clause,
             so if it does not fall through, then the try-finally will not
             fall through.  Otherwise, if the try clause does not fall
             through, then when the finally clause falls through it will
             resume execution wherever the try clause was going.  So the
             whole try-finally will only fall through if both the try
             clause and the finally clause fall through.  */
          data->cannot_fallthru = false;
          lower_sequence (gimple_try_cleanup_ptr (stmt), data);
          data->cannot_fallthru |= cannot_fallthru;
          gsi_next (gsi);
        }
      return;

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
        lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (cfun->debug_nonbind_markers);
      /* We can't possibly have debug bind stmts before lowering, we
         first emit them when entering SSA.  */
      gcc_checking_assert (gimple_debug_nonbind_marker_p (stmt));
      /* Propagate fallthruness.  */
      /* If the function (e.g. from PCH) had debug stmts, but they're
         disabled for this compilation, remove them.  */
      if (!MAY_HAVE_DEBUG_MARKER_STMTS)
        gsi_remove (gsi, true);
      else
        gsi_next (gsi);
      return;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
        tree decl = gimple_call_fndecl (stmt);
        unsigned i;

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (EXPR_P (arg))
              TREE_SET_BLOCK (arg, data->block);
          }

        if (decl
            && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
          {
            if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
              {
                lower_builtin_setjmp (gsi);
                data->cannot_fallthru = false;
                return;
              }
            else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
                     && flag_tree_bit_ccp
                     && gimple_builtin_call_types_compatible_p (stmt, decl))
              {
                lower_builtin_posix_memalign (gsi);
                return;
              }
          }

        if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
          {
            data->cannot_fallthru = true;
            gsi_next (gsi);
            return;
          }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
                        as_a <gtransaction *> (stmt)),
                      data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a GIMPLE_BIND statement at GSI.  DATA is passed through
   the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
        {
          /* The outermost block of the original function may not be the
             outermost statement chain of the gimplified function.  So we
             may see the outermost block just inside the function.  */
          gcc_assert (new_block == DECL_INITIAL (current_function_decl));
          new_block = NULL;
        }
      else
        {
          /* We do not expect to handle duplicate blocks.  */
          gcc_assert (!TREE_ASM_WRITTEN (new_block));
          TREE_ASM_WRITTEN (new_block) = 1;

          /* Block tree may get clobbered by inlining.  Normally this would
             be fixed in rest_of_decl_compilation using block notes, but
             since we are not going to emit them, it is up to us.  */
          BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
          BLOCK_SUBBLOCKS (old_block) = new_block;
          BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
          BLOCK_SUPERCONTEXT (new_block) = old_block;

          data->block = new_block;
        }
    }

  record_vars (gimple_bind_vars (stmt));

  /* Scrap DECL_CHAIN up to BLOCK_VARS to ease GC after we no longer
     need gimple_bind_vars.  */
  tree next;
  /* BLOCK_VARS and gimple_bind_vars share a common sub-chain.  Find
     it by marking all BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 1;
  for (tree var = gimple_bind_vars (stmt);
       var && ! TREE_VISITED (var); var = next)
    {
      next = DECL_CHAIN (var);
      DECL_CHAIN (var) = NULL_TREE;
    }
  /* Unmark BLOCK_VARS.  */
  if (gimple_bind_block (stmt))
    for (tree t = BLOCK_VARS (gimple_bind_block (stmt)); t; t = DECL_CHAIN (t))
      TREE_VISITED (t) = 0;

  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
        = blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          data->cannot_fallthru = false;
          lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
                          data);
          if (!data->cannot_fallthru)
            cannot_fallthru = false;
        }
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
        cannot_fallthru = false;
      break;

    case GIMPLE_DEBUG:
      gcc_checking_assert (gimple_debug_begin_stmt_p (gsi_stmt (i)));
      break;

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}


/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
         catch expression and a body.  The whole try/catch may fall
         through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
        {
          if (gimple_seq_may_fallthru (gimple_catch_handler (
                                         as_a <gcatch *> (gsi_stmt (i)))))
            return true;
        }
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
         exception.  If the exception does not match EH_FILTER_TYPES,
         we will execute EH_FILTER_FAILURE, and we will fall through
         if that falls through.  If the exception does match
         EH_FILTER_TYPES, the stack unwinder will continue up the
         stack, so we will not fall through.  We don't know whether we
         will throw an exception which matches EH_FILTER_TYPES or not,
         so we just ignore EH_FILTER_TYPES and assume that we might
         throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
         exception occurs.  Those statements are implicitly followed
         by a GIMPLE_RESX to resume execution after the exception.  So
         in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
         control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
         to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
         can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (
               gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
         so if it does not fall through, then the try-finally will not
         fall through.  Otherwise, if the try clause does not fall
         through, then when the finally clause falls through it will
         resume execution wherever the try clause was going.  So the
         whole try-finally will only fall through if both the try
         clause and the finally clause fall through.  */
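      /* In table form, restating the comment above (may_fallthru):

           try    finally    try-finally
           no     no         no
           no     yes        no (control resumes at the try's target)
           yes    no         no
           yes    yes        yes  */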
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
              && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      {
        geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
        return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
                || gimple_seq_may_fallthru (gimple_eh_else_e_body (
                                              eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return !gimple_call_noreturn_p (stmt);

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_nondebug_stmt (seq));
}


/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
  gimple *t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = data->return_statements.length () - 1;
       i >= 0; i--)
    {
      tmp_rs = data->return_statements[i];

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
        {
          /* Remove the line number from the representative return statement.
             It now fills in for many such returns.  Failure to remove this
             will result in incorrect results for coverage analysis.  */
          gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

          goto found;
        }
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  data->return_statements.safe_push (tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into two other builtins, namely __builtin_setjmp_setup
   and __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

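  /* DEST is assigned to on both the direct path and the receiver path
     below, so if the original LHS is an SSA name we use a fresh
     temporary and copy it back into ORIG_DEST at the end.  */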
  tree orig_dest = dest = gimple_call_lhs (stmt);
  if (orig_dest && TREE_CODE (orig_dest) == SSA_NAME)
    dest = create_tmp_reg (TREE_TYPE (orig_dest));

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
                                                       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build orig_dest = dest if necessary.  */
  if (dest != orig_dest)
    {
      g = gimple_build_assign (orig_dest, dest);
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    stmt = gimple_build_assign (ptr,
                                fold_build2 (MEM_REF, ptr_type_node, pptr,
                                             build_int_cst (ptr_type_node, 0)));
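  /* The call's return value (the posix_memalign status) is the
     condition for the alignment test below, so materialize an LHS
     if the user discarded it.  */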
  if (res == NULL_TREE)
    {
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
                                    align_label, noalign_label);
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
                            2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
                                           build_int_cst (ptr_type_node, 0)),
                              ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (!VAR_P (var))
        continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
        continue;

      /* Record the variable.  */
      add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
    }
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}