/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return statement, with duplicates replaced by gotos to the unique
      return site; see the sketch below.  */
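
/* As a sketch of point 3 (illustrative, not from the original sources):
   a body containing several "return;" statements is rewritten so that
   each one becomes "goto <retlbl>;", and lower_function_body emits the
   single representative pair

	<retlbl>:
	return;

   at the end of the function (see lower_gimple_return).  */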

/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

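/* Instantiate vec.h's object-vector API (VEC_alloc, VEC_safe_push,
   VEC_last, ...) for return_statements_t with heap allocation.  */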
DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);


/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

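  /* Rewrap the outer GIMPLE_BIND into a fresh sequence and lower it
     there; lower_gimple_bind replaces the bind with its flattened body,
     so LOWERED_BODY ends up holding the Low GIMPLE statement list.  */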
  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
			           data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

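/* Pass descriptor for the lowering pass.  The NULL gate means it always
   runs; it requires gimplified IL (PROP_gimple_any) and provides
   lowered control flow (PROP_gimple_lcf).  */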
struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};



/* Verify that the types of the call arguments match those of the
   function declaration.  If we cannot verify this or there is a
   mismatch, return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify that the type of each argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;
	  /* We cannot distinguish a varargs function from the case of
	     excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || DECL_ARG_TYPE (p) == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
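
/* For illustration (a sketch, not from the original sources): given
   "void g (int *);", a mismatched call such as "g (0)" still checks
   out, since the integer constant 0 is fold-convertible to a pointer
   type, whereas passing e.g. a float or a structure for the pointer
   parameter is neither compatible nor fold-convertible, so the
   function returns false.  */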

/* Verify that the types of the arguments and the lhs of CALL_STMT match
   those of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
                                      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

/* Lower sequence SEQ.  Unlike gimplification, the statements are not
   relowered when they are changed -- if this has to be done, the lowering
   routine must do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

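  /* Note: no gsi_next in this loop; lower_stmt is expected to advance
     GSI itself, past whatever it inserts or removes.  */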
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}


/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
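  /* Flatten the directive: reinsert STMT followed by its lowered body
     as consecutive statements at this point, detach the body from STMT,
     and remove the original nested form at GSI.  */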
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}


/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate; simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body (stmt), data);
      lower_sequence (gimple_eh_else_e_body (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower the GIMPLE_BIND statement pointed to by GSI.  DATA is passed
   through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}


/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}


/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence cannot fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}


/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_CONDs are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

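    /* A GIMPLE_EH_ELSE runs its normal-path body, or its exception-path
       body when the enclosing cleanup is entered by the unwinder; it
       may fall through if either body may fall through.  */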
    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}


/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}


/* Lower the GIMPLE_RETURN statement pointed to by GSI.  DATA is passed
   through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a call to __builtin_setjmp pointed to by GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}


/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
         that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
	add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}


/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}