1 /* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.
2 
3    Copyright (C) 2003-2016 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "fold-const.h"
29 #include "tree-nested.h"
30 #include "calls.h"
31 #include "gimple-iterator.h"
32 #include "gimple-low.h"
33 
34 /* The differences between High GIMPLE and Low GIMPLE are the
35    following:
36 
37    1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).
38 
39    2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
40       flow and exception regions are built as an on-the-side region
41       hierarchy (See tree-eh.c:lower_eh_constructs).
42 
43    3- Multiple identical return statements are grouped into a single
44       return and gotos to the unique return site.  */
45 
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  /* Label marking the unique emission site of STMT; duplicate returns
     are rewritten as gotos to this label.  */
  tree label;

  /* The representative return statement for this return value.  */
  greturn *stmt;
};
typedef struct return_statements_t return_statements_t;
55 
56 
/* State threaded through the lowering recursion.  */
struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  vec<return_statements_t> return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;
};
69 
/* Forward declarations for the mutually recursive lowering routines.  */
static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_try_catch (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);
static void lower_builtin_posix_memalign (gimple_stmt_iterator *);
76 
77 
/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  Returns 0 (no extra TODO flags for the pass manager).  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple *bind;
  gimple *x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  /* Mark the outermost block as emitted; lower_gimple_bind uses
     TREE_ASM_WRITTEN to assert that no block is seen twice.  */
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements.create (8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  bool may_fallthru = gimple_seq_may_fallthru (lowered_body);
  if (may_fallthru
      && (data.return_statements.is_empty ()
	  || (gimple_return_retval (data.return_statements.last().stmt)
	      != NULL)))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      may_fallthru = false;
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!data.return_statements.is_empty ())
    {
      return_statements_t t = data.return_statements.pop ();
      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
      if (may_fallthru)
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for the fallthru too.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (t.stmt, UNKNOWN_LOCATION);
	  may_fallthru = false;
	}
    }

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  /* lower_gimple_bind chained subblocks in reverse; restore source order.  */
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  data.return_statements.release ();
  return 0;
}
157 
namespace {

/* Pass descriptor: requires any GIMPLE, provides lowered control flow.  */
const pass_data pass_data_lower_cf =
{
  GIMPLE_PASS, /* type */
  "lower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lcf, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper that runs lower_function_body on each function.  */
class pass_lower_cf : public gimple_opt_pass
{
public:
  pass_lower_cf (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_cf, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return lower_function_body (); }

}; // class pass_lower_cf

} // anon namespace
186 
/* Create an instance of the GIMPLE-lowering pass for the pass manager.
   The caller (pass manager) owns the returned object.  */

gimple_opt_pass *
make_pass_lower_cf (gcc::context *ctxt)
{
  return new pass_lower_cf (ctxt);
}
192 
193 /* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
194    when they are changed -- if this has to be done, the lowering routine must
195    do it explicitly.  DATA is passed through the recursion.  */
196 
197 static void
lower_sequence(gimple_seq * seq,struct lower_data * data)198 lower_sequence (gimple_seq *seq, struct lower_data *data)
199 {
200   gimple_stmt_iterator gsi;
201 
202   for (gsi = gsi_start (*seq); !gsi_end_p (gsi); )
203     lower_stmt (&gsi, data);
204 }
205 
206 
207 /* Lower the OpenMP directive statement pointed by GSI.  DATA is
208    passed through the recursion.  */
209 
210 static void
lower_omp_directive(gimple_stmt_iterator * gsi,struct lower_data * data)211 lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
212 {
213   gimple *stmt;
214 
215   stmt = gsi_stmt (*gsi);
216 
217   lower_sequence (gimple_omp_body_ptr (stmt), data);
218   gsi_insert_seq_after (gsi, gimple_omp_body (stmt), GSI_CONTINUE_LINKING);
219   gimple_omp_set_body (stmt, NULL);
220   gsi_next (gsi);
221 }
222 
223 
/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Attach the current lexical block to the statement.  */
  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      /* Already-lowered control transfers; execution never falls
	 through to the next statement.  */
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  /* This return is unreachable; drop it.  */
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	lower_try_catch (gsi, data);
      else
	{
	  /* It must be a GIMPLE_TRY_FINALLY.  */
	  bool cannot_fallthru;
	  lower_sequence (gimple_try_eval_ptr (stmt), data);
	  cannot_fallthru = data->cannot_fallthru;

	  /* The finally clause is always executed after the try clause,
	     so if it does not fall through, then the try-finally will not
	     fall through.  Otherwise, if the try clause does not fall
	     through, then when the finally clause falls through it will
	     resume execution wherever the try clause was going.  So the
	     whole try-finally will only fall through if both the try
	     clause and the finally clause fall through.  */
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_try_cleanup_ptr (stmt), data);
	  data->cannot_fallthru |= cannot_fallthru;
	  gsi_next (gsi);
	}
      return;

    case GIMPLE_EH_ELSE:
      /* Lower both alternative bodies; fallthruness is reset below.  */
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	lower_sequence (gimple_eh_else_n_body_ptr (eh_else_stmt), data);
	lower_sequence (gimple_eh_else_e_body_ptr (eh_else_stmt), data);
      }
      break;

    /* Statements needing no lowering work here; conservatively treated
       as possibly falling through (reset after the switch).  */
    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_TASKGROUP:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);
	unsigned i;

	/* Record the current block on any tree-expression arguments.  */
	for (i = 0; i < gimple_call_num_args (stmt); i++)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (EXPR_P (arg))
	      TREE_SET_BLOCK (arg, data->block);
	  }

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
	  {
	    if (DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	      {
		lower_builtin_setjmp (gsi);
		data->cannot_fallthru = false;
		return;
	      }
	    else if (DECL_FUNCTION_CODE (decl) == BUILT_IN_POSIX_MEMALIGN
		     && flag_tree_bit_ccp
		     && gimple_builtin_call_types_compatible_p (stmt, decl))
	      {
		lower_builtin_posix_memalign (gsi);
		return;
	      }
	  }

	/* Calls to noreturn functions never fall through.  */
	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_GRID_BODY:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body_ptr (
			as_a <gtransaction *> (stmt)),
		      data);
      break;

    default:
      gcc_unreachable ();
    }

  /* Conservative default: assume the statement may fall through.  */
  data->cannot_fallthru = false;
  gsi_next (gsi);
}
380 
/* Lower a GIMPLE_BIND at GSI: link its block into the block tree, record
   its variables, lower its body in place, and delete the bind itself.
   DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gbind *stmt = as_a <gbind *> (gsi_stmt (*gsi));
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body_ptr (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      /* Subblocks were prepended above, so they ended up in reverse
	 source order; restore it.  */
      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
434 
/* Same as above, but for a GIMPLE_TRY_CATCH.  Lowers the try body and
   each handler, and computes whether the whole construct may fall
   through into DATA->cannot_fallthru.  */

static void
lower_try_catch (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  bool cannot_fallthru;
  gimple *stmt = gsi_stmt (*gsi);
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  lower_sequence (gimple_try_eval_ptr (stmt), data);
  /* Start from the try body's fallthruness; the cleanup cases below
     may clear it if any handler can fall through.  */
  cannot_fallthru = data->cannot_fallthru;

  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  data->cannot_fallthru = false;
	  lower_sequence (gimple_catch_handler_ptr (
                            as_a <gcatch *> (gsi_stmt (i))),
			  data);
	  if (!data->cannot_fallthru)
	    cannot_fallthru = false;
	}
      break;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure_ptr (gsi_stmt (i)), data);
      if (!data->cannot_fallthru)
	cannot_fallthru = false;
      break;

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      data->cannot_fallthru = false;
      lower_sequence (gimple_try_cleanup_ptr (stmt), data);
      break;
    }

  data->cannot_fallthru = cannot_fallthru;
  gsi_next (gsi);
}
497 
498 
/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of gimple_stmt_may_fallthru.  */

static bool
gimple_try_catch_may_fallthru (gtry *stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  /* Otherwise, inspect the first statement of the cleanup sequence to
     see which kind of handler we have.  */
  i = gsi_start (*gimple_try_cleanup_ptr (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (
					 as_a <gcatch *> (gsi_stmt (i)))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}
550 
551 
/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code. If we're wrong,
   we'll just delete the extra code later.  A NULL STMT (empty
   sequence) is treated as falling through.  */

bool
gimple_stmt_may_fallthru (gimple *stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      /* A bind falls through iff its body does.  */
      return gimple_seq_may_fallthru (
	       gimple_bind_body (as_a <gbind *> (stmt)));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
        return gimple_try_catch_may_fallthru (as_a <gtry *> (stmt));

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      /* Falls through if either the normal or the EH body can.  */
      {
	geh_else *eh_else_stmt = as_a <geh_else *> (stmt);
	return (gimple_seq_may_fallthru (gimple_eh_else_n_body (eh_else_stmt))
		|| gimple_seq_may_fallthru (gimple_eh_else_e_body (
					      eh_else_stmt)));
      }

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      /* Conservative answer for everything else.  */
      return true;
    }
}
619 
620 
621 /* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */
622 
623 bool
gimple_seq_may_fallthru(gimple_seq seq)624 gimple_seq_may_fallthru (gimple_seq seq)
625 {
626   return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
627 }
628 
629 
630 /* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */
631 
632 static void
lower_gimple_return(gimple_stmt_iterator * gsi,struct lower_data * data)633 lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
634 {
635   greturn *stmt = as_a <greturn *> (gsi_stmt (*gsi));
636   gimple *t;
637   int i;
638   return_statements_t tmp_rs;
639 
640   /* Match this up with an existing return statement that's been created.  */
641   for (i = data->return_statements.length () - 1;
642        i >= 0; i--)
643     {
644       tmp_rs = data->return_statements[i];
645 
646       if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
647 	{
648 	  /* Remove the line number from the representative return statement.
649 	     It now fills in for many such returns.  Failure to remove this
650 	     will result in incorrect results for coverage analysis.  */
651 	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);
652 
653 	  goto found;
654 	}
655     }
656 
657   /* Not found.  Create a new label and record the return statement.  */
658   tmp_rs.label = create_artificial_label (cfun->function_end_locus);
659   tmp_rs.stmt = stmt;
660   data->return_statements.safe_push (tmp_rs);
661 
662   /* Generate a goto statement and remove the return statement.  */
663  found:
664   /* When not optimizing, make sure user returns are preserved.  */
665   if (!optimize && gimple_has_location (stmt))
666     DECL_ARTIFICIAL (tmp_rs.label) = 0;
667   t = gimple_build_goto (tmp_rs.label);
668   gimple_set_location (t, gimple_location (stmt));
669   gimple_set_block (t, gimple_block (stmt));
670   gsi_insert_before (gsi, t, GSI_SAME_STMT);
671   gsi_remove (gsi, false);
672 }
673 
/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 2 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_receiver.

   After full lowering, the body of the function should look like:

    {
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
    }

   During cfg creation an extra per-function (or per-OpenMP region)
   block with ABNORMAL_DISPATCHER internal call will be added, unique
   destination of all the abnormal call edges and the unique source of
   all the abnormal edges to the receivers, thus keeping the complexity
   explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple *g;

  /* __builtin_setjmp_{setup,receiver} aren't ECF_RETURNS_TWICE and for RTL
     these builtins are modelled as non-local label jumps to the label
     that is passed to these two builtins, so pretend we have a non-local
     label during GIMPLE passes too.  See PR60003.  */
  cfun->has_nonlocal_label = 1;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  /* LHS of the setjmp call, if any; receives 0 on the direct path and 1
     on the longjmp path.  */
  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}
795 
/* Lower calls to posix_memalign to
     res = posix_memalign (ptr, align, size);
     if (res == 0)
       *ptr = __builtin_assume_aligned (*ptr, align);
   or to
     void *tem;
     res = posix_memalign (&tem, align, size);
     if (res == 0)
       ptr = __builtin_assume_aligned (tem, align);
   in case the first argument was &ptr.  That way we can get at the
   alignment of the heap pointer in CCP.  */

static void
lower_builtin_posix_memalign (gimple_stmt_iterator *gsi)
{
  gimple *stmt, *call = gsi_stmt (*gsi);
  tree pptr = gimple_call_arg (call, 0);
  tree align = gimple_call_arg (call, 1);
  tree res = gimple_call_lhs (call);
  tree ptr = create_tmp_reg (ptr_type_node);
  if (TREE_CODE (pptr) == ADDR_EXPR)
    {
      /* First argument is &ptr: redirect the call to write into a fresh
	 addressable temporary, and load PTR from it afterwards.  */
      tree tem = create_tmp_var (ptr_type_node);
      TREE_ADDRESSABLE (tem) = 1;
      gimple_call_set_arg (call, 0, build_fold_addr_expr (tem));
      stmt = gimple_build_assign (ptr, tem);
    }
  else
    /* Otherwise load PTR through the pointer argument (*pptr).  */
    stmt = gimple_build_assign (ptr,
				fold_build2 (MEM_REF, ptr_type_node, pptr,
					     build_int_cst (ptr_type_node, 0)));
  if (res == NULL_TREE)
    {
      /* The result was unused; we need it for the res == 0 test.  */
      res = create_tmp_reg (integer_type_node);
      gimple_call_set_lhs (call, res);
    }
  tree align_label = create_artificial_label (UNKNOWN_LOCATION);
  tree noalign_label = create_artificial_label (UNKNOWN_LOCATION);
  gimple *cond = gimple_build_cond (EQ_EXPR, res, integer_zero_node,
				   align_label, noalign_label);
  /* Each insertion uses GSI_NEW_STMT, so the statements below end up in
     source order after the call: cond, align_label, load, assume_aligned,
     store back, noalign_label.  */
  gsi_insert_after (gsi, cond, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (align_label), GSI_NEW_STMT);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_call (builtin_decl_implicit (BUILT_IN_ASSUME_ALIGNED),
			    2, ptr, align);
  gimple_call_set_lhs (stmt, ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  stmt = gimple_build_assign (fold_build2 (MEM_REF, ptr_type_node, pptr,
					   build_int_cst (ptr_type_node, 0)),
			      ptr);
  gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
  gsi_insert_after (gsi, gimple_build_label (noalign_label), GSI_NEW_STMT);
}
849 
850 
851 /* Record the variables in VARS into function FN.  */
852 
853 void
record_vars_into(tree vars,tree fn)854 record_vars_into (tree vars, tree fn)
855 {
856   for (; vars; vars = DECL_CHAIN (vars))
857     {
858       tree var = vars;
859 
860       /* BIND_EXPRs contains also function/type/constant declarations
861          we don't need to care about.  */
862       if (TREE_CODE (var) != VAR_DECL)
863 	continue;
864 
865       /* Nothing to do in this case.  */
866       if (DECL_EXTERNAL (var))
867 	continue;
868 
869       /* Record the variable.  */
870       add_local_decl (DECL_STRUCT_FUNCTION (fn), var);
871     }
872 }
873 
874 
/* Record the variables in VARS into current_function_decl.
   Convenience wrapper around record_vars_into.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}
882