/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
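
/* Illustration (a sketch with invented label names, not a literal tree
   dump): in

     while (cond) { if (p) break; }

   the BREAK_STMT is lowered to "goto break_lab;" via get_bc_label, which
   marks the label TREE_USED so that finish_bc_block emits "break_lab:;"
   after the loop.  The DECL_CHAIN stack above makes break and continue
   bind to the innermost enclosing loop or switch.  */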

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
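
/* For example (a sketch of the effect, not a literal tree dump):

     if (1) f (); else g ();   becomes   f ();          // dead arm dropped
     if (x) f ();              becomes   x ? f () : ;   // a COND_EXPR

   and an if whose then- and else-arms both begin with the same [[likely]]
   or [[unlikely]] predictor triggers the -Wattributes warning above.  */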

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
    protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
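
/* Illustration (a rough sketch of the resulting GENERIC for a while loop,
   assuming both labels end up used; names are invented):

     LOOP_EXPR:
       if (cond) ; else goto break_lab;   // the "exit"; cond_is_first
       ...body...
       continue_lab:;
       ...incr, for a for-loop...
     break_lab:;

   For a do-while (cond_is_first false), the exit test follows the body,
   so the body runs once before the condition is ever evaluated.  */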

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
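
/* For example (a sketch of which statements -Wunused-value flags here):

     x + 1;           // no side effects, non-void: "statement with no effect"
     (void) (x + 1);  // void type: no warning
     f ();            // has side effects: deferred to warn_if_unused_value
 */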

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}
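
/* Sketch of the rewrite (invented names, not a literal tree dump):

     INIT_EXPR <s, AGGR_INIT_EXPR <S::S, ..., slot=D.tmp>>

   becomes just the AGGR_INIT_EXPR with its slot redirected to "s", so the
   constructor builds directly into the target and no temporary or copy
   remains.  */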

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type, /*ignore_vptr*/true);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
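
/* Examples (assuming "volatile int *p; int a[10]; int f ();"):

     lvalue_has_side_effects (*p)      -> false: only a read or write
					  through *p would be volatile;
     lvalue_has_side_effects (a[f ()]) -> true: the index has side effects;
     lvalue_has_side_effects (f ())    -> true: fall back to
					  TREE_SIDE_EFFECTS.  */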

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_loc (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
			     is_gimple_call_addr, fb_rvalue);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  else if (is_gimple_variable (CALL_EXPR_FN (*expr_p))
		   && TREE_CODE (CALL_EXPR_FN (*expr_p)) != SSA_NAME)
	    CALL_EXPR_FN (*expr_p)
	      = get_initialized_tmp_var (CALL_EXPR_FN (*expr_p), pre_p,
					 NULL);
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  if (t != GS_ERROR && TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	}
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will already be too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
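
/* For example (a sketch, not from any GCC testcase):

     struct S { S (const S &); ~S (); };
     S s;
     #pragma omp task            // no default(shared) clause
     use (s);                    // s is implicitly firstprivate, so S's
				 // copy ctor and dtor get instantiated here,
				 // before gimplification would need them.  */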

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-OMP cases is something to move into
     the middle end.  But since for now most folding is done only on
     GENERIC in fold-const, we need to perform this before transformation
     to GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the
	 first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (TYPE_NAME (vla) || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  expr = build2 (COMPOUND_EXPR, type, dexp, expr);
  return expr;
}
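
/* Sketch of the fix for c++/88256 (GNU VLA extension in C++; names
   invented):

     void f (int n, void *q) { auto p = (int (*)[n]) q; }

   The anonymous type "int[n]" gets an artificial TYPE_DECL, and a
   DECL_EXPR for it is prepended via a COMPOUND_EXPR so that
   gimplify_type_sizes sees the type and gimplifies its size "n".  */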

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

1467     case OMP_PARALLEL:
1468     case OMP_TASK:
1469     case OMP_TASKLOOP:
1470       {
1471 	struct cp_genericize_omp_taskreg omp_ctx;
1472 	tree c, decl;
1473 	splay_tree_node n;
1474 
1475 	*walk_subtrees = 0;
1476 	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1477 	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1478 	omp_ctx.default_shared = omp_ctx.is_parallel;
1479 	omp_ctx.outer = wtd->omp_ctx;
1480 	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1481 	wtd->omp_ctx = &omp_ctx;
1482 	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1483 	  switch (OMP_CLAUSE_CODE (c))
1484 	    {
1485 	    case OMP_CLAUSE_SHARED:
1486 	    case OMP_CLAUSE_PRIVATE:
1487 	    case OMP_CLAUSE_FIRSTPRIVATE:
1488 	    case OMP_CLAUSE_LASTPRIVATE:
1489 	      decl = OMP_CLAUSE_DECL (c);
1490 	      if (decl == error_mark_node || !omp_var_to_track (decl))
1491 		break;
1492 	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1493 	      if (n != NULL)
1494 		break;
1495 	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1496 				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1497 				 ? OMP_CLAUSE_DEFAULT_SHARED
1498 				 : OMP_CLAUSE_DEFAULT_PRIVATE);
1499 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1500 		omp_cxx_notice_variable (omp_ctx.outer, decl);
1501 	      break;
1502 	    case OMP_CLAUSE_DEFAULT:
1503 	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1504 		omp_ctx.default_shared = true;
1505 	    default:
1506 	      break;
1507 	    }
1508 	if (TREE_CODE (stmt) == OMP_TASKLOOP)
1509 	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1510 	else
1511 	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1512 	wtd->omp_ctx = omp_ctx.outer;
1513 	splay_tree_delete (omp_ctx.variables);
1514       }
1515       break;
1516 
1517     case TRY_BLOCK:
1518       {
1519         *walk_subtrees = 0;
1520         tree try_block = wtd->try_block;
1521         wtd->try_block = stmt;
1522         cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1523         wtd->try_block = try_block;
1524         cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1525       }
1526       break;
1527 
1528     case MUST_NOT_THROW_EXPR:
1529       /* MUST_NOT_THROW_COND might be something else with TM.  */
1530       if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1531 	{
1532 	  *walk_subtrees = 0;
1533 	  tree try_block = wtd->try_block;
1534 	  wtd->try_block = stmt;
1535 	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1536 	  wtd->try_block = try_block;
1537 	}
1538       break;
1539 
1540     case THROW_EXPR:
1541       {
1542 	location_t loc = location_of (stmt);
1543 	if (TREE_NO_WARNING (stmt))
1544 	  /* Never mind.  */;
1545 	else if (wtd->try_block)
1546 	  {
1547 	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
1548 	      {
1549 		auto_diagnostic_group d;
1550 		if (warning_at (loc, OPT_Wterminate,
1551 				"throw will always call terminate()")
1552 		    && cxx_dialect >= cxx11
1553 		    && DECL_DESTRUCTOR_P (current_function_decl))
1554 		  inform (loc, "in C++11 destructors default to noexcept");
1555 	      }
1556 	  }
1557 	else
1558 	  {
1559 	    if (warn_cxx11_compat && cxx_dialect < cxx11
1560 		&& DECL_DESTRUCTOR_P (current_function_decl)
1561 		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1562 		    == NULL_TREE)
1563 		&& (get_defaulted_eh_spec (current_function_decl)
1564 		    == empty_except_spec))
1565 	      warning_at (loc, OPT_Wc__11_compat,
1566 			  "in C++11 this throw will terminate because "
1567 			  "destructors default to noexcept");
1568 	  }
1569       }
1570       break;
1571 
1572     case CONVERT_EXPR:
1573       gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1574       break;
1575 
1576     case FOR_STMT:
1577       genericize_for_stmt (stmt_p, walk_subtrees, data);
1578       break;
1579 
1580     case WHILE_STMT:
1581       genericize_while_stmt (stmt_p, walk_subtrees, data);
1582       break;
1583 
1584     case DO_STMT:
1585       genericize_do_stmt (stmt_p, walk_subtrees, data);
1586       break;
1587 
1588     case SWITCH_STMT:
1589       genericize_switch_stmt (stmt_p, walk_subtrees, data);
1590       break;
1591 
1592     case CONTINUE_STMT:
1593       genericize_continue_stmt (stmt_p);
1594       break;
1595 
1596     case BREAK_STMT:
1597       genericize_break_stmt (stmt_p);
1598       break;
1599 
1600     case OMP_FOR:
1601     case OMP_SIMD:
1602     case OMP_DISTRIBUTE:
1603     case OACC_LOOP:
1604       genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1605       break;
1606 
1607     case PTRMEM_CST:
1608       /* By the time we get here we're handing off to the back end, so we don't
1609 	 need or want to preserve PTRMEM_CST anymore.  */
1610       *stmt_p = cplus_expand_constant (stmt);
1611       *walk_subtrees = 0;
1612       break;
1613 
1614     case MEM_REF:
1615       /* For MEM_REF, make sure not to sanitize the second operand even
1616 	 if it has reference type.  It is just an offset with a type
1617 	 holding other information.  There is no other processing we
1618 	 need to do for INTEGER_CSTs, so just ignore the second argument
1619 	 unconditionally.  */
1620       cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1621       *walk_subtrees = 0;
1622       break;
1623 
1624     case NOP_EXPR:
1625       *stmt_p = predeclare_vla (*stmt_p);
1626       if (!wtd->no_sanitize_p
1627 	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1628 	  && TYPE_REF_P (TREE_TYPE (stmt)))
1629 	ubsan_maybe_instrument_reference (stmt_p);
1630       break;
1631 
1632     case CALL_EXPR:
1633       if (!wtd->no_sanitize_p
1634 	  && sanitize_flags_p ((SANITIZE_NULL
1635 				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1636 	{
1637 	  tree fn = CALL_EXPR_FN (stmt);
1638 	  if (fn != NULL_TREE
1639 	      && !error_operand_p (fn)
1640 	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
1641 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1642 	    {
1643 	      bool is_ctor
1644 		= TREE_CODE (fn) == ADDR_EXPR
1645 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1646 		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1647 	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1648 		ubsan_maybe_instrument_member_call (stmt, is_ctor);
1649 	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1650 		cp_ubsan_maybe_instrument_member_call (stmt);
1651 	    }
1652 	  else if (fn == NULL_TREE
1653 		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1654 		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1655 		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
1656 	    *walk_subtrees = 0;
1657 	}
1658       /* Fall through.  */
1659     case AGGR_INIT_EXPR:
1660       /* For calls to a multi-versioned function, overload resolution
1661 	 returns the function with the highest target priority, that is,
1662 	 the version that will be checked for dispatching first.  If this
1663 	 version is inlinable, a direct call to this version can be made;
1664 	 otherwise the call should go through the dispatcher.  */
1665       {
1666 	tree fn = cp_get_callee_fndecl_nofold (stmt);
1667 	if (fn && DECL_FUNCTION_VERSIONED (fn)
1668 	    && (current_function_decl == NULL
1669 		|| !targetm.target_option.can_inline_p (current_function_decl,
1670 							fn)))
1671 	  if (tree dis = get_function_version_dispatcher (fn))
1672 	    {
1673 	      mark_versions_used (dis);
1674 	      dis = build_address (dis);
1675 	      if (TREE_CODE (stmt) == CALL_EXPR)
1676 		CALL_EXPR_FN (stmt) = dis;
1677 	      else
1678 		AGGR_INIT_EXPR_FN (stmt) = dis;
1679 	    }
1680       }
1681       break;
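
      /* A sketch of the redirection above (hypothetical user code):

	   __attribute__ ((target ("default"))) int f () { return 0; }
	   __attribute__ ((target ("avx2"))) int f () { return 1; }
	   int g () { return f (); }

	 Overload resolution picks the highest-priority version of f; when
	 that version cannot be inlined into g, the call is redirected to
	 the ifunc dispatcher built by get_function_version_dispatcher.  */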
1682 
1683     case TARGET_EXPR:
1684       if (TARGET_EXPR_INITIAL (stmt)
1685 	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1686 	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1687 	TARGET_EXPR_NO_ELIDE (stmt) = 1;
1688       break;
1689 
1690     default:
1691       if (IS_TYPE_OR_DECL_P (stmt))
1692 	*walk_subtrees = 0;
1693       break;
1694     }
1695 
1696   p_set->add (*stmt_p);
1697 
1698   return NULL;
1699 }
1700 
1701 /* Lower C++ front end trees to GENERIC in T_P.  */
1702 
1703 static void
1704 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1705 {
1706   struct cp_genericize_data wtd;
1707 
1708   wtd.p_set = new hash_set<tree>;
1709   wtd.bind_expr_stack.create (0);
1710   wtd.omp_ctx = NULL;
1711   wtd.try_block = NULL_TREE;
1712   wtd.no_sanitize_p = false;
1713   wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1714   cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1715   delete wtd.p_set;
1716   wtd.bind_expr_stack.release ();
1717   if (sanitize_flags_p (SANITIZE_VPTR))
1718     cp_ubsan_instrument_member_accesses (t_p);
1719 }
1720 
1721 /* If a non-void function does not obviously end with a return
1722    statement, add ubsan instrumentation code to verify at run time
1723    that it does return.  If -fsanitize=return is not enabled,
1724    instrument with __builtin_unreachable instead.  */
1725 
1726 static void
1727 cp_maybe_instrument_return (tree fndecl)
1728 {
1729   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1730       || DECL_CONSTRUCTOR_P (fndecl)
1731       || DECL_DESTRUCTOR_P (fndecl)
1732       || !targetm.warn_func_return (fndecl))
1733     return;
1734 
1735   if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1736       /* Don't add __builtin_unreachable () if not optimizing; it will not
1737 	 improve any optimizations in that case, it will just break UB code.
1738 	 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
1739 	 UBSan covers this with ubsan_instrument_return below, where sufficient
1740 	 location information is provided, while the __builtin_unreachable ()
1741 	 emitted when return sanitization is disabled would just result in a
1742 	 hard to understand runtime error without a location.  */
1743       && (!optimize
1744 	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1745     return;
1746 
1747   tree t = DECL_SAVED_TREE (fndecl);
1748   while (t)
1749     {
1750       switch (TREE_CODE (t))
1751 	{
1752 	case BIND_EXPR:
1753 	  t = BIND_EXPR_BODY (t);
1754 	  continue;
1755 	case TRY_FINALLY_EXPR:
1756 	case CLEANUP_POINT_EXPR:
1757 	  t = TREE_OPERAND (t, 0);
1758 	  continue;
1759 	case STATEMENT_LIST:
1760 	  {
1761 	    tree_stmt_iterator i = tsi_last (t);
1762 	    while (!tsi_end_p (i))
1763 	      {
1764 		tree p = tsi_stmt (i);
1765 		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1766 		  break;
1767 		tsi_prev (&i);
1768 	      }
1769 	    if (!tsi_end_p (i))
1770 	      {
1771 		t = tsi_stmt (i);
1772 		continue;
1773 	      }
1774 	  }
1775 	  break;
1776 	case RETURN_EXPR:
1777 	  return;
1778 	default:
1779 	  break;
1780 	}
1781       break;
1782     }
1783   if (t == NULL_TREE)
1784     return;
1785   tree *p = &DECL_SAVED_TREE (fndecl);
1786   if (TREE_CODE (*p) == BIND_EXPR)
1787     p = &BIND_EXPR_BODY (*p);
1788 
1789   location_t loc = DECL_SOURCE_LOCATION (fndecl);
1790   if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1791     t = ubsan_instrument_return (loc);
1792   else
1793     {
1794       tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1795       t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1796     }
1797 
1798   append_to_statement_list (t, p);
1799 }
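
/* E.g. (hypothetical user code) a non-void function that can flow off its
   end:

     int f (int i)
     {
       if (i > 0)
	 return i;
     }	// reaching the closing brace here is undefined behavior

   With -fsanitize=return the instrumentation added above reports this at
   run time; otherwise, when optimizing, the appended __builtin_unreachable ()
   tells the optimizers the path cannot be reached.  */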
1800 
1801 void
1802 cp_genericize (tree fndecl)
1803 {
1804   tree t;
1805 
1806   /* Fix up the types of parms passed by invisible reference.  */
1807   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1808     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1809       {
1810 	/* If a function's arguments are copied to create a thunk,
1811 	   then DECL_BY_REFERENCE will be set -- but the type of the
1812 	   argument will be a pointer type, so we will never get
1813 	   here.  */
1814 	gcc_assert (!DECL_BY_REFERENCE (t));
1815 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1816 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1817 	DECL_BY_REFERENCE (t) = 1;
1818 	TREE_ADDRESSABLE (t) = 0;
1819 	relayout_decl (t);
1820       }
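
  /* E.g. (hypothetical user code) a parameter of a type with a non-trivial
     copy constructor:

       struct S { S (const S &); int i; };
       int f (S s) { return s.i; }

     is passed by invisible reference in the C++ ABI, so the loop above
     rewrites the PARM_DECL of s to the reference type it actually has.  */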
1821 
1822   /* Do the same for the return value.  */
1823   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1824     {
1825       t = DECL_RESULT (fndecl);
1826       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1827       DECL_BY_REFERENCE (t) = 1;
1828       TREE_ADDRESSABLE (t) = 0;
1829       relayout_decl (t);
1830       if (DECL_NAME (t))
1831 	{
1832 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1833 	  tree outer = outer_curly_brace_block (current_function_decl);
1834 	  tree var;
1835 
1836 	  if (outer)
1837 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1838 	      if (VAR_P (var)
1839 		  && DECL_NAME (t) == DECL_NAME (var)
1840 		  && DECL_HAS_VALUE_EXPR_P (var)
1841 		  && DECL_VALUE_EXPR (var) == t)
1842 		{
1843 		  tree val = convert_from_reference (t);
1844 		  SET_DECL_VALUE_EXPR (var, val);
1845 		  break;
1846 		}
1847 	}
1848     }
1849 
1850   /* If we're a clone, the body is already GIMPLE.  */
1851   if (DECL_CLONED_FUNCTION_P (fndecl))
1852     return;
1853 
1854   /* Allow cp_genericize calls to be nested.  */
1855   tree save_bc_label[2];
1856   save_bc_label[bc_break] = bc_label[bc_break];
1857   save_bc_label[bc_continue] = bc_label[bc_continue];
1858   bc_label[bc_break] = NULL_TREE;
1859   bc_label[bc_continue] = NULL_TREE;
1860 
1861   /* We do want to see every occurrence of the parms, so we can't just use
1862      walk_tree's hash functionality.  */
1863   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1864 
1865   cp_maybe_instrument_return (fndecl);
1866 
1867   /* Do everything else.  */
1868   c_genericize (fndecl);
1869 
1870   gcc_assert (bc_label[bc_break] == NULL);
1871   gcc_assert (bc_label[bc_continue] == NULL);
1872   bc_label[bc_break] = save_bc_label[bc_break];
1873   bc_label[bc_continue] = save_bc_label[bc_continue];
1874 }
1875 
1876 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1877    NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
1878    actually only takes one argument.  */
1879 
1880 static tree
1881 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1882 {
1883   tree defparm, parm, t;
1884   int i = 0;
1885   int nargs;
1886   tree *argarray;
1887 
1888   if (fn == NULL)
1889     return NULL;
1890 
1891   nargs = list_length (DECL_ARGUMENTS (fn));
1892   argarray = XALLOCAVEC (tree, nargs);
1893 
1894   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1895   if (arg2)
1896     defparm = TREE_CHAIN (defparm);
1897 
1898   bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1899   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1900     {
1901       tree inner_type = TREE_TYPE (arg1);
1902       tree start1, end1, p1;
1903       tree start2 = NULL, p2 = NULL;
1904       tree ret = NULL, lab;
1905 
1906       start1 = arg1;
1907       start2 = arg2;
1908       do
1909 	{
1910 	  inner_type = TREE_TYPE (inner_type);
1911 	  start1 = build4 (ARRAY_REF, inner_type, start1,
1912 			   size_zero_node, NULL, NULL);
1913 	  if (arg2)
1914 	    start2 = build4 (ARRAY_REF, inner_type, start2,
1915 			     size_zero_node, NULL, NULL);
1916 	}
1917       while (TREE_CODE (inner_type) == ARRAY_TYPE);
1918       start1 = build_fold_addr_expr_loc (input_location, start1);
1919       if (arg2)
1920 	start2 = build_fold_addr_expr_loc (input_location, start2);
1921 
1922       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1923       end1 = fold_build_pointer_plus (start1, end1);
1924 
1925       p1 = create_tmp_var (TREE_TYPE (start1));
1926       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1927       append_to_statement_list (t, &ret);
1928 
1929       if (arg2)
1930 	{
1931 	  p2 = create_tmp_var (TREE_TYPE (start2));
1932 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1933 	  append_to_statement_list (t, &ret);
1934 	}
1935 
1936       lab = create_artificial_label (input_location);
1937       t = build1 (LABEL_EXPR, void_type_node, lab);
1938       append_to_statement_list (t, &ret);
1939 
1940       argarray[i++] = p1;
1941       if (arg2)
1942 	argarray[i++] = p2;
1943       /* Handle default arguments.  */
1944       for (parm = defparm; parm && parm != void_list_node;
1945 	   parm = TREE_CHAIN (parm), i++)
1946 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1947 					   TREE_PURPOSE (parm), fn,
1948 					   i - is_method, tf_warning_or_error);
1949       t = build_call_a (fn, i, argarray);
1950       t = fold_convert (void_type_node, t);
1951       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1952       append_to_statement_list (t, &ret);
1953 
1954       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1955       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1956       append_to_statement_list (t, &ret);
1957 
1958       if (arg2)
1959 	{
1960 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1961 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1962 	  append_to_statement_list (t, &ret);
1963 	}
1964 
1965       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1966       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1967       append_to_statement_list (t, &ret);
1968 
1969       return ret;
1970     }
1971   else
1972     {
1973       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1974       if (arg2)
1975 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1976       /* Handle default arguments.  */
1977       for (parm = defparm; parm && parm != void_list_node;
1978 	   parm = TREE_CHAIN (parm), i++)
1979 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1980 					   TREE_PURPOSE (parm), fn,
1981 					   i - is_method, tf_warning_or_error);
1982       t = build_call_a (fn, i, argarray);
1983       t = fold_convert (void_type_node, t);
1984       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1985     }
1986 }
1987 
1988 /* Return code to initialize DECL with its default constructor, or
1989    NULL if there's nothing to do.  */
1990 
1991 tree
1992 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1993 {
1994   tree info = CP_OMP_CLAUSE_INFO (clause);
1995   tree ret = NULL;
1996 
1997   if (info)
1998     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1999 
2000   return ret;
2001 }
2002 
2003 /* Return code to initialize DST with a copy constructor from SRC.  */
2004 
2005 tree
2006 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2007 {
2008   tree info = CP_OMP_CLAUSE_INFO (clause);
2009   tree ret = NULL;
2010 
2011   if (info)
2012     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2013   if (ret == NULL)
2014     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2015 
2016   return ret;
2017 }
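
/* E.g. (hypothetical user code) a firstprivate clause on a class-type
   variable:

     struct S { S (); S (const S &); };
     void f ()
     {
       S s;
     #pragma omp parallel firstprivate (s)
       { }
     }

   initializes each thread's copy of s with S (const S &) through the call
   built by cxx_omp_clause_apply_fn.  */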
2018 
2019 /* Similarly, except use an assignment operator instead.  */
2020 
2021 tree
2022 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2023 {
2024   tree info = CP_OMP_CLAUSE_INFO (clause);
2025   tree ret = NULL;
2026 
2027   if (info)
2028     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2029   if (ret == NULL)
2030     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2031 
2032   return ret;
2033 }
2034 
2035 /* Return code to destroy DECL.  */
2036 
2037 tree
2038 cxx_omp_clause_dtor (tree clause, tree decl)
2039 {
2040   tree info = CP_OMP_CLAUSE_INFO (clause);
2041   tree ret = NULL;
2042 
2043   if (info)
2044     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2045 
2046   return ret;
2047 }
2048 
2049 /* True if OpenMP should privatize what this DECL points to rather
2050    than the DECL itself.  */
2051 
2052 bool
2053 cxx_omp_privatize_by_reference (const_tree decl)
2054 {
2055   return (TYPE_REF_P (TREE_TYPE (decl))
2056 	  || is_invisiref_parm (decl));
2057 }
2058 
2059 /* Return true if DECL is a const-qualified var having no mutable member.  */
2060 bool
2061 cxx_omp_const_qual_no_mutable (tree decl)
2062 {
2063   tree type = TREE_TYPE (decl);
2064   if (TYPE_REF_P (type))
2065     {
2066       if (!is_invisiref_parm (decl))
2067 	return false;
2068       type = TREE_TYPE (type);
2069 
2070       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2071 	{
2072 	  /* NVR doesn't preserve const qualification of the
2073 	     variable's type.  */
2074 	  tree outer = outer_curly_brace_block (current_function_decl);
2075 	  tree var;
2076 
2077 	  if (outer)
2078 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2079 	      if (VAR_P (var)
2080 		  && DECL_NAME (decl) == DECL_NAME (var)
2081 		  && (TYPE_MAIN_VARIANT (type)
2082 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2083 		{
2084 		  if (TYPE_READONLY (TREE_TYPE (var)))
2085 		    type = TREE_TYPE (var);
2086 		  break;
2087 		}
2088 	}
2089     }
2090 
2091   if (type == error_mark_node)
2092     return false;
2093 
2094   /* Variables with const-qualified type having no mutable member
2095      are predetermined shared.  */
2096   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2097     return true;
2098 
2099   return false;
2100 }
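
/* E.g. (hypothetical user code):

     struct S { mutable int cache; };
     const S s;		// const-qualified, but has a mutable member

   Here cxx_omp_const_qual_no_mutable returns false for s, while a const
   object of a mutable-free type would be predetermined shared.  */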
2101 
2102 /* True if the OpenMP sharing attribute of DECL is predetermined.  */
2103 
2104 enum omp_clause_default_kind
2105 cxx_omp_predetermined_sharing_1 (tree decl)
2106 {
2107   /* Static data members are predetermined shared.  */
2108   if (TREE_STATIC (decl))
2109     {
2110       tree ctx = CP_DECL_CONTEXT (decl);
2111       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2112 	return OMP_CLAUSE_DEFAULT_SHARED;
2113 
2114       if (c_omp_predefined_variable (decl))
2115 	return OMP_CLAUSE_DEFAULT_SHARED;
2116     }
2117 
2118   /* `this' may not be specified in data-sharing clauses; still, we need
2119      to predetermine it firstprivate.  */
2120   if (decl == current_class_ptr)
2121     return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2122 
2123   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2124 }
2125 
2126 /* Likewise, but also include the artificial vars.  We don't want to
2127    disallow the artificial vars from being mentioned in explicit clauses,
2128    as we use them e.g. for loop constructs with random access iterators
2129    other than pointers, but during gimplification we want to treat them
2130    as predetermined.  */
2131 
2132 enum omp_clause_default_kind
2133 cxx_omp_predetermined_sharing (tree decl)
2134 {
2135   enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2136   if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2137     return ret;
2138 
2139   /* Predetermine artificial variables holding integral values; those are
2140      usually the result of gimplify_one_sizepos or SAVE_EXPR
2141      gimplification.  */
2142   if (VAR_P (decl)
2143       && DECL_ARTIFICIAL (decl)
2144       && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2145       && !(DECL_LANG_SPECIFIC (decl)
2146 	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2147     return OMP_CLAUSE_DEFAULT_SHARED;
2148 
2149   /* Similarly for typeinfo symbols.  */
2150   if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2151     return OMP_CLAUSE_DEFAULT_SHARED;
2152 
2153   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2154 }
2155 
2156 /* Finalize an implicitly determined clause.  */
2157 
2158 void
2159 cxx_omp_finish_clause (tree c, gimple_seq *)
2160 {
2161   tree decl, inner_type;
2162   bool make_shared = false;
2163 
2164   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2165     return;
2166 
2167   decl = OMP_CLAUSE_DECL (c);
2168   decl = require_complete_type (decl);
2169   inner_type = TREE_TYPE (decl);
2170   if (decl == error_mark_node)
2171     make_shared = true;
2172   else if (TYPE_REF_P (TREE_TYPE (decl)))
2173     inner_type = TREE_TYPE (inner_type);
2174 
2175   /* We're interested in the base element, not arrays.  */
2176   while (TREE_CODE (inner_type) == ARRAY_TYPE)
2177     inner_type = TREE_TYPE (inner_type);
2178 
2179   /* Check for special function availability by building a call to one.
2180      Save the results, because later we won't be in the right context
2181      for making these queries.  */
2182   if (!make_shared
2183       && CLASS_TYPE_P (inner_type)
2184       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2185     make_shared = true;
2186 
2187   if (make_shared)
2188     {
2189       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2190       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2191       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2192     }
2193 }
2194 
2195 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2196    disregarded in an OpenMP construct, because it is going to be
2197    remapped during OpenMP lowering.  SHARED is true if DECL
2198    is going to be shared, false if it is going to be privatized.  */
2199 
2200 bool
2201 cxx_omp_disregard_value_expr (tree decl, bool shared)
2202 {
2203   if (shared)
2204     return false;
2205   if (VAR_P (decl)
2206       && DECL_HAS_VALUE_EXPR_P (decl)
2207       && DECL_ARTIFICIAL (decl)
2208       && DECL_LANG_SPECIFIC (decl)
2209       && DECL_OMP_PRIVATIZED_MEMBER (decl))
2210     return true;
2211   if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2212     return true;
2213   return false;
2214 }
2215 
2216 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2217 
2218 tree
2219 cp_fold_maybe_rvalue (tree x, bool rval)
2220 {
2221   while (true)
2222     {
2223       x = cp_fold (x);
2224       if (rval)
2225 	x = mark_rvalue_use (x);
2226       if (rval && DECL_P (x)
2227 	  && !TYPE_REF_P (TREE_TYPE (x)))
2228 	{
2229 	  tree v = decl_constant_value (x);
2230 	  if (v != x && v != error_mark_node)
2231 	    {
2232 	      x = v;
2233 	      continue;
2234 	    }
2235 	}
2236       break;
2237     }
2238   return x;
2239 }
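
/* E.g. (hypothetical user code), when n below is used as an rvalue, the
   loop above substitutes its constant initializer via decl_constant_value:

     const int n = 4;
     int a[n];		// the rvalue use of n folds to 4  */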
2240 
2241 /* Fold expression X which is used as an rvalue.  */
2242 
2243 tree
2244 cp_fold_rvalue (tree x)
2245 {
2246   return cp_fold_maybe_rvalue (x, true);
2247 }
2248 
2249 /* Perform folding on expression X.  */
2250 
2251 tree
2252 cp_fully_fold (tree x)
2253 {
2254   if (processing_template_decl)
2255     return x;
2256   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2257      have to call both.  */
2258   if (cxx_dialect >= cxx11)
2259     {
2260       x = maybe_constant_value (x);
2261       /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2262 	 a TARGET_EXPR; undo that here.  */
2263       if (TREE_CODE (x) == TARGET_EXPR)
2264 	x = TARGET_EXPR_INITIAL (x);
2265       else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2266 	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2267 	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2268 	x = TREE_OPERAND (x, 0);
2269     }
2270   return cp_fold_rvalue (x);
2271 }
2272 
2273 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2274    in some cases.  */
2275 
2276 tree
2277 cp_fully_fold_init (tree x)
2278 {
2279   if (processing_template_decl)
2280     return x;
2281   x = cp_fully_fold (x);
2282   hash_set<tree> pset;
2283   cp_walk_tree (&x, cp_fold_r, &pset, NULL);
2284   return x;
2285 }
2286 
2287 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2288    and certain changes are made to the folding done.  Or should be (FIXME).  We
2289    never touch maybe_const, as it is only used for the C front-end
2290    C_MAYBE_CONST_EXPR.  */
2291 
2292 tree
2293 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2294 {
2295   return cp_fold_maybe_rvalue (x, !lval);
2296 }
2297 
2298 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2299 
2300 /* Dispose of the whole FOLD_CACHE.  */
2301 
2302 void
2303 clear_fold_cache (void)
2304 {
2305   if (fold_cache != NULL)
2306     fold_cache->empty ();
2307 }
2308 
2309 /* This function tries to fold an expression X.
2310    To avoid combinatorial explosion, folding results are kept in fold_cache.
2311    If X is invalid, we don't fold at all.
2312    For performance reasons we don't cache expressions representing a
2313    declaration or constant.
2314    Returns X or its folded variant.  */
2315 
2316 static tree
2317 cp_fold (tree x)
2318 {
2319   tree op0, op1, op2, op3;
2320   tree org_x = x, r = NULL_TREE;
2321   enum tree_code code;
2322   location_t loc;
2323   bool rval_ops = true;
2324 
2325   if (!x || x == error_mark_node)
2326     return x;
2327 
2328   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2329     return x;
2330 
2331   /* Don't bother to cache DECLs or constants.  */
2332   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2333     return x;
2334 
2335   if (fold_cache == NULL)
2336     fold_cache = hash_map<tree, tree>::create_ggc (101);
2337 
2338   if (tree *cached = fold_cache->get (x))
2339     return *cached;
2340 
2341   code = TREE_CODE (x);
2342   switch (code)
2343     {
2344     case CLEANUP_POINT_EXPR:
2345       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2346 	 effects.  */
2347       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2348       if (!TREE_SIDE_EFFECTS (r))
2349 	x = r;
2350       break;
2351 
2352     case SIZEOF_EXPR:
2353       x = fold_sizeof_expr (x);
2354       break;
2355 
2356     case VIEW_CONVERT_EXPR:
2357       rval_ops = false;
2358       /* FALLTHRU */
2359     case CONVERT_EXPR:
2360     case NOP_EXPR:
2361     case NON_LVALUE_EXPR:
2362 
2363       if (VOID_TYPE_P (TREE_TYPE (x)))
2364 	{
2365 	  /* This is just to make sure we don't end up with casts to
2366 	     void from error_mark_node.  If we just return x, then
2367 	     cp_fold_r might fold the operand into error_mark_node and
2368 	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2369 	     during gimplification doesn't like such casts.
2370 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2371 	     folded operand should already be in the caches, and if called
2372 	     from cp_fold_r it will be modified in place.  */
2373 	  op0 = cp_fold (TREE_OPERAND (x, 0));
2374 	  if (op0 == error_mark_node)
2375 	    x = error_mark_node;
2376 	  break;
2377 	}
2378 
2379       loc = EXPR_LOCATION (x);
2380       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2381 
2382       if (code == CONVERT_EXPR
2383 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2384 	  && op0 != void_node)
2385 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2386 	   folding variants, since fold() doesn't do those transformations.  */
2387 	x = fold (convert (TREE_TYPE (x), op0));
2388       else if (op0 != TREE_OPERAND (x, 0))
2389 	{
2390 	  if (op0 == error_mark_node)
2391 	    x = error_mark_node;
2392 	  else
2393 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2394 	}
2395       else
2396 	x = fold (x);
2397 
2398       /* Conversion of an out-of-range value has implementation-defined
2399 	 behavior; the language considers it different from arithmetic
2400 	 overflow, which is undefined.  */
2401       if (TREE_CODE (op0) == INTEGER_CST
2402 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2403 	TREE_OVERFLOW (x) = false;
2404 
2405       break;
2406 
2407     case INDIRECT_REF:
2408       /* We don't need the decltype(auto) obfuscation anymore.  */
2409       if (REF_PARENTHESIZED_P (x))
2410 	{
2411 	  tree p = maybe_undo_parenthesized_ref (x);
2412 	  return cp_fold (p);
2413 	}
2414       goto unary;
2415 
2416     case ADDR_EXPR:
2417       loc = EXPR_LOCATION (x);
2418       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2419 
2420       /* Cope with user tricks that amount to offsetof.  */
2421       if (op0 != error_mark_node
2422 	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2423 	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2424 	{
2425 	  tree val = get_base_address (op0);
2426 	  if (val
2427 	      && INDIRECT_REF_P (val)
2428 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2429 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2430 	    {
2431 	      val = TREE_OPERAND (val, 0);
2432 	      STRIP_NOPS (val);
2433 	      val = maybe_constant_value (val);
2434 	      if (TREE_CODE (val) == INTEGER_CST)
2435 		return fold_offsetof (op0, TREE_TYPE (x));
2436 	    }
2437 	}
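
      /* The classic user trick recognized here (hypothetical user code):

	   #define my_offsetof(T, f) ((size_t) &((T *) 0)->f)

	 takes the address of a member of an object "placed" at a constant
	 address, so the whole expression folds to a constant through
	 fold_offsetof.  */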
2438       goto finish_unary;
2439 
2440     case REALPART_EXPR:
2441     case IMAGPART_EXPR:
2442       rval_ops = false;
2443       /* FALLTHRU */
2444     case CONJ_EXPR:
2445     case FIX_TRUNC_EXPR:
2446     case FLOAT_EXPR:
2447     case NEGATE_EXPR:
2448     case ABS_EXPR:
2449     case ABSU_EXPR:
2450     case BIT_NOT_EXPR:
2451     case TRUTH_NOT_EXPR:
2452     case FIXED_CONVERT_EXPR:
2453     unary:
2454 
2455       loc = EXPR_LOCATION (x);
2456       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2457 
2458     finish_unary:
2459       if (op0 != TREE_OPERAND (x, 0))
2460 	{
2461 	  if (op0 == error_mark_node)
2462 	    x = error_mark_node;
2463 	  else
2464 	    {
2465 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2466 	      if (code == INDIRECT_REF
2467 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2468 		{
2469 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2470 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2471 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2472 		}
2473 	    }
2474 	}
2475       else
2476 	x = fold (x);
2477 
2478       gcc_assert (TREE_CODE (x) != COND_EXPR
2479 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2480       break;
2481 
2482     case UNARY_PLUS_EXPR:
2483       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2484       if (op0 == error_mark_node)
2485 	x = error_mark_node;
2486       else
2487 	x = fold_convert (TREE_TYPE (x), op0);
2488       break;
2489 
2490     case POSTDECREMENT_EXPR:
2491     case POSTINCREMENT_EXPR:
2492     case INIT_EXPR:
2493     case PREDECREMENT_EXPR:
2494     case PREINCREMENT_EXPR:
2495     case COMPOUND_EXPR:
2496     case MODIFY_EXPR:
2497       rval_ops = false;
2498       /* FALLTHRU */
2499     case POINTER_PLUS_EXPR:
2500     case PLUS_EXPR:
2501     case POINTER_DIFF_EXPR:
2502     case MINUS_EXPR:
2503     case MULT_EXPR:
2504     case TRUNC_DIV_EXPR:
2505     case CEIL_DIV_EXPR:
2506     case FLOOR_DIV_EXPR:
2507     case ROUND_DIV_EXPR:
2508     case TRUNC_MOD_EXPR:
2509     case CEIL_MOD_EXPR:
2510     case ROUND_MOD_EXPR:
2511     case RDIV_EXPR:
2512     case EXACT_DIV_EXPR:
2513     case MIN_EXPR:
2514     case MAX_EXPR:
2515     case LSHIFT_EXPR:
2516     case RSHIFT_EXPR:
2517     case LROTATE_EXPR:
2518     case RROTATE_EXPR:
2519     case BIT_AND_EXPR:
2520     case BIT_IOR_EXPR:
2521     case BIT_XOR_EXPR:
2522     case TRUTH_AND_EXPR:
2523     case TRUTH_ANDIF_EXPR:
2524     case TRUTH_OR_EXPR:
2525     case TRUTH_ORIF_EXPR:
2526     case TRUTH_XOR_EXPR:
2527     case LT_EXPR: case LE_EXPR:
2528     case GT_EXPR: case GE_EXPR:
2529     case EQ_EXPR: case NE_EXPR:
2530     case UNORDERED_EXPR: case ORDERED_EXPR:
2531     case UNLT_EXPR: case UNLE_EXPR:
2532     case UNGT_EXPR: case UNGE_EXPR:
2533     case UNEQ_EXPR: case LTGT_EXPR:
2534     case RANGE_EXPR: case COMPLEX_EXPR:
2535 
2536       loc = EXPR_LOCATION (x);
2537       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2538       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2539 
2540       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2541 	{
2542 	  if (op0 == error_mark_node || op1 == error_mark_node)
2543 	    x = error_mark_node;
2544 	  else
2545 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2546 	}
2547       else
2548 	x = fold (x);
2549 
2550       /* This is only needed for -Wnonnull-compare and only if
2551 	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
2552 	 generation, we always do it.  */
2553       if (COMPARISON_CLASS_P (org_x))
2554 	{
2555 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2556 	    ;
2557 	  else if (COMPARISON_CLASS_P (x))
2558 	    {
2559 	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2560 		TREE_NO_WARNING (x) = 1;
2561 	    }
2562 	  /* Otherwise give up on optimizing these, let GIMPLE folders
2563 	     optimize those later on.  */
2564 	  else if (op0 != TREE_OPERAND (org_x, 0)
2565 		   || op1 != TREE_OPERAND (org_x, 1))
2566 	    {
2567 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2568 	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
2569 		TREE_NO_WARNING (x) = 1;
2570 	    }
2571 	  else
2572 	    x = org_x;
2573 	}
2574       break;
2575 
2576     case VEC_COND_EXPR:
2577     case COND_EXPR:
2578       loc = EXPR_LOCATION (x);
2579       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2580       op1 = cp_fold (TREE_OPERAND (x, 1));
2581       op2 = cp_fold (TREE_OPERAND (x, 2));
2582 
2583       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2584 	{
2585 	  warning_sentinel s (warn_int_in_bool_context);
2586 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2587 	    op1 = cp_truthvalue_conversion (op1);
2588 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2589 	    op2 = cp_truthvalue_conversion (op2);
2590 	}
2591       else if (VOID_TYPE_P (TREE_TYPE (x)))
2592 	{
2593 	  if (TREE_CODE (op0) == INTEGER_CST)
2594 	    {
2595 	      /* If the condition is constant, fold can fold away
2596 		 the COND_EXPR.  Some statement-level uses of COND_EXPR have
2597 		 one of the branches NULL; avoid a crash in fold there.  */
2598 	      if (!op1)
2599 		op1 = build_empty_stmt (loc);
2600 	      if (!op2)
2601 		op2 = build_empty_stmt (loc);
2602 	    }
2603 	  else
2604 	    {
2605 	      /* Otherwise, don't bother folding a void condition, since
2606 		 it can't produce a constant value.  */
2607 	      if (op0 != TREE_OPERAND (x, 0)
2608 		  || op1 != TREE_OPERAND (x, 1)
2609 		  || op2 != TREE_OPERAND (x, 2))
2610 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2611 	      break;
2612 	    }
2613 	}
2614 
2615       if (op0 != TREE_OPERAND (x, 0)
2616 	  || op1 != TREE_OPERAND (x, 1)
2617 	  || op2 != TREE_OPERAND (x, 2))
2618 	{
2619 	  if (op0 == error_mark_node
2620 	      || op1 == error_mark_node
2621 	      || op2 == error_mark_node)
2622 	    x = error_mark_node;
2623 	  else
2624 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2625 	}
2626       else
2627 	x = fold (x);
2628 
2629       /* A COND_EXPR might have incompatible types in branches if one or both
2630 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2631       if (TREE_CODE (x) != code
2632 	  && x != error_mark_node
2633 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2634 	x = fold_convert (TREE_TYPE (org_x), x);
2635 
2636       break;
2637 
2638     case CALL_EXPR:
2639       {
2640 	int i, m, sv = optimize, nw = sv, changed = 0;
2641 	tree callee = get_callee_fndecl (x);
2642 
2643 	/* Some built-in function calls will be evaluated at compile-time in
2644 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2645 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2646 	if (callee && fndecl_built_in_p (callee) && !optimize
2647 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2648 	    && current_function_decl
2649 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2650 	  nw = 1;
2651 
2652 	/* Defer folding __builtin_is_constant_evaluated.  */
2653 	if (callee
2654 	    && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
2655 				BUILT_IN_FRONTEND))
2656 	  break;
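
	/* E.g. std::is_constant_evaluated () has to keep its call form at
	   this point: folding it now would fix the answer before we know
	   whether the enclosing expression is in fact constant-evaluated.  */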
2657 
2658 	x = copy_node (x);
2659 
2660 	m = call_expr_nargs (x);
2661 	for (i = 0; i < m; i++)
2662 	  {
2663 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2664 	    if (r != CALL_EXPR_ARG (x, i))
2665 	      {
2666 		if (r == error_mark_node)
2667 		  {
2668 		    x = error_mark_node;
2669 		    break;
2670 		  }
2671 		changed = 1;
2672 	      }
2673 	    CALL_EXPR_ARG (x, i) = r;
2674 	  }
2675 	if (x == error_mark_node)
2676 	  break;
2677 
2678 	optimize = nw;
2679 	r = fold (x);
2680 	optimize = sv;
2681 
2682 	if (TREE_CODE (r) != CALL_EXPR)
2683 	  {
2684 	    x = cp_fold (r);
2685 	    break;
2686 	  }
2687 
2688 	optimize = nw;
2689 
2690 	/* Invoke maybe_constant_value for functions declared
2691 	   constexpr and not called with AGGR_INIT_EXPRs.
2692 	   TODO:
2693 	   Do constexpr expansion of expressions where the call itself is not
2694 	   constant, but the call followed by an INDIRECT_REF is.  */
2695 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2696 	    && !flag_no_inline)
2697 	  r = maybe_constant_value (x);
2698 	optimize = sv;
2699 
2700         if (TREE_CODE (r) != CALL_EXPR)
2701 	  {
2702 	    if (DECL_CONSTRUCTOR_P (callee))
2703 	      {
2704 		loc = EXPR_LOCATION (x);
2705 		tree s = build_fold_indirect_ref_loc (loc,
2706 						      CALL_EXPR_ARG (x, 0));
2707 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2708 	      }
2709 	    x = r;
2710 	    break;
2711 	  }
2712 
2713 	if (!changed)
2714 	  x = org_x;
2715 	break;
2716       }
2717 
2718     case CONSTRUCTOR:
2719       {
2720 	unsigned i;
2721 	constructor_elt *p;
2722 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2723 	vec<constructor_elt, va_gc> *nelts = NULL;
2724 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2725 	  {
2726 	    tree op = cp_fold (p->value);
2727 	    if (op != p->value)
2728 	      {
2729 		if (op == error_mark_node)
2730 		  {
2731 		    x = error_mark_node;
2732 		    vec_free (nelts);
2733 		    break;
2734 		  }
2735 		if (nelts == NULL)
2736 		  nelts = elts->copy ();
2737 		(*nelts)[i].value = op;
2738 	      }
2739 	  }
2740 	if (nelts)
2741 	  {
2742 	    x = build_constructor (TREE_TYPE (x), nelts);
2743 	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2744 	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2745 	  }
2746 	if (VECTOR_TYPE_P (TREE_TYPE (x)))
2747 	  x = fold (x);
2748 	break;
2749       }
2750     case TREE_VEC:
2751       {
2752 	bool changed = false;
2753 	vec<tree, va_gc> *vec = make_tree_vector ();
2754 	int i, n = TREE_VEC_LENGTH (x);
2755 	vec_safe_reserve (vec, n);
2756 
2757 	for (i = 0; i < n; i++)
2758 	  {
2759 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2760 	    vec->quick_push (op);
2761 	    if (op != TREE_VEC_ELT (x, i))
2762 	      changed = true;
2763 	  }
2764 
2765 	if (changed)
2766 	  {
2767 	    r = copy_node (x);
2768 	    for (i = 0; i < n; i++)
2769 	      TREE_VEC_ELT (r, i) = (*vec)[i];
2770 	    x = r;
2771 	  }
2772 
2773 	release_tree_vector (vec);
2774       }
2775 
2776       break;
2777 
2778     case ARRAY_REF:
2779     case ARRAY_RANGE_REF:
2780 
2781       loc = EXPR_LOCATION (x);
2782       op0 = cp_fold (TREE_OPERAND (x, 0));
2783       op1 = cp_fold (TREE_OPERAND (x, 1));
2784       op2 = cp_fold (TREE_OPERAND (x, 2));
2785       op3 = cp_fold (TREE_OPERAND (x, 3));
2786 
2787       if (op0 != TREE_OPERAND (x, 0)
2788 	  || op1 != TREE_OPERAND (x, 1)
2789 	  || op2 != TREE_OPERAND (x, 2)
2790 	  || op3 != TREE_OPERAND (x, 3))
2791 	{
2792 	  if (op0 == error_mark_node
2793 	      || op1 == error_mark_node
2794 	      || op2 == error_mark_node
2795 	      || op3 == error_mark_node)
2796 	    x = error_mark_node;
2797 	  else
2798 	    {
2799 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2800 	      TREE_READONLY (x) = TREE_READONLY (org_x);
2801 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2802 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2803 	    }
2804 	}
2805 
2806       x = fold (x);
2807       break;
2808 
2809     case SAVE_EXPR:
2810       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2811 	 folding, evaluates to an invariant.  In that case no need to wrap
2812 	 this folded tree with a SAVE_EXPR.  */
2813       r = cp_fold (TREE_OPERAND (x, 0));
2814       if (tree_invariant_p (r))
2815 	x = r;
2816       break;
2817 
2818     default:
2819       return org_x;
2820     }
2821 
2822   if (EXPR_P (x) && TREE_CODE (x) == code)
2823     {
2824       TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2825       TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
2826     }
2827 
2828   fold_cache->put (org_x, x);
2829   /* Prevent folding an already folded result again.  */
2830   if (x != org_x)
2831     fold_cache->put (x, x);
2832 
2833   return x;
2834 }
2835 
2836 /* Look up either "hot" or "cold" in attribute list LIST.  */
2837 
2838 tree
2839 lookup_hotness_attribute (tree list)
2840 {
2841   for (; list; list = TREE_CHAIN (list))
2842     {
2843       tree name = get_attribute_name (list);
2844       if (is_attribute_p ("hot", name)
2845 	  || is_attribute_p ("cold", name)
2846 	  || is_attribute_p ("likely", name)
2847 	  || is_attribute_p ("unlikely", name))
2848 	break;
2849     }
2850   return list;
2851 }
2852 
2853 /* Remove both "hot" and "cold" attributes from LIST.  */
2854 
2855 static tree
2856 remove_hotness_attribute (tree list)
2857 {
2858   list = remove_attribute ("hot", list);
2859   list = remove_attribute ("cold", list);
2860   list = remove_attribute ("likely", list);
2861   list = remove_attribute ("unlikely", list);
2862   return list;
2863 }
2864 
2865 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
2866    PREDICT_EXPR.  */
2867 
2868 tree
2869 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
2870 {
2871   if (std_attrs == error_mark_node)
2872     return std_attrs;
2873   if (tree attr = lookup_hotness_attribute (std_attrs))
2874     {
2875       tree name = get_attribute_name (attr);
2876       bool hot = (is_attribute_p ("hot", name)
2877 		  || is_attribute_p ("likely", name));
2878       tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
2879 				      hot ? TAKEN : NOT_TAKEN);
2880       SET_EXPR_LOCATION (pred, attrs_loc);
2881       add_stmt (pred);
2882       if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
2883 	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
2884 		 get_attribute_name (other), name);
2885       std_attrs = remove_hotness_attribute (std_attrs);
2886     }
2887   return std_attrs;
2888 }
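
/* E.g. (hypothetical user code):

     if (ready) [[likely]]
       process ();

   The [[likely]] attribute is removed from the statement's attribute list
   and a PREDICT_EXPR (PRED_HOT_LABEL, TAKEN) is emitted in its place just
   before the statement.  */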
2889 
2890 #include "gt-cp-cp-gimplify.h"
2891