/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
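
/* A typical caller pairs these primitives roughly as follows
   (illustrative sketch only; the real callers are genericize_cp_loop
   and genericize_switch_stmt below):

     tree blab = begin_bc_block (bc_break, locus);
     ... lower the body, during which each "break" becomes
	 build1 (GOTO_EXPR, void_type_node, get_bc_label (bc_break)) ...
     finish_bc_block (&stmt_list, bc_break, blab);  */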

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */
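/* The tree built by the function below has, roughly, the shape

     TRY_CATCH_EXPR
       BODY
       EH_FILTER_EXPR <ALLOWED, EH_FILTER_FAILURE = FAILURE>

   i.e. if BODY throws a type not in ALLOWED, FAILURE runs (sketch of
   the resulting GENERIC, for orientation only).  */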

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */
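/* For example, "if (1) f ();" becomes plain "f ();" because the
   (empty) else arm has no side effects, while the general case becomes
   COND_EXPR <cond, then, else> with empty statements substituted for
   missing arms (illustrative summary of the cases handled below).  */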

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */
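/* For a while or for loop the GENERIC emitted below looks roughly like

     LOOP_EXPR
       {
	 if (cond) ; else goto break_lab;   (omitted if COND is constant)
	 body;
	 continue_lab:;
	 incr;
       }
     break_lab:;

   while for do-while loops (COND_IS_FIRST false) the exit test is
   placed after the body and increment instead.  Illustrative sketch
   only.  */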

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
    protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, EXPR_LOC_OR_LOC (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, EXPR_LOC_OR_LOC (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */
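/* E.g. "switch (c) { case 1: ...; break; }" becomes roughly

     SWITCH_EXPR <c, { case 1:; ...; goto break_lab; break_lab:; }>

   where the break label is appended only if a "break" was actually
   seen (illustrative sketch).  */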

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */
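/* E.g. for "A a = f ();" the initializer typically arrives here as
   INIT_EXPR <a, TARGET_EXPR <t, AGGR_INIT_EXPR <f, slot t>>>; the code
   below redirects the AGGR_INIT_EXPR slot to "a" and drops the
   INIT_EXPR wrapper (illustrative sketch of the common case).  */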

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */
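/* The GIMPLE produced below is roughly (sketch)

     try
       {
	 BODY
       }
     catch
       {
	 GIMPLE_EH_MUST_NOT_THROW <terminate>
       }

   so that any exception escaping BODY calls std::terminate.  */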

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */
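/* For example, given "volatile int *p; int a[10]; int f (void);", the
   lvalue "*p" has no side-effects by itself (only a read or write
   through it would), whereas "a[f ()]" does, because evaluating the
   index calls f.  Illustrative examples only.  */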

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
			     is_gimple_call_addr, fb_rvalue);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  else if (is_gimple_variable (CALL_EXPR_FN (*expr_p))
		   && TREE_CODE (CALL_EXPR_FN (*expr_p)) != SSA_NAME)
	    CALL_EXPR_FN (*expr_p)
	      = get_initialized_tmp_var (CALL_EXPR_FN (*expr_p), pre_p,
					 NULL);
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer type.  */
	  if (t != GS_ERROR && TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

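/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */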
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the uids in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note:  The folding of non-OpenMP cases is something to move into the
     middle end.  For now most folding is done only on GENERIC in
     fold-const, so we need to perform this before the transformation to
     GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the first
	 time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (stmt, 0)))
		       == REFERENCE_TYPE))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
1636 
/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify this at runtime.  If
   -fsanitize=return is not enabled, instrument __builtin_unreachable
   instead.  */
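
/* For example (a sketch, not from the testsuite):

     int f (int x) { if (x) return 1; }

   With -fsanitize=return, falling off the end for x == 0 is diagnosed
   at run time with the function's location; when merely optimizing, a
   __builtin_unreachable () call is appended instead.  */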

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
	 improve any optimizations then, and will just break UB code.
	 Don't add it with -fsanitize=unreachable -fno-sanitize=return
	 either: UBSan covers this with ubsan_instrument_return above,
	 where sufficient information is provided, while the
	 __builtin_unreachable () below would just result in a hard to
	 understand runtime error without location if return sanitization
	 is disabled.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
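  /* E.g. (an illustrative sketch): for

       struct S { S (const S &); ~S (); int i; };
       void f (S s);

     S is TREE_ADDRESSABLE, so S is really passed by invisible
     reference, and the PARM_DECL's type is adjusted accordingly
     below.  */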
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
   actually only takes one argument.  */
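
/* For array operands the generated code is roughly (a sketch):

     p1 = &arg1[0]...[0]; p2 = &arg2[0]...[0];
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element); p2 += sizeof (element);
     if (p1 != end1) goto lab;

   with the P2 parts present only when ARG2 is given.  */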

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */
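
/* As the accesses below suggest, CP_OMP_CLAUSE_INFO appears to be a
   TREE_VEC whose elements hold the special member functions recorded
   for the clause: element 0 the constructor used for the clause,
   element 1 the destructor, and element 2 the copy assignment operator
   (a reading of this file, not a documented guarantee).  */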

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable with no mutable
   member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if the OpenMP sharing attribute of DECL is predetermined.  */
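
/* For instance (a sketch): given

     const int limit = 100;
     #pragma omp parallel default(none)
     for (int i = 0; i < limit; i++) ...

   LIMIT is predetermined shared, so default(none) does not require an
   explicit data-sharing clause for it.  */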

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values; those
     are usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in an OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL is
   going to be shared, false if it is going to be privatized.  */
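
/* An illustrative case (a sketch): in an OpenMP construct inside a
   lambda body that privatizes a captured variable, the variable is
   seen through a capture proxy VAR_DECL whose DECL_VALUE_EXPR refers
   to the closure field; when privatizing, the proxy itself is
   remapped, so its DECL_VALUE_EXPR must be ignored.  */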

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}

/* Fold expression X which is used as an rvalue if RVAL is true.  */
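
/* For example (a sketch): with

     const int n = 4;

   folding N as an rvalue yields the INTEGER_CST 4 via
   decl_constant_value, whereas as an lvalue N itself is kept.  */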

static tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval && DECL_P (x)
	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}

/* Fold expression X which is used as an rvalue.  */

static tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
     have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
	 a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
   and certain changes are made to the folding done.  Or should be (FIXME).  We
   never touch maybe_const, as it is only used for the C front-end
   C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}

/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in FOLD_CACHE.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
	     folding of the operand should be in the caches and if in cp_fold_r
	     it will modify it in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using the
	   folding variants, since fold() doesn't do those transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;

    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
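      /* I.e. (an illustrative example) something like

	   &((struct S *) 0)->m

	 where the base address is constant; fold it to the member's
	 offset.  */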
      if (op0 != error_mark_node
	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      if (TREE_NO_WARNING (org_x)
	  && warn_nonnull_compare
	  && COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    TREE_NO_WARNING (x) = 1;
	  /* Otherwise give up on optimizing these; let GIMPLE folders
	     optimize them later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}
      break;

    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);

      /* A COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;

    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time in
	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
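	/* E.g. (an illustrative sketch):

	     constexpr bool f () { return __builtin_constant_p (42); }

	   even at -O0 the builtin should not be folded to 0 here, since
	   constexpr evaluation must still see it.  */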
	if (callee && DECL_BUILT_IN (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is not
	   constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }

    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	vec<tree, va_gc> *vec = make_tree_vector ();
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }

	release_tree_vector (vec);
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    default:
      return org_x;
    }

  fold_cache->put (org_x, x);
  /* Make sure we don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}

#include "gt-cp-cp-gimplify.h"