1 /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.
2 
3    Copyright (C) 2002-2018 Free Software Foundation, Inc.
4    Contributed by Jason Merrill <jason@redhat.com>
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "target.h"
26 #include "basic-block.h"
27 #include "cp-tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "stor-layout.h"
31 #include "tree-iterator.h"
32 #include "gimplify.h"
33 #include "c-family/c-ubsan.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "asan.h"
37 
38 /* Forward declarations.  */
39 
40 static tree cp_genericize_r (tree *, int *, void *);
41 static tree cp_fold_r (tree *, int *, void *);
42 static void cp_genericize_tree (tree*, bool);
43 static tree cp_fold (tree);
44 
45 /* Local declarations.  */
46 
47 enum bc_t { bc_break = 0, bc_continue = 1 };
48 
49 /* Stack of labels which are targets for "break" or "continue",
50    linked through TREE_CHAIN.  */
51 static tree bc_label[2];
52 
53 /* Begin a scope which can be exited by a break or continue statement.  BC
54    indicates which.
55 
56    Just creates a label with location LOCATION and pushes it into the current
57    context.  */
58 
59 static tree
begin_bc_block(enum bc_t bc,location_t location)60 begin_bc_block (enum bc_t bc, location_t location)
61 {
62   tree label = create_artificial_label (location);
63   DECL_CHAIN (label) = bc_label[bc];
64   bc_label[bc] = label;
65   if (bc == bc_break)
66     LABEL_DECL_BREAK (label) = true;
67   else
68     LABEL_DECL_CONTINUE (label) = true;
69   return label;
70 }
71 
72 /* Finish a scope which can be exited by a break or continue statement.
73    LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
74    an expression for the contents of the scope.
75 
76    If we saw a break (or continue) in the scope, append a LABEL_EXPR to
77    BLOCK.  Otherwise, just forget the label.  */
78 
79 static void
finish_bc_block(tree * block,enum bc_t bc,tree label)80 finish_bc_block (tree *block, enum bc_t bc, tree label)
81 {
82   gcc_assert (label == bc_label[bc]);
83 
84   if (TREE_USED (label))
85     append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
86 			      block);
87 
88   bc_label[bc] = DECL_CHAIN (label);
89   DECL_CHAIN (label) = NULL_TREE;
90 }
91 
92 /* Get the LABEL_EXPR to represent a break or continue statement
93    in the current block scope.  BC indicates which.  */
94 
95 static tree
get_bc_label(enum bc_t bc)96 get_bc_label (enum bc_t bc)
97 {
98   tree label = bc_label[bc];
99 
100   /* Mark the label used for finish_bc_block.  */
101   TREE_USED (label) = 1;
102   return label;
103 }
104 
105 /* Genericize a TRY_BLOCK.  */
106 
107 static void
genericize_try_block(tree * stmt_p)108 genericize_try_block (tree *stmt_p)
109 {
110   tree body = TRY_STMTS (*stmt_p);
111   tree cleanup = TRY_HANDLERS (*stmt_p);
112 
113   *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
114 }
115 
116 /* Genericize a HANDLER by converting to a CATCH_EXPR.  */
117 
118 static void
genericize_catch_block(tree * stmt_p)119 genericize_catch_block (tree *stmt_p)
120 {
121   tree type = HANDLER_TYPE (*stmt_p);
122   tree body = HANDLER_BODY (*stmt_p);
123 
124   /* FIXME should the caught type go in TREE_TYPE?  */
125   *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
126 }
127 
128 /* A terser interface for building a representation of an exception
129    specification.  */
130 
131 static tree
build_gimple_eh_filter_tree(tree body,tree allowed,tree failure)132 build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
133 {
134   tree t;
135 
136   /* FIXME should the allowed types go in TREE_TYPE?  */
137   t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
138   append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
139 
140   t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
141   append_to_statement_list (body, &TREE_OPERAND (t, 0));
142 
143   return t;
144 }
145 
146 /* Genericize an EH_SPEC_BLOCK by converting it to a
147    TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */
148 
149 static void
genericize_eh_spec_block(tree * stmt_p)150 genericize_eh_spec_block (tree *stmt_p)
151 {
152   tree body = EH_SPEC_STMTS (*stmt_p);
153   tree allowed = EH_SPEC_RAISES (*stmt_p);
154   tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());
155 
156   *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
157   TREE_NO_WARNING (*stmt_p) = true;
158   TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
159 }
160 
161 /* Genericize an IF_STMT by turning it into a COND_EXPR.  */
162 
163 static void
genericize_if_stmt(tree * stmt_p)164 genericize_if_stmt (tree *stmt_p)
165 {
166   tree stmt, cond, then_, else_;
167   location_t locus = EXPR_LOCATION (*stmt_p);
168 
169   stmt = *stmt_p;
170   cond = IF_COND (stmt);
171   then_ = THEN_CLAUSE (stmt);
172   else_ = ELSE_CLAUSE (stmt);
173 
174   if (!then_)
175     then_ = build_empty_stmt (locus);
176   if (!else_)
177     else_ = build_empty_stmt (locus);
178 
179   if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
180     stmt = then_;
181   else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
182     stmt = else_;
183   else
184     stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
185   if (!EXPR_HAS_LOCATION (stmt))
186     protected_set_expr_location (stmt, locus);
187   *stmt_p = stmt;
188 }
189 
190 /* Build a generic representation of one of the C loop forms.  COND is the
191    loop condition or NULL_TREE.  BODY is the (possibly compound) statement
192    controlled by the loop.  INCR is the increment expression of a for-loop,
193    or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
194    evaluated before the loop body as in while and for loops, or after the
195    loop body as in do-while loops.  */
196 
197 static void
genericize_cp_loop(tree * stmt_p,location_t start_locus,tree cond,tree body,tree incr,bool cond_is_first,int * walk_subtrees,void * data)198 genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
199 		    tree incr, bool cond_is_first, int *walk_subtrees,
200 		    void *data)
201 {
202   tree blab, clab;
203   tree exit = NULL;
204   tree stmt_list = NULL;
205   tree debug_begin = NULL;
206 
207   blab = begin_bc_block (bc_break, start_locus);
208   clab = begin_bc_block (bc_continue, start_locus);
209 
210   if (EXPR_LOCATION (incr) == UNKNOWN_LOCATION)
211     protected_set_expr_location (incr, start_locus);
212 
213   cp_walk_tree (&cond, cp_genericize_r, data, NULL);
214   cp_walk_tree (&body, cp_genericize_r, data, NULL);
215   cp_walk_tree (&incr, cp_genericize_r, data, NULL);
216   *walk_subtrees = 0;
217 
218   if (MAY_HAVE_DEBUG_MARKER_STMTS
219       && (!cond || !integer_zerop (cond)))
220     {
221       debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
222       SET_EXPR_LOCATION (debug_begin, EXPR_LOC_OR_LOC (cond, start_locus));
223     }
224 
225   if (cond && TREE_CODE (cond) != INTEGER_CST)
226     {
227       /* If COND is constant, don't bother building an exit.  If it's false,
228 	 we won't build a loop.  If it's true, any exits are in the body.  */
229       location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
230       exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
231 			 get_bc_label (bc_break));
232       exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
233 			      build_empty_stmt (cloc), exit);
234     }
235 
236   if (exit && cond_is_first)
237     {
238       append_to_statement_list (debug_begin, &stmt_list);
239       debug_begin = NULL_TREE;
240       append_to_statement_list (exit, &stmt_list);
241     }
242   append_to_statement_list (body, &stmt_list);
243   finish_bc_block (&stmt_list, bc_continue, clab);
244   if (incr)
245     {
246       if (MAY_HAVE_DEBUG_MARKER_STMTS)
247 	{
248 	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
249 	  SET_EXPR_LOCATION (d, EXPR_LOC_OR_LOC (incr, start_locus));
250 	  append_to_statement_list (d, &stmt_list);
251 	}
252       append_to_statement_list (incr, &stmt_list);
253     }
254   append_to_statement_list (debug_begin, &stmt_list);
255   if (exit && !cond_is_first)
256     append_to_statement_list (exit, &stmt_list);
257 
258   if (!stmt_list)
259     stmt_list = build_empty_stmt (start_locus);
260 
261   tree loop;
262   if (cond && integer_zerop (cond))
263     {
264       if (cond_is_first)
265 	loop = fold_build3_loc (start_locus, COND_EXPR,
266 				void_type_node, cond, stmt_list,
267 				build_empty_stmt (start_locus));
268       else
269 	loop = stmt_list;
270     }
271   else
272     {
273       location_t loc = start_locus;
274       if (!cond || integer_nonzerop (cond))
275 	loc = EXPR_LOCATION (expr_first (body));
276       if (loc == UNKNOWN_LOCATION)
277 	loc = start_locus;
278       loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
279     }
280 
281   stmt_list = NULL;
282   append_to_statement_list (loop, &stmt_list);
283   finish_bc_block (&stmt_list, bc_break, blab);
284   if (!stmt_list)
285     stmt_list = build_empty_stmt (start_locus);
286 
287   *stmt_p = stmt_list;
288 }
289 
290 /* Genericize a FOR_STMT node *STMT_P.  */
291 
292 static void
genericize_for_stmt(tree * stmt_p,int * walk_subtrees,void * data)293 genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
294 {
295   tree stmt = *stmt_p;
296   tree expr = NULL;
297   tree loop;
298   tree init = FOR_INIT_STMT (stmt);
299 
300   if (init)
301     {
302       cp_walk_tree (&init, cp_genericize_r, data, NULL);
303       append_to_statement_list (init, &expr);
304     }
305 
306   genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
307 		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
308   append_to_statement_list (loop, &expr);
309   if (expr == NULL_TREE)
310     expr = loop;
311   *stmt_p = expr;
312 }
313 
314 /* Genericize a WHILE_STMT node *STMT_P.  */
315 
316 static void
genericize_while_stmt(tree * stmt_p,int * walk_subtrees,void * data)317 genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
318 {
319   tree stmt = *stmt_p;
320   genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
321 		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
322 }
323 
324 /* Genericize a DO_STMT node *STMT_P.  */
325 
326 static void
genericize_do_stmt(tree * stmt_p,int * walk_subtrees,void * data)327 genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
328 {
329   tree stmt = *stmt_p;
330   genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
331 		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
332 }
333 
334 /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */
335 
336 static void
genericize_switch_stmt(tree * stmt_p,int * walk_subtrees,void * data)337 genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
338 {
339   tree stmt = *stmt_p;
340   tree break_block, body, cond, type;
341   location_t stmt_locus = EXPR_LOCATION (stmt);
342 
343   break_block = begin_bc_block (bc_break, stmt_locus);
344 
345   body = SWITCH_STMT_BODY (stmt);
346   if (!body)
347     body = build_empty_stmt (stmt_locus);
348   cond = SWITCH_STMT_COND (stmt);
349   type = SWITCH_STMT_TYPE (stmt);
350 
351   cp_walk_tree (&body, cp_genericize_r, data, NULL);
352   cp_walk_tree (&cond, cp_genericize_r, data, NULL);
353   cp_walk_tree (&type, cp_genericize_r, data, NULL);
354   *walk_subtrees = 0;
355 
356   if (TREE_USED (break_block))
357     SWITCH_BREAK_LABEL_P (break_block) = 1;
358   finish_bc_block (&body, bc_break, break_block);
359   *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
360   SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
361   gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
362 		       || !TREE_USED (break_block));
363 }
364 
365 /* Genericize a CONTINUE_STMT node *STMT_P.  */
366 
367 static void
genericize_continue_stmt(tree * stmt_p)368 genericize_continue_stmt (tree *stmt_p)
369 {
370   tree stmt_list = NULL;
371   tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
372   tree label = get_bc_label (bc_continue);
373   location_t location = EXPR_LOCATION (*stmt_p);
374   tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
375   append_to_statement_list_force (pred, &stmt_list);
376   append_to_statement_list (jump, &stmt_list);
377   *stmt_p = stmt_list;
378 }
379 
380 /* Genericize a BREAK_STMT node *STMT_P.  */
381 
382 static void
genericize_break_stmt(tree * stmt_p)383 genericize_break_stmt (tree *stmt_p)
384 {
385   tree label = get_bc_label (bc_break);
386   location_t location = EXPR_LOCATION (*stmt_p);
387   *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
388 }
389 
390 /* Genericize a OMP_FOR node *STMT_P.  */
391 
392 static void
genericize_omp_for_stmt(tree * stmt_p,int * walk_subtrees,void * data)393 genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
394 {
395   tree stmt = *stmt_p;
396   location_t locus = EXPR_LOCATION (stmt);
397   tree clab = begin_bc_block (bc_continue, locus);
398 
399   cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
400   if (TREE_CODE (stmt) != OMP_TASKLOOP)
401     cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
402   cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
403   cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
404   cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
405   cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
406   *walk_subtrees = 0;
407 
408   finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
409 }
410 
411 /* Hook into the middle of gimplifying an OMP_FOR node.  */
412 
413 static enum gimplify_status
cp_gimplify_omp_for(tree * expr_p,gimple_seq * pre_p)414 cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
415 {
416   tree for_stmt = *expr_p;
417   gimple_seq seq = NULL;
418 
419   /* Protect ourselves from recursion.  */
420   if (OMP_FOR_GIMPLIFYING_P (for_stmt))
421     return GS_UNHANDLED;
422   OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;
423 
424   gimplify_and_add (for_stmt, &seq);
425   gimple_seq_add_seq (pre_p, seq);
426 
427   OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;
428 
429   return GS_ALL_DONE;
430 }
431 
432 /*  Gimplify an EXPR_STMT node.  */
433 
434 static void
gimplify_expr_stmt(tree * stmt_p)435 gimplify_expr_stmt (tree *stmt_p)
436 {
437   tree stmt = EXPR_STMT_EXPR (*stmt_p);
438 
439   if (stmt == error_mark_node)
440     stmt = NULL;
441 
442   /* Gimplification of a statement expression will nullify the
443      statement if all its side effects are moved to *PRE_P and *POST_P.
444 
445      In this case we will not want to emit the gimplified statement.
446      However, we may still want to emit a warning, so we do that before
447      gimplification.  */
448   if (stmt && warn_unused_value)
449     {
450       if (!TREE_SIDE_EFFECTS (stmt))
451 	{
452 	  if (!IS_EMPTY_STMT (stmt)
453 	      && !VOID_TYPE_P (TREE_TYPE (stmt))
454 	      && !TREE_NO_WARNING (stmt))
455 	    warning (OPT_Wunused_value, "statement with no effect");
456 	}
457       else
458 	warn_if_unused_value (stmt, input_location);
459     }
460 
461   if (stmt == NULL_TREE)
462     stmt = alloc_stmt_list ();
463 
464   *stmt_p = stmt;
465 }
466 
467 /* Gimplify initialization from an AGGR_INIT_EXPR.  */
468 
469 static void
cp_gimplify_init_expr(tree * expr_p)470 cp_gimplify_init_expr (tree *expr_p)
471 {
472   tree from = TREE_OPERAND (*expr_p, 1);
473   tree to = TREE_OPERAND (*expr_p, 0);
474   tree t;
475 
476   /* What about code that pulls out the temp and uses it elsewhere?  I
477      think that such code never uses the TARGET_EXPR as an initializer.  If
478      I'm wrong, we'll abort because the temp won't have any RTL.  In that
479      case, I guess we'll need to replace references somehow.  */
480   if (TREE_CODE (from) == TARGET_EXPR)
481     from = TARGET_EXPR_INITIAL (from);
482 
483   /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
484      inside the TARGET_EXPR.  */
485   for (t = from; t; )
486     {
487       tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;
488 
489       /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
490 	 replace the slot operand with our target.
491 
492 	 Should we add a target parm to gimplify_expr instead?  No, as in this
493 	 case we want to replace the INIT_EXPR.  */
494       if (TREE_CODE (sub) == AGGR_INIT_EXPR
495 	  || TREE_CODE (sub) == VEC_INIT_EXPR)
496 	{
497 	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
498 	    AGGR_INIT_EXPR_SLOT (sub) = to;
499 	  else
500 	    VEC_INIT_EXPR_SLOT (sub) = to;
501 	  *expr_p = from;
502 
503 	  /* The initialization is now a side-effect, so the container can
504 	     become void.  */
505 	  if (from != sub)
506 	    TREE_TYPE (from) = void_type_node;
507 	}
508 
509       /* Handle aggregate NSDMI.  */
510       replace_placeholders (sub, to);
511 
512       if (t == sub)
513 	break;
514       else
515 	t = TREE_OPERAND (t, 1);
516     }
517 
518 }
519 
520 /* Gimplify a MUST_NOT_THROW_EXPR.  */
521 
522 static enum gimplify_status
gimplify_must_not_throw_expr(tree * expr_p,gimple_seq * pre_p)523 gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
524 {
525   tree stmt = *expr_p;
526   tree temp = voidify_wrapper_expr (stmt, NULL);
527   tree body = TREE_OPERAND (stmt, 0);
528   gimple_seq try_ = NULL;
529   gimple_seq catch_ = NULL;
530   gimple *mnt;
531 
532   gimplify_and_add (body, &try_);
533   mnt = gimple_build_eh_must_not_throw (terminate_fn);
534   gimple_seq_add_stmt_without_update (&catch_, mnt);
535   mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);
536 
537   gimple_seq_add_stmt_without_update (pre_p, mnt);
538   if (temp)
539     {
540       *expr_p = temp;
541       return GS_OK;
542     }
543 
544   *expr_p = NULL;
545   return GS_ALL_DONE;
546 }
547 
548 /* Return TRUE if an operand (OP) of a given TYPE being copied is
549    really just an empty class copy.
550 
551    Check that the operand has a simple form so that TARGET_EXPRs and
552    non-empty CONSTRUCTORs get reduced properly, and we leave the
553    return slot optimization alone because it isn't a copy.  */
554 
555 static bool
simple_empty_class_p(tree type,tree op)556 simple_empty_class_p (tree type, tree op)
557 {
558   return
559     ((TREE_CODE (op) == COMPOUND_EXPR
560       && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
561      || TREE_CODE (op) == EMPTY_CLASS_EXPR
562      || is_gimple_lvalue (op)
563      || INDIRECT_REF_P (op)
564      || (TREE_CODE (op) == CONSTRUCTOR
565 	 && CONSTRUCTOR_NELTS (op) == 0
566 	 && !TREE_CLOBBER_P (op))
567      || (TREE_CODE (op) == CALL_EXPR
568 	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
569     && is_really_empty_class (type);
570 }
571 
572 /* Returns true if evaluating E as an lvalue has side-effects;
573    specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
574    have side-effects until there is a read or write through it.  */
575 
576 static bool
lvalue_has_side_effects(tree e)577 lvalue_has_side_effects (tree e)
578 {
579   if (!TREE_SIDE_EFFECTS (e))
580     return false;
581   while (handled_component_p (e))
582     {
583       if (TREE_CODE (e) == ARRAY_REF
584 	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
585 	return true;
586       e = TREE_OPERAND (e, 0);
587     }
588   if (DECL_P (e))
589     /* Just naming a variable has no side-effects.  */
590     return false;
591   else if (INDIRECT_REF_P (e))
592     /* Similarly, indirection has no side-effects.  */
593     return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
594   else
595     /* For anything else, trust TREE_SIDE_EFFECTS.  */
596     return TREE_SIDE_EFFECTS (e);
597 }
598 
599 /* Do C++-specific gimplification.  Args are as for gimplify_expr.  */
600 
601 int
cp_gimplify_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p)602 cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
603 {
604   int saved_stmts_are_full_exprs_p = 0;
605   location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
606   enum tree_code code = TREE_CODE (*expr_p);
607   enum gimplify_status ret;
608 
609   if (STATEMENT_CODE_P (code))
610     {
611       saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
612       current_stmt_tree ()->stmts_are_full_exprs_p
613 	= STMT_IS_FULL_EXPR_P (*expr_p);
614     }
615 
616   switch (code)
617     {
618     case AGGR_INIT_EXPR:
619       simplify_aggr_init_expr (expr_p);
620       ret = GS_OK;
621       break;
622 
623     case VEC_INIT_EXPR:
624       {
625 	location_t loc = input_location;
626 	tree init = VEC_INIT_EXPR_INIT (*expr_p);
627 	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
628 	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
629 	input_location = EXPR_LOCATION (*expr_p);
630 	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
631 				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
632 				  from_array,
633 				  tf_warning_or_error);
634 	hash_set<tree> pset;
635 	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
636 	cp_genericize_tree (expr_p, false);
637 	ret = GS_OK;
638 	input_location = loc;
639       }
640       break;
641 
642     case THROW_EXPR:
643       /* FIXME communicate throw type to back end, probably by moving
644 	 THROW_EXPR into ../tree.def.  */
645       *expr_p = TREE_OPERAND (*expr_p, 0);
646       ret = GS_OK;
647       break;
648 
649     case MUST_NOT_THROW_EXPR:
650       ret = gimplify_must_not_throw_expr (expr_p, pre_p);
651       break;
652 
653       /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
654 	 LHS of an assignment might also be involved in the RHS, as in bug
655 	 25979.  */
656     case INIT_EXPR:
657       cp_gimplify_init_expr (expr_p);
658       if (TREE_CODE (*expr_p) != INIT_EXPR)
659 	return GS_OK;
660       /* Fall through.  */
661     case MODIFY_EXPR:
662     modify_expr_case:
663       {
664 	/* If the back end isn't clever enough to know that the lhs and rhs
665 	   types are the same, add an explicit conversion.  */
666 	tree op0 = TREE_OPERAND (*expr_p, 0);
667 	tree op1 = TREE_OPERAND (*expr_p, 1);
668 
669 	if (!error_operand_p (op0)
670 	    && !error_operand_p (op1)
671 	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
672 		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
673 	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
674 	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
675 					      TREE_TYPE (op0), op1);
676 
677 	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
678 	  {
679 	    /* Remove any copies of empty classes.  Also drop volatile
680 	       variables on the RHS to avoid infinite recursion from
681 	       gimplify_expr trying to load the value.  */
682 	    if (TREE_SIDE_EFFECTS (op1))
683 	      {
684 		if (TREE_THIS_VOLATILE (op1)
685 		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
686 		  op1 = build_fold_addr_expr (op1);
687 
688 		gimplify_and_add (op1, pre_p);
689 	      }
690 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
691 			   is_gimple_lvalue, fb_lvalue);
692 	    *expr_p = TREE_OPERAND (*expr_p, 0);
693 	  }
694 	/* P0145 says that the RHS is sequenced before the LHS.
695 	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
696 	   isn't quite strong enough in two cases:
697 
698 	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
699 	   mean it's evaluated after the LHS.
700 
701 	   2) the value calculation of the RHS is also sequenced before the
702 	   LHS, so for scalar assignment we need to preevaluate if the
703 	   RHS could be affected by LHS side-effects even if it has no
704 	   side-effects of its own.  We don't need this for classes because
705 	   class assignment takes its RHS by reference.  */
706        else if (flag_strong_eval_order > 1
707                 && TREE_CODE (*expr_p) == MODIFY_EXPR
708                 && lvalue_has_side_effects (op0)
709 		&& (TREE_CODE (op1) == CALL_EXPR
710 		    || (SCALAR_TYPE_P (TREE_TYPE (op1))
711 			&& !TREE_CONSTANT (op1))))
712 	 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
713       }
714       ret = GS_OK;
715       break;
716 
717     case EMPTY_CLASS_EXPR:
718       /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
719       *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
720       ret = GS_OK;
721       break;
722 
723     case BASELINK:
724       *expr_p = BASELINK_FUNCTIONS (*expr_p);
725       ret = GS_OK;
726       break;
727 
728     case TRY_BLOCK:
729       genericize_try_block (expr_p);
730       ret = GS_OK;
731       break;
732 
733     case HANDLER:
734       genericize_catch_block (expr_p);
735       ret = GS_OK;
736       break;
737 
738     case EH_SPEC_BLOCK:
739       genericize_eh_spec_block (expr_p);
740       ret = GS_OK;
741       break;
742 
743     case USING_STMT:
744       gcc_unreachable ();
745 
746     case FOR_STMT:
747     case WHILE_STMT:
748     case DO_STMT:
749     case SWITCH_STMT:
750     case CONTINUE_STMT:
751     case BREAK_STMT:
752       gcc_unreachable ();
753 
754     case OMP_FOR:
755     case OMP_SIMD:
756     case OMP_DISTRIBUTE:
757     case OMP_TASKLOOP:
758       ret = cp_gimplify_omp_for (expr_p, pre_p);
759       break;
760 
761     case EXPR_STMT:
762       gimplify_expr_stmt (expr_p);
763       ret = GS_OK;
764       break;
765 
766     case UNARY_PLUS_EXPR:
767       {
768 	tree arg = TREE_OPERAND (*expr_p, 0);
769 	tree type = TREE_TYPE (*expr_p);
770 	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
771 					    : arg;
772 	ret = GS_OK;
773       }
774       break;
775 
776     case CALL_EXPR:
777       ret = GS_OK;
778       if (flag_strong_eval_order == 2
779 	  && CALL_EXPR_FN (*expr_p)
780 	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
781 	{
782 	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
783 	  enum gimplify_status t
784 	    = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
785 			     is_gimple_call_addr, fb_rvalue);
786 	  if (t == GS_ERROR)
787 	    ret = GS_ERROR;
788 	  else if (is_gimple_variable (CALL_EXPR_FN (*expr_p))
789 		   && TREE_CODE (CALL_EXPR_FN (*expr_p)) != SSA_NAME)
790 	    CALL_EXPR_FN (*expr_p)
791 	      = get_initialized_tmp_var (CALL_EXPR_FN (*expr_p), pre_p,
792 					 NULL);
793 	  /* GIMPLE considers most pointer conversion useless, but for
794 	     calls we actually care about the exact function pointer type.  */
795 	  if (t != GS_ERROR && TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
796 	    CALL_EXPR_FN (*expr_p)
797 	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
798 	}
799       if (!CALL_EXPR_FN (*expr_p))
800 	/* Internal function call.  */;
801       else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
802 	{
803 	  /* This is a call to a (compound) assignment operator that used
804 	     the operator syntax; gimplify the RHS first.  */
805 	  gcc_assert (call_expr_nargs (*expr_p) == 2);
806 	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
807 	  enum gimplify_status t
808 	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
809 	  if (t == GS_ERROR)
810 	    ret = GS_ERROR;
811 	}
812       else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
813 	{
814 	  /* Leave the last argument for gimplify_call_expr, to avoid problems
815 	     with __builtin_va_arg_pack().  */
816 	  int nargs = call_expr_nargs (*expr_p) - 1;
817 	  for (int i = 0; i < nargs; ++i)
818 	    {
819 	      enum gimplify_status t
820 		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
821 	      if (t == GS_ERROR)
822 		ret = GS_ERROR;
823 	    }
824 	}
825       else if (flag_strong_eval_order
826 	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
827 	{
828 	  /* If flag_strong_eval_order, evaluate the object argument first.  */
829 	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
830 	  if (POINTER_TYPE_P (fntype))
831 	    fntype = TREE_TYPE (fntype);
832 	  if (TREE_CODE (fntype) == METHOD_TYPE)
833 	    {
834 	      enum gimplify_status t
835 		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
836 	      if (t == GS_ERROR)
837 		ret = GS_ERROR;
838 	    }
839 	}
840       break;
841 
842     case RETURN_EXPR:
843       if (TREE_OPERAND (*expr_p, 0)
844 	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
845 	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
846 	{
847 	  expr_p = &TREE_OPERAND (*expr_p, 0);
848 	  code = TREE_CODE (*expr_p);
849 	  /* Avoid going through the INIT_EXPR case, which can
850 	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
851 	  goto modify_expr_case;
852 	}
853       /* Fall through.  */
854 
855     default:
856       ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
857       break;
858     }
859 
860   /* Restore saved state.  */
861   if (STATEMENT_CODE_P (code))
862     current_stmt_tree ()->stmts_are_full_exprs_p
863       = saved_stmts_are_full_exprs_p;
864 
865   return ret;
866 }
867 
868 static inline bool
is_invisiref_parm(const_tree t)869 is_invisiref_parm (const_tree t)
870 {
871   return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
872 	  && DECL_BY_REFERENCE (t));
873 }
874 
875 /* Return true if the uid in both int tree maps are equal.  */
876 
877 bool
equal(cxx_int_tree_map * a,cxx_int_tree_map * b)878 cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
879 {
880   return (a->uid == b->uid);
881 }
882 
883 /* Hash a UID in a cxx_int_tree_map.  */
884 
885 unsigned int
hash(cxx_int_tree_map * item)886 cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
887 {
888   return item->uid;
889 }
890 
891 /* A stable comparison routine for use with splay trees and DECLs.  */
892 
893 static int
splay_tree_compare_decl_uid(splay_tree_key xa,splay_tree_key xb)894 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
895 {
896   tree a = (tree) xa;
897   tree b = (tree) xb;
898 
899   return DECL_UID (a) - DECL_UID (b);
900 }
901 
902 /* OpenMP context during genericization.  */
903 
904 struct cp_genericize_omp_taskreg
905 {
906   bool is_parallel;
907   bool default_shared;
908   struct cp_genericize_omp_taskreg *outer;
909   splay_tree variables;
910 };
911 
912 /* Return true if genericization should try to determine if
913    DECL is firstprivate or shared within task regions.  */
914 
915 static bool
omp_var_to_track(tree decl)916 omp_var_to_track (tree decl)
917 {
918   tree type = TREE_TYPE (decl);
919   if (is_invisiref_parm (decl))
920     type = TREE_TYPE (type);
921   else if (TREE_CODE (type) == REFERENCE_TYPE)
922     type = TREE_TYPE (type);
923   while (TREE_CODE (type) == ARRAY_TYPE)
924     type = TREE_TYPE (type);
925   if (type == error_mark_node || !CLASS_TYPE_P (type))
926     return false;
927   if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
928     return false;
929   if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
930     return false;
931   return true;
932 }
933 
934 /* Note DECL use in OpenMP region OMP_CTX during genericization.  */
935 
936 static void
omp_cxx_notice_variable(struct cp_genericize_omp_taskreg * omp_ctx,tree decl)937 omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
938 {
939   splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
940 					 (splay_tree_key) decl);
941   if (n == NULL)
942     {
943       int flags = OMP_CLAUSE_DEFAULT_SHARED;
944       if (omp_ctx->outer)
945 	omp_cxx_notice_variable (omp_ctx->outer, decl);
946       if (!omp_ctx->default_shared)
947 	{
948 	  struct cp_genericize_omp_taskreg *octx;
949 
950 	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
951 	    {
952 	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
953 	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
954 		{
955 		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
956 		  break;
957 		}
958 	      if (octx->is_parallel)
959 		break;
960 	    }
961 	  if (octx == NULL
962 	      && (TREE_CODE (decl) == PARM_DECL
963 		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
964 		      && DECL_CONTEXT (decl) == current_function_decl)))
965 	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
966 	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
967 	    {
968 	      /* DECL is implicitly determined firstprivate in
969 		 the current task construct.  Ensure copy ctor and
970 		 dtor are instantiated, because during gimplification
971 		 it will be already too late.  */
972 	      tree type = TREE_TYPE (decl);
973 	      if (is_invisiref_parm (decl))
974 		type = TREE_TYPE (type);
975 	      else if (TREE_CODE (type) == REFERENCE_TYPE)
976 		type = TREE_TYPE (type);
977 	      while (TREE_CODE (type) == ARRAY_TYPE)
978 		type = TREE_TYPE (type);
979 	      get_copy_ctor (type, tf_none);
980 	      get_dtor (type, tf_none);
981 	    }
982 	}
983       splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
984     }
985 }
986 
987 /* Genericization context.  */
988 
989 struct cp_genericize_data
990 {
991   hash_set<tree> *p_set;
992   vec<tree> bind_expr_stack;
993   struct cp_genericize_omp_taskreg *omp_ctx;
994   tree try_block;
995   bool no_sanitize_p;
996   bool handle_invisiref_parm_p;
997 };
998 
999 /* Perform any pre-gimplification folding of C++ front end trees to
1000    GENERIC.
1001    Note:  The folding of none-omp cases is something to move into
1002      the middle-end.  As for now we have most foldings only on GENERIC
1003      in fold-const, we need to perform this before transformation to
1004      GIMPLE-form.  */
1005 
1006 static tree
cp_fold_r(tree * stmt_p,int * walk_subtrees,void * data)1007 cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
1008 {
1009   tree stmt;
1010   enum tree_code code;
1011 
1012   *stmt_p = stmt = cp_fold (*stmt_p);
1013 
1014   if (((hash_set<tree> *) data)->add (stmt))
1015     {
1016       /* Don't walk subtrees of stmts we've already walked once, otherwise
1017 	 we can have exponential complexity with e.g. lots of nested
1018 	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
1019 	 always the same tree, which the first time cp_fold_r has been
1020 	 called on it had the subtrees walked.  */
1021       *walk_subtrees = 0;
1022       return NULL;
1023     }
1024 
1025   code = TREE_CODE (stmt);
1026   if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
1027       || code == OMP_TASKLOOP || code == OACC_LOOP)
1028     {
1029       tree x;
1030       int i, n;
1031 
1032       cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
1033       cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
1034       cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
1035       x = OMP_FOR_COND (stmt);
1036       if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
1037 	{
1038 	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
1039 	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
1040 	}
1041       else if (x && TREE_CODE (x) == TREE_VEC)
1042 	{
1043 	  n = TREE_VEC_LENGTH (x);
1044 	  for (i = 0; i < n; i++)
1045 	    {
1046 	      tree o = TREE_VEC_ELT (x, i);
1047 	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
1048 		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1049 	    }
1050 	}
1051       x = OMP_FOR_INCR (stmt);
1052       if (x && TREE_CODE (x) == TREE_VEC)
1053 	{
1054 	  n = TREE_VEC_LENGTH (x);
1055 	  for (i = 0; i < n; i++)
1056 	    {
1057 	      tree o = TREE_VEC_ELT (x, i);
1058 	      if (o && TREE_CODE (o) == MODIFY_EXPR)
1059 		o = TREE_OPERAND (o, 1);
1060 	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
1061 			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
1062 		{
1063 		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
1064 		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
1065 		}
1066 	    }
1067 	}
1068       cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
1069       *walk_subtrees = 0;
1070     }
1071 
1072   return NULL;
1073 }
1074 
1075 /* Fold ALL the trees!  FIXME we should be able to remove this, but
1076    apparently that still causes optimization regressions.  */
1077 
1078 void
cp_fold_function(tree fndecl)1079 cp_fold_function (tree fndecl)
1080 {
1081   hash_set<tree> pset;
1082   cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
1083 }
1084 
1085 /* Perform any pre-gimplification lowering of C++ front end trees to
1086    GENERIC.  */
1087 
1088 static tree
cp_genericize_r(tree * stmt_p,int * walk_subtrees,void * data)1089 cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
1090 {
1091   tree stmt = *stmt_p;
1092   struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
1093   hash_set<tree> *p_set = wtd->p_set;
1094 
1095   /* If in an OpenMP context, note var uses.  */
1096   if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1097       && (VAR_P (stmt)
1098 	  || TREE_CODE (stmt) == PARM_DECL
1099 	  || TREE_CODE (stmt) == RESULT_DECL)
1100       && omp_var_to_track (stmt))
1101     omp_cxx_notice_variable (wtd->omp_ctx, stmt);
1102 
1103   /* Don't dereference parms in a thunk, pass the references through. */
1104   if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
1105       || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
1106     {
1107       *walk_subtrees = 0;
1108       return NULL;
1109     }
1110 
1111   /* Dereference invisible reference parms.  */
1112   if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
1113     {
1114       *stmt_p = convert_from_reference (stmt);
1115       p_set->add (*stmt_p);
1116       *walk_subtrees = 0;
1117       return NULL;
1118     }
1119 
1120   /* Map block scope extern declarations to visible declarations with the
1121      same name and type in outer scopes if any.  */
1122   if (cp_function_chain->extern_decl_map
1123       && VAR_OR_FUNCTION_DECL_P (stmt)
1124       && DECL_EXTERNAL (stmt))
1125     {
1126       struct cxx_int_tree_map *h, in;
1127       in.uid = DECL_UID (stmt);
1128       h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
1129       if (h)
1130 	{
1131 	  *stmt_p = h->to;
1132 	  TREE_USED (h->to) |= TREE_USED (stmt);
1133 	  *walk_subtrees = 0;
1134 	  return NULL;
1135 	}
1136     }
1137 
1138   if (TREE_CODE (stmt) == INTEGER_CST
1139       && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
1140       && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1141       && !wtd->no_sanitize_p)
1142     {
1143       ubsan_maybe_instrument_reference (stmt_p);
1144       if (*stmt_p != stmt)
1145 	{
1146 	  *walk_subtrees = 0;
1147 	  return NULL_TREE;
1148 	}
1149     }
1150 
1151   /* Other than invisiref parms, don't walk the same tree twice.  */
1152   if (p_set->contains (stmt))
1153     {
1154       *walk_subtrees = 0;
1155       return NULL_TREE;
1156     }
1157 
1158   switch (TREE_CODE (stmt))
1159     {
1160     case ADDR_EXPR:
1161       if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1162 	{
1163 	  /* If in an OpenMP context, note var uses.  */
1164 	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
1165 	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
1166 	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
1167 	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
1168 	  *walk_subtrees = 0;
1169 	}
1170       break;
1171 
1172     case RETURN_EXPR:
1173       if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
1174 	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
1175 	*walk_subtrees = 0;
1176       break;
1177 
1178     case OMP_CLAUSE:
1179       switch (OMP_CLAUSE_CODE (stmt))
1180 	{
1181 	case OMP_CLAUSE_LASTPRIVATE:
1182 	  /* Don't dereference an invisiref in OpenMP clauses.  */
1183 	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1184 	    {
1185 	      *walk_subtrees = 0;
1186 	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
1187 		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
1188 			      cp_genericize_r, data, NULL);
1189 	    }
1190 	  break;
1191 	case OMP_CLAUSE_PRIVATE:
1192 	  /* Don't dereference an invisiref in OpenMP clauses.  */
1193 	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1194 	    *walk_subtrees = 0;
1195 	  else if (wtd->omp_ctx != NULL)
1196 	    {
1197 	      /* Private clause doesn't cause any references to the
1198 		 var in outer contexts, avoid calling
1199 		 omp_cxx_notice_variable for it.  */
1200 	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
1201 	      wtd->omp_ctx = NULL;
1202 	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
1203 			    data, NULL);
1204 	      wtd->omp_ctx = old;
1205 	      *walk_subtrees = 0;
1206 	    }
1207 	  break;
1208 	case OMP_CLAUSE_SHARED:
1209 	case OMP_CLAUSE_FIRSTPRIVATE:
1210 	case OMP_CLAUSE_COPYIN:
1211 	case OMP_CLAUSE_COPYPRIVATE:
1212 	  /* Don't dereference an invisiref in OpenMP clauses.  */
1213 	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1214 	    *walk_subtrees = 0;
1215 	  break;
1216 	case OMP_CLAUSE_REDUCTION:
1217 	  /* Don't dereference an invisiref in reduction clause's
1218 	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
1219 	     still needs to be genericized.  */
1220 	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
1221 	    {
1222 	      *walk_subtrees = 0;
1223 	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
1224 		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
1225 			      cp_genericize_r, data, NULL);
1226 	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
1227 		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
1228 			      cp_genericize_r, data, NULL);
1229 	    }
1230 	  break;
1231 	default:
1232 	  break;
1233 	}
1234       break;
1235 
1236     /* Due to the way voidify_wrapper_expr is written, we don't get a chance
1237        to lower this construct before scanning it, so we need to lower these
1238        before doing anything else.  */
1239     case CLEANUP_STMT:
1240       *stmt_p = build2_loc (EXPR_LOCATION (stmt),
1241 			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
1242 						   : TRY_FINALLY_EXPR,
1243 			    void_type_node,
1244 			    CLEANUP_BODY (stmt),
1245 			    CLEANUP_EXPR (stmt));
1246       break;
1247 
1248     case IF_STMT:
1249       genericize_if_stmt (stmt_p);
1250       /* *stmt_p has changed, tail recurse to handle it again.  */
1251       return cp_genericize_r (stmt_p, walk_subtrees, data);
1252 
1253     /* COND_EXPR might have incompatible types in branches if one or both
1254        arms are bitfields.  Fix it up now.  */
1255     case COND_EXPR:
1256       {
1257 	tree type_left
1258 	  = (TREE_OPERAND (stmt, 1)
1259 	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
1260 	     : NULL_TREE);
1261 	tree type_right
1262 	  = (TREE_OPERAND (stmt, 2)
1263 	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
1264 	     : NULL_TREE);
1265 	if (type_left
1266 	    && !useless_type_conversion_p (TREE_TYPE (stmt),
1267 					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
1268 	  {
1269 	    TREE_OPERAND (stmt, 1)
1270 	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
1271 	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1272 						   type_left));
1273 	  }
1274 	if (type_right
1275 	    && !useless_type_conversion_p (TREE_TYPE (stmt),
1276 					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
1277 	  {
1278 	    TREE_OPERAND (stmt, 2)
1279 	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
1280 	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
1281 						   type_right));
1282 	  }
1283       }
1284       break;
1285 
1286     case BIND_EXPR:
1287       if (__builtin_expect (wtd->omp_ctx != NULL, 0))
1288 	{
1289 	  tree decl;
1290 	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
1291 	    if (VAR_P (decl)
1292 		&& !DECL_EXTERNAL (decl)
1293 		&& omp_var_to_track (decl))
1294 	      {
1295 		splay_tree_node n
1296 		  = splay_tree_lookup (wtd->omp_ctx->variables,
1297 				       (splay_tree_key) decl);
1298 		if (n == NULL)
1299 		  splay_tree_insert (wtd->omp_ctx->variables,
1300 				     (splay_tree_key) decl,
1301 				     TREE_STATIC (decl)
1302 				     ? OMP_CLAUSE_DEFAULT_SHARED
1303 				     : OMP_CLAUSE_DEFAULT_PRIVATE);
1304 	      }
1305 	}
1306       if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
1307 	{
1308 	  /* The point here is to not sanitize static initializers.  */
1309 	  bool no_sanitize_p = wtd->no_sanitize_p;
1310 	  wtd->no_sanitize_p = true;
1311 	  for (tree decl = BIND_EXPR_VARS (stmt);
1312 	       decl;
1313 	       decl = DECL_CHAIN (decl))
1314 	    if (VAR_P (decl)
1315 		&& TREE_STATIC (decl)
1316 		&& DECL_INITIAL (decl))
1317 	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
1318 	  wtd->no_sanitize_p = no_sanitize_p;
1319 	}
1320       wtd->bind_expr_stack.safe_push (stmt);
1321       cp_walk_tree (&BIND_EXPR_BODY (stmt),
1322 		    cp_genericize_r, data, NULL);
1323       wtd->bind_expr_stack.pop ();
1324       break;
1325 
1326     case USING_STMT:
1327       {
1328 	tree block = NULL_TREE;
1329 
1330 	/* Get the innermost inclosing GIMPLE_BIND that has a non NULL
1331 	   BLOCK, and append an IMPORTED_DECL to its
1332 	   BLOCK_VARS chained list.  */
1333 	if (wtd->bind_expr_stack.exists ())
1334 	  {
1335 	    int i;
1336 	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
1337 	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
1338 		break;
1339 	  }
1340 	if (block)
1341 	  {
1342 	    tree decl = TREE_OPERAND (stmt, 0);
1343 	    gcc_assert (decl);
1344 
1345 	    if (undeduced_auto_decl (decl))
1346 	      /* Omit from the GENERIC, the back-end can't handle it.  */;
1347 	    else
1348 	      {
1349 		tree using_directive = make_node (IMPORTED_DECL);
1350 		TREE_TYPE (using_directive) = void_type_node;
1351 
1352 		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
1353 		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
1354 		BLOCK_VARS (block) = using_directive;
1355 	      }
1356 	  }
1357 	/* The USING_STMT won't appear in GENERIC.  */
1358 	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1359 	*walk_subtrees = 0;
1360       }
1361       break;
1362 
1363     case DECL_EXPR:
1364       if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
1365 	{
1366 	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
1367 	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
1368 	  *walk_subtrees = 0;
1369 	}
1370       else
1371 	{
1372 	  tree d = DECL_EXPR_DECL (stmt);
1373 	  if (VAR_P (d))
1374 	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
1375 	}
1376       break;
1377 
1378     case OMP_PARALLEL:
1379     case OMP_TASK:
1380     case OMP_TASKLOOP:
1381       {
1382 	struct cp_genericize_omp_taskreg omp_ctx;
1383 	tree c, decl;
1384 	splay_tree_node n;
1385 
1386 	*walk_subtrees = 0;
1387 	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
1388 	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
1389 	omp_ctx.default_shared = omp_ctx.is_parallel;
1390 	omp_ctx.outer = wtd->omp_ctx;
1391 	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
1392 	wtd->omp_ctx = &omp_ctx;
1393 	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
1394 	  switch (OMP_CLAUSE_CODE (c))
1395 	    {
1396 	    case OMP_CLAUSE_SHARED:
1397 	    case OMP_CLAUSE_PRIVATE:
1398 	    case OMP_CLAUSE_FIRSTPRIVATE:
1399 	    case OMP_CLAUSE_LASTPRIVATE:
1400 	      decl = OMP_CLAUSE_DECL (c);
1401 	      if (decl == error_mark_node || !omp_var_to_track (decl))
1402 		break;
1403 	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
1404 	      if (n != NULL)
1405 		break;
1406 	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
1407 				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1408 				 ? OMP_CLAUSE_DEFAULT_SHARED
1409 				 : OMP_CLAUSE_DEFAULT_PRIVATE);
1410 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
1411 		omp_cxx_notice_variable (omp_ctx.outer, decl);
1412 	      break;
1413 	    case OMP_CLAUSE_DEFAULT:
1414 	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
1415 		omp_ctx.default_shared = true;
1416 	    default:
1417 	      break;
1418 	    }
1419 	if (TREE_CODE (stmt) == OMP_TASKLOOP)
1420 	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1421 	else
1422 	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
1423 	wtd->omp_ctx = omp_ctx.outer;
1424 	splay_tree_delete (omp_ctx.variables);
1425       }
1426       break;
1427 
1428     case TRY_BLOCK:
1429       {
1430         *walk_subtrees = 0;
1431         tree try_block = wtd->try_block;
1432         wtd->try_block = stmt;
1433         cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
1434         wtd->try_block = try_block;
1435         cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
1436       }
1437       break;
1438 
1439     case MUST_NOT_THROW_EXPR:
1440       /* MUST_NOT_THROW_COND might be something else with TM.  */
1441       if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
1442 	{
1443 	  *walk_subtrees = 0;
1444 	  tree try_block = wtd->try_block;
1445 	  wtd->try_block = stmt;
1446 	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1447 	  wtd->try_block = try_block;
1448 	}
1449       break;
1450 
1451     case THROW_EXPR:
1452       {
1453 	location_t loc = location_of (stmt);
1454 	if (TREE_NO_WARNING (stmt))
1455 	  /* Never mind.  */;
1456 	else if (wtd->try_block)
1457 	  {
1458 	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
1459 		&& warning_at (loc, OPT_Wterminate,
1460 			       "throw will always call terminate()")
1461 		&& cxx_dialect >= cxx11
1462 		&& DECL_DESTRUCTOR_P (current_function_decl))
1463 	      inform (loc, "in C++11 destructors default to noexcept");
1464 	  }
1465 	else
1466 	  {
1467 	    if (warn_cxx11_compat && cxx_dialect < cxx11
1468 		&& DECL_DESTRUCTOR_P (current_function_decl)
1469 		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
1470 		    == NULL_TREE)
1471 		&& (get_defaulted_eh_spec (current_function_decl)
1472 		    == empty_except_spec))
1473 	      warning_at (loc, OPT_Wc__11_compat,
1474 			  "in C++11 this throw will terminate because "
1475 			  "destructors default to noexcept");
1476 	  }
1477       }
1478       break;
1479 
1480     case CONVERT_EXPR:
1481       gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
1482       break;
1483 
1484     case FOR_STMT:
1485       genericize_for_stmt (stmt_p, walk_subtrees, data);
1486       break;
1487 
1488     case WHILE_STMT:
1489       genericize_while_stmt (stmt_p, walk_subtrees, data);
1490       break;
1491 
1492     case DO_STMT:
1493       genericize_do_stmt (stmt_p, walk_subtrees, data);
1494       break;
1495 
1496     case SWITCH_STMT:
1497       genericize_switch_stmt (stmt_p, walk_subtrees, data);
1498       break;
1499 
1500     case CONTINUE_STMT:
1501       genericize_continue_stmt (stmt_p);
1502       break;
1503 
1504     case BREAK_STMT:
1505       genericize_break_stmt (stmt_p);
1506       break;
1507 
1508     case OMP_FOR:
1509     case OMP_SIMD:
1510     case OMP_DISTRIBUTE:
1511     case OACC_LOOP:
1512       genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
1513       break;
1514 
1515     case PTRMEM_CST:
1516       /* By the time we get here we're handing off to the back end, so we don't
1517 	 need or want to preserve PTRMEM_CST anymore.  */
1518       *stmt_p = cplus_expand_constant (stmt);
1519       *walk_subtrees = 0;
1520       break;
1521 
1522     case MEM_REF:
1523       /* For MEM_REF, make sure not to sanitize the second operand even
1524 	 if it has reference type.  It is just an offset with a type
1525 	 holding other information.  There is no other processing we
1526 	 need to do for INTEGER_CSTs, so just ignore the second argument
1527 	 unconditionally.  */
1528       cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
1529       *walk_subtrees = 0;
1530       break;
1531 
1532     case NOP_EXPR:
1533       if (!wtd->no_sanitize_p
1534 	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
1535 	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
1536 	ubsan_maybe_instrument_reference (stmt_p);
1537       break;
1538 
1539     case CALL_EXPR:
1540       if (!wtd->no_sanitize_p
1541 	  && sanitize_flags_p ((SANITIZE_NULL
1542 				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
1543 	{
1544 	  tree fn = CALL_EXPR_FN (stmt);
1545 	  if (fn != NULL_TREE
1546 	      && !error_operand_p (fn)
1547 	      && POINTER_TYPE_P (TREE_TYPE (fn))
1548 	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
1549 	    {
1550 	      bool is_ctor
1551 		= TREE_CODE (fn) == ADDR_EXPR
1552 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
1553 		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
1554 	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
1555 		ubsan_maybe_instrument_member_call (stmt, is_ctor);
1556 	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
1557 		cp_ubsan_maybe_instrument_member_call (stmt);
1558 	    }
1559 	  else if (fn == NULL_TREE
1560 		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
1561 		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
1562 		   && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (stmt, 0)))
1563 		       == REFERENCE_TYPE))
1564 	    *walk_subtrees = 0;
1565 	}
1566       /* Fall through.  */
1567     case AGGR_INIT_EXPR:
1568       /* For calls to a multi-versioned function, overload resolution
1569 	 returns the function with the highest target priority, that is,
1570 	 the version that will checked for dispatching first.  If this
1571 	 version is inlinable, a direct call to this version can be made
1572 	 otherwise the call should go through the dispatcher.  */
1573       {
1574 	tree fn = cp_get_callee_fndecl_nofold (stmt);
1575 	if (fn && DECL_FUNCTION_VERSIONED (fn)
1576 	    && (current_function_decl == NULL
1577 		|| !targetm.target_option.can_inline_p (current_function_decl,
1578 							fn)))
1579 	  if (tree dis = get_function_version_dispatcher (fn))
1580 	    {
1581 	      mark_versions_used (dis);
1582 	      dis = build_address (dis);
1583 	      if (TREE_CODE (stmt) == CALL_EXPR)
1584 		CALL_EXPR_FN (stmt) = dis;
1585 	      else
1586 		AGGR_INIT_EXPR_FN (stmt) = dis;
1587 	    }
1588       }
1589       break;
1590 
1591     case TARGET_EXPR:
1592       if (TARGET_EXPR_INITIAL (stmt)
1593 	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
1594 	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
1595 	TARGET_EXPR_NO_ELIDE (stmt) = 1;
1596       break;
1597 
1598     default:
1599       if (IS_TYPE_OR_DECL_P (stmt))
1600 	*walk_subtrees = 0;
1601       break;
1602     }
1603 
1604   p_set->add (*stmt_p);
1605 
1606   return NULL;
1607 }
1608 
1609 /* Lower C++ front end trees to GENERIC in T_P.  */
1610 
1611 static void
cp_genericize_tree(tree * t_p,bool handle_invisiref_parm_p)1612 cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
1613 {
1614   struct cp_genericize_data wtd;
1615 
1616   wtd.p_set = new hash_set<tree>;
1617   wtd.bind_expr_stack.create (0);
1618   wtd.omp_ctx = NULL;
1619   wtd.try_block = NULL_TREE;
1620   wtd.no_sanitize_p = false;
1621   wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
1622   cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
1623   delete wtd.p_set;
1624   wtd.bind_expr_stack.release ();
1625   if (sanitize_flags_p (SANITIZE_VPTR))
1626     cp_ubsan_instrument_member_accesses (t_p);
1627 }
1628 
1629 /* If a function that should end with a return in non-void
1630    function doesn't obviously end with return, add ubsan
1631    instrumentation code to verify it at runtime.  If -fsanitize=return
1632    is not enabled, instrument __builtin_unreachable.  */
1633 
1634 static void
cp_maybe_instrument_return(tree fndecl)1635 cp_maybe_instrument_return (tree fndecl)
1636 {
1637   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1638       || DECL_CONSTRUCTOR_P (fndecl)
1639       || DECL_DESTRUCTOR_P (fndecl)
1640       || !targetm.warn_func_return (fndecl))
1641     return;
1642 
1643   if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1644       /* Don't add __builtin_unreachable () if not optimizing, it will not
1645 	 improve any optimizations in that case, just break UB code.
1646 	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
1647 	 UBSan covers this with ubsan_instrument_return above where sufficient
1648 	 information is provided, while the __builtin_unreachable () below
1649 	 if return sanitization is disabled will just result in hard to
1650 	 understand runtime error without location.  */
1651       && (!optimize
1652 	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1653     return;
1654 
1655   tree t = DECL_SAVED_TREE (fndecl);
1656   while (t)
1657     {
1658       switch (TREE_CODE (t))
1659 	{
1660 	case BIND_EXPR:
1661 	  t = BIND_EXPR_BODY (t);
1662 	  continue;
1663 	case TRY_FINALLY_EXPR:
1664 	case CLEANUP_POINT_EXPR:
1665 	  t = TREE_OPERAND (t, 0);
1666 	  continue;
1667 	case STATEMENT_LIST:
1668 	  {
1669 	    tree_stmt_iterator i = tsi_last (t);
1670 	    while (!tsi_end_p (i))
1671 	      {
1672 		tree p = tsi_stmt (i);
1673 		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1674 		  break;
1675 		tsi_prev (&i);
1676 	      }
1677 	    if (!tsi_end_p (i))
1678 	      {
1679 		t = tsi_stmt (i);
1680 		continue;
1681 	      }
1682 	  }
1683 	  break;
1684 	case RETURN_EXPR:
1685 	  return;
1686 	default:
1687 	  break;
1688 	}
1689       break;
1690     }
1691   if (t == NULL_TREE)
1692     return;
1693   tree *p = &DECL_SAVED_TREE (fndecl);
1694   if (TREE_CODE (*p) == BIND_EXPR)
1695     p = &BIND_EXPR_BODY (*p);
1696 
1697   location_t loc = DECL_SOURCE_LOCATION (fndecl);
1698   if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1699     t = ubsan_instrument_return (loc);
1700   else
1701     {
1702       tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1703       t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1704     }
1705 
1706   append_to_statement_list (t, p);
1707 }
1708 
1709 void
cp_genericize(tree fndecl)1710 cp_genericize (tree fndecl)
1711 {
1712   tree t;
1713 
1714   /* Fix up the types of parms passed by invisible reference.  */
1715   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1716     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1717       {
1718 	/* If a function's arguments are copied to create a thunk,
1719 	   then DECL_BY_REFERENCE will be set -- but the type of the
1720 	   argument will be a pointer type, so we will never get
1721 	   here.  */
1722 	gcc_assert (!DECL_BY_REFERENCE (t));
1723 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1724 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1725 	DECL_BY_REFERENCE (t) = 1;
1726 	TREE_ADDRESSABLE (t) = 0;
1727 	relayout_decl (t);
1728       }
1729 
1730   /* Do the same for the return value.  */
1731   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1732     {
1733       t = DECL_RESULT (fndecl);
1734       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1735       DECL_BY_REFERENCE (t) = 1;
1736       TREE_ADDRESSABLE (t) = 0;
1737       relayout_decl (t);
1738       if (DECL_NAME (t))
1739 	{
1740 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1741 	  tree outer = outer_curly_brace_block (current_function_decl);
1742 	  tree var;
1743 
1744 	  if (outer)
1745 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1746 	      if (VAR_P (var)
1747 		  && DECL_NAME (t) == DECL_NAME (var)
1748 		  && DECL_HAS_VALUE_EXPR_P (var)
1749 		  && DECL_VALUE_EXPR (var) == t)
1750 		{
1751 		  tree val = convert_from_reference (t);
1752 		  SET_DECL_VALUE_EXPR (var, val);
1753 		  break;
1754 		}
1755 	}
1756     }
1757 
1758   /* If we're a clone, the body is already GIMPLE.  */
1759   if (DECL_CLONED_FUNCTION_P (fndecl))
1760     return;
1761 
1762   /* Allow cp_genericize calls to be nested.  */
1763   tree save_bc_label[2];
1764   save_bc_label[bc_break] = bc_label[bc_break];
1765   save_bc_label[bc_continue] = bc_label[bc_continue];
1766   bc_label[bc_break] = NULL_TREE;
1767   bc_label[bc_continue] = NULL_TREE;
1768 
1769   /* We do want to see every occurrence of the parms, so we can't just use
1770      walk_tree's hash functionality.  */
1771   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1772 
1773   cp_maybe_instrument_return (fndecl);
1774 
1775   /* Do everything else.  */
1776   c_genericize (fndecl);
1777 
1778   gcc_assert (bc_label[bc_break] == NULL);
1779   gcc_assert (bc_label[bc_continue] == NULL);
1780   bc_label[bc_break] = save_bc_label[bc_break];
1781   bc_label[bc_continue] = save_bc_label[bc_continue];
1782 }
1783 
1784 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1785    NULL if there is in fact nothing to do.  ARG2 may be null if FN
1786    actually only takes one argument.  */
1787 
1788 static tree
cxx_omp_clause_apply_fn(tree fn,tree arg1,tree arg2)1789 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1790 {
1791   tree defparm, parm, t;
1792   int i = 0;
1793   int nargs;
1794   tree *argarray;
1795 
1796   if (fn == NULL)
1797     return NULL;
1798 
1799   nargs = list_length (DECL_ARGUMENTS (fn));
1800   argarray = XALLOCAVEC (tree, nargs);
1801 
1802   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1803   if (arg2)
1804     defparm = TREE_CHAIN (defparm);
1805 
1806   bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1807   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1808     {
1809       tree inner_type = TREE_TYPE (arg1);
1810       tree start1, end1, p1;
1811       tree start2 = NULL, p2 = NULL;
1812       tree ret = NULL, lab;
1813 
1814       start1 = arg1;
1815       start2 = arg2;
1816       do
1817 	{
1818 	  inner_type = TREE_TYPE (inner_type);
1819 	  start1 = build4 (ARRAY_REF, inner_type, start1,
1820 			   size_zero_node, NULL, NULL);
1821 	  if (arg2)
1822 	    start2 = build4 (ARRAY_REF, inner_type, start2,
1823 			     size_zero_node, NULL, NULL);
1824 	}
1825       while (TREE_CODE (inner_type) == ARRAY_TYPE);
1826       start1 = build_fold_addr_expr_loc (input_location, start1);
1827       if (arg2)
1828 	start2 = build_fold_addr_expr_loc (input_location, start2);
1829 
1830       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1831       end1 = fold_build_pointer_plus (start1, end1);
1832 
1833       p1 = create_tmp_var (TREE_TYPE (start1));
1834       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1835       append_to_statement_list (t, &ret);
1836 
1837       if (arg2)
1838 	{
1839 	  p2 = create_tmp_var (TREE_TYPE (start2));
1840 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1841 	  append_to_statement_list (t, &ret);
1842 	}
1843 
1844       lab = create_artificial_label (input_location);
1845       t = build1 (LABEL_EXPR, void_type_node, lab);
1846       append_to_statement_list (t, &ret);
1847 
1848       argarray[i++] = p1;
1849       if (arg2)
1850 	argarray[i++] = p2;
1851       /* Handle default arguments.  */
1852       for (parm = defparm; parm && parm != void_list_node;
1853 	   parm = TREE_CHAIN (parm), i++)
1854 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1855 					   TREE_PURPOSE (parm), fn,
1856 					   i - is_method, tf_warning_or_error);
1857       t = build_call_a (fn, i, argarray);
1858       t = fold_convert (void_type_node, t);
1859       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1860       append_to_statement_list (t, &ret);
1861 
1862       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1863       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1864       append_to_statement_list (t, &ret);
1865 
1866       if (arg2)
1867 	{
1868 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1869 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1870 	  append_to_statement_list (t, &ret);
1871 	}
1872 
1873       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1874       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1875       append_to_statement_list (t, &ret);
1876 
1877       return ret;
1878     }
1879   else
1880     {
1881       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1882       if (arg2)
1883 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1884       /* Handle default arguments.  */
1885       for (parm = defparm; parm && parm != void_list_node;
1886 	   parm = TREE_CHAIN (parm), i++)
1887 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1888 					   TREE_PURPOSE (parm), fn,
1889 					   i - is_method, tf_warning_or_error);
1890       t = build_call_a (fn, i, argarray);
1891       t = fold_convert (void_type_node, t);
1892       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1893     }
1894 }
1895 
1896 /* Return code to initialize DECL with its default constructor, or
1897    NULL if there's nothing to do.  */
1898 
1899 tree
cxx_omp_clause_default_ctor(tree clause,tree decl,tree)1900 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1901 {
1902   tree info = CP_OMP_CLAUSE_INFO (clause);
1903   tree ret = NULL;
1904 
1905   if (info)
1906     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1907 
1908   return ret;
1909 }
1910 
1911 /* Return code to initialize DST with a copy constructor from SRC.  */
1912 
1913 tree
cxx_omp_clause_copy_ctor(tree clause,tree dst,tree src)1914 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1915 {
1916   tree info = CP_OMP_CLAUSE_INFO (clause);
1917   tree ret = NULL;
1918 
1919   if (info)
1920     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
1921   if (ret == NULL)
1922     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1923 
1924   return ret;
1925 }
1926 
1927 /* Similarly, except use an assignment operator instead.  */
1928 
1929 tree
cxx_omp_clause_assign_op(tree clause,tree dst,tree src)1930 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1931 {
1932   tree info = CP_OMP_CLAUSE_INFO (clause);
1933   tree ret = NULL;
1934 
1935   if (info)
1936     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1937   if (ret == NULL)
1938     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1939 
1940   return ret;
1941 }
1942 
1943 /* Return code to destroy DECL.  */
1944 
1945 tree
cxx_omp_clause_dtor(tree clause,tree decl)1946 cxx_omp_clause_dtor (tree clause, tree decl)
1947 {
1948   tree info = CP_OMP_CLAUSE_INFO (clause);
1949   tree ret = NULL;
1950 
1951   if (info)
1952     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1953 
1954   return ret;
1955 }
1956 
1957 /* True if OpenMP should privatize what this DECL points to rather
1958    than the DECL itself.  */
1959 
1960 bool
cxx_omp_privatize_by_reference(const_tree decl)1961 cxx_omp_privatize_by_reference (const_tree decl)
1962 {
1963   return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1964 	  || is_invisiref_parm (decl));
1965 }
1966 
1967 /* Return true if DECL is const qualified var having no mutable member.  */
1968 bool
cxx_omp_const_qual_no_mutable(tree decl)1969 cxx_omp_const_qual_no_mutable (tree decl)
1970 {
1971   tree type = TREE_TYPE (decl);
1972   if (TREE_CODE (type) == REFERENCE_TYPE)
1973     {
1974       if (!is_invisiref_parm (decl))
1975 	return false;
1976       type = TREE_TYPE (type);
1977 
1978       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1979 	{
1980 	  /* NVR doesn't preserve const qualification of the
1981 	     variable's type.  */
1982 	  tree outer = outer_curly_brace_block (current_function_decl);
1983 	  tree var;
1984 
1985 	  if (outer)
1986 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1987 	      if (VAR_P (var)
1988 		  && DECL_NAME (decl) == DECL_NAME (var)
1989 		  && (TYPE_MAIN_VARIANT (type)
1990 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1991 		{
1992 		  if (TYPE_READONLY (TREE_TYPE (var)))
1993 		    type = TREE_TYPE (var);
1994 		  break;
1995 		}
1996 	}
1997     }
1998 
1999   if (type == error_mark_node)
2000     return false;
2001 
2002   /* Variables with const-qualified type having no mutable member
2003      are predetermined shared.  */
2004   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2005     return true;
2006 
2007   return false;
2008 }
2009 
2010 /* True if OpenMP sharing attribute of DECL is predetermined.  */
2011 
2012 enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1(tree decl)2013 cxx_omp_predetermined_sharing_1 (tree decl)
2014 {
2015   /* Static data members are predetermined shared.  */
2016   if (TREE_STATIC (decl))
2017     {
2018       tree ctx = CP_DECL_CONTEXT (decl);
2019       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2020 	return OMP_CLAUSE_DEFAULT_SHARED;
2021     }
2022 
2023   /* Const qualified vars having no mutable member are predetermined
2024      shared.  */
2025   if (cxx_omp_const_qual_no_mutable (decl))
2026     return OMP_CLAUSE_DEFAULT_SHARED;
2027 
2028   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2029 }
2030 
2031 /* Likewise, but also include the artificial vars.  We don't want to
2032    disallow the artificial vars being mentioned in explicit clauses,
2033    as we use artificial vars e.g. for loop constructs with random
2034    access iterators other than pointers, but during gimplification
2035    we want to treat them as predetermined.  */
2036 
2037 enum omp_clause_default_kind
cxx_omp_predetermined_sharing(tree decl)2038 cxx_omp_predetermined_sharing (tree decl)
2039 {
2040   enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2041   if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2042     return ret;
2043 
2044   /* Predetermine artificial variables holding integral values, those
2045      are usually result of gimplify_one_sizepos or SAVE_EXPR
2046      gimplification.  */
2047   if (VAR_P (decl)
2048       && DECL_ARTIFICIAL (decl)
2049       && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2050       && !(DECL_LANG_SPECIFIC (decl)
2051 	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2052     return OMP_CLAUSE_DEFAULT_SHARED;
2053 
2054   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2055 }
2056 
2057 /* Finalize an implicitly determined clause.  */
2058 
2059 void
cxx_omp_finish_clause(tree c,gimple_seq *)2060 cxx_omp_finish_clause (tree c, gimple_seq *)
2061 {
2062   tree decl, inner_type;
2063   bool make_shared = false;
2064 
2065   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2066     return;
2067 
2068   decl = OMP_CLAUSE_DECL (c);
2069   decl = require_complete_type (decl);
2070   inner_type = TREE_TYPE (decl);
2071   if (decl == error_mark_node)
2072     make_shared = true;
2073   else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2074     inner_type = TREE_TYPE (inner_type);
2075 
2076   /* We're interested in the base element, not arrays.  */
2077   while (TREE_CODE (inner_type) == ARRAY_TYPE)
2078     inner_type = TREE_TYPE (inner_type);
2079 
2080   /* Check for special function availability by building a call to one.
2081      Save the results, because later we won't be in the right context
2082      for making these queries.  */
2083   if (!make_shared
2084       && CLASS_TYPE_P (inner_type)
2085       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2086     make_shared = true;
2087 
2088   if (make_shared)
2089     {
2090       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2091       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2092       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2093     }
2094 }
2095 
2096 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2097    disregarded in OpenMP construct, because it is going to be
2098    remapped during OpenMP lowering.  SHARED is true if DECL
2099    is going to be shared, false if it is going to be privatized.  */
2100 
2101 bool
cxx_omp_disregard_value_expr(tree decl,bool shared)2102 cxx_omp_disregard_value_expr (tree decl, bool shared)
2103 {
2104   return !shared
2105 	 && VAR_P (decl)
2106 	 && DECL_HAS_VALUE_EXPR_P (decl)
2107 	 && DECL_ARTIFICIAL (decl)
2108 	 && DECL_LANG_SPECIFIC (decl)
2109 	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2110 }
2111 
2112 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2113 
2114 static tree
cp_fold_maybe_rvalue(tree x,bool rval)2115 cp_fold_maybe_rvalue (tree x, bool rval)
2116 {
2117   while (true)
2118     {
2119       x = cp_fold (x);
2120       if (rval && DECL_P (x)
2121 	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
2122 	{
2123 	  tree v = decl_constant_value (x);
2124 	  if (v != x && v != error_mark_node)
2125 	    {
2126 	      x = v;
2127 	      continue;
2128 	    }
2129 	}
2130       break;
2131     }
2132   return x;
2133 }
2134 
2135 /* Fold expression X which is used as an rvalue.  */
2136 
2137 static tree
cp_fold_rvalue(tree x)2138 cp_fold_rvalue (tree x)
2139 {
2140   return cp_fold_maybe_rvalue (x, true);
2141 }
2142 
2143 /* Perform folding on expression X.  */
2144 
2145 tree
cp_fully_fold(tree x)2146 cp_fully_fold (tree x)
2147 {
2148   if (processing_template_decl)
2149     return x;
2150   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2151      have to call both.  */
2152   if (cxx_dialect >= cxx11)
2153     {
2154       x = maybe_constant_value (x);
2155       /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2156 	 a TARGET_EXPR; undo that here.  */
2157       if (TREE_CODE (x) == TARGET_EXPR)
2158 	x = TARGET_EXPR_INITIAL (x);
2159       else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2160 	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2161 	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2162 	x = TREE_OPERAND (x, 0);
2163     }
2164   return cp_fold_rvalue (x);
2165 }
2166 
2167 /* c-common interface to cp_fold.  If IN_INIT, this is in a static initializer
2168    and certain changes are made to the folding done.  Or should be (FIXME).  We
2169    never touch maybe_const, as it is only used for the C front-end
2170    C_MAYBE_CONST_EXPR.  */
2171 
2172 tree
c_fully_fold(tree x,bool,bool *,bool lval)2173 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2174 {
2175   return cp_fold_maybe_rvalue (x, !lval);
2176 }
2177 
2178 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2179 
2180 /* Dispose of the whole FOLD_CACHE.  */
2181 
2182 void
clear_fold_cache(void)2183 clear_fold_cache (void)
2184 {
2185   if (fold_cache != NULL)
2186     fold_cache->empty ();
2187 }
2188 
2189 /*  This function tries to fold an expression X.
2190     To avoid combinatorial explosion, folding results are kept in fold_cache.
2191     If X is invalid, we don't fold at all.
2192     For performance reasons we don't cache expressions representing a
2193     declaration or constant.
2194     Function returns X or its folded variant.  */
2195 
2196 static tree
cp_fold(tree x)2197 cp_fold (tree x)
2198 {
2199   tree op0, op1, op2, op3;
2200   tree org_x = x, r = NULL_TREE;
2201   enum tree_code code;
2202   location_t loc;
2203   bool rval_ops = true;
2204 
2205   if (!x || x == error_mark_node)
2206     return x;
2207 
2208   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2209     return x;
2210 
2211   /* Don't bother to cache DECLs or constants.  */
2212   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2213     return x;
2214 
2215   if (fold_cache == NULL)
2216     fold_cache = hash_map<tree, tree>::create_ggc (101);
2217 
2218   if (tree *cached = fold_cache->get (x))
2219     return *cached;
2220 
2221   code = TREE_CODE (x);
2222   switch (code)
2223     {
2224     case CLEANUP_POINT_EXPR:
2225       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2226 	 effects.  */
2227       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2228       if (!TREE_SIDE_EFFECTS (r))
2229 	x = r;
2230       break;
2231 
2232     case SIZEOF_EXPR:
2233       x = fold_sizeof_expr (x);
2234       break;
2235 
2236     case VIEW_CONVERT_EXPR:
2237       rval_ops = false;
2238       /* FALLTHRU */
2239     case CONVERT_EXPR:
2240     case NOP_EXPR:
2241     case NON_LVALUE_EXPR:
2242 
2243       if (VOID_TYPE_P (TREE_TYPE (x)))
2244 	{
2245 	  /* This is just to make sure we don't end up with casts to
2246 	     void from error_mark_node.  If we just return x, then
2247 	     cp_fold_r might fold the operand into error_mark_node and
2248 	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2249 	     during gimplification doesn't like such casts.
2250 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0), the
2251 	     folding of the operand should be in the caches and if in cp_fold_r
2252 	     it will modify it in place.  */
2253 	  op0 = cp_fold (TREE_OPERAND (x, 0));
2254 	  if (op0 == error_mark_node)
2255 	    x = error_mark_node;
2256 	  break;
2257 	}
2258 
2259       loc = EXPR_LOCATION (x);
2260       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2261 
2262       if (code == CONVERT_EXPR
2263 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2264 	  && op0 != void_node)
2265 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2266 	   folding variants, since fold() doesn't do those transformations.  */
2267 	x = fold (convert (TREE_TYPE (x), op0));
2268       else if (op0 != TREE_OPERAND (x, 0))
2269 	{
2270 	  if (op0 == error_mark_node)
2271 	    x = error_mark_node;
2272 	  else
2273 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2274 	}
2275       else
2276 	x = fold (x);
2277 
2278       /* Conversion of an out-of-range value has implementation-defined
2279 	 behavior; the language considers it different from arithmetic
2280 	 overflow, which is undefined.  */
2281       if (TREE_CODE (op0) == INTEGER_CST
2282 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2283 	TREE_OVERFLOW (x) = false;
2284 
2285       break;
2286 
2287     case INDIRECT_REF:
2288       /* We don't need the decltype(auto) obfuscation anymore.  */
2289       if (REF_PARENTHESIZED_P (x))
2290 	{
2291 	  tree p = maybe_undo_parenthesized_ref (x);
2292 	  return cp_fold (p);
2293 	}
2294       goto unary;
2295 
2296     case ADDR_EXPR:
2297       loc = EXPR_LOCATION (x);
2298       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2299 
2300       /* Cope with user tricks that amount to offsetof.  */
2301       if (op0 != error_mark_node
2302 	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2303 	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2304 	{
2305 	  tree val = get_base_address (op0);
2306 	  if (val
2307 	      && INDIRECT_REF_P (val)
2308 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2309 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2310 	    {
2311 	      val = TREE_OPERAND (val, 0);
2312 	      STRIP_NOPS (val);
2313 	      val = maybe_constant_value (val);
2314 	      if (TREE_CODE (val) == INTEGER_CST)
2315 		return fold_offsetof (op0, TREE_TYPE (x));
2316 	    }
2317 	}
2318       goto finish_unary;
2319 
2320     case REALPART_EXPR:
2321     case IMAGPART_EXPR:
2322       rval_ops = false;
2323       /* FALLTHRU */
2324     case CONJ_EXPR:
2325     case FIX_TRUNC_EXPR:
2326     case FLOAT_EXPR:
2327     case NEGATE_EXPR:
2328     case ABS_EXPR:
2329     case BIT_NOT_EXPR:
2330     case TRUTH_NOT_EXPR:
2331     case FIXED_CONVERT_EXPR:
2332     unary:
2333 
2334       loc = EXPR_LOCATION (x);
2335       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2336 
2337     finish_unary:
2338       if (op0 != TREE_OPERAND (x, 0))
2339 	{
2340 	  if (op0 == error_mark_node)
2341 	    x = error_mark_node;
2342 	  else
2343 	    {
2344 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2345 	      if (code == INDIRECT_REF
2346 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2347 		{
2348 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2349 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2350 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2351 		}
2352 	    }
2353 	}
2354       else
2355 	x = fold (x);
2356 
2357       gcc_assert (TREE_CODE (x) != COND_EXPR
2358 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2359       break;
2360 
2361     case UNARY_PLUS_EXPR:
2362       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2363       if (op0 == error_mark_node)
2364 	x = error_mark_node;
2365       else
2366 	x = fold_convert (TREE_TYPE (x), op0);
2367       break;
2368 
2369     case POSTDECREMENT_EXPR:
2370     case POSTINCREMENT_EXPR:
2371     case INIT_EXPR:
2372     case PREDECREMENT_EXPR:
2373     case PREINCREMENT_EXPR:
2374     case COMPOUND_EXPR:
2375     case MODIFY_EXPR:
2376       rval_ops = false;
2377       /* FALLTHRU */
2378     case POINTER_PLUS_EXPR:
2379     case PLUS_EXPR:
2380     case POINTER_DIFF_EXPR:
2381     case MINUS_EXPR:
2382     case MULT_EXPR:
2383     case TRUNC_DIV_EXPR:
2384     case CEIL_DIV_EXPR:
2385     case FLOOR_DIV_EXPR:
2386     case ROUND_DIV_EXPR:
2387     case TRUNC_MOD_EXPR:
2388     case CEIL_MOD_EXPR:
2389     case ROUND_MOD_EXPR:
2390     case RDIV_EXPR:
2391     case EXACT_DIV_EXPR:
2392     case MIN_EXPR:
2393     case MAX_EXPR:
2394     case LSHIFT_EXPR:
2395     case RSHIFT_EXPR:
2396     case LROTATE_EXPR:
2397     case RROTATE_EXPR:
2398     case BIT_AND_EXPR:
2399     case BIT_IOR_EXPR:
2400     case BIT_XOR_EXPR:
2401     case TRUTH_AND_EXPR:
2402     case TRUTH_ANDIF_EXPR:
2403     case TRUTH_OR_EXPR:
2404     case TRUTH_ORIF_EXPR:
2405     case TRUTH_XOR_EXPR:
2406     case LT_EXPR: case LE_EXPR:
2407     case GT_EXPR: case GE_EXPR:
2408     case EQ_EXPR: case NE_EXPR:
2409     case UNORDERED_EXPR: case ORDERED_EXPR:
2410     case UNLT_EXPR: case UNLE_EXPR:
2411     case UNGT_EXPR: case UNGE_EXPR:
2412     case UNEQ_EXPR: case LTGT_EXPR:
2413     case RANGE_EXPR: case COMPLEX_EXPR:
2414 
2415       loc = EXPR_LOCATION (x);
2416       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2417       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2418 
2419       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2420 	{
2421 	  if (op0 == error_mark_node || op1 == error_mark_node)
2422 	    x = error_mark_node;
2423 	  else
2424 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2425 	}
2426       else
2427 	x = fold (x);
2428 
2429       if (TREE_NO_WARNING (org_x)
2430 	  && warn_nonnull_compare
2431 	  && COMPARISON_CLASS_P (org_x))
2432 	{
2433 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2434 	    ;
2435 	  else if (COMPARISON_CLASS_P (x))
2436 	    TREE_NO_WARNING (x) = 1;
2437 	  /* Otherwise give up on optimizing these, let GIMPLE folders
2438 	     optimize those later on.  */
2439 	  else if (op0 != TREE_OPERAND (org_x, 0)
2440 		   || op1 != TREE_OPERAND (org_x, 1))
2441 	    {
2442 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2443 	      TREE_NO_WARNING (x) = 1;
2444 	    }
2445 	  else
2446 	    x = org_x;
2447 	}
2448       break;
2449 
2450     case VEC_COND_EXPR:
2451     case COND_EXPR:
2452       loc = EXPR_LOCATION (x);
2453       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2454       op1 = cp_fold (TREE_OPERAND (x, 1));
2455       op2 = cp_fold (TREE_OPERAND (x, 2));
2456 
2457       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2458 	{
2459 	  warning_sentinel s (warn_int_in_bool_context);
2460 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2461 	    op1 = cp_truthvalue_conversion (op1);
2462 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2463 	    op2 = cp_truthvalue_conversion (op2);
2464 	}
2465       else if (VOID_TYPE_P (TREE_TYPE (x)))
2466 	{
2467 	  if (TREE_CODE (op0) == INTEGER_CST)
2468 	    {
2469 	      /* If the condition is constant, fold can fold away
2470 		 the COND_EXPR.  If some statement-level uses of COND_EXPR
2471 		 have one of the branches NULL, avoid folding crash.  */
2472 	      if (!op1)
2473 		op1 = build_empty_stmt (loc);
2474 	      if (!op2)
2475 		op2 = build_empty_stmt (loc);
2476 	    }
2477 	  else
2478 	    {
2479 	      /* Otherwise, don't bother folding a void condition, since
2480 		 it can't produce a constant value.  */
2481 	      if (op0 != TREE_OPERAND (x, 0)
2482 		  || op1 != TREE_OPERAND (x, 1)
2483 		  || op2 != TREE_OPERAND (x, 2))
2484 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2485 	      break;
2486 	    }
2487 	}
2488 
2489       if (op0 != TREE_OPERAND (x, 0)
2490 	  || op1 != TREE_OPERAND (x, 1)
2491 	  || op2 != TREE_OPERAND (x, 2))
2492 	{
2493 	  if (op0 == error_mark_node
2494 	      || op1 == error_mark_node
2495 	      || op2 == error_mark_node)
2496 	    x = error_mark_node;
2497 	  else
2498 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2499 	}
2500       else
2501 	x = fold (x);
2502 
2503       /* A COND_EXPR might have incompatible types in branches if one or both
2504 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2505       if (TREE_CODE (x) != code
2506 	  && x != error_mark_node
2507 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2508 	x = fold_convert (TREE_TYPE (org_x), x);
2509 
2510       break;
2511 
2512     case CALL_EXPR:
2513       {
2514 	int i, m, sv = optimize, nw = sv, changed = 0;
2515 	tree callee = get_callee_fndecl (x);
2516 
2517 	/* Some built-in function calls will be evaluated at compile-time in
2518 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2519 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2520 	if (callee && DECL_BUILT_IN (callee) && !optimize
2521 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2522 	    && current_function_decl
2523 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2524 	  nw = 1;
2525 
2526 	x = copy_node (x);
2527 
2528 	m = call_expr_nargs (x);
2529 	for (i = 0; i < m; i++)
2530 	  {
2531 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2532 	    if (r != CALL_EXPR_ARG (x, i))
2533 	      {
2534 		if (r == error_mark_node)
2535 		  {
2536 		    x = error_mark_node;
2537 		    break;
2538 		  }
2539 		changed = 1;
2540 	      }
2541 	    CALL_EXPR_ARG (x, i) = r;
2542 	  }
2543 	if (x == error_mark_node)
2544 	  break;
2545 
2546 	optimize = nw;
2547 	r = fold (x);
2548 	optimize = sv;
2549 
2550 	if (TREE_CODE (r) != CALL_EXPR)
2551 	  {
2552 	    x = cp_fold (r);
2553 	    break;
2554 	  }
2555 
2556 	optimize = nw;
2557 
2558 	/* Invoke maybe_constant_value for functions declared
2559 	   constexpr and not called with AGGR_INIT_EXPRs.
2560 	   TODO:
2561 	   Do constexpr expansion of expressions where the call itself is not
2562 	   constant, but the call followed by an INDIRECT_REF is.  */
2563 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2564 	    && !flag_no_inline)
2565 	  r = maybe_constant_value (x);
2566 	optimize = sv;
2567 
2568         if (TREE_CODE (r) != CALL_EXPR)
2569 	  {
2570 	    if (DECL_CONSTRUCTOR_P (callee))
2571 	      {
2572 		loc = EXPR_LOCATION (x);
2573 		tree s = build_fold_indirect_ref_loc (loc,
2574 						      CALL_EXPR_ARG (x, 0));
2575 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2576 	      }
2577 	    x = r;
2578 	    break;
2579 	  }
2580 
2581 	if (!changed)
2582 	  x = org_x;
2583 	break;
2584       }
2585 
2586     case CONSTRUCTOR:
2587       {
2588 	unsigned i;
2589 	constructor_elt *p;
2590 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2591 	vec<constructor_elt, va_gc> *nelts = NULL;
2592 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2593 	  {
2594 	    tree op = cp_fold (p->value);
2595 	    if (op != p->value)
2596 	      {
2597 		if (op == error_mark_node)
2598 		  {
2599 		    x = error_mark_node;
2600 		    vec_free (nelts);
2601 		    break;
2602 		  }
2603 		if (nelts == NULL)
2604 		  nelts = elts->copy ();
2605 		(*nelts)[i].value = op;
2606 	      }
2607 	  }
2608 	if (nelts)
2609 	  {
2610 	    x = build_constructor (TREE_TYPE (x), nelts);
2611 	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2612 	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2613 	  }
2614 	if (VECTOR_TYPE_P (TREE_TYPE (x)))
2615 	  x = fold (x);
2616 	break;
2617       }
2618     case TREE_VEC:
2619       {
2620 	bool changed = false;
2621 	vec<tree, va_gc> *vec = make_tree_vector ();
2622 	int i, n = TREE_VEC_LENGTH (x);
2623 	vec_safe_reserve (vec, n);
2624 
2625 	for (i = 0; i < n; i++)
2626 	  {
2627 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2628 	    vec->quick_push (op);
2629 	    if (op != TREE_VEC_ELT (x, i))
2630 	      changed = true;
2631 	  }
2632 
2633 	if (changed)
2634 	  {
2635 	    r = copy_node (x);
2636 	    for (i = 0; i < n; i++)
2637 	      TREE_VEC_ELT (r, i) = (*vec)[i];
2638 	    x = r;
2639 	  }
2640 
2641 	release_tree_vector (vec);
2642       }
2643 
2644       break;
2645 
2646     case ARRAY_REF:
2647     case ARRAY_RANGE_REF:
2648 
2649       loc = EXPR_LOCATION (x);
2650       op0 = cp_fold (TREE_OPERAND (x, 0));
2651       op1 = cp_fold (TREE_OPERAND (x, 1));
2652       op2 = cp_fold (TREE_OPERAND (x, 2));
2653       op3 = cp_fold (TREE_OPERAND (x, 3));
2654 
2655       if (op0 != TREE_OPERAND (x, 0)
2656 	  || op1 != TREE_OPERAND (x, 1)
2657 	  || op2 != TREE_OPERAND (x, 2)
2658 	  || op3 != TREE_OPERAND (x, 3))
2659 	{
2660 	  if (op0 == error_mark_node
2661 	      || op1 == error_mark_node
2662 	      || op2 == error_mark_node
2663 	      || op3 == error_mark_node)
2664 	    x = error_mark_node;
2665 	  else
2666 	    {
2667 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2668 	      TREE_READONLY (x) = TREE_READONLY (org_x);
2669 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2670 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2671 	    }
2672 	}
2673 
2674       x = fold (x);
2675       break;
2676 
2677     case SAVE_EXPR:
2678       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2679 	 folding, evaluates to an invariant.  In that case no need to wrap
2680 	 this folded tree with a SAVE_EXPR.  */
2681       r = cp_fold (TREE_OPERAND (x, 0));
2682       if (tree_invariant_p (r))
2683 	x = r;
2684       break;
2685 
2686     default:
2687       return org_x;
2688     }
2689 
2690   fold_cache->put (org_x, x);
2691   /* Prevent that we try to fold an already folded result again.  */
2692   if (x != org_x)
2693     fold_cache->put (x, x);
2694 
2695   return x;
2696 }
2697 
2698 #include "gt-cp-cp-gimplify.h"
2699