/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002-2018 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
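
/* An illustrative note (not from the original sources): for a body such as
   `while (c) { if (e) break; }`, genericizing the break statement calls
   get_bc_label (bc_break), which sets TREE_USED on the label and yields a
   GOTO_EXPR to it; finish_bc_block later appends the matching LABEL_EXPR
   after the loop only because the label was used, so loops without any
   break or continue get no stray labels.  */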

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
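
/* An illustrative sketch of the tree built above (derived from the calls,
   not original text):

     TRY_CATCH_EXPR
       op0: BODY
       op1: EH_FILTER_EXPR
	      EH_FILTER_TYPES: ALLOWED
	      EH_FILTER_FAILURE: FAILURE

   i.e. if BODY throws an exception whose type is not in ALLOWED, the
   FAILURE expression is executed.  */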

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}
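
/* For example (illustrative): `if (x) f ();` with no else clause becomes
   COND_EXPR (x, f (), <empty>), while `if (1) f (); else ;` reduces
   directly to `f ();` because the discarded arm has no side effects.  */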

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
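
/* An illustrative sketch (derived from the code above): for a while loop
   `while (c) body;` with a non-constant condition, the result is roughly

     LOOP_EXPR
       COND_EXPR (c, <empty>, goto break_lab)   // exit test comes first
       body
       continue_lab:   // appended only if a continue statement was seen
     break_lab:

   whereas for a do-while loop (cond_is_first false) the exit test is
   placed after the body instead.  */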

/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  break_block = begin_bc_block (bc_break, stmt_locus);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}

/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

static bool
simple_empty_class_p (tree type, tree op)
{
  return
    ((TREE_CODE (op) == COMPOUND_EXPR
      && simple_empty_class_p (type, TREE_OPERAND (op, 1)))
     || TREE_CODE (op) == EMPTY_CLASS_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0
	 && !TREE_CLOBBER_P (op))
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && is_really_empty_class (type);
}

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
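
/* For example (illustrative): evaluating `a[f ()]` as an lvalue has side
   effects because the ARRAY_REF index does; merely naming a volatile
   variable, by contrast, reaches the DECL_P case and returns false even
   though the DECL itself has TREE_SIDE_EFFECTS set.  */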

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

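/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference, i.e. has DECL_BY_REFERENCE set.  */
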
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure the copy ctor and
		 dtor are instantiated, because during gimplification
		 it will already be too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: the folding of non-OMP cases should eventually move into the
   middle end.  For now most folding is done only on GENERIC in
   fold-const, so we need to perform it before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once; otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and always
	 returns the same tree, whose subtrees were walked the first
	 time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (stmt, 0)))
		       == REFERENCE_TYPE))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify the return at runtime.  If
   -fsanitize=return is not enabled, instrument __builtin_unreachable
   instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
	 improve any optimizations in that case and will just break UB code.
	 Don't add it for -fsanitize=unreachable -fno-sanitize=return either:
	 UBSan covers this with ubsan_instrument_return above, where
	 sufficient information is provided, while the __builtin_unreachable ()
	 below, used when return sanitization is disabled, would just result
	 in a hard-to-understand runtime error without a location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
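
/* An illustrative example (not original text): given

     int f (int x) { if (x) return 1; }

   the body does not obviously end in a RETURN_EXPR, so with
   -fsanitize=return a runtime check is appended at the end of the body,
   and otherwise (when optimizing) a call to __builtin_unreachable ()
   marks falling off the end as undefined.  */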
1664 
1665 void
1666 cp_genericize (tree fndecl)
1667 {
1668   tree t;
1669 
1670   /* Fix up the types of parms passed by invisible reference.  */
1671   for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1672     if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1673       {
1674 	/* If a function's arguments are copied to create a thunk,
1675 	   then DECL_BY_REFERENCE will be set -- but the type of the
1676 	   argument will be a pointer type, so we will never get
1677 	   here.  */
1678 	gcc_assert (!DECL_BY_REFERENCE (t));
1679 	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1680 	TREE_TYPE (t) = DECL_ARG_TYPE (t);
1681 	DECL_BY_REFERENCE (t) = 1;
1682 	TREE_ADDRESSABLE (t) = 0;
1683 	relayout_decl (t);
1684       }
1685 
1686   /* Do the same for the return value.  */
1687   if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1688     {
1689       t = DECL_RESULT (fndecl);
1690       TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1691       DECL_BY_REFERENCE (t) = 1;
1692       TREE_ADDRESSABLE (t) = 0;
1693       relayout_decl (t);
1694       if (DECL_NAME (t))
1695 	{
1696 	  /* Adjust DECL_VALUE_EXPR of the original var.  */
1697 	  tree outer = outer_curly_brace_block (current_function_decl);
1698 	  tree var;
1699 
1700 	  if (outer)
1701 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1702 	      if (VAR_P (var)
1703 		  && DECL_NAME (t) == DECL_NAME (var)
1704 		  && DECL_HAS_VALUE_EXPR_P (var)
1705 		  && DECL_VALUE_EXPR (var) == t)
1706 		{
1707 		  tree val = convert_from_reference (t);
1708 		  SET_DECL_VALUE_EXPR (var, val);
1709 		  break;
1710 		}
1711 	}
1712     }
1713 
1714   /* If we're a clone, the body is already GIMPLE.  */
1715   if (DECL_CLONED_FUNCTION_P (fndecl))
1716     return;
1717 
1718   /* Allow cp_genericize calls to be nested.  */
1719   tree save_bc_label[2];
1720   save_bc_label[bc_break] = bc_label[bc_break];
1721   save_bc_label[bc_continue] = bc_label[bc_continue];
1722   bc_label[bc_break] = NULL_TREE;
1723   bc_label[bc_continue] = NULL_TREE;
1724 
1725   /* We do want to see every occurrence of the parms, so we can't just use
1726      walk_tree's hash functionality.  */
1727   cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1728 
1729   cp_maybe_instrument_return (fndecl);
1730 
1731   /* Do everything else.  */
1732   c_genericize (fndecl);
1733 
1734   gcc_assert (bc_label[bc_break] == NULL);
1735   gcc_assert (bc_label[bc_continue] == NULL);
1736   bc_label[bc_break] = save_bc_label[bc_break];
1737   bc_label[bc_continue] = save_bc_label[bc_continue];
1738 }
1739 
1740 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1741    NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
1742    actually only takes one argument.  */
1743 
1744 static tree
1745 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1746 {
1747   tree defparm, parm, t;
1748   int i = 0;
1749   int nargs;
1750   tree *argarray;
1751 
1752   if (fn == NULL)
1753     return NULL;
1754 
1755   nargs = list_length (DECL_ARGUMENTS (fn));
1756   argarray = XALLOCAVEC (tree, nargs);
1757 
1758   defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1759   if (arg2)
1760     defparm = TREE_CHAIN (defparm);
1761 
1762   bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1763   if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1764     {
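      /* ARG1 is an array, so FN must be applied to each element.  The
	 statement list built below has roughly this shape (a sketch,
	 with T standing for the innermost element type):

	     T *p1 = &arg1[0]...[0], *end1 = p1 + sizeof (arg1);
	     T *p2 = &arg2[0]...[0];	    <- only if ARG2 is given
	   lab:
	     fn (p1, p2, <default args>);
	     p1++; p2++;
	     if (p1 != end1) goto lab;  */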
1765       tree inner_type = TREE_TYPE (arg1);
1766       tree start1, end1, p1;
1767       tree start2 = NULL, p2 = NULL;
1768       tree ret = NULL, lab;
1769 
1770       start1 = arg1;
1771       start2 = arg2;
1772       do
1773 	{
1774 	  inner_type = TREE_TYPE (inner_type);
1775 	  start1 = build4 (ARRAY_REF, inner_type, start1,
1776 			   size_zero_node, NULL, NULL);
1777 	  if (arg2)
1778 	    start2 = build4 (ARRAY_REF, inner_type, start2,
1779 			     size_zero_node, NULL, NULL);
1780 	}
1781       while (TREE_CODE (inner_type) == ARRAY_TYPE);
1782       start1 = build_fold_addr_expr_loc (input_location, start1);
1783       if (arg2)
1784 	start2 = build_fold_addr_expr_loc (input_location, start2);
1785 
1786       end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
1787       end1 = fold_build_pointer_plus (start1, end1);
1788 
1789       p1 = create_tmp_var (TREE_TYPE (start1));
1790       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
1791       append_to_statement_list (t, &ret);
1792 
1793       if (arg2)
1794 	{
1795 	  p2 = create_tmp_var (TREE_TYPE (start2));
1796 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
1797 	  append_to_statement_list (t, &ret);
1798 	}
1799 
1800       lab = create_artificial_label (input_location);
1801       t = build1 (LABEL_EXPR, void_type_node, lab);
1802       append_to_statement_list (t, &ret);
1803 
1804       argarray[i++] = p1;
1805       if (arg2)
1806 	argarray[i++] = p2;
1807       /* Handle default arguments.  */
1808       for (parm = defparm; parm && parm != void_list_node;
1809 	   parm = TREE_CHAIN (parm), i++)
1810 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1811 					   TREE_PURPOSE (parm), fn,
1812 					   i - is_method, tf_warning_or_error);
1813       t = build_call_a (fn, i, argarray);
1814       t = fold_convert (void_type_node, t);
1815       t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1816       append_to_statement_list (t, &ret);
1817 
1818       t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
1819       t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
1820       append_to_statement_list (t, &ret);
1821 
1822       if (arg2)
1823 	{
1824 	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
1825 	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
1826 	  append_to_statement_list (t, &ret);
1827 	}
1828 
1829       t = build2 (NE_EXPR, boolean_type_node, p1, end1);
1830       t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
1831       append_to_statement_list (t, &ret);
1832 
1833       return ret;
1834     }
1835   else
1836     {
1837       argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
1838       if (arg2)
1839 	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
1840       /* Handle default arguments.  */
1841       for (parm = defparm; parm && parm != void_list_node;
1842 	   parm = TREE_CHAIN (parm), i++)
1843 	argarray[i] = convert_default_arg (TREE_VALUE (parm),
1844 					   TREE_PURPOSE (parm), fn,
1845 					   i - is_method, tf_warning_or_error);
1846       t = build_call_a (fn, i, argarray);
1847       t = fold_convert (void_type_node, t);
1848       return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
1849     }
1850 }
1851 
1852 /* Return code to initialize DECL with its default constructor, or
1853    NULL if there's nothing to do.  */
1854 
1855 tree
1856 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
1857 {
1858   tree info = CP_OMP_CLAUSE_INFO (clause);
1859   tree ret = NULL;
1860 
1861   if (info)
1862     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
1863 
1864   return ret;
1865 }
1866 
1867 /* Return code to initialize DST with a copy constructor from SRC.  */
1868 
1869 tree
1870 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
1871 {
1872   tree info = CP_OMP_CLAUSE_INFO (clause);
1873   tree ret = NULL;
1874 
1875   if (info)
1876     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
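  /* With no copy constructor recorded, a plain assignment will do.  */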
1877   if (ret == NULL)
1878     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1879 
1880   return ret;
1881 }
1882 
1883 /* Similarly, except use an assignment operator instead.  */
1884 
1885 tree
1886 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
1887 {
1888   tree info = CP_OMP_CLAUSE_INFO (clause);
1889   tree ret = NULL;
1890 
1891   if (info)
1892     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
1893   if (ret == NULL)
1894     ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
1895 
1896   return ret;
1897 }
1898 
1899 /* Return code to destroy DECL.  */
1900 
1901 tree
1902 cxx_omp_clause_dtor (tree clause, tree decl)
1903 {
1904   tree info = CP_OMP_CLAUSE_INFO (clause);
1905   tree ret = NULL;
1906 
1907   if (info)
1908     ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
1909 
1910   return ret;
1911 }
1912 
1913 /* True if OpenMP should privatize what this DECL points to rather
1914    than the DECL itself.  */
1915 
1916 bool
1917 cxx_omp_privatize_by_reference (const_tree decl)
1918 {
1919   return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
1920 	  || is_invisiref_parm (decl));
1921 }
1922 
1923 /* Return true if DECL is a const-qualified var having no mutable member.  */
1924 bool
1925 cxx_omp_const_qual_no_mutable (tree decl)
1926 {
1927   tree type = TREE_TYPE (decl);
1928   if (TREE_CODE (type) == REFERENCE_TYPE)
1929     {
1930       if (!is_invisiref_parm (decl))
1931 	return false;
1932       type = TREE_TYPE (type);
1933 
1934       if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
1935 	{
1936 	  /* NRV (the named return value optimization) doesn't preserve
1937 	     const qualification of the variable's type.  */
1938 	  tree outer = outer_curly_brace_block (current_function_decl);
1939 	  tree var;
1940 
1941 	  if (outer)
1942 	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1943 	      if (VAR_P (var)
1944 		  && DECL_NAME (decl) == DECL_NAME (var)
1945 		  && (TYPE_MAIN_VARIANT (type)
1946 		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
1947 		{
1948 		  if (TYPE_READONLY (TREE_TYPE (var)))
1949 		    type = TREE_TYPE (var);
1950 		  break;
1951 		}
1952 	}
1953     }
1954 
1955   if (type == error_mark_node)
1956     return false;
1957 
1958   /* Variables with const-qualified type having no mutable member
1959      are predetermined shared.  */
1960   if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
1961     return true;
1962 
1963   return false;
1964 }
1965 
1966 /* True if the OpenMP sharing attribute of DECL is predetermined.  */
1967 
1968 enum omp_clause_default_kind
1969 cxx_omp_predetermined_sharing_1 (tree decl)
1970 {
1971   /* Static data members are predetermined shared.  */
1972   if (TREE_STATIC (decl))
1973     {
1974       tree ctx = CP_DECL_CONTEXT (decl);
1975       if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
1976 	return OMP_CLAUSE_DEFAULT_SHARED;
1977     }
1978 
1979   /* Const qualified vars having no mutable member are predetermined
1980      shared.  */
1981   if (cxx_omp_const_qual_no_mutable (decl))
1982     return OMP_CLAUSE_DEFAULT_SHARED;
1983 
1984   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
1985 }
1986 
1987 /* Likewise, but also include the artificial vars.  We don't want to
1988    disallow artificial vars being mentioned in explicit clauses, as we
1989    use them e.g. for loop constructs with random access iterators other
1990    than pointers, but during gimplification we do want to treat them as
1991    predetermined.  */
1992 
1993 enum omp_clause_default_kind
1994 cxx_omp_predetermined_sharing (tree decl)
1995 {
1996   enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
1997   if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
1998     return ret;
1999 
2000   /* Predetermine artificial variables holding integral values; those
2001      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2002      gimplification.  */
2003   if (VAR_P (decl)
2004       && DECL_ARTIFICIAL (decl)
2005       && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2006       && !(DECL_LANG_SPECIFIC (decl)
2007 	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2008     return OMP_CLAUSE_DEFAULT_SHARED;
2009 
2010   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2011 }
2012 
2013 /* Finalize an implicitly determined clause.  */
2014 
2015 void
2016 cxx_omp_finish_clause (tree c, gimple_seq *)
2017 {
2018   tree decl, inner_type;
2019   bool make_shared = false;
2020 
2021   if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2022     return;
2023 
2024   decl = OMP_CLAUSE_DECL (c);
2025   decl = require_complete_type (decl);
2026   inner_type = TREE_TYPE (decl);
2027   if (decl == error_mark_node)
2028     make_shared = true;
2029   else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
2030     inner_type = TREE_TYPE (inner_type);
2031 
2032   /* We're interested in the base element, not arrays.  */
2033   while (TREE_CODE (inner_type) == ARRAY_TYPE)
2034     inner_type = TREE_TYPE (inner_type);
2035 
2036   /* Check for special function availability by building a call to one.
2037      Save the results, because later we won't be in the right context
2038      for making these queries.  */
2039   if (!make_shared
2040       && CLASS_TYPE_P (inner_type)
2041       && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
2042     make_shared = true;
2043 
2044   if (make_shared)
2045     {
2046       OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2047       OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2048       OMP_CLAUSE_SHARED_READONLY (c) = 0;
2049     }
2050 }
2051 
2052 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2053    disregarded in an OpenMP construct, because it is going to be
2054    remapped during OpenMP lowering.  SHARED is true if DECL
2055    is going to be shared, false if it is going to be privatized.  */
2056 
2057 bool
2058 cxx_omp_disregard_value_expr (tree decl, bool shared)
2059 {
2060   return !shared
2061 	 && VAR_P (decl)
2062 	 && DECL_HAS_VALUE_EXPR_P (decl)
2063 	 && DECL_ARTIFICIAL (decl)
2064 	 && DECL_LANG_SPECIFIC (decl)
2065 	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
2066 }
2067 
2068 /* Fold expression X which is used as an rvalue if RVAL is true.  */
2069 
2070 static tree
2071 cp_fold_maybe_rvalue (tree x, bool rval)
2072 {
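  /* Loop, since substituting a DECL's constant value may expose
     further folding opportunities.  */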
2073   while (true)
2074     {
2075       x = cp_fold (x);
2076       if (rval && DECL_P (x)
2077 	  && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE)
2078 	{
2079 	  tree v = decl_constant_value (x);
2080 	  if (v != x && v != error_mark_node)
2081 	    {
2082 	      x = v;
2083 	      continue;
2084 	    }
2085 	}
2086       break;
2087     }
2088   return x;
2089 }
2090 
2091 /* Fold expression X which is used as an rvalue.  */
2092 
2093 static tree
2094 cp_fold_rvalue (tree x)
2095 {
2096   return cp_fold_maybe_rvalue (x, true);
2097 }
2098 
2099 /* Perform folding on expression X.  */
2100 
2101 tree
2102 cp_fully_fold (tree x)
2103 {
2104   if (processing_template_decl)
2105     return x;
2106   /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2107      have to call both.  */
2108   if (cxx_dialect >= cxx11)
2109     {
2110       x = maybe_constant_value (x);
2111       /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2112 	 a TARGET_EXPR; undo that here.  */
2113       if (TREE_CODE (x) == TARGET_EXPR)
2114 	x = TARGET_EXPR_INITIAL (x);
2115       else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2116 	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2117 	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2118 	x = TREE_OPERAND (x, 0);
2119     }
2120   return cp_fold_rvalue (x);
2121 }
2122 
2123 /* c-common interface to cp_fold.  If IN_INIT, this is in a static
2124    initializer and certain changes to the folding ought to be made
2125    (FIXME: currently they are not).  We never touch maybe_const, as it
2126    is only used for the C front-end C_MAYBE_CONST_EXPR.  */
2127 
2128 tree
2129 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2130 {
2131   return cp_fold_maybe_rvalue (x, !lval);
2132 }
2133 
2134 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2135 
2136 /* Dispose of the whole FOLD_CACHE.  */
2137 
2138 void
2139 clear_fold_cache (void)
2140 {
2141   if (fold_cache != NULL)
2142     fold_cache->empty ();
2143 }
2144 
2145 /* This function tries to fold an expression X.
2146    To avoid combinatorial explosion, folding results are kept in fold_cache.
2147    If X is invalid, we don't fold at all.
2148    For performance reasons we don't cache expressions representing a
2149    declaration or constant.
2150    Returns X or its folded variant.  */
2151 
2152 static tree
2153 cp_fold (tree x)
2154 {
2155   tree op0, op1, op2, op3;
2156   tree org_x = x, r = NULL_TREE;
2157   enum tree_code code;
2158   location_t loc;
2159   bool rval_ops = true;
2160 
2161   if (!x || x == error_mark_node)
2162     return x;
2163 
2164   if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2165     return x;
2166 
2167   /* Don't bother to cache DECLs or constants.  */
2168   if (DECL_P (x) || CONSTANT_CLASS_P (x))
2169     return x;
2170 
2171   if (fold_cache == NULL)
2172     fold_cache = hash_map<tree, tree>::create_ggc (101);
2173 
2174   if (tree *cached = fold_cache->get (x))
2175     return *cached;
2176 
2177   code = TREE_CODE (x);
2178   switch (code)
2179     {
2180     case CLEANUP_POINT_EXPR:
2181       /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2182 	 effects.  */
2183       r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2184       if (!TREE_SIDE_EFFECTS (r))
2185 	x = r;
2186       break;
2187 
2188     case SIZEOF_EXPR:
2189       x = fold_sizeof_expr (x);
2190       break;
2191 
2192     case VIEW_CONVERT_EXPR:
2193       rval_ops = false;
2194       /* FALLTHRU */
2195     case CONVERT_EXPR:
2196     case NOP_EXPR:
2197     case NON_LVALUE_EXPR:
2198 
2199       if (VOID_TYPE_P (TREE_TYPE (x)))
2200 	{
2201 	  /* This is just to make sure we don't end up with casts to
2202 	     void from error_mark_node.  If we just returned x, then
2203 	     cp_fold_r might fold the operand into error_mark_node and
2204 	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
2205 	     during gimplification doesn't like such casts.
2206 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2207 	     folding of the operand should already be in the caches, and
2208 	     cp_fold_r, if used, will modify the operand in place.  */
2209 	  op0 = cp_fold (TREE_OPERAND (x, 0));
2210 	  if (op0 == error_mark_node)
2211 	    x = error_mark_node;
2212 	  break;
2213 	}
2214 
2215       loc = EXPR_LOCATION (x);
2216       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2217 
2218       if (code == CONVERT_EXPR
2219 	  && SCALAR_TYPE_P (TREE_TYPE (x))
2220 	  && op0 != void_node)
2221 	/* During parsing we used convert_to_*_nofold; re-convert now using the
2222 	   folding variants, since fold() doesn't do those transformations.  */
2223 	x = fold (convert (TREE_TYPE (x), op0));
2224       else if (op0 != TREE_OPERAND (x, 0))
2225 	{
2226 	  if (op0 == error_mark_node)
2227 	    x = error_mark_node;
2228 	  else
2229 	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2230 	}
2231       else
2232 	x = fold (x);
2233 
2234       /* Conversion of an out-of-range value has implementation-defined
2235 	 behavior; the language considers it different from arithmetic
2236 	 overflow, which is undefined.  */
2237       if (TREE_CODE (op0) == INTEGER_CST
2238 	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2239 	TREE_OVERFLOW (x) = false;
2240 
2241       break;
2242 
2243     case INDIRECT_REF:
2244       /* We don't need the decltype(auto) obfuscation anymore.  */
2245       if (REF_PARENTHESIZED_P (x))
2246 	{
2247 	  tree p = maybe_undo_parenthesized_ref (x);
2248 	  return cp_fold (p);
2249 	}
2250       goto unary;
2251 
2252     case ADDR_EXPR:
2253       loc = EXPR_LOCATION (x);
2254       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2255 
2256       /* Cope with user tricks that amount to offsetof.  */
2257       if (op0 != error_mark_node
2258 	  && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE
2259 	  && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE)
2260 	{
2261 	  tree val = get_base_address (op0);
2262 	  if (val
2263 	      && INDIRECT_REF_P (val)
2264 	      && COMPLETE_TYPE_P (TREE_TYPE (val))
2265 	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2266 	    {
2267 	      val = TREE_OPERAND (val, 0);
2268 	      STRIP_NOPS (val);
2269 	      val = maybe_constant_value (val);
2270 	      if (TREE_CODE (val) == INTEGER_CST)
2271 		return fold_offsetof (op0, TREE_TYPE (x));
2272 	    }
2273 	}
2274       goto finish_unary;
2275 
2276     case REALPART_EXPR:
2277     case IMAGPART_EXPR:
2278       rval_ops = false;
2279       /* FALLTHRU */
2280     case CONJ_EXPR:
2281     case FIX_TRUNC_EXPR:
2282     case FLOAT_EXPR:
2283     case NEGATE_EXPR:
2284     case ABS_EXPR:
2285     case BIT_NOT_EXPR:
2286     case TRUTH_NOT_EXPR:
2287     case FIXED_CONVERT_EXPR:
2288     unary:
2289 
2290       loc = EXPR_LOCATION (x);
2291       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2292 
2293     finish_unary:
2294       if (op0 != TREE_OPERAND (x, 0))
2295 	{
2296 	  if (op0 == error_mark_node)
2297 	    x = error_mark_node;
2298 	  else
2299 	    {
2300 	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
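	      /* If the folded result is still a memory reference, carry
		 over the access qualifiers of the original expression.  */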
2301 	      if (code == INDIRECT_REF
2302 		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2303 		{
2304 		  TREE_READONLY (x) = TREE_READONLY (org_x);
2305 		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2306 		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2307 		}
2308 	    }
2309 	}
2310       else
2311 	x = fold (x);
2312 
2313       gcc_assert (TREE_CODE (x) != COND_EXPR
2314 		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2315       break;
2316 
2317     case UNARY_PLUS_EXPR:
2318       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2319       if (op0 == error_mark_node)
2320 	x = error_mark_node;
2321       else
2322 	x = fold_convert (TREE_TYPE (x), op0);
2323       break;
2324 
2325     case POSTDECREMENT_EXPR:
2326     case POSTINCREMENT_EXPR:
2327     case INIT_EXPR:
2328     case PREDECREMENT_EXPR:
2329     case PREINCREMENT_EXPR:
2330     case COMPOUND_EXPR:
2331     case MODIFY_EXPR:
2332       rval_ops = false;
2333       /* FALLTHRU */
2334     case POINTER_PLUS_EXPR:
2335     case PLUS_EXPR:
2336     case POINTER_DIFF_EXPR:
2337     case MINUS_EXPR:
2338     case MULT_EXPR:
2339     case TRUNC_DIV_EXPR:
2340     case CEIL_DIV_EXPR:
2341     case FLOOR_DIV_EXPR:
2342     case ROUND_DIV_EXPR:
2343     case TRUNC_MOD_EXPR:
2344     case CEIL_MOD_EXPR:
2345     case ROUND_MOD_EXPR:
2346     case RDIV_EXPR:
2347     case EXACT_DIV_EXPR:
2348     case MIN_EXPR:
2349     case MAX_EXPR:
2350     case LSHIFT_EXPR:
2351     case RSHIFT_EXPR:
2352     case LROTATE_EXPR:
2353     case RROTATE_EXPR:
2354     case BIT_AND_EXPR:
2355     case BIT_IOR_EXPR:
2356     case BIT_XOR_EXPR:
2357     case TRUTH_AND_EXPR:
2358     case TRUTH_ANDIF_EXPR:
2359     case TRUTH_OR_EXPR:
2360     case TRUTH_ORIF_EXPR:
2361     case TRUTH_XOR_EXPR:
2362     case LT_EXPR: case LE_EXPR:
2363     case GT_EXPR: case GE_EXPR:
2364     case EQ_EXPR: case NE_EXPR:
2365     case UNORDERED_EXPR: case ORDERED_EXPR:
2366     case UNLT_EXPR: case UNLE_EXPR:
2367     case UNGT_EXPR: case UNGE_EXPR:
2368     case UNEQ_EXPR: case LTGT_EXPR:
2369     case RANGE_EXPR: case COMPLEX_EXPR:
2370 
2371       loc = EXPR_LOCATION (x);
2372       op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2373       op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2374 
2375       if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2376 	{
2377 	  if (op0 == error_mark_node || op1 == error_mark_node)
2378 	    x = error_mark_node;
2379 	  else
2380 	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2381 	}
2382       else
2383 	x = fold (x);
2384 
2385       if (TREE_NO_WARNING (org_x)
2386 	  && warn_nonnull_compare
2387 	  && COMPARISON_CLASS_P (org_x))
2388 	{
2389 	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2390 	    ;
2391 	  else if (COMPARISON_CLASS_P (x))
2392 	    TREE_NO_WARNING (x) = 1;
2393 	  /* Otherwise give up on optimizing these; let the GIMPLE
2394 	     folders optimize them later on.  */
2395 	  else if (op0 != TREE_OPERAND (org_x, 0)
2396 		   || op1 != TREE_OPERAND (org_x, 1))
2397 	    {
2398 	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2399 	      TREE_NO_WARNING (x) = 1;
2400 	    }
2401 	  else
2402 	    x = org_x;
2403 	}
2404       break;
2405 
2406     case VEC_COND_EXPR:
2407     case COND_EXPR:
2408       loc = EXPR_LOCATION (x);
2409       op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2410       op1 = cp_fold (TREE_OPERAND (x, 1));
2411       op2 = cp_fold (TREE_OPERAND (x, 2));
2412 
2413       if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2414 	{
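	  /* Reduce both arms to truth values so the folders below can
	     simplify the whole expression; suppress warnings about ints
	     used in a boolean context while doing so.  */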
2415 	  warning_sentinel s (warn_int_in_bool_context);
2416 	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
2417 	    op1 = cp_truthvalue_conversion (op1);
2418 	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
2419 	    op2 = cp_truthvalue_conversion (op2);
2420 	}
2421       else if (VOID_TYPE_P (TREE_TYPE (x)))
2422 	{
2423 	  if (TREE_CODE (op0) == INTEGER_CST)
2424 	    {
2425 	      /* If the condition is constant, fold can fold away the
2426 		 COND_EXPR.  Some statement-level uses of COND_EXPR have a
2427 		 NULL branch; fill those in to avoid a crash in fold.  */
2428 	      if (!op1)
2429 		op1 = build_empty_stmt (loc);
2430 	      if (!op2)
2431 		op2 = build_empty_stmt (loc);
2432 	    }
2433 	  else
2434 	    {
2435 	      /* Otherwise, don't bother folding a void COND_EXPR, since
2436 		 it can't produce a constant value.  */
2437 	      if (op0 != TREE_OPERAND (x, 0)
2438 		  || op1 != TREE_OPERAND (x, 1)
2439 		  || op2 != TREE_OPERAND (x, 2))
2440 		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2441 	      break;
2442 	    }
2443 	}
2444 
2445       if (op0 != TREE_OPERAND (x, 0)
2446 	  || op1 != TREE_OPERAND (x, 1)
2447 	  || op2 != TREE_OPERAND (x, 2))
2448 	{
2449 	  if (op0 == error_mark_node
2450 	      || op1 == error_mark_node
2451 	      || op2 == error_mark_node)
2452 	    x = error_mark_node;
2453 	  else
2454 	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2455 	}
2456       else
2457 	x = fold (x);
2458 
2459       /* A COND_EXPR might have incompatible types in branches if one or both
2460 	 arms are bitfields.  If folding exposed such a branch, fix it up.  */
2461       if (TREE_CODE (x) != code
2462 	  && x != error_mark_node
2463 	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2464 	x = fold_convert (TREE_TYPE (org_x), x);
2465 
2466       break;
2467 
2468     case CALL_EXPR:
2469       {
2470 	int i, m, sv = optimize, nw = sv, changed = 0;
2471 	tree callee = get_callee_fndecl (x);
2472 
2473 	/* Some built-in function calls will be evaluated at compile-time in
2474 	   fold ().  Set optimize to 1 when folding __builtin_constant_p inside
2475 	   a constexpr function so that fold_builtin_1 doesn't fold it to 0.  */
2476 	if (callee && DECL_BUILT_IN (callee) && !optimize
2477 	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
2478 	    && current_function_decl
2479 	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2480 	  nw = 1;
2481 
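	/* Work on a copy of the call so that folded arguments can be
	   stored without modifying the original expression.  */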
2482 	x = copy_node (x);
2483 
2484 	m = call_expr_nargs (x);
2485 	for (i = 0; i < m; i++)
2486 	  {
2487 	    r = cp_fold (CALL_EXPR_ARG (x, i));
2488 	    if (r != CALL_EXPR_ARG (x, i))
2489 	      {
2490 		if (r == error_mark_node)
2491 		  {
2492 		    x = error_mark_node;
2493 		    break;
2494 		  }
2495 		changed = 1;
2496 	      }
2497 	    CALL_EXPR_ARG (x, i) = r;
2498 	  }
2499 	if (x == error_mark_node)
2500 	  break;
2501 
2502 	optimize = nw;
2503 	r = fold (x);
2504 	optimize = sv;
2505 
2506 	if (TREE_CODE (r) != CALL_EXPR)
2507 	  {
2508 	    x = cp_fold (r);
2509 	    break;
2510 	  }
2511 
2512 	optimize = nw;
2513 
2514 	/* Invoke maybe_constant_value for functions declared
2515 	   constexpr and not called with AGGR_INIT_EXPRs.
2516 	   TODO:
2517 	   Do constexpr expansion of expressions where the call itself is not
2518 	   constant, but the call followed by an INDIRECT_REF is.  */
2519 	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2520 	    && !flag_no_inline)
2521 	  r = maybe_constant_value (x);
2522 	optimize = sv;
2523 
2524         if (TREE_CODE (r) != CALL_EXPR)
2525 	  {
2526 	    if (DECL_CONSTRUCTOR_P (callee))
2527 	      {
2528 		loc = EXPR_LOCATION (x);
2529 		tree s = build_fold_indirect_ref_loc (loc,
2530 						      CALL_EXPR_ARG (x, 0));
2531 		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2532 	      }
2533 	    x = r;
2534 	    break;
2535 	  }
2536 
2537 	if (!changed)
2538 	  x = org_x;
2539 	break;
2540       }
2541 
2542     case CONSTRUCTOR:
2543       {
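	/* Fold each initializer value, copying the element vector lazily
	   on the first change.  */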
2544 	unsigned i;
2545 	constructor_elt *p;
2546 	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2547 	vec<constructor_elt, va_gc> *nelts = NULL;
2548 	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2549 	  {
2550 	    tree op = cp_fold (p->value);
2551 	    if (op != p->value)
2552 	      {
2553 		if (op == error_mark_node)
2554 		  {
2555 		    x = error_mark_node;
2556 		    vec_free (nelts);
2557 		    break;
2558 		  }
2559 		if (nelts == NULL)
2560 		  nelts = elts->copy ();
2561 		(*nelts)[i].value = op;
2562 	      }
2563 	  }
2564 	if (nelts)
2565 	  {
2566 	    x = build_constructor (TREE_TYPE (x), nelts);
2567 	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2568 	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2569 	  }
2570 	if (VECTOR_TYPE_P (TREE_TYPE (x)))
2571 	  x = fold (x);
2572 	break;
2573       }
2574     case TREE_VEC:
2575       {
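	/* Fold every element; rebuild the TREE_VEC only if one of them
	   changed.  */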
2576 	bool changed = false;
2577 	vec<tree, va_gc> *vec = make_tree_vector ();
2578 	int i, n = TREE_VEC_LENGTH (x);
2579 	vec_safe_reserve (vec, n);
2580 
2581 	for (i = 0; i < n; i++)
2582 	  {
2583 	    tree op = cp_fold (TREE_VEC_ELT (x, i));
2584 	    vec->quick_push (op);
2585 	    if (op != TREE_VEC_ELT (x, i))
2586 	      changed = true;
2587 	  }
2588 
2589 	if (changed)
2590 	  {
2591 	    r = copy_node (x);
2592 	    for (i = 0; i < n; i++)
2593 	      TREE_VEC_ELT (r, i) = (*vec)[i];
2594 	    x = r;
2595 	  }
2596 
2597 	release_tree_vector (vec);
2598       }
2599 
2600       break;
2601 
2602     case ARRAY_REF:
2603     case ARRAY_RANGE_REF:
2604 
2605       loc = EXPR_LOCATION (x);
2606       op0 = cp_fold (TREE_OPERAND (x, 0));
2607       op1 = cp_fold (TREE_OPERAND (x, 1));
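      /* Operands 2 and 3 may be NULL_TREE; cp_fold passes those through
	 unchanged.  */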
2608       op2 = cp_fold (TREE_OPERAND (x, 2));
2609       op3 = cp_fold (TREE_OPERAND (x, 3));
2610 
2611       if (op0 != TREE_OPERAND (x, 0)
2612 	  || op1 != TREE_OPERAND (x, 1)
2613 	  || op2 != TREE_OPERAND (x, 2)
2614 	  || op3 != TREE_OPERAND (x, 3))
2615 	{
2616 	  if (op0 == error_mark_node
2617 	      || op1 == error_mark_node
2618 	      || op2 == error_mark_node
2619 	      || op3 == error_mark_node)
2620 	    x = error_mark_node;
2621 	  else
2622 	    {
2623 	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
2624 	      TREE_READONLY (x) = TREE_READONLY (org_x);
2625 	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2626 	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2627 	    }
2628 	}
2629 
2630       x = fold (x);
2631       break;
2632 
2633     case SAVE_EXPR:
2634       /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
2635 	 folding, evaluates to an invariant.  In that case there is no
2636 	 need to wrap the folded tree in a SAVE_EXPR.  */
2637       r = cp_fold (TREE_OPERAND (x, 0));
2638       if (tree_invariant_p (r))
2639 	x = r;
2640       break;
2641 
2642     default:
2643       return org_x;
2644     }
2645 
2646   fold_cache->put (org_x, x);
2647   /* Make sure we don't try to fold an already folded result again.  */
2648   if (x != org_x)
2649     fold_cache->put (x, x);
2650 
2651   return x;
2652 }
2653 
2654 #include "gt-cp-cp-gimplify.h"
2655