/* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c.

   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "cp-tree.h"
#include "c-family/c-common.h"
#include "tree-iterator.h"
#include "gimple.h"
#include "hashtab.h"
#include "pointer-set.h"
#include "flags.h"
#include "splay-tree.h"

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label and pushes it into the current context.  */

static tree
begin_bc_block (enum bc_t bc)
{
  tree label = create_artificial_label (input_location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BODY is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   body.  Otherwise, just forget the label.  */

static gimple_seq
finish_bc_block (enum bc_t bc, tree label, gimple_seq body)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    {
      gimple_seq_add_stmt (&body, gimple_build_label (label));
    }

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
  return body;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */
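
/* A sketch of the tree built below:

     TRY_CATCH_EXPR <BODY, EH_FILTER_EXPR <ALLOWED, FAILURE>>

   i.e. FAILURE runs when an exception not matching ALLOWED escapes BODY.  */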

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_node, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */
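
/* For instance (a sketch of the rewrite done here), the front-end tree for
   "if (cond) f (); else g ();" becomes COND_EXPR <cond, f (), g ()>; when
   COND is a constant and the dead arm has no side effects, only the live
   arm is kept.  */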

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (CAN_HAVE_LOCATION_P (stmt) && !EXPR_HAS_LOCATION (stmt))
    SET_EXPR_LOCATION (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */
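
/* As a rough sketch of what this emits for "while (cond) body;":

       goto cont_label;
     top_label:
       body
     cont_label:
       if (cond != 0) goto top_label; else goto break_label;
     break_label:

   with break/continue inside BODY lowered to gotos by the bc_* machinery
   above; do-while loops omit the initial goto, and for-loops place the
   increment between the body and the condition test.  */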

static gimple_seq
gimplify_cp_loop (tree cond, tree body, tree incr, bool cond_is_first)
{
  gimple top, entry, stmt;
  gimple_seq stmt_list, body_seq, incr_seq, exit_seq;
  tree cont_block, break_block;
  location_t stmt_locus;

  stmt_locus = input_location;
  stmt_list = NULL;
  body_seq = NULL;
  incr_seq = NULL;
  exit_seq = NULL;
  entry = NULL;

  break_block = begin_bc_block (bc_break);
  cont_block = begin_bc_block (bc_continue);

  /* If condition is zero don't generate a loop construct.  */
  if (cond && integer_zerop (cond))
    {
      top = NULL;
      if (cond_is_first)
        {
          stmt = gimple_build_goto (get_bc_label (bc_break));
          gimple_set_location (stmt, stmt_locus);
          gimple_seq_add_stmt (&stmt_list, stmt);
        }
    }
  else
    {
      /* If we use a LOOP_EXPR here, we have to feed the whole thing
         back through the main gimplifier to lower it.  Given that we
         have to gimplify the loop body NOW so that we can resolve
         break/continue stmts, seems easier to just expand to gotos.  */
      top = gimple_build_label (create_artificial_label (stmt_locus));

      /* If we have an exit condition, then we build an IF with gotos either
         out of the loop, or to the top of it.  If there's no exit condition,
         then we just build a jump back to the top.  */
      if (cond && !integer_nonzerop (cond))
        {
          if (cond != error_mark_node)
            {
              gimplify_expr (&cond, &exit_seq, NULL, is_gimple_val, fb_rvalue);
              stmt = gimple_build_cond (NE_EXPR, cond,
                                        build_int_cst (TREE_TYPE (cond), 0),
                                        gimple_label_label (top),
                                        get_bc_label (bc_break));
              gimple_seq_add_stmt (&exit_seq, stmt);
            }

          if (cond_is_first)
            {
              if (incr)
                {
                  entry = gimple_build_label
                    (create_artificial_label (stmt_locus));
                  stmt = gimple_build_goto (gimple_label_label (entry));
                }
              else
                stmt = gimple_build_goto (get_bc_label (bc_continue));
              gimple_set_location (stmt, stmt_locus);
              gimple_seq_add_stmt (&stmt_list, stmt);
            }
        }
      else
        {
          stmt = gimple_build_goto (gimple_label_label (top));
          gimple_seq_add_stmt (&exit_seq, stmt);
        }
    }

  gimplify_stmt (&body, &body_seq);
  gimplify_stmt (&incr, &incr_seq);

  body_seq = finish_bc_block (bc_continue, cont_block, body_seq);

  gimple_seq_add_stmt (&stmt_list, top);
  gimple_seq_add_seq (&stmt_list, body_seq);
  gimple_seq_add_seq (&stmt_list, incr_seq);
  gimple_seq_add_stmt (&stmt_list, entry);
  gimple_seq_add_seq (&stmt_list, exit_seq);

  annotate_all_with_location (stmt_list, stmt_locus);

  return finish_bc_block (bc_break, break_block, stmt_list);
}

/* Gimplify a FOR_STMT node.  Move the stuff in the for-init-stmt into the
   prequeue and hand off to gimplify_cp_loop.  */

static void
gimplify_for_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;

  if (FOR_INIT_STMT (stmt))
    gimplify_and_add (FOR_INIT_STMT (stmt), pre_p);

  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (FOR_COND (stmt), FOR_BODY (stmt),
                                        FOR_EXPR (stmt), 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a WHILE_STMT node.  */

static void
gimplify_while_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (WHILE_COND (stmt), WHILE_BODY (stmt),
                                        NULL_TREE, 1));
  *stmt_p = NULL_TREE;
}

/* Gimplify a DO_STMT node.  */

static void
gimplify_do_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  gimple_seq_add_seq (pre_p,
                      gimplify_cp_loop (DO_COND (stmt), DO_BODY (stmt),
                                        NULL_TREE, 0));
  *stmt_p = NULL_TREE;
}

/* Genericize a SWITCH_STMT by turning it into a SWITCH_EXPR.  */
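
/* Roughly, "switch (c) { ...; break; ... }" is rewritten as a SWITCH_EXPR
   over C followed by a break label, with break statements in the body
   resolved to gotos targeting that label via the bc_break scope opened
   below.  */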

static void
gimplify_switch_stmt (tree *stmt_p, gimple_seq *pre_p)
{
  tree stmt = *stmt_p;
  tree break_block, body, t;
  location_t stmt_locus = input_location;
  gimple_seq seq = NULL;

  break_block = begin_bc_block (bc_break);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);

  t = build3 (SWITCH_EXPR, SWITCH_STMT_TYPE (stmt),
              SWITCH_STMT_COND (stmt), body, NULL_TREE);
  SET_EXPR_LOCATION (t, stmt_locus);
  gimplify_and_add (t, &seq);

  seq = finish_bc_block (bc_break, break_block, seq);
  gimple_seq_add_seq (pre_p, seq);
  *stmt_p = NULL_TREE;
}

/* Hook into the middle of gimplifying an OMP_FOR node.  This is required
   in order to properly gimplify CONTINUE statements.  Here we merely
   manage the continue stack; the rest of the job is performed by the
   regular gimplifier.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  tree cont_block;
  gimple stmt;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  /* Note that while technically the continue label is enabled too soon
     here, we should have already diagnosed invalid continues nested within
     statement expressions within the INIT, COND, or INCR expressions.  */
  cont_block = begin_bc_block (bc_continue);

  gimplify_and_add (for_stmt, &seq);
  stmt = gimple_seq_last_stmt (seq);
  if (gimple_code (stmt) == GIMPLE_OMP_FOR)
    gimple_omp_set_body (stmt, finish_bc_block (bc_continue, cont_block,
                                                gimple_omp_body (stmt)));
  else
    seq = finish_bc_block (bc_continue, cont_block, seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !TREE_NO_WARNING (stmt))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }

}

/* Gimplify a MUST_NOT_THROW_EXPR.  */
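
/* Roughly, MUST_NOT_THROW_EXPR <body> is lowered to a GIMPLE_TRY whose
   handler is an EH_MUST_NOT_THROW record naming std::terminate, so any
   exception escaping BODY terminates the program.  */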

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_node);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      ret = GS_OK;
      break;

    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        location_t loc = input_location;
        tree init = VEC_INIT_EXPR_INIT (*expr_p);
        int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
        gcc_assert (EXPR_HAS_LOCATION (*expr_p));
        input_location = EXPR_LOCATION (*expr_p);
        *expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
                                  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
                                  from_array,
                                  tf_warning_or_error);
        ret = GS_OK;
        input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
         LHS of an assignment might also be involved in the RHS, as in bug
         25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Otherwise fall through.  */
    case MODIFY_EXPR:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1)
                  || (TREE_CODE (op1) == CONSTRUCTOR
                      && CONSTRUCTOR_NELTS (op1) == 0
                      && !TREE_CLOBBER_P (op1))
                  || (TREE_CODE (op1) == CALL_EXPR
                      && !CALL_EXPR_RETURN_SLOT_OPT (op1)))
                 && is_really_empty_class (TREE_TYPE (op0)))
          {
            /* Remove any copies of empty classes.  We check that the RHS
               has a simple form so that TARGET_EXPRs and non-empty
               CONSTRUCTORs get reduced properly, and we leave the return
               slot optimization alone because it isn't a copy (FIXME so it
               shouldn't be represented as one).

               Also drop volatile variables on the RHS to avoid infinite
               recursion from gimplify_expr trying to load the value.  */
            if (!TREE_SIDE_EFFECTS (op1)
                || (DECL_P (op1) && TREE_THIS_VOLATILE (op1)))
              *expr_p = op0;
            else if (TREE_CODE (op1) == MEM_REF
                     && TREE_THIS_VOLATILE (op1))
              {
                /* Similarly for volatile MEM_REFs on the RHS.  */
                if (!TREE_SIDE_EFFECTS (TREE_OPERAND (op1, 0)))
                  *expr_p = op0;
                else
                  *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                    TREE_OPERAND (op1, 0), op0);
              }
            else
              *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                                op0, op1);
          }
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
      gimplify_for_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case WHILE_STMT:
      gimplify_while_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case DO_STMT:
      gimplify_do_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case SWITCH_STMT:
      gimplify_switch_stmt (expr_p, pre_p);
      ret = GS_OK;
      break;

    case OMP_FOR:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case CONTINUE_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_CONTINUE, NOT_TAKEN));
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_continue)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case BREAK_STMT:
      gimple_seq_add_stmt (pre_p, gimple_build_goto (get_bc_label (bc_break)));
      *expr_p = NULL_TREE;
      ret = GS_ALL_DONE;
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

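/* Return true if T is a PARM_DECL or RESULT_DECL that is passed or
   returned by invisible reference.  */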
static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

int
cxx_int_tree_map_eq (const void *va, const void *vb)
{
  const struct cxx_int_tree_map *a = (const struct cxx_int_tree_map *) va;
  const struct cxx_int_tree_map *b = (const struct cxx_int_tree_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hash (const void *item)
{
  return ((const struct cxx_int_tree_map *) item)->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (TREE_CODE (decl) == VAR_DECL && DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will be already too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  struct pointer_set_t *p_set;
  VEC (tree, heap) *bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
};

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  struct pointer_set_t *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (TREE_CODE (stmt) == VAR_DECL
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  if (is_invisiref_parm (stmt)
      /* Don't dereference parms in a thunk, pass the references through.  */
      && !(DECL_THUNK_P (current_function_decl)
           && TREE_CODE (stmt) == PARM_DECL))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && (TREE_CODE (stmt) == FUNCTION_DECL || TREE_CODE (stmt) == VAR_DECL)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = (struct cxx_int_tree_map *)
          htab_find_with_hash (cp_function_chain->extern_decl_map,
                               &in, in.uid);
      if (h)
        {
          *stmt_p = h->to;
          *walk_subtrees = 0;
          return NULL;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    {
      /* If in an OpenMP context, note var uses.  */
      if (__builtin_expect (wtd->omp_ctx != NULL, 0)
          && omp_var_to_track (TREE_OPERAND (stmt, 0)))
        omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
    /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
    *walk_subtrees = 0;
  else if (TREE_CODE (stmt) == OMP_CLAUSE)
    switch (OMP_CLAUSE_CODE (stmt))
      {
      case OMP_CLAUSE_LASTPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          {
            *walk_subtrees = 0;
            if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
              cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                            cp_genericize_r, data, NULL);
          }
        break;
      case OMP_CLAUSE_PRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        else if (wtd->omp_ctx != NULL)
          {
            /* Private clause doesn't cause any references to the
               var in outer contexts, avoid calling
               omp_cxx_notice_variable for it.  */
            struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
            wtd->omp_ctx = NULL;
            cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                          data, NULL);
            wtd->omp_ctx = old;
            *walk_subtrees = 0;
          }
        break;
      case OMP_CLAUSE_SHARED:
      case OMP_CLAUSE_FIRSTPRIVATE:
      case OMP_CLAUSE_COPYIN:
      case OMP_CLAUSE_COPYPRIVATE:
        /* Don't dereference an invisiref in OpenMP clauses.  */
        if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
          *walk_subtrees = 0;
        break;
      case OMP_CLAUSE_REDUCTION:
        gcc_assert (!is_invisiref_parm (OMP_CLAUSE_DECL (stmt)));
        break;
      default:
        break;
      }
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Due to the way voidify_wrapper_expr is written, we don't get a chance
     to lower this construct before scanning it, so we need to lower these
     before doing anything else.  */
  else if (TREE_CODE (stmt) == CLEANUP_STMT)
    *stmt_p = build2 (CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                             : TRY_FINALLY_EXPR,
                      void_type_node,
                      CLEANUP_BODY (stmt),
                      CLEANUP_EXPR (stmt));

  else if (TREE_CODE (stmt) == IF_STMT)
    {
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);
    }

  /* COND_EXPR might have incompatible types in branches if one or both
     arms are bitfields.  Fix it up now.  */
  else if (TREE_CODE (stmt) == COND_EXPR)
    {
      tree type_left
        = (TREE_OPERAND (stmt, 1)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
           : NULL_TREE);
      tree type_right
        = (TREE_OPERAND (stmt, 2)
           ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
           : NULL_TREE);
      if (type_left
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 1))))
        {
          TREE_OPERAND (stmt, 1)
            = fold_convert (type_left, TREE_OPERAND (stmt, 1));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_left));
        }
      if (type_right
          && !useless_type_conversion_p (TREE_TYPE (stmt),
                                         TREE_TYPE (TREE_OPERAND (stmt, 2))))
        {
          TREE_OPERAND (stmt, 2)
            = fold_convert (type_right, TREE_OPERAND (stmt, 2));
          gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                 type_right));
        }
    }

  else if (TREE_CODE (stmt) == BIND_EXPR)
    {
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (TREE_CODE (decl) == VAR_DECL
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      VEC_pop (tree, wtd->bind_expr_stack);
    }

  else if (TREE_CODE (stmt) == USING_STMT)
    {
      tree block = NULL_TREE;

      /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
         BLOCK, and append an IMPORTED_DECL to its
         BLOCK_VARS chained list.  */
      if (wtd->bind_expr_stack)
        {
          int i;
          for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
            if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
                                                     wtd->bind_expr_stack, i))))
              break;
        }
      if (block)
        {
          tree using_directive;
          gcc_assert (TREE_OPERAND (stmt, 0));

          using_directive = make_node (IMPORTED_DECL);
          TREE_TYPE (using_directive) = void_type_node;

          IMPORTED_DECL_ASSOCIATED_DECL (using_directive)
            = TREE_OPERAND (stmt, 0);
          DECL_CHAIN (using_directive) = BLOCK_VARS (block);
          BLOCK_VARS (block) = using_directive;
        }
      /* The USING_STMT won't appear in GENERIC.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }

  else if (TREE_CODE (stmt) == DECL_EXPR
           && TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
    {
      /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
      *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (stmt) == OMP_PARALLEL || TREE_CODE (stmt) == OMP_TASK)
    {
      struct cp_genericize_omp_taskreg omp_ctx;
      tree c, decl;
      splay_tree_node n;

      *walk_subtrees = 0;
      cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
      omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
      omp_ctx.default_shared = omp_ctx.is_parallel;
      omp_ctx.outer = wtd->omp_ctx;
      omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
      wtd->omp_ctx = &omp_ctx;
      for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
          case OMP_CLAUSE_SHARED:
          case OMP_CLAUSE_PRIVATE:
          case OMP_CLAUSE_FIRSTPRIVATE:
          case OMP_CLAUSE_LASTPRIVATE:
            decl = OMP_CLAUSE_DECL (c);
            if (decl == error_mark_node || !omp_var_to_track (decl))
              break;
            n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
            if (n != NULL)
              break;
            splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                               OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                               ? OMP_CLAUSE_DEFAULT_SHARED
                               : OMP_CLAUSE_DEFAULT_PRIVATE);
            if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
                && omp_ctx.outer)
              omp_cxx_notice_variable (omp_ctx.outer, decl);
            break;
          case OMP_CLAUSE_DEFAULT:
            if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
              omp_ctx.default_shared = true;
          default:
            break;
          }
      cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
      wtd->omp_ctx = omp_ctx.outer;
      splay_tree_delete (omp_ctx.variables);
    }
  else if (TREE_CODE (stmt) == CONVERT_EXPR)
    gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));

  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}

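/* Lower the C++-specific trees in the body of FNDECL to GENERIC, fixing up
   parameters and return values that are passed by invisible reference, and
   then hand the rest of the job to the C-family genericizer.  */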
void
cp_genericize (tree fndecl)
{
  tree t;
  struct cp_genericize_data wtd;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
        /* If a function's arguments are copied to create a thunk,
           then DECL_BY_REFERENCE will be set -- but the type of the
           argument will be a pointer type, so we will never get
           here.  */
        gcc_assert (!DECL_BY_REFERENCE (t));
        gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
        TREE_TYPE (t) = DECL_ARG_TYPE (t);
        DECL_BY_REFERENCE (t) = 1;
        TREE_ADDRESSABLE (t) = 0;
        relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
        {
          /* Adjust DECL_VALUE_EXPR of the original var.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (t) == DECL_NAME (var)
                  && DECL_HAS_VALUE_EXPR_P (var)
                  && DECL_VALUE_EXPR (var) == t)
                {
                  tree val = convert_from_reference (t);
                  SET_DECL_VALUE_EXPR (var, val);
                  break;
                }
        }
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  wtd.p_set = pointer_set_create ();
  wtd.bind_expr_stack = NULL;
  wtd.omp_ctx = NULL;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_genericize_r, &wtd, NULL);
  pointer_set_destroy (wtd.p_set);
  VEC_free (tree, heap, wtd.bind_expr_stack);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */
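
/* For array operands, the code built below is, roughly,

       p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     lab:
       fn (p1 [, p2]);
       p1 += sizeof (element);  [p2 += sizeof (element);]
       if (p1 != end1) goto lab;

   so FN is applied to every element; for non-array operands FN is called
   once on the addresses of ARG1 (and ARG2).  */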

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
        {
          inner_type = TREE_TYPE (inner_type);
          start1 = build4 (ARRAY_REF, inner_type, start1,
                           size_zero_node, NULL, NULL);
          if (arg2)
            start2 = build4 (ARRAY_REF, inner_type, start2,
                             size_zero_node, NULL, NULL);
        }
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
        start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1), NULL);
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          p2 = create_tmp_var (TREE_TYPE (start2), NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
          append_to_statement_list (t, &ret);
        }

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
        argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm), fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
        {
          t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
          t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
          append_to_statement_list (t, &ret);
        }

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
        argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
           parm = TREE_CHAIN (parm), i++)
        argarray[i] = convert_default_arg (TREE_VALUE (parm),
                                           TREE_PURPOSE (parm),
                                           fn, i);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl,
                             tree outer ATTRIBUTE_UNUSED)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return is_invisiref_parm (decl);
}

/* Return true if DECL is a const-qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      if (!is_invisiref_parm (decl))
        return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
        {
          /* NVR doesn't preserve const qualification of the
             variable's type.  */
          tree outer = outer_curly_brace_block (current_function_decl);
          tree var;

          if (outer)
            for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
              if (DECL_NAME (decl) == DECL_NAME (var)
                  && (TYPE_MAIN_VARIANT (type)
                      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
                {
                  if (TYPE_READONLY (TREE_TYPE (var)))
                    type = TREE_TYPE (var);
                  break;
                }
        }
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
        return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    {
      if (is_invisiref_parm (decl))
        inner_type = TREE_TYPE (inner_type);
      else
        {
          error ("%qE implicitly determined as %<firstprivate%> has reference type",
                 decl);
          make_shared = true;
        }
    }

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false))
    make_shared = true;

  if (make_shared)
    OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
}